query (stringlengths 7-3.85k) | document (stringlengths 11-430k) | metadata (dict) | negatives (sequencelengths 0-101) | negative_scores (sequencelengths 0-101) | document_score (stringlengths 3-10) | document_rank (stringclasses, 102 values)
---|---|---|---|---|---|---
startLiveReloadServer initializes a livereload to notify the browser of changes to code that does not need a recompile. | func startLiveReloadServer(tpls *template.Template, cfg *env.Config, staticAssets *static.Files) error {
	if cfg.IsProduction {
		return nil
	}
	log.Info("Initializing livereload")
	paths := []string{
		"assets",
		"templates",
	}
	tmplFn := func(name string) (bool, error) {
		templates, err := initTemplates(cfg, staticAssets)
		if err != nil {
			return false, err
		}
		*tpls = *templates
		return true, nil
	}
	mappings := livereload.ReloadMapping{
		".css":  nil,
		".js":   nil,
		".tmpl": tmplFn,
	}
	_, err := livereload.ListenAndServe(livereload.DefaultPort, paths, mappings)
	return err
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func reload(l net.Listener, httpServer *http.Server) error {\n\tconst errLoc = \"main.reload()\"\n\n\t// Making duplicate for socket descriptor\n\t// to use them in child process.\n\tfile, err := (l.(*net.TCPListener)).File()\n\tif err != nil {\n\t\treturn fmt.Errorf(\n\t\t\t\"%s: failed to get file of listener, reason -> %s\",\n\t\t\terrLoc, err.Error(),\n\t\t)\n\t}\n\tfd, err := syscall.Dup(int(file.Fd()))\n\tfile.Close()\n\tif err != nil {\n\t\treturn fmt.Errorf(\n\t\t\t\"%s: failed to dup(2) listener, reason -> %s\", errLoc, err.Error(),\n\t\t)\n\t}\n\tif err := os.Setenv(envVarName, fmt.Sprint(fd)); err != nil {\n\t\treturn fmt.Errorf(\n\t\t\t\"%s: failed to write fd into environment variable, reason -> %s\",\n\t\t\terrLoc, err.Error(),\n\t\t)\n\t}\n\n\t// Unlock PID file to start normally child process.\n\tdaemon.UnlockPidFile()\n\n\t// Start child process.\n\tcmd := exec.Command(daemon.AppPath)\n\tif err := cmd.Start(); err != nil {\n\t\treturn fmt.Errorf(\n\t\t\t\"%s: failed to start child process, reason -> %s\",\n\t\t\terrLoc, err.Error(),\n\t\t)\n\t}\n\n\tselect {\n\t// Waiting for notification from child process that it starts successfully.\n\t// In real application it's better to move generation of chan for this case\n\t// before calling cmd.Start() to be sure to catch signal in any case.\n\tcase <-func() <-chan os.Signal {\n\t\tsig := make(chan os.Signal)\n\t\tsignal.Notify(sig, syscall.SIGUSR1)\n\t\treturn sig\n\t}():\n\t// If child process stopped without sending signal.\n\tcase <-func() chan struct{} {\n\t\tch := make(chan struct{}, 1)\n\t\tgo func() {\n\t\t\tcmd.Wait()\n\t\t\tch <- struct{}{}\n\t\t}()\n\t\treturn ch\n\t}():\n\t\terr = fmt.Errorf(\"%s: child process stopped unexpectably\", errLoc)\n\t// Timeout for waiting signal from child process.\n\tcase <-time.After(10 * time.Second):\n\t\terr = fmt.Errorf(\n\t\t\t\"%s: child process is not responding, closing by timeout\", errLoc,\n\t\t)\n\t}\n\n\t// Dealing with keep-alive connections.\n\thttpServer.SetKeepAlivesEnabled(false)\n\ttime.Sleep(100 * time.Millisecond)\n\n\t// Close current listener (stop server in fact).\n\tl.Close()\n\treturn err\n}",
"func startServer() {\n\tapi, err := gobroem.NewAPI(options.db)\n\tif err != nil {\n\t\tlog.Fatal(\"can not open db\", err)\n\t}\n\n\thttp.ListenAndServe(\n\t\tfmt.Sprintf(\"%s:%d\", options.host, options.port),\n\t\tapi.Handler(\"/\", \"/static/\"),\n\t)\n}",
"func StartWebserver() {\n\tlog.Fatal(http.ListenAndServe(\":8080\", nil))\n}",
"func RunServer(host string, port int) {\n\thandleWebsocket()\n\thandlePublicFiles()\n\n\tlogger.Println(fmt.Sprintf(\"server started at http://:%d/\", port))\n\tlog.Fatal(http.ListenAndServe(fmt.Sprintf(\":%d\", port), nil))\n}",
"func InitializeServer(host string) (server *network.WebServer) {\n\trand.Seed(time.Now().UTC().UnixNano())\n\t// Make sure folders exist that we want:\n\tif err := ensureBindDirs(); err != nil {\n\t\tLog.Error(\"Failed to have home working dir to put the files into at ~/Desktop/bind, err: \", err)\n\t} else {\n\t\tLog.Info(\"bind dirs ensured!\")\n\t}\n\tif os.Args[0] != \"d\" { //development mode\n\t\tgin.SetMode(gin.ReleaseMode)\n\t}\n\tr := gin.New()\n\tr.LoadHTMLGlob(\"public/tmpl/*.html\")\n\tr.StaticFS(\"/videos\", http.Dir(basePath+\"/videos\"))\n\tr.StaticFS(\"/frames\", http.Dir(basePath+\"/frames\"))\n\tr.Static(\"/public\", \"./public\")\n\tr.GET(\"/\", getIndex)\n\tr.POST(\"/g\", postIndex)\n\tr.GET(\"/g\", getIndex)\n\tr.GET(\"/about\", getAbout)\n\tr.GET(\"/jobs\", getJobs)\n\tr.GET(\"/code\", getCode)\n\tmel = melody.New() // melody middleware\n\n\t// websocket route\n\tr.GET(\"/ws\",func(ctx *gin.Context){\n\t\t// handle request with Melody\n\t\tmel.HandleRequest(ctx.Writer,ctx.Request)\n\t})\n\n\t// Melody message handler\n\tmel.HandleMessage(func(ses *melody.Session,msg []byte){\n\t\t// broadcast message to connected sockets\n\t\tmel.Broadcast(msg)\n\t})\n\n\n\tr.GET(\"/openframes\", func(c *gin.Context) {\n\t\topen.Run(basePath + \"/frames\")\n\t})\n\tr.GET(\"/openvideos\", func(c *gin.Context) {\n\t\topen.Run(basePath + \"/videos\")\n\t})\n\tr.GET(\"/openlogs\", func(c *gin.Context) {\n\t\topen.Run(basePath + \"/logs\")\n\t})\n\tr.GET(\"/toggleClipYt\", func(c *gin.Context) {\n\t\topen.Run(basePath + \"/logs\")\n\t})\n\t// go requests(mel)\n\t// go jobUpdates(mel)\n\n\treturn network.InitializeWebServer(r, host)\n}",
"func startServer(jsonarticle interface{}) {\n\te := echo.New()\n\t// e.Use(livereload.LiveReload())\n\n\te.GET(\"/scrape\", func(f echo.Context) error {\n\t\treturn f.JSON(http.StatusOK, jsonarticle)\n\t})\n\n\te.Logger.Fatal(e.Start(\":5000\"))\n\n}",
"func runServer() {\n\t// listen and serve on 0.0.0.0:8080 (for windows \"localhost:8080\")\n\tlog.Fatalln(router.Run(fmt.Sprintf(\":%s\", env.AppPort)))\n}",
"func (server *Server) runServer() {\n\tserver.G.Go(func() error {\n\t\tserver.API.log.Info(\"running server %v\", server.config.Server.ListenAddr)\n\t\treturn http.ListenAndServe(server.config.Server.ListenAddr, server.Server.Handler)\n\t})\n}",
"func (s *Server) StartServer() error {\n\tvar err error\n\ts.core, err = core.NewCore()\n\tif err != nil {\n\t\treturn err\n\t}\n\ts.r = mux.NewRouter()\n\ts.r.HandleFunc(\"/\", func(w http.ResponseWriter, r *http.Request) {\n\t\thttp.ServeFile(w, r, \"web_files/test.html\")\n\t})\n\ts.r.HandleFunc(\"/home\", s.core.HomeHandler)\n\ts.r.HandleFunc(\"/register\", s.core.RegisterHandler)\n\ts.r.HandleFunc(\"/login\", s.core.LoginHandler)\n\ts.r.HandleFunc(\"/test\", s.core.TestHandler)\n\ts.r.HandleFunc(\"/key/new/\", func(w http.ResponseWriter, r *http.Request) {\n\t\tcore.NewKey(w, r, &s.state)\n\t})\n\ts.r.HandleFunc(\"/key/aes/\", func(w http.ResponseWriter, r *http.Request) {\n\t\tcore.ImportAESKey(w, r, &s.state)\n\t})\n\ts.r.HandleFunc(\"/off/1234\", func(w http.ResponseWriter, r *http.Request) {\n\t\tw.WriteHeader(http.StatusOK)\n\t\tw.Write([]byte(\"shutdown\"))\n\t\ts.degradation <- 0\n\t})\n\ts.r.HandleFunc(\"/hello\", notImplemented)\n\thttp.Handle(\"/\", s.r)\n\tlog.Fatal(http.ListenAndServe(s.core.Config.Server.Port, handlers.CORS(handlers.AllowedHeaders([]string{\"X-Requested-With\", \"Content-Type\", \"Authorization\"}), handlers.AllowedMethods([]string{\"GET\", \"POST\", \"PUT\", \"HEAD\", \"OPTIONS\"}), handlers.AllowedOrigins([]string{\"*\"}))(s.r)))\n\treturn nil\n}",
"func StartServer() {\n\tif server == nil {\n\t\tGetInstance()\n\t}\n\n\tlog.Println(\"starting server on http://localhost\" + defaultPort)\n\tserver.Run(defaultPort)\n}",
"func startServer(t *testing.T, server *http.Server) {\n\tgo func() {\n\t\terr := server.ListenAndServe()\n\t\tif !errors.Is(err, http.ErrServerClosed) {\n\t\t\trequire.NoError(t, err)\n\t\t}\n\t}()\n}",
"func StartServer(lc fx.Lifecycle, cfg infrastructure.Configuration, logger *zap.Logger, mux *mux.Router,\n\tgw ControllersGatewayFx) {\n\tprefix := NewVersioning(cfg)\n\trouteControllers(prefix, mux, gw)\n\tlogServer(cfg, logger, prefix)\n\tstartServer(lc, cfg, mux)\n}",
"func (scu *SSH) ReloadHTTPServer() error {\n\tdefer scu.Client.Close()\n\tsession, err := scu.Client.NewSession()\n\tif err != nil {\n\t\treturn err\n\t}\n\t// Reload the scu\n\tif err := session.Run(scu.Config.RestartCMD); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}",
"func startServer(port string, handler http.Handler) {\n\terr := http.ListenAndServe(port, handler)\n\tif err != nil {\n\t\tlogger.Fatal(\"ListenAndServe: \", err)\n\t}\n}",
"func RunServer(configFile string) {\n\tc := make(chan os.Signal, 1)\n\tsignal.Notify(c, syscall.SIGTERM)\n\tserver, err := NewServer(configFile)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tlog.Info(\"Gohan no jikan desuyo (It's time for dinner!)\")\n\tlog.Info(\"Build version: %s\", version.Build.Version)\n\tlog.Info(\"Build timestamp: %s\", version.Build.Timestamp)\n\tlog.Info(\"Build host: %s\", version.Build.Host)\n\tlog.Info(\"Starting Gohan Server...\")\n\taddress := server.address\n\tif strings.HasPrefix(address, \":\") {\n\t\taddress = \"localhost\" + address\n\t}\n\tprotocol := \"http\"\n\tif server.tls != nil {\n\t\tprotocol = \"https\"\n\t}\n\tlog.Info(\" API Server %s://%s/\", protocol, address)\n\tlog.Info(\" Web UI %s://%s/webui/\", protocol, address)\n\tgo func() {\n\t\tfor range c {\n\t\t\tlog.Info(\"Stopping the server...\")\n\t\t\tlog.Info(\"Tearing down...\")\n\t\t\tlog.Info(\"Stopping server...\")\n\t\t\tserver.Stop()\n\t\t}\n\t}()\n\tserver.running = true\n\tserver.masterCtx, server.masterCtxCancel = context.WithCancel(context.Background())\n\n\tserver.startSyncProcesses()\n\n\tstartCRONProcess(server)\n\tmetrics.StartMetricsProcess()\n\terr = server.Start()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}",
"func (s *Server) Reload(paths []string) {\n\tcmd := cmdCSS\n\tfor _, path := range paths {\n\t\tif !strings.HasSuffix(path, \".css\") {\n\t\t\tcmd = cmdPage\n\t\t}\n\t}\n\ts.logger.SayAs(\"debug\", \"livereload %s, files changed: %s\", cmd, paths)\n\ts.broadcast <- cmd\n}",
"func BlobServerStart() {\n\thttp.Handle(\n\t\t\"/b/\",\n\t\thttp.StripPrefix(\n\t\t\t\"/b/\",\n\t\t\thttp.FileServer(http.Dir(\"bin/blobs\")),\n\t\t),\n\t)\n\thttp.Handle(\n\t\t\"/t/\",\n\t\thttp.StripPrefix(\n\t\t\t\"/t/\",\n\t\t\thttp.FileServer(http.Dir(\"bin/tags\")),\n\t\t),\n\t)\n\thttp.Handle(\n\t\t\"/c/\",\n\t\thttp.StripPrefix(\"/c/\",\n\t\t\thttp.FileServer(http.Dir(\"bin/commits\")),\n\t\t),\n\t)\n\thttp.ListenAndServe(\":8080\", nil)\n\tfmt.Println(\"Listening on :8080\")\n}",
"func StartServer(apiKey string) {\n\t// http.ListenAndServe(\":8080\", http.FileServer(http.Dir(\"./public\")))\n\tr := httprouter.New()\n\tr.GET(\"/\", HomeHandler)\n\tr.GET(\"/assets/css/:stylesheet\", CSSAssetHandler)\n\tr.GET(\"/assets/js/:script\", JSAssetHandler)\n\n\tapi.BootstrapAPI(&r, apiKey)\n\n\thttp.ListenAndServe(\":8080\", r)\n}",
"func (s *Server) Serve() error {\n\tloadTemplate()\n\thttp.HandleFunc(\"/watch\", s.watchChanges)\n\thttp.HandleFunc(\"/\", s.home)\n\treturn http.ListenAndServe(\":5152\", nil)\n}",
"func startServer(t testing.TB, h jsonrpc2.Handler) net.Listener {\n\tlistener, err := net.Listen(\"tcp\", bindAddr)\n\tif err != nil {\n\t\tt.Fatal(\"Listen:\", err)\n\t}\n\tgo func() {\n\t\tif err := serve(context.Background(), listener, h); err != nil && !strings.Contains(err.Error(), \"use of closed network connection\") {\n\t\t\tt.Fatal(\"jsonrpc2.Serve:\", err)\n\t\t}\n\t}()\n\treturn listener\n}",
"func StartServer() {\n\tfmt.Println(\"Server is started at 8082\")\n\thttp.ListenAndServe(\":8082\", r)\n}",
"func StartListening() {\n\thttp.HandleFunc(\"/health\", GenerateHandler(\"^/health$\", HealthHandler))\n\thttp.HandleFunc(\"/static/\", GenerateHandler(\"^/(static/(js/|css/|media/)[a-zA-Z0-9._]*)$\", FileHandler))\n\thttp.HandleFunc(\"/audits/\", GenerateHandler(\"^/(static/[a-zA-Z0-9._-]*)$\", FileHandler))\n\thttp.HandleFunc(\"/api/\", GenerateHandler(\"^/api/(get/(all|inventory|host))$\", APIHandler))\n\thttp.HandleFunc(\"/\", GenerateHandler(\"^/(.*)$\", FileHandler))\n\ta := fmt.Sprintf(\"%s:%s\", config.Host, config.Port)\n\tlogger.Infof(\"Start listening \\\"%s\\\"...\", a)\n\tlogger.Fatale(http.ListenAndServe(a, nil), \"Server crashed !\")\n}",
"func StartServer(config configuration.Config) {\n\tConfig = config\n\n\tportStr := strconv.FormatInt(config.ServerPort, 10)\n\tserverAddrStr := config.ServerAddress + \":\" + portStr\n\thttp.HandleFunc(\"/webhook\", hookHandler)\n\n\tlog.Printf(\"Listening at %s for inncomming webhook\", serverAddrStr)\n\tlog.Fatal(http.ListenAndServe(serverAddrStr, nil))\n}",
"func StartServer(config_path string) *HTMLServer {\n\n\tconfig.getConf(config_path)\n\n\tgo submitor.BuildImage(config.Marker.ImageName)\n\n\tvar i handlers.HandlerFunc\n\ti = &handlers.Handler{config.ServerConfig.TemplatePath, config.Marker, config.Labs}\n\n\tstore, rate := initalize_redis(&config.Redis)\n\tmiddleware := stdlib.NewMiddleware(limiter.New(*store, *rate))\n\n\trouter := mux.NewRouter()\n\n\trouter.Handle(\"/\", middleware.Handler(http.HandlerFunc(i.HandleIndex)))\n\trouter.HandleFunc(\"/upload\", i.Upload)\n\trouter.PathPrefix(\"/js/\").Handler(http.StripPrefix(\"/js/\", http.FileServer(http.Dir(\"./templates/js/\"))))\n\trouter.PathPrefix(\"/css/\").Handler(http.StripPrefix(\"/css/\", http.FileServer(http.Dir(\"./templates/css/\"))))\n\n\thtmlServer := HTMLServer{\n\t\tserver: &http.Server{\n\t\t\tAddr: config.ServerConfig.Host + \":\" + config.ServerConfig.ServerPort,\n\t\t\tHandler: router,\n\t\t\tReadTimeout: time.Second * time.Duration(config.ServerConfig.ReadTimeout),\n\t\t\tWriteTimeout: time.Second * time.Duration(config.ServerConfig.WriteTimeout),\n\t\t\tMaxHeaderBytes: 1 << 20,\n\t\t\t//ErrorLog: ,\n\t\t},\n\t}\n\n\thtmlServer.wg.Add(1)\n\n\tgo func() {\n\t\tlog.Info(\"HTMLServer : Service started : Host=\", config.ServerConfig.Host, \":\", config.ServerConfig.ServerPort)\n\t\terr := htmlServer.server.ListenAndServe()\n\t\tif err != nil {\n\t\t\tlog.Info(\"HTTP server failed to start \", err)\n\t\t}\n\t\thtmlServer.wg.Done()\n\t}()\n\n\treturn &htmlServer\n\n}",
"func Serverstart() {\n\n\t//defer connection to database until all db operations are completed\n\tdefer dbmap.Db.Close()\n\trouter := Router()\n\trouter.Run(\":9000\")\n}",
"func StartServer() {\n\t// Initialize\n\tinitialize()\n\n\t// Centralized middleware for error handling\n\tr := middleware.NewRecovery()\n\tm := middleware.With(http.HandlerFunc(omikujiHandler), r)\n\thttp.Handle(\"/omikuji\", m)\n\tif err := http.ListenAndServe(\":8080\", nil); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}",
"func (s *Server) startDevServer() (cleanup func()) {\n\troot := gitRootDir()\n\twebClientPath := filepath.Join(root, \"client\", \"web\")\n\n\tyarn := filepath.Join(root, \"tool\", \"yarn\")\n\tnode := filepath.Join(root, \"tool\", \"node\")\n\tvite := filepath.Join(webClientPath, \"node_modules\", \".bin\", \"vite\")\n\n\tlog.Printf(\"installing JavaScript deps using %s... (might take ~30s)\", yarn)\n\tout, err := exec.Command(yarn, \"--non-interactive\", \"-s\", \"--cwd\", webClientPath, \"install\").CombinedOutput()\n\tif err != nil {\n\t\tlog.Fatalf(\"error running tailscale web's yarn install: %v, %s\", err, out)\n\t}\n\tlog.Printf(\"starting JavaScript dev server...\")\n\tcmd := exec.Command(node, vite)\n\tcmd.Dir = webClientPath\n\tcmd.Stdout = os.Stdout\n\tcmd.Stderr = os.Stderr\n\tif err := cmd.Start(); err != nil {\n\t\tlog.Fatalf(\"Starting JS dev server: %v\", err)\n\t}\n\tlog.Printf(\"JavaScript dev server running as pid %d\", cmd.Process.Pid)\n\treturn func() {\n\t\tcmd.Process.Signal(os.Interrupt)\n\t\terr := cmd.Wait()\n\t\tlog.Printf(\"JavaScript dev server exited: %v\", err)\n\t}\n}",
"func (s *Server) starDebugServer() {\n\tglog.GLog.Infof(\"start debug server on %v\", s.conf.DebugServer)\n\tif err := http.ListenAndServe(s.conf.DebugServer, nil); err != nil {\n\t\tglog.GLog.Fatalln(err)\n\t}\n}",
"func (ws *WebServer) Start() {\n\tlog.Logger.Info(\"Launching webserver\")\n\tlastRun := &run.Result{}\n\n\ttemplate, err := sysutil.CreateTemplate(serverTemplatePath)\n\tif err != nil {\n\t\tws.Errors <- err\n\t\treturn\n\t}\n\n\tm := mux.NewRouter()\n\taddStatusEndpoints(m)\n\tstatusPageHandler := &StatusPageHandler{\n\t\ttemplate,\n\t\tlastRun,\n\t\tws.Clock,\n\t}\n\thttp.Handle(\"/\", statusPageHandler)\n\tm.PathPrefix(\"/static/\").Handler(http.StripPrefix(\"/static/\", http.FileServer(http.Dir(\"/static\"))))\n\tforceRunHandler := &ForceRunHandler{\n\t\tws.RunQueue,\n\t}\n\tm.PathPrefix(\"/api/v1/forceRun\").Handler(forceRunHandler)\n\tm.PathPrefix(\"/\").Handler(statusPageHandler)\n\n\tgo func() {\n\t\tfor result := range ws.RunResults {\n\t\t\t*lastRun = result\n\t\t}\n\t}()\n\n\terr = http.ListenAndServe(fmt.Sprintf(\":%v\", ws.ListenPort), m)\n\tws.Errors <- err\n}",
"func startServer() {\n\t// index file\n\thttp.HandleFunc(\"/\", func(w http.ResponseWriter, r *http.Request) {\n\t\thttp.Redirect(w, r, \"/static/\", http.StatusFound)\n\t}) //设置访问的路由\n\n\t// static file\n\thttp.HandleFunc(\"/static/\", func(w http.ResponseWriter, r *http.Request) {\n\t\thttp.ServeFile(w, r, r.URL.Path[1:])\n\t})\n\n\t// other logic handlers\n\thttp.HandleFunc(\"/rank\", rank)\n\thttp.HandleFunc(\"/top\", top)\n\t//\thttp.HandleFunc(\"/update\", update)\n\n\terr := http.ListenAndServe(\":9090\", nil) //设置监听的端口\n\tif err != nil {\n\t\tlog.Fatal(\"ListenAndServe: \", err)\n\t}\n}",
"func initialiseServer() {\n\tport := \":8090\"\n\tlog.Printf(\"Starting HTTP server at http://localhost:%s\", port)\n\n\t// Attach request handlers\n\thttp.HandleFunc(\"/api/v1/vehicles\", liveDataRequestHandler)\n\thttp.HandleFunc(\"/health\", healthEndpoint)\n\thttp.HandleFunc(\"/\", healthEndpoint)\n\n\t// Start HTTP server\n\tlog.Fatal(http.ListenAndServe(port, nil))\n}",
"func StartServer(flightsDB *flightsdb.FlightsDB) {\n\tdb = flightsDB\n\tapp := fiber.New()\n\tapp.Settings.ErrorHandler = ErrorHandler\n\tAddMiddleware(app)\n\tAddRoutes(app)\n\tapp.Listen(8080)\n}",
"func startWsServer(listen_addr string) {\n\t//hub = newHub()\n\tgo hub.Run()\n\n\t//http.HandleFunc(\"/\", cmdHandler)\n\thttp.HandleFunc(\"/upgrade\", func(w http.ResponseWriter, r *http.Request) {\n\t\tserveWs(hub, w, r)\n\t})\n\terr := http.ListenAndServe(listen_addr, nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"Could not listen to %s: %s\", listen_addr, err)\n\t}\n}",
"func (s *Refresh) StartBackgroundRefresh() {\n\tgo s.FindServerStateAdded()\n}",
"func updServer() {\n\t// starts a udp listener (connection) on port \"udpPort\"\n\t// remember connections are resources and need to be closed (aka 'freed') if opened ;)\n\t// use a \"for loop\" to continuously \"handle\" incoming messages (i.e use \"handleMessage\")\n\t// if there's an error when starting server, log.Fatal ;)\n}",
"func (srv *Server) StartServer() {\n\thttp.HandleFunc(\"/\", srv.rpcHandler)\n\tlog.Fatal(http.ListenAndServe(srv.addr, nil))\n}",
"func (s *Server) OnStart(ctx context.Context) error { s.run(ctx); return nil }",
"func StartServer(tlsCertFile, tlsKeyFile string) error {\n\tlogger := &log.Std{Debug: true}\n\n\tmutator := mutating.MutatorFunc(sidecarInjectMutator)\n\n\tconfig := mutating.WebhookConfig{\n\t\tName: \"fluentdSidecarInjector\",\n\t\tObj: &corev1.Pod{},\n\t}\n\twebhook, err := mutating.NewWebhook(config, mutator, nil, nil, logger)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Failed to create webhook: %s\", err)\n\t}\n\n\thandler, err := webhookhttp.HandlerFor(webhook)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Failed to create webhook handler: %s\", err)\n\t}\n\n\tlogger.Infof(\"Listing on :8080\")\n\terr = http.ListenAndServeTLS(\":8080\", tlsCertFile, tlsKeyFile, handler)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Failed to start server: %s\", err)\n\t}\n\n\treturn nil\n\n}",
"func (m *MetaNode) startServer() (err error) {\n\t// initialize and start the server.\n\tm.httpStopC = make(chan uint8)\n\tln, err := net.Listen(\"tcp\", \":\"+m.listen)\n\tif err != nil {\n\t\treturn\n\t}\n\tgo func(stopC chan uint8) {\n\t\tdefer ln.Close()\n\t\tfor {\n\t\t\tconn, err := ln.Accept()\n\t\t\tselect {\n\t\t\tcase <-stopC:\n\t\t\t\treturn\n\t\t\tdefault:\n\t\t\t}\n\t\t\tif err != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tgo m.serveConn(conn, stopC)\n\t\t}\n\t}(m.httpStopC)\n\tlog.LogInfof(\"start server over...\")\n\treturn\n}",
"func RunServer() {\n\tserverMutex.Lock()\n\tdefer serverMutex.Unlock()\n\tif serverStarted {\n\t\treturn\n\t}\n\n\terr := initialiseServer()\n\tif err != nil {\n\t\tlog.Panicln(\"server:\", err)\n\t}\n\tlog.Println(\"server: Server starting at\", config.Address)\n\tserverStarted = true\n\tgo func() {\n\t\terr = server.ListenAndServe()\n\t\tif err != http.ErrServerClosed {\n\t\t\tlog.Println(\"server:\", err)\n\t\t}\n\t}()\n}",
"func StartServer() {\n\thandlePesquisa()\n\n\tlog.Info.Println(\"WebServer started...\")\n\thttp.ListenAndServe(\":8080\", httpLogger.WriteLog(http.DefaultServeMux, os.Stdout))\n}",
"func StartServer() {\n\tvar wait time.Duration\n\tflag.DurationVar(&wait, \"graceful-timeout\", time.Second*15, \"the duration for which the server gracefully wait for existing connections to finish - e.g. 15s or 1m\")\n\tflag.Parse()\n\n\tr := mux.NewRouter()\n\troutes.RegisterRouter(r)\n\t//check db before staring web\n\tdb.STRG.Migrator()\n\tport := os.Getenv(\"PORT\")\n\taddress := \":\" + port\n\tsrv := &http.Server{\n\t\tAddr: address,\n\t\tWriteTimeout: time.Second * 15,\n\t\tReadTimeout: time.Second * 15,\n\t\tIdleTimeout: time.Second * 60,\n\t\tHandler: r, // Pass our instance of gorilla/mux in.\n\t}\n\n\tfmt.Println(\"Starting Server\")\n\tgo func() {\n\t\tif err := srv.ListenAndServe(); err != nil {\n\t\t\tlog.Println(err)\n\t\t}\n\t}()\n\n\tc := make(chan os.Signal, 1)\n\tsignal.Notify(c, os.Interrupt)\n\t<-c\n\tctx, cancel := context.WithTimeout(context.Background(), wait)\n\tdefer cancel()\n\tsrv.Shutdown(ctx)\n\tlog.Println(\"shutting down\")\n\tos.Exit(0)\n}",
"func startServerMode() {\n\t// Create or open log directory\n\tf, err := os.OpenFile(WORKDIR+`/server.log`, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)\n\tif err != nil {\n\t\tl(err.Error(), true, true)\n\t}\n\tdefer f.Close()\n\tlog.SetOutput(f)\n\tl(\"Starting server...\", false, true)\n\tvar listener net.Listener\n\tif appConfig.Tls {\n\t\tcert, err := tls.LoadX509KeyPair(WORKDIR+\"/cert.pem\", WORKDIR+\"/key.pem\")\n\t\tcheckErr(\"Unable to import TLS certificates\", err, true)\n\t\tconfig := tls.Config{Certificates: []tls.Certificate{cert}}\n\t\tnow := time.Now()\n\t\tconfig.Time = func() time.Time { return now }\n\t\tconfig.Rand = rand.Reader\n\t\tlistener, err = tls.Listen(\"tcp\", appConfig.Server.Address+\":\"+appConfig.Server.Port, &config)\n\t\tcheckErr(\"Unable to create TLS listener\", err, false)\n\t} else {\n\t\tvar err error\n\t\tlistener, err = net.Listen(\"tcp\", appConfig.Server.Address+\":\"+appConfig.Server.Port)\n\t\tcheckErr(\"Unable to create listener\", err, true)\n\t}\n\tgo server.start()\n\tif len(appConfig.Api.Port) > 0 {\n\t\tgo startHttpServer()\n\t}\n\tfor {\n\t\tconnection, err := listener.Accept()\n\t\tcheckErr(\"Unable to accept incoming connection\", err, true)\n\t\tclient := &Client{socket: connection, data: make(chan Job)}\n\t\tserver.register <- client\n\t\tgo server.receive(client)\n\t\tgo server.send(client)\n\t}\n}",
"func (server *Server) Start() {\n\tmux := http.NewServeMux()\n\n\tfileServer := server.attachStaticFileServer(mux)\n\tserver.attachSystemJSRewriteHandler(mux)\n\tserver.attachCustomHandlers(mux)\n\n\tif server.hub != nil {\n\t\t// add HMR support\n\t\tserver.attachIndexInjectionListener(mux, fileServer)\n\t\tserver.attachWebSocketListeners(mux, server.hub)\n\t\tgo server.hub.run()\n\t}\n\n\tserver.srv = &http.Server{\n\t\tAddr: makeServerAddress(server.port),\n\t\tHandler: mux,\n\t}\n\n\tif err := server.srv.ListenAndServe(); err != nil {\n\t\tpanic(err)\n\t}\n}",
"func RunServer() {\n\tgo func() {\n\t\tcounter := 5\n\t\tfor CurrentServer == nil {\n\t\t\tutils.Sugar.Warn(\"Server was not created waiting for a one second\")\n\t\t\ttime.Sleep(1 * time.Second)\n\t\t\tcounter = counter - 1\n\t\t\tif counter <= 0 {\n\t\t\t\tutils.Sugar.Error(\"Server was not created in time\")\n\t\t\t\tstopChan <- syscall.SIGINT\n\t\t\t}\n\t\t}\n\n\t\trunning = true\n\t\terr := CurrentServer.ListenAndServe()\n\t\tif err != nil {\n\t\t\tif err == http.ErrServerClosed {\n\t\t\t\tutils.Sugar.Info(\"Server was closed\")\n\t\t\t} else {\n\t\t\t\tutils.Sugar.Error(\"Error while running server: \", err)\n\t\t\t}\n\t\t}\n\t\trunning = false\n\t}()\n\tutils.Sugar.Info(\"Server started\")\n}",
"func RunServer() {\n\tapp := applicationContext{\n\t\tconfig: config.LoadConfig(),\n\t\ttrackerLevel: RATIOLESS,\n\t}\n\n\tmux := http.NewServeMux()\n\n\tmux.HandleFunc(\"/announce\", app.requestHandler)\n\tmux.HandleFunc(\"/scrape\", scrapeHandler)\n\thttp.ListenAndServe(\":3000\", mux)\n}",
"func (s *TodoServer) StartServer() error {\n\treturn s.httpServer.ListenAndServe()\n}",
"func startServer(socketPath string) error {\n\tlogrus.Debugf(\"Starting server...\")\n\n\tif config.path == \"\" {\n\t\tpath, err := os.MkdirTemp(\"\", \"test_volume_plugin\")\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"getting directory for plugin: %w\", err)\n\t\t}\n\t\tconfig.path = path\n\t} else {\n\t\tpathStat, err := os.Stat(config.path)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unable to access requested plugin state directory: %w\", err)\n\t\t}\n\t\tif !pathStat.IsDir() {\n\t\t\treturn fmt.Errorf(\"cannot use %v as plugin state dir as it is not a directory\", config.path)\n\t\t}\n\t}\n\n\thandle := makeDirDriver(config.path)\n\tlogrus.Infof(\"Using %s for volume path\", config.path)\n\n\tserver := volume.NewHandler(handle)\n\tif err := server.ServeUnix(socketPath, 0); err != nil {\n\t\treturn fmt.Errorf(\"starting server: %w\", err)\n\t}\n\treturn nil\n}",
"func RunServer(server *ophttp.Server) {\n\thttp.Handle(\"/greeting\", http.HandlerFunc(GreetingHandler))\n\tserver.Start()\n}",
"func (w *Webserver) Start() error {\n\n\t// listenAndServe the server\n\tgo func() {\n\t\tw.logger.Infof(\"Http server listening at %d!\", w.config.Port)\n\t\terr := w.listenAndServe()\n\t\tif err != nil && err != http.ErrServerClosed {\n\t\t\tw.logger.Errorw(fmt.Sprintf(\"webserver listening at port [%v] stopped\", w.config.Port), \"error\", err.Error())\n\t\t}\n\t}()\n\n\treturn nil\n}",
"func StartServer(router *mux.Router) error {\n\trouter.Handle(\"/ws\", websocket.Handler(socketHandler))\n\treturn nil\n}",
"func StartWebServer(pubSub *pubsub.PubSub) {\n\t// setup web server\n\te := echo.New()\n\te.HideBanner = true\n\te.Use(middleware.Logger())\n\n\t// disable CORS on the web server if desired\n\tdisableCORS = viper.GetBool(\"server_settings.disablecors\")\n\tif disableCORS {\n\t\tlogger.Warn(\"Running in disabled CORS mode. This is very dangerous! Be careful!\")\n\t\te.Use(middleware.CORSWithConfig(middleware.CORSConfig{\n\t\t\tAllowOrigins: []string{\"*\"},\n\t\t\tAllowHeaders: []string{echo.HeaderOrigin, echo.HeaderContentType, echo.HeaderAccept},\n\t\t}))\n\t}\n\n\tc, _ := handlers.NewContainer()\n\n\t// GetLogstationName - Get Logstation Name\n\te.GET(\"/settings/logstation-name\", c.GetLogstationName)\n\n\t// GetSettingsSyntax - Get Syntax Colors\n\te.GET(\"/settings/syntax\", c.GetSettingsSyntax)\n\n\t// package up the built web files and serve them to the clients\n\tfsys, err := fs.Sub(webServerFiles, \"web/dist\")\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error loading the web files into the server. error msg: %s\", err))\n\t}\n\tfileHandler := http.FileServer(http.FS(fsys))\n\te.GET(\"/*\", echo.WrapHandler(fileHandler))\n\n\t// pass message broker channel into websocket handler\n\twsHandlerChan := func(c echo.Context) error {\n\t\treturn WebSocketHandler(c, pubSub)\n\t}\n\te.GET(\"/ws\", wsHandlerChan)\n\n\t// start the web server\n\te.Logger.Fatal(e.Start(viper.GetString(\"server_settings.webserveraddress\") + \":\" + viper.GetString(\"server_settings.webserverport\")))\n}",
"func startServer(wg *sync.WaitGroup) {\n\tdefer wg.Done()\n\n\tlistener, err := net.Listen(\"tcp\", fmt.Sprintf(\"%s:%d\", MyHandle.Host, MyHandle.Port))\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to startServer: %v\", err)\n\t}\n\n\tgrpcServer := grpc.NewServer()\n\tapi.RegisterGoChatServer(grpcServer, &chatServer{})\n\n\terr = grpcServer.Serve(listener)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to serve: %v\", err)\n\t}\n}",
"func (a *Api) serveLiveStatic(res http.ResponseWriter, req *http.Request) {\n\tdata, err := ioutil.ReadFile(\"livecrowdin.html\")\n\tif err != nil {\n\t\tlog.Printf(\"templates - failure to read index.html\")\n\t}\n\tres.Header().Set(\"content-type\", \"text/html\")\n\tres.WriteHeader(200)\n\tres.Write(data)\n\treturn\n}",
"func main() {\n\twebserver.ServerStart()\n\twebserver.ServerRequest()\n}",
"func startHTTPListener() {\n\thttp.ListenAndServe(\":\"+GetConfig().Port, nil)\n}",
"func StartServer(s *server.ScrapeServer) {\n\thttp.HandleFunc(\"/\", s.Handle())\n\thttp.ListenAndServe(\":8080\", nil)\n}",
"func (s *Subscriber) StartHTTPServer() error {\n\treturn s.server.ListenAndServe()\n}",
"func (h *Server) Run() {\n\n\th.g.StartServer()\n}",
"func Start() {\n\twebServer.Engine.Run(\":\" + strconv.Itoa(cfg.Read().App.WebServerPort))\n}",
"func (app App) StartServer() {\n\tr := mux.NewRouter().StrictSlash(false)\n\tr.HandleFunc(\"/top/{location}\", app.topContributorsHandler)\n\tr.NotFoundHandler = http.HandlerFunc(usage)\n\tsrv := &http.Server{\n\t\tHandler: r,\n\t\tAddr: app.listenAddr,\n\t\tWriteTimeout: 60 * time.Second,\n\t\tReadTimeout: 60 * time.Second,\n\t}\n\tlogrus.Fatal(srv.ListenAndServe())\n}",
"func startServer() error {\n\n\tc, err := config()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// checking if a router is defined\n\tif cfgRouter == nil {\n\t\treturn ErrNoRouterConfig\n\t}\n\n\t// HTTPS Server\n\tcfgServer := http.Server{}\n\tcfgServer.Addr = fmt.Sprint(\":\", c.Server.HTTPPort)\n\n\t//TODO write own cors middleware\n\tcorsManager := cors.New(cors.Options{\n\t\tAllowCredentials: true,\n\t\tAllowedOrigins: []string{\"http://localhost:8080\"},\n\t\tAllowedMethods: []string{\"GET\", \"POST\", \"PUT\", \"DELETE\"},\n\t\tAllowedHeaders: []string{\"Authorization\", \"Origin\", \"Cache-Control\", \"Accept\", \"Content-Type\", \"X-Requested-With\"},\n\t\tDebug: true,\n\t})\n\n\t//\tcfgRouter.Handler()\n\tcfgServer.Handler = corsManager.Handler(cfgRouter.Handler())\n\t//cfgServer.Handler = cfgRouter.Handler()\n\n\terr = cfgServer.ListenAndServe()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefer cfgServer.Close()\n\n\treturn nil\n}",
"func startHTTPServer(ch chan<- bool) {\n\tserver := http.Server{\n\t\tAddr: \":80\",\n\t}\n\tlog.Println(\"HTTP server started (listening on port 80).\")\n\tlog.Println(\"HTTP server stopped with error:\", server.ListenAndServe())\n\tch <- true\n}",
"func main() {\n\tserver.StartUp(false)\n}",
"func StartServer(port int) {\n\thttp.Handle(\"/\", http.FileServer(http.Dir(\"./static\")))\n\terr := http.ListenAndServe(fmt.Sprintf(\":%v\", port), nil)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}",
"func StartHttpServer(srv *http.Server) error {\n\thttp.HandleFunc(\"/new\", newServer)\n\terr := srv.ListenAndServe()\n\treturn err\n}",
"func (rst *REST) LiveMessage(w http.ResponseWriter, r *http.Request) {\n\thttp.ServeFile(w, r, \"live.html\")\n}",
"func startServer(dataSlice []string) {\n\te := echo.New()\n\n\te.GET(\"/\", func(f echo.Context) error {\n\t\treturn f.JSON(http.StatusOK, dataSlice)\n\t})\n\n\tfmt.Println(\"Server running: http://localhost:8000\")\n\te.Logger.Fatal(e.Start(\":8000\"))\n}",
"func StartAPIServer(rfs *fs.RootFileSystem) {\n\tfilesys = rfs\n\thttp.Handle(\"/\", APIHandler{})\n\tserveString := fmt.Sprintf(\":%d\", config.PORT)\n\tfmt.Printf(\"Serving on %s\\n\", serveString)\n\thttp.ListenAndServe(serveString, nil)\n}",
"func StartServer() error {\n\tlistenPort := flag.Int(\"listen-port\", 3000, \"REST API server listen port\")\n\tlistenIP := flag.String(\"listen-address\", \"0.0.0.0\", \"REST API server listen ip address\")\n\tgeolitedb := flag.String(\"geolite2-db\", \"\", \"Geolite mmdb database file. If not defined, localization info based on IP will be disabled\")\n\tgeocitystatedb := flag.String(\"city-state-db\", \"\", \"City->State database file in CSV format 'country-code,city,state'. If defined, input '_ip_state' will be calculated according to '_ip_city'.\")\n\tlogLevel := flag.String(\"log-level\", \"info\", \"debug, info, warning or error\")\n\tflag.Parse()\n\n\tswitch *logLevel {\n\tcase \"debug\":\n\t\tlogrus.SetLevel(logrus.DebugLevel)\n\t\tbreak\n\tcase \"warning\":\n\t\tlogrus.SetLevel(logrus.WarnLevel)\n\t\tbreak\n\tcase \"error\":\n\t\tlogrus.SetLevel(logrus.ErrorLevel)\n\t\tbreak\n\tdefault:\n\t\tlogrus.SetLevel(logrus.InfoLevel)\n\t}\n\n\tprometheus.MustRegister(rulesProcessingHist)\n\tprometheus.MustRegister(groupRuleCount)\n\n\tgf := *geolitedb\n\tif gf == \"\" {\n\t\tlogrus.Infof(\"Geolite database file not found. Localization capabilities based on IP will be disabled\")\n\t} else {\n\t\tlogrus.Debugf(\"Loading GeoIP2 database %s\", gf)\n\t\tgdb, err := geoip2.Open(gf)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tgeodb = gdb\n\t\tdefer geodb.Close()\n\t\tlogrus.Infof(\"GeoIP2 database loaded\")\n\n\t\tcs := *geocitystatedb\n\t\tif cs == \"\" {\n\t\t\tlogrus.Infof(\"City State csv file not defined. _ip_state input won't be available\")\n\t\t} else {\n\t\t\tlogrus.Debugf(\"Loading City State CSV file %s\", cs)\n\t\t\tcsvFile, err := os.Open(cs)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\treader := csv.NewReader(bufio.NewReader(csvFile))\n\t\t\tfor {\n\t\t\t\tline, err := reader.Read()\n\t\t\t\tif err == io.EOF {\n\t\t\t\t\tbreak\n\t\t\t\t} else if err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tcountry := strings.ToLower(line[0])\n\t\t\t\tcity := strings.ToLower(line[1])\n\t\t\t\tstate := line[2]\n\t\t\t\tcm, exists := cityState[country]\n\t\t\t\tif !exists {\n\t\t\t\t\tcm = make(map[string]string)\n\t\t\t\t\tcityState[country] = cm\n\t\t\t\t}\n\t\t\t\tcm[city] = state\n\t\t\t}\n\t\t\tlogrus.Infof(\"City State CSV loaded\")\n\t\t}\n\t}\n\n\trouter := mux.NewRouter()\n\trouter.HandleFunc(\"/rules/{groupName}\", handleRuleGroup).Methods(\"POST\")\n\trouter.Handle(\"/metrics\", promhttp.Handler())\n\tlisten := fmt.Sprintf(\"%s:%d\", *listenIP, *listenPort)\n\tlogrus.Infof(\"Listening at %s\", listen)\n\terr := http.ListenAndServe(listen, router)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (c *Config) Start() error {\n\t//c.mux = http.NewServeMux()\n\ts := newServer(c)\n\ts.indexTemplate = template.Must(template.ParseFiles(c.indexTemplatePath))\n\ts.callTemplate = template.Must(template.New(\"callDetails.html\").Funcs(template.FuncMap{\n\t\t\"displaySendCounts\": func(cd []counts.CommDataT, leadRank int, callID int) string {\n\t\t\tfor _, data := range cd {\n\t\t\t\tif data.LeadRank == leadRank {\n\t\t\t\t\treturn strings.Join(cd[leadRank].CallData[callID].SendData.RawCounts, \"<br />\")\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn \"Call not found\"\n\t\t},\n\t\t\"displayRecvCounts\": func(cd []counts.CommDataT, leadRank int, callID int) string {\n\t\t\tfor _, data := range cd {\n\t\t\t\tif data.LeadRank == leadRank {\n\t\t\t\t\treturn strings.Join(cd[leadRank].CallData[callID].RecvData.RawCounts, \"<br />\")\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn \"Call not found\"\n\t\t},\n\t\t\"displayCallPlot\": func(leadRank int, callID int) string {\n\t\t\treturn fmt.Sprintf(\"profiler_rank%d_call%d.png\", leadRank, callID)\n\t\t}}).ParseFiles(c.callTemplatePath))\n\ts.callsTemplate = template.Must(template.ParseFiles(c.callsTemplatePath))\n\ts.patternsTemplate = template.Must(template.ParseFiles(c.patternsTemplatePath))\n\ts.heatmapTemplate = template.Must(template.New(\"heatmapDetails.html\").Funcs(template.FuncMap{\n\t\t\"displayHeatmap\": func(patternID int) string {\n\t\t\treturn fmt.Sprintf(\"%d_task3.png\", patternID)\n\t\t}}).ParseFiles(c.heatmapTemplatePath))\n\ts.heatmapsTemplate = template.Must(template.ParseFiles(c.heatmapsTemplatePath))\n\ts.stopTemplate = template.Must(template.ParseFiles(c.stopTemplatePath))\n\n\tc.srv = &http.Server{\n\t\tAddr: fmt.Sprintf(\":%d\", c.Port),\n\t\tHandler: s,\n\t}\n\n\tgo func(c *Config) {\n\t\tdefer c.wg.Done()\n\t\tc.srv.ListenAndServe()\n\t\tfmt.Println(\"HTTP server is now terminated\")\n\t}(c)\n\n\treturn nil\n}",
"func (s *Server) Start() {\n\tif s.URL != \"\" {\n\t\tpanic(\"Server already started\")\n\t}\n\ts.URL = s.Listener.Addr().String()\n\ts.goServe()\n\tif *serve != \"\" {\n\t\tfmt.Fprintln(os.Stderr, \"grpctest: serving on\", s.URL) // nolint: gas\n\t\tselect {}\n\t}\n}",
"func main() {\n\tservice.StartWebServer(\"8081\")\n}",
"func startLocalServer() {\n\tsURL, err := config.LocalServerIP()\n\tif err != nil {\n\t\tfmt.Println(xerrors.InternalError)\n\t\tlogrus.Fatalf(\"retrieving internal server IP: %v\", err)\n\t}\n\terr = server.Serve(sURL)\n\tif err != nil {\n\t\tfmt.Println(xerrors.InternalError)\n\t\tlogrus.Fatalf(\"starting local server: %v\", err)\n\t}\n}",
"func StartHTTPServer() chan struct{} {\n\thttpDone := make(chan struct{}, 1)\n\tgo func() {\n\t\tcfg := struct {\n\t\t\tHTTPListenPort string `mapstructure:\"httpListenPort\"`\n\t\t}{}\n\t\tapplyConfig(\"\", &cfg)\n\n\t\tloginfof(\"Starting http listen server on :%s\", cfg.HTTPListenPort)\n\t\tif err := http.ListenAndServe(\":\"+cfg.HTTPListenPort, nil); err != nil {\n\t\t\tfmt.Println(err.Error())\n\t\t}\n\t\thttpDone <- struct{}{}\n\t}()\n\treturn httpDone\n}",
"func StartServer() {\n\tr := gin.Default()\n\n\tcorsCfg := cors.DefaultConfig()\n\tcorsCfg.AllowOrigins = []string{\"http://localhost:1234\"}\n\tr.Use(cors.New(corsCfg))\n\n\tapi := r.Group(\"/api\")\n\t{\n\t\tapi.Any(\"/graphql\", graphQL)\n\t\tapi.GET(\"/players\", players)\n\t\tapi.GET(\"/player_datas\", playerDatas)\n\t}\n\n\tport := os.Getenv(\"PORT\")\n\tif len(port) == 0 {\n\t\tport = \"8080\"\n\t}\n\tr.Run(fmt.Sprintf(\":%s\", port))\n}",
"func (srv *Server) Serve() (err error) {\n\tsrv.state = StateRunning\n\tdefer func() { srv.state = StateTerminate }()\n\n\t// 主动重启导致的错误为ErrReloadClose\n\tif err = srv.serve(); err != nil && err != ErrReloadClose {\n\t\tlog.Println(syscall.Getpid(), \"Server.Serve() error:\", err)\n\t\treturn err\n\t}\n\n\tlog.Println(syscall.Getpid(), srv.ln.Addr(), \"Listener closed.\")\n\t// wait for Shutdown to return\n\treturn <-srv.terminalChan\n}",
"func (m *manager) start() error {\n\tw := newWatcher(m)\n\tw.start()\n\n\tgo m.test(fsnotify.Event{Name: \":start:\"})\n\n\t// watch files\n\tgo func() {\n\t\tlogrus.Info(\"watching files...\")\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase event := <-w.Events:\n\t\t\t\tif event.Op != fsnotify.Chmod {\n\t\t\t\t\tgo m.test(event)\n\t\t\t\t}\n\t\t\t\tw.Remove(event.Name)\n\t\t\t\tw.Add(event.Name)\n\t\t\tcase <-m.context.Done():\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}()\n\n\tgo func() {\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase err := <-w.Errors:\n\t\t\t\tlogrus.Error(err)\n\t\t\tcase <-m.context.Done():\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}()\n\n\tfor {\n\t\t_, err := os.Stat(\"test-coverage/index.html\")\n\t\tif err != nil {\n\t\t\ttime.Sleep(1 * time.Second)\n\t\t\tcontinue\n\t\t}\n\t\tbreak\n\t}\n\texec.Command(\"live-server\", \"test-coverage\").Run()\n\treturn nil\n}",
"func (c *Client) run() error {\n\tsignal.Notify(c.sigkil, os.Interrupt, syscall.SIGTERM, syscall.SIGQUIT, syscall.SIGHUP)\n\tc.setReloadSignals()\n\n\tif c.newCon {\n\t\t_, _ = c.Config.Write(c.Flags.ConfigFile)\n\t\t_ = ui.OpenFile(c.Flags.ConfigFile)\n\t\t_, _ = ui.Warning(Title, \"A new configuration file was created @ \"+\n\t\t\tc.Flags.ConfigFile+\" - it should open in a text editor. \"+\n\t\t\t\"Please edit the file and reload this application using the tray menu.\")\n\t}\n\n\tif c.Config.AutoUpdate != \"\" {\n\t\tgo c.AutoWatchUpdate()\n\t}\n\n\tswitch ui.HasGUI() {\n\tcase true:\n\t\tc.startTray() // This starts the web server.\n\t\treturn nil // startTray() calls os.Exit()\n\tdefault:\n\t\tc.StartWebServer()\n\t\treturn c.Exit()\n\t}\n}",
"func StartServer(fn func (conn net.Conn)) {\n\tlistener, err := net.Listen(\"tcp\", \"localhost:8081\")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tfor {\n\t\tconn, err := listener.Accept()\n\t\tif err != nil {\n\t\t\tlog.Print(err) // For ex.: connection failed\n\t\t\tcontinue\n\t\t}\n\t\tgo fn(conn)\n\t}\n}",
"func (l *LanguageServer) Start() error {\n\thttp.HandleFunc(\"/v1/statements\", l.statementHandler)\n\thttp.HandleFunc(\"/v1/inspect\", l.inspectHandler)\n\thttp.HandleFunc(\"/v1/notes\", l.notesPageHandler)\n\thttp.HandleFunc(\"/v1/pianoroll\", l.pianorollImageHandler)\n\thttp.HandleFunc(\"/version\", l.versionHandler)\n\treturn http.ListenAndServe(l.address, nil)\n}",
"func ServerStart(port string) (string, error) {\n\n\t// List of view handlers\n\thandlerStrings = append(handlerStrings, \"/\", \"/blockchain/view/<ID>\", \"/garage/view/<ID>\", \"serviceevent/add/\", \"/vehicle/view/<ID>\")\n\n\thttp.HandleFunc(\"/\", defaultHandler) // Each call to \"/\" will invoke defaultHandler\n\thttp.HandleFunc(\"/blockchain/view/\", blockchainViewHandler)\n\thttp.HandleFunc(\"/garage/view/\", garageViewHandler)\n\thttp.HandleFunc(\"/serviceevent/add/\", writeServiceEventHandler)\n\thttp.HandleFunc(\"/vehicle/view/\", vehicleViewHandler)\n\n\t//log.Fatal(http.ListenAndServe(\"localhost:\"+port, nil))\n\treturn \"Started on: \" + port, http.ListenAndServe(\"localhost:\"+port, nil)\n\n}",
"func (app *App) StartServer() {\n\t// Try connect to mongoDB\n\tapp.db = &mdb.Database{MongoURL: app.MongoURL, DBName: app.DBName}\n\tapp.db.CreateConnection()\n\tfmt.Println(\"Connected to mongoDB\")\n\n\t// Create handlers\n\tapp.infoHandler = NewInfoHandler()\n\tapp.trackHandler = track.NewTrackHandler(app.db)\n\tapp.tickerHandler = ticker.NewTickerHandler(app.TickerLimit, app.db)\n\tapp.webhookHandler = webhook.NewWebhookHandler(app.db)\n\tapp.adminHandler = admin.NewAdminHandler(app.db)\n\n\t// Registers a callback so that when a new track is registered, the webhook handler will check\n\t// if any webhooks should be triggered\n\tapp.trackHandler.SetTrackRegisterCallback(app.webhookHandler.CheckInvokeWebhooks)\n\n\t// Instantiate router, and configure the handlers and paths\n\tr := router.NewRouter()\n\tapp.configureRoutes(r)\n\tapp.configureValidators(r)\n\n\t// Start listen\n\tfmt.Printf(\"Server listening on port %s\\n\", app.ListenPort)\n\tif err := http.ListenAndServe(\":\"+app.ListenPort, r); err != nil {\n\t\tlog.Fatal(err.Error())\n\t}\n}",
"func (ws *WebServer) Start() error {\n\tif ws.server != nil {\n\t\treturn fmt.Errorf(\"WebServer already running\")\n\t}\n\n\tlog.Logger(\"webserver\").Info(\"Launching\")\n\n\ttemplatePath := ws.TemplatePath\n\tif templatePath == \"\" {\n\t\ttemplatePath = defaultServerTemplatePath\n\t}\n\ttemplate, err := createTemplate(templatePath)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tm := mux.NewRouter()\n\taddStatusEndpoints(m)\n\tstatusPageHandler := &StatusPageHandler{\n\t\tws.Authenticator,\n\t\tws.Clock,\n\t\tws.DiffURLFormat,\n\t\tws.KubeClient,\n\t\ttemplate,\n\t\tws.StatusTimeout,\n\t}\n\tforceRunHandler := &ForceRunHandler{\n\t\tws.Authenticator,\n\t\tws.KubeClient,\n\t\tws.RunQueue,\n\t}\n\tm.PathPrefix(\"/static/\").Handler(http.StripPrefix(\"/static/\", http.FileServer(http.Dir(\"static\"))))\n\tm.PathPrefix(\"/api/v1/forceRun\").Handler(forceRunHandler)\n\tm.PathPrefix(\"/\").Handler(statusPageHandler)\n\n\tws.server = &http.Server{\n\t\tAddr: fmt.Sprintf(\":%v\", ws.ListenPort),\n\t\tHandler: m,\n\t\tErrorLog: log.Logger(\"http.Server\").StandardLogger(nil),\n\t}\n\n\tgo func() {\n\t\tif err = ws.server.ListenAndServe(); err != nil {\n\t\t\tif !errors.Is(err, http.ErrServerClosed) {\n\t\t\t\tlog.Logger(\"webserver\").Error(\"Shutdown\", \"error\", err)\n\t\t\t}\n\t\t\tlog.Logger(\"webserver\").Info(\"Shutdown\")\n\t\t}\n\t}()\n\n\treturn nil\n}",
"func (config Config) RunHTTPServer() {\n\t// Set up a channel to listen to for interrupt signals\n\tvar runChan = make(chan os.Signal, 1)\n\n\t// Set up a context to allow for graceful server shutdowns in the event\n\t// of an OS interrupt (defers the cancel just in case)\n\tctx, cancel := context.WithTimeout(\n\t\tcontext.Background(),\n\t\tconfig.PilotLight.Server.Timeout.Server,\n\t)\n\tdefer cancel()\n\n\t// Create install-config.yaml file\n\tPreflightSetup(config)\n\n\t// Define server options\n\tserver := &http.Server{\n\t\tAddr: config.PilotLight.Server.Host + \":\" + config.PilotLight.Server.Port,\n\t\tHandler: NewRouter(config.PilotLight.Server.Path),\n\t\tReadTimeout: config.PilotLight.Server.Timeout.Read * time.Second,\n\t\tWriteTimeout: config.PilotLight.Server.Timeout.Write * time.Second,\n\t\tIdleTimeout: config.PilotLight.Server.Timeout.Idle * time.Second,\n\t}\n\n\t// Only listen on IPV4\n\tl, err := net.Listen(\"tcp4\", config.PilotLight.Server.Host+\":\"+config.PilotLight.Server.Port)\n\tcheck(err)\n\n\t// Handle ctrl+c/ctrl+x interrupt\n\tsignal.Notify(runChan, os.Interrupt, syscall.SIGTSTP)\n\n\t// Alert the user that the server is starting\n\tlog.Printf(\"Server is starting on %s\\n\", server.Addr)\n\n\t// Run the server on a new goroutine\n\tgo func() {\n\t\t//if err := server.ListenAndServe(); err != nil {\n\t\tif err := server.Serve(l); err != nil {\n\t\t\tif err == http.ErrServerClosed {\n\t\t\t\t// Normal interrupt operation, ignore\n\t\t\t} else {\n\t\t\t\tlog.Fatalf(\"Server failed to start due to err: %v\", err)\n\t\t\t}\n\t\t}\n\t}()\n\n\t// Block on this channel listeninf for those previously defined syscalls assign\n\t// to variable so we can let the user know why the server is shutting down\n\tinterrupt := <-runChan\n\n\t// If we get one of the pre-prescribed syscalls, gracefully terminate the server\n\t// while alerting the user\n\tlog.Printf(\"Server is shutting down due to %+v\\n\", interrupt)\n\tif err := server.Shutdown(ctx); err != nil {\n\t\tlog.Fatalf(\"Server was unable to gracefully shutdown due to err: %+v\", err)\n\t}\n}",
"func (c *Carnegie) Start() error {\n\tif c.Started {\n\t\treturn nil\n\t}\n\tc.Started = true\n\tgo c.updateCacheLoop()\n\tif certFile, keyFile := c.Config.GetString(\"cert\"), c.Config.GetString(\"key\"); certFile != \"\" && keyFile != \"\" {\n\t\tgo c.Server.ListenAndServeTLS(certFile, keyFile)\n\t}\n\treturn c.Server.ListenAndServe()\n}",
"func Run() {\n\tlog.Println(\"Starting simple webserver.\")\n\thttp.HandleFunc(\"/Packagename2Id\", createHandler(cache.C.Packagename2Id))\n\thttp.HandleFunc(\"/Id2Packagename\", createHandler(cache.C.Id2Packagename))\n\thttp.HandleFunc(\"/Updates\", createHandler(cache.C.Updates))\n\thttp.HandleFunc(\"/UpdatesIndex\", createHandler(cache.C.UpdatesIndex))\n\thttp.HandleFunc(\"/Evr2Id\", createHandler(cache.C.Evr2Id))\n\thttp.HandleFunc(\"/Id2Evr\", createHandler(cache.C.Id2Evr))\n\thttp.HandleFunc(\"/Id2Arch\", createHandler(cache.C.Id2Arch))\n\thttp.HandleFunc(\"/Arch2Id\", createHandler(cache.C.Arch2Id))\n\thttp.HandleFunc(\"/ArchCompat\", createHandler(cache.C.ArchCompat))\n\thttp.HandleFunc(\"/PackageDetails\", createHandler(cache.C.PackageDetails))\n\thttp.HandleFunc(\"/Nevra2PkgId\", createHandler(cache.C.Nevra2PkgId))\n\thttp.HandleFunc(\"/RepoDetails\", createHandler(cache.C.RepoDetails))\n\thttp.HandleFunc(\"/RepoLabel2Ids\", createHandler(cache.C.RepoLabel2Ids))\n\thttp.HandleFunc(\"/ProductId2RepoIds\", createHandler(cache.C.ProductId2RepoIds))\n\thttp.HandleFunc(\"/PkgId2RepoIds\", createHandler(cache.C.PkgId2RepoIds))\n\thttp.HandleFunc(\"/ErrataId2Name\", createHandler(cache.C.ErrataId2Name))\n\thttp.HandleFunc(\"/PkgId2ErrataIds\", createHandler(cache.C.PkgId2ErrataIds))\n\thttp.HandleFunc(\"/ErrataId2RepoIds\", createHandler(cache.C.ErrataId2RepoIds))\n\thttp.HandleFunc(\"/CveDetail\", createHandler(cache.C.CveDetail))\n\thttp.HandleFunc(\"/PkgErrata2Module\", createHandler(cache.C.PkgErrata2Module))\n\thttp.HandleFunc(\"/ModuleName2Ids\", createHandler(cache.C.ModuleName2Ids))\n\thttp.HandleFunc(\"/DbChange\", createHandler(cache.C.DbChange))\n\thttp.HandleFunc(\"/ErrataDetail\", createHandler(cache.C.ErrataDetail))\n\thttp.HandleFunc(\"/SrcPkgId2PkgId\", createHandler(cache.C.SrcPkgId2PkgId))\n\thttp.HandleFunc(\"/String\", createHandler(cache.C.String))\n\n\thttp.HandleFunc(\"/gc\", func(w http.ResponseWriter, r *http.Request) {\n\t\truntime.GC()\n\t\tw.WriteHeader(http.StatusOK)\n\t\tutils.PrintMemUsage()\n\t\treturn\n\t})\n\n err := http.ListenAndServe(\":8080\", nil)\n if err != nil {\n \tprint(err)\n\t}\n}",
"func (app *App) Listen(serverURL string) {\n\terr := app.htmlTemplates.parseFolder(app.settings.ViewsPath, app.settings.ViewExtension)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thttp.ListenAndServe(serverURL, app.craterRequestHandler)\n}",
"func startHTTPServer(app *AppContext) *http.Server {\n\n\tsrv := &http.Server{\n\t\tAddr: \":\" + app.configFile.loggerPort,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t}\n\tgo func() {\n\t\tif err := srv.ListenAndServe(); err != nil {\n\t\t\tlog.Debugf(\"Httpserver: ListenAndServe(): %s\", err)\n\t\t}\n\t}()\n\tlog.Debugf(\"HTTP Server started, listening on :%s\", app.configFile.loggerPort)\n\treturn srv // returning reference so caller can call Shutdown()\n}",
"func StartServer(cleanUpChan chan int){\n\tGrpcServer = &Server{\n CleanUpChan:cleanUpChan ,\n\t GrpcServer: grpc.NewServer(),\n\t}\n\tregisterGrpcServices(GrpcServer.GrpcServer)\n\tif err := GrpcServer.GrpcServer.Serve(getListner(port)); err != nil {\n\t\tpanic(err)\n\t}\n}",
"func (s *Server) Start() {\n\tlog.Println(\"Starting webhook receiver on port 8080...\")\n\terr := http.ListenAndServe(\":8080\", nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"Couldn't start server: %s\", err)\n\t}\n}",
"func (r *RuntimeServer) Start(errchan chan error) {\n\tgo func() {\n\t\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\"%s:%d\", r.conf.HostIP, r.conf.ServerPort))\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"failed to listen: %v\", err)\n\t\t\terrchan <- err\n\t\t}\n\t\tif err := r.server.Serve(lis); err != nil {\n\t\t\terrchan <- err\n\t\t}\n\t}()\n\tif err := r.registServer(); err != nil {\n\t\terrchan <- err\n\t}\n\tlogrus.Infof(\"runtime server start success\")\n}",
"func (f *Frontend) Start() error {\n\n\tlistenAddr := fmt.Sprintf(\"%s:%d\", f.cfg.Host, f.cfg.Port)\n\toriginalListener, err := net.Listen(\"tcp\", listenAddr)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tsl, err := stoppableListener.New(originalListener)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tserver := http.Server{Handler: context.ClearHandler(f.router)}\n\n\tstop := make(chan os.Signal)\n\tsignal.Notify(stop, syscall.SIGINT)\n\tvar wg sync.WaitGroup\n\tgo func() {\n\t\twg.Add(1)\n\t\tdefer wg.Done()\n\t\tserver.Serve(sl)\n\t}()\n\n\tf.log.Println(\"Start serving HTTP requests at \", listenAddr)\n\tselect {\n\tcase signal := <-stop:\n\t\tf.log.Println(\"Got signal: \", signal)\n\t}\n\tf.log.Println(\"Stopping listener\")\n\tsl.Stop()\n\tf.log.Println(\"Waiting on server\")\n\twg.Wait()\n\n\treturn nil\n}",
"func StartServer(port int) {\n\twsHandler := clientWebsocketHandler{upgrader: defaultUpgrader}\n\n\trouter := mux.NewRouter()\n\trouter.Handle(\"/client_ws\", wsHandler)\n\trouter.Handle(\"/d/{downloadId}\", downloadHandler{})\n\n\taddr := fmt.Sprintf(\":%d\", port)\n\thttp.ListenAndServe(addr, router)\n}",
"func (pv *SCFilePV) StartHTTPServer() error {\n\tpv.Logger.Info(\"Starting HTTP server...\")\n\n\tvar errCh chan error\n\tgo func() {\n\t\thttp.HandleFunc(\"/status\", pv.statusHandler)\n\t\tif err := pv.HTTP.ListenAndServe(); err != nil {\n\t\t\terrCh <- err\n\t\t}\n\t}()\n\tselect {\n\tcase <-time.After(100 * time.Millisecond):\n\t\treturn nil\n\tcase err := <-errCh:\n\t\treturn err\n\t}\n}",
"func (s *server) Run() error {\n\ts.logger.Info(\"starting http server\", logger.String(\"addr\", s.server.Addr))\n\ts.server.Handler = s.gin\n\t// Open listener.\n\ttrackedListener, err := conntrack.NewTrackedListener(\"tcp\", s.addr, s.r)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn s.server.Serve(trackedListener)\n}",
"func main() {\n\thttp.ListenAndServe(\"127.0.0.1:8080\", NewServer())\n}",
"func (s *Serverus) StartServerus() {\n\tif err := s.server.Serve(s.lis); err != nil {\n\t\tlog.Fatalf(\"Failed to serve %v\", err)\n\t}\n}",
"func runServer(port string, router http.Handler) error {\n\tfmt.Printf(\"Running on localhost:%v\\n\", port)\n\n\terr := http.ListenAndServe(\":\"+port, router)\n\n\tif err.Error() == usedPortError(port) {\n\t\tfmt.Printf(\"Port %v is busy\\n\\n\", port)\n\t\tnewPort, err := prompNewPort()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(newPort)\n\t\treturn runServer(strings.TrimSpace(newPort), router)\n\t}\n\treturn err\n}",
"func Start(addr string) {\n\tf = NewServer()\n\thttp.HandleFunc(\"/bayeux\", serveWs)\n\thttp.HandleFunc(\"/\", serveOther)\n\n\t// serve static assets workaround\n\t//http.Handle(\"/file/\", http.StripPrefix(\"/file\", http.FileServer(http.Dir(\"/Users/paul/go/src/github.com/pcrawfor/fayego/runner\"))))\n\n\terr := http.ListenAndServe(addr, nil)\n\tif err != nil {\n\t\tfmt.Println(\"Fatal error \", err.Error())\n\t\tos.Exit(1)\n\t}\n}"
] | [
"0.5976077",
"0.59226185",
"0.5880268",
"0.5784931",
"0.57804745",
"0.57739013",
"0.5734538",
"0.5710085",
"0.56784874",
"0.5660749",
"0.562987",
"0.56220937",
"0.56210876",
"0.56204873",
"0.56059647",
"0.56023747",
"0.5602115",
"0.5571002",
"0.5558408",
"0.5542132",
"0.55389893",
"0.5533808",
"0.5518656",
"0.5488205",
"0.5484353",
"0.5467971",
"0.54656345",
"0.5464551",
"0.546174",
"0.5454712",
"0.5435035",
"0.54329604",
"0.5431742",
"0.5427014",
"0.5426542",
"0.54217637",
"0.5415198",
"0.54083836",
"0.5403654",
"0.54011774",
"0.53963375",
"0.5390292",
"0.5380227",
"0.5379064",
"0.53735596",
"0.5371406",
"0.5369213",
"0.53616536",
"0.5358877",
"0.53588283",
"0.5330198",
"0.5328178",
"0.5321187",
"0.53206825",
"0.53082514",
"0.53029734",
"0.5302901",
"0.529721",
"0.5296831",
"0.52943873",
"0.5291389",
"0.5290783",
"0.5287163",
"0.52847207",
"0.52718574",
"0.52714187",
"0.5264927",
"0.52637124",
"0.5255944",
"0.5245638",
"0.5245539",
"0.5232582",
"0.52199286",
"0.5216387",
"0.5207527",
"0.5203551",
"0.51947796",
"0.51898867",
"0.518394",
"0.5180755",
"0.51771706",
"0.5176168",
"0.5174184",
"0.51673675",
"0.5161645",
"0.51605994",
"0.51597947",
"0.5154374",
"0.5152881",
"0.5151321",
"0.51500946",
"0.51471764",
"0.5144946",
"0.51380163",
"0.5135193",
"0.51342005",
"0.5128733",
"0.512656",
"0.5124723",
"0.51168406"
] | 0.85250896 | 0 |
CreateFooterView creates a footer area for the application | func CreateFooterView(g *gocui.Gui) error {
	viewName := "footer"
	if footer, err := g.SetView(viewName, maxX/6+1, maxY-maxY/4, maxX-1, maxY-1); err != nil {
		if err != gocui.ErrUnknownView {
			return err
		}
		footer.Wrap = true
		footer.Title = "Top HeadLines"
		footer.SelBgColor = gocui.ColorGreen
		footer.SelFgColor = gocui.ColorRed
		fmt.Fprintln(footer, Country, Source, Category)
	}
	views = append(views, viewName)
	return nil
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (tui *TUI) drawFooter() {\n\tmin := int(tui.trackDuration.Minutes())\n\tsecs := int(tui.trackDuration.Seconds()) % 60\n\tvar title string\n\tif tui.currentTrack != nil {\n\t\ttitle = tui.currentTrack.Title\n\t}\n\ttui.footer.Clear()\n\tfmt.Fprintf(tui.footer, \"%02d:%02d / %s\", min, secs, title)\n\ttui.app.Draw()\n}",
"func (cc conciergeCat) viewFooter() string {\n\treturn termenv.String(\"\\n ↑/↓: Navigate • q: Quit\\n\").Foreground(term.Color(\"241\")).String()\n}",
"func footer() (*template.Template, error) {\n\ttpl := `<footer class=\"pt-2 border-top\">\n <div class=\"d-flex justify-content-center\">\n <h5><small class=\"text-muted\">© 2018-%d The Soteria DAG developers</small></h5>\n </div>\n</footer>`\n\n\tt := template.New(\"footer\")\n\treturn t.Parse(fmt.Sprintf(tpl, time.Now().Year()))\n}",
"func drawFooter(s tcell.Screen, content string) {\n\tcontent = \"FLShell v2.0 | Image File: \" + *imagepath + \" | \" + content\n\tcolourRow(s, footerStyle, windowHeight-1)\n\tputln(s, footerStyle, content, windowHeight-1)\n}",
"func (b *Bill) makeFooter() func() {\n\treturn func() {\n\t\tb.pdf.Ln(10)\n\t\tb.darkDrawColor()\n\t\tb.pdf.Line(8, 280, 200, 280)\n\t\tb.pdf.SetXY(8.0, 285)\n\t\tb.darkText()\n\t\tb.pdf.Cell(143, 0, b.config.Business.Name)\n\t\tb.lightText()\n\t\tb.pdf.Cell(40, 0, \"Generated: \"+time.Now().UTC().Format(\"2006-01-02 15:04:05\"))\n\t}\n}",
"func AddFooter(page *onthefly.Page, footerText, footerTextColor, footerColor string, elapsed time.Duration) (*onthefly.Tag, error) {\n\tbody, err := page.GetTag(\"body\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdiv := body.AddNewTag(\"div\")\n\tdiv.AddAttrib(\"id\", \"notice\")\n\tdiv.AddStyle(\"position\", \"fixed\")\n\tdiv.AddStyle(\"bottom\", \"0\")\n\tdiv.AddStyle(\"left\", \"0\")\n\tdiv.AddStyle(\"width\", \"100%\")\n\tdiv.AddStyle(\"display\", \"block\")\n\tdiv.AddStyle(\"padding\", \"0\")\n\tdiv.AddStyle(\"margin\", \"0\")\n\tdiv.AddStyle(\"background-color\", footerColor)\n\tdiv.AddStyle(\"font-size\", \"0.6em\")\n\tdiv.AddStyle(\"text-align\", \"right\")\n\tdiv.AddStyle(\"box-shadow\", \"1px -2px 3px rgba(0,0,0, .5)\")\n\n\tinnerdiv := div.AddNewTag(\"div\")\n\tinnerdiv.AddAttrib(\"id\", \"innernotice\")\n\tinnerdiv.AddStyle(\"padding\", \"0 2em 0 0\")\n\tinnerdiv.AddStyle(\"margin\", \"0\")\n\tinnerdiv.AddStyle(\"color\", footerTextColor)\n\tinnerdiv.AddContent(\"Generated in \" + elapsed.String() + \" | \" + footerText)\n\n\treturn div, nil\n}",
"func Footer_(children ...HTML) HTML {\n return Footer(nil, children...)\n}",
"func (r *txtRenderer) RenderFooter(w io.Writer, ast *blackfriday.Node) {}",
"func Footer(attrs []htmlgo.Attribute, children ...HTML) HTML {\n\treturn &htmlgo.Tree{Tag: \"footer\", Attributes: attrs, Children: children}\n}",
"func (r *roffRenderer) RenderFooter(w io.Writer, ast *blackfriday.Node) {\n}",
"func (r *ChromaRenderer) RenderFooter(w io.Writer, ast *blackfriday.Node) {}",
"func ToolbarFooter(append ...bool) vecty.Markup {\n\treturn AddClass(toolbarFooter, append...)\n}",
"func (v Binary) Footer(cursor, width int, baseStyle lipgloss.Style) string {\n\treturn baseStyle.Render(fmt.Sprintf(\"%d / %d bytes (%d bytes per row)\", cursor*dataWidth(width), v.size, dataWidth(width)))\n}",
"func Footer() string {\n\treturn \"```\\n\\n</details>\"\n}",
"func (s Spec) FooterLocation() panel.Point {\n\treturn panel.Point{X: s.Width() / 2, Y: s.MountingHoleBottomY()}\n}",
"func (dao *blockDAO) Footer(h hash.Hash256) (*block.Footer, error) {\n\treturn dao.footer(h)\n}",
"func (f *DynamicDiskBlockFactory) GetFooterRange() *common.IndexRange {\n\treturn common.NewIndexRangeFromLength(f.GetBlockCount()*f.GetBlockSize(), vhdcore.VhdFooterSize)\n}",
"func printLogbookFooter(pdf *gofpdf.Fpdf, logbookOwner string, totalPage logbookTotalRecord, totalPrevious logbookTotalRecord, totalTime logbookTotalRecord) {\n\n\tprintTotal := func(totalName string, total logbookTotalRecord) {\n\t\tpdf.SetFillColor(217, 217, 217)\n\t\tpdf.SetFont(\"LiberationSansNarrow-Bold\", \"\", 8)\n\n\t\tpdf.SetX(leftMargin)\n\n\t\tif totalName == \"TOTAL THIS PAGE\" {\n\t\t\tpdf.CellFormat(w4[0], footerRowHeight, \"\", \"LTR\", 0, \"\", true, 0, \"\")\n\t\t} else if totalName == \"TOTAL FROM PREVIOUS PAGES\" {\n\t\t\tpdf.CellFormat(w4[0], footerRowHeight, \"\", \"LR\", 0, \"\", true, 0, \"\")\n\t\t} else {\n\t\t\tpdf.CellFormat(w4[0], footerRowHeight, \"\", \"LBR\", 0, \"\", true, 0, \"\")\n\t\t}\n\t\tpdf.CellFormat(w4[1], footerRowHeight, totalName, \"1\", 0, \"C\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[2], footerRowHeight, total.time.se.GetTime(true), \"1\", 0, \"C\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[3], footerRowHeight, total.time.me.GetTime(true), \"1\", 0, \"C\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[4], footerRowHeight, total.time.mcc.GetTime(true), \"1\", 0, \"C\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[5], footerRowHeight, total.time.total.GetTime(true), \"1\", 0, \"C\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[6], footerRowHeight, \"\", \"1\", 0, \"\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[7], footerRowHeight, fmt.Sprintf(\"%d\", total.landings.day), \"1\", 0, \"C\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[8], footerRowHeight, fmt.Sprintf(\"%d\", total.landings.night), \"1\", 0, \"C\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[9], footerRowHeight, total.time.night.GetTime(true), \"1\", 0, \"C\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[10], footerRowHeight, total.time.ifr.GetTime(true), \"1\", 0, \"C\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[11], footerRowHeight, total.time.pic.GetTime(true), \"1\", 0, \"C\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[12], footerRowHeight, total.time.copilot.GetTime(true), \"1\", 0, \"C\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[13], footerRowHeight, total.time.dual.GetTime(true), \"1\", 0, \"C\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[14], footerRowHeight, total.time.instructor.GetTime(true), \"1\", 0, \"C\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[15], footerRowHeight, \"\", \"1\", 0, \"\", true, 0, \"\")\n\t\tpdf.CellFormat(w4[16], footerRowHeight, total.sim.time.GetTime(true), \"1\", 0, \"C\", true, 0, \"\")\n\n\t\tpdf.SetFont(\"LiberationSansNarrow-Regular\", \"\", 6)\n\t\tif totalName == \"TOTAL THIS PAGE\" {\n\t\t\tpdf.CellFormat(w4[17], footerRowHeight, \"I certify that the entries in this log are true.\", \"LTR\", 0, \"C\", true, 0, \"\")\n\t\t} else if totalName == \"TOTAL FROM PREVIOUS PAGES\" {\n\t\t\tpdf.CellFormat(w4[17], footerRowHeight, \"\", \"LR\", 0, \"\", true, 0, \"\")\n\t\t} else {\n\t\t\tpdf.CellFormat(w4[17], footerRowHeight, logbookOwner, \"LBR\", 0, \"C\", true, 0, \"\")\n\t\t}\n\n\t\tpdf.Ln(-1)\n\t}\n\n\tprintTotal(\"TOTAL THIS PAGE\", totalPage)\n\tprintTotal(\"TOTAL FROM PREVIOUS PAGES\", totalPrevious)\n\tprintTotal(\"TOTAL TIME\", totalTime)\n\n}",
"func footer_handler() string {\n\ttpl, err := gtpl.Open(\"templates/overall.html\")\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn \"\"\n\t}\n\n\ttpl.Parse(\"footer\")\n\treturn tpl.Out()\n}",
"func showTodosFooter() {\n\tdoc.QuerySelector(\"#footer\").SetAttribute(\"style\", \"display: block;\")\n}",
"func ExamplePdfMaroto_RegisterFooter() {\n\t// For register footer you need to call method RegisterFooter\n\t// that receives a closure.\n\t// In this closure you are free to set any components you want to compose\n\t// your footer.\n\t// In this example there is a signature and a text with right align.\n\t// It is important to remember that it is recommended to create Row's and\n\t// Col's if necessary.\n\t// You have to register the footer immediately after the Maroto\n\t// All footers will be rendered at the bottom of all pages\n\n\tm := pdf.NewMaroto(consts.Portrait, consts.A4)\n\n\tm.RegisterFooter(func() {\n\t\tm.Row(10, func() {\n\t\t\tm.Col(6, func() {\n\t\t\t\tm.Signature(\"lorem ipsum dolor\")\n\t\t\t})\n\t\t\tm.Col(6, func() {\n\t\t\t\tm.Text(time.Now().Format(\"02-January-2006\"), props.Text{Align: consts.Right})\n\t\t\t})\n\t\t})\n\t})\n\n\t// Do more things or not and save...\n}",
"func SerializeFooter(footer *Footer) []byte {\n\tbuffer := make([]byte, vhdcore.VhdFooterSize)\n\twriter := writer.NewVhdWriterFromByteSlice(buffer)\n\n\twriter.WriteBytes(0, footer.Cookie.Data)\n\twriter.WriteUInt32(8, uint32(footer.Features))\n\twriter.WriteUInt32(12, uint32(footer.FileFormatVersion))\n\twriter.WriteInt64(16, footer.HeaderOffset)\n\twriter.WriteTimeStamp(24, footer.TimeStamp)\n\tcreatorApp := make([]byte, 4)\n\tcopy(creatorApp, footer.CreatorApplication)\n\twriter.WriteBytes(28, creatorApp)\n\twriter.WriteUInt32(32, uint32(footer.CreatorVersion))\n\twriter.WriteUInt32(36, uint32(footer.CreatorHostOsType))\n\twriter.WriteInt64(40, footer.PhysicalSize)\n\twriter.WriteInt64(48, footer.VirtualSize)\n\t// + DiskGeometry\n\twriter.WriteUInt16(56, footer.DiskGeometry.Cylinder)\n\twriter.WriteByte(58, footer.DiskGeometry.Heads)\n\twriter.WriteByte(59, footer.DiskGeometry.Sectors)\n\t// - DiskGeometry\n\twriter.WriteUInt32(60, uint32(footer.DiskType))\n\twriter.WriteBytes(68, footer.UniqueID.ToByteSlice())\n\twriter.WriteBoolean(84, footer.SavedState)\n\twriter.WriteBytes(85, footer.Reserved)\n\t// + Checksum\n\t//\n\t// Checksum is one’s complement of the sum of all the bytes in the footer without the\n\t// checksum field.\n\tcheckSum := uint32(0)\n\tfor i := int(0); i < int(vhdcore.VhdFooterSize); i++ {\n\t\tif i < vhdcore.VhdFooterChecksumOffset || i >= vhdcore.VhdFooterChecksumOffset+4 {\n\t\t\tcheckSum += uint32(buffer[i])\n\t\t}\n\t}\n\n\twriter.WriteUInt32(64, ^checkSum)\n\t// - Checksum\n\n\treturn buffer\n}",
"func PrintFooter() {\n\tfmt.Println(\"***********************************************\")\n}",
"func FooterProperty(fk FooterKey) Option {\n\treturn func(p *PasetoMiddleware) {\n\t\tp.FooterProperty = fk\n\t}\n}",
"func putfooter(w io.Writer, r *http.Request) {\n\ttFooter.Execute(w, nil)\n}",
"func recordFooter() error {\n\treturn recordMessage(session.Kind_FOOTER, &session.Entry{\n\t\tKind: session.Kind_FOOTER,\n\t\tMsg: &session.Entry_Footer{\n\t\t\tFooter: &session.Footer{},\n\t\t},\n\t})\n}",
"func testFooter(t *testing.T) {\n\tlogging.Info(fmt.Sprintf(\"=============== Ending test [%s] ===============\", t.Name()))\n}",
"func (footer Footer) Render() vdom.VNode {\n\tif len(footer.Todos) < 1 {\n\t\treturn nil\n\t}\n\titemsLeft := 0\n\tfor _, todo := range footer.Todos {\n\t\tif !todo.Completed {\n\t\t\titemsLeft++\n\t\t}\n\t}\n\titemsLeftText := \" items left\"\n\tif itemsLeft == 1 {\n\t\titemsLeftText = \" item left\"\n\t}\n\treturn &vdom.VElement{\n\t\tTagName: \"footer\",\n\t\tProps: map[string]interface{}{\n\t\t\t\"className\": \"footer\",\n\t\t},\n\t\tChildren: []vdom.VNode{\n\t\t\t&vdom.VElement{\n\t\t\t\tTagName: \"span\",\n\t\t\t\tProps: map[string]interface{}{\n\t\t\t\t\t\"className\": \"todo-count\",\n\t\t\t\t},\n\t\t\t\tChildren: []vdom.VNode{\n\t\t\t\t\t&vdom.VElement{\n\t\t\t\t\t\tTagName: \"strong\",\n\t\t\t\t\t\tChildren: []vdom.VNode{\n\t\t\t\t\t\t\tvdom.VText(strconv.Itoa(itemsLeft)),\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tvdom.VText(itemsLeftText),\n\t\t\t\t},\n\t\t\t},\n\t\t\t&vdom.VElement{\n\t\t\t\tTagName: \"ul\",\n\t\t\t\tProps: map[string]interface{}{\n\t\t\t\t\t\"className\": \"filters\",\n\t\t\t\t},\n\t\t\t\tChildren: []vdom.VNode{\n\t\t\t\t\t&vdom.VElement{\n\t\t\t\t\t\tTagName: \"li\",\n\t\t\t\t\t\tChildren: []vdom.VNode{\n\t\t\t\t\t\t\t&vdom.VElement{\n\t\t\t\t\t\t\t\tTagName: \"a\",\n\t\t\t\t\t\t\t\tProps: map[string]interface{}{\n\t\t\t\t\t\t\t\t\t\"href\": \"\",\n\t\t\t\t\t\t\t\t\t\"className\": footer.getFilterClasses(\"\"),\n\t\t\t\t\t\t\t\t\t\"onclick\": footer.changeFilter,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\tChildren: []vdom.VNode{\n\t\t\t\t\t\t\t\t\tvdom.VText(\"All\"),\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t&vdom.VElement{\n\t\t\t\t\t\tTagName: \"li\",\n\t\t\t\t\t\tChildren: []vdom.VNode{\n\t\t\t\t\t\t\t&vdom.VElement{\n\t\t\t\t\t\t\t\tTagName: \"a\",\n\t\t\t\t\t\t\t\tProps: map[string]interface{}{\n\t\t\t\t\t\t\t\t\t\"href\": \"active\",\n\t\t\t\t\t\t\t\t\t\"className\": footer.getFilterClasses(\"active\"),\n\t\t\t\t\t\t\t\t\t\"onclick\": footer.changeFilter,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\tChildren: []vdom.VNode{\n\t\t\t\t\t\t\t\t\tvdom.VText(\"Active\"),\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t&vdom.VElement{\n\t\t\t\t\t\tTagName: \"li\",\n\t\t\t\t\t\tChildren: []vdom.VNode{\n\t\t\t\t\t\t\t&vdom.VElement{\n\t\t\t\t\t\t\t\tTagName: \"a\",\n\t\t\t\t\t\t\t\tProps: map[string]interface{}{\n\t\t\t\t\t\t\t\t\t\"href\": \"completed\",\n\t\t\t\t\t\t\t\t\t\"className\": footer.getFilterClasses(\"completed\"),\n\t\t\t\t\t\t\t\t\t\"onclick\": footer.changeFilter,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\tChildren: []vdom.VNode{\n\t\t\t\t\t\t\t\t\tvdom.VText(\"Completed\"),\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t&vdom.VElement{\n\t\t\t\tTagName: \"button\",\n\t\t\t\tProps: map[string]interface{}{\n\t\t\t\t\t\"className\": \"clear-completed\",\n\t\t\t\t\t\"onclick\": footer.clearCompleted,\n\t\t\t\t},\n\t\t\t\tChildren: []vdom.VNode{\n\t\t\t\t\tvdom.VText(\"Clear completed\"),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}",
"func newFooterTest(t *testing.T) {\n\n\tresult := newFooter(1, 15, 1, 1).List\n\texpected := []string{\"1\", \"2\", \"...\", \"15\"}\n\tfor i := 0; i < len(expected); i++ {\n\t\tif result[i] != expected[i] {\n\t\t\tt.Error()\n\t\t}\n\t}\n\n}",
"func (s *BasePlSqlParserListener) ExitCreate_view(ctx *Create_viewContext) {}",
"func (f *Footer) Close() error {\n\tf.DecRef()\n\treturn nil\n}",
"func Tfoot_(children ...HTML) HTML {\n return Tfoot(nil, children...)\n}",
"func CreateVoucherLedgerAmountView(db *sql.DB) {\n\n\tcreateVoucherLedgerAmountViewStr := `\n\tCREATE VIEW voucher_ledger_amount AS\n\tSELECT \n\t\tvla.'Account Code'\n\t\t,vla.'Account Free'\n\t\t,SUM(vla.Amount) 'Amount'\n\tFROM\n\t(SELECT \n\t\t62002 'Account Code'\n\t\t,NULL 'Account Free'\n\t\t,ipc_final.voucher 'Amount'\n\tFROM ipc_final\n\tUNION ALL\n\tSELECT \n\t\t62002 'Account Code'\n\t\t,NULL 'Account Free'\n\t\t,ipt_final.voucher 'Amount'\n\tFROM ipt_final) vla\n\tGROUP BY vla.'Account Code', vla.'Account Free'\n\t`\n\n\tcreateVoucherLedgerAmountView, err := db.Prepare(createVoucherLedgerAmountViewStr)\n\tcheckError(err)\n\tcreateVoucherLedgerAmountView.Exec()\n\n}",
"func FprintFooter(file *os.File) {\n\tfmt.Fprintf(file,\n\t\t\"%s\\n\\n\"+\n\t\t\t\"</body>\\n\"+\n\t\t\t\"</html>\\n\",\n\t\tFooter)\n}",
"func CreateIptPaidPriceLedgerAmountView(db *sql.DB) {\n\n\tcreateIptPaidPriceLedgerAmountViewStr := `\n\tCREATE VIEW ipt_paid_price_ledger_amount AS\n\tSELECT \n\t\tipt_final.ledger 'Account Code'\n\t\t,ipt_final.subledger 'Account Free'\n\t\t,ipt_final.paid_price 'Amount'\n\tFROM ipt_final\n\tWHERE ipt_final.ledger IN(` + ledgerBookedAtSubledgerLevel + `)\n\tGROUP BY ipt_final.ledger, ipt_final.subledger\n\t`\n\n\tcreateIptPaidPriceLedgerAmountView, err := db.Prepare(createIptPaidPriceLedgerAmountViewStr)\n\tcheckError(err)\n\tcreateIptPaidPriceLedgerAmountView.Exec()\n\n}",
"func (t *trs80) writeFooter() {\n\tf := t.pb.Footer\n\tt.writeIntLn(f.Version)\n\tt.writeIntLn(f.Adventure)\n\tt.writeIntLn(f.Magic)\n}",
"func CreateIpcPaidPriceLedgerAmountView(db *sql.DB) {\n\n\tcreateIpcPaidPriceLedgerAmountViewStr := `\n\tCREATE VIEW ipc_paid_price_ledger_amount AS\n\tSELECT \n\t\tipc_final.ledger 'Account Code'\n\t\t,ipc_final.subledger 'Account Free'\n\t\t,ipc_final.paid_price 'Amount'\n\tFROM ipc_final\n\tWHERE ipc_final.ledger IN(` + ledgerBookedAtSubledgerLevel + `)\n\tGROUP BY ipc_final.ledger, ipc_final.subledger\n\t`\n\n\tcreateIpcPaidPriceLedgerAmountView, err := db.Prepare(createIpcPaidPriceLedgerAmountViewStr)\n\tcheckError(err)\n\tcreateIpcPaidPriceLedgerAmountView.Exec()\n\n}",
"func Tfoot(attrs []htmlgo.Attribute, children ...HTML) HTML {\n\treturn &htmlgo.Tree{Tag: \"tfoot\", Attributes: attrs, Children: children}\n}",
"func WriteHTMLPageFooter(w io.Writer) {\n\tif err := footerTemplate.Execute(w, nil); err != nil {\n\t\tlog.Printf(\"zpages: executing template: %v\", err)\n\t}\n}",
"func CreateCommissionVatLedgerAmountView(db *sql.DB) {\n\n\tcreateCommissionVatLedgerAmountViewStr := `\n\tCREATE VIEW commission_vat_ledger_amount AS\n\tSELECT \n\tcf.'Account Code'\n\t,cf.'Account Free'\n\t,SUM(cf.Amount) 'Amount'\n\tFROM \n\t(SELECT \n\t\t32021 'Account Code'\n\t\t,NULL 'Account Free'\n\t\t,commission_final.commission_vat 'Amount'\n\tFROM commission_final) cf\n\tGROUP BY cf.'Account Code', cf.'Account Free'\n\t`\n\n\tcreateCommissionVatLedgerAmountView, err := db.Prepare(createCommissionVatLedgerAmountViewStr)\n\tcheckError(err)\n\tcreateCommissionVatLedgerAmountView.Exec()\n\n}",
"func (fd *FileDecryptionProperties) FooterKey() string { return fd.footerKey }",
"func (fe *FileEncryptionProperties) FooterKey() string { return fe.footerKey }",
"func CreateTotalLedgerAmountView(db *sql.DB) {\n\n\t// ipc_paid_price_ledger_amount: in R, no filter is used here...\n\t// which makes me think filtering by ledgerBookedAtSubledgerLevel is useless in the original ipc_paid_price_ledger_amount...\n\t// yes, I think in R it is only useful for the commission booking which is bullshit and you changed here so now no need of filter probably\n\tcreateTotalLedgerAmountViewStr := `\n\tCREATE VIEW total_ledger_amount AS\n\tSELECT \n\ttotal.'Account Code'\n\t,total.'Account Free'\n\t,SUM(total.Amount*-1) 'Amount' -- (-1) because total + sum of amounts should = 0\n\n\tFROM (\n\n\t-- ipc_voucher_ledger_amount\n\tSELECT\n\t\t31002 'Account Code'\n\t\t,ipc_final.beneficiary_code 'Account Free'\n\t\t,ipc_final.voucher 'Amount'\n\tFROM ipc_final\n\n\tUNION ALL\n\n\t-- ipt_voucher_ledger_amount\n\tSELECT \n\t\t31002 'Account Code'\n\t\t,ipt_final.beneficiary_code 'Account Free'\n\t\t,ipt_final.voucher 'Amount'\n\tFROM ipt_final\n\n\tUNION ALL\n\n\t-- ipc_paid_price_ledger_amount\n\tSELECT \n\t\t31002 'Account Code'\n\t\t,ipc_final.beneficiary_code 'Account Free'\n\t\t,ipc_final.paid_price 'Amount'\n\tFROM ipc_final\n\n\tUNION ALL\n\n\t-- ipt_paid_price_ledger_amount\n\tSELECT \n\t\t31002 'Account Code'\n\t\t,ipt_final.beneficiary_code 'Account Free'\n\t\t,ipt_final.paid_price 'Amount'\n\tFROM ipt_final\n\n\tUNION ALL\n\n\t-- commission_vat_ledger_amount\n\tSELECT \n\t\t31002 'Account Code'\n\t\t,commission_final.beneficiary_code 'Account Free'\n\t\t,commission_final.commission_vat 'Amount'\n\tFROM commission_final\n\n\tUNION ALL\n\n\t-- commission_revenue_ledger_amount\n\tSELECT \n\t\t31002 'Account Code'\n\t\t,commission_final.beneficiary_code 'Account Free'\n\t\t,commission_final.commission_revenue 'Amount'\n\tFROM commission_final\t\n\t\n\t) total\n\n\tGROUP BY total.'Account Code', total.'Account Free'\n\t`\n\n\tcreateTotalLedgerAmountView, err := db.Prepare(createTotalLedgerAmountViewStr)\n\tcheckError(err)\n\tcreateTotalLedgerAmountView.Exec()\n}",
"func (me TxsdPresentationAttributesGraphicsDisplay) IsTableFooterGroup() bool {\n\treturn me.String() == \"table-footer-group\"\n}",
"func (s *SVG) footer() string {\n\treturn \"</svg>\"\n}",
"func (e *DiscordWebhookEmbed) SetFooter(text string, icon string) {\n\te.Footer.IconURL = icon\n\te.Footer.Text = text\n}",
"func (this *MarkupConfluence) tableFooter() string {\n\treturn \"\\n\"\n}",
"func (s *BasePlSqlParserListener) ExitCreate_materialized_view(ctx *Create_materialized_viewContext) {\n}",
"func NewCreateResponseBody(res *warehouseviews.WarehouseView) *CreateResponseBody {\n\tbody := &CreateResponseBody{\n\t\tID: *res.ID,\n\t\tName: *res.Name,\n\t\tCode: *res.Code,\n\t\tAddress: *res.Address,\n\t\tType: *res.Type,\n\t}\n\tif res.Founder != nil {\n\t\tbody.Founder = marshalWarehouseviewsFounderViewToFounderResponseBody(res.Founder)\n\t}\n\treturn body\n}",
"func (s *BasePlSqlParserListener) ExitCreate_materialized_view_log(ctx *Create_materialized_view_logContext) {\n}",
"func ReadFooter(options *StoreOptions, file File) (*Footer, error) {\n\tfinfo, err := file.Stat()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfref := &FileRef{file: file, refs: 1}\n\n\t// To avoid an EOF while reading, start scanning the footer from\n\t// the last byte. This is under the assumption that the footer is\n\t// at least 2 bytes long.\n\tf, err := ScanFooter(options, fref, finfo.Name(), finfo.Size()-1)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfref.DecRef() // ScanFooter added its own ref-counts on success.\n\n\treturn f, err\n}",
"func (xs *Sheet) SetFooter(footer string, margin float64) int {\n\ttmp, _, _ := xs.xb.lib.NewProc(\"xlSheetSetFooterW\").\n\t\tCall(xs.self, S(footer), F(margin))\n\treturn int(tmp)\n}",
"func (fe *FileEncryptionProperties) FooterKeyMetadata() string { return fe.footerKeyMetadata }",
"func templatesLayoutFooterHtml() (*asset, error) {\n\tpath := filepath.Join(rootDir, \"templates/layout/footer.html\")\n\tname := \"templates/layout/footer.html\"\n\tbytes, err := bindataRead(path, name)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfi, err := os.Stat(path)\n\tif err != nil {\n\t\terr = fmt.Errorf(\"Error reading asset info %s at %s: %v\", name, path, err)\n\t}\n\n\ta := &asset{bytes: bytes, info: fi}\n\treturn a, err\n}",
"func (c *ColumnBase) FooterAttributes(ctx context.Context, row int, col int) html5tag.Attributes {\n\tif len(c.footerAttributes) < row+1 {\n\t\t// extend the attributes\n\t\tc.footerAttributes = append(c.footerAttributes, make([]html5tag.Attributes, row-len(c.footerAttributes)+1)...)\n\t}\n\tif c.footerAttributes[row] == nil {\n\t\tc.footerAttributes[row] = html5tag.NewAttributes()\n\t}\n\treturn c.footerAttributes[row]\n}",
"func ScanFooter(options *StoreOptions, fref *FileRef, fileName string,\n\tpos int64) (*Footer, error) {\n\tfooterBeg := make([]byte, footerBegLen)\n\n\t// Align pos to the start of a page (floor).\n\tpos = pageAlignFloor(pos)\n\n\tfor {\n\t\tfor { // Scan for StoreMagicBeg, which may be a potential footer.\n\t\t\tif pos <= 0 {\n\t\t\t\treturn nil, ErrNoValidFooter\n\t\t\t}\n\n\t\t\tn, err := fref.file.ReadAt(footerBeg, pos)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tif n == footerBegLen &&\n\t\t\t\tbytes.Equal(StoreMagicBeg, footerBeg[:lenMagicBeg]) &&\n\t\t\t\tbytes.Equal(StoreMagicBeg, footerBeg[lenMagicBeg:2*lenMagicBeg]) {\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\t// Move pos back by page size.\n\t\t\tpos -= int64(StorePageSize)\n\t\t}\n\n\t\t// Read and check the potential footer.\n\t\tfooterBegBuf := bytes.NewBuffer(footerBeg[2*lenMagicBeg:])\n\n\t\tvar version uint32\n\t\tif err := binary.Read(footerBegBuf, StoreEndian, &version); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif version != StoreVersion {\n\t\t\treturn nil, fmt.Errorf(\"store: version mismatch, \"+\n\t\t\t\t\"current: %v != found: %v\", StoreVersion, version)\n\t\t}\n\n\t\tvar length uint32\n\t\tif err := binary.Read(footerBegBuf, StoreEndian, &length); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tdata := make([]byte, int64(length)-int64(footerBegLen))\n\n\t\tn, err := fref.file.ReadAt(data, pos+int64(footerBegLen))\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif n == len(data) &&\n\t\t\tbytes.Equal(StoreMagicEnd, data[n-lenMagicEnd*2:n-lenMagicEnd]) &&\n\t\t\tbytes.Equal(StoreMagicEnd, data[n-lenMagicEnd:]) {\n\n\t\t\tcontent := int(length) - footerBegLen - footerEndLen\n\t\t\tb := bytes.NewBuffer(data[content:])\n\n\t\t\tvar offset int64\n\t\t\tif err = binary.Read(b, StoreEndian, &offset); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif offset != pos {\n\t\t\t\treturn nil, fmt.Errorf(\"store: offset mismatch, \"+\n\t\t\t\t\t\"wanted: %v != found: %v\", offset, pos)\n\t\t\t}\n\n\t\t\tvar length1 uint32\n\t\t\tif err = binary.Read(b, StoreEndian, &length1); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif length1 != length {\n\t\t\t\treturn nil, fmt.Errorf(\"store: length mismatch, \"+\n\t\t\t\t\t\"wanted: %v != found: %v\", length1, length)\n\t\t\t}\n\n\t\t\tf := &Footer{refs: 1, fileName: fileName, filePos: offset}\n\n\t\t\terr = json.Unmarshal(data[:content], f)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\t// json.Unmarshal would have just loaded the map.\n\t\t\t// We now need to load each segment into the map.\n\t\t\t// Also recursively load child footer segment stacks.\n\t\t\terr = f.loadSegments(options, fref)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\treturn f, nil\n\t\t}\n\t\t// Else, invalid footer - StoreMagicEnd missing and/or file\n\t\t// pos out of bounds.\n\n\t\t// Footer was invalid, so keep scanning.\n\t\tpos -= int64(StorePageSize)\n\t}\n}",
"func printFooter(w io.Writer, info *athena.QueryExecution) {\n\tstats := info.Statistics\n\trunTimeMs := aws.Int64Value(stats.EngineExecutionTimeInMillis)\n\tscannedBytes := aws.Int64Value(stats.DataScannedInBytes)\n\tloc := aws.StringValue(info.ResultConfiguration.OutputLocation)\n\tlog.Printf(\"EngineExecutionTimeInMillis: %d milliseconds\\n\", runTimeMs)\n\tlog.Printf(\"DataScannedInBytes: %d bytes\\n\", scannedBytes)\n\tlog.Printf(\"OutputLocation: %s\\n\", loc)\n\tfmt.Fprintf(w, \"Run time: %.2f seconds | Data scanned: %s\\nLocation: %s\\n\",\n\t\tfloat64(runTimeMs)/1000, FormatBytes(scannedBytes), loc)\n}",
"func Tfoot(props *TfootProps, children ...Element) *TfootElem {\n\trProps := &_TfootProps{\n\t\tBasicHTMLElement: newBasicHTMLElement(),\n\t}\n\n\tif props != nil {\n\t\tprops.assign(rProps)\n\t}\n\n\treturn &TfootElem{\n\t\tElement: createElement(\"tfoot\", rProps, children...),\n\t}\n}",
"func BottomPrimaryLayout(topView, bottomView string) func(*gocui.Gui) error {\n\treturn func(g *gocui.Gui) error {\n\t\t// ensure both of our target view exist\n\t\tbottom, err := g.View(bottomView)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\t_, err = g.View(topView)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// check the size of the top view and make sure it's reasonable\n\t\tmaxX, maxY := g.Size()\n\t\t_, bottomHeight := bottom.Size()\n\t\tif bottomHeight > maxY/2 {\n\t\t\tbottomHeight = maxY / 2\n\t\t}\n\n\t\t// configure the position and size of the bottom view\n\t\tbottomPosY := maxY - bottomHeight - 2\n\t\tif _, err = g.SetView(bottomView, 0, bottomPosY, maxX-1, maxY-1); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// configure the position and size of the top view\n\t\ttopHeight := bottomPosY - 1\n\t\tif _, err = g.SetView(topView, 0, 0, maxX-1, topHeight); err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t}\n}",
"func (w *Writer) Close() error {\n\tw.finishPublicSection()\n\thb := w.headerBytes()\n\n\tbuf := bytes.NewBuffer(hb)\n\tw.Stats.Footer = Footer{\n\t\tRefIndexOffset: w.Stats.BlockStats[BlockTypeRef].IndexOffset,\n\t\tObjOffset: w.Stats.BlockStats[BlockTypeObj].Offset,\n\t\tObjIndexOffset: w.Stats.BlockStats[BlockTypeObj].IndexOffset,\n\t\tLogOffset: w.Stats.BlockStats[BlockTypeLog].Offset,\n\t\tLogIndexOffset: w.Stats.BlockStats[BlockTypeLog].IndexOffset,\n\t}\n\n\tf := w.Stats.Footer\n\tf.ObjOffset = f.ObjOffset<<5 | uint64(w.Stats.ObjectIDLen)\n\n\tif err := binary.Write(buf, binary.BigEndian, &f); err != nil {\n\t\treturn err\n\t}\n\n\th := crc32.NewIEEE()\n\th.Write(buf.Bytes())\n\tcrc := h.Sum32()\n\n\tbinary.Write(buf, binary.BigEndian, crc)\n\n\tw.paddedWriter.pendingPadding = 0\n\tn, err := w.paddedWriter.Write(buf.Bytes(), 0)\n\tif n != footerSize {\n\t\tlog.Panicf(\"footer size %d\", n)\n\t}\n\treturn err\n}",
"func (logMainView *LogMainView) HSplit() {\n\tlogMainView.rootFlex.SetDirection(tview.FlexRow)\n\tv := logMainView.addView()\n\tlogMainView.app.SetFocus(v)\n\tlogMainView.views = append(logMainView.views, v)\n}",
"func NewView() *View {\n\tvar v = View{}\n\n\tv.Goal = widgets.NewParagraph()\n\tv.Goal.Title = \"Your goal\"\n\tv.Goal.SetRect(0, 0, 65, 3)\n\n\tv.Location = widgets.NewParagraph()\n\tv.Location.Title = \"Location\"\n\tv.Location.SetRect(0, 3, 38, 23)\n\n\tv.SkillsBar = widgets.NewTable()\n\tv.SkillsBar.Title = \"Skill bar\"\n\tv.SkillsBar.Rows = [][]string{{\"\"}}\n\tv.SkillsBar.SetRect(0, 23, 65, 30)\n\n\tv.CombatLog = widgets.NewParagraph()\n\tv.CombatLog.Title = \"Combat log\"\n\tv.CombatLog.SetRect(0, 30, 65, 37)\n\n\tv.Hero = widgets.NewTable()\n\tv.Hero.Title = \"My hero\"\n\tv.Hero.Rows = [][]string{{\"\"}}\n\tv.Hero.SetRect(40, 3, 65, 14)\n\n\tv.Monster = widgets.NewTable()\n\tv.Monster.Title = \"\"\n\tv.Monster.Rows = [][]string{{\"\"}}\n\tv.Monster.SetRect(40, 14, 65, 23)\n\n\tui.Render(v.All()...)\n\n\treturn &v\n}",
"func (p *Builder) writeProgramFooter(forXDP bool) {\n\t// Fall through here if there's no match. Also used when we hit an error or if policy rejects packet.\n\tp.b.LabelNextInsn(\"deny\")\n\n\t// Store the policy result in the state for the next program to see.\n\tp.b.MovImm32(R1, int32(state.PolicyDeny))\n\tp.b.Store32(R9, R1, stateOffPolResult)\n\n\t// Execute the tail call to drop program\n\tp.b.Mov64(R1, R6) // First arg is the context.\n\tp.b.LoadMapFD(R2, uint32(p.jumpMapFD)) // Second arg is the map.\n\tif p.useJmps {\n\t\tp.b.AddComment(fmt.Sprintf(\"Deny jump to %d\", p.denyJmp))\n\t\tp.b.MovImm32(R3, int32(p.denyJmp)) // Third arg is the index (rather than a pointer to the index).\n\t} else {\n\t\tp.b.Load32(R3, R6, skbCb1) // Third arg is the index from skb->cb[1]).\n\t}\n\tp.b.Call(HelperTailCall)\n\n\t// Fall through if tail call fails.\n\tp.b.LabelNextInsn(\"exit\")\n\tif forXDP {\n\t\tp.b.MovImm64(R0, 1 /* XDP_DROP */)\n\t} else {\n\t\tp.b.MovImm64(R0, 2 /* TC_ACT_SHOT */)\n\t}\n\tp.b.Exit()\n\n\tif forXDP {\n\t\tp.b.LabelNextInsn(\"xdp_pass\")\n\t\tp.b.MovImm64(R0, 2 /* XDP_PASS */)\n\t\tp.b.Exit()\n\t}\n\n\tif p.b.TargetIsUsed(\"allow\") {\n\t\tp.b.LabelNextInsn(\"allow\")\n\t\t// Store the policy result in the state for the next program to see.\n\t\tp.b.MovImm32(R1, int32(state.PolicyAllow))\n\t\tp.b.Store32(R9, R1, stateOffPolResult)\n\t\t// Execute the tail call.\n\t\tp.b.Mov64(R1, R6) // First arg is the context.\n\t\tp.b.LoadMapFD(R2, uint32(p.jumpMapFD)) // Second arg is the map.\n\t\tif p.useJmps {\n\t\t\tp.b.AddComment(fmt.Sprintf(\"Allow jump to %d\", p.allowJmp))\n\t\t\tp.b.MovImm32(R3, int32(p.allowJmp)) // Third arg is the index (rather than a pointer to the index).\n\t\t} else {\n\t\t\tp.b.Load32(R3, R6, skbCb0) // Third arg is the index from skb->cb[0]).\n\t\t}\n\t\tp.b.Call(HelperTailCall)\n\n\t\t// Fall through if tail call fails.\n\t\tp.b.MovImm32(R1, state.PolicyTailCallFailed)\n\t\tp.b.Store32(R9, R1, stateOffPolResult)\n\t\tif forXDP {\n\t\t\tp.b.MovImm64(R0, 1 /* XDP_DROP */)\n\t\t} else {\n\t\t\tp.b.MovImm64(R0, 2 /* TC_ACT_SHOT */)\n\t\t}\n\t\tp.b.Exit()\n\t}\n}",
"func (pg *AppOverviewPage) Layout(gtx layout.Context) layout.Dimensions {\n\tpageContent := []func(gtx C) D{\n\t\tfunc(gtx C) D {\n\t\t\tif len(pg.mixerWallets) == 0 {\n\t\t\t\treturn D{}\n\t\t\t}\n\n\t\t\treturn components.MixerInfoLayout(gtx, pg.Load, true, pg.toMixer.Layout, func(gtx C) D {\n\t\t\t\treturn pg.listMixer.Layout(gtx, len(pg.mixerWallets), func(gtx C, i int) D {\n\t\t\t\t\treturn layout.Inset{Bottom: values.MarginPadding5}.Layout(gtx, func(gtx C) D {\n\t\t\t\t\t\taccounts, _ := pg.mixerWallets[i].GetAccountsRaw()\n\t\t\t\t\t\tvar unmixedBalance string\n\t\t\t\t\t\tfor _, acct := range accounts.Acc {\n\t\t\t\t\t\t\tif acct.Number == pg.mixerWallets[i].UnmixedAccountNumber() {\n\t\t\t\t\t\t\t\tunmixedBalance = dcrutil.Amount(acct.TotalBalance).String()\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn components.MixerInfoContentWrapper(gtx, pg.Load, func(gtx C) D {\n\t\t\t\t\t\t\treturn layout.Flex{Axis: layout.Vertical}.Layout(gtx,\n\t\t\t\t\t\t\t\tlayout.Rigid(func(gtx C) D {\n\t\t\t\t\t\t\t\t\ttxt := pg.Theme.Label(values.TextSize14, pg.mixerWallets[i].Name)\n\t\t\t\t\t\t\t\t\ttxt.Font.Weight = text.Medium\n\n\t\t\t\t\t\t\t\t\treturn layout.Inset{Bottom: values.MarginPadding10}.Layout(gtx, txt.Layout)\n\t\t\t\t\t\t\t\t}),\n\t\t\t\t\t\t\t\tlayout.Rigid(func(gtx C) D {\n\t\t\t\t\t\t\t\t\treturn layout.Flex{Spacing: layout.SpaceBetween, Alignment: layout.Middle}.Layout(gtx,\n\t\t\t\t\t\t\t\t\t\tlayout.Rigid(func(gtx C) D {\n\t\t\t\t\t\t\t\t\t\t\tt := pg.Theme.Label(values.TextSize14, values.String(values.StrUnmixedBalance))\n\t\t\t\t\t\t\t\t\t\t\tt.Color = pg.Theme.Color.GrayText2\n\t\t\t\t\t\t\t\t\t\t\treturn t.Layout(gtx)\n\t\t\t\t\t\t\t\t\t\t}),\n\t\t\t\t\t\t\t\t\t\tlayout.Rigid(func(gtx C) D {\n\t\t\t\t\t\t\t\t\t\t\treturn components.LayoutBalanceSize(gtx, pg.Load, unmixedBalance, values.TextSize20)\n\t\t\t\t\t\t\t\t\t\t}),\n\t\t\t\t\t\t\t\t\t)\n\t\t\t\t\t\t\t\t}),\n\t\t\t\t\t\t\t)\n\t\t\t\t\t\t})\n\t\t\t\t\t})\n\t\t\t\t})\n\t\t\t})\n\t\t},\n\t\tfunc(gtx C) D {\n\t\t\t// allow the recentTransactionsSection to extend the entire width of the display area.\n\t\t\tgtx.Constraints.Min.X = gtx.Constraints.Max.X\n\t\t\treturn pg.recentTransactionsSection(gtx)\n\t\t},\n\t\tfunc(gtx C) D {\n\t\t\tif pg.WL.MultiWallet.ReadBoolConfigValueForKey(load.FetchProposalConfigKey, false) && len(pg.proposalItems) != 0 {\n\t\t\t\treturn pg.recentProposalsSection(gtx)\n\t\t\t}\n\t\t\treturn D{}\n\t\t},\n\t\tfunc(gtx C) D {\n\t\t\treturn pg.syncStatusSection(gtx)\n\t\t},\n\t}\n\n\tif pg.WL.MultiWallet.IsSyncing() || pg.WL.MultiWallet.IsRescanning() || pg.WL.MultiWallet.Politeia.IsSyncing() {\n\t\t// Will refresh the overview page every 2 seconds while\n\t\t// sync is active. When sync/rescan is started or ended,\n\t\t// sync is considered inactive and no refresh occurs. A\n\t\t// sync state change listener is used to refresh the display\n\t\t// when the sync state changes.\n\t\top.InvalidateOp{At: gtx.Now.Add(2 * time.Second)}.Add(gtx.Ops)\n\t}\n\n\treturn components.UniformPadding(gtx, func(gtx C) D {\n\t\treturn pg.Theme.List(pg.scrollContainer).Layout(gtx, len(pageContent), func(gtx C, i int) D {\n\t\t\tm := values.MarginPadding5\n\t\t\tif i == len(pageContent) {\n\t\t\t\t// remove padding after the last item\n\t\t\t\tm = values.MarginPadding0\n\t\t\t}\n\t\t\treturn layout.Inset{\n\t\t\t\tRight: values.MarginPadding2,\n\t\t\t\tBottom: m,\n\t\t\t}.Layout(gtx, pageContent[i])\n\t\t})\n\t})\n}",
"func WithFooterKeyID(key string) EncryptOption {\n\tif !utf8.ValidString(key) {\n\t\tpanic(\"parquet: footer key id should be UTF8 encoded\")\n\t}\n\treturn WithFooterKeyMetadata(key)\n}",
"func (e *Exporter) ExportView(vd *view.Data) {\n\te.bundler.Add(vd, 1)\n}",
"func newBarLayout(bar *bar) barLayout {\n\treturn barLayout{bar, false, fyne.NewPos(0, 0)}\n}",
"func (pg *walletPage) Layout(gtx layout.Context) layout.Dimensions {\n\tcommon := pg.common\n\tif *pg.refreshPage {\n\t\tcommon.refreshWindow()\n\t\t*pg.refreshPage = false\n\t}\n\n\tif common.info.LoadedWallets == 0 {\n\t\treturn common.Layout(gtx, func(gtx C) D {\n\t\t\treturn common.UniformPadding(gtx, func(gtx C) D {\n\t\t\t\treturn layout.Center.Layout(gtx, func(gtx C) D {\n\t\t\t\t\treturn common.theme.H3(values.String(values.StrNoWalletLoaded)).Layout(gtx)\n\t\t\t\t})\n\t\t\t})\n\t\t})\n\t}\n\n\tfor index := 0; index < common.info.LoadedWallets; index++ {\n\t\tif common.info.Wallets[index].IsWatchingOnly {\n\t\t\tif _, ok := pg.watchOnlyWalletMoreButtons[index]; !ok {\n\t\t\t\tpg.watchOnlyWalletMoreButtons[index] = decredmaterial.IconButton{\n\t\t\t\t\tIconButtonStyle: material.IconButtonStyle{\n\t\t\t\t\t\tButton: new(widget.Clickable),\n\t\t\t\t\t\tIcon: common.icons.navigationMore,\n\t\t\t\t\t\tSize: values.MarginPadding25,\n\t\t\t\t\t\tBackground: color.NRGBA{},\n\t\t\t\t\t\tColor: common.theme.Color.Text,\n\t\t\t\t\t\tInset: layout.UniformInset(values.MarginPadding0),\n\t\t\t\t\t},\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tif _, ok := pg.collapsibles[index]; !ok {\n\t\t\t\taddAcctBtn := common.theme.IconButton(new(widget.Clickable), common.icons.contentAdd)\n\t\t\t\taddAcctBtn.Inset = layout.UniformInset(values.MarginPadding0)\n\t\t\t\taddAcctBtn.Size = values.MarginPadding25\n\t\t\t\taddAcctBtn.Background = color.NRGBA{}\n\t\t\t\taddAcctBtn.Color = common.theme.Color.Text\n\n\t\t\t\tbackupBtn := common.theme.PlainIconButton(new(widget.Clickable), common.icons.navigationArrowForward)\n\t\t\t\tbackupBtn.Color = common.theme.Color.Surface\n\t\t\t\tbackupBtn.Inset = layout.UniformInset(values.MarginPadding0)\n\t\t\t\tbackupBtn.Size = values.MarginPadding20\n\n\t\t\t\tpg.collapsibles[index] = collapsible{\n\t\t\t\t\tcollapsible: pg.theme.CollapsibleWithOption(),\n\t\t\t\t\taddAcctBtn: addAcctBtn,\n\t\t\t\t\tbackupAcctBtn: backupBtn,\n\t\t\t\t}\n\t\t\t}\n\n\t\t}\n\t}\n\n\tpageContent := []func(gtx C) D{\n\t\tfunc(gtx C) D {\n\t\t\treturn pg.walletSection(gtx, common)\n\t\t},\n\t\tfunc(gtx C) D {\n\t\t\treturn pg.watchOnlyWalletSection(gtx, common)\n\t\t},\n\t}\n\n\tbody := func(gtx C) D {\n\t\treturn layout.Stack{Alignment: layout.SE}.Layout(gtx,\n\t\t\tlayout.Expanded(func(gtx C) D {\n\t\t\t\treturn pg.container.Layout(gtx, len(pageContent), func(gtx C, i int) D {\n\t\t\t\t\tdims := layout.UniformInset(values.MarginPadding5).Layout(gtx, pageContent[i])\n\t\t\t\t\tif pg.isAddWalletMenuOpen || pg.openPopupIndex != -1 {\n\t\t\t\t\t\tdims.Size.Y += 60\n\t\t\t\t\t}\n\t\t\t\t\treturn dims\n\t\t\t\t})\n\t\t\t}),\n\t\t\tlayout.Stacked(func(gtx C) D {\n\t\t\t\treturn pg.layoutAddWalletSection(gtx, common)\n\t\t\t}),\n\t\t)\n\t}\n\n\treturn common.Layout(gtx, func(gtx C) D {\n\t\treturn layout.Stack{}.Layout(gtx,\n\t\t\tlayout.Expanded(func(gtx C) D {\n\t\t\t\treturn common.UniformPadding(gtx, body)\n\t\t\t}),\n\t\t\tlayout.Expanded(func(gtx C) D {\n\t\t\t\tif pg.isAddWalletMenuOpen || pg.openPopupIndex != -1 {\n\t\t\t\t\thalfHeight := gtx.Constraints.Max.Y / 2\n\t\t\t\t\treturn pg.container.Layout(gtx, len(pg.backdrops), func(gtx C, i int) D {\n\t\t\t\t\t\tgtx.Constraints.Min.Y = halfHeight\n\t\t\t\t\t\treturn pg.backdrops[i].Layout(gtx)\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t\treturn D{}\n\t\t\t}),\n\t\t)\n\t})\n}",
"func (s *BasemumpsListener) ExitView_(ctx *View_Context) {}",
"func drawBottomBanner(ctx *gg.Context, text string) {\n\tx := float64(ctx.Width()) / 2\n\ty := float64(ctx.Height()) - IMAGE_MARGIN\n\tdrawText(ctx, text, x, y, 0.5, 1.0, BOTTOM_TEXT_DIVISOR)\n}",
"func CreateCommissionRevenueLedgerAmountView(db *sql.DB) {\n\n\tcreateCommissionRevenueLedgerAmountViewStr := `\n\tCREATE VIEW commission_revenue_ledger_amount AS\n\tSELECT \n\tcr.'Account Code'\n\t,cr.'Account Free'\n\t,SUM(cr.'Amount') 'Amount'\n\tFROM \n\t(SELECT \n\t\t62001 'Account Code'\n\t\t,commission_final.beneficiary_code 'Account Free'\n\t\t,commission_final.commission_revenue 'Amount'\n\tFROM commission_final) cr\n\tGROUP BY cr.'Account Code', cr.'Account Free'\n\t`\n\n\tcreateCommissionRevenueLedgerAmountView, err := db.Prepare(createCommissionRevenueLedgerAmountViewStr)\n\tcheckError(err)\n\tcreateCommissionRevenueLedgerAmountView.Exec()\n\n}",
"func (s *BasePlSqlParserListener) ExitCreate_package_body(ctx *Create_package_bodyContext) {}",
"func (*View) Close() error { return nil }",
"func (s *BaseSyslParserListener) ExitView(ctx *ViewContext) {}",
"func NewWeather(closeTapped func()) (view fyne.CanvasObject, viewModel *Weather) {\n\tw := Weather{}\n\tw.city = widget.NewLabel(\"City\")\n\tw.city.Alignment = fyne.TextAlignCenter\n\tw.city.TextStyle.Bold = true\n\n\tw.currentTemperature = widget.NewLabel(\"Current Temperature\")\n\tw.currentTemperature.Alignment = fyne.TextAlignCenter\n\n\tw.clock = widget.NewLabel(\"Clock\")\n\tw.clock.TextStyle.Bold = true\n\n\tw.lastUpdate = widget.NewLabel(\"Last update\")\n\tw.lastUpdate.Alignment = fyne.TextAlignCenter\n\n\tw.background = &canvas.Image{FillMode: canvas.ImageFillStretch}\n\tw.today = newForecast()\n\tw.tomorrow = newForecast()\n\tw.afterTomorrow = newForecast()\n\n\theader := container.New(layout.NewHBoxLayout(),\n\t\tlayout.NewSpacer(),\n\t\tcontainer.New(layout.NewVBoxLayout(),\n\t\t\tw.city,\n\t\t\tw.currentTemperature,\n\t\t),\n\t\tcontainer.NewVBox(),\n\t\tlayout.NewSpacer(),\n\t)\n\tfooter := container.New(layout.NewHBoxLayout(),\n\t\twidget.NewButton(assets.GetLabel(assets.Close), closeTapped),\n\t\tlayout.NewSpacer(),\n\t\tw.clock,\n\t)\n\tcenter := container.New(layout.NewVBoxLayout(),\n\t\tcontainer.New(layout.NewGridLayout(3),\n\t\t\tw.today.layout,\n\t\t\tw.tomorrow.layout,\n\t\t\tw.afterTomorrow.layout,\n\t\t),\n\t\tw.lastUpdate,\n\t)\n\tw.view = container.New(layout.NewMaxLayout(),\n\t\tw.background,\n\t\tcontainer.New(layout.NewVBoxLayout(),\n\t\t\theader,\n\t\t\tlayout.NewSpacer(),\n\t\t\tcenter,\n\t\t\tlayout.NewSpacer(),\n\t\t\tfooter,\n\t\t),\n\t)\n\n\tw.today.header.SetText(assets.GetLabel(assets.Today))\n\tw.tomorrow.header.SetText(assets.GetLabel(assets.Tomorrow))\n\tw.afterTomorrow.header.SetText(assets.GetLabel(assets.AfterTomorrow))\n\tdefaultBackground, _ := assets.GetBackgroundImage(weather.ConditionClear)\n\tw.SetBackground(defaultBackground)\n\n\treturn w.view, &w\n}",
"func WithFooterKey(key string) FileDecryptionOption {\n\treturn func(cfg *fileDecryptConfig) {\n\t\tif key != \"\" {\n\t\t\tcfg.footerKey = key\n\t\t}\n\t}\n}",
"func (s *BasePlSqlParserListener) ExitCreate_function_body(ctx *Create_function_bodyContext) {}",
"func (p *Packer) Close() error {\n\tp.writeFooter()\n\treturn p.outFile.Close()\n}",
"func NewBorderLayout(top, bottom, left, right fyne.CanvasObject) fyne.Layout {\n\treturn &borderLayout{top, bottom, left, right}\n}",
"func (c *ColumnBase) SetFooterTexter(s CellTexter) ColumnI {\n\tc.footerTexter = s\n\treturn c.this()\n}",
"func (f *Footer) Length() uint64 {\n\tjBuf, err := json.Marshal(f)\n\tif err != nil {\n\t\treturn 0\n\t}\n\n\tfooterLen := footerBegLen + len(jBuf) + footerEndLen\n\treturn uint64(footerLen)\n}",
"func (s *SimPDF) BottomLine(style models.Styles) {\n\tif style.Border.Width.Bottom > 0 {\n\t\ts.DrawBottomLine(style)\n\t}\n}",
"func (jsonExporter *JSONExportOutput) WriteFooter() error {\n\tif jsonExporter.ArrayOutput {\n\t\t_, err := jsonExporter.Out.Write([]byte{json.ArrayEnd, '\\n'})\n\t\t// TODO check # bytes written?\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif jsonExporter.PrettyOutput {\n\t\tif _, err := jsonExporter.Out.Write([]byte(\"\\n\")); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}",
"func finalizer(a *CasbinMenuAdapter) {\n}",
"func WithFooterKeyMetadata(keyMeta string) EncryptOption {\n\treturn func(cfg *configEncrypt) {\n\t\tif keyMeta != \"\" {\n\t\t\tcfg.keyMetadata = keyMeta\n\t\t}\n\t}\n}",
"func (b *Builder) WriteFinalTabstop() {\n\tfmt.Fprint(&b.sb, \"$0\")\n}",
"func (ct *Cointop) layout(g *gocui.Gui) error {\n\tmaxX, maxY := ct.size()\n\tchartHeight := 10\n\ttopOffset := 0\n\n\tif v, err := g.SetView(ct.marketbarviewname, 0, topOffset, maxX, 2); err != nil {\n\t\tif err != gocui.ErrUnknownView {\n\t\t\treturn err\n\t\t}\n\t\tct.marketbarview = v\n\t\tct.marketbarview.Frame = false\n\t\tct.marketbarview.BgColor = gocui.ColorBlack\n\t\tct.marketbarview.FgColor = gocui.ColorWhite\n\t\tgo func() {\n\t\t\tct.updateMarketbar()\n\t\t\t_, found := ct.cache.Get(ct.marketbarviewname)\n\t\t\tif found {\n\t\t\t\tct.cache.Delete(ct.marketbarviewname)\n\t\t\t\tct.updateMarketbar()\n\t\t\t}\n\t\t}()\n\t}\n\n\ttopOffset = topOffset + 1\n\tif v, err := g.SetView(ct.chartviewname, 0, topOffset, maxX, topOffset+chartHeight); err != nil {\n\t\tif err != gocui.ErrUnknownView {\n\t\t\treturn err\n\t\t}\n\t\tct.chartview = v\n\t\tct.chartview.Frame = false\n\t\tgo func() {\n\t\t\tct.updateChart()\n\t\t\tcachekey := strings.ToLower(fmt.Sprintf(\"%s_%s\", \"globaldata\", strings.Replace(ct.selectedchartrange, \" \", \"\", -1)))\n\t\t\t_, found := ct.cache.Get(cachekey)\n\t\t\tif found {\n\t\t\t\tct.cache.Delete(cachekey)\n\t\t\t\tct.updateChart()\n\t\t\t}\n\t\t}()\n\t}\n\n\ttopOffset = topOffset + chartHeight\n\tif v, err := g.SetView(ct.headerviewname, 0, topOffset, ct.maxtablewidth, topOffset+2); err != nil {\n\t\tif err != gocui.ErrUnknownView {\n\t\t\treturn err\n\t\t}\n\t\tct.headersview = v\n\t\tct.headersview.Frame = false\n\t\tct.headersview.FgColor = gocui.ColorBlack\n\t\tct.headersview.BgColor = gocui.ColorGreen\n\t\tgo ct.updateHeaders()\n\t}\n\n\ttopOffset = topOffset + 1\n\tif v, err := g.SetView(ct.tableviewname, 0, topOffset, ct.maxtablewidth, maxY-1); err != nil {\n\t\tif err != gocui.ErrUnknownView {\n\t\t\treturn err\n\t\t}\n\t\tct.tableview = v\n\t\tct.tableview.Frame = false\n\t\tct.tableview.Highlight = true\n\t\tct.tableview.SelBgColor = gocui.ColorCyan\n\t\tct.tableview.SelFgColor = gocui.ColorBlack\n\t\tgo func() {\n\t\t\tct.updateCoins()\n\t\t\tct.updateTable()\n\t\t\t_, found := ct.cache.Get(\"allcoinsmap\")\n\t\t\tif found {\n\t\t\t\tct.cache.Delete(\"allcoinsmap\")\n\t\t\t\tct.updateCoins()\n\t\t\t\tct.updateTable()\n\t\t\t}\n\t\t}()\n\t}\n\n\tif v, err := g.SetView(ct.statusbarviewname, 0, maxY-2, ct.maxtablewidth, maxY); err != nil {\n\t\tif err != gocui.ErrUnknownView {\n\t\t\treturn err\n\t\t}\n\t\tct.statusbarview = v\n\t\tct.statusbarview.Frame = false\n\t\tct.statusbarview.BgColor = gocui.ColorCyan\n\t\tct.statusbarview.FgColor = gocui.ColorBlack\n\t\tgo ct.updateStatusbar(\"\")\n\t}\n\n\tif v, err := g.SetView(ct.searchfieldviewname, 0, maxY-2, ct.maxtablewidth, maxY); err != nil {\n\t\tif err != gocui.ErrUnknownView {\n\t\t\treturn err\n\t\t}\n\t\tct.searchfield = v\n\t\tct.searchfield.Editable = true\n\t\tct.searchfield.Wrap = true\n\t\tct.searchfield.Frame = false\n\t\tct.searchfield.FgColor = gocui.ColorWhite\n\t}\n\n\tif v, err := g.SetView(ct.helpviewname, 1, 1, ct.maxtablewidth-2, maxY-1); err != nil {\n\t\tif err != gocui.ErrUnknownView {\n\t\t\treturn err\n\t\t}\n\t\tct.helpview = v\n\t\tct.helpview.Frame = false\n\t\tct.helpview.BgColor = gocui.ColorBlack\n\t\tct.helpview.FgColor = gocui.ColorWhite\n\t}\n\n\tif v, err := g.SetView(ct.convertmenuviewname, 1, 1, ct.maxtablewidth-2, maxY-1); err != nil {\n\t\tif err != gocui.ErrUnknownView {\n\t\t\treturn err\n\t\t}\n\t\tct.convertmenuview = v\n\t\tct.convertmenuview.Frame = false\n\t\tct.convertmenuview.BgColor = gocui.ColorBlack\n\t\tct.convertmenuview.FgColor = 
gocui.ColorWhite\n\n\t\t// run only once on init.\n\t\t// this bit of code should be at the bottom\n\t\tct.g = g\n\t\tg.SetViewOnBottom(ct.searchfieldviewname) // hide\n\t\tg.SetViewOnBottom(ct.helpviewname) // hide\n\t\tg.SetViewOnBottom(ct.convertmenuviewname) // hide\n\t\tct.setActiveView(ct.tableviewname)\n\t\tct.intervalFetchData()\n\t}\n\n\treturn nil\n}",
"func (h *Header) CreateRenderer() fyne.WidgetRenderer {\n\th.ExtendBaseWidget(h)\n\tobjects := []fyne.CanvasObject{}\n\tstyle := fyne.TextStyle{\n\t\tBold: true,\n\t}\n\tfor _, label := range h.labels {\n\t\tobj := canvas.NewText(label, h.color)\n\t\tobj.Alignment = fyne.TextAlignTrailing\n\t\tobj.TextStyle = style\n\t\tobjects = append(objects, obj)\n\t}\n\t// add 5 space margin on right side\n\tmargin := canvas.NewText(\" \", h.color)\n\tobjects = append(objects, margin)\n\n\treturn &headerRenderer{objects, margin, h}\n}",
"func NewFinalizer(\n\tlifelines []*dsl.Statement,\n\tspacer *Spacing,\n\tnoGoZones []nogozone.NoGoZone,\n\tboxes map[*dsl.Statement]*BoxTracker,\n\tsizer sizer.Sizer) *Finalizer {\n\treturn &Finalizer{\n\t\tlifelines: lifelines,\n\t\tspacer: spacer,\n\t\tnoGoZones: noGoZones,\n\t\tboxes: boxes,\n\t}\n}",
"func (s *BasePlSqlParserListener) ExitCreate_procedure_body(ctx *Create_procedure_bodyContext) {}",
"func (p PageMarginFooter) setPageMargins(pm *xlsxPageMargins) {\n\tpm.Footer = float64(p)\n}",
"func (c *ColumnBase) DrawFooterCell(ctx context.Context, row int, col int, count int, w io.Writer) {\n\tif c.isHidden {\n\t\treturn\n\t}\n\tcellHtml := c.this().FooterCellHtml(ctx, row, col)\n\n\ta := c.this().FooterAttributes(ctx, row, col)\n\ttag := \"td\"\n\tif c.asHeader {\n\t\ttag = \"th\"\n\t}\n\tpage.WriteString(w, html5tag.RenderTag(tag, a, cellHtml))\n\treturn\n}",
"func (s *BasePlSqlParserListener) ExitDrop_view(ctx *Drop_viewContext) {}",
"func (pg *BackupInstructionsPage) Layout(gtx layout.Context) layout.Dimensions {\n\tsp := components.SubPage{\n\t\tLoad: pg.Load,\n\t\tTitle: \"Keep in mind\",\n\t\tWalletName: pg.wallet.Name,\n\t\tBackButton: pg.backButton,\n\t\tBack: func() {\n\t\t\tpromptToExit(pg.Load, pg.ParentNavigator(), pg.ParentWindow())\n\t\t},\n\t\tBody: func(gtx C) D {\n\t\t\treturn pg.infoList.Layout(gtx, len(pg.checkBoxes), func(gtx C, i int) D {\n\t\t\t\treturn layout.Inset{Bottom: values.MarginPadding20}.Layout(gtx, pg.checkBoxes[i].Layout)\n\t\t\t})\n\t\t},\n\t}\n\n\tpg.viewSeedBtn.SetEnabled(pg.verifyCheckBoxes())\n\n\tlayout := func(gtx C) D {\n\t\treturn sp.Layout(pg.ParentWindow(), gtx)\n\t}\n\treturn container(gtx, *pg.Theme, layout, \"\", pg.viewSeedBtn)\n}",
"func (fe *FileEncryptionProperties) EncryptedFooter() bool { return fe.encryptedFooter }",
"func AddView(name string) {\n\tfmt.Println()\n\tviewName := ViewName(name)\n\tcontents := fmt.Sprintf(viewTemplate, TemplateFilename(name), viewName, ClassName(name), viewName)\n\tWriteFile(ViewFilename(name), contents, false);\n\tWriteFile(SassFilename(name), \"\", false);\n\tWriteFile(TemplateFilename(name), \"\", false);\n\tAddLinkTag(name);\n\tfmt.Println()\n}",
"func PrintInventoryFooter(inventory Inventory) {\n\tfmt.Print(\"\\n\")\n}",
"func (w *Window) createLayout(parent widgets.QWidget_ITF) {\n\t// Create the window layout, which will act as the layout for the underlying QMainWindow's\n\t// central widget's layout.\n\tw.windowLayout = widgets.NewQHBoxLayout2(parent)\n\tw.windowLayout.SetContentsMargins(7, 7, 7, 7)\n\n\tw.leftLayout = widgets.NewQHBoxLayout2(w.windowLayout.Widget())\n\tw.leftLayout.SetAlign(core.Qt__AlignLeft)\n\n\tw.rightLayout = widgets.NewQHBoxLayout2(w.windowLayout.Widget())\n\tw.rightLayout.SetAlign(core.Qt__AlignRight)\n\n\t// Add the left and right layout widgets, providing them equal, but positive stretch so they\n\t// meet in the middle of the window by default.\n\tw.windowLayout.AddLayout(w.leftLayout, 1)\n\tw.windowLayout.AddLayout(w.rightLayout, 1)\n}",
"func (pc *programCode) createExit(val string) {\n\tcode := \"\"\n\tcode += \"\\tmov rax, 60\\t; exit program\\n\\tmov rdi, \" + val + \"\\n\\tsyscall\\n\"\n\tpc.funcCode[0] += code\n\t// Appends this code snippet to the first\n\t// level of indentation e.g. main-function\n}",
"func WithPlaintextFooter() EncryptOption {\n\treturn func(cfg *configEncrypt) {\n\t\tcfg.encryptFooter = false\n\t}\n}"
] | [
"0.5952441",
"0.59190524",
"0.5908554",
"0.5871804",
"0.5760129",
"0.5682298",
"0.5645047",
"0.5610753",
"0.55848986",
"0.5469786",
"0.5427311",
"0.5382227",
"0.53675103",
"0.52462083",
"0.5153982",
"0.51538026",
"0.50929487",
"0.50820076",
"0.50206304",
"0.49679133",
"0.49436456",
"0.49209014",
"0.48864862",
"0.48553577",
"0.48407155",
"0.47390142",
"0.47284496",
"0.47167328",
"0.46925583",
"0.4653475",
"0.46404764",
"0.46205172",
"0.45975983",
"0.45943388",
"0.45905933",
"0.45719278",
"0.4554478",
"0.45395035",
"0.4535094",
"0.44723135",
"0.4450707",
"0.44478992",
"0.44362006",
"0.4432142",
"0.43430954",
"0.43416944",
"0.43207842",
"0.4266734",
"0.4234295",
"0.4232986",
"0.41843253",
"0.4168687",
"0.41662073",
"0.41581497",
"0.41396856",
"0.41320178",
"0.41140932",
"0.41045114",
"0.4082768",
"0.40439087",
"0.40250736",
"0.40048137",
"0.39912707",
"0.398669",
"0.39811626",
"0.3980784",
"0.3967945",
"0.396188",
"0.39582127",
"0.39436418",
"0.3941169",
"0.39325383",
"0.39269218",
"0.39246613",
"0.39232156",
"0.39214316",
"0.39132345",
"0.39034587",
"0.38932085",
"0.38735366",
"0.3871565",
"0.3863391",
"0.38574934",
"0.38457927",
"0.38368735",
"0.3836217",
"0.3823117",
"0.38199922",
"0.38077167",
"0.38061365",
"0.38011158",
"0.37731716",
"0.3769025",
"0.37495616",
"0.37464425",
"0.37310863",
"0.37139463",
"0.37101558",
"0.3699647",
"0.3696425"
] | 0.81811273 | 0 |
Search scrapes azlyrics.com for song lyrics and does regex magic to clean them up. Beware, your IP can AND will get blocked while running this, but it is only called in `go generate` (see midi/generate.go) so a normal user will never run this. | func Search(query string) (string, error) {
v := url.Values{
"q": []string{query},
}
uri := fmt.Sprintf("%s?%s", queryURI, v.Encode())
// start the scrape
resp, err := http.Get(uri)
if err != nil {
logrus.Fatalf("requesting %s failed: %v", uri, err)
}
defer resp.Body.Close()
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
logrus.Fatalf("creating document failed: %v", err)
}
link, ok := doc.Find("td").First().Find("a").Attr("href")
if !ok {
return "", fmt.Errorf("could not find top link at %s", uri)
}
// get the lyrics link
resp, err = http.Get(link)
if err != nil {
return "", fmt.Errorf("request to %s failed: %v", link, err)
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return "", fmt.Errorf("reading body from %s failed: %v", link, err)
}
// get the lyrics from the HTML
html := re.FindStringSubmatch(string(body))
if len(html) <= 0 {
return "", fmt.Errorf("[%s] regex parsing failed for body: %s", query, body)
}
// strip html tags from decoded lyrics
lyrics := reHTML.ReplaceAllString(html[0], "")
return lyrics, nil
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (e ExtractorFunc) ExtractLyrics(req request.Requester) (*lyrics.Info, error) {\n\treturn e(req)\n}",
"func wikia(track track.Track) ([]string, error) {\n\turl := getLyricsURL(track)\n\n\tresp, err := http.Get(url)\n\tif err != nil {\n\t\te := fmt.Sprintf(\"Could not access the URL: %s\", err)\n\t\treturn []string{}, errors.New(e)\n\t}\n\n\troot, err := html.Parse(resp.Body)\n\tif err != nil {\n\t\te := fmt.Sprintf(\"Could not parse the HTML body: %s\", err)\n\t\treturn []string{}, errors.New(e)\n\t}\n\n\tnode, ok := scrape.Find(root, scrape.ByClass(\"lyricbox\"))\n\tif ok {\n\t\tlyrics := buildLyrics(node.FirstChild)\n\t\treturn lyrics, nil\n\t}\n\n\treturn []string{}, errors.New(\"Could not fetch song lyrics\")\n}",
"func analyze() {\n\tlog.Printf(\"analyzing...\")\n\n\tregexp, err := regexp.Compile(\"https?://t\\\\.co/(\\\\w|-)+\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tfor tweet := range tweets {\n\t\tfor _, link := range regexp.FindAllString(tweet, -1) {\n\t\t\tlinks <- link\n\t\t}\n\t}\n}",
"func processYahooResponses(result string) []messageQueryBody {\n\n\tsubsl := \"<a class=\\\" ac-algo fz-l ac-21th lh-24\\\"\";\n\tlensubsl := len(subsl)\n\tsubsl2 := \"</a>\"\n\tlensubsl2 := len(subsl2)\n\tsubsl3 := \"<span class=\\\" fz-ms fw-m fc-12th wr-bw lh-17\\\">\"\n\tlensubsl3 := len(subsl3)\n\tsubsl4 := \"</span>\"\n\tlensubsl4 := len(subsl4)\n\tsubsl5 := \"<p class=\\\"lh-16\\\"\"\n\tlensubsl5 := len(subsl5)\n\n\tvar queryResult messageQueryBody\n\tvar queryResultArray []messageQueryBody\n\tfor i := 0; i < len(result) - lensubsl; i++ {\n\t\tmess := \"\"\n\t\tif result[i : i + lensubsl] == subsl {\n\t\t\tlength := i + lensubsl\n\t\t\tvar last int\n\t\t\tvar start int\n\n\t\t\tfor k := 1; ; k++ {\n\t\t\t\tif result[length + k: length+k+1 ] == \">\" {\n\t\t\t\t\tstart = length + k + 1;\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfor j:=1; ; j++ {\n\t\t\t\tif result[start + j: start + j + lensubsl2] == subsl2 {\n\t\t\t\t\tmess = result[start: start + j]\n\t\t\t\t\tqueryResult.Head = mess\n\t\t\t\t\tlast = start + j + lensubsl2\n\t\t\t\t\ti = last\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfound := false\n\t\t\tfor j:= 1; ; j++ {\n\t\t\t\tif result[last + j: last + j + lensubsl3] == subsl3 { // matched found for \"<span class=\\\" fz-ms fw-m fc-12th wr-bw lh-17\\\">\"\n\t\t\t\t\tfor k:= 1; ; k++ {\n\t\t\t\t\t\tif result[last + j + lensubsl3 + k: last + j + lensubsl3 + k + lensubsl4] == subsl4 { // finding index for \"</span>\"\n\t\t\t\t\t\t\tlink := result[last + j + lensubsl3 : last + j + lensubsl3 + k]\n\t\t\t\t\t\t\ti = last + j + lensubsl3 + k + lensubsl4\n\t\t\t\t\t\t\tfound = true\n\t\t\t\t\t\t\tlink = strings.Replace(link, \"<b>\", \"\", -1)\n\t\t\t\t\t\t\tlink = strings.Replace(link, \"</b>\", \"\", -1)\n\t\t\t\t\t\t\tif len(link) >= 7 {\n\t\t\t\t\t\t\t\tif link[0: 7] != \"http://\" && link[0: 8] != \"https://\" {\n\t\t\t\t\t\t\t\t\tlink = \"http://\" + link\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tqueryResult.Link = link\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tfor k := 1; ; k++ {\n\t\t\t\t\t\tif result[i + k : i + k + lensubsl5] == subsl5 {\n\t\t\t\t\t\t\tlength = i + k + lensubsl5 + 1;\n\t\t\t\t\t\t\tfor l := 1; ; l++ {\n\t\t\t\t\t\t\t\tif result[length + l: length + l + 4] == \"</p>\" {\n\t\t\t\t\t\t\t\t\tdesc := result[length: length + l]\n\t\t\t\t\t\t\t\t\tqueryResult.Desc = desc;\n\t\t\t\t\t\t\t\t\ti = length + l +4;\n\t\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif found {\n\t\t\t\t\tqueryResultArray = append(queryResultArray, queryResult)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn queryResultArray\n\n}",
"func Scrape(proxlist []m.ProxySource) []string {\n\n\tfor _, p := range proxlist { // turn to go routine\n\t\tfmt.Println(p.Reg + \"\\t\" + p.Url)\n\n\t\tresponse, err := http.Get(p.Url)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tdefer response.Body.Close()\n\n\t\t// convert io page to string\n\t\tbuf := new(bytes.Buffer)\n\t\tbuf.ReadFrom(response.Body)\n\t\tnewStr := buf.String()\n\n\t\t//fmt.Printf(newStr)\n\t\t//setup reg match\n\t\tre := regexp.MustCompile(p.Reg)\n\n\t\t//find all that match\n\t\tmatch := re.FindAllString(newStr, -1)\n\n\t\trawlist = append(rawlist, match...)\n\n\t\t//if rawlist != nil {\n\t\t//\treturn rawlist\n\t\t//}\n\n\t}\n\n\treturn rawlist\n}",
"func GetLyricsByArtist(w http.ResponseWriter, r *http.Request) {\n\tvar test foo\n\n\tif err := json.NewDecoder(r.Body).Decode(&test); err != nil {\n\t\terr := fmt.Errorf(\"error when reading request body: %w\", err)\n\t\tlog.Logger.Errorf(\"GetAllSongs failed: %v\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tif test.Search == \"\" {\n\t\tlog.Logger.Infof(\"GetAllSongs: request body was empty: %v\", test)\n\n\t\thttp.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tlog.Logger.Infof(\"GetLyricsByArtist: successfully read request body: %v\", test)\n\n\tsongData, err := internal.GetAllLyricsByArtist(test.Search)\n\tif err != nil {\n\t\terr := fmt.Errorf(\"error when getting all lyrics by artist: %w\", err)\n\t\tlog.Logger.Errorf(\"GetLyricsByArtist failed: %v\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\twordMap, err := internal.ScanWords(songData, &test.Words)\n\tif err != nil {\n\t\thttp.Error(w, http.StatusText(400), 400)\n\t}\n\n\tlog.Logger.Infof(\"finished scanning words: %v\", wordMap)\n\n\tresponse := models.Response{\n\t\tSongs: songData,\n\t\tWordMap: wordMap,\n\t}\n\n\tif err := json.NewEncoder(w).Encode(response); err != nil {\n\t\terr := fmt.Errorf(\"error when encoding response: %w\", err)\n\t\tlog.Logger.Errorf(\"GetLyricsByArtist failed: %v\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)\n\t\treturn\n\t}\n}",
"func processYahooResponses(result string) []messageQueryBody {\n\n\tsubsl := \"<a class=\\\" ac-algo fz-l ac-21th lh-24\\\"\";\n\tlensubsl := len(subsl)\n\tsubsl2 := \"</a>\"\n\tlensubsl2 := len(subsl2)\n\tsubsl3 := \"<span class=\\\" fz-ms fw-m fc-12th wr-bw lh-17\\\">\"\n\tlensubsl3 := len(subsl3)\n\tsubsl4 := \"</span>\"\n\tlensubsl4 := len(subsl4)\n\n\tvar queryResult messageQueryBody\n\tvar queryResultArray []messageQueryBody\n\tfor i := 0; i < len(result) - lensubsl; i++ {\n\t\tmess := \"\"\n\t\tif result[i : i + lensubsl] == subsl {\n\t\t\tlength := i + lensubsl\n\t\t\tvar last int\n\t\t\tvar start int\n\n\t\t\tfor k := 1; ; k++ {\n\t\t\t\tif result[length + k: length+k+1 ] == \">\" {\n\t\t\t\t\tstart = length + k + 1;\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfor j:=1; ; j++ {\n\t\t\t\tif result[start + j: start + j + lensubsl2] == subsl2 {\n\t\t\t\t\tmess = result[start: start + j]\n\t\t\t\t\tqueryResult.Head = mess\n\t\t\t\t\tlast = start + j + lensubsl2\n\t\t\t\t\ti = last\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfound := false\n\t\t\tfor j:= 1; ; j++ {\n\t\t\t\tif result[last + j: last + j + lensubsl3] == subsl3 { // matched found for \"<span class=\\\" fz-ms fw-m fc-12th wr-bw lh-17\\\">\"\n\t\t\t\t\tfor k:= 1; ; k++ {\n\t\t\t\t\t\tif result[last + j + lensubsl3 + k: last + j + lensubsl3 + k + lensubsl4] == subsl4 { // finding index for \"</span>\"\n\t\t\t\t\t\t\tlink := result[last + j + lensubsl3 : last + j + lensubsl3 + k]\n\t\t\t\t\t\t\ti = last + j + lensubsl3 + k + lensubsl4\n\t\t\t\t\t\t\tfound = true\n\t\t\t\t\t\t\tlink = strings.Replace(link, \"<b>\", \"\", -1)\n\t\t\t\t\t\t\tlink = strings.Replace(link, \"</b>\", \"\", -1)\n\t\t\t\t\t\t\tif link[0: 7] != \"http://\" && link[0: 4] != \"www.\" {\n\t\t\t\t\t\t\t\tlink = \"http://\" + link\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tqueryResult.Link = link\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif found {\n\t\t\t\t\tqueryResultArray = append(queryResultArray, queryResult)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn queryResultArray\n\n}",
"func GetLyricsBySearch(w http.ResponseWriter, r *http.Request) {\n\tvar test foo\n\n\tif err := json.NewDecoder(r.Body).Decode(&test); err != nil {\n\t\terr := fmt.Errorf(\"error when reading request body: %w\", err)\n\t\tlog.Logger.Errorf(\"GetLyricsBySearch failed: %v\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tif test.Search == \"\" {\n\t\tlog.Logger.Infof(\"GetAllSongs: request body was empty: %v\", test)\n\n\t\thttp.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tlog.Logger.Infof(\"GetLyricsBySearch: successfully read request body: %v\", test)\n\n\tsongData, err := internal.GetLyricsBySearch(test.Search)\n\tif err != nil {\n\t\terr := fmt.Errorf(\"error when getting lyrics by search: %w\", err)\n\t\tlog.Logger.Errorf(\"GetLyricsBySearch failed: %v\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\twordMap, err := internal.ScanWords(songData, &test.Words)\n\tif err != nil {\n\t\thttp.Error(w, http.StatusText(400), 400)\n\t}\n\n\tlog.Logger.Infof(\"finished scanning words... %v\", wordMap)\n\n\tresponse := models.Response{\n\t\tSongs: songData,\n\t\tWordMap: wordMap,\n\t}\n\n\tif err := json.NewEncoder(w).Encode(response); err != nil {\n\t\terr := fmt.Errorf(\"error when encoding response: %w\", err)\n\t\tlog.Logger.Errorf(\"GetLyricsBySearch failed: %v\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)\n\t\treturn\n\t}\n}",
"func GetLyricsOmakase(url string, id int) LyricsData {\n\tregTitle := regexp.MustCompile(\"(?is)<title>(.*?)</title>\")\n\tregInfo := regexp.MustCompile(\"(?is).*?作詞[::](.*?)/作曲[::](.*?)/編曲[::](.*?)/\\n歌[::](.*?)\\n\\n(.*?)</pre></td>\")\n\n\ttitle := \"\"\n\tsinger := \"\"\n\tlyricist := \"\"\n\tcomposer := \"\"\n\tarranger := \"\"\n\tlyrics := \"\"\n\n\terrCount := 0\n\thtml := \"\"\n\tfor errCount < 5 {\n\t\thtml1, err := GetHTML(url)\n\t\thtml = html1\n\t\tif err != nil {\n\t\t\terrCount++\n\t\t} else {\n\t\t\thtml, _ = sjis_to_utf8(html)\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif regTitle.MatchString(html) {\n\t\tgroup := regTitle.FindStringSubmatch(html)\n\t\ttitle = group[1]\n\t}\n\n\tif regInfo.MatchString(html) {\n\t\tgroup := regInfo.FindStringSubmatch(html)\n\t\tlyricist = group[1]\n\t\tcomposer = group[2]\n\t\tarranger = group[3]\n\t\tsinger = group[4]\n\t\tlyrics = group[5]\n\t}\n\n\tdata := LyricsData{\n\t\tID: id,\n\t\tTitle: title,\n\t\tLyricist: lyricist,\n\t\tComposer: composer,\n\t\tArranger: arranger,\n\t\tSinger: singer,\n\t\tLyrics: lyrics,\n\t}\n\n\treturn data\n}",
"func GetLyricsKasiTime(url string, id int) LyricsData {\n\n\tregUrl := regexp.MustCompile(`www.kasi-time.com/item-(.*?).html`)\n\tregTitle := regexp.MustCompile(`(?is)<div id=\"song_info_table\">.*?<h1>(.*?)</h1>`)\n\tregPronounce := regexp.MustCompile(`(?is)<td class=\"td2\">読み</td>.*?<td>(.*?)</td>`)\n\tregInfo := regexp.MustCompile(`(?is)<meta name=\"description\" content=\"歌手:(.*?)[ ]+作詞:(.*?)[ ]+作曲:(.*?)[ ]+.*?>`)\n\tregArranger := regexp.MustCompile(`(?is)<td class=\"td1\">編曲</td>.*?<td>(.*?)</td>`)\n\tregLyrics := regexp.MustCompile(`document.write\\('(.+)'\\);`)\n\n\tbaseUrl := \"http://www.kasi-time.com/item_js.php?no=\"\n\tpageUrl := baseUrl\n\n\ttitle := \"\"\n\tpronounce := \"\"\n\tsinger := \"\"\n\tlyricist := \"\"\n\tcomposer := \"\"\n\tarranger := \"\"\n\tlyrics := \"\"\n\n\tif regUrl.MatchString(url) {\n\t\tgroup := regUrl.FindStringSubmatch(url)\n\t\tpageID := group[1]\n\t\tpageUrl += pageID\n\t}\n\n\thtml1 := \"\"\n\thtml2 := \"\"\n\terrCount1 := 0\n\terrCount2 := 0\n\n\tfor errCount1 < 5 {\n\t\thtml, err := GetHTML(url)\n\t\thtml1 = html\n\t\tif err != nil {\n\t\t\terrCount1++\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tfor errCount2 < 5 {\n\t\thtml, err := GetHTML(pageUrl)\n\t\thtml2 = html\n\t\tif err != nil {\n\t\t\terrCount1++\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif regTitle.MatchString(html1) {\n\t\tgroup := regTitle.FindStringSubmatch(html1)\n\t\ttitle = group[1]\n\t}\n\n\tif regPronounce.MatchString(html1) {\n\t\tgroup := regPronounce.FindStringSubmatch(html1)\n\t\tpronounce = group[1]\n\t}\n\n\tif regInfo.MatchString(html1) {\n\t\tgroup := regInfo.FindStringSubmatch(html1)\n\t\tsinger = group[1]\n\t\tlyricist = group[2]\n\t\tcomposer = group[3]\n\t}\n\n\tif regArranger.MatchString(html1) {\n\t\tgroup := regArranger.FindStringSubmatch(html1)\n\t\tarranger = sanitize.HTML(group[1])\n\t}\n\n\tif regLyrics.MatchString(html2) {\n\t\tgroup := regLyrics.FindStringSubmatch(html2)\n\t\tlyrics = sanitize.HTML(group[1])\n\t}\n\n\tdata := LyricsData{\n\t\tID: id,\n\t\tTitle: title,\n\t\tPronounce: pronounce,\n\t\tSinger: singer,\n\t\tLyricist: lyricist,\n\t\tComposer: composer,\n\t\tArranger: arranger,\n\t\tLyrics: lyrics,\n\t}\n\n\treturn data\n}",
"func processYoutubeResponses(result string) []messageQueryBody {\n\n\tsubsl := \"<a id=\\\"video-title\\\"\"\n\tsubsl2 := \"href=\\\"\"\n\tsubsl3 := \"</a>\"\n\tlensubsl3 := len(subsl3)\n\n\tvar queryResult messageQueryBody\n\tvar queryResultArray []messageQueryBody\n\tvar mid int\n\n\tfor i := 0; i < len(result) - len(subsl); i++ {\n\t\tmess := \"\"\n\t\tif result[i : i + len(subsl)] == subsl {\n\t\t\tlength := i + len(subsl)\n\t\t\tvar last int\n\t\t\tfor j:=1; ; j++ {\n\t\t\t\tif result[length + j: length + j + len(subsl2)] == subsl2 {\n\t\t\t\t\tmid = length + j + len(subsl2)\n\t\t\t\t\tfor k := 1; ; k++ {\n\t\t\t\t\t\tif result[mid + k: mid + k + 2] == \"\\\">\" {\n\t\t\t\t\t\t\tlink := result[mid: mid + k]\n\t\t\t\t\t\t\tflink := \"https://www.youtube.com\" + link\n\t\t\t\t\t\t\tqueryResult.Link = flink\n\t\t\t\t\t\t\tlast = mid + k + 2\n\t\t\t\t\t\t\ti = last\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfound := false\n\t\t\tfor j:= 1; ; j++ {\n\t\t\t\tif result[last + j: last + j + lensubsl3] == subsl3 { // matched found for \"</a>\"\n\t\t\t\t\t\tmess = result[last: last + j]\n\t\t\t\t\t\ti = last + j + lensubsl3\n\t\t\t\t\t\tfound = true\n\t\t\t\t\t\tqueryResult.Head = mess\n\t\t\t\t\t}\n\t\t\t\tif found {\n\t\t\t\t\tqueryResultArray = append(queryResultArray, queryResult)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn queryResultArray\n\n}",
"func GetLyricsOneSong(w http.ResponseWriter, r *http.Request) {\n\tvar test foo\n\n\tif err := json.NewDecoder(r.Body).Decode(&test); err != nil {\n\t\terr := fmt.Errorf(\"error when reading request body: %w\", err)\n\t\tlog.Logger.Errorf(\"GetLyricsOneSong failed: %v\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tif test.Search == \"\" {\n\t\tlog.Logger.Infof(\"GetAllSongs: request body was empty: %v\", test)\n\n\t\thttp.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tlog.Logger.Infof(\"GetLyricsOneSong: successfully read request body: %v, %v\", test.Search, test.Words)\n\n\tsongs, err := internal.SearchSongs(test.Search)\n\tif err != nil {\n\t\terr := fmt.Errorf(\"error when searching songs: %w\", err)\n\t\tlog.Logger.Errorf(\"GetLyricsOneSong failed: %w\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tsingleSong, err := internal.GetOneSong(*songs)\n\tif err != nil {\n\t\terr := fmt.Errorf(\"error when getting song: %w\", err)\n\t\tlog.Logger.Errorf(\"GetLyricsOneSong failed: %v\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tsongWithLyrics, err := internal.GetLyricsForSingleSong(*singleSong)\n\tif err != nil {\n\t\terr := fmt.Errorf(\"error when getting lyrics for song: %w\", err)\n\t\tlog.Logger.Errorf(\"GetLyricsOneSong failed: %w\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tsongData := models.Song{\n\t\tID: singleSong.ID,\n\t\tTitle: singleSong.Title,\n\t\tArtist: singleSong.Artist,\n\t\tLyrics: models.Lyrics{\n\t\t\tID: songWithLyrics.ID,\n\t\t\tLyrics: songWithLyrics.Lyrics,\n\t\t},\n\t}\n\n\twordMap, err := internal.ScanWords([]models.Song{songData}, &test.Words)\n\tif err != nil {\n\t\thttp.Error(w, http.StatusText(400), 400)\n\t}\n\n\tlog.Logger.Infof(\"finished scanning words: %v\", wordMap)\n\n\tresponse := models.Response{\n\t\tSongs: []models.Song{songData},\n\t\tWordMap: wordMap,\n\t}\n\n\tif err := json.NewEncoder(w).Encode(response); err != nil {\n\t\terr := fmt.Errorf(\"error when encoding response: %w\", err)\n\t\tlog.Logger.Errorf(\"GetLyricsOneSong failed: %v\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)\n\t\treturn\n\t}\n}",
"func processYoutubeResponses(result string) []messageQueryBody {\n\n\tsubsl := \"<a id=\\\"video-title\\\"\"\n\tsubsl2 := \"href=\\\"\"\n\tsubsl3 := \"</a>\"\n\tlensubsl3 := len(subsl3)\n\tsubsl4 := \"<yt-formatted-string id=\\\"description-text\\\" class=\\\"style-scope ytd-video-renderer\\\">\"\n\tlensubsl4 := len(subsl4)\n\tsubsl5 := \"</yt-formatted-string>\"\n\tlensubsl5 := len(subsl5)\n\n\tvar queryResult messageQueryBody\n\tvar queryResultArray []messageQueryBody\n\tvar mid int\n\n\tfor i := 0; i < len(result) - len(subsl); i++ {\n\t\tmess := \"\"\n\t\tif result[i : i + len(subsl)] == subsl {\n\t\t\tlength := i + len(subsl)\n\t\t\tvar last int\n\t\t\tfor j:=1; ; j++ {\n\t\t\t\tif result[length + j: length + j + len(subsl2)] == subsl2 {\n\t\t\t\t\tmid = length + j + len(subsl2)\n\t\t\t\t\tfor k := 1; ; k++ {\n\t\t\t\t\t\tif result[mid + k: mid + k + 1] == \"\\\"\" {\n\t\t\t\t\t\t\tlink := result[mid: mid + k]\n\t\t\t\t\t\t\tflink := \"https://www.youtube.com\" + link\n\t\t\t\t\t\t\tqueryResult.Link = flink\n\t\t\t\t\t\t\tlast = mid + k + 1\n\t\t\t\t\t\t\tfor l := 1; ; l++ {\n\t\t\t\t\t\t\t\tif result[last + l: last+ l +2] == \"\\\">\" {\n\t\t\t\t\t\t\t\t\tlast = last + l +2\n\t\t\t\t\t\t\t\t\ti = last + l + 2\n\t\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfound := false\n\t\t\tfor j:= 1; ; j++ {\n\t\t\t\tif result[last + j: last + j + lensubsl3] == subsl3 { // matched found for \"</a>\"\n\t\t\t\t\tmess = result[last: last + j]\n\t\t\t\t\ti = last + j + lensubsl3\n\t\t\t\t\tfound = true\n\t\t\t\t\tqueryResult.Head = mess\n\t\t\t\t\tfor k := 1; ; k++ {\n\t\t\t\t\t\tif result[i + k : i + k + lensubsl4] == subsl4 {\n\t\t\t\t\t\t\tlength = i + k + lensubsl4;\n\t\t\t\t\t\t\tfor l := 1; ; l++ {\n\t\t\t\t\t\t\t\tif result[length + l: length + l + lensubsl5] == subsl5 {\n\t\t\t\t\t\t\t\t\tdesc := result[length: length + l]\n\t\t\t\t\t\t\t\t\tqueryResult.Desc = desc;\n\t\t\t\t\t\t\t\t\ti = length + l +4;\n\t\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif found {\n\t\t\t\t\tqueryResultArray = append(queryResultArray, queryResult)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn queryResultArray\n\n}",
"func processGoogleResponses(result string) []messageQueryBody {\n\n\tsubsl := \"<h3 class=\\\"LC20lb\\\">\"\n\tlensubsl := len(subsl)\n\tsubsl2 := \"</h3>\"\n\tlensubsl2 := len(subsl2)\n\tsubsl3 := \"<cite\"\n\tlensubsl3 := len(subsl3)\n\tsubsl4 := \"</cite>\"\n\tlensubsl4 := len(subsl4)\n\tvar queryResult messageQueryBody\n\tvar queryResultArray []messageQueryBody\n\tfor i := 0; i < len(result) - lensubsl; i++ {\n\t\tmess := \"\"\n\t\tif result[i : i + lensubsl] == subsl {\n\t\t\tlength := i + lensubsl\n\t\t\tvar last int\n\t\t\tfor j:=1; ; j++ {\n\t\t\t\tif result[length + j: length + j + lensubsl2] == subsl2 {\n\t\t\t\t\tmess = result[length: length + j]\n\t\t\t\t\tqueryResult.Head = mess\n\t\t\t\t\tlast = length + j + lensubsl2\n\t\t\t\t\ti = last\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfound := false\n\t\t\tfor j:= 1; ; j++ {\n\t\t\t\tif result[last + j: last + j + lensubsl3] == subsl3 { // matched found for \"<cite\"\n\t\t\t\t\tfor k:= 1; ; k++ {\n\t\t\t\t\t\tif result[last + j + lensubsl3 + k: last + j + lensubsl3 + k + lensubsl4] == subsl4 { // finding index for \"</cite>\"\n\t\t\t\t\t\t\tlink := result[last + j + lensubsl3 + 15 : last + j + lensubsl3 + k]\n\t\t\t\t\t\t\ti = last + j + lensubsl3 + k + lensubsl4\n\t\t\t\t\t\t\tfound = true\n\t\t\t\t\t\t\tif link[0: 7] != \"http://\" && link[0: 4] != \"www.\" && link[0: 8] != \"https://\" {\n\t\t\t\t\t\t\t\tlink = \"http://\" + link\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tqueryResult.Link = link\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif found {\n\t\t\t\t\tqueryResultArray = append(queryResultArray, queryResult)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn queryResultArray\n\n}",
"func (p *LDAPAuthProxy) RobotsTxt(r http.ResponseWriter) {\n\tr.WriteHeader(http.StatusOK)\n\tfmt.Fprintf(r, \"User-agent: *\\nDisallow: /\")\n}",
"func (s *Server) GetLyrics(ctx context.Context, track *api.TracksInfo) (*api.LyricsInfo, error) {\n\turi, err := s.GeniusClient.GetSongURL(ctx, track.GetArtist(), track.GetName())\n\tif err != nil {\n\t\tlog.Printf(\"Error geting uri from genius.com: %s\", err)\n\t\turi = \"\"\n\t}\n\n\tlyrics, err := s.GeniusClient.GetSongLyrics(ctx, uri)\n\tif err != nil {\n\t\tlog.Printf(\"Error geting lyrics from genius.com: %s\", err)\n\t\tlyrics = \"\"\n\t}\n\n\tresult := &api.LyricsInfo{\n\t\tGeniusURI: uri,\n\t\tLyrics: lyrics,\n\t}\n\treturn result, nil\n}",
"func (p *Proxy) RobotsTxt(w http.ResponseWriter, _ *http.Request) {\n\tw.WriteHeader(http.StatusOK)\n\tfmt.Fprintf(w, \"User-agent: *\\nDisallow: /\")\n}",
"func ScrubTrackTitle(original string) string {\n\tvar result string\n\tresult = normalizeParens(original)\n\tfor _, re := range TrackTitleIgnoredPhrases {\n\t\tresult = re.ReplaceAllString(result, \"\")\n\t}\n\tresult = Scrub(result)\n\tresult = strings.TrimSpace(result)\n\treturn result\n}",
"func sanitizeSkylinks(links []string) []string {\n\tvar result []string\n\n\tfor _, link := range links {\n\t\ttrimmed := strings.TrimPrefix(link, \"sia://\")\n\t\tresult = append(result, trimmed)\n\t}\n\n\treturn result\n}",
"func processBingResponses(result string) []messageQueryBody {\n\n\tsubsl := \"<li class=\\\"b_algo\\\"\"\n\tsubsl2 := \"<a\"\n\tsubsl3 := \"<cite\"\n\tlensubsl3 := len(subsl3)\n\tsubsl4 := \"</cite>\"\n\tlensubsl4 := len(subsl4)\n\n\tvar queryResult messageQueryBody\n\tvar queryResultArray []messageQueryBody\n\n\tfor i := 0; i < len(result) - len(subsl); i++ {\n\t\tmess := \"\"\n\t\tif result[i : i + len(subsl)] == subsl {\n\t\t\tlength := i + len(subsl)\n\t\t\tvar last int\n\t\t\tvar aStart int\n\t\t\tvar start int\n\n\t\t\tfor k := 1; ; k++ {\n\t\t\t\tif result[length + k: length + k + 2 ] == subsl2 {\n\t\t\t\t\taStart = length + k\n\t\t\t\t\tfor l := 1; ; l++ {\n\t\t\t\t\t\tif result[aStart + l: aStart + l + 1 ] == \">\" {\n\t\t\t\t\t\t\tstart = aStart + l + 1;\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfor j:=1; ; j++ {\n\t\t\t\tif result[start + j: start + j + 4] == \"</a>\" {\n\t\t\t\t\tmess = result[start: start + j]\n\t\t\t\t\tfMess := strings.Replace(mess, \"<strong>\", \"\", -1)\n\t\t\t\t\tfinalMess := strings.Replace(fMess, \"</strong>\", \"\", -1)\n\t\t\t\t\tqueryResult.Head = finalMess\n\t\t\t\t\tlast = start + j + 4\n\t\t\t\t\ti = last\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfound := false\n\t\t\tfor j:= 1; ; j++ {\n\t\t\t\tif result[last + j: last + j + lensubsl3] == subsl3 { // matched found for \"<cite\"\n\t\t\t\t\tfor k:= 1; ; k++ {\n\t\t\t\t\t\tif result[last + j + lensubsl3 + k: last + j + lensubsl3 + k + lensubsl4] == subsl4 { // finding index for \"</cite>\"\n\t\t\t\t\t\t\tlink := result[last + j + lensubsl3 + 1 : last + j + lensubsl3 + k]\n\n\t\t\t\t\t\t\ti = last + j + lensubsl3 + k + lensubsl4\n\t\t\t\t\t\t\tfound = true\n\t\t\t\t\t\t\tlink = strings.Replace(link, \"<strong>\", \"\", -1)\n\t\t\t\t\t\t\tlink = strings.Replace(link, \"</strong>\", \"\", -1)\n\t\t\t\t\t\t\tif link[0: 7] != \"http://\" && link[0: 4] != \"www.\" {\n\t\t\t\t\t\t\t\tlink = \"http://\" + link\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tqueryResult.Link = link\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif found {\n\t\t\t\t\tqueryResultArray = append(queryResultArray, queryResult)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn queryResultArray\n}",
"func (p *OAuthProxy) RobotsTxt(rw http.ResponseWriter, _ *http.Request) {\n\trw.WriteHeader(http.StatusOK)\n\tfmt.Fprintf(rw, \"User-agent: *\\nDisallow: /\")\n}",
"func Sanitize(text string) string {\n sanitized := rePhoto.ReplaceAllString(text, \"/photo\")\n sanitized = reRetweet.ReplaceAllString(sanitized, \"$1 \")\n sanitized = reMention.ReplaceAllString(sanitized, \"$1 $2\")\n sanitized = reLink.ReplaceAllString(sanitized, \"$1 $2$3\")\n\n sanitized = reEllipsis.ReplaceAllString(sanitized, \"$1 \")\n sanitized = reHyphen.ReplaceAllString(sanitized, \"$1 \")\n sanitized = reComma.ReplaceAllString(sanitized, \"$1$2 $3\")\n\n sanitized = strings.Replace(sanitized, \"&\", \"&\", -1)\n sanitized = strings.Replace(sanitized, \">\", \">\", -1)\n sanitized = strings.Replace(sanitized, \"<\", \"<\", -1)\n\n sanitized = strings.Replace(sanitized, \"#\", \"#\", -1)\n sanitized = strings.Replace(sanitized, \"#\", \" #\", -1)\n\n return sanitized\n}",
"func (*Parser) ParseRobots(r *http.Response) ([]url.URL, error) {\n\tvar urls []url.URL\n\n\tbytes, err := ioutil.ReadAll(r.Body)\n\tdefer r.Body.Close()\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tbody := string(bytes)\n\n\tfor _, l := range strings.Split(body, \"\\n\") {\n\t\texp := regexp.MustCompile(`^(Allow|Disallow):\\s?([^\\s]*)\\s*?$`)\n\n\t\tif matches := exp.FindAllStringSubmatch(l, -1); len(matches) > 0 {\n\n\t\t\t// Bundle all our failure conditions together\n\t\t\tswitch {\n\t\t\tcase len(matches) == 0:\n\t\t\tcase len(matches[0]) < 2:\n\t\t\tcase matches[0][1] == \"Disallow\":\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tpath := matches[0][2]\n\n\t\t\tu, err := makeURL(*r.Request.URL, path)\n\t\t\tif err != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\turls = append(urls, u)\n\t\t}\n\t}\n\n\treturn urls, nil\n}",
"func (c App) RobotsTxt() revel.Result {\n\n\ttxt := \"User-agent: *\\n\"\n\tif revel.Config.BoolDefault(\"site.live\", false) == false {\n\t\ttxt += \"Disallow: /\\n\"\n\t}\n\ttxt += \"\\n\"\n\n\treturn c.RenderText(txt)\n}",
"func (b *bot) nuke(m dggchat.Message, s *dggchat.Session) {\n\tif !isMod(m.Sender) || !strings.HasPrefix(m.Message, \"!nuke\") {\n\t\treturn\n\t}\n\n\tparts := strings.SplitN(m.Message, \" \", 2)\n\tif len(parts) <= 1 {\n\t\treturn\n\t}\n\n\tisRegexNuke := parts[0] == \"!nukeregex\"\n\tbadstr := parts[1]\n\tbadregexp, err := regexp.Compile(badstr) // TODO when is error not nil??\n\tif isRegexNuke && err != nil {\n\t\tb.sendMessageDedupe(\"regexp error\", s)\n\t\treturn\n\t}\n\n\t// find anyone saying badstr\n\t// TODO limit by time, not amout of messages...\n\tvictimNames := []string{}\n\t// the command itself will be last in the log and caught, exclude that one.\n\t// TODO: except if the command was issued via PM...\n\tfor _, m := range b.log[:len(b.log)-1] {\n\t\t// don't nuke mods.\n\t\tif isMod(m.Sender) {\n\t\t\tcontinue\n\t\t}\n\n\t\tvar isBad bool\n\t\tif isRegexNuke {\n\t\t\tisBad = badregexp.MatchString(m.Message)\n\t\t} else {\n\t\t\tisBad = strings.Contains(m.Message, badstr)\n\t\t}\n\n\t\tif isBad {\n\t\t\t// TODO dont collect duplicates...\n\t\t\t// collect names in case we want to revert nuke\n\t\t\tvictimNames = append(victimNames, m.Sender.Nick)\n\n\t\t\tlog.Printf(\"[##] Nuking '%s' because of message '%s' with nuke '%s'\\n\",\n\t\t\t\tm.Sender.Nick, m.Message, badstr)\n\n\t\t\t// TODO duration, -1 means server default\n\t\t\ts.SendMute(m.Sender.Nick, -1)\n\t\t}\n\t\t// TODO print/send summary?\n\t}\n\n\tif b.lastNukeVictims == nil {\n\t\tb.lastNukeVictims = []string{}\n\t}\n\t// combine array so we are able to undo all past nukes at once, if necessary\n\tb.lastNukeVictims = append(b.lastNukeVictims, victimNames...)\n}",
"func Song() string {\n\tfirstLine := \"This is\"\n\tlastLine := \"the house that Jack built.\"\n\tvar verses = make([]string, 0)\n\tfor v := 0; v <= len(lyrics); v++ {\n\t\tverse := Verse(firstLine, reverse(lyrics[:v]), lastLine)\n\t\tverses = append(verses, verse)\n\t}\n\treturn strings.Join(verses, \"\\n\\n\")\n}",
"func ExtractSpells(jstr []byte, verbose bool) []string {\n\tspells := make([]string, 0)\n\tflags := KCodeFlags{Spells: true, Blocks: false, Parts: false, Scene: false}\n\treturn Extract(&spells, jstr, flags, verbose)\n}",
"func clean_url(cand string) string {\n // TODO: url pattern should be refined\n r, _ := regexp.Compile(\"^((http[s]?|ftp)://)?(www\\\\.)?(?P<body>[a-z]+\\\\.[a-z]+)$\")\n if r.MatchString(cand) {\n r2 := r.FindAllStringSubmatch(cand, -1)\n return r2[0][len(r2[0]) - 1]\n }\n return \"\"\n}",
"func crawl(url string, domain string) []string {\n\tfmt.Println(url)\n\tif getDomain(url) == domain {\n\t\tsaveLink(url)\n\t}\n\tlist, err := Extract(url)\n\tif err != nil {\n\t\tlog.Print(err)\n\t}\n\treturn list\n}",
"func imgurScrape(url string, messageCh chan string) {\n\tdoc, err := goquery.NewDocument(url)\n\tif err != nil {\n\t\tlog.Print(err)\n\t}\n\tdoc.Find(\"div#content .album-description\").Each(func(i int, s *goquery.Selection) {\n\t\ttitle := strings.Trim(s.Find(\"h1\").Text(), \"\\n \")\n\t\tif title != \"Imgur\" {\n\t\t\tlog.Printf(\"Imgur: %s\\n\", title)\n\t\t\tmessageCh <- fmt.Sprintf(\"Imgur: %s\\n\", title)\n\t\t}\n\t})\n}",
"func ScrubAlbumTitle(original string) string {\n\tvar result string\n\tresult = normalizeParens(original)\n\tfor _, re := range AlbumTitleIgnoredPhrases {\n\t\tresult = re.ReplaceAllString(result, \"\")\n\t}\n\tresult = Scrub(result)\n\tresult = strings.TrimSpace(result)\n\treturn result\n}",
"func SpellURLs(baseURL string, input io.Reader) ([]*Spell, error) {\n\tlog.Init(\"dontlookatme\", true, false, ioutil.Discard)\n\n\tresp, err := ioutil.ReadAll(input)\n\tif err != nil {\n\t\treturn nil, status.Errorf(codes.FailedPrecondition, \"%s\", err)\n\t}\n\tvar spells []*Spell\n\tspansRE := regexp.MustCompile(`<span.*?<\\/span?>`)\n\tidRE := regexp.MustCompile(`href=\".*?\"`)\n\tshortRE := regexp.MustCompile(`<\\/a><\\/b>: .*?</span`)\n\tfor _, entry := range spansRE.FindAllString(string(resp), -1) {\n\t\tif !strings.Contains(entry, \"SpellDisplay\") || ! strings.Contains(entry,\"Family=None\") {\n\t\t\tcontinue\n\t\t}\n\n\t\tspellQuery := strings.TrimSuffix(strings.TrimPrefix(idRE.FindString(entry), `href=\"`), `\"`)\n\t\tshortDesc := strings.TrimSuffix(strings.TrimSuffix(strings.TrimPrefix(shortRE.FindString(entry), `</a></b>: `), \"</span\"), \"<br />\")\n\t\tspellName := strings.Split(strings.TrimPrefix(spellQuery, \"SpellDisplay.aspx?ItemName=\"), \"&\")[0]\n\n\t\turl := fmt.Sprintf(\"%s/%s\", baseURL, strings.Replace(spellQuery, \" \", \"%20\", -1))\n\t\tvar tags []string\n\t\tspellEntry := &Spell{\n\t\t\tURL: url,\n\t\t\tShortDesc: shortDesc,\n\t\t\tName: spellName,\n\t\t\tTags: tags,\n\t\t}\n\t\tspells = append(spells, spellEntry)\n\t}\n\treturn spells, nil\n}",
"func crawl(url string, ch chan string, chFinished chan bool) {\n\tresp, err := http.Get(url)\n\n\tdefer func() {\n\t// Notify that we're done after this function\n\tchFinished <- true\n\t}()\n\n\tif err != nil {\n\tfmt.Println(\"ERROR: Failed to crawl \\\"\" + url + \"\\\"\")\n\treturn\n\t}\n\n\tb := resp.Body\n\tdefer b.Close() // close Body when the function returns\n\n\tz := html.NewTokenizer(b)\n\n\tfor {\n\ttt := z.Next()\n\n\tswitch {\n\tcase tt == html.ErrorToken:\n\t\t// End of the document, we're done\n\t\treturn\n\tcase tt == html.StartTagToken:\n\t\tt := z.Token()\n\n\t\t// Check if the token is an <a> tag\n\t\tisAnchor := t.Data == \"a\"\n\t\tif !isAnchor {\t\tcontinue\n\t\t}\n\n\t\tvar url string\n\n\t\t// Extract the href value, if there is one\n\t\tfor _, a := range t.Attr {\n\t\tif a.Key == \"href\" {\n\t\t\turl = a.Val\n\t\t\tbreak\n\t\t}\n\t\t}\n\n\t\tif len(string(url)) > 8 {\n\t\t\tif string(url)[:7] == \"/detail\" {\n\t\t\t\tch <- strings.Replace(url, \"#akce\", \"\", -1)\n\t\t\t}\n\t\t}\n\t}\n\t}\n}",
"func trimAndRemoveDuplicates(urls []string) []string {\n\tresult := make([]string, 0)\n\tfor _, url := range urls {\n\t\tparts := strings.Split(url, \"#\")\n\t\tif !isDuplicate(result, parts[0]) {\n\t\t\tresult = append(result, parts[0])\n\t\t}\n\t}\n\treturn result\n}",
"func processImageResponses(result string) []messageQueryBody {\n\n\tsubsl := \"<div class=\\\"rg_meta notranslate\\\">\"\n\tlensubsl := len(subsl)\n\tsubsl2 := \"\\\"ou\\\":\\\"\"\n\tlensubsl2 := len(subsl2)\n\tsubsl3 := \"\\\"pt\\\":\"\n\tlensubsl3 := len(subsl3)\n\tsubsl4 := \"\\\"rh\\\":\"\n\tlensubsl4 := len(subsl4)\n\tcount := 0\n\n\tvar queryResult messageQueryBody\n\tvar queryResultArray []messageQueryBody\n\n\tfor i := 0; i < len(result) - len(subsl); i++ {\n\t\tlink := \"\"\n\t\tif result[i : i + lensubsl] == subsl {\n\t\t\tlength := i + lensubsl\n\t\t\tvar mid int\n\t\t\tfor j := 1; ; j++ {\n\t\t\t\tfound := false\n\t\t\t\tif result[length + j: length + j + lensubsl2] == subsl2 {\n\t\t\t\t\tmid = length + j + lensubsl2\n\t\t\t\t\tfor k := 1; ; k++ {\n\t\t\t\t\t\tif result[mid + k: mid + k + 1] == \"\\\"\" {\n\t\t\t\t\t\t\tlink = result[mid: mid + k]\n\t\t\t\t\t\t\tqueryResult.Link = link\n\t\t\t\t\t\t\tfound = true\n\t\t\t\t\t\t\ti = mid + k + 1;\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tfor a := 1; ; a++ {\n\t\t\t\t\t\tif result[i + a: i + a + lensubsl3] == subsl3 {\n\t\t\t\t\t\t\tmid = i + a + lensubsl3 + 1\n\t\t\t\t\t\t\tfor k := 1; ; k++ {\n\t\t\t\t\t\t\t\tif result[mid + k: mid + k + 1] == \"\\\"\" {\n\t\t\t\t\t\t\t\t\tdesc := result[mid: mid + k]\n\t\t\t\t\t\t\t\t\tqueryResult.Desc = desc\n\t\t\t\t\t\t\t\t\tfound = true\n\t\t\t\t\t\t\t\t\ti = mid + k + 1;\n\t\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tfor a := 1; ; a++ {\n\t\t\t\t\t\tif result[i + a: i + a + lensubsl4] == subsl4 {\n\t\t\t\t\t\t\tmid = i + a + lensubsl4 + 1\n\t\t\t\t\t\t\tfor k := 1; ; k++ {\n\t\t\t\t\t\t\t\tif result[mid + k: mid + k + 1] == \"\\\"\" {\n\t\t\t\t\t\t\t\t\tdlink := result[mid: mid + k]\n\t\t\t\t\t\t\t\t\tqueryResult.DescLink = dlink\n\t\t\t\t\t\t\t\t\tfound = true\n\t\t\t\t\t\t\t\t\ti = mid + k + 1;\n\t\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif found {\n\t\t\t\t\tqueryResultArray = append(queryResultArray, queryResult)\n\t\t\t\t\tcount ++\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif count == 10 {\n\t\t\tbreak;\n\t\t}\n\t}\n\treturn queryResultArray\n\n}",
"func (h *CrawlHandler) Crawls() []string {\n\th.mu.RLock()\n\tcrawls := make([]string, len(h.crawls))\n\ti := 0\n\tfor url := range h.crawls {\n\t\tcrawls[i] = url\n\t\ti++\n\t}\n\th.mu.RUnlock()\n\n\treturn crawls\n}",
"func (dns *EdgeDNS) cleanResolvForHost() {\n\tbs, err := ioutil.ReadFile(hostResolv)\n\tif err != nil {\n\t\tklog.Warningf(\"read file %s err: %v\", hostResolv, err)\n\t}\n\n\tresolv := strings.Split(string(bs), \"\\n\")\n\tif resolv == nil {\n\t\treturn\n\t}\n\tnameserver := \"\"\n\tfor _, item := range resolv {\n\t\tif strings.Contains(item, dns.ListenIP.String()) || item == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tnameserver = nameserver + item + \"\\n\"\n\t}\n\tif err := ioutil.WriteFile(hostResolv, []byte(nameserver), 0600); err != nil {\n\t\tklog.Errorf(\"failed to write nameserver to file %s, err: %v\", hostResolv, err)\n\t}\n}",
"func removeProxiedRecords(s string) string {\n\treturn proxiedRecordsRe.ReplaceAllString(s, \"\")\n}",
"func (me *Eliza) analyse(userinput string) string {\n\t// Loop through the responses, looking for a match for the user input.\n\tfor _, response := range me.responses {\n\t\tif matches := response.question.FindStringSubmatch(userinput); matches != nil {\n\n\t\t\t// Select a random answer.\n\t\t\tanswer := response.answers[rand.Intn(len(response.answers))]\n\n\t\t\t// Fill the answer with the captured groups from the matches.\n\t\t\tfor i, match := range matches[1:] {\n\t\t\t\t// Reflect the pronouns in the captured group.\n\t\t\t\tfor _, sub := range me.substitutions {\n\t\t\t\t\tmatch = sub.original.ReplaceAllString(match, sub.substitute)\n\t\t\t\t\t// Remove any spaces at the start or end.\n\t\t\t\t\tmatch = strings.TrimSpace(match)\n\t\t\t\t}\n\t\t\t\t// Replace $1 with the first reflected captured group, $2 with the second, etc.\n\t\t\t\tanswer = strings.Replace(answer, \"$\"+strconv.Itoa(i+1), match, -1)\n\t\t\t}\n\n\t\t\t// Clear any ~~ markers from the string. They prevent future matches.\n\t\t\tanswer = strings.Replace(answer, \"~~\", \"\", -1)\n\n\t\t\t// Send the filled answer back.\n\t\t\treturn answer\n\t\t}\n\t}\n\n\treturn \"I don't know what to say.\"\n}",
"func (c Page) RobotsTxt() revel.Result {\n\n\ttxt := \"User-agent: *\\n\"\n\tif revel.Config.BoolDefault(\"site.live\", false) == false {\n\t\ttxt += \"Disallow: /\\n\"\n\t}\n\ttxt += \"\\n\"\n\n\treturn c.RenderText(txt)\n}",
"func processImageResponses(result string) []messageQueryBody {\n\n\tsubsl := \"<div class=\\\"rg_meta notranslate\\\">\"\n\tlensubsl := len(subsl)\n\tsubsl2 := \"\\\"ou\\\":\\\"\"\n\tlensubsl2 := len(subsl2)\n\tcount := 0\n\n\tvar queryResult messageQueryBody\n\tvar queryResultArray []messageQueryBody\n\n\tfor i := 0; i < len(result) - len(subsl); i++ {\n\t\tlink := \"\"\n\t\tif result[i : i + lensubsl] == subsl {\n\t\t\tlength := i + lensubsl\n\t\t\tvar mid int\n\t\t\tfor j := 1; ; j++ {\n\t\t\t\tfound := false\n\t\t\t\tif result[length + j: length + j + lensubsl2] == subsl2 {\n\t\t\t\t\tmid = length + j + lensubsl2\n\t\t\t\t\tfor k := 1; ; k++ {\n\t\t\t\t\t\tif result[mid + k: mid + k + 1] == \"\\\"\" {\n\t\t\t\t\t\t\tlink = result[mid: mid + k]\n\t\t\t\t\t\t\tqueryResult.Link = link\n\t\t\t\t\t\t\tfound = true\n\t\t\t\t\t\t\ti = mid + k + 1;\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif found {\n\t\t\t\t\tqueryResultArray = append(queryResultArray, queryResult)\n\t\t\t\t\tcount ++\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif count == 10 {\n\t\t\tbreak;\n\t\t}\n\t}\n\treturn queryResultArray\n\n}",
"func (s *Spider) getRobotsTxt() error {\n\tresp, err := http.Get(\"http://\" + s.URL.Host)\n\tif err != nil {\n\t\treturn err\n\t}\n\trobots, err := robotstxt.FromResponse(resp)\n\tif err != nil {\n\t\treturn err\n\t}\n\ts.robots = robots.FindGroup(s.Config.RobotUserAgent)\n\treturn nil\n}",
"func FilterOutClans(clanList []string) []string {\n\ttofetch := SplitToChucks(clanList)\n\ttoReturn := []string{}\n\tfor _, ids := range tofetch {\n\t\trawOut, err := CallRoute(\"clanDiscription\", map[string]string{\"clanID\": strings.Join(ids, \"%2C\")}) // %2C = ,\n\t\tif err != nil {\n\t\t\tapiErr(\"FilterOutClans\", err, \"error check CallRoute\")\n\t\t\tcontinue\n\t\t}\n\t\tvar out ClanDiscription\n\t\tjson.Unmarshal([]byte(rawOut), &out)\n\t\tif out.Status != \"ok\" {\n\t\t\tapiErr(\"FilterOutClans\", errors.New(out.Error.Message), \"api status is not OK json.Unmarshal\")\n\t\t\tcontinue\n\t\t}\n\t\tfor clanID, clan := range out.Data {\n\t\t\tif IsDutch(clan.Description) {\n\t\t\t\ttoReturn = append(toReturn, clanID)\n\t\t\t\tother.DevPrint(\"found clan:\", clan.Tag)\n\t\t\t}\n\t\t}\n\t}\n\treturn toReturn\n}",
"func (g *Game) Romhacks(embeds string) (*GameCollection, *Error) {\n\treturn fetchGamesLink(firstLink(g, \"romhacks\"), nil, nil, embeds)\n}",
"func createCheckText(text string) string {\n\t// Always lower case the string so we don't need case-insensitive regex's\n\tnewText := strings.ToLower(text)\n\n\t// Space out anything that we shouldn't search...\n\n\t// URL's\n\tnewText = xurls.Strict.ReplaceAllStringFunc(newText, func(toRep string) string {\n\t\treturn strings.Repeat(\" \", len(toRep))\n\t})\n\n\treturn newText\n}",
"func (w *Wikiquote) UnmarshalJSON(data []byte) error {\n\t// copy the fields of Wikiquote but not the\n\t// methods so we don't recursively call UnmarshalJSON\n\ttype Alias Wikiquote\n\ta := &struct {\n\t\t*Alias\n\t}{\n\t\tAlias: (*Alias)(w),\n\t}\n\n\tif err := json.Unmarshal(data, &a); err != nil {\n\t\treturn err\n\t}\n\n\t// The following is wikitext.\n\t// == indicates a new section (Quotes, Song lyrics, etc...)\n\t// === indicates a new subsection (eg the albulm of the song lyrics)\n\t// \\n\\n* is a new quote\n\t// \\n** indicates a source\n\t// '''some text''' indicates bold font\n\t// Wikitext cheat sheet: https://en.wikipedia.org/wiki/Help:Cheatsheet\n\t// not sure how to capture song lyrics (albums start with \"===\")\n\n\t// change the equal signs so they get out of the way of regex\n\tw.Source = reQuadEq.ReplaceAllString(w.Source, \"<h3>$1</h3>\") // Subsubheading\n\tw.Source = reTripleEq.ReplaceAllString(w.Source, \"<h2>$1</h2>\") // Subheading\n\tw.Source = reDoubleEq.ReplaceAllString(w.Source, \"<h1>$1</h1>\") // Heading\n\tw.Source = reSingleEq.ReplaceAllString(w.Source, \"[equals]\")\n\tw.Source = reH2.ReplaceAllString(w.Source, \"===$1===\") // Change heading back so we can use [^=]\n\tw.Source = reH1.ReplaceAllString(w.Source, \"==$1==\") // Change heading back so we can use [^=]\n\n\tfor _, m := range h1.FindAllStringSubmatch(w.Source, -1) {\n\t\tif len(m) < 2 {\n\t\t\tcontinue\n\t\t}\n\t\tsection := strings.ToLower(strings.TrimSpace(m[1]))\n\t\tif section == \"quotes\" || section == \"sourced\" { // any more sections we want???\n\t\t\tfor _, q := range strings.Split(m[2], \"\\n\") {\n\t\t\t\tif strings.HasPrefix(q, \"* \") {\n\t\t\t\t\t// Remove wikitext formatting. I couldn't find a good\n\t\t\t\t\t// library to convert wikitext to html or some other format.\n\t\t\t\t\tq = strings.TrimPrefix(q, \"* \")\n\t\t\t\t\tq = strings.Replace(q, `'''`, \"\", -1)\n\t\t\t\t\tq = reRefTags.ReplaceAllString(q, \"\")\n\t\t\t\t\tq = reWikiLinks.ReplaceAllString(q, `$1$4$6`)\n\t\t\t\t\tq = reBraces.ReplaceAllString(q, \"\")\n\t\t\t\t\tq = reBrackets.ReplaceAllString(q, \"$1\")\n\t\t\t\t\tq = sanitizer.Sanitize(q) // run this AFTER we strip <ref>link</ref> section\n\n\t\t\t\t\tw.Quotes = append(w.Quotes, q)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}",
"func vtlCleanup(b string) (string, error) {\n\tswitch {\n\tcase strings.HasPrefix(b, \"=\"):\n\t\tfallthrough\n\tcase strings.Contains(b, \"Luna\"):\n\t\treturn \"\", nil\n\t}\n\treturn strings.ReplaceAll(b, \"#\", \"\"), nil\n}",
"func SmartSearch(query string) (res []Result) {\n\tch := make(chan Result, 4)\n\tgo func() { ch <- FirstOf(query, web, ag, grep, onion) }()\n\tgo func() { ch <- FirstOf(query, gps, glonass) }()\n\tgo func() { ch <- FirstOf(query, memes, album2k16) }()\n\tgo func() { ch <- FirstOf(query, cctv, yourtube) }()\n\n\ttimeout := time.After(78 * time.Millisecond)\n\tfor i := 0; i < 4; i++ {\n\t\tselect {\n\t\tcase value := <-ch:\n\t\t\tres = append(res, value)\n\t\tcase <-timeout:\n\t\t\treturn res\n\t\t}\n\t}\n\treturn\n}",
"func findPlaceholdersAndRemoveWordTags(content string) string {\n\tvar cleanRegex = regexp.MustCompile(\"<.*?>\")\n\n\tfor _, matchRegex := range cleanupRegexes {\n\t\tregex := regexp.MustCompile(matchRegex)\n\t\tmatches := regex.FindAllString(content, -1)\n\t\tfor _, match := range matches {\n\t\t\tcleanString := cleanRegex.ReplaceAllString(match, \"\")\n\t\t\tcontent = strings.ReplaceAll(content, match, cleanString)\n\t\t}\n\t}\n\treturn content\n}",
"func (ua geniusLyricsUserAgent) Open(url string) error {\n\t// Request HTML document\n\tres, err := http.Get(url)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer res.Body.Close()\n\n\tif res.StatusCode != 200 {\n\t\treturn fmt.Errorf(\"status code error: %d %s\", res.StatusCode, res.Status)\n\t}\n\n\t// Render HTML document\n\tdoc, err := goquery.NewDocumentFromReader(res.Body)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlyricsHTML, err := doc.Find(\"div.lyrics\").Html()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlyricsText, err := html2text.FromString(lyricsHTML)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Println(lyricsText)\n\treturn nil\n}",
"func ProxiesFromClarketm() []string {\n\tresp, err := http.Get(\"https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list.txt\")\n\tif err != nil {\n\t\treturn nil\n\t}\n\tdefer resp.Body.Close()\n\tbb, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn nil\n\t}\n\tstr := string(bb)\n\tproxies := ipPortRegex.FindAllString(str, -1)\n\treturn proxies\n}",
"func DeleteLighthouseReports() {\n\tfiles, readDirErr := ioutil.ReadDir(\".\")\n\tif readDirErr != nil {\n\t\tmg.Fatal(readDirExit, readDirErr)\n\t}\n\n\tr, regexErr := regexp.Compile(`^jameslucktaylor\\.info_.*\\.report\\.html$`)\n\tif regexErr != nil {\n\t\tmg.Fatal(regexCompileExit, regexErr)\n\t}\n\n\tfor _, file := range files {\n\t\tif r.MatchString(file.Name()) {\n\t\t\tfmt.Println(file.Name())\n\t\t\tsh.Rm(file.Name())\n\t\t}\n\t}\n}",
"func Reply(input string) string {\n\n //Passes user input into the preprocess function \n input = preprocess(input)\n\n for pattern, responses := range phrases {\n re := regexp.MustCompile(pattern)\n matches := re.FindStringSubmatch(input)\n\n if len(matches) > 0 {\n var fragment string\n if len(matches) > 1 {\n fragment = reflect(matches[1])\n }\n\n //this selects a random reponse based on the keywords \n output := randChoice(responses)\n \n //Puts input and output together so it appears smart \n if strings.Contains(output, \"%s\") {\n output = fmt.Sprintf(output, fragment)\n }\n return output\n }\n }\n\n //if no response is found it selects randomly from convoRestart\n return randChoice(convoRestart)\n}",
"func Clean(local string, mirror io.Reader, dst io.Writer) (err error) {\n\tvar (\n\t\tinserted bool\n\t)\n\n\tscanner := bufio.NewScanner(mirror)\n\tfor scanner.Scan() {\n\t\tv := scanner.Text()\n\t\treplacement := v\n\t\tif strings.HasPrefix(v, \"#Server = \") {\n\t\t\treplacement = strings.Replace(v, \"#Server = \", \"Server = \", 1)\n\t\t}\n\n\t\tif !inserted && strings.HasPrefix(v, \"Server = \") {\n\t\t\tinserted = true\n\t\t\treplacement = \"Server = http://\" + local + \"/$repo/os/$arch\\r\\n\" + replacement\n\t\t}\n\n\t\tif _, err = fmt.Fprintln(dst, replacement); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}",
"func (hsa HTTPStringAnalyzer) Analyze(s string) AnalysisResult {\n\tresult := regex.FindStringSubmatch(s)\n\n\tif result != nil && len(result) > 0 {\n\t\t// Since go doesn't support negative lookahead, we are manually excluding results\n\t\t// instead of putting them directly in the regex.\n\t\tfor _, excludedDomain := range excludedDomains {\n\t\t\tif strings.Contains(result[2], excludedDomain) {\n\t\t\t\treturn AnalysisResult{}\n\t\t\t}\n\t\t}\n\n\t\treturn AnalysisResult{Type: \"HTTP\", Value: result[0]}\n\t}\n\n\treturn AnalysisResult{}\n}",
"func getSongId(content string) string {\n\n\t// Stop interpreting playlist links as song links \n\tif(strings.Contains(content, \"playlist\")){\n\t\treturn \"\"\n\t}\n\tre := regexp.MustCompile(\"[a-zA-Z0-9]{22}\")\n\tsongId := re.FindString(content)\n\tfmt.Println(songId)\n\treturn songId\n\n}",
"func (k KMP) DiscoverDiscords(kDiscords int, exclusionZone int) ([]int, error) {\n\treturn nil, errors.New(\"Discords for KMP has not been implemented yet.\")\n}",
"func getURLsfromPage( url string ) ([]string, error) {\n\n\turlList := []string{}\n\tcontents, err := downloadContentsfromURL(url)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// regular.\n\tchecker := regexp.MustCompile(urlRegex)\n\tdoesMatch := checker.MatchString( contents)\n\tif doesMatch {\n\t\turlList = checker.FindAllString(contents,100)\n\t}\n\tuniqueList := uniqueStringList(urlList)\n\n\n return uniqueList, nil\n}",
"func main() {\n\tkeyword := \"Plastiblends\"\n\tsource.YahooFinance(\"https://in.finance.yahoo.com\", keyword)\n\t// source.EconomicTimes(\"https://economictimes.indiatimes.com\", keyword)\n\t// // source.CNBC(\"https://www.cnbctv18.com\", keyword, `\\s*(?i)https://www[.]cnbctv18[.]com(\\\"([^\"]*\\\")|'[^']*'|([^'\">\\s]+))`)\n\t// source.MoneyControl(\"https://www.moneycontrol.com\", keyword, `\\s*(?i)https://www[.]moneycontrol[.]com(\\\"([^\"]*\\\")|'[^']*'|([^'\">\\s]+))`)\n\t// source.Investing(\"https://in.investing.com\", keyword)\n}",
"func main() {\n\n\tlog.SetOutput(os.Stdout)\n\n\ttoCrawl, _ := url.Parse(\"http://www.monzo.com\")\n\tvar filter crawler.Restriction = func(url *url.URL) bool {\n\t\treturn url.Host == toCrawl.Host\n\t}\n\tvar op1 crawler.Operation = func(in *url.URL) *url.URL {\n\t\tif in != nil {\n\t\t\thashIndex := strings.Index(in.String(), \"#\")\n\t\t\tif hashIndex > 0 {\n\t\t\t\tout, err := url.Parse(in.String()[:hashIndex])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn in\n\t\t\t\t}\n\t\t\t\treturn out\n\t\t\t}\n\t\t}\n\t\treturn in\n\t}\n\n\twg := sync.WaitGroup{}\n\twg.Add(1)\n\tout := make(chan model.CrawlerOutput, 100)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tfor each := range out {\n\t\t\tconsumer.CreateSiteMap(path, each.URL, each.PageLinks, each.ResponseBody)\n\t\t}\n\t}()\n\tdone := make(chan struct{})\n\n\tc := crawler.NewCrawler(nil, crawler.Setting{\n\t\tRestrictions: []crawler.Restriction{filter},\n\t\tOperation: op1,\n\t\tWaitTimes: 100 * time.Millisecond,\n\t\tWorkers: 10,\n\t\tGetResponseBody: true,\n\t})\n\tgo c.Crawl(toCrawl, out, done)\n\n\tselect {\n\tcase <-time.After(10 * time.Second):\n\t\tdone <- struct{}{}\n\t}\n\twg.Wait()\n}",
"func GetUrls(rootUrl string, body string) []string {\n\tbody = strings.ToLower(body)\n\turls := make([]string, 1)\n\n\tvar idx, idx2 int = 0, 0\n\tfor {\n\t\t// fmt.Println(idx, body[idx:idx+5])\n\t\tif idx = strings.Index(body, \"href\"); idx == -1 {\n\t\t\tbreak\n\t\t}\n\n\t\tbody = body[idx+len(\"href\"):]\n\t\tbody = CutFirstWhiteSpaces(body)\n\t\tif body != \"\" && body[0] != '='{\n\t\t\tcontinue\n\t\t}\n\t\tbody = body[1:]\n\t\tbody = CutFirstWhiteSpaces(body)\n\t\n\n\t\tif body == \"\" {\n\t\t\tbreak\n\t\t}\n\n\t\t// full url between quotes, or relative url\n\t\tif body[0] == '\"' {\n\t\t\tbody = body[1:]\n\t\t\tif idx2 = strings.IndexByte(body, '\"'); idx == -1 {\n\t\t\t\tbreak\n\t\t\t}\n\t\t} else {\n\t\t\t// should have idx2 at *any* first whitespace or >\n\t\t\tidx2 = int(math.Min(float64(strings.IndexByte(body, ' ')), \n\t\t\t\t\t\t\tfloat64(strings.IndexByte(body,'>'))))\n\t\t}\n\n\t\tu := cleanUrl(rootUrl, body[:idx2])\n\t\turls = append(urls, u)\n\t}\n\treturn urls\n}",
"func Id3FixSong(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {\n\n\tquery := r.URL.Query()\n\tsongUrl := query.Get(\"url\")\n\ttitle := query.Get(\"title\")\n\tartist := query.Get(\"artist\")\n\n\t// Check the response header to make sure the file is actually an audio file\n\tresp, err := http.Head(songUrl)\n\tif err := checkResponse(resp, err); err != nil {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Ensure the file size is not larger than 20 MB\n\tif resp.ContentLength > 0x1400000 {\n\t\twriteJsonError(w, http.StatusBadRequest, errors.New(\"File too large\"))\n\t\treturn\n\t}\n\n\t// Get the file\n\tresp, err = http.Get(songUrl)\n\tif err = checkResponse(resp, err); err != nil {\n\t\twriteJsonError(w, http.StatusBadRequest, err)\n\t\treturn\n\t}\n\n\tdefer resp.Body.Close()\n\t// Make sure we were given an audio file by checking the content type\n\tif match, _ := regexp.MatchString(`audio\\.+`, resp.Header.Get(\"Content-Type\")); match {\n\t\twriteJsonError(w, http.StatusBadRequest, errors.New(\"Not an audio file\"))\n\t\treturn\n\t}\n\n\tbody, err := ioutil.ReadAll(resp.Body)\n\n\tresponse, err := fixSong(artist, title, body)\n\tif err != nil {\n\t\twriteJsonError(w, http.StatusInternalServerError, err)\n\t\treturn\n\t}\n\n\tdefer response.File.Close()\n\n\tescapedName := quoteEscaper.Replace(response.Name)\n\tw.Header().Set(\"Content-Type\", \"audio/mpeg\")\n\tw.Header().Set(\"Content-Disposition\", fmt.Sprintf(\"attachment; filename=\\\"%s\\\";\", escapedName))\n\tio.Copy(w, response.File)\n}",
"func (c *Crawler) cleanUpResults() {\n\tfor path, page := range c.SiteMap {\n\t\tvar errChecked links\n\t\tfor _, link := range page.Links {\n\t\t\texists, ok := c.pagesWithErr[link.URL.Path]\n\t\t\tif !ok || !exists {\n\t\t\t\terrChecked = append(errChecked, link)\n\t\t\t}\n\t\t}\n\t\tc.SiteMap[path].Links = errChecked\n\t}\n}",
"func cleanLines(text string) string {\n\tlines := strings.Split(text, \"\\n\")\n\tfor i, l := range lines {\n\t\tlines[i] = strings.TrimSpace(l)\n\t}\n\n\treturn strings.Join(lines, \" \")\n}",
"func ParseResults(query string) []Recipe {\r\n\t// Create request string and get search page\r\n\tvar joinedQuery bytes.Buffer\r\n\tjoinedQuery.WriteString(\"https://www.allrecipes.com/search/results/?wt=\")\r\n\tjoinedQuery.WriteString(query)\r\n\troot := ParseHTML(joinedQuery.String())\r\n\t// Match recipe cards\r\n\tmatchCards := func(n *html.Node) bool {\r\n\t\tif n.DataAtom == atom.A && n.Parent != nil {\r\n\t\t\treturn scrape.Attr(n.Parent, \"class\") == \"fixed-recipe-card__h3\"\r\n\t\t}\r\n\t\treturn false\r\n\t}\r\n\r\n\tvar recipes []Recipe\r\n\t// For each recipe card, make a request for the recipe page\r\n\t// and then collect information into Recipe\r\n\trcps := scrape.FindAll(root, matchCards)\r\n\thm := make(chan bool)\r\n\tfor _, rcp := range rcps {\r\n\t\thref := scrape.Attr(rcp, \"href\")\r\n\t\tgo ParsePage(&recipes, href, hm)\r\n\t}\r\n\tfor i := 0; i < len(rcps); {\r\n\t\tselect {\r\n\t\tcase <-hm:\r\n\t\t\ti++\r\n\t\t}\r\n\t}\r\n\treturn recipes\r\n}",
"func extractProxiedRecords(s string) string {\n\tproxiedOnlyRecords := proxiedRecordsRe.FindAllString(s, -1)\n\treturn strings.Join(proxiedOnlyRecords, \"\\n\")\n}",
"func urlKeywords(URL string) string {\n\tu, err := url.Parse(URL)\n\tif err != nil {\n\t\treturn \"\"\n\t}\n\th := u.Host\n\tfor _, s := range urlKillWords {\n\t\th = strings.Replace(h, s, \"\", -1)\n\t}\n\treturn h\n}",
"func LiveScrape(name string, local string, t bool, cors bool) (List string) {\n\t// Request the HTML page.\n\tvar (\n\t\terr error\n\t\turl string\n\t)\n\tList = \"#EXTM3U\\n\"\n\turl = strings.Join([]string{\"https://members.sexcamvideos.net\", name}, \"/\")\n\tif name == \"index\" {\n\t\turl = \"https://members.sexcamvideos.net\"\n\t}\n\tif t {\n\t\turl = strings.Join([]string{\"https://members.sexcamvideos.net\", \"tag\", name}, \"/\")\n\t}\n\tif cors {\n\t\turl = strings.Join([]string{\"https://cors.zme.ink\", url}, \"/\")\n\t}\n\t// fmt.Println(url)\n\tres, err := http.Get(url)\n\tif err != nil {\n\t\treturn List\n\t}\n\tdefer res.Body.Close()\n\tif res.StatusCode != 200 {\n\t\t// log.Fatalf(\"status code error: %d %s\", res.StatusCode, res.Status)\n\t\treturn List\n\t}\n\n\t// Load the HTML document\n\tdoc, err := goquery.NewDocumentFromReader(res.Body)\n\tif err != nil {\n\t\treturn List\n\t}\n\troot := doc.Find(\"body ul#room_list > li.room_list_room\")\n\timgurl := \"https://roomimg.stream.highwebmedia.com/ri\"\n\troot.Each(func(index int, ele *goquery.Selection) {\n\t\ttitle := ele.Find(\"div.details > div.title > a\").Text()\n\t\ttitle = strings.Replace(title, \" \", \"\", -1)\n\t\tt := time.Now().Unix()\n\t\ttt := strconv.FormatInt(t, 10)\n\t\tcc := strings.Join([]string{imgurl, title}, \"/\")\n\t\tcc = strings.Join([]string{cc, \"jpg?\"}, \".\")\n\t\tcover := strings.Join([]string{cc, tt}, \"\")\n\t\tfirst := `#EXTINF:-1 tvg-id=\"\" tvg-name=\"`\n\t\tcv := `\" tvg-language=\"English\" tvg-logo=\"`\n\t\tafter := `\" group-title=\"livecam\",`\n\t\tr := \"\\n\"\n\t\tm3u8 := strings.Join([]string{local, \"livecam\", title, \"playlist.m3u8\"}, \"/\")\n\t\tm3u8 = strings.Join([]string{m3u8, \"\\n\"}, \"\")\n\t\tvar str []string = []string{first, title, cv, cover, after, title, r, m3u8}\n\t\tList += strings.Join(str, \"\")\n\t})\n\treturn List\n}",
"func process(job *Job) (Result, error) {\n\tvar result Result\n\tresult.Host = job.URL\n\tresult.Matches = make([]Match, 0)\n\tvar err error\n\n\tvar cookies []*http.Cookie\n\tvar cookiesMap = make(map[string]string)\n\tvar body []byte\n\tvar headers http.Header\n\n\t// get response from host if allowed\n\tif job.forceNotDownload {\n\t\tbody = job.Body\n\t\theaders = job.Headers\n\t\tcookies = job.Cookies\n\t} else {\n\t\tresp, err := fetchHost(job.URL)\n\t\tif err != nil {\n\t\t\tresult.Error = fmt.Sprintf(\"%s\", err)\n\t\t\treturn result, fmt.Errorf(\"Failed to retrieve\")\n\t\t}\n\n\t\tdefer resp.Body.Close()\n\n\t\tbody, err = ioutil.ReadAll(resp.Body)\n\t\tif err == nil {\n\t\t\theaders = resp.Header\n\t\t\tcookies = resp.Cookies()\n\t\t}\n\t}\n\n\tfor _, c := range cookies {\n\t\tcookiesMap[c.Name] = c.Value\n\t}\n\n\tdoc, err := goquery.NewDocumentFromReader(bytes.NewReader(body))\n\tif err != nil {\n\t\tresult.Error = fmt.Sprintf(\"%s\", err)\n\t\treturn result, err\n\t}\n\n\t// handle crawling\n\tif job.Crawl > 0 {\n\t\tbase, _ := url.Parse(job.URL)\n\n\t\tfor c, link := range parseLinks(doc, base) {\n\t\t\tif c >= job.Crawl {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\twa.schedule(NewOnlineJob(link, \"\", nil, 0))\n\t\t}\n\t\twa.wgJobs.Done()\n\t}\n\n\tvar title, keywords, descripiton string\n\t//查找网页标题、关键词、描述\n\ttitle = doc.Find(\"title\").First().Text()\n\n\tdoc.Find(\"meta\").Each(func(i int, s *goquery.Selection) {\n\t\tname := s.AttrOr(\"name\", \"\")\n\t\tif name == \"keywords\" {\n\t\t\tkeywords = s.AttrOr(\"content\", \"\")\n\t\t} else if name == \"description\" {\n\t\t\tdescripiton = s.AttrOr(\"content\", \"\")\n\t\t}\n\t})\n\n\tresult.WebTitle = title\n\tresult.WebKeywords = keywords\n\tresult.WebDesc = descripiton\n\n\tfor appname, app := range AppDefs.Apps {\n\t\t// TODO: Reduce complexity in this for-loop by functionalising out\n\t\t// the sub-loops and checks.\n\n\t\tfindings := Match{\n\t\t\tApp: app,\n\t\t\tAppName: appname,\n\t\t\tMatches: make([][]string, 0),\n\t\t}\n\n\t\t// check raw html\n\t\tif m, v := findMatches(string(body), app.HTMLRegex); len(m) > 0 {\n\t\t\tfindings.Matches = append(findings.Matches, m...)\n\t\t\tfindings.updateVersion(v)\n\t\t}\n\n\t\t// check response header\n\t\theaderFindings, version := app.FindInHeaders(headers)\n\t\tfindings.Matches = append(findings.Matches, headerFindings...)\n\t\tfindings.updateVersion(version)\n\n\t\t// check url\n\t\tif m, v := findMatches(job.URL, app.URLRegex); len(m) > 0 {\n\t\t\tfindings.Matches = append(findings.Matches, m...)\n\t\t\tfindings.updateVersion(v)\n\t\t}\n\n\t\t// check script tags\n\t\tdoc.Find(\"script\").Each(func(i int, s *goquery.Selection) {\n\t\t\tif script, exists := s.Attr(\"src\"); exists {\n\t\t\t\tif m, v := findMatches(script, app.ScriptRegex); len(m) > 0 {\n\t\t\t\t\tfindings.Matches = append(findings.Matches, m...)\n\t\t\t\t\tfindings.updateVersion(v)\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\n\t\t// check meta tags\n\t\tfor _, h := range app.MetaRegex {\n\t\t\tselector := fmt.Sprintf(\"meta[name='%s']\", h.Name)\n\t\t\tdoc.Find(selector).Each(func(i int, s *goquery.Selection) {\n\t\t\t\tcontent, _ := s.Attr(\"content\")\n\t\t\t\tif m, v := findMatches(content, []AppRegexp{h}); len(m) > 0 {\n\t\t\t\t\tfindings.Matches = append(findings.Matches, m...)\n\t\t\t\t\tfindings.updateVersion(v)\n\t\t\t\t}\n\t\t\t})\n\t\t}\n\n\t\t// check cookies\n\t\tfor _, c := range app.CookieRegex {\n\t\t\tif _, ok := cookiesMap[c.Name]; ok {\n\n\t\t\t\t// if there is a regexp set, ensure it matches.\n\t\t\t\t// 
otherwise just add this as a match\n\t\t\t\tif c.Regexp != nil {\n\n\t\t\t\t\t// only match single AppRegexp on this specific cookie\n\t\t\t\t\tif m, v := findMatches(cookiesMap[c.Name], []AppRegexp{c}); len(m) > 0 {\n\t\t\t\t\t\tfindings.Matches = append(findings.Matches, m...)\n\t\t\t\t\t\tfindings.updateVersion(v)\n\t\t\t\t\t}\n\n\t\t\t\t} else {\n\t\t\t\t\tfindings.Matches = append(findings.Matches, []string{c.Name})\n\t\t\t\t}\n\t\t\t}\n\n\t\t}\n\n\t\tif len(findings.Matches) > 0 {\n\t\t\tresult.Matches = append(result.Matches, findings)\n\n\t\t\t// handle implies\n\t\t\tfor _, implies := range app.Implies {\n\t\t\t\tfor implyAppname, implyApp := range AppDefs.Apps {\n\t\t\t\t\tif implies != implyAppname {\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\n\t\t\t\t\tf2 := Match{\n\t\t\t\t\t\tApp: implyApp,\n\t\t\t\t\t\tAppName: implyAppname,\n\t\t\t\t\t\tMatches: make([][]string, 0),\n\t\t\t\t\t}\n\t\t\t\t\tresult.Matches = append(result.Matches, f2)\n\t\t\t\t}\n\n\t\t\t}\n\t\t}\n\t}\n\n\treturn result, nil\n}",
"func youtubeScrape(url string, messageCh chan string) {\n\tdoc, err := goquery.NewDocument(url)\n\tif err != nil {\n\t\tlog.Print(err)\n\t}\n\tdoc.Find(\"#eow-title\").Each(func(i int, s *goquery.Selection) {\n\t\tlog.Printf(\"Youtube: %s\\n\", strings.Trim(s.Text(), \"\\n \"))\n\t\tmessageCh <- fmt.Sprintf(\"YouTube: %s\\n\", strings.Trim(s.Text(), \"\\n \"))\n\t})\n}",
"func returnRobotsTxt(w http.ResponseWriter, r *http.Request) {\n\thttp.ServeFile(w, r, robots_txt)\n}",
"func cleansePrefixes(ss []string) []string {\n\n\tret := []string{}\n\tfor _, s := range ss {\n\t\tstripped := \"\"\n\t\tfor i := len(ss) - 1; i > -1; i-- { // reversely\n\t\t\tpref := ss[i]\n\t\t\tif s != pref && strings.HasPrefix(s, pref) {\n\n\t\t\t\tstripped = strings.TrimPrefix(s, pref)\n\n\t\t\t\tstripped = strings.TrimSpace(stripped)\n\t\t\t\tstripped = strings.TrimPrefix(stripped, \"-- \")\n\t\t\t\tstripped = strings.TrimSuffix(stripped, \" --\")\n\n\t\t\t\t// log.Printf(\"stripped off\\n\\t%q \\n\\t%q \\n\\t%q\", s, pref, stripped)\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif stripped == \"\" {\n\t\t\tret = append(ret, s)\n\t\t} else {\n\t\t\tret = append(ret, stripped)\n\t\t}\n\t}\n\n\treturn ret\n\n}",
"func cleanNick(nick string) string {\n\treturn cleanNickRE.ReplaceAllString(nick, \"\")\n}",
"func loadUrlsFromFile(includefile *string) []string {\n\tfile, err := os.Open(*includefile)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tdefer file.Close()\n\tvar u []string\n\tscanner := bufio.NewScanner(file)\n\tfor scanner.Scan() {\n\t\tif !strings.HasPrefix(scanner.Text(), \"!\") {\n\t\t\tu = append(u, scanner.Text())\n\t\t}\n\t}\n\tif err := scanner.Err(); err != nil {\n\t\tlog.Fatal(err)\n\t}\n\treturn u\n}",
"func cleanupDomains() {\n\t// Look for our files in the resolver directory\n\tfmt.Println(\"Cleaning up\")\n\tfiles, err := ioutil.ReadDir(targetDir)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor _, f := range files {\n\t\tif f.IsDir() == false {\n\t\t\tcontent, err := ioutil.ReadFile(targetDir + f.Name())\n\t\t\tif err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\t\t\t// Check if it's one of ours\n\t\t\tif strings.HasPrefix(string(content), fileSig) {\n\t\t\t\tfmt.Printf(\"Removing file: (%s)\\n\", targetDir+f.Name())\n\t\t\t\terr := os.Remove(targetDir + f.Name())\n\t\t\t\tif err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tfmt.Printf(\"Skipping file: (%s)\", f.Name())\n\t\t\t}\n\t\t}\n\t}\n}",
"func clean(s *goquery.Selection, tag string) {\n\tisEmbed := tag == \"object\" || tag == \"embed\" || tag == \"iframe\"\n\n\ts.Find(tag).Each(func(i int, target *goquery.Selection) {\n\t\tattributeValues := \"\"\n\t\tfor _, attribute := range target.Nodes[0].Attr {\n\t\t\tattributeValues += \" \" + attribute.Val\n\t\t}\n\n\t\tif isEmbed && rxVideos.MatchString(attributeValues) {\n\t\t\treturn\n\t\t}\n\n\t\tif isEmbed && rxVideos.MatchString(target.Text()) {\n\t\t\treturn\n\t\t}\n\n\t\ttarget.Remove()\n\t})\n}",
"func (t *Ticket) Eliminate(words []string) []string {\n\tresults := []string{}\n\tfor _, word := range words {\n\t\tswitch word {\n\t\tcase \"Reserve\":\n\t\t\tresults = append(results, word)\n\t\tcase \"Field\":\n\t\t\tresults = append(results, word)\n\t\tcase \"Box\":\n\t\t\tresults = append(results, word)\n\t\tcase \"Top\":\n\t\t\tresults = append(results, word)\n\t\tcase \"Deck\":\n\t\t\tresults = append(results, word)\n\t\tcase \"Loge\":\n\t\t\tresults = append(results, word)\n\t\tcase \"Right\":\n\t\t\tresults = append(results, word)\n\t\tcase \"Left\":\n\t\t\tresults = append(results, word)\n\t\tcase \"Pavilion\":\n\t\t\tresults = append(results, word)\n\t\tcase \"Dugout\":\n\t\t\tresults = append(results, word)\n\t\tcase \"Club\":\n\t\t\tresults = append(results, word)\n\t\tcase \"Baseline\":\n\t\t\tresults = append(results, word)\n\t\t}\n\t}\n\n\treturn results\n}",
"func (ChannelStrips) AvalonVT737TubeChannelStrip(){}",
"func grepLines(data string, like string) []string {\n\tvar result = []string{}\n\tif like == \"\" {\n\t\tlog.Printf(\"ERROR: unable to look for empty pattern\")\n\t\treturn result\n\t}\n\tlike_bytes := []byte(like)\n\n\tscanner := bufio.NewScanner(strings.NewReader(data))\n\tfor scanner.Scan() {\n\t\tif bytes.Contains(scanner.Bytes(), like_bytes) {\n\t\t\tresult = append(result, scanner.Text())\n\t\t}\n\t}\n\tif err := scanner.Err(); err != nil {\n\t\tlog.Printf(\"WARN: error scanning string for %s: %s\", like, err)\n\t}\n\n\treturn result\n}",
"func DisplayNotesBySearch(search string) {\n\tnotes := jot.GetNotes()\n\tvar filtered jot.Notes\n\tkeywords := strings.Split(search, \" \")\n\n\t// First find notes with the keywords in the title\n\tfor i := 0; i < len(notes.Notes); i++ {\n\t\tfor j, found := 0, false; j < len(keywords) && !found; j++ {\n\t\t\tif strings.Contains(strings.ToLower(notes.Notes[i].Title), strings.ToLower(keywords[j])) {\n\t\t\t\tfiltered.Notes = append(filtered.Notes, notes.Notes[i])\n\t\t\t\tfound = true\n\t\t\t}\n\t\t}\n\t}\n\tdisplayNotes(filtered)\n}",
"func init() {\n\tfmt.Println(\"Reading stocks\")\n\tcapAlphabets := []string{\"A\", \"B\", \"C\", \"D\", \"E\", \"E\", \"F\", \"G\", \"H\", \"I\", \"J\", \"K\", \"L\", \"M\", \"N\", \"O\", \"P\", \"Q\", \"R\", \"S\", \"T\", \"U\", \"V\", \"W\", \"X\", \"Y\", \"Z\"}\n\tfor _, char := range capAlphabets {\n\t\tdoc, err := getStockQuote(sourceURL + char)\n\t\tif err != nil {\n\t\t\tlog.Panic(\"Error in fetching stock URLs \", err.Error())\n\t\t}\n\t\tdoc.Find(\".bl_12\").Each(func(i int, s *goquery.Selection) {\n\t\t\tlink, _ := s.Attr(\"href\")\n\t\t\tstockName := s.Text()\n\t\t\tif match, _ := regexp.MatchString(`^(http:\\/\\/www\\.|https:\\/\\/www\\.|http:\\/\\/|https:\\/\\/)?[a-z0-9]+([\\-\\.]{1}[a-z0-9]+)*\\.[a-z]{2,5}(:[0-9]{1,5})?(\\/.*)?$`, link); match {\n\t\t\t\tstockURLSplit := strings.Split(link, \"/\")\n\t\t\t\tstocksURL[strings.ToLower(stockName)] = stockURLValue{stockURLSplit[5], stockURLSplit[6], stockURLSplit[7]}\n\t\t\t}\n\t\t})\n\t}\n\tfmt.Println(\"Stocks Read Succesfull\")\n}",
"func init() {\n\n\t// whosonfirst-data-venue-us-ca-1533149830.tar.bz2\n\t// whosonfirst-data-venue-us-ny-latest.db.bz2\n\n\tre_distname = regexp.MustCompile(`([a-z\\-0-9]+)\\-(\\d+|latest)\\.(.*)$`)\n\n\t// this needs to be moved in to go-whosonfirst-dist\n\n\tre_disttype = regexp.MustCompile(`x\\-urn\\:([^\\:]+)\\:([^\\:]+)\\:([^\\#]+)(?:\\#(.*))?`)\n}",
"func ProxiesFromTheSpeedX() []string {\n\tresp, err := http.Get(\"https://github.com/TheSpeedX/PROXY-List/blob/master/http.txt\")\n\tif err != nil {\n\t\treturn nil\n\t}\n\tdefer resp.Body.Close()\n\tbb, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn nil\n\t}\n\tstr := string(bb)\n\tproxies := ipPortRegex.FindAllString(str, -1)\n\treturn proxies\n}",
"func (f *fetcher) fetchRobotsTxt(u *url.URL) (*robotstxt.RobotsData, error) {\n\t// check if given url is present in cache\n\tuString := u.String()\n\tr, err := f.robotsTxtCache.GetIFPresent(uString)\n\tif err == nil {\n\t\t// item is present in cache\n\t\treturn r.(*robotstxt.RobotsData), nil\n\t}\n\n\tresp, err := f.httpClient.Get(uString)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\tlog.Printf(\"INFO: Requested %s, statusCode=%d\", uString, resp.StatusCode)\n\n\trobots, err := robotstxt.FromResponse(resp)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// set parsed robots.txt to cache\n\tf.robotsTxtCache.Set(uString, robots)\n\n\treturn robots, nil\n}",
"func (c CustomScraper) Scrape(_ *http.Request, closer io.ReadCloser) ([]*url.URL, error) {\n\tdefer closer.Close()\n\n\tvar links []*url.URL\n\n\tz := html.NewTokenizer(closer)\n\n\tfor {\n\t\ttt := z.Next()\n\n\t\tif tt == html.ErrorToken {\n\t\t\treturn links, nil\n\t\t}\n\n\t\tif tt == html.TextToken {\n\t\t\ttoken := z.Token()\n\n\t\t\tfmt.Println(strings.TrimSpace(token.Data))\n\t\t}\n\t}\n}",
"func clean(podcastCount int, cleanc <-chan *cleaningWhitelist) (int, error) {\n\tkeepers := make(map[string]struct{})\n\n\tfor i := 0; i < podcastCount; i++ {\n\t\twl := <-cleanc\n\t\tfor _, p := range wl.paths {\n\t\t\tkeepers[p] = struct{}{}\n\t\t}\n\t\tdefer close(wl.cleanFinishedC)\n\t}\n\n\tvar rmCount int\n\n\tcleanSubdir := func(subd string) error {\n\t\tdirContents, err := ioutil.ReadDir(subd)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfor _, info := range dirContents {\n\t\t\tpath := filepath.Join(subd, info.Name())\n\t\t\tif _, found := keepers[path]; found {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := os.Remove(path); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\trmCount++\n\t\t}\n\t\treturn nil\n\t}\n\n\tfor _, subd := range []string{dataSubdirEpisodes, dataSubdirMetadata} {\n\t\tif err := cleanSubdir(subd); err != nil {\n\t\t\treturn rmCount, err\n\t\t}\n\t}\n\treturn rmCount, nil\n}",
"func Anime(originalTerm string, maxLength int) []*arn.Anime {\n\tterm := strings.ToLower(stringutils.RemoveSpecialCharacters(originalTerm))\n\tresults := make([]*Result, 0, maxLength)\n\n\tcheck := func(text string) float64 {\n\t\tif text == \"\" {\n\t\t\treturn 0\n\t\t}\n\n\t\treturn stringutils.AdvancedStringSimilarity(term, strings.ToLower(stringutils.RemoveSpecialCharacters(text)))\n\t}\n\n\tadd := func(anime *arn.Anime, similarity float64) {\n\t\tsimilarity += float64(anime.Popularity.Total()) * popularityDamping\n\n\t\tif anime.Type != \"tv\" && anime.Type != \"movie\" {\n\t\t\tsimilarity -= 0.3\n\t\t}\n\n\t\tresults = append(results, &Result{\n\t\t\tobj: anime,\n\t\t\tsimilarity: similarity,\n\t\t})\n\t}\n\n\tfor anime := range arn.StreamAnime() {\n\t\tif anime.IsDraft {\n\t\t\tcontinue\n\t\t}\n\n\t\tif anime.ID == originalTerm {\n\t\t\treturn []*arn.Anime{anime}\n\t\t}\n\n\t\t// Canonical title\n\t\tsimilarity := check(anime.Title.Canonical)\n\n\t\tif similarity >= MinStringSimilarity {\n\t\t\tadd(anime, similarity)\n\t\t\tcontinue\n\t\t}\n\n\t\t// English\n\t\tsimilarity = check(anime.Title.English)\n\n\t\tif similarity >= MinStringSimilarity {\n\t\t\tadd(anime, similarity)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Romaji\n\t\tsimilarity = check(anime.Title.Romaji)\n\n\t\tif similarity >= MinStringSimilarity {\n\t\t\tadd(anime, similarity)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Synonyms\n\t\tfor _, synonym := range anime.Title.Synonyms {\n\t\t\tsimilarity := check(synonym)\n\n\t\t\tif similarity >= MinStringSimilarity {\n\t\t\t\tadd(anime, similarity)\n\t\t\t\tgoto nextAnime\n\t\t\t}\n\t\t}\n\n\t\t// Japanese\n\t\tsimilarity = check(anime.Title.Japanese)\n\n\t\tif similarity >= MinStringSimilarity {\n\t\t\tadd(anime, similarity)\n\t\t\tcontinue\n\t\t}\n\n\tnextAnime:\n\t}\n\n\t// Sort\n\tsort.Slice(results, func(i, j int) bool {\n\t\treturn results[i].similarity > results[j].similarity\n\t})\n\n\t// Limit\n\tif len(results) >= maxLength {\n\t\tresults = results[:maxLength]\n\t}\n\n\t// Final list\n\tfinal := make([]*arn.Anime, len(results))\n\n\tfor i, result := range results {\n\t\tfinal[i] = result.obj.(*arn.Anime)\n\t}\n\n\treturn final\n}",
"func (parser *TwitterParser) HandleText(message *core.Message, bot core.IBot) error {\n\tr := regexp.MustCompile(`https://(?i:twitter|x)\\.com\\S+/(\\d+)\\S*`)\n\tmatch := r.FindAllStringSubmatch(message.Text, -1)\n\n\tif len(match) > 0 {\n\t\tparser.l.Infof(\"Processing %s\", match[0][0])\n\t} else {\n\t\treturn fmt.Errorf(\"not implemented\")\n\t}\n\n\tfor _, m := range match {\n\t\terr := parser.tweetHandler.Process(m[1], message, bot)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}",
"func (self *nntpConnection) scrapeServer(daemon NNTPDaemon, conn *textproto.Conn) (err error) {\n log.Println(self.name, \"scrape remote server\")\n success := true\n if success {\n // send newsgroups command\n err = conn.PrintfLine(\"NEWSGROUPS %d 000000 GMT\", timeNow())\n if err == nil {\n // read response line\n code, _, err := conn.ReadCodeLine(231)\n if code == 231 {\n var groups []string\n // valid response, we expect a multiline\n dr := conn.DotReader()\n // read lines\n sc := bufio.NewScanner(dr)\n for sc.Scan() {\n line := sc.Text()\n idx := strings.Index(line, \" \")\n if idx > 0 {\n groups = append(groups, line[:idx])\n } else {\n // invalid line? wtf.\n log.Println(self.name, \"invalid line in newsgroups multiline response:\", line)\n }\n }\n err = sc.Err()\n if err == nil {\n log.Println(self.name, \"got list of newsgroups\")\n // for each group\n for _, group := range groups {\n var banned bool\n // check if the newsgroup is banned\n banned, err = daemon.database.NewsgroupBanned(group)\n if banned {\n // we don't want it\n } else if err == nil {\n // scrape the group\n err = self.scrapeGroup(daemon, conn, group)\n if err != nil {\n log.Println(self.name, \"did not scrape\", group, err)\n break\n }\n } else {\n // error while checking for ban\n log.Println(self.name, \"checking for newsgroup ban failed\", err)\n break\n }\n }\n } else {\n // we got a bad multiline block?\n log.Println(self.name, \"bad multiline response from newsgroups command\", err)\n }\n } else if err == nil {\n // invalid response no error\n log.Println(self.name, \"gave us invalid response to newsgroups command\", code)\n } else {\n // invalid response with error\n log.Println(self.name, \"error while reading response from newsgroups command\", err)\n }\n } else {\n log.Println(self.name, \"failed to send newsgroups command\", err)\n }\n } else if err == nil {\n // failed to switch mode to reader\n log.Println(self.name, \"does not do reader mode, bailing scrape\")\n } else {\n // failt to switch mode because of error\n log.Println(self.name, \"failed to switch to reader mode when scraping\", err)\n }\n return\n}",
"func (p DefaultParser) Search(body io.Reader) []string {\n\tvar urls []string\n\n\ttokenizer := html.NewTokenizer(body)\n\n\tfound := false\n\tfor {\n\t\ttoken := tokenizer.Next()\n\n\t\tif token == html.ErrorToken {\n\t\t\tbreak\n\t\t}\n\n\t\tif token == html.StartTagToken {\n\t\t\tif !found {\n\t\t\t\th3 := tokenizer.Token()\n\n\t\t\t\tisH3 := h3.Data == \"h3\"\n\t\t\t\tif isH3 {\n\t\t\t\t\tfor _, attr := range h3.Attr {\n\t\t\t\t\t\tif attr.Key == \"class\" && strings.Contains(attr.Val, \"product_title\") {\n\t\t\t\t\t\t\tfound = true\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif found {\n\t\t\t\ta := tokenizer.Token()\n\n\t\t\t\tisA := a.Data == \"a\"\n\t\t\t\tif isA {\n\t\t\t\t\tfor _, attr := range a.Attr {\n\t\t\t\t\t\tif attr.Key == \"href\" && strings.HasPrefix(attr.Val, \"/game/\") {\n\t\t\t\t\t\t\turls = append(urls, \"https://www.metacritic.com\"+attr.Val)\n\t\t\t\t\t\t\tfound = false\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t}\n\t\t\t}\n\n\t\t}\n\t}\n\n\treturn urls\n}",
"func cronSpotify() {\n\tpauses, errDb := db.GetFromUts(time.Now().Unix())\n\tif errDb != nil {\n\t\tlog.Println(\"DB FAILURE : \", errDb)\n\t}\n\n\tfor _, pause := range pauses {\n\t\ttok, errTok := pause.GetToken()\n\t\tif errTok != nil {\n\t\t\tlog.Println(\"MARSHAL FAILURE : \", errTok)\n\t\t\tcontinue\n\t\t}\n\n\t\tclient := sleepspotify.GetClient(tok)\n\t\tgo pauseSpotifyRoutine(client, pause)\n\t}\n}",
"func Song() string {\n\tvar songTexts []string\n\tlines := len(songText)\n\n\tfor i := 0; i <= lines; i++ {\n\t\tsongTexts = append(songTexts, Verse(\"This is\", songText[lines-i:], \"the house that Jack built.\"))\n\t}\n\n\treturn strings.Join(songTexts, \"\\n\\n\")\n}",
"func GetAllSongs(w http.ResponseWriter, r *http.Request) {\n\tvar test foo\n\n\tif err := json.NewDecoder(r.Body).Decode(&test); err != nil {\n\t\terr := fmt.Errorf(\"error when reading request body: %w\", err)\n\t\tlog.Logger.Errorf(\"GetAllSongs failed: %v\", err)\n\t\treturn\n\t}\n\n\tif test.Search == \"\" {\n\t\tlog.Logger.Infof(\"GetAllSongs: request body was empty: %v\", test)\n\n\t\thttp.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tlog.Logger.Infof(\"GetAllSongs: successfully read request body: %v\", test)\n\n\tid, err := internal.GetArtistID(test.Search)\n\tif err != nil {\n\t\terr := fmt.Errorf(\"error when retrieving artist id: %w\", err)\n\t\tlog.Logger.Errorf(\"GetAllSongs failed: %v\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tsongs, err := internal.SongsByArtist(*id)\n\tif err != nil {\n\t\terr := fmt.Errorf(\"error when searching songs by artist: %w\", err)\n\t\tlog.Logger.Errorf(\"GetAllSongs failed: %v\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tif err := json.NewEncoder(w).Encode(songs); err != nil {\n\t\terr := fmt.Errorf(\"error when encoding response: %w\", err)\n\t\tlog.Logger.Errorf(\"GetAllSongs failed: %v\", err)\n\n\t\thttp.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)\n\t\treturn\n\t}\n}",
"func updateRegexesWrapped(name, mention string, alias rune) (preRe, postRe, bareRe *regexp.Regexp, errpre, errpost, errbare error) {\n\tpreRe = nil\n\tpostRe = nil\n\tif alias == 0 && len(name) == 0 {\n\t\tLog(robot.Error, \"Robot has no name or alias, and will only respond to direct messages\")\n\t\treturn\n\t}\n\tnames := []string{}\n\tbarenames := []string{}\n\tif alias != 0 {\n\t\tif strings.ContainsRune(string(escapeAliases), alias) {\n\t\t\tnames = append(names, `\\`+string(alias))\n\t\t\tbarenames = append(barenames, `\\`+string(alias))\n\t\t} else {\n\t\t\tnames = append(names, string(alias))\n\t\t\tbarenames = append(barenames, string(alias))\n\t\t}\n\t}\n\tif len(name) > 0 {\n\t\tif len(mention) > 0 {\n\t\t\tnames = append(names, `(?i:`+name+`)[:, ]`)\n\t\t\tbarenames = append(barenames, `(?i:`+name+`\\??)`)\n\t\t} else {\n\t\t\tnames = append(names, `(?i:@?`+name+`[:, ])`)\n\t\t\tbarenames = append(barenames, `(?i:@?`+name+`\\??)`)\n\t\t}\n\t}\n\tif len(mention) > 0 {\n\t\tnames = append(names, `(?:@`+mention+`[:, ])`)\n\t\tbarenames = append(barenames, `(?:@`+mention+`\\??)`)\n\t}\n\tpreString := `^(?s)(?i:(` + strings.Join(names, \"|\") + `)\\s*)(.*)$`\n\tpreRe, errpre = regexp.Compile(preString)\n\t// NOTE: the preString regex matches a bare alias, but not a bare name\n\tif len(name) > 0 {\n\t\tpostString := `^([^,@]+),\\s+(?i:@?` + name + `)([.?!])?$`\n\t\tpostRe, errpost = regexp.Compile(postString)\n\t\tbareString := `^@?(?i:` + strings.Join(barenames, \"|\") + `)$`\n\t\tbareRe, errbare = regexp.Compile(bareString)\n\t}\n\treturn\n}",
"func get_song_info(id string) {\n\tsongs := strings.Split(master_list, \"\\n\")\n\tfor _, s := range songs {\n\t\tsong_id := strings.Split(s, \":\")[0]\n\t\tif song_id == id {\n\t\t\tfmt.Println(s + \"\\n\")\n\t\t\treturn\n\t\t}\n\t}\n\tfmt.Println(\"Song not found.\")\n\treturn\n}",
"func (w *worker) requestRobotsTxt(ctx *URLContext) {\n\t// Ask if it should be fetched\n\tif robData, reqRob := w.opts.Extender.RequestRobots(ctx, w.opts.RobotUserAgent); !reqRob {\n\t\tw.logFunc(LogInfo, \"using robots.txt from cache\")\n\t\tw.robotsGroup = w.getRobotsTxtGroup(ctx, robData, nil)\n\n\t} else if res, ok := w.fetchURL(ctx, w.opts.UserAgent, false); ok {\n\t\t// Close the body on function end\n\t\tdefer res.Body.Close()\n\t\tw.robotsGroup = w.getRobotsTxtGroup(ctx, nil, res)\n\t}\n}",
"func LoadSongFromFile(pathToInput string) Song {\n // make sure that the path exists\n file, err := os.Open(pathToInput)\n if err != nil {\n log.Fatal(err)\n }\n defer file.Close()\n // define the new song\n newSong := Song{Name:filepath.Base(pathToInput), PathToInput:pathToInput, Length: -1}\n\n scanner := bufio.NewScanner(file)\n for scanner.Scan() {\n var rawLine = scanner.Text()\n //split the line into fields\n var line = strings.Fields(rawLine)\n //Make sure we have all the data presend in the track\n if len(line) < 3 {\n log.Fatal(\"Malformed track detected: \" + rawLine)\n }\n\n // interpret the parts of the stored track\n name, pattern, pathToSample := line[0], line[1], line[2]\n\n // get current track length and validate it matches\n var trackLength = len(pattern)\n if newSong.Length > 0 && newSong.Length != trackLength {\n log.Fatal(\"Mismatch on track length compared to previous track: \" + rawLine)\n }\n\n var newTrack = track.LoadTrack(name, pattern, pathToSample)\n\n // Since all tracks should be equal length\n // we can use the track's length as the whole song's length\n newSong.Length = trackLength\n\n // Ad the track to the song\n newSong.TrackList = append(newSong.TrackList, newTrack)\n }\n\n //log any read errors\n if err := scanner.Err(); err != nil {\n log.Fatal(err)\n }\n return newSong\n}",
"func parseQuery(m *dns.Msg) {\n\tfor _, q := range m.Question {\n\t\tswitch q.Qtype {\n\t\tcase dns.TypeTXT:\n\t\t\t// Debug log\n\t\t\tif mConfig.Debug {\n\t\t\t\tlog.Printf(\"TXT Query for %s\\n\", q.Name)\n\t\t\t}\n\n\t\t\t// Get IP\n\t\t\treplacer := strings.NewReplacer(\n\t\t\t\t\".\"+mConfig.Suffix+\".\", \"\",\n\t\t\t\t\"x\", \":\",\n\t\t\t\t\"z\", \".\")\n\t\t\tip := replacer.Replace(q.Name)\n\n\t\t\t// Send response\n\t\t\tfor _, response := range g.GeoHandle(ip) {\n\t\t\t\tr := new(dns.TXT)\n\t\t\t\tr.Hdr = dns.RR_Header{\n\t\t\t\t\tName: q.Name,\n\t\t\t\t\tRrtype: dns.TypeTXT,\n\t\t\t\t\tClass: dns.ClassINET,\n\t\t\t\t\tTtl: 1,\n\t\t\t\t}\n\t\t\t\tr.Txt = []string{response}\n\t\t\t\tm.Answer = append(m.Answer, r)\n\t\t\t}\n\t\t}\n\t}\n}",
"func GetDocsFromTxt(textURL string) (docs []*model.Document, err error) {\n\n\ttextDocument, err := getPage(textURL, retryAttempts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t//\n\ti := strings.LastIndex(textURL, \"/\")\n\tbaseURL := textURL[0 : i+1]\n\n\tdocs, err = parseFullText(baseURL, textDocument)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif len(docs) == 0 {\n\t\treturn nil, fmt.Errorf(\"no docs found during parse\")\n\t}\n\n\tfor _, doc := range docs {\n\t\t// replace links.\n\t\tdoc.Body = replaceRelativeLinks(baseURL, doc.Body)\n\t\tif doc.Body == \"\" {\n\t\t\treturn nil, fmt.Errorf(\"doc body error during parse\")\n\t\t}\n\t}\n\treturn\n}",
"func Verse(i int) string {\n\tsentence := fmt.Sprintf(\"I know an old lady who swallowed a %s.\\n\", lyrics[i][0])\n\tsentence = sentence + lyrics[i][1]\n\tfor k := i; k > 0; k-- {\n\t\tif i != 1 && i != 8 {\n\t\t\tsentence = sentence + \"\\n\" + lyrics[k][2]\n\t\t}\n\t}\n\treturn sentence\n\n}"
] | [
"0.52739537",
"0.51263285",
"0.5093622",
"0.5088494",
"0.50840664",
"0.5020079",
"0.50154114",
"0.49768645",
"0.49582195",
"0.48772937",
"0.47692233",
"0.4733775",
"0.47052535",
"0.4684843",
"0.46371776",
"0.463235",
"0.45981294",
"0.45856765",
"0.4562131",
"0.44904724",
"0.44352776",
"0.44002908",
"0.43787473",
"0.43763942",
"0.43549684",
"0.43482032",
"0.43402597",
"0.4306632",
"0.43043417",
"0.42944455",
"0.42938817",
"0.428025",
"0.4271312",
"0.42675853",
"0.42518663",
"0.424045",
"0.42251313",
"0.42201453",
"0.4202301",
"0.41807428",
"0.41770315",
"0.41659984",
"0.41455057",
"0.41332677",
"0.41303352",
"0.4122751",
"0.4119993",
"0.4110391",
"0.40894887",
"0.40869716",
"0.4086261",
"0.40847754",
"0.40687037",
"0.4066048",
"0.40506014",
"0.4048806",
"0.40432423",
"0.40415508",
"0.4028294",
"0.40077913",
"0.40073165",
"0.4006982",
"0.40054026",
"0.40027583",
"0.39808822",
"0.39781976",
"0.39511836",
"0.39391387",
"0.3938104",
"0.3935353",
"0.392437",
"0.39187035",
"0.39034873",
"0.38942078",
"0.3891223",
"0.38902214",
"0.38849017",
"0.38831857",
"0.38783976",
"0.38780358",
"0.38692656",
"0.38644916",
"0.3859115",
"0.3857361",
"0.385681",
"0.38497078",
"0.38467705",
"0.3837058",
"0.38282716",
"0.3828268",
"0.38260576",
"0.38247374",
"0.38222542",
"0.38221696",
"0.38205314",
"0.38168618",
"0.3815593",
"0.38119665",
"0.38087025",
"0.38086003"
] | 0.58847344 | 0 |
WithProxyConfig returns a proxy config functional option | func WithProxyConfig(cfg config.Proxy) GRPCOption {
return func(h *GRPCHandler) {
h.proxyCfg = cfg
}
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func WithProxyUrl(proxyURL string) configurer {\n\treturn func(conf *config) {\n\t\tconf.proxyURL = proxyURL\n\t}\n}",
"func (o MustGatherSpecOutput) ProxyConfig() MustGatherSpecProxyConfigPtrOutput {\n\treturn o.ApplyT(func(v MustGatherSpec) *MustGatherSpecProxyConfig { return v.ProxyConfig }).(MustGatherSpecProxyConfigPtrOutput)\n}",
"func Proxy(addr string) Option {\n\treturn func(o *Options) {\n\t\to.Proxy = addr\n\t}\n}",
"func WithProxy(proxyURL string) (Option, error) {\n\tu, err := url.Parse(proxyURL)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to parse proxy url %q: %s\", proxyURL, err)\n\t}\n\n\treturn func(c *Client) {\n\t\ttransport := http.DefaultTransport.(*http.Transport).Clone()\n\t\tif c.client.Transport != nil {\n\t\t\ttransport = c.client.Transport.(*http.Transport).Clone()\n\t\t}\n\n\t\ttransport.Proxy = http.ProxyURL(u)\n\n\t\tc.client.Transport = transport\n\t}, nil\n}",
"func WithProxy(proxyURL string) (Option, error) {\n\tu, err := url.Parse(proxyURL)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to parse proxy url %q: %s\", proxyURL, err)\n\t}\n\n\treturn func(c *Client) {\n\t\tvar transport *http.Transport = LazyCreateNewTransport(c)\n\t\ttransport.Proxy = http.ProxyURL(u)\n\t\tc.client.Transport = transport\n\t}, nil\n}",
"func WithProxy(p proxy.BackwardProxy) Option {\n\treturn Option{F: func(o *internal_server.Options, di *utils.Slice) {\n\t\to.Once.OnceOrPanic()\n\t\tdi.Push(fmt.Sprintf(\"WithProxy(%T)\", p))\n\n\t\tif o.Proxy != nil {\n\t\t\tpanic(fmt.Errorf(\"reassignment of Proxy is not allowed: %T -> %T\", o.Proxy, p))\n\t\t}\n\t\to.Proxy = p\n\t}}\n}",
"func WithProxy() func(*engine.Spec) {\n\tenviron := map[string]string{}\n\tif value := getenv(\"no_proxy\"); value != \"\" {\n\t\tenviron[\"no_proxy\"] = value\n\t\tenviron[\"NO_PROXY\"] = value\n\t}\n\tif value := getenv(\"http_proxy\"); value != \"\" {\n\t\tenviron[\"http_proxy\"] = value\n\t\tenviron[\"HTTP_PROXY\"] = value\n\t}\n\tif value := getenv(\"https_proxy\"); value != \"\" {\n\t\tenviron[\"https_proxy\"] = value\n\t\tenviron[\"HTTPS_PROXY\"] = value\n\t}\n\treturn WithEnviron(environ)\n}",
"func (o MustGatherSpecPtrOutput) ProxyConfig() MustGatherSpecProxyConfigPtrOutput {\n\treturn o.ApplyT(func(v *MustGatherSpec) *MustGatherSpecProxyConfig {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.ProxyConfig\n\t}).(MustGatherSpecProxyConfigPtrOutput)\n}",
"func WithHTTPProxy(proxy string) ClientOption {\n\treturn withHTTPProxy{proxy}\n}",
"func (c Capabilities) Proxy(p ProxyConfig) Capabilities {\n\tc[\"proxy\"] = p\n\treturn c\n}",
"func NewProxy(c *ProxyConfig) (proxy.Proxy, error) {\n\tif c == nil {\n\t\treturn nil, errors.New(\"config should be provided\")\n\t}\n\n\tif err := c.Validate(); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Yeah, not a good practice at all but I guess it's fine for now.\n\tkafkaproxy.ActualDefaultRequestHandler.RequestKeyHandlers.Set(protocol.RequestAPIKeyProduce, NewProduceRequestHandler(c.MessageHandlers...))\n\n\tif c.BrokersMapping == nil {\n\t\treturn nil, errors.New(\"Brokers mapping is required\")\n\t}\n\n\tif c.Debug {\n\t\t_ = server.Server.Flags().Set(\"log-level\", \"debug\")\n\t}\n\n\tfor _, v := range c.ExtraConfig {\n\t\tf := strings.Split(v, \"=\")\n\t\t_ = server.Server.Flags().Set(f[0], f[1])\n\t}\n\n\tfor _, v := range c.BrokersMapping {\n\t\t_ = server.Server.Flags().Set(\"bootstrap-server-mapping\", v)\n\t}\n\n\tfor _, v := range c.DialAddressMapping {\n\t\t_ = server.Server.Flags().Set(\"dial-address-mapping\", v)\n\t}\n\n\treturn func(_ context.Context) error {\n\t\treturn server.Server.Execute()\n\t}, nil\n}",
"func newProxyConfig(config PodConfig) interface{} {\n\tswitch config.ProxyType {\n\tcase NoopProxyType:\n\t\treturn nil\n\tcase CCProxyType:\n\t\tvar ccConfig CCProxyConfig\n\t\terr := mapstructure.Decode(config.ProxyConfig, &ccConfig)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn ccConfig\n\tdefault:\n\t\treturn nil\n\t}\n}",
"func NewProxyConfig(brokersMapping []string, opts ...ProxyOption) (*ProxyConfig, error) {\n\tc := &ProxyConfig{BrokersMapping: brokersMapping}\n\tfor _, opt := range opts {\n\t\tif err := opt(c); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn c, c.Validate()\n}",
"func ConfigureProxyDialer() error {\n\t// load proxy configuration for tests\n\tvar cert tls.Certificate\n\troots := x509.NewCertPool()\n\n\tsecretsPath := \"secrets\"\n\tif _, err := os.Stat(path.Join(secretsPath, \"proxy-client.pem\")); os.IsNotExist(err) {\n\t\tsecretsPath = \"../../secrets\"\n\t}\n\tcert, err := tls.LoadX509KeyPair(path.Join(secretsPath, \"proxy-client.pem\"), path.Join(secretsPath, \"proxy-client.key\"))\n\tif err != nil {\n\t\treturn err\n\t}\n\tca, err := ioutil.ReadFile(path.Join(secretsPath, \"proxy-ca.pem\"))\n\tif err != nil {\n\t\treturn err\n\t}\n\tif ok := roots.AppendCertsFromPEM(ca); !ok {\n\t\treturn fmt.Errorf(\"error configuring proxy\")\n\t}\n\n\troundtrippers.PrivateEndpointDialHook = func(location string) func(context.Context, string, string) (net.Conn, error) {\n\t\treturn func(ctx context.Context, network, address string) (net.Conn, error) {\n\t\t\tproxyEnvName := \"PROXYURL_\" + strings.ToUpper(location)\n\t\t\tproxyURL := os.Getenv(proxyEnvName)\n\t\t\tif proxyURL == \"\" {\n\t\t\t\treturn nil, fmt.Errorf(\"%s not set\", proxyEnvName)\n\t\t\t}\n\n\t\t\tc, err := tls.Dial(\"tcp\", proxyURL, &tls.Config{\n\t\t\t\tRootCAs: roots,\n\t\t\t\tCertificates: []tls.Certificate{cert},\n\t\t\t\tServerName: \"proxy-server\",\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tr := bufio.NewReader(c)\n\n\t\t\treq, err := http.NewRequest(http.MethodConnect, \"\", nil)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\treq.Host = address\n\n\t\t\terr = req.Write(c)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tresp, err := http.ReadResponse(r, req)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif resp.StatusCode != http.StatusOK {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected status code %d\", resp.StatusCode)\n\t\t\t}\n\n\t\t\treturn &conn{Conn: c, r: r}, nil\n\t\t}\n\t}\n\n\treturn nil\n}",
"func (s *HTTPServer) AgentConnectProxyConfig(resp http.ResponseWriter, req *http.Request) (interface{}, error) {\n\t// Get the proxy ID. Note that this is the ID of a proxy's service instance.\n\tid := strings.TrimPrefix(req.URL.Path, \"/v1/agent/connect/proxy/\")\n\n\t// Maybe block\n\tvar queryOpts structs.QueryOptions\n\tif parseWait(resp, req, &queryOpts) {\n\t\t// parseWait returns an error itself\n\t\treturn nil, nil\n\t}\n\n\t// Parse the token - don't resolve a proxy token to a real token\n\t// that will be done with a call to verifyProxyToken later along with\n\t// other security relevant checks.\n\tvar token string\n\ts.parseTokenWithoutResolvingProxyToken(req, &token)\n\n\t// Parse hash specially since it's only this endpoint that uses it currently.\n\t// Eventually this should happen in parseWait and end up in QueryOptions but I\n\t// didn't want to make very general changes right away.\n\thash := req.URL.Query().Get(\"hash\")\n\n\treturn s.agentLocalBlockingQuery(resp, hash, &queryOpts,\n\t\tfunc(ws memdb.WatchSet) (string, interface{}, error) {\n\t\t\t// Retrieve the proxy specified\n\t\t\tproxy := s.agent.State.Proxy(id)\n\t\t\tif proxy == nil {\n\t\t\t\tresp.WriteHeader(http.StatusNotFound)\n\t\t\t\tfmt.Fprintf(resp, \"unknown proxy service ID: %s\", id)\n\t\t\t\treturn \"\", nil, nil\n\t\t\t}\n\n\t\t\t// Lookup the target service as a convenience\n\t\t\ttarget := s.agent.State.Service(proxy.Proxy.TargetServiceID)\n\t\t\tif target == nil {\n\t\t\t\t// Not found since this endpoint is only useful for agent-managed proxies so\n\t\t\t\t// service missing means the service was deregistered racily with this call.\n\t\t\t\tresp.WriteHeader(http.StatusNotFound)\n\t\t\t\tfmt.Fprintf(resp, \"unknown target service ID: %s\", proxy.Proxy.TargetServiceID)\n\t\t\t\treturn \"\", nil, nil\n\t\t\t}\n\n\t\t\t// Validate the ACL token - because this endpoint uses data local to a single\n\t\t\t// agent, this function is responsible for all enforcement regarding\n\t\t\t// protection of the configuration. verifyProxyToken will match the proxies\n\t\t\t// token to the correct service or in the case of being provide a real ACL\n\t\t\t// token it will ensure that the requester has ServiceWrite privileges\n\t\t\t// for this service.\n\t\t\t_, isProxyToken, err := s.agent.verifyProxyToken(token, target.Service, id)\n\t\t\tif err != nil {\n\t\t\t\treturn \"\", nil, err\n\t\t\t}\n\n\t\t\t// Watch the proxy for changes\n\t\t\tws.Add(proxy.WatchCh)\n\n\t\t\thash, err := hashstructure.Hash(proxy.Proxy, nil)\n\t\t\tif err != nil {\n\t\t\t\treturn \"\", nil, err\n\t\t\t}\n\t\t\tcontentHash := fmt.Sprintf(\"%x\", hash)\n\n\t\t\t// Set defaults\n\t\t\tconfig, err := s.agent.applyProxyConfigDefaults(proxy.Proxy)\n\t\t\tif err != nil {\n\t\t\t\treturn \"\", nil, err\n\t\t\t}\n\n\t\t\t// Only merge in telemetry config from agent if the requested is\n\t\t\t// authorized with a proxy token. This prevents us leaking potentially\n\t\t\t// sensitive config like Circonus API token via a public endpoint. Proxy\n\t\t\t// tokens are only ever generated in-memory and passed via ENV to a child\n\t\t\t// proxy process so potential for abuse here seems small. This endpoint in\n\t\t\t// general is only useful for managed proxies now so it should _always_ be\n\t\t\t// true that auth is via a proxy token but inconvenient for testing if we\n\t\t\t// lock it down so strictly.\n\t\t\tif isProxyToken {\n\t\t\t\t// Add telemetry config. 
Copy the global config so we can customize the\n\t\t\t\t// prefix.\n\t\t\t\ttelemetryCfg := s.agent.config.Telemetry\n\t\t\t\ttelemetryCfg.MetricsPrefix = telemetryCfg.MetricsPrefix + \".proxy.\" + target.ID\n\n\t\t\t\t// First see if the user has specified telemetry\n\t\t\t\tif userRaw, ok := config[\"telemetry\"]; ok {\n\t\t\t\t\t// User specified domething, see if it is compatible with agent\n\t\t\t\t\t// telemetry config:\n\t\t\t\t\tvar uCfg lib.TelemetryConfig\n\t\t\t\t\tdec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{\n\t\t\t\t\t\tResult: &uCfg,\n\t\t\t\t\t\t// Make sure that if the user passes something that isn't just a\n\t\t\t\t\t\t// simple override of a valid TelemetryConfig that we fail so that we\n\t\t\t\t\t\t// don't clobber their custom config.\n\t\t\t\t\t\tErrorUnused: true,\n\t\t\t\t\t})\n\t\t\t\t\tif err == nil {\n\t\t\t\t\t\tif err = dec.Decode(userRaw); err == nil {\n\t\t\t\t\t\t\t// It did decode! Merge any unspecified fields from agent config.\n\t\t\t\t\t\t\tuCfg.MergeDefaults(&telemetryCfg)\n\t\t\t\t\t\t\tconfig[\"telemetry\"] = uCfg\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\t// Failed to decode, just keep user's config[\"telemetry\"] verbatim\n\t\t\t\t\t// with no agent merge.\n\t\t\t\t} else {\n\t\t\t\t\t// Add agent telemetry config.\n\t\t\t\t\tconfig[\"telemetry\"] = telemetryCfg\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treply := &api.ConnectProxyConfig{\n\t\t\t\tProxyServiceID: proxy.Proxy.ProxyService.ID,\n\t\t\t\tTargetServiceID: target.ID,\n\t\t\t\tTargetServiceName: target.Service,\n\t\t\t\tContentHash: contentHash,\n\t\t\t\tExecMode: api.ProxyExecMode(proxy.Proxy.ExecMode.String()),\n\t\t\t\tCommand: proxy.Proxy.Command,\n\t\t\t\tConfig: config,\n\t\t\t\tUpstreams: proxy.Proxy.Upstreams.ToAPI(),\n\t\t\t}\n\t\t\treturn contentHash, reply, nil\n\t\t})\n}",
"func (optr *Operator) getProxyConfig() (*httpproxy.Config, error) {\n\tproxy, err := optr.proxyLister.Get(\"cluster\")\n\n\tif apierrors.IsNotFound(err) {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &httpproxy.Config{\n\t\tHTTPProxy: proxy.Status.HTTPProxy,\n\t\tHTTPSProxy: proxy.Status.HTTPSProxy,\n\t\tNoProxy: proxy.Status.NoProxy,\n\t}, nil\n}",
"func WithPROXYHeaderGetter(proxyHeaderGetter PROXYHeaderGetter) DialProxyOption {\n\treturn func(cfg *dialProxyConfig) {\n\t\tcfg.proxyHeaderGetter = proxyHeaderGetter\n\t}\n}",
"func WithProxy(ctx context.Context, addrs ...string) context.Context {\n\tif md, ok := metadata.FromContext(ctx); ok {\n\t\tmd[proxyKey] = append(md[proxyKey], addrs...)\n\t\treturn ctx\n\t}\n\treturn metadata.NewContext(ctx, metadata.MD{proxyKey: addrs})\n}",
"func Proxy(options ...Option) http.RoundTripper {\n\tp := &proxy{\n\t\tnext: http.DefaultTransport,\n\t\tscheme: \"http\",\n\t\tresolver: resolve.ResolverFunc(resolve.DNSSRV),\n\t\tpoolReporter: nil,\n\t\tfactory: pool.RoundRobin,\n\t\tregistry: nil,\n\t}\n\tp.setOptions(options...)\n\tp.registry = newRegistry(p.resolver, p.poolReporter, p.factory)\n\treturn p\n}",
"func NewProxyConfig(configMap *ProxyConfigMap, k8sUtils k8sutils.UtilsInterface) (*ProxyConfig, error) {\n\tvar proxyConfig ProxyConfig\n\terr := proxyConfig.ParseConfig(*configMap, k8sUtils)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &proxyConfig, nil\n}",
"func WithProxyClient(pc server.ProxyClient) Opt {\n\treturn func(fwdr *TCPForwarder) {\n\t\tfwdr.C = pc\n\t}\n}",
"func NewProxyFromConfig(cfg *config.ProxyConfig, hc HealthChecker) (*Proxy, []error) {\n\terrs := config.ValidateConfig(cfg)\n\tif errs != nil {\n\t\treturn nil, errs\n\t}\n\n\tp := &Proxy{\n\t\tconfig: *cfg,\n\t\thealthChecker: hc,\n\t}\n\n\tp.reverseProxy = make(map[string]*httputil.ReverseProxy)\n\tfor _, service := range p.config.Proxy.Services {\n\t\tp.reverseProxy[service.Domain] = NewRandomBackendReverseProxy(service.Hosts)\n\t}\n\n\treturn p, nil\n}",
"func LookupProxyConfig(ctx *pulumi.Context, args *LookupProxyConfigArgs, opts ...pulumi.InvokeOption) (*LookupProxyConfigResult, error) {\n\topts = internal.PkgInvokeDefaultOpts(opts)\n\tvar rv LookupProxyConfigResult\n\terr := ctx.Invoke(\"google-native:beyondcorp/v1alpha:getProxyConfig\", args, &rv, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &rv, nil\n}",
"func TelemetryHarvesterWithProxy(proxyURL *url.URL) TelemetryHarvesterOpt {\n\treturn func(cfg *telemetry.Config) {\n\t\trt := cfg.Client.Transport\n\t\tif rt == nil {\n\t\t\trt = http.DefaultTransport\n\t\t}\n\n\t\tt, ok := rt.(*http.Transport)\n\t\tif !ok {\n\t\t\tlogrus.Warning(\n\t\t\t\t\"telemetry emitter couldn't be configured with proxy, \",\n\t\t\t\t\"client transport is not an http.Transport, \",\n\t\t\t\t\"continuing without proxy support\",\n\t\t\t)\n\t\t\treturn\n\t\t}\n\n\t\tt = t.Clone()\n\t\tt.Proxy = http.ProxyURL(proxyURL)\n\t\tcfg.Client.Transport = http.RoundTripper(t)\n\t\treturn\n\t}\n}",
"func TelemetryHarvesterWithProxy(proxyURL *url.URL) TelemetryHarvesterOpt {\n\treturn func(cfg *telemetry.Config) {\n\t\trt := cfg.Client.Transport\n\t\tif rt == nil {\n\t\t\trt = http.DefaultTransport\n\t\t}\n\n\t\tt, ok := rt.(*http.Transport)\n\t\tif !ok {\n\t\t\tlogrus.Warning(\n\t\t\t\t\"telemetry emitter couldn't be configured with proxy, \",\n\t\t\t\t\"client transport is not an http.Transport, \",\n\t\t\t\t\"continuing without proxy support\",\n\t\t\t)\n\t\t\treturn\n\t\t}\n\n\t\tt = t.Clone()\n\t\tt.Proxy = http.ProxyURL(proxyURL)\n\t\tcfg.Client.Transport = http.RoundTripper(t)\n\t\treturn\n\t}\n}",
"func WithExtra(extra []string) ProxyOption {\n\treturn func(c *ProxyConfig) error {\n\t\tc.ExtraConfig = extra\n\t\treturn nil\n\t}\n}",
"func WithProxy(ctx context.Context, proxy *Proxy) context.Context {\n\tif proxy == nil {\n\t\tpanic(\"nil proxy\")\n\t}\n\treturn context.WithValue(ctx, proxyContextKey{}, proxy)\n}",
"func WithConfig(cfg Config) Option {\n\treturn optionFunc(func(c *config) {\n\t\tc.addr = cfg.Host + \":\" + strconv.FormatUint(uint64(cfg.Port), 10)\n\t})\n}",
"func withSingleProxyPort(t *testing.T) testOptionsFunc {\n\tt.Helper()\n\t// enable proxy single port mode\n\treturn func(options *testOptions) {\n\t\toptions.instanceConfigFuncs = append(options.instanceConfigFuncs, func(cfg *helpers.InstanceConfig) {\n\t\t\tcfg.Listeners = helpers.SingleProxyPortSetup(t, &cfg.Fds)\n\t\t})\n\t\toptions.serviceConfigFuncs = append(options.serviceConfigFuncs, func(cfg *servicecfg.Config) {\n\t\t\tcfg.Auth.NetworkingConfig.SetProxyListenerMode(types.ProxyListenerMode_Multiplex)\n\t\t})\n\t}\n}",
"func (e *Environment) PassThruProxyConfig() {\n\tif e.Map == nil {\n\t\treturn\n\t}\n\n\tfor _, key := range proxyEnv {\n\t\tvalue, ok := e.Map[key]\n\t\tif ok {\n\t\t e.AddIfMissing(fmt.Sprintf(\"%s%s\", public, key), value)\n\t\t}\n\t}\n}",
"func (o TaskDefinitionOutput) ProxyConfiguration() TaskDefinitionProxyConfigurationPtrOutput {\n\treturn o.ApplyT(func(v *TaskDefinition) TaskDefinitionProxyConfigurationPtrOutput { return v.ProxyConfiguration }).(TaskDefinitionProxyConfigurationPtrOutput)\n}",
"func (c *Config) GetProxyURL() string {\n\treturn c.ProxyURL\n}",
"func (c *ThreeScaleClient) GetProxyConfig(svcId string, env string, version string) (ProxyConfigElement, error) {\n\tendpoint := fmt.Sprintf(proxyConfigGet, svcId, env, version)\n\treturn c.getProxyConfig(endpoint)\n}",
"func GetProxyTransportFunc(p *config.Proxy) func(*http.Request) (*url.URL, error) {\n\treturn func(r *http.Request) (*url.URL, error) {\n\t\t// check no_proxy list first\n\t\tfor _, host := range p.NoProxy {\n\t\t\tif r.URL.Host == host {\n\t\t\t\tlog.Debugf(\"URL match no_proxy list item '%s': not using any proxy\", host)\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\n\t\t// check proxy by scheme\n\t\tconfProxy := \"\"\n\t\tif r.URL.Scheme == \"http\" {\n\t\t\tconfProxy = p.HTTP\n\t\t} else if r.URL.Scheme == \"https\" {\n\t\t\tconfProxy = p.HTTPS\n\t\t} else {\n\t\t\tlog.Warnf(\"Proxy configuration do not support scheme '%s'\", r.URL.Scheme)\n\t\t}\n\n\t\tif confProxy != \"\" {\n\t\t\tproxyURL, err := url.Parse(confProxy)\n\t\t\tif err != nil {\n\t\t\t\terr := fmt.Errorf(\"Could not parse the proxy URL for scheme %s from configuration: %s\", r.URL.Scheme, err)\n\t\t\t\tlog.Error(err.Error())\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tuserInfo := \"\"\n\t\t\tif proxyURL.User != nil {\n\t\t\t\tif _, isSet := proxyURL.User.Password(); isSet {\n\t\t\t\t\tuserInfo = \"*****:*****@\"\n\t\t\t\t} else {\n\t\t\t\t\tuserInfo = \"*****@\"\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tlog.Debugf(\"Using proxy %s://%s%s for URL '%s'\", proxyURL.Scheme, userInfo, proxyURL.Host, SanitizeURL(r.URL.String()))\n\t\t\treturn proxyURL, nil\n\t\t}\n\n\t\t// no proxy set for this request\n\t\treturn nil, nil\n\t}\n}",
"func WithConfig(config core.ConfigProvider) ConfigOption {\n\treturn func(gw *Gateway) error {\n\t\tvar err error\n\t\tsdk, err := fabsdk.New(config)\n\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tgw.sdk = sdk\n\n\t\tconfigBackend, err := config()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(configBackend) != 1 {\n\t\t\treturn errors.New(\"invalid config file\")\n\t\t}\n\n\t\tcfg := configBackend[0]\n\t\tgw.cfg = cfg\n\n\t\tvalue, ok := cfg.Lookup(\"client.organization\")\n\t\tif !ok {\n\t\t\treturn errors.New(\"No client organization defined in the config\")\n\t\t}\n\t\tgw.org = value.(string)\n\n\t\treturn nil\n\t}\n}",
"func kubeProxyConfiguration(pluginDefaults map[string]operv1.ProxyArgumentList, conf *operv1.NetworkSpec, pluginOverrides map[string]operv1.ProxyArgumentList) (string, error) {\n\tp := conf.KubeProxyConfig\n\n\targs := map[string]operv1.ProxyArgumentList{}\n\targs[\"bind-address\"] = []string{p.BindAddress}\n\tif len(conf.ClusterNetwork) == 1 {\n\t\targs[\"cluster-cidr\"] = []string{conf.ClusterNetwork[0].CIDR}\n\t}\n\targs[\"iptables-sync-period\"] = []string{p.IptablesSyncPeriod}\n\n\targs = k8sutil.MergeKubeProxyArguments(args, pluginDefaults)\n\targs = k8sutil.MergeKubeProxyArguments(args, p.ProxyArguments)\n\targs = k8sutil.MergeKubeProxyArguments(args, pluginOverrides)\n\n\treturn k8sutil.GenerateKubeProxyConfiguration(args)\n}",
"func (c *Config) UpdateProxy(pc ProxyConfig) {\n\tif pc.AllowLan != nil {\n\t\tc.general.AllowLan = *pc.AllowLan\n\t}\n\n\tc.general.Port = *or(pc.Port, &c.general.Port)\n\tif c.general.Port != 0 && (pc.AllowLan != nil || pc.Port != nil) {\n\t\tc.event <- &Event{Type: \"http-addr\", Payload: genAddr(c.general.Port, c.general.AllowLan)}\n\t}\n\n\tc.general.SocksPort = *or(pc.SocksPort, &c.general.SocksPort)\n\tif c.general.SocksPort != 0 && (pc.AllowLan != nil || pc.SocksPort != nil) {\n\t\tc.event <- &Event{Type: \"socks-addr\", Payload: genAddr(c.general.SocksPort, c.general.AllowLan)}\n\t}\n\n\tc.general.RedirPort = *or(pc.RedirPort, &c.general.RedirPort)\n\tif c.general.RedirPort != 0 && (pc.AllowLan != nil || pc.RedirPort != nil) {\n\t\tc.event <- &Event{Type: \"redir-addr\", Payload: genAddr(c.general.RedirPort, c.general.AllowLan)}\n\t}\n}",
"func (o MustGatherSpecProxyConfigOutput) HttpProxy() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v MustGatherSpecProxyConfig) *string { return v.HttpProxy }).(pulumi.StringPtrOutput)\n}",
"func (c *minecraftConn) config() *config.Config {\n\treturn c.proxy.config\n}",
"func (c *GlobalConfig) ProxyString() *gw.StringValue {\n\tif c.V1.Proxy == nil {\n\t\treturn nil\n\t}\n\n\tproxy := c.V1.Proxy\n\tif proxy.Host == nil {\n\t\treturn nil\n\t}\n\n\tb := strings.Builder{}\n\t// NOTE: from testing, it appears that Rust (hab) requires \"http://\" to be\n\t// at the head of the proxy URLs\n\tb.WriteString(\"http://\") // nolint: errcheck\n\n\tif proxy.User != nil {\n\t\tauthPart := fmt.Sprintf(\"%s:%s\", proxy.User.Value, proxy.Password.Value)\n\t\tb.WriteString(url.PathEscape(authPart)) // nolint: errcheck\n\t\tb.WriteString(\"@\") // nolint: errcheck\n\t}\n\n\thostPortPart := fmt.Sprintf(\"%s:%d\", proxy.Host.Value, proxy.Port.Value)\n\tb.WriteString(hostPortPart) // nolint: errcheck\n\treturn w.String(b.String())\n}",
"func NewProxy(opts ProxyOpts) Proxy {\n\treturn &proxy{\n\t\tappClientFn: opts.AppClientFn,\n\t\tappID: opts.AppID,\n\t\tconnectionFactory: opts.ConnectionFactory,\n\t\tacl: opts.ACL,\n\t\tresiliency: opts.Resiliency,\n\t\tmaxRequestBodySize: opts.MaxRequestBodySize,\n\t}\n}",
"func NewProxy(logger *zap.Logger, config *Config, krb5cl *client.Client) *Proxy {\n\tfp := httputil.NewForwardingProxy()\n\tfp.ErrorHandler = httpErrorHandler\n\n\tp := &Proxy{\n\t\tlogger: logger,\n\t\tconfig: config,\n\t\tkrb5cl: krb5cl,\n\t\thttpProxy: fp,\n\t}\n\n\tp.httpProxy.ErrorLog = zap.NewStdLog(logger)\n\tp.server = &http.Server{\n\t\tAddr: config.Addr.String(),\n\t\tHandler: p,\n\t\t// Disable HTTP/2.\n\t\tTLSNextProto: make(map[string]func(*http.Server, *tls.Conn, http.Handler)),\n\t\t// Timeouts\n\t\tReadTimeout: config.Timeouts.Server.ReadTimeout,\n\t\tReadHeaderTimeout: config.Timeouts.Server.ReadHeaderTimeout,\n\t\tWriteTimeout: config.Timeouts.Server.WriteTimeout,\n\t\tIdleTimeout: config.Timeouts.Server.IdleTimeout,\n\t}\n\n\treturn p\n}",
"func With(opt ...ConfigOpt) DoFunc {\n\tvar (\n\t\tdialer = &net.Dialer{\n\t\t\tLocalAddr: &net.TCPAddr{IP: net.IPv4zero},\n\t\t\tKeepAlive: 30 * time.Second,\n\t\t\tTimeout: 5 * time.Second,\n\t\t}\n\t\ttransport = &http.Transport{\n\t\t\tProxy: http.ProxyFromEnvironment,\n\t\t\tDial: dialer.Dial,\n\t\t\tResponseHeaderTimeout: 5 * time.Second,\n\t\t\tTLSClientConfig: &tls.Config{InsecureSkipVerify: false},\n\t\t\tTLSHandshakeTimeout: 5 * time.Second,\n\t\t}\n\t\tconfig = &Config{\n\t\t\tdialer: dialer,\n\t\t\ttransport: transport,\n\t\t\tclient: &http.Client{Transport: transport},\n\t\t}\n\t)\n\tfor _, o := range opt {\n\t\tif o != nil {\n\t\t\to(config)\n\t\t}\n\t}\n\treturn config.client.Do\n}",
"func WithListen(listen string) Option {\n\treturn func(p *proxy) {\n\t\tp.listen = listen\n\t}\n}",
"func getProxyURL() (bool, *url.URL) {\n\t// Grab the list of HTTP proxies from the configuration\n\tlog.Debug(\"Attempting to use one of the proxies defined in the configuration file\")\n\thttpProxyStringMap := viper.GetStringMap(\"HTTPProxies\")\n\n\t// This will be set to the URL to use or remain nil\n\tvar proxyURL *url.URL\n\n\t// Try each proxy and use it if it's available\n\tfor proxyAlias, httpProxy := range httpProxyStringMap {\n\t\tproxyURLString := httpProxy.(map[string]interface{})[\"proxyurl\"]\n\t\tif proxyURLString == nil {\n\t\t\tlog.Warnf(\"The proxy entry %s needs a ProxyURL in the configuration file: %s\", proxyAlias, httpProxy)\n\t\t\tcontinue\n\t\t}\n\n\t\tlog.Debugf(\"Checking access to proxy: %s\", proxyURLString)\n\n\t\tvar parseError error\n\t\tproxyURL, parseError = url.Parse(proxyURLString.(string))\n\t\tif parseError != nil {\n\t\t\tlog.Debugf(\"Skipping proxy URL that couldn't be parsed: %s\", parseError)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Get the proxy hostname\n\t\tproxyHost := proxyURL.Hostname()\n\n\t\t// Try looking up the hostname IP\n\t\tlog.Debugf(\"Looking up IP address for: %s\", proxyHost)\n\t\t_, lookupError := net.LookupHost(proxyHost)\n\t\tif lookupError != nil {\n\t\t\tlog.Debugf(\"Skipping proxy because the IP lookup failed: %s\", proxyHost)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Get the proxy hostname\n\t\tproxyPort := proxyURL.Port()\n\n\t\t// Try connecting to the proxy port\n\t\tlog.Debugf(\"Attempting to connect to %s on port %s\", proxyHost, proxyPort)\n\t\tconnection, dialError := net.Dial(\"tcp\", proxyHost+\":\"+proxyPort)\n\t\tif dialError != nil {\n\t\t\tlog.Debugf(\"Unable to connect to proxy %s on port %s\", proxyHost, proxyPort)\n\t\t\tcontinue\n\t\t}\n\t\terr := connection.Close()\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Unable to close connection to proxy host: %s\", err)\n\t\t}\n\n\t\t// Set the no proxy based on this config... this may be futile, need more research\n\t\tnoProxy := httpProxy.(map[string]interface{})[\"noproxy\"]\n\t\tif noProxy != nil {\n\t\t\tlog.Debugf(\"Setting NO_PROXY to %s\", noProxy)\n\t\t\terr := os.Setenv(\"NO_PROXY\", noProxy.(string))\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatalf(\"Unable to set NO_PROXY environment variable: %s\", err)\n\t\t\t}\n\t\t}\n\n\t\t// If we made it this far, the proxy is usable\n\t\tlog.Infof(\"Found a working proxy from the configuration file: %s on port %s\", proxyHost, proxyPort)\n\t\treturn true, proxyURL\n\t}\n\n\treturn false, proxyURL\n}",
"func (c *ThreeScaleClient) ListProxyConfig(svcId string, env string) (ProxyConfigList, error) {\n\tvar pc ProxyConfigList\n\n\tendpoint := fmt.Sprintf(proxyConfigList, svcId, env)\n\treq, err := c.buildGetReq(endpoint)\n\tif err != nil {\n\t\treturn pc, httpReqError\n\t}\n\treq.Header.Set(\"Accept\", \"application/json\")\n\n\tvalues := url.Values{}\n\treq.URL.RawQuery = values.Encode()\n\n\tresp, err := c.httpClient.Do(req)\n\tif err != nil {\n\t\treturn pc, err\n\t}\n\n\tdefer resp.Body.Close()\n\n\terr = handleJsonResp(resp, http.StatusOK, &pc)\n\treturn pc, err\n}",
"func expandProxyOption(ctx context.Context, tconn *chrome.TestConn) error {\n\tapp := ossettings.New(tconn)\n\tif err := app.WaitUntilExists(ossettings.ShowProxySettingsTab)(ctx); err != nil {\n\t\treturn errors.Wrap(err, \"failed to find 'Shared networks' toggle button\")\n\t}\n\n\tif err := uiauto.Combine(\"expand 'Proxy' section\",\n\t\tapp.LeftClick(ossettings.ShowProxySettingsTab),\n\t\tapp.WaitForLocation(ossettings.SharedNetworksToggleButton),\n\t)(ctx); err != nil {\n\t\treturn err\n\t}\n\n\tif toggleInfo, err := app.Info(ctx, ossettings.SharedNetworksToggleButton); err != nil {\n\t\treturn errors.Wrap(err, \"failed to get toggle button info\")\n\t} else if toggleInfo.Checked == checked.True {\n\t\ttesting.ContextLog(ctx, \"'Allow proxies for shared networks' is already turned on\")\n\t\treturn nil\n\t}\n\n\treturn uiauto.Combine(\"turn on 'Allow proxies for shared networks' option\",\n\t\tapp.LeftClick(ossettings.SharedNetworksToggleButton),\n\t\tapp.LeftClick(ossettings.ConfirmButton),\n\t)(ctx)\n}",
"func WithConfig(cfg Config) Opt {\n\treturn func(t *Tortoise) {\n\t\tt.cfg = cfg\n\t}\n}",
"func OptConfig(cfg Config) Option {\n\treturn func(p *Profanity) {\n\t\tp.Config = cfg\n\t}\n}",
"func (proxyConfig *ProxyConfig) ParseConfig(proxyConfigMap ProxyConfigMap, k8sUtils k8sutils.UtilsInterface) error {\n\tproxyMode := proxyConfigMap.Mode\n\tproxyConfig.Mode = proxyMode\n\tproxyConfig.Port = proxyConfigMap.Port\n\tfmt.Printf(\"ConfigMap: %v\\n\", proxyConfigMap)\n\tif proxyMode == Linked {\n\t\tconfig := proxyConfigMap.LinkConfig\n\t\tif config == nil {\n\t\t\treturn fmt.Errorf(\"proxy mode is specified as Linked but unable to parse config\")\n\t\t}\n\t\tif config.Primary.URL == \"\" {\n\t\t\treturn fmt.Errorf(\"must provide Primary url\")\n\t\t}\n\t\tprimaryURL, err := url.Parse(config.Primary.URL)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tprimaryManagementServer := ManagementServer{\n\t\t\tURL: *primaryURL,\n\t\t\tSkipCertificateValidation: config.Primary.SkipCertificateValidation,\n\t\t\tLimits: config.Primary.Limits,\n\t\t}\n\t\tif config.Primary.CertSecret != \"\" {\n\t\t\tprimaryCertFile, err := k8sUtils.GetCertFileFromSecretName(config.Primary.CertSecret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tprimaryManagementServer.CertFile = primaryCertFile\n\t\t\tprimaryManagementServer.CertSecret = config.Primary.CertSecret\n\t\t}\n\t\tlinkedProxyConfig := LinkedProxyConfig{\n\t\t\tPrimary: &primaryManagementServer,\n\t\t}\n\t\tif config.Backup.URL != \"\" {\n\t\t\tbackupURL, err := url.Parse(config.Backup.URL)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tbackupManagementServer := ManagementServer{\n\t\t\t\tURL: *backupURL,\n\t\t\t\tSkipCertificateValidation: config.Backup.SkipCertificateValidation,\n\t\t\t\tLimits: config.Backup.Limits,\n\t\t\t}\n\t\t\tif config.Backup.CertSecret != \"\" {\n\t\t\t\tbackupCertFile, err := k8sUtils.GetCertFileFromSecretName(config.Backup.CertSecret)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tbackupManagementServer.CertFile = backupCertFile\n\t\t\t\tbackupManagementServer.CertSecret = config.Backup.CertSecret\n\t\t\t}\n\t\t\tlinkedProxyConfig.Backup = &backupManagementServer\n\t\t}\n\t\tproxyConfig.LinkProxyConfig = &linkedProxyConfig\n\t} else if proxyMode == StandAlone {\n\t\tconfig := proxyConfigMap.StandAloneConfig\n\t\tif config == nil {\n\t\t\treturn fmt.Errorf(\"proxy mode is specified as StandAlone but unable to parse config\")\n\t\t}\n\t\tvar proxy StandAloneProxyConfig\n\t\tproxy.managedArrays = make(map[string]*StorageArray)\n\t\tproxy.managementServers = make(map[url.URL]*ManagementServer)\n\t\tproxy.proxyCredentials = make(map[string]*ProxyUser)\n\t\tstorageArrayIdentifiers := make(map[url.URL][]string)\n\t\tipAddresses := make([]string, 0)\n\t\tfor _, mgmtServer := range config.ManagementServerConfig {\n\t\t\tipAddresses = append(ipAddresses, mgmtServer.URL)\n\t\t}\n\t\tfor _, array := range config.StorageArrayConfig {\n\t\t\tif array.PrimaryURL == \"\" {\n\t\t\t\treturn fmt.Errorf(\"primary URL not configured for array: %s\", array.StorageArrayID)\n\t\t\t}\n\t\t\tif !utils.IsStringInSlice(ipAddresses, array.PrimaryURL) {\n\t\t\t\treturn fmt.Errorf(\"primary URL: %s for array: %s not present among management URL addresses\",\n\t\t\t\t\tarray.PrimaryURL, array)\n\t\t\t}\n\t\t\tif array.BackupURL != \"\" {\n\t\t\t\tif !utils.IsStringInSlice(ipAddresses, array.BackupURL) {\n\t\t\t\t\treturn fmt.Errorf(\"backup URL: %s for array: %s is not in the list of management URL addresses. 
Ignoring it\",\n\t\t\t\t\t\tarray.BackupURL, array)\n\t\t\t\t}\n\t\t\t}\n\t\t\tprimaryURL, err := url.Parse(array.PrimaryURL)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tbackupURL := &url.URL{}\n\t\t\tif array.BackupURL != \"\" {\n\t\t\t\tbackupURL, err = url.Parse(array.BackupURL)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t\tproxy.managedArrays[array.StorageArrayID] = &StorageArray{\n\t\t\t\tStorageArrayIdentifier: array.StorageArrayID,\n\t\t\t\tPrimaryURL: *primaryURL,\n\t\t\t\tSecondaryURL: *backupURL,\n\t\t\t}\n\t\t\t// adding Primary and Backup URl to storageArrayIdentifier, later to be used in management server\n\t\t\tif _, ok := storageArrayIdentifiers[*primaryURL]; ok {\n\t\t\t\tstorageArrayIdentifiers[*primaryURL] = append(storageArrayIdentifiers[*primaryURL], array.StorageArrayID)\n\t\t\t} else {\n\t\t\t\tstorageArrayIdentifiers[*primaryURL] = []string{array.StorageArrayID}\n\t\t\t}\n\t\t\tif _, ok := storageArrayIdentifiers[*backupURL]; ok {\n\t\t\t\tstorageArrayIdentifiers[*backupURL] = append(storageArrayIdentifiers[*backupURL], array.StorageArrayID)\n\t\t\t} else {\n\t\t\t\tstorageArrayIdentifiers[*backupURL] = []string{array.StorageArrayID}\n\t\t\t}\n\n\t\t\t// Reading proxy credentials for the array\n\t\t\tif len(array.ProxyCredentialSecrets) > 0 {\n\t\t\t\tproxy.managedArrays[array.StorageArrayID].ProxyCredentialSecrets = make(map[string]ProxyCredentialSecret)\n\t\t\t\tfor _, secret := range array.ProxyCredentialSecrets {\n\t\t\t\t\tproxyCredentials, err := k8sUtils.GetCredentialsFromSecretName(secret)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t\tproxyCredentialSecret := &ProxyCredentialSecret{\n\t\t\t\t\t\tCredentials: *proxyCredentials,\n\t\t\t\t\t\tCredentialSecret: secret,\n\t\t\t\t\t}\n\t\t\t\t\tproxy.managedArrays[array.StorageArrayID].ProxyCredentialSecrets[secret] = *proxyCredentialSecret\n\t\t\t\t\tproxy.updateProxyCredentials(*proxyCredentials, array.StorageArrayID)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tfor _, managementServer := range config.ManagementServerConfig {\n\t\t\tvar arrayCredentials common.Credentials\n\t\t\tif managementServer.ArrayCredentialSecret != \"\" {\n\t\t\t\tcredentials, err := k8sUtils.GetCredentialsFromSecretName(managementServer.ArrayCredentialSecret)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tarrayCredentials = *credentials\n\t\t\t}\n\t\t\tmgmtURL, err := url.Parse(managementServer.URL)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tvar certFile string\n\t\t\tif managementServer.CertSecret != \"\" {\n\t\t\t\tcertFile, err = k8sUtils.GetCertFileFromSecretName(managementServer.CertSecret)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t\tproxy.managementServers[*mgmtURL] = &ManagementServer{\n\t\t\t\tURL: *mgmtURL,\n\t\t\t\tStorageArrayIdentifiers: storageArrayIdentifiers[*mgmtURL],\n\t\t\t\tSkipCertificateValidation: managementServer.SkipCertificateValidation,\n\t\t\t\tCertFile: certFile,\n\t\t\t\tCertSecret: managementServer.CertSecret,\n\t\t\t\tCredentials: arrayCredentials,\n\t\t\t\tCredentialSecret: managementServer.ArrayCredentialSecret,\n\t\t\t\tLimits: managementServer.Limits,\n\t\t\t}\n\t\t}\n\t\tproxyConfig.StandAloneProxyConfig = &proxy\n\t} else {\n\t\treturn fmt.Errorf(\"unknown proxy mode: %s specified\", string(proxyMode))\n\t}\n\tif proxyConfig.LinkProxyConfig == nil && proxyConfig.StandAloneProxyConfig == nil {\n\t\treturn fmt.Errorf(\"no configuration provided for the proxy\")\n\t}\n\treturn 
nil\n}",
"func WithConfig(c *Config) OptionFunc {\n\treturn func(b *Bot) {\n\t\tb.conf = c\n\t}\n}",
"func NewProxy(opts ...ProxyOption) (*Proxy, error) {\n\tvar err error\n\tp := Proxy{\n\t\tHeaders: http.Header{},\n\t}\n\tfor _, opt := range opts {\n\t\tif err = opt(&p); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn &p, nil\n}",
"func newReverseProxy(cfg *model.Config) *httputil.ReverseProxy {\n\tdirector := func(req *http.Request) {}\n\treturn &httputil.ReverseProxy{\n\t\tDirector: director,\n\t\tTransport: &http.Transport{\n\t\t\t// Proxy: func(req *http.Request) (*url.URL, error) {\n\t\t\t// \treturn http.ProxyFromEnvironment(req)\n\t\t\t// },\n\t\t\tDial: func(network, addr string) (net.Conn, error) {\n\n\t\t\t\tmaxTries := 3\n\t\t\t\twaitFor := time.Millisecond * time.Duration(1000)\n\n\t\t\t\tvar err error\n\t\t\t\tvar conn net.Conn\n\t\t\t\tfor tries := 0; tries < maxTries; tries++ {\n\n\t\t\t\t\tconn, err = (&net.Dialer{\n\t\t\t\t\t\tTimeout: 30 * time.Second,\n\t\t\t\t\t\tKeepAlive: 30 * time.Second,\n\t\t\t\t\t}).Dial(network, addr)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tlogrus.Warnf(\"Dial failed, retrying (%s)\", err.Error())\n\t\t\t\t\t\ttime.Sleep(waitFor)\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\treturn conn, err\n\t\t\t},\n\t\t\t// TLSHandshakeTimeout: 10 * time.Second,\n\t\t},\n\t}\n}",
"func Proxy(proxy func(*http.Request) (*url.URL, error)) func(*Attacker) {\n\treturn func(a *Attacker) {\n\t\ttr := a.client.Transport.(*http.Transport)\n\t\ttr.Proxy = proxy\n\t}\n}",
"func (p *Proxy) Config(conf ConfData) {\n\tp.configMutex.Lock()\n\tif conf.DestAddr.String() != p.destAddr.String() {\n\t\tp.connMutex.Lock()\n\t\tp.destAddr = conf.DestAddr\n\t\tp.connMutex.Unlock()\n\t}\n\tp.configMutex.Unlock()\n}",
"func SetProxy(p string, verifyssl bool) {\n\tif p != \"\" && p != \"none\" {\n\t\tproxyUrl, _ := url.Parse(p)\n\t\tclient = &http.Client{Transport: &http.Transport{Proxy: http.ProxyURL(proxyUrl), TLSClientConfig: &tls.Config{InsecureSkipVerify: verifyssl}}}\n\t} else {\n\t\tclient = &http.Client{Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: verifyssl}}}\n\t}\n}",
"func (p *Proxier) WithProxyDB(proxyDB proxy.ProxyDB) *Proxier {\n\tp.proxyDB = proxyDB\n\treturn p\n}",
"func (m *MockProxyClient) ProxyFlowConfig(ctx context.Context, in *ProxyFlowConfigRequestMsg, opts ...grpc.CallOption) (*ProxyResponseMsg, error) {\n\tvarargs := []interface{}{ctx, in}\n\tfor _, a := range opts {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"ProxyFlowConfig\", varargs...)\n\tret0, _ := ret[0].(*ProxyResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}",
"func OptionEnableProxyProtocol(enabled bool, subnet string) Option {\n\treturn func(cfg *gwconfig) {\n\t\tcfg.proxyProtocolEnabled = enabled\n\t\tcfg.proxyProtocolSubnet = subnet\n\t}\n}",
"func newProxy(config *Config) (*oauthProxy, error) {\n\t// create the service logger\n\tlog, err := createLogger(config)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlog.Info(\"starting the service\", zap.String(\"prog\", prog), zap.String(\"author\", author), zap.String(\"version\", version))\n\tsvc := &oauthProxy{\n\t\tconfig: config,\n\t\tlog: log,\n\t\tmetricsHandler: prometheus.Handler(),\n\t}\n\n\t// parse the upstream endpoint\n\tif svc.endpoint, err = url.Parse(config.Upstream); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// initialize the store if any\n\tif config.StoreURL != \"\" {\n\t\tif svc.store, err = createStorage(config.StoreURL); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// initialize the openid client\n\tif !config.SkipTokenVerification {\n\t\tif svc.client, svc.idp, svc.idpClient, err = svc.newOpenIDClient(); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t} else {\n\t\tlog.Warn(\"TESTING ONLY CONFIG - the verification of the token have been disabled\")\n\t}\n\n\tif config.ClientID == \"\" && config.ClientSecret == \"\" {\n\t\tlog.Warn(\"client credentials are not set, depending on provider (confidential|public) you might be unable to auth\")\n\t}\n\n\t// are we running in forwarding mode?\n\tif config.EnableForwarding {\n\t\tif err := svc.createForwardingProxy(); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t} else {\n\t\tif err := svc.createReverseProxy(); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn svc, nil\n}",
"func (p *proxySettings) buildProxySettings(proxyListenerMode types.ProxyListenerMode) *webclient.ProxySettings {\n\tproxySettings := webclient.ProxySettings{\n\t\tTLSRoutingEnabled: proxyListenerMode == types.ProxyListenerMode_Multiplex,\n\t\tAssistEnabled: p.cfg.Proxy.AssistAPIKey != \"\",\n\t\tKube: webclient.KubeProxySettings{\n\t\t\tEnabled: p.cfg.Proxy.Kube.Enabled,\n\t\t},\n\t\tSSH: webclient.SSHProxySettings{\n\t\t\tListenAddr: p.proxySSHAddr.String(),\n\t\t\tTunnelListenAddr: p.cfg.Proxy.ReverseTunnelListenAddr.String(),\n\t\t\tWebListenAddr: p.cfg.Proxy.WebAddr.String(),\n\t\t},\n\t}\n\n\tp.setProxyPublicAddressesSettings(&proxySettings)\n\n\tif !p.cfg.Proxy.MySQLAddr.IsEmpty() {\n\t\tproxySettings.DB.MySQLListenAddr = p.cfg.Proxy.MySQLAddr.String()\n\t}\n\n\tif !p.cfg.Proxy.PostgresAddr.IsEmpty() {\n\t\tproxySettings.DB.PostgresListenAddr = p.cfg.Proxy.PostgresAddr.String()\n\t}\n\n\tif !p.cfg.Proxy.MongoAddr.IsEmpty() {\n\t\tproxySettings.DB.MongoListenAddr = p.cfg.Proxy.MongoAddr.String()\n\t}\n\n\tif p.cfg.Proxy.Kube.Enabled {\n\t\tproxySettings.Kube.ListenAddr = p.cfg.Proxy.Kube.ListenAddr.String()\n\t}\n\treturn &proxySettings\n}",
"func (a *Agent) applyProxyConfigDefaults(p *structs.ConnectManagedProxy) (map[string]interface{}, error) {\n\tif p == nil || p.ProxyService == nil {\n\t\t// Should never happen but protect from panic\n\t\treturn nil, fmt.Errorf(\"invalid proxy state\")\n\t}\n\n\t// Lookup the target service\n\ttarget := a.State.Service(p.TargetServiceID)\n\tif target == nil {\n\t\t// Can happen during deregistration race between proxy and scheduler.\n\t\treturn nil, fmt.Errorf(\"unknown target service ID: %s\", p.TargetServiceID)\n\t}\n\n\t// Merge globals defaults\n\tconfig := make(map[string]interface{})\n\tfor k, v := range a.config.ConnectProxyDefaultConfig {\n\t\tif _, ok := config[k]; !ok {\n\t\t\tconfig[k] = v\n\t\t}\n\t}\n\n\t// Copy config from the proxy\n\tfor k, v := range p.Config {\n\t\tconfig[k] = v\n\t}\n\n\t// Set defaults for anything that is still not specified but required.\n\t// Note that these are not included in the content hash. Since we expect\n\t// them to be static in general but some like the default target service\n\t// port might not be. In that edge case services can set that explicitly\n\t// when they re-register which will be caught though.\n\tif _, ok := config[\"bind_port\"]; !ok {\n\t\tconfig[\"bind_port\"] = p.ProxyService.Port\n\t}\n\tif _, ok := config[\"bind_address\"]; !ok {\n\t\t// Default to binding to the same address the agent is configured to\n\t\t// bind to.\n\t\tconfig[\"bind_address\"] = a.config.BindAddr.String()\n\t}\n\tif _, ok := config[\"local_service_address\"]; !ok {\n\t\t// Default to localhost and the port the service registered with\n\t\tconfig[\"local_service_address\"] = fmt.Sprintf(\"127.0.0.1:%d\", target.Port)\n\t}\n\n\t// Basic type conversions for expected types.\n\tif raw, ok := config[\"bind_port\"]; ok {\n\t\tswitch v := raw.(type) {\n\t\tcase float64:\n\t\t\t// Common since HCL/JSON parse as float64\n\t\t\tconfig[\"bind_port\"] = int(v)\n\n\t\t\t// NOTE(mitchellh): No default case since errors and validation\n\t\t\t// are handled by the ServiceDefinition.Validate function.\n\t\t}\n\t}\n\n\treturn config, nil\n}",
"func (d *Dialer) DialProxyContext(ctx context.Context) (net.Conn, error) {\n\treturn d.DialContext(ctx, \"tcp\", \"\")\n}",
"func InjectConfig(readOrigin func() ([]byte, error),\n\twriteInjected func([]byte) error,\n\tproxyURL string) (origin *config.Config, injected *config.Config, err error) {\n\tcfgData, err := readOrigin()\n\tif err != nil {\n\t\treturn nil, nil, errors.Wrap(err, \"read origin file failed\")\n\t}\n\n\torigin = &config.Config{}\n\tinjected = &config.Config{}\n\tif err := yaml.Unmarshal(cfgData, &origin); err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tif err := yaml.Unmarshal(cfgData, &injected); err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tfor _, job := range injected.ScrapeConfigs {\n\t\tif job.Params == nil {\n\t\t\tjob.Params = map[string][]string{}\n\t\t}\n\t\tjob.Params[jobNameFormName] = []string{job.JobName}\n\n\t\tu, _ := url.Parse(proxyURL)\n\t\tjob.HTTPClientConfig.ProxyURL = config_util.URL{\n\t\t\tURL: u,\n\t\t}\n\t\tjob.Scheme = \"http\"\n\t}\n\n\tgen, err := yaml.Marshal(&injected)\n\tif err != nil {\n\t\treturn nil, nil, errors.Wrapf(err, \"marshal config failed\")\n\t}\n\n\tif err := writeInjected(gen); err != nil {\n\t\treturn nil, nil, errors.Wrapf(err, \"write file failed\")\n\t}\n\n\treturn origin, injected, nil\n}",
"func TransportWithProxyTarget(t *http.Transport, fixedProxyUrl string, fixedProxyTarget string) *http.Transport {\n\tt.Proxy = ProxyFuncWithTargetOrDefault(fixedProxyUrl, fixedProxyTarget, t.Proxy)\n\treturn t\n}",
"func (f CoolProxy) ProxyMode() types.ProxyMode {\n\treturn types.MasterProxy\n}",
"func (cfg *BaseProxyConf) decorate(prefix string, name string, section *ini.Section) error {\n\t// proxy_name\n\tcfg.ProxyName = prefix + name\n\n\t// metas_xxx\n\tcfg.Metas = GetMapWithoutPrefix(section.KeysHash(), \"meta_\")\n\n\t// bandwidth_limit\n\tif bandwidth, err := section.GetKey(\"bandwidth_limit\"); err == nil {\n\t\tcfg.BandwidthLimit, err = NewBandwidthQuantity(bandwidth.String())\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// plugin_xxx\n\tcfg.LocalSvrConf.PluginParams = GetMapByPrefix(section.KeysHash(), \"plugin_\")\n\n\t// custom logic code\n\tif cfg.HealthCheckType == \"tcp\" && cfg.Plugin == \"\" {\n\t\tcfg.HealthCheckAddr = cfg.LocalIP + fmt.Sprintf(\":%d\", cfg.LocalPort)\n\t}\n\n\tif cfg.HealthCheckType == \"http\" && cfg.Plugin == \"\" && cfg.HealthCheckURL != \"\" {\n\t\ts := \"http://\" + net.JoinHostPort(cfg.LocalIP, strconv.Itoa(cfg.LocalPort))\n\t\tif !strings.HasPrefix(cfg.HealthCheckURL, \"/\") {\n\t\t\ts += \"/\"\n\t\t}\n\t\tcfg.HealthCheckURL = s + cfg.HealthCheckURL\n\t}\n\n\treturn nil\n}",
"func proxy(w http.ResponseWriter, r *http.Request) {\n\tproxy := httputil.NewSingleHostReverseProxy(&serverConfig.ProxyURL)\n\tproxy.ServeHTTP(w, r)\n}",
"func (o KubernetesClusterHttpProxyConfigOutput) HttpProxy() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v KubernetesClusterHttpProxyConfig) *string { return v.HttpProxy }).(pulumi.StringPtrOutput)\n}",
"func (o MustGatherSpecProxyConfigPtrOutput) HttpProxy() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *MustGatherSpecProxyConfig) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.HttpProxy\n\t}).(pulumi.StringPtrOutput)\n}",
"func (c *ThreeScaleClient) PromoteProxyConfig(svcId string, env string, version string, toEnv string) (ProxyConfigElement, error) {\n\tvar pe ProxyConfigElement\n\tendpoint := fmt.Sprintf(proxyConfigPromote, svcId, env, version)\n\n\tvalues := url.Values{}\n\tvalues.Add(\"to\", toEnv)\n\n\tbody := strings.NewReader(values.Encode())\n\treq, err := c.buildPostReq(endpoint, body)\n\tif err != nil {\n\t\treturn pe, httpReqError\n\t}\n\n\tresp, err := c.httpClient.Do(req)\n\tif err != nil {\n\t\treturn pe, err\n\t}\n\n\tdefer resp.Body.Close()\n\n\terr = handleJsonResp(resp, http.StatusCreated, &pe)\n\treturn pe, err\n}",
"func Proxy(c *gin.Context) {\n\tc.Request.RequestURI = \"\"\n\tnewURLString := fmt.Sprintf(\"http://%s%s?%s\", os.Getenv(\"GUARTZ_HOST\"), c.Request.URL.Path, c.Request.URL.RawQuery)\n\tnewURL, err := url.Parse(newURLString)\n\n\tc.Request.URL = newURL\n\tclient := http.Client{}\n\tresp, err := client.Do(c.Request)\n\tif err != nil {\n\t\tc.JSON(http.StatusInternalServerError, \"\")\n\t\treturn\n\t}\n\tjsonDataFromHTTP, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\tc.JSON(http.StatusInternalServerError, \"\")\n\t\treturn\n\t}\n\tc.Header(\"Content-Type\", \"application/json\")\n\tc.Writer.WriteHeader(resp.StatusCode)\n\tc.Writer.Write(jsonDataFromHTTP)\n}",
"func NewProxy() *ProxyBuilder {\n\treturn &ProxyBuilder{}\n}",
"func (p *proxySettings) buildProxySettingsV2(proxyListenerMode types.ProxyListenerMode) *webclient.ProxySettings {\n\tmultiplexAddr := p.cfg.Proxy.WebAddr.String()\n\tsettings := p.buildProxySettings(proxyListenerMode)\n\tif proxyListenerMode == types.ProxyListenerMode_Multiplex {\n\t\tsettings.SSH.ListenAddr = multiplexAddr\n\t\tsettings.SSH.TunnelListenAddr = multiplexAddr\n\t\tsettings.SSH.WebListenAddr = multiplexAddr\n\t\tsettings.Kube.ListenAddr = multiplexAddr\n\t\tsettings.DB.MySQLListenAddr = multiplexAddr\n\t\tsettings.DB.PostgresListenAddr = multiplexAddr\n\t}\n\treturn settings\n}",
"func NewProxy(c *Config) (p Proxy, err error) {\n\n\tproxyPaths := []string{\"go-mlbam-proxy\", \"go-mlbam-proxy/go-mlbam-proxy\", \"/usr/local/bin/go-mlbam-proxy\"}\n\tfor _, path := range proxyPaths {\n\t\tif p.path, err = exec.LookPath(path); err == nil {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif p.path == \"\" {\n\t\terr = errors.New(\"unable to find go-mlbam-proxy in path\")\n\t\treturn\n\t}\n\n\tp.domain = c.Proxy.Domain\n\tp.sourceDomains = c.Proxy.SourceDomains\n\tp.port = \"9876\"\n\n\tp.cmd = exec.Command(p.path, \"-d\", p.domain, \"-p\", p.port, \"-s\", p.sourceDomains)\n\tp.cmd.Env = os.Environ()\n\n\tlog.WithFields(log.Fields{\n\t\t\"path\": p.path,\n\t\t\"domain\": p.domain,\n\t\t\"sourceDomains\": p.sourceDomains,\n\t\t\"port\": p.port,\n\t}).Debug(\"NewProxy\")\n\n\treturn\n}",
"func GetConf() ProxyConf {\n\treturn getConf()\n}",
"func WithLogger(logger Logger) Option {\n\treturn func(p *proxy) {\n\t\tp.logger = logger\n\t}\n}",
"func WrapRoundTripper(f func(http.RoundTripper) http.RoundTripper) ConfigOpt {\n\treturn func(c *Config) {\n\t\tif f != nil {\n\t\t\tif rt := f(c.client.Transport); rt != nil {\n\t\t\t\tc.client.Transport = rt\n\t\t\t}\n\t\t}\n\t}\n}",
"func NewProxy(opts ...options.Option) proxy.Proxy {\n\tp := new(Proxy)\n\tp.Links = map[string]client.Client{}\n\tp.Options = options.NewOptions(opts...)\n\tp.Options.Init(options.WithString(\"mucp\"))\n\n\t// get endpoint\n\tep, ok := p.Options.Values().Get(\"proxy.endpoint\")\n\tif ok {\n\t\tp.Endpoint = ep.(string)\n\t}\n\n\t// get client\n\tc, ok := p.Options.Values().Get(\"proxy.client\")\n\tif ok {\n\t\tp.Client = c.(client.Client)\n\t}\n\n\t// set the default client\n\tif p.Client == nil {\n\t\tp.Client = mucp.NewClient()\n\t}\n\n\t// get client\n\tlinks, ok := p.Options.Values().Get(\"proxy.links\")\n\tif ok {\n\t\tp.Links = links.(map[string]client.Client)\n\t}\n\n\t// get router\n\tr, ok := p.Options.Values().Get(\"proxy.router\")\n\tif ok {\n\t\tp.Router = r.(router.Router)\n\t}\n\n\t// create default router and start it\n\tif p.Router == nil {\n\t\tp.Router = router.DefaultRouter\n\t}\n\n\t// routes cache\n\tp.Routes = make(map[string]map[uint64]router.Route)\n\n\tgo func() {\n\t\t// continuously attempt to watch routes\n\t\tfor {\n\t\t\t// watch the routes\n\t\t\tp.watchRoutes()\n\t\t\t// in case of failure just wait a second\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}()\n\n\tgo func() {\n\t\t// TODO: speed up refreshing of metrics\n\t\t// without this ticking effort e.g stream\n\t\tt := time.NewTicker(time.Second * 10)\n\t\tdefer t.Stop()\n\n\t\t// we must refresh route metrics since they do not trigger new events\n\t\tfor range t.C {\n\t\t\t// refresh route metrics\n\t\t\tp.refreshMetrics()\n\t\t}\n\t}()\n\n\treturn p\n}",
"func DefaultProxyConf(proxyType string) ProxyConf {\n\tconf := NewConfByType(proxyType)\n\tif conf != nil {\n\t\tconf.SetDefaultValues()\n\t}\n\treturn conf\n}",
"func (vaultInjector *VaultInjector) proxyMode(annotations map[string]string) (string, error) {\n\tproxyPort := annotations[vaultInjector.VaultInjectorAnnotationsFQ[vaultInjectorAnnotationProxyPortKey]]\n\n\tif proxyPort == \"\" { // Default port\n\t\tproxyPort = vaultProxyDefaultPort\n\t}\n\n\tproxyConfig := strings.Replace(vaultInjector.ProxyConfig, vaultProxyPortPlaceholder, proxyPort, -1)\n\n\treturn proxyConfig, nil\n}",
"func (F *Frisby) SetProxy(url string) *Frisby {\n\tF.Req.Proxy = url\n\treturn F\n}",
"func GetConfig() ProxyConfig {\n\tonce.Do(InitConfig)\n\treturn conf\n}",
"func wrapConfigFn(cf *CLIConf) func(c *rest.Config) *rest.Config {\n\treturn func(c *rest.Config) *rest.Config {\n\t\tc.Wrap(\n\t\t\tfunc(rt http.RoundTripper) http.RoundTripper {\n\t\t\t\tif cf.SampleTraces {\n\t\t\t\t\t// If the user wants to sample traces, wrap the transport with a trace\n\t\t\t\t\t// transport.\n\t\t\t\t\treturn tracehttp.NewTransport(rt)\n\t\t\t\t}\n\t\t\t\treturn rt\n\t\t\t},\n\t\t)\n\t\treturn c\n\t}\n}",
"func WithConfig(config string) func(*Client) error {\n\treturn func(c *Client) error {\n\t\tc.config = config\n\t\treturn nil\n\t}\n}",
"func CreateProxyClient(config ProxyConfig) (ProxyClient, error) {\n\tswitch config.Protocol {\n\tcase \"direct\":\n\t\tif config.Transport != nil {\n\t\t\treturn nil, errors.New(\n\t\t\t\t\"'direct' protocol should not have any transport setting\")\n\t\t}\n\t\tif len(config.Settings) > 0 {\n\t\t\treturn nil, errors.New(\n\t\t\t\t\"'direct' protocol should not have any extra setting\")\n\t\t}\n\t\treturn DirectTCPClient{}, nil\n\n\tcase \"http\":\n\t\tif config.Transport != nil {\n\t\t\treturn nil, errors.New(\n\t\t\t\t\"'http' protocol should not have any transport setting\")\n\t\t}\n\t\taddr, ok := config.Settings[\"address\"]\n\t\tif !ok || len(config.Settings) != 1 {\n\t\t\treturn nil, errors.New(\n\t\t\t\t\"'http' protocol should have one and only one\" +\n\t\t\t\t\t\" extra setting 'address'\")\n\t\t}\n\t\tif addrStr, ok := addr.(string); ok {\n\t\t\treturn HTTPTunnelClient{addrStr}, nil\n\t\t}\n\t\treturn nil, errors.New(\"a valid 'address' must be supplied\")\n\n\tcase \"socks5\":\n\t\treturn NewSOCKS5Client(config)\n\n\tdefault:\n\t\treturn nil, errors.New(\"unknown proxy protocol: \" + config.Protocol)\n\t}\n}",
"func (c Client) WithConfig(conf *Config) Client {\n\tif conf.DefaultHeaders != nil {\n\t\tc.headers = conf.DefaultHeaders\n\t}\n\tif conf.DefaultQueryParams != nil {\n\t\tc.queryParams = conf.DefaultQueryParams\n\t}\n\tc.baseURL = conf.BaseURL\n\n\treturn c\n}",
"func New(cfg config.Proxy, bp httputil.BufferPool, prov service.Authorizationd) http.Handler {\n\tscheme := \"http\"\n\tif cfg.Scheme != \"\" {\n\t\tscheme = cfg.Scheme\n\t}\n\n\thost := fmt.Sprintf(\"%s:%d\", cfg.Host, cfg.Port)\n\n\treturn &httputil.ReverseProxy{\n\t\tBufferPool: bp,\n\t\tDirector: func(r *http.Request) {\n\t\t\tu := *r.URL\n\t\t\tu.Scheme = scheme\n\t\t\tu.Host = host\n\t\t\treq, err := http.NewRequest(r.Method, u.String(), r.Body)\n\t\t\tif err != nil {\n\t\t\t\tglg.Error(errors.Wrap(err, \"NewRequest returned error\"))\n\t\t\t\tr.URL.Scheme = scheme\n\t\t\t\treturn\n\t\t\t}\n\t\t\treq.Header = r.Header\n\t\t\treq.TLS = r.TLS\n\t\t\tif cfg.PreserveHost {\n\t\t\t\treq.Host = r.Host\n\t\t\t\tglg.Debugf(\"proxy.PreserveHost enabled, forward host header: %s\\n\", req.Host)\n\t\t\t}\n\t\t\tif cfg.ForceContentLength {\n\t\t\t\treq.ContentLength = r.ContentLength\n\t\t\t\treq.TransferEncoding = r.TransferEncoding\n\t\t\t\tglg.Debugf(\"proxy.ForceContentLength enabled, forward content-length header: %d\\n\", req.ContentLength)\n\t\t\t}\n\n\t\t\t*r = *req\n\t\t},\n\t\tTransport: &transport{\n\t\t\tprov: prov,\n\t\t\tRoundTripper: transportFromCfg(cfg.Transport),\n\t\t\tcfg: cfg,\n\t\t},\n\t\tErrorHandler: handleError,\n\t}\n}",
"func WithAddr(addr string) Option {\n\treturn func(c *gatewayClient) {\n\t\tif len(addr) != 0 {\n\t\t\tc.addr = addr\n\t\t}\n\t}\n}",
"func WithAddr(addr string) Option {\n\treturn func(c *config) {\n\t\tc.ListenAddr = addr\n\t\tc.LinkAddr = addr\n\t}\n}",
"func WithLogger(logger *zap.Logger) Option {\n\treturn func(p *proxy) {\n\t\tp.logger = logger\n\t}\n}",
"func ConfigureReverseProxy(S *server.SimpleServer, Client *client.SimpleClient, logger *log.Logger, RouteMatcher ReverseProxyRouterFunc, PathPrefix string) *server.SimpleServer {\n\n\t// If No server is provided, create a default HTTP Server.\n\tvar err error\n\tif S == nil {\n\t\tS = server.NewServerHTTP()\n\t}\n\n\t// Assert a non-empty path prefix to proxy on\n\tif PathPrefix == \"\" {\n\t\tPathPrefix = \"/\"\n\t}\n\n\t// If there's no logger provided, use the one from the server\n\tif logger == nil {\n\t\tlogger = S.Logger()\n\t}\n\n\t// If no client is given, attempt to create one, using any TLS resources the potential server had.\n\tif Client == nil {\n\t\tClient, err = client.NewClientHTTPS(S.TLSBundle())\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tClient.SetLogger(logger)\n\t}\n\n\tS.AddSubrouter(\n\t\tS.Router(),\n\t\tPathPrefix,\n\t\tserver.NewSimpleHandler(\n\t\t\tDoReverseProxy(\n\t\t\t\tClient,\n\t\t\t\tRouteMatcher,\n\t\t\t\tlogger,\n\t\t\t),\n\t\t\tPathPrefix,\n\t\t),\n\t)\n\n\treturn S\n}",
"func createProxy(client client.Client) error {\n\tproxy := &configv1.Proxy{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: \"cluster\",\n\t\t},\n\t}\n\treturn client.Default().CRClient().Create(context.TODO(), proxy)\n}",
"func NewProxy(\n\ttenant *clients.Tenant, wit clients.WIT, idler clients.IdlerService,\n\tstorageService storage.Store,\n\tconfig configuration.Configuration,\n\tclusters map[string]string) (Proxy, error) {\n\n\tp := Proxy{\n\t\tTenantCache: cache.New(30*time.Minute, 40*time.Minute),\n\t\tProxyCache: cache.New(15*time.Minute, 10*time.Minute),\n\t\tvisitLock: &sync.Mutex{},\n\t\ttenant: tenant,\n\t\twit: wit,\n\t\tidler: idler,\n\t\tbufferCheckSleep: 30 * time.Second,\n\t\tredirect: config.GetRedirectURL(),\n\t\tresponseTimeout: config.GetGatewayTimeout(),\n\t\tauthURL: config.GetAuthURL(),\n\t\tstorageService: storageService,\n\t\tindexPath: config.GetIndexPath(),\n\t\tmaxRequestRetry: config.GetMaxRequestRetry(),\n\t\tclusters: clusters,\n\t}\n\n\t//Initialize metrics\n\tRecorder.Initialize()\n\n\t//Spawn a routine to process buffered requests\n\tgo func() {\n\t\tp.ProcessBuffer()\n\t}()\n\treturn p, nil\n}",
"func (o MustGatherSpecProxyConfigOutput) NoProxy() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v MustGatherSpecProxyConfig) *string { return v.NoProxy }).(pulumi.StringPtrOutput)\n}",
"func WrapConfig(hfn http.HandlerFunc, cfg *config.APICfg, brk brokers.Broker, str stores.Store, mgr *oldPush.Manager, c push.Client) http.HandlerFunc {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\n\t\tnStr := str.Clone()\n\t\tdefer nStr.Close()\n\t\tgorillaContext.Set(r, \"brk\", brk)\n\t\tgorillaContext.Set(r, \"str\", nStr)\n\t\tgorillaContext.Set(r, \"mgr\", mgr)\n\t\tgorillaContext.Set(r, \"apsc\", c)\n\t\tgorillaContext.Set(r, \"auth_resource\", cfg.ResAuth)\n\t\tgorillaContext.Set(r, \"auth_service_token\", cfg.ServiceToken)\n\t\tgorillaContext.Set(r, \"push_worker_token\", cfg.PushWorkerToken)\n\t\tgorillaContext.Set(r, \"push_enabled\", cfg.PushEnabled)\n\t\thfn.ServeHTTP(w, r)\n\n\t})\n}",
"func (v *Verifier) Proxy(proxyURI string) *Verifier {\n\tv.proxyURI = proxyURI\n\treturn v\n}",
"func (c *services) ProxyGet(scheme, name, port, path string, params map[string]string) rest.ResponseWrapper {\n\trequest := c.client.Get().\n\t\tPrefix(\"proxy\").\n\t\tNamespace(c.ns).\n\t\tResource(\"services\").\n\t\tName(net.JoinSchemeNamePort(scheme, name, port)).\n\t\tSuffix(path)\n\tfor k, v := range params {\n\t\trequest = request.Param(k, v)\n\t}\n\treturn request\n}",
"func WithConfig(config Config) Option {\n\treturn func(d *D) {\n\t\td.config = config\n\t}\n}",
"func (f Function) Proxy() ProxyFunc {\n\treturn func(args ...cty.Value) (cty.Value, error) {\n\t\treturn f.Call(args)\n\t}\n}"
] | [
"0.7345297",
"0.7208164",
"0.71902514",
"0.71116483",
"0.7076825",
"0.69628906",
"0.6940307",
"0.69159514",
"0.6877981",
"0.67587405",
"0.65878874",
"0.6566005",
"0.64126563",
"0.6381108",
"0.6376971",
"0.63682663",
"0.63555306",
"0.6329982",
"0.62516534",
"0.6219726",
"0.6211791",
"0.61199754",
"0.611863",
"0.61041814",
"0.61041814",
"0.6088134",
"0.6067136",
"0.6052557",
"0.6047807",
"0.6027084",
"0.60230714",
"0.5935426",
"0.5866358",
"0.5864655",
"0.5846325",
"0.5837703",
"0.5828808",
"0.58060277",
"0.5776764",
"0.5773886",
"0.5711876",
"0.57116073",
"0.57112455",
"0.56908286",
"0.56842715",
"0.56688344",
"0.5666933",
"0.56572366",
"0.5654574",
"0.5651097",
"0.56446433",
"0.5623931",
"0.5620989",
"0.5609446",
"0.55964816",
"0.55895984",
"0.558689",
"0.5577544",
"0.5575825",
"0.55748457",
"0.5570804",
"0.5559173",
"0.55495876",
"0.55464035",
"0.553438",
"0.5523503",
"0.5521388",
"0.5501502",
"0.549842",
"0.5481555",
"0.5441999",
"0.54365116",
"0.5423758",
"0.54046",
"0.5401908",
"0.5392471",
"0.5391947",
"0.5382508",
"0.5380778",
"0.53702044",
"0.5365428",
"0.5363899",
"0.5362292",
"0.5344058",
"0.5341475",
"0.53379387",
"0.5333963",
"0.53217787",
"0.5307933",
"0.53001845",
"0.5297336",
"0.52963847",
"0.5282929",
"0.52721304",
"0.5270326",
"0.526657",
"0.5262595",
"0.52557623",
"0.5253395",
"0.5251889"
] | 0.7866575 | 0 |
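Aside on the row above: the configuration loader in its document column buckets storage array IDs under their parsed management URL by keying a map with url.URL values (storageArrayIdentifiers). The following minimal, self-contained sketch shows just that grouping pattern; the array IDs and management URLs are invented for illustration and are not part of the dataset.

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Hypothetical array IDs mapped to their primary management URLs.
	arrays := map[string]string{
		"array-001": "https://mgmt-1.example.com:8443",
		"array-002": "https://mgmt-1.example.com:8443",
		"array-003": "https://mgmt-2.example.com:8443",
	}

	// Group array IDs under their parsed URL, as the loader does with
	// storageArrayIdentifiers before wiring up management servers.
	storageArrayIdentifiers := make(map[url.URL][]string)
	for id, raw := range arrays {
		u, err := url.Parse(raw)
		if err != nil {
			fmt.Println("skipping", id, "-", err)
			continue
		}
		// append on a missing key starts from a nil slice, so this is
		// equivalent to the explicit existence check in the loader above.
		storageArrayIdentifiers[*u] = append(storageArrayIdentifiers[*u], id)
	}

	for u, ids := range storageArrayIdentifiers {
		fmt.Printf("%s -> %v\n", u.String(), ids)
	}
}

Because append on a nil slice allocates, the explicit `if _, ok := ...` check in the loader is not required for correctness; keeping it only makes the create-or-append intent more visible.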
WithRoleTokenConfig returns a role token config functional option | func WithRoleTokenConfig(cfg config.RoleToken) GRPCOption {
return func(h *GRPCHandler) {
h.roleCfg = cfg
}
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (h *handler) RoleToken(w http.ResponseWriter, r *http.Request) error {\n\tdefer flushAndClose(r.Body)\n\n\tvar data model.RoleRequest\n\terr := json.NewDecoder(r.Body).Decode(&data)\n\tif err != nil {\n\t\treturn err\n\t}\n\ttok, err := h.role(r.Context(), data.Domain, data.Role, data.ProxyForPrincipal, data.MinExpiry, data.MaxExpiry)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tw.Header().Set(\"Content-type\", \"application/json; charset=utf-8\")\n\treturn json.NewEncoder(w).Encode(tok)\n}",
"func WithToken(t string) OptionFunc {\n\treturn func(b *Bot) {\n\t\tb.conf.Token = t\n\t}\n}",
"func AssumeRoleTokenProvider(provider func() (string, error)) SessionOption {\n\treturn func(options *session.Options) {\n\t\toptions.AssumeRoleTokenProvider = provider\n\t}\n}",
"func TokenConfig(globalConfig *viper.Viper) (*token.Config, error) {\n\tsub := subconfig(globalConfig, tokenConfigKey)\n\treturn token.Initialize(sub)\n}",
"func WithToken(token string) {\n\tcfg.token = token\n\tcfg.Password = \"\"\n\tcfg.UserName = \"\"\n}",
"func WithTokenFile(roleARN, path string) Option {\n\treturn func(s *DynamoDB) {\n\t\ts.roleARN = roleARN\n\t\ts.tokenFile = path\n\t}\n}",
"func withRole(node *Role) roleOption {\n\treturn func(m *RoleMutation) {\n\t\tm.oldValue = func(context.Context) (*Role, error) {\n\t\t\treturn node, nil\n\t\t}\n\t\tm.id = &node.ID\n\t}\n}",
"func withRole(node *Role) roleOption {\n\treturn func(m *RoleMutation) {\n\t\tm.oldValue = func(context.Context) (*Role, error) {\n\t\t\treturn node, nil\n\t\t}\n\t\tm.id = &node.ID\n\t}\n}",
"func withRole(node *Role) roleOption {\n\treturn func(m *RoleMutation) {\n\t\tm.oldValue = func(context.Context) (*Role, error) {\n\t\t\treturn node, nil\n\t\t}\n\t\tm.id = &node.ID\n\t}\n}",
"func (h *handler) RoleTokenProxy(w http.ResponseWriter, r *http.Request) error {\n\tdefer flushAndClose(r.Body)\n\n\trole := r.Header.Get(\"Athenz-Role\")\n\tdomain := r.Header.Get(\"Athenz-Domain\")\n\tprincipal := r.Header.Get(\"Athenz-Proxy-Principal\")\n\ttok, err := h.role(r.Context(), domain, role, principal, 0, 0)\n\tif err != nil {\n\t\treturn err\n\t}\n\tr.Header.Set(h.cfg.RoleAuthHeader, tok.Token)\n\th.proxy.ServeHTTP(w, r)\n\treturn nil\n}",
"func (c VaultConfig) GetToken() string {\n\treturn c.Token\n}",
"func TokenOption(token string) Option {\n\treturn func(opts *options) {\n\t\topts.Token = token\n\t}\n}",
"func (c Config) WithToken(token string) Config {\n\tc.Token = token\n\treturn c\n}",
"func WithToken(with string) wrapping.Option {\n\treturn func() interface{} {\n\t\treturn OptionFunc(func(o *options) error {\n\t\t\to.withToken = with\n\t\t\treturn nil\n\t\t})\n\t}\n}",
"func WithAdminToken(token string) TestContextOpt {\n\treturn func(tstContext *testContext) {\n\t\ttstContext.save.Globals.CachedToken = token\n\t}\n}",
"func (a *authorizer) VerifyRoleToken(ctx context.Context, tok, act, res string) error {\n\treturn a.verify(ctx, token, tok, act, res)\n}",
"func (cfg Config) GetToken() (token string) {\n\treturn cfg.Token\n}",
"func WithToken(value string) OptFn {\n\treturn func(o *Opt) {\n\t\to.token = value\n\t}\n}",
"func (c *configuration) Token(restConfig *RestConfig) Token {\n\tif restConfig != nil {\n\t\treturn Token(restConfig.Config.BearerToken)\n\t}\n\treturn \"\"\n}",
"func MatchRoleToConfig(poolRole string, ec2Configs []rancherEc2.AWSEC2Config) *rancherEc2.AWSEC2Config {\n\tfor _, config := range ec2Configs {\n\t\thasMatch := false\n\t\tfor _, configRole := range config.Roles {\n\t\t\tif strings.Contains(poolRole, configRole) {\n\t\t\t\thasMatch = true\n\t\t\t}\n\t\t}\n\t\tif hasMatch {\n\t\t\treturn &config\n\t\t}\n\t}\n\treturn nil\n}",
"func WithToken(s string) Option {\n\treturn func(o *options) {\n\t\to.token = s\n\t}\n}",
"func WithToken(t Token) Option {\n\treturn option.New(optkeyToken, t)\n}",
"func WithAccesToken(token string) ConfigOption {\n\treturn func(c *Config) {\n\t\tc.accessToken = token\n\t}\n}",
"func WithRoleChangedListener(l RoleChangedListener) Option {\n\treturn func(o *config) {\n\t\to.RoleChanged = l\n\t}\n}",
"func (conf *ConfigType) Role() Role {\n\treturn conf.role\n}",
"func WithToken(val string) Option {\n\treturn func(m *metrics) {\n\t\tm.token = val\n\t}\n}",
"func GetToken(ctx *pulumi.Context) string {\n\treturn config.Get(ctx, \"aws:token\")\n}",
"func TokenRealm(tokenRealmString string) (*url.URL, error) {\n\tif len(tokenRealmString) == 0 {\n\t\t// If not specified, default to \"/openshift/token\", auto-detecting the scheme and host\n\t\treturn &url.URL{Path: defaultTokenPath}, nil\n\t}\n\n\ttokenRealm, err := url.Parse(tokenRealmString)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error parsing URL in %s config option: %v\", tokenRealmKey, err)\n\t}\n\tif len(tokenRealm.RawQuery) > 0 || len(tokenRealm.Fragment) > 0 {\n\t\treturn nil, fmt.Errorf(\"%s config option may not contain query parameters or a fragment\", tokenRealmKey)\n\t}\n\tif len(tokenRealm.Path) > 0 {\n\t\treturn nil, fmt.Errorf(\"%s config option may not contain a path (%q was specified)\", tokenRealmKey, tokenRealm.Path)\n\t}\n\n\t// pin to \"/openshift/token\"\n\ttokenRealm.Path = defaultTokenPath\n\n\treturn tokenRealm, nil\n}",
"func (a *Config) GetRole(c echo.Context) string {\n\treqToken := c.Request().Header.Get(\"Authorization\")\n\tsplitToken := strings.Split(reqToken, \"Bearer\")\n\tif len(splitToken) != 2 {\n\t\treturn \"\"\n\t}\n\treqToken = strings.TrimSpace(splitToken[1])\n\treturn a.Source.GetRoleByToken(reqToken)\n}",
"func CreateWithToken(serverURL, clusterName, userName string, caCert []byte, token string) *clientcmdapi.Config {\n\tconfig := CreateBasic(serverURL, clusterName, userName, caCert)\n\tconfig.AuthInfos[userName] = &clientcmdapi.AuthInfo{\n\t\tToken: token,\n\t}\n\treturn config\n}",
"func withRoleID(id int) roleOption {\n\treturn func(m *RoleMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *Role\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Role, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = fmt.Errorf(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Role.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}",
"func withRoleID(id int) roleOption {\n\treturn func(m *RoleMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *Role\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Role, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = fmt.Errorf(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Role.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}",
"func withRoleID(id int) roleOption {\n\treturn func(m *RoleMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *Role\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Role, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = fmt.Errorf(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Role.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}",
"func jiraTokenFromConfig(config cfg.Config) (*oauth1.Token, bool) {\n\ttoken := config.GetConfigString(\"jira-token\")\n\tif token == \"\" {\n\t\treturn nil, false\n\t}\n\n\tsecret := config.GetConfigString(\"jira-secret\")\n\tif secret == \"\" {\n\t\treturn nil, false\n\t}\n\n\treturn &oauth1.Token{\n\t\tToken: token,\n\t\tTokenSecret: secret,\n\t}, true\n}",
"func (m *SDSConfig) GetToken() *types.Struct {\n\tif m != nil {\n\t\treturn m.Token\n\t}\n\treturn nil\n}",
"func Token(t string) Option {\n\treturn func(o *Options) {\n\t\to.Token = t\n\t}\n}",
"func WithSecurityToken(securityToken string) configurer {\n\treturn func(conf *config) {\n\t\tfor _, sp := range conf.securityProviders {\n\t\t\tif bsp, ok := sp.(*BasicSecurityProvider); ok {\n\t\t\t\tsh := bsp.getSecurity()\n\t\t\t\tbsp.refresh(sh.ak, sh.sk, securityToken)\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n}",
"func getUserTokenAuth(ctx context.Context, config Config, endpoints *Endpoints) Config {\n\tauthConfig := &oauth2.Config{\n\t\tClientID: \"cf\",\n\t\tScopes: []string{\"\"},\n\t\tEndpoint: oauth2.Endpoint{\n\t\t\tAuthURL: endpoints.Links.AuthEndpoint.URL + \"/oauth/auth\",\n\t\t\tTokenURL: endpoints.Links.TokenEndpoint.URL + \"/oauth/token\",\n\t\t},\n\t}\n\n\t// Token is expected to have no \"bearer\" prefix\n\ttoken := &oauth2.Token{\n\t\tAccessToken: config.Token,\n\t\tTokenType: \"Bearer\"}\n\n\tconfig.TokenSource = authConfig.TokenSource(ctx, token)\n\tconfig.HttpClient = oauth2.NewClient(ctx, config.TokenSource)\n\n\treturn config\n}",
"func ConfigTokenKeyResolver(config *config.Configuration) TokenKeyResolver {\n\treturn func(token *jwt.Token) (interface{}, error) {\n\t\tb := config.Security.JWTSecret\n\t\treturn b, nil\n\t}\n}",
"func NewConfigWithToken(token string) oauth2.TokenSource {\n\treturn oauth2.StaticTokenSource(\n\t\t&oauth2.Token{AccessToken: token},\n\t)\n}",
"func JWTConfig() echo.MiddlewareFunc {\n\treturn middleware.JWTWithConfig(middleware.JWTConfig{\n\t\tClaims: &dtos.ClaimsDto{},\n\t\tSigningKey: []byte(tokenSecret),\n\t\t// ErrorHandler: func(e error) error {\n\t\t// \treturn dtos.APIResult{\n\t\t// \t\tErrorCode: http.StatusUnauthorized,\n\t\t// \t\tMessage: e.Error(),\n\t\t// \t}\n\t\t// },\n\t})\n}",
"func (p *TempCredentialsProvider) getCredsWithRole() (credentials.Value, error) {\n\tlog.Println(\"Getting credentials with AssumeRole\")\n\n\tif p.config.RoleARN == \"\" {\n\t\treturn credentials.Value{}, errors.New(\"No role defined\")\n\t}\n\n\trole, err := p.assumeRoleFromCreds(p.masterCreds, true)\n\tif err != nil {\n\t\treturn credentials.Value{}, err\n\t}\n\n\tp.SetExpiration(*role.Expiration, DefaultExpirationWindow)\n\n\tlog.Printf(\"Using role %s, expires in %s\", formatKeyForDisplay(*role.AccessKeyId), time.Until(*role.Expiration).String())\n\treturn credentials.Value{\n\t\tAccessKeyID: *role.AccessKeyId,\n\t\tSecretAccessKey: *role.SecretAccessKey,\n\t\tSessionToken: *role.SessionToken,\n\t}, nil\n}",
"func InstanceRole(role string) RequestOptionFunc {\n\treturn func(body *RequestBody) error {\n\t\tbody.Role = role\n\t\treturn nil\n\t}\n}",
"func parseTokenConfig(config []byte) (*TokenGenerator, error) {\n\tvar parsedConfig *TokenGenerator\n\n\terr := json.Unmarshal(config, &parsedConfig)\n\n\treturn parsedConfig, err\n}",
"func WithAccount(name string) TokenOption {\n return func(t *tokenOpts) {\n t.account = name\n }\n}",
"func (m *VpnConfiguration) GetRole()(*string) {\n val, err := m.GetBackingStore().Get(\"role\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*string)\n }\n return nil\n}",
"func exchangeToken(config *oauth2.Config, code string) (*oauth2.Token, error) {\n\t// Use the custom HTTP client when requesting a token.\n\t//httpClient := &http.Client{Timeout: 2 * time.Second}\n\t//ctx := context.Background()\n\t//context.WithValue(ctx, oauth2.HTTPClient, httpClient)\n\ttok, err := config.Exchange(context.TODO(), code)\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"Unable to retrieve token\")\n\t}\n\treturn tok, nil\n}",
"func (o BuildStrategySpecBuildStepsSecurityContextSeLinuxOptionsOutput) Role() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildStrategySpecBuildStepsSecurityContextSeLinuxOptions) *string { return v.Role }).(pulumi.StringPtrOutput)\n}",
"func WithWebhookSecurityToken(token string) WebhookOption {\n\treturn func(webhook *Webhook) {\n\t\twebhook.securityToken = token\n\t}\n}",
"func (p *SQLResourcesClientDeleteSQLRoleDefinitionPoller) ResumeToken() (string, error) {\n\treturn p.pt.ResumeToken()\n}",
"func WithVaultTokenSecretRef() InstallOsmOpt {\n\treturn func(opts *InstallOSMOpts) {\n\t\topts.SetOverrides = []string{\n\t\t\t\"osm.vault.secret.name=osm-vault-token\",\n\t\t\t\"osm.vault.secret.key=vault_token\",\n\t\t}\n\t}\n}",
"func (o ClusterBuildStrategySpecBuildStepsSecurityContextSeLinuxOptionsOutput) Role() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ClusterBuildStrategySpecBuildStepsSecurityContextSeLinuxOptions) *string { return v.Role }).(pulumi.StringPtrOutput)\n}",
"func (o BucketReplicationConfigOutput) Token() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *BucketReplicationConfig) pulumi.StringPtrOutput { return v.Token }).(pulumi.StringPtrOutput)\n}",
"func (p *SQLResourcesClientCreateUpdateSQLRoleDefinitionPoller) ResumeToken() (string, error) {\n\treturn p.pt.ResumeToken()\n}",
"func winrmConfig(state multistep.StateBag) (*communicator.WinRMConfig, error) {\n\tconfig := state.Get(\"config\").(*Config)\n\tpassword := state.Get(\"winrm_password\").(string)\n\n\treturn &communicator.WinRMConfig{\n\t\tUsername: config.Comm.WinRMUser,\n\t\tPassword: password,\n\t}, nil\n}",
"func (m *SDSConfig) GetToken() map[string]interface{} {\n\tif m != nil {\n\t\treturn m.Token\n\t}\n\treturn nil\n}",
"func (m *SDSConfig) GetToken() map[string]interface{} {\n\tif m != nil {\n\t\treturn m.Token\n\t}\n\treturn nil\n}",
"func (m *SDSConfig) GetToken() map[string]interface{} {\n\tif m != nil {\n\t\treturn m.Token\n\t}\n\treturn nil\n}",
"func (m *SDSConfig) GetToken() map[string]interface{} {\n\tif m != nil {\n\t\treturn m.Token\n\t}\n\treturn nil\n}",
"func (m *SDSConfig) GetToken() map[string]interface{} {\n\tif m != nil {\n\t\treturn m.Token\n\t}\n\treturn nil\n}",
"func exchangeToken(config *oauth2.Config, code string) (*oauth2.Token, error) {\n\ttok, err := config.Exchange(oauth2.NoContext, code)\n\tif err != nil {\n\t\tlog.Fatalf(\"Unable to retrieve token %v\", err)\n\t}\n\treturn tok, nil\n}",
"func WebhookTokenAuthenticator() *WebhookTokenAuthenticatorApplyConfiguration {\n\treturn &WebhookTokenAuthenticatorApplyConfiguration{}\n}",
"func WithAuthToken(f func(http.ResponseWriter, *http.Request)) func(http.ResponseWriter, *http.Request) {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\ttokenR := context.Get(r, repos.TokenR).(repos.TokenRepo)\n\t\tuserR := context.Get(r, repos.UserR).(repos.UserIdGetter)\n\t\tvar token string\n\t\tif r.Method == \"POST\" || r.Method == \"PUT\" {\n\t\t\ttoken = r.FormValue(\"token\")\n\t\t}\n\t\tif r.Method == \"GET\" {\n\t\t\tparams := r.URL.Query()\n\t\t\tif len(params[\"token\"]) == 0 {\n\t\t\t\tRespond(w, r, http.StatusUnauthorized, &ErrorDescriptor{Error: \"Send a token\", ErrorCode: 100})\n\t\t\t\treturn\n\t\t\t}\n\t\t\ttoken = params[\"token\"][0]\n\t\t}\n\t\tlog.Printf(\"[INFOS] Token from user : %v \", token)\n\t\tid, err := tokenR.GetUserWithToken(token)\n\t\tu, err := userR.GetUserById(id)\n\t\tif err != nil {\n\t\t\t//context.Clear(r)\n\t\t\tlog.Print(\"[INFOS] [ERROR] Token submitted is invalid\")\n\t\t\tRespond(w, r, http.StatusUnauthorized, &ErrorDescriptor{Error: \"Invalid token \", ErrorCode: 100})\n\t\t\treturn\n\t\t}\n\t\tlog.Printf(\"[INFOS] User ID : %v \", u.ID)\n\t\tcontext.Set(r, \"user\", u)\n\t\tf(w, r)\n\t}\n}",
"func (p *SQLResourcesDeleteSQLRoleDefinitionPoller) ResumeToken() (string, error) {\n\treturn p.pt.ResumeToken()\n}",
"func (p *SQLResourcesCreateUpdateSQLRoleDefinitionPoller) ResumeToken() (string, error) {\n\treturn p.pt.ResumeToken()\n}",
"func (i *integrationTest) withServiceAccountRole(sa string, inlinePolicy bool) *integrationTest {\n\trole := truncate.TruncateString(sa+\".sa.\"+i.clusterName, truncate.TruncateStringOptions{MaxLength: iam.MaxLengthIAMRoleName, AlwaysAddHash: false})\n\ti.expectServiceAccountRolePolicies = append(i.expectServiceAccountRolePolicies, fmt.Sprintf(\"aws_iam_role_%s_policy\", role))\n\tif inlinePolicy {\n\t\ti.expectServiceAccountRolePolicies = append(i.expectServiceAccountRolePolicies, fmt.Sprintf(\"aws_iam_role_policy_%s_policy\", role))\n\t}\n\treturn i\n}",
"func grpcGetServiceConfigWithOauthToken(sa *string, ctx context.Context) (*serviceconfig.Service, error) {\n\t//scope := sm.CloudPlatformScope\n\tserverAddr := \"servicemanagement.googleapis.com\"\n\tserverAddrWithPort := \"servicemanagement.googleapis.com:443\"\n\tgetServiceConfigProtoReq := smp.GetServiceConfigRequest{\n\t\tServiceName: serviceName,\n\t}\n\tpool, _ := x509.SystemCertPool()\n\t// error handling omitted\n\tcreds := credentials.NewClientTLSFromCert(pool, serverAddrWithPort)\n\tcreds.OverrideServerName(serverAddr)\n\t//perRPC, _ := grpcOauth.NewServiceAccountFromFile(*sa, scope)\n\tbearerToken := \"Bearer ya29.c.ElrSBZKqpjJDEyFjqpfWF1s62FplR8at1Lvt2NDxFKShwNzJr6x2T0YK6ycldNv_ZlA4aNxBjL1jmZdBmjvf6733o8G9sCsxDWHWNgy9Wewz7Fz_Jo7bSaz0psc\"\n\n\t//md := metadata.Pairs(\"Authorization\", bearerToken)\n\t//cos := grpc.HeaderCallOption{\n\t//\tHeaderAddr: &md,\n\t//}\n\n\tperRPC := customJwt{\n\t\ttoken: bearerToken,\n\t}\n\n\tconn, _ := grpc.Dial(\n\t\tserverAddrWithPort,\n\t\tgrpc.WithPerRPCCredentials(&perRPC),\n\t\tgrpc.WithTransportCredentials(creds),\n\t)\n\t//grpc.Header()\n\tgscp, err := smp.NewServiceManagerClient(conn).GetServiceConfig(ctx, &getServiceConfigProtoReq)\n\tif err != nil {\n\t\tfmt.Println(\"Error while making grpc call: \", err)\n\t}\n\tfmt.Println(\"grpc call get name : \", gscp.GetName())\n\treturn gscp, err\n}",
"func WithToken(ctx context.Context, token string) context.Context {\n\tif token != \"\" {\n\t\treturn rpc.WithOutgoingHeader(ctx, authenticationHeader, authenticationTokenPrefix+token)\n\t}\n\treturn ctx\n}",
"func (c *configuration) Role(clientSet ClientSet) *Role {\n\tif clientSet != nil {\n\t\treturn NewRole(clientSet)\n\t}\n\treturn nil\n\n}",
"func (c *Client) GetTokenSupplyWithConfig(ctx context.Context, mintAddr string, cfg rpc.GetTokenSupplyConfig) (uint64, uint8, error) {\n\tres, err := c.RpcClient.GetTokenSupplyWithConfig(ctx, mintAddr, cfg)\n\terr = checkRpcResult(res.GeneralResponse, err)\n\tif err != nil {\n\t\treturn 0, 0, err\n\t}\n\tbalance, err := strconv.ParseUint(res.Result.Value.Amount, 10, 64)\n\tif err != nil {\n\t\treturn 0, 0, fmt.Errorf(\"failed to cast token amount, err: %v\", err)\n\t}\n\treturn balance, res.Result.Value.Decimals, nil\n}",
"func CreateConfigServiceToken(host string, verifyTLS bool, apiKey string, project string, config string, name string, expireAt time.Time, access string) (models.ConfigServiceToken, Error) {\n\tpostBody := map[string]interface{}{\"name\": name}\n\tif !expireAt.IsZero() {\n\t\tpostBody[\"expire_at\"] = expireAt.Unix()\n\t}\n\tpostBody[\"access\"] = access\n\n\tbody, err := json.Marshal(postBody)\n\tif err != nil {\n\t\treturn models.ConfigServiceToken{}, Error{Err: err, Message: \"Invalid service token info\"}\n\t}\n\n\tvar params []queryParam\n\tparams = append(params, queryParam{Key: \"project\", Value: project})\n\tparams = append(params, queryParam{Key: \"config\", Value: config})\n\n\turl, err := generateURL(host, \"/v3/configs/config/tokens\", params)\n\tif err != nil {\n\t\treturn models.ConfigServiceToken{}, Error{Err: err, Message: \"Unable to generate url\"}\n\t}\n\n\tstatusCode, _, response, err := PostRequest(url, verifyTLS, apiKeyHeader(apiKey), body)\n\tif err != nil {\n\t\treturn models.ConfigServiceToken{}, Error{Err: err, Message: \"Unable to create service token\", Code: statusCode}\n\t}\n\n\tvar result map[string]interface{}\n\terr = json.Unmarshal(response, &result)\n\tif err != nil {\n\t\treturn models.ConfigServiceToken{}, Error{Err: err, Message: \"Unable to parse API response\", Code: statusCode}\n\t}\n\n\ttokenResult, ok := result[\"token\"].(map[string]interface{})\n\tif !ok {\n\t\treturn models.ConfigServiceToken{}, Error{Err: fmt.Errorf(\"Unexpected type for token result in ConfigServiceToken, expected map[string]interface{}, got %T\", result[\"token\"]), Message: \"Unable to parse API response\", Code: statusCode}\n\t}\n\tinfo := models.ParseConfigServiceToken(tokenResult)\n\treturn info, Error{}\n}",
"func HasRole(role string, token *jwt.Token) bool {\n\tfor _, r := range Roles(token) {\n\t\tif r == role {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}",
"func Roles(token *jwt.Token) []string {\n\treturn oauth.Roles(token)\n}",
"func NewToken(id int, role string) (string, error) {\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{\n\t\t\"role\": role,\n\t\t\"userID\": id,\n\t\t\"nbf\": time.Now().Unix(),\n\t\t\"iat\": time.Now().Unix(),\n\t\t\"exp\": time.Now().Local().Add(time.Hour*time.Duration(JWT_EXP_HOUR) + time.Minute*time.Duration(JWT_EXP_MIN) + time.Second*time.Duration(JWT_EXP_SEC)).Unix(),\n\t})\n\t// Sign and get the complete encoded token as a string using the secret\n\tsToken, err := token.SignedString([]byte(JWT_SECRET))\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn sToken, nil\n}",
"func (p *SQLResourcesClientDeleteSQLRoleAssignmentPoller) ResumeToken() (string, error) {\n\treturn p.pt.ResumeToken()\n}",
"func (p *SQLResourcesClientCreateUpdateSQLRoleAssignmentPoller) ResumeToken() (string, error) {\n\treturn p.pt.ResumeToken()\n}",
"func Token(val string) Argument {\n\treturn func(request *requests.Request) error {\n\t\trequest.AddArgument(\"token\", val)\n\t\treturn nil\n\t}\n}",
"func UpdateConfigToken(token string) error {\n\n\t// Reauthenticate against Vault and update in-memory config\n\tvc.SetToken(token)\n\tvc.Auth()\n\tcfg.Token = token\n\n\tpath, err := GetConfigPath()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcontent, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ttoken_found := false\n\n\tlines := strings.Split(string(content), \"\\n\")\n\n\tfor i, line := range lines {\n\t\tif strings.HasPrefix(line, \"token:\") {\n\t\t\tlines[i] = \"token: \" + token\n\t\t\ttoken_found = true\n\t\t}\n\t}\n\n\tif !token_found {\n\t\tlines = append(lines, \"token: \"+token)\n\n\t}\n\n\toutput := strings.Join(lines, \"\\n\")\n\terr = ioutil.WriteFile(path, []byte(output), 0600)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func Token(c *cli.Context) {\n\tconfigPath := c.GlobalString(\"config\")\n\n\tconfig, err := parseConfig(configPath)\n\n\tif err != nil {\n\t\tlog.Fatalf(\"Error parsing the config : %s\", err)\n\t\treturn\n\t}\n\n\tfmt.Print(string(authtoken.Token(config.HTTP.Login, config.HTTP.Password, config.HTTP.Salt)))\n}",
"func (c *ConfigurationFile) GetRole() string {\n\treturn c.CurrentRole\n}",
"func desiredRole(name string, contour *operatorv1alpha1.Contour) *rbacv1.Role {\n\trole := &rbacv1.Role{\n\t\tTypeMeta: metav1.TypeMeta{\n\t\t\tKind: \"Role\",\n\t\t},\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tNamespace: contour.Spec.Namespace.Name,\n\t\t\tName: name,\n\t\t},\n\t}\n\tgroupAll := []string{\"\"}\n\tverbCU := []string{\"create\", \"update\"}\n\tsecret := rbacv1.PolicyRule{\n\t\tVerbs: verbCU,\n\t\tAPIGroups: groupAll,\n\t\tResources: []string{\"secrets\"},\n\t}\n\trole.Rules = []rbacv1.PolicyRule{secret}\n\trole.Labels = map[string]string{\n\t\toperatorv1alpha1.OwningContourNameLabel: contour.Name,\n\t\toperatorv1alpha1.OwningContourNsLabel: contour.Namespace,\n\t}\n\treturn role\n}",
"func withUserRole(t *testing.T, name string, spec types.RoleSpecV6) testOptionsFunc {\n\tt.Helper()\n\t// Create a new role with full access to all databases.\n\trole, err := types.NewRole(name, spec)\n\trequire.NoError(t, err)\n\treturn func(options *testOptions) {\n\t\toptions.userRoles = append(options.userRoles, role)\n\t}\n}",
"func (p *SQLResourcesDeleteSQLRoleAssignmentPoller) ResumeToken() (string, error) {\n\treturn p.pt.ResumeToken()\n}",
"func (w *AWS) SessionToken(account, role, sessName string) string {\n\tctx := w.Ctx\n\tif account != \"\" {\n\t\tctx.Account = AccountID(account)\n\t}\n\tif sessName == \"\" {\n\t\tsessName = role\n\t}\n\treturn string(ctx.New(\"sts\", \"assumed-role/\", role, \"/\", sessName))\n}",
"func (p *ManagedClustersCreateOrUpdatePoller) ResumeToken() (string, error) {\n\treturn p.pt.ResumeToken()\n}",
"func (p *SQLResourcesCreateUpdateSQLRoleAssignmentPoller) ResumeToken() (string, error) {\n\treturn p.pt.ResumeToken()\n}",
"func (wh *AccountWarehouse) MintTokenWithTTL(ctx context.Context, params *ResourceParams) (*clouds.AwsResourceTokenResult, error) {\n\tsess, err := createSession()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif params.Ttl > params.MaxKeyTtl {\n\t\treturn nil, fmt.Errorf(\"given ttl [%s] is greater than max ttl [%s]\", params.Ttl, params.MaxKeyTtl)\n\t}\n\n\t// FIXME load in constructor function?\n\tsvcUserArn, err := wh.loadSvcUserArn(sess)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tprincSpec := &principalSpec{\n\t\tdamPrincipalArn: svcUserArn,\n\t\tparams: params,\n\t}\n\n\tif params.Ttl > TemporaryCredMaxTtl {\n\t\tprincSpec.pType = userType\n\t} else {\n\t\tprincSpec.pType = roleType\n\t}\n\n\tvar polSpec *policySpec\n\tswitch params.ServiceTemplate.ServiceName {\n\tcase S3ItemFormat:\n\t\tbucket, ok := params.Vars[\"bucket\"]\n\t\tif !ok {\n\t\t\treturn nil, fmt.Errorf(\"no bucket specified\")\n\t\t}\n\t\trSpec := &resourceSpec{\n\t\t\tid: bucket,\n\t\t\tarn: fmt.Sprintf(\"arn:aws:s3:::%s/*\", bucket),\n\t\t\trType: bucketType,\n\t\t}\n\t\tpolSpec = &policySpec{\n\t\t\tprincipal: princSpec,\n\t\t\trSpecs: []*resourceSpec{rSpec},\n\t\t\tparams: params,\n\t\t}\n\tcase RedshiftItemFormat:\n\t\tclusterArn, ok := params.Vars[\"cluster\"]\n\t\tif !ok {\n\t\t\treturn nil, fmt.Errorf(\"no cluster specified\")\n\t\t}\n\t\tclusterSpec := &resourceSpec{\n\t\t\trType: otherRType,\n\t\t\tarn: clusterArn,\n\t\t\tid: extractClusterName(clusterArn),\n\t\t}\n\t\tdbuser := convertToAwsSafeIdentifier(params.UserId)\n\t\tuserSpec := &resourceSpec{\n\t\t\trType: otherRType,\n\t\t\tarn: calculateUserArn(clusterArn, dbuser),\n\t\t\tid: dbuser,\n\t\t}\n\t\tgroup, ok := params.Vars[\"group\"]\n\t\tvar rSpecs []*resourceSpec\n\t\tif ok {\n\t\t\trSpecs = []*resourceSpec{\n\t\t\t\tclusterSpec,\n\t\t\t\tuserSpec,\n\t\t\t\t{\n\t\t\t\t\trType: otherRType,\n\t\t\t\t\tarn: group,\n\t\t\t\t\tid: extractDBGroupName(group),\n\t\t\t\t},\n\t\t\t}\n\t\t} else {\n\t\t\trSpecs = []*resourceSpec{clusterSpec,userSpec}\n\t\t}\n\n\t\tpolSpec = &policySpec{\n\t\t\tprincipal: princSpec,\n\t\t\trSpecs: rSpecs,\n\t\t\tparams: params,\n\t\t}\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"unrecognized item format [%s] for AWS target adapter\", params.ServiceTemplate.ServiceName)\n\t}\n\n\tprincipalArn, err := ensurePrincipal(sess, princSpec)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\terr = ensurePolicy(sess, polSpec)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn wh.ensureTokenResult(sess, principalArn, princSpec)\n}",
"func (o BuildStrategySpecBuildStepsSecurityContextSeLinuxOptionsPtrOutput) Role() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *BuildStrategySpecBuildStepsSecurityContextSeLinuxOptions) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Role\n\t}).(pulumi.StringPtrOutput)\n}",
"func (o AuthBackendRoleOutput) TokenTtl() pulumi.IntPtrOutput {\n\treturn o.ApplyT(func(v *AuthBackendRole) pulumi.IntPtrOutput { return v.TokenTtl }).(pulumi.IntPtrOutput)\n}",
"func RoleAuth(allowedRole string) gin.HandlerFunc {\n\t/*\n\t\tI am aware that this function or way is not scalable.\n\t\tIf this program has more than 2 roles, and several roles can access a request,\n\t\tit won't be able to check multiple roles.\n\t\tI might have the solution, but will do it if scaling is really required.\n\t*/\n\treturn func(c *gin.Context) {\n\t\tvar user models.User\n\t\tuserID, err := strconv.Atoi(c.Request.Header.Get(\"userID\"))\n\t\tif err != nil {\n\t\t\tc.JSON(http.StatusUnauthorized, gin.H{\"error\": err.Error()})\n\t\t\tc.Abort()\n\t\t\treturn\n\t\t}\n\n\t\tuserRole, err := user.GetUserRoleByID(userID)\n\t\tif err != nil {\n\t\t\tc.JSON(http.StatusUnauthorized, gin.H{\"error\": err.Error()})\n\t\t\tc.Abort()\n\t\t\treturn\n\t\t}\n\n\t\tif userRole != allowedRole {\n\t\t\tc.JSON(http.StatusUnauthorized, gin.H{\"error\": \"User is unauthorized to use this request.\"})\n\t\t\tc.Abort()\n\t\t\treturn\n\t\t}\n\n\t\tc.Next()\n\t}\n}",
"func (c *Config) Role() int {\n\trole := c.Get(\"role\", \"follower\")\n\tswitch role {\n\tcase \"follower\":\n\t\treturn FOLLOWER\n\tcase \"leader\":\n\t\treturn LEADER\n\tdefault:\n\t\tlog.Panic(\"Invalid role: %s.\", role)\n\t}\n\treturn LEADER\n}",
"func (o ClusterBuildStrategySpecBuildStepsSecurityContextSeLinuxOptionsPtrOutput) Role() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ClusterBuildStrategySpecBuildStepsSecurityContextSeLinuxOptions) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Role\n\t}).(pulumi.StringPtrOutput)\n}",
"func (workflow *pipelineWorkflow) pipelineToken(namespace string, tokenProvider func(bool) string, stackWaiter common.StackWaiter, params map[string]string) Executor {\n\treturn func() error {\n\t\tpipelineStackName := common.CreateStackName(namespace, common.StackTypePipeline, workflow.serviceName)\n\t\tpipelineStack := stackWaiter.AwaitFinalStatus(pipelineStackName)\n\t\tif workflow.pipelineConfig.Source.Provider == \"GitHub\" {\n\t\t\tparams[\"GitHubToken\"] = tokenProvider(pipelineStack == nil)\n\t\t}\n\t\treturn nil\n\t}\n}",
"func WithRegistryServiceToken(serviceToken string) RegistryOption {\n\treturn func(o *registryOptions) { o.ServiceToken = serviceToken }\n}",
"func (cfg *Config) SetToken(token string) {\n\tcfg.Token = token\n}",
"func NamespaceWithTokenProvider(provider auth.TokenProvider) NamespaceOption {\n\treturn func(ns *Namespace) error {\n\t\tns.TokenProvider = provider\n\t\treturn nil\n\t}\n}",
"func (o BucketReplicationConfigOutput) Role() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *BucketReplicationConfig) pulumi.StringOutput { return v.Role }).(pulumi.StringOutput)\n}",
"func (p *ManagedClustersRunCommandPoller) ResumeToken() (string, error) {\n\treturn p.pt.ResumeToken()\n}",
"func (p *CassandraClustersInvokeCommandPoller) ResumeToken() (string, error) {\n\treturn p.pt.ResumeToken()\n}",
"func WithBearerToken(tok string) Option {\n\treturn func(opts *backendOptions) {\n\t\topts.bearerToken = tok\n\t}\n}"
] | [
"0.65500444",
"0.6109445",
"0.6060574",
"0.5755772",
"0.55504984",
"0.5467545",
"0.53978205",
"0.53978205",
"0.53978205",
"0.5376566",
"0.5308736",
"0.5294586",
"0.5285593",
"0.52821636",
"0.5274694",
"0.52643675",
"0.52467155",
"0.51788664",
"0.51758593",
"0.5169364",
"0.5164233",
"0.5151515",
"0.5142084",
"0.5129196",
"0.5127573",
"0.5122581",
"0.5110335",
"0.5067846",
"0.5052936",
"0.50446373",
"0.5004616",
"0.5004616",
"0.5004616",
"0.49904737",
"0.49724412",
"0.49676013",
"0.49645072",
"0.49563572",
"0.49477673",
"0.49395123",
"0.49386796",
"0.4910788",
"0.49039215",
"0.4899813",
"0.48884478",
"0.48797986",
"0.48662397",
"0.4822583",
"0.4818729",
"0.47842252",
"0.47842002",
"0.47773448",
"0.47763076",
"0.47728503",
"0.47658545",
"0.47635928",
"0.47635928",
"0.47635928",
"0.47635928",
"0.47635928",
"0.47608775",
"0.47565466",
"0.47533765",
"0.47386616",
"0.47367108",
"0.4726273",
"0.47238836",
"0.4720535",
"0.47133946",
"0.47128886",
"0.470667",
"0.46927372",
"0.46846107",
"0.46800944",
"0.4662495",
"0.46496662",
"0.4632899",
"0.4632745",
"0.4629959",
"0.46252942",
"0.4624123",
"0.4620742",
"0.46184355",
"0.46168727",
"0.46166828",
"0.4614928",
"0.4613741",
"0.46090057",
"0.45954958",
"0.4589786",
"0.45881295",
"0.45829794",
"0.45791775",
"0.45632732",
"0.456314",
"0.45616817",
"0.45539135",
"0.45497206",
"0.4535157",
"0.45345095"
] | 0.7468508 | 0 |
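The WithRoleTokenConfig row above (and WithAuthorizationd in the row that follows) are instances of Go's functional-option pattern: each With* constructor returns a closure that mutates the handler at construction time. The sketch below is runnable on its own and uses stand-in types for GRPCHandler, config.RoleToken, and service.Authorizationd, since those definitions are not part of this excerpt.

package main

import "fmt"

// Stand-ins for config.RoleToken, service.Authorizationd and the real
// GRPCHandler; the actual definitions are not shown in the dataset rows.
type RoleTokenConfig struct{ Enable bool }

type Authorizationd interface{ Name() string }

type GRPCHandler struct {
	roleCfg        RoleTokenConfig
	authorizationd Authorizationd
}

// GRPCOption mutates the handler while it is being constructed.
type GRPCOption func(*GRPCHandler)

// WithRoleTokenConfig returns an option that sets the role token config.
func WithRoleTokenConfig(cfg RoleTokenConfig) GRPCOption {
	return func(h *GRPCHandler) { h.roleCfg = cfg }
}

// WithAuthorizationd returns an option that sets the authorization daemon.
func WithAuthorizationd(a Authorizationd) GRPCOption {
	return func(h *GRPCHandler) { h.authorizationd = a }
}

// NewGRPCHandler applies each option to a zero-valued handler.
func NewGRPCHandler(opts ...GRPCOption) *GRPCHandler {
	h := &GRPCHandler{}
	for _, opt := range opts {
		opt(h)
	}
	return h
}

type noopAuthorizer struct{}

func (noopAuthorizer) Name() string { return "noop" }

func main() {
	h := NewGRPCHandler(
		WithRoleTokenConfig(RoleTokenConfig{Enable: true}),
		WithAuthorizationd(noopAuthorizer{}),
	)
	fmt.Println(h.roleCfg.Enable, h.authorizationd.Name())
}

A constructor that ranges over its options, like the NewProxy variants among the first row's negatives, keeps the handler's zero value usable while letting callers override only the fields they care about.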
WithAuthorizationd returns an authorizationd functional option | func WithAuthorizationd(a service.Authorizationd) GRPCOption {
return func(h *GRPCHandler) {
h.authorizationd = a
}
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (c Client) WithAuthorization() PrepareDecorator {\n\treturn c.authorizer().WithAuthorization()\n}",
"func WithAuthorization(hhandler http.Handler, auth authorizer.Authorizer, s runtime.NegotiatedSerializer) http.Handler {\n\treturn withAuthorization(hhandler, auth, s, recordAuthorizationMetrics)\n}",
"func Authorization(f func(http.ResponseWriter, *http.Request)) func(http.ResponseWriter, *http.Request) {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\ttoken := r.Header.Get(\"Authorization\")\n\t\t_, err := authorization.ValidateToken(token)\n\t\tif err != nil {\n\t\t\tforbbiden(w, r)\n\t\t\treturn\n\t\t}\n\t\tf(w, r)\n\t}\n}",
"func (sk *SharedKeyAuthorizer) WithAuthorization() PrepareDecorator {\n\treturn func(p Preparer) Preparer {\n\t\treturn PreparerFunc(func(r *http.Request) (*http.Request, error) {\n\t\t\tr, err := p.Prepare(r)\n\t\t\tif err != nil {\n\t\t\t\treturn r, err\n\t\t\t}\n\n\t\t\tsk, err := buildSharedKey(sk.accountName, sk.accountKey, r, sk.keyType)\n\t\t\tif err != nil {\n\t\t\t\treturn r, err\n\t\t\t}\n\t\t\treturn Prepare(r, WithHeader(headerAuthorization, sk))\n\t\t})\n\t}\n}",
"func authorizationFilter(host service.Host) FilterFunc {\n\treturn func(ctx context.Context, w http.ResponseWriter, r *http.Request, next Handler) {\n\t\tfxctx := &fxcontext.Context{\n\t\t\tContext: ctx,\n\t\t}\n\n\t\tif err := host.AuthClient().Authorize(fxctx); err != nil {\n\t\t\thost.Metrics().SubScope(\"http\").SubScope(\"auth\").Counter(\"fail\").Inc(1)\n\t\t\tfxctx.Logger().Error(auth.ErrAuthorization, \"error\", err)\n\t\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\t\tfmt.Fprintf(w, \"Unauthorized access: %+v\", err)\n\t\t\treturn\n\t\t}\n\t\tnext.ServeHTTP(fxctx, w, r)\n\t}\n}",
"func (c *Authorizer) WithAuthorization() autorest.PrepareDecorator {\n\treturn func(p autorest.Preparer) autorest.Preparer {\n\t\treturn autorest.PreparerFunc(func(req *http.Request) (*http.Request, error) {\n\t\t\tvar err error\n\t\t\treq, err = p.Prepare(req)\n\t\t\tif err == nil {\n\t\t\t\ttoken, err := c.Token()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\n\t\t\t\treq, err = autorest.Prepare(req, autorest.WithHeader(\"Authorization\", fmt.Sprintf(\"Bearer %s\", token.AccessToken)))\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn req, err\n\t\t\t\t}\n\n\t\t\t\tauxTokens, err := c.AuxiliaryTokens()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn req, err\n\t\t\t\t}\n\n\t\t\t\tauxTokenList := make([]string, 0)\n\t\t\t\tfor _, a := range auxTokens {\n\t\t\t\t\tif a != nil && a.AccessToken != \"\" {\n\t\t\t\t\t\tauxTokenList = append(auxTokenList, fmt.Sprintf(\"%s %s\", a.TokenType, a.AccessToken))\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\treturn autorest.Prepare(req, autorest.WithHeader(\"x-ms-authorization-auxiliary\", strings.Join(auxTokenList, \", \")))\n\t\t\t}\n\n\t\t\treturn req, err\n\t\t})\n\t}\n}",
"func (ca *policyAdapter) WithAuthorization() autorest.PrepareDecorator {\n\treturn func(p autorest.Preparer) autorest.Preparer {\n\t\treturn autorest.PreparerFunc(func(r *http.Request) (*http.Request, error) {\n\t\t\tr, err := p.Prepare(r)\n\t\t\tif err != nil {\n\t\t\t\treturn r, err\n\t\t\t}\n\t\t\t// create a dummy request\n\t\t\treq, err := runtime.NewRequest(r.Context(), r.Method, r.URL.String())\n\t\t\tif err != nil {\n\t\t\t\treturn r, err\n\t\t\t}\n\t\t\t_, err = ca.pl.Do(req)\n\t\t\t// if the authentication failed due to invalid/missing credentials\n\t\t\t// return a wrapped error so the retry policy won't kick in.\n\t\t\ttype nonRetriable interface {\n\t\t\t\tNonRetriable()\n\t\t\t}\n\t\t\tvar nre nonRetriable\n\t\t\tif errors.As(err, &nre) {\n\t\t\t\treturn r, &tokenRefreshError{\n\t\t\t\t\tinner: err,\n\t\t\t\t}\n\t\t\t}\n\t\t\t// some other error\n\t\t\tif err != nil {\n\t\t\t\treturn r, err\n\t\t\t}\n\t\t\t// copy the authorization header to the real request\n\t\t\tconst authHeader = \"Authorization\"\n\t\t\tr.Header.Set(authHeader, req.Raw().Header.Get(authHeader))\n\t\t\treturn r, err\n\t\t})\n\t}\n}",
"func Authorization(ctx context.Context) (string, error) {\n\treturn fromMeta(ctx, AuthKey)\n}",
"func WithAuthorizationCheck(handler http.Handler, getAttribs RequestAttributeGetter, a authorizer.Authorizer) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {\n\t\terr := a.Authorize(getAttribs.GetAttribs(req))\n\t\tif err == nil {\n\t\t\thandler.ServeHTTP(w, req)\n\t\t\treturn\n\t\t}\n\t\tforbidden(w, req)\n\t})\n}",
"func withRights() adapter {\n\treturn func(h http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\t// No authentification to check\n\t\t\th.ServeHTTP(w, r)\n\t\t})\n\t}\n}",
"func authorize(name string) error {\n\tif distro.Get() == distro.Synology {\n\t\treturn authorizeSynology(name)\n\t}\n\treturn nil\n}",
"func (authService *AuthService) authorized(dbClient DBClient, sessionToken *apitypes.SessionToken,\n\tactionMask []bool, resourceId string) (bool, error) {\n\n\t/* Rules:\n\t\n\tA party can access a resource if the party,\n\t\thas an ACL entry for the resource; or,\n\t\tthe resource belongs to a repo or realm for which the party has an ACL entry.\n\t\n\tIn this context, a user is a party if the user is explicitly the party or if\n\tthe user belongs to a group that is explicitly the party.\n\t\n\tGroups may not belong to other groups.\n\t\n\tThe user must have the required access mode (CreateIn, Read, Write, Exec, Delete).\n\tNo access mode implies any other access mode.\n\tThe access modes have the following meanings:\n\t\tCreateIn - The party can create resources that will be owned by the target resource.\n\t\tRead - The party can obtain the contents of the target resource.\n\t\tWrite - The party can modify the contents of the target resource.\n\t\tExec - The party can compel SafeHarbor to perform the actions specified by\n\t\t\tthe target resource (e.g., execute a Dockerfile).\n\t\tDelete - The party can Delete the target resource.\n\t*/\n\t\n\tif sessionToken == nil { return false, utilities.ConstructServerError(\"No session token\") }\n\t\n\t// Identify the user.\n\tvar userId string = sessionToken.AuthenticatedUserid\n\tfmt.Println(\"userid=\", userId)\n\tvar user User\n\tvar err error\n\tuser, err = dbClient.dbGetUserByUserId(userId)\n\tif user == nil {\n\t\treturn false, utilities.ConstructServerError(\"user object cannot be identified from user id \" + userId)\n\t}\n\t\n\t// Special case: Allow user all capabilities for their own user object.\n\tif user.getId() == resourceId { return true, nil }\n\n\t// Verify that at most one field of the actionMask is true.\n\tvar nTrue = 0\n\tfor _, b := range actionMask {\n\t\tif b {\n\t\t\tif nTrue == 1 {\n\t\t\t\treturn false, utilities.ConstructUserError(\"More than one field in mask may not be true\")\n\t\t\t}\n\t\t\tnTrue++\n\t\t}\n\t}\n\t\n\t// Check if the user or a group that the user belongs to has the permission\n\t// that is specified by the actionMask.\n\tvar party Party = user // start with the user.\n\tvar resource Resource\n\tresource, err = dbClient.getResource(resourceId)\n\tif err != nil { return false, err }\n\tif resource == nil {\n\t\treturn false, utilities.ConstructUserError(\"Resource with Id \" + resourceId + \" not found\")\n\t}\n\tvar groupIds []string = user.getGroupIds()\n\tvar groupIndex = -1\n\tfor { // the user, and then each group that the user belongs to...\n\t\t// See if the party (user or group) has an ACL entry for the resource.\n\t\tvar partyCanAccessResourceDirectoy bool\n\t\tpartyCanAccessResourceDirectoy, err =\n\t\t\tauthService.partyHasAccess(dbClient, party, actionMask, resource)\n\t\tif err != nil { return false, err }\n\t\tif partyCanAccessResourceDirectoy { return true, nil }\n\t\t\n\t\t// See if any of the party's parent resources have access.\n\t\tvar parentId string = resource.getParentId()\n\t\tif parentId != \"\" {\n\t\t\tvar parent Resource\n\t\t\tparent, err = dbClient.getResource(parentId)\n\t\t\tif err != nil { return false, err }\n\t\t\tvar parentHasAccess bool\n\t\t\tparentHasAccess, err = authService.partyHasAccess(dbClient, party, actionMask, parent)\n\t\t\tif err != nil { return false, err }\n\t\t\tif parentHasAccess { return true, nil }\n\t\t}\n\t\t\n\t\tgroupIndex++\n\t\tif groupIndex == len(groupIds) { return false, nil }\n\t\tvar err error\n\t\tparty, err = 
dbClient.getParty(groupIds[groupIndex]) // check next group\n\t\tif err != nil { return false, err }\n\t}\n\treturn false, nil // no access rights found\n}",
"func AuthorizationMiddleware(c *gin.Context) {\n\tauth := strings.SplitN(c.GetHeader(\"Authorization\"), \" \", 2)\n\n\tif len(auth) != 2 || auth[0] != \"Bearer\" {\n\t\tc.Header(\"WWW-Authenticate\", \"Bearer\")\n\t\tc.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{\n\t\t\t\"error\": \"The required authorization heads were not present in the request.\",\n\t\t})\n\n\t\treturn\n\t}\n\n\t// Try to match the request against the global token for the Daemon, regardless\n\t// of the permission type. If nothing is matched we will fall through to the Panel\n\t// API to try and validate permissions for a server.\n\tif auth[1] == config.Get().AuthenticationToken {\n\t\tc.Next()\n\n\t\treturn\n\t}\n\n\tc.AbortWithStatusJSON(http.StatusForbidden, gin.H{\n\t\t\"error\": \"You are not authorized to access this endpoint.\",\n\t})\n}",
"func (ba *ExplicitBearerAuthorizer) WithAuthorization() autorest.PrepareDecorator {\n\treturn func(p autorest.Preparer) autorest.Preparer {\n\t\treturn autorest.PreparerFunc(func(r *http.Request) (*http.Request, error) {\n\t\t\tr, err := p.Prepare(r)\n\t\t\tif err == nil {\n\t\t\t\treturn autorest.Prepare(r, autorest.WithHeader(\"Authorization\", fmt.Sprintf(\"Bearer %s\", ba.token)))\n\t\t\t}\n\t\t\treturn r, err\n\t\t})\n\t}\n}",
"func withAuthorizeID(id int) authorizeOption {\n\treturn func(m *AuthorizeMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *Authorize\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Authorize, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = fmt.Errorf(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Authorize.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}",
"func WithPrivileged(images ...string) Option {\n\treturn WithTransform(\n\t\ttransformPrivilege(images...),\n\t)\n}",
"func (aec *AddEntryCmd) RequiresAuthorization() {}",
"func withAuthorize(node *Authorize) authorizeOption {\n\treturn func(m *AuthorizeMutation) {\n\t\tm.oldValue = func(context.Context) (*Authorize, error) {\n\t\t\treturn node, nil\n\t\t}\n\t\tm.id = &node.ID\n\t}\n}",
"func NewAuthorization(domain, audience string) Authorization {\n\tlog.Debug(\"NewAuthorization started\")\n\tauth := Authorization{\n\t\tdomain: domain,\n\t\taudience: audience,\n\t}\n\n\tauth.middleware = jwtmiddleware.New(jwtmiddleware.Options{\n\t\tValidationKeyGetter: auth.validateToken,\n\t\tSigningMethod: jwt.SigningMethodRS256,\n\t\tErrorHandler: NotAuthorizedError,\n\t})\n\n\tlog.Debug(\"NewAuthorziation finished\")\n\treturn auth\n}",
"func (m *PoliciesRequestBuilder) AuthorizationPolicy()(*AuthorizationPolicyRequestBuilder) {\n return NewAuthorizationPolicyRequestBuilderInternal(m.pathParameters, m.requestAdapter)\n}",
"func (o MethodOutput) Authorization() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Method) pulumi.StringOutput { return v.Authorization }).(pulumi.StringOutput)\n}",
"func AuthorizationMiddleware() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tID := c.Param(\"id\")\n\t\tbearer := c.Request.Header.Get(\"Authorization\")\n\n\t\tAuthorID := routes.AllBlogs[ID].AuthorID\n\n\t\tif bearer == AuthorID {\n\t\t\tc.Next()\n\t\t} else {\n\t\t\tc.JSON(400, \"Not Authorized\")\n\t\t\tc.Abort()\n\t\t}\n\t}\n}",
"func authorize(ctx context.Context) error {\n\t// Fetch Bearer token\n\t// In case it is provided and is correct, consider auth completed\n\t_, err := fetchJWTToken(ctx)\n\n\treturn err\n}",
"func getAuthorization(acmeClient *acme.Client, hostname string) (*acme.Authorization, error) {\n\tctx, cancel := context.WithTimeout(context.Background(), 1*time.Minute)\n\tdefer cancel()\n\n\tauthorization, err := acmeClient.Authorize(ctx, hostname)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tswitch authorization.Status {\n\tcase acme.StatusValid:\n\tcase acme.StatusPending:\n\t\treturn authorization, nil\n\tcase acme.StatusProcessing:\n\t\treturn nil, fmt.Errorf(\"certificate authorization already in progress\")\n\tcase acme.StatusInvalid:\n\tcase acme.StatusRevoked:\n\tcase acme.StatusUnknown:\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"invalid certificate authorization status: %v\", authorization.Status)\n\t}\n\n\treturn authorization, nil\n}",
"func (b *HTTPSDConfigApplyConfiguration) WithAuthorization(value *monitoringv1.SafeAuthorizationApplyConfiguration) *HTTPSDConfigApplyConfiguration {\n\tb.Authorization = value\n\treturn b\n}",
"func (m Middleware) Authorized(h http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t// Retrieve secret environment variable\n\t\thmacSecret := []byte(os.Getenv(\"JWT_SECRET\"))\n\n\t\t// Check request header for a Token\n\t\t// Check if the length of the Authorization value is greater than 0\n\t\tif len(r.Header.Get(\"Authorization\")) > 0 {\n\n\t\t\t// Get JWT from the Authorization header. As it gives us\n\t\t\t// bearer JWT, we need to split the JWT.\n\t\t\tauthSlice := strings.Split(r.Header.Get(\"Authorization\"), \" \")\n\t\t\tjwtToken := authSlice[1]\n\n\t\t\t// Parse token in header\n\t\t\ttoken, err := jwt.Parse(jwtToken, func(token *jwt.Token) (interface{}, error) {\n\t\t\t\t// Check that the algorithm used in parsed token is the same as what was issued by the server\n\t\t\t\tif _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {\n\t\t\t\t\treturn nil, fmt.Errorf(\"Unexpected signing method: %v\", token.Header[\"alg\"])\n\t\t\t\t}\n\t\t\t\t// Return the secret and nil error\n\t\t\t\treturn hmacSecret, nil\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\t// If token is valid, server the argument handler\n\t\t\tif token.Valid {\n\t\t\t\th.ServeHTTP(w, r)\n\t\t\t}\n\t\t} else {\n\t\t\t// No token in header\n\t\t\thttp.Error(w, \"Not Authorized\", http.StatusForbidden)\n\t\t\treturn\n\t\t}\n\n\t})\n}",
"func GetAuthorization(req *http.Request, auths []*Authorization) *Authorization {\n\tfor _, auth := range auths {\n\t\tif auth.Applies(req) {\n\t\t\treturn auth\n\t\t}\n\t}\n\treturn nil\n}",
"func (c *Admission) authorizeResource(ctx context.Context, object metav1.Object, kind metav1.GroupVersionKind) (bool, string, error) {\n\t// @step: create a authorization context\n\tcx := &api.Context{\n\t\tCache: c.resourceCache,\n\t\tClient: c.client,\n\t\tGroup: kind,\n\t\tObject: object,\n\t\tPrefix: c.config.ControllerName,\n\t}\n\n\t// @step: iterate the authorizers and fail on first refusal\n\tfor i, provider := range c.providers {\n\t\tvar namespace *v1.Namespace\n\n\t\tif object.GetNamespace() != \"\" {\n\t\t\tn, err := utils.GetCachedNamespace(c.client, c.resourceCache, object.GetNamespace())\n\t\t\tif err != nil {\n\t\t\t\treturn false, \"\", err\n\t\t\t}\n\t\t\tnamespace = n\n\t\t}\n\n\t\t// @check if this authorizer is listening to this type\n\t\tif !provider.FilterOn().Matched(kind, namespace) {\n\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\"group\": kind.Group,\n\t\t\t\t\"id\": utils.GetTRX(ctx),\n\t\t\t\t\"kind\": kind.Kind,\n\t\t\t\t\"namespace\": object.GetNamespace(),\n\t\t\t\t\"provider\": provider.Name(),\n\t\t\t}).Debug(\"provider is not filtering on this object\")\n\n\t\t\tcontinue\n\t\t}\n\n\t\t// @step: pass the object into the provider for authorization\n\t\terrs := func() field.ErrorList {\n\t\t\tnow := time.Now()\n\t\t\t// @metric record the time taken per provider\n\t\t\tdefer admissionAuthorizerLatencyMetric.WithLabelValues(provider.Name(), fmt.Sprintf(\"%d\", i)).Observe(time.Since(now).Seconds())\n\n\t\t\treturn provider.Admit(ctx, cx)\n\t\t}()\n\n\t\t// @check if we found any error from the provider\n\t\tif len(errs) > 0 {\n\t\t\tadmissionAuthorizerActionMetric.WithLabelValues(provider.Name(), actionDenied).Inc()\n\n\t\t\t// @check if it's an internal provider error and whether we should skip them\n\t\t\tskipme := false\n\t\t\tfor _, x := range errs {\n\t\t\t\tif x.Type == field.ErrorTypeInternal {\n\t\t\t\t\t// @check if the provider is asking as to ignore internal failures\n\t\t\t\t\tif provider.FilterOn().IgnoreOnFailure {\n\t\t\t\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\t\t\t\"error\": x.Detail,\n\t\t\t\t\t\t\t\"group\": kind.Group,\n\t\t\t\t\t\t\t\"id\": utils.GetTRX(ctx),\n\t\t\t\t\t\t\t\"kind\": kind.Kind,\n\t\t\t\t\t\t\t\"name\": object.GetGenerateName(),\n\t\t\t\t\t\t\t\"namespace\": object.GetNamespace(),\n\t\t\t\t\t\t}).Error(\"internal provider error, skipping the provider result\")\n\n\t\t\t\t\t\tskipme = true\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif skipme {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tvar reasons []string\n\t\t\tfor _, x := range errs {\n\t\t\t\treasons = append(reasons, fmt.Sprintf(\"%s=%v : %s\", x.Field, x.BadValue, x.Detail))\n\t\t\t}\n\n\t\t\treturn true, strings.Join(reasons, \",\"), nil\n\t\t}\n\n\t\tadmissionAuthorizerActionMetric.WithLabelValues(provider.Name(), actionAccepted)\n\t}\n\n\treturn false, \"\", nil\n}",
"func basicAuth(f ViewFunc) ViewFunc {\n\tif !pubConf.Auth.Enabled {\n\t\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\t\tf(w, r)\n\t\t}\n\t}\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tbasicAuthPrefix := \"Basic \"\n\t\t// Parse request header\n\t\tauth := r.Header.Get(\"Authorization\")\n\t\tif strings.HasPrefix(auth, basicAuthPrefix) {\n\t\t\t// Decoding authentication information.\n\t\t\tpayload, err := base64.StdEncoding.DecodeString(\n\t\t\t\tauth[len(basicAuthPrefix):],\n\t\t\t)\n\t\t\tif err == nil {\n\t\t\t\tpair := bytes.SplitN(payload, []byte(\":\"), 2)\n\t\t\t\tif len(pair) == 2 && bytes.Equal(pair[0], []byte(pubConf.Auth.Username)) &&\n\t\t\t\t\tbytes.Equal(pair[1], []byte(pubConf.Auth.Password)) {\n\t\t\t\t\tf(w, r)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t// Authorization fail, return 401 Unauthorized.\n\t\tw.Header().Set(\"WWW-Authenticate\", `Basic realm=\"Restricted\"`)\n\t\tw.WriteHeader(http.StatusUnauthorized)\n\t}\n}",
"func AuthorizationEnabled(ctx context.Context) bool {\n\t_, ok := NewUserInfoFromContext(ctx)\n\treturn ok\n}",
"func setAuthorization(req *http.Request, apiKey string) {\n\treq.SetBasicAuth(apiKey, \"\")\n}",
"func Authorized() runtime.Authorizer {\n\treturn runtime.AuthorizerFunc(func(_ *http.Request, _ interface{}) error { return nil })\n}",
"func (c *authorizer) Admit(ctx context.Context, cx *api.Context) field.ErrorList {\n\tvar errs field.ErrorList\n\n\t// @step: decode the object into an object\n\t// @TODO there probably a better way of doing the, perhaps just passing the object??\n\terr := func() error {\n\t\tobj, err := marshal(cx.Object)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// @note: a somewhat hacky but's its due to the fact the groupkind is omitted\n\t\tif _, found := obj[\"apiVersion\"]; !found {\n\t\t\tobj[\"apiVersion\"] = fmt.Sprintf(\"%s/%s\", cx.Group.Group, cx.Group.Version)\n\t\t}\n\t\tif _, found := obj[\"kind\"]; !found {\n\t\t\tobj[\"kind\"] = cx.Group.Kind\n\t\t}\n\n\t\t// @step: create the runtime\n\t\tvm := otto.New()\n\t\tfor k, v := range c.config.Options {\n\t\t\tvm.Set(k, v)\n\t\t}\n\t\tvm.Set(\"cache\", cx.Cache)\n\t\tvm.Set(\"object\", obj)\n\n\t\tif cx.Object.GetNamespace() != \"\" {\n\t\t\t// @step: get namespace for this object\n\t\t\tnamespace, err := utils.GetCachedNamespace(cx.Client, cx.Cache, cx.Object.GetNamespace())\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tns, err := marshal(namespace)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tvm.Set(\"namespace\", ns)\n\t\t}\n\n\t\t// @step: setup some functions\n\t\tvm.Set(\"log\", func(call otto.FunctionCall) otto.Value {\n\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\"scripts\": c.config.Name,\n\t\t\t}).Info(call.Argument(0).String())\n\n\t\t\treturn otto.Value{}\n\t\t})\n\n\t\tvm.Set(\"hasPrefix\", hasPrefix)\n\t\tvm.Set(\"hasSuffix\", hasSuffix)\n\t\tvm.Set(\"inDomain\", inDomain)\n\n\t\tvm.Set(\"deny\", func(call otto.FunctionCall) otto.Value {\n\t\t\tpath := call.Argument(0).String()\n\t\t\tmessage := call.Argument(1).String()\n\t\t\tvalue := call.Argument(2).String()\n\t\t\terrs = append(errs, field.Invalid(field.NewPath(path), value, message))\n\n\t\t\treturn otto.Value{}\n\t\t})\n\n\t\treturn c.runSafely(ctx, vm, c.config.Script, c.config.Timeout)\n\t}()\n\tif err != nil {\n\t\treturn append(errs, field.InternalError(field.NewPath(\"\"), err))\n\t}\n\n\treturn errs\n}",
"func NewAuthorization(h func() hash.Hash) *Authorization {\n\treturn &Authorization{\n\t\tsalt: make([]byte, 8),\n\t\tsignature: make([]byte, h().Size()),\n\t\trawMsg: make([]byte, 0, MaxMsgSize),\n\n\t\tPayload: make(map[string]interface{}),\n\t\tH: h,\n\t}\n}",
"func createAuthorization(roleRef, userRef string) string {\n\treturn fmt.Sprintf(`{\n \"type\": \"Authorization\",\n \"user\": \"%s\",\n \"role\": \"%s\",\n \"target\": \"%s\"\n}`, userRef, roleRef, userRef)\n}",
"func (o *DataPlaneAPI) Authorizer() runtime.Authorizer {\n\n\treturn o.APIAuthorizer\n\n}",
"func CreateAuthorizationEndpoint(c echo.Context) error {\n\tvar req *a.AuthorizationRequest = new(a.AuthorizationRequest)\n\n\t// this endpoint is secured by a master token i.e. a shared secret between\n\t// the service and the client, NOT a JWT token !!\n\tbearer := GetBearerToken(c)\n\tif bearer != env.GetString(\"MASTER_KEY\", \"\") {\n\t\treturn c.NoContent(http.StatusUnauthorized)\n\t}\n\n\terr := c.Bind(req)\n\tif err != nil {\n\t\treturn api.ErrorResponse(c, http.StatusInternalServerError, err)\n\t}\n\n\ttoken, err := CreateJWTToken(req.Secret, req.Realm, req.ClientID, req.UserID, req.Scope, req.Duration)\n\tif err != nil {\n\t\treturn api.ErrorResponse(c, http.StatusInternalServerError, err)\n\t}\n\n\tnow := util.Timestamp()\n\tauthorization := Authorization{\n\t\tClientID: req.ClientID,\n\t\tName: req.Realm,\n\t\tToken: token,\n\t\tTokenType: req.ClientType,\n\t\tUserID: req.UserID,\n\t\tScope: req.Scope,\n\t\tExpires: now + (req.Duration * 86400), // Duration days from now\n\t\tAuthType: AuthTypeJWT,\n\t\tCreated: now,\n\t\tUpdated: now,\n\t}\n\terr = CreateAuthorization(appengine.NewContext(c.Request()), &authorization)\n\tif err != nil {\n\t\treturn api.ErrorResponse(c, http.StatusInternalServerError, err)\n\t}\n\n\tresp := a.AuthorizationResponse{\n\t\tRealm: req.Realm,\n\t\tClientID: req.ClientID,\n\t\tToken: token,\n\t}\n\treturn api.StandardResponse(c, http.StatusCreated, &resp)\n}",
"func (o *HandleGetAboutUsingGETParams) SetAuthorization(authorization string) {\n\to.Authorization = authorization\n}",
"func newAuthorizeMutation(c config, op Op, opts ...authorizeOption) *AuthorizeMutation {\n\tm := &AuthorizeMutation{\n\t\tconfig: c,\n\t\top: op,\n\t\ttyp: TypeAuthorize,\n\t\tclearedFields: make(map[string]struct{}),\n\t}\n\tfor _, opt := range opts {\n\t\topt(m)\n\t}\n\treturn m\n}",
"func (dfc DeviceFlowConfig) Authorizer() (autorest.Authorizer, error) {\n\tspToken, err := dfc.ServicePrincipalToken()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to get oauth token from device flow: %v\", err)\n\t}\n\treturn autorest.NewBearerAuthorizer(spToken), nil\n}",
"func (c *myClient) createAuthorization(roleRef, userRef string, wait bool) (results map[string]interface{}, err error) {\n\tnamespace := \"authorization\"\n\n\turl := fmt.Sprintf(\"%s\", namespace)\n\tpostBody := createAuthorization(roleRef, userRef)\n\taction, _, err := c.httpPost(url, postBody)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif wait {\n\t\tc.jobWaiter(action)\n\t}\n\treturn action, err\n}",
"func isAuthorized(endpoint func(http.ResponseWriter, *http.Request)) http.Handler {\n\treturn http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) {\n\t\tif request.Header[\"Token\"] != nil {\n\t\t\ttoken, err := jwt.Parse(request.Header[\"Token\"][0], func(token *jwt.Token) (interface{}, error) {\n\t\t\t\tif _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {\n\t\t\t\t\treturn nil, fmt.Errorf(\"Theer an error\")\n\t\t\t\t}\n\t\t\t\treturn mySecretKey, nil\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(\"error while parsing token : \", err)\n\t\t\t}\n\t\t\tif token.Valid {\n\t\t\t\tendpoint(writer, request)\n\t\t\t}\n\t\t} else {\n\t\t\tfmt.Fprintf(writer, \"Not Authorized\")\n\t\t}\n\t})\n}",
"func (dfc DeviceFlowConfig) Authorizer() (autorest.Authorizer, error) {\n\toauthClient := &autorest.Client{}\n\toauthConfig, err := adal.NewOAuthConfig(dfc.AADEndpoint, dfc.TenantID)\n\tdeviceCode, err := adal.InitiateDeviceAuth(oauthClient, *oauthConfig, dfc.ClientID, dfc.Resource)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to start device auth flow: %s\", err)\n\t}\n\n\tlog.Println(*deviceCode.Message)\n\n\ttoken, err := adal.WaitForUserCompletion(oauthClient, deviceCode)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to finish device auth flow: %s\", err)\n\t}\n\n\tspToken, err := adal.NewServicePrincipalTokenFromManualToken(*oauthConfig, dfc.ClientID, dfc.Resource, *token)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to get oauth token from device flow: %v\", err)\n\t}\n\n\treturn autorest.NewBearerAuthorizer(spToken), nil\n}",
"func NewAuthorizationWithDefaults() *Authorization {\n\tthis := Authorization{}\n\tvar status string = \"active\"\n\tthis.Status = &status\n\treturn &this\n}",
"func (tmpl *APIClientTemplate) BuildWithAuthorization(authorization string) APIClient {\n\tac := apiClient(*tmpl)\n\tac.Authorization = authorization\n\treturn &ac\n}",
"func (c Client) HandleAuthorization(handleAccess func(access Access, err error)) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tquery := r.URL.Query()\n\n\t\thandleAccess(c.getAccessToken(query.Get(\"code\")))\n\t}\n}",
"func (r *Controller) authorized() gin.HandlerFunc {\n\n\treturn func(c *gin.Context) {\n\n\t\tbearToken := c.GetHeader(\"Authorization\")\n\n\t\tstrArr := strings.Split(bearToken, \" \")\n\t\tif len(strArr) != 2 {\n\t\t\tc.AbortWithStatus(http.StatusForbidden)\n\t\t\treturn\n\t\t}\n\n\t\ttk, err := r.UserToken.VerifyToken(strArr[1])\n\t\tif err != nil {\n\t\t\tc.AbortWithStatus(http.StatusForbidden)\n\t\t\treturn\n\t\t}\n\n\t\tif _, ok := tk.Claims.(jwt.Claims); !ok && !tk.Valid {\n\t\t\tc.AbortWithStatus(http.StatusForbidden)\n\t\t\treturn\n\t\t}\n\n\t}\n}",
"func (o *Operation) SecuredWith(name string, scopes ...string) *Operation {\n\to.Operation.SecuredWith(name, scopes...)\n\treturn o\n}",
"func NewAuthorization(req *http.Request) (a *Authorization, err error) {\n\tcontent := req.Header.Get(headKeyAuthorization)\n\tif len(content) > 0 {\n\t\treturn newAuthorizationByHeader(content)\n\t}\n\treturn newAuthorizationByQueryValues(req.URL.Query())\n}",
"func Privileged(f func(*testing.T)) func(*testing.T) {\n\treturn f\n}",
"func (o *GetSellerServicesUsingGETParams) SetAuthorization(authorization string) {\n\to.Authorization = authorization\n}",
"func authWrapper(handler http.HandlerFunc, secrets auth.SecretProvider, host string) http.HandlerFunc {\n\tauthenticator := &auth.BasicAuth{Realm: host, Secrets: secrets}\n\treturn auth.JustCheck(authenticator, handler)\n}",
"func WithAuth(auth BasicAuth) (Option, error) {\n\tauthHeaderValue, err := auth.HeaderValue()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to retrieve auth header value: %w\", err)\n\t}\n\n\treturn func(c *Client) {\n\t\tnext := c.doFunc\n\t\tc.doFunc = func(c *Client, req *http.Request) (*http.Response, error) {\n\t\t\treq.Header.Set(\"Authorization\", authHeaderValue)\n\t\t\treturn next(c, req)\n\t\t}\n\t}, nil\n}",
"func WrapAuthorize(hfn http.Handler, routeName string) http.HandlerFunc {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\n\t\turlValues := r.URL.Query()\n\n\t\trefStr := gorillaContext.Get(r, \"str\").(stores.Store)\n\t\trefRoles := gorillaContext.Get(r, \"auth_roles\").([]string)\n\t\tserviceToken := gorillaContext.Get(r, \"auth_service_token\").(string)\n\n\t\t// Check first if service token is used\n\t\tif serviceToken != \"\" && serviceToken == urlValues.Get(\"key\") {\n\t\t\thfn.ServeHTTP(w, r)\n\t\t\treturn\n\t\t}\n\n\t\tif auth.Authorize(routeName, refRoles, refStr) {\n\t\t\thfn.ServeHTTP(w, r)\n\t\t} else {\n\t\t\terr := APIErrorForbidden()\n\t\t\trespondErr(w, err)\n\t\t}\n\t})\n}",
"func authorize(ctx context.Context) error {\n\tmd, ok := metadata.FromIncomingContext(ctx)\n\tif !ok {\n\t\treturn status.Errorf(codes.InvalidArgument, \"Retrieving metadata is failed\")\n\t}\n\n\tauthHeader, ok := md[\"token\"]\n\tif !ok {\n\t\treturn status.Errorf(codes.Unauthenticated, \"Authorization token is not supplied\")\n\t}\n\n\ttoken := authHeader[0]\n\t// validateToken function validates the token\n\terr := validateToken(token)\n\tif err != nil {\n\t\treturn status.Errorf(codes.Unauthenticated, err.Error())\n\t}\n\treturn nil\n}",
"func (o BackendCredentialsOutput) Authorization() BackendCredentialsAuthorizationPtrOutput {\n\treturn o.ApplyT(func(v BackendCredentials) *BackendCredentialsAuthorization { return v.Authorization }).(BackendCredentialsAuthorizationPtrOutput)\n}",
"func authorize(ctx context.Context) error {\n\tmd, ok := metadata.FromIncomingContext(ctx)\n\tif !ok {\n\t\treturn status.Errorf(codes.InvalidArgument, \"Retrieving metadata is failed\")\n\t}\n\n\tauthHeader, ok := md[\"authorization\"]\n\tif !ok {\n\t\treturn status.Errorf(codes.Unauthenticated, \"Authorization token is not supplied\")\n\t}\n\n\ttoken := authHeader[0]\n\n\t// validateToken function validates the token\n\n\tif token != \"jwt-token\" {\n\t\treturn status.Errorf(codes.Unauthenticated, \"Invalid auth token\")\n\t}\n\treturn nil\n}",
"func RegistryAuthenticationPrivilegedFunc(cli Cli, index *registrytypes.IndexInfo, cmdName string) types.RequestPrivilegeFunc {\n\treturn func() (string, error) {\n\t\tfmt.Fprintf(cli.Out(), \"\\nPlease login prior to %s:\\n\", cmdName)\n\t\tindexServer := registry.GetAuthConfigKey(index)\n\t\tisDefaultRegistry := indexServer == registry.IndexServer\n\t\tauthConfig, err := GetDefaultAuthConfig(cli.ConfigFile(), true, indexServer, isDefaultRegistry)\n\t\tif err != nil {\n\t\t\tfmt.Fprintf(cli.Err(), \"Unable to retrieve stored credentials for %s, error: %s.\\n\", indexServer, err)\n\t\t}\n\t\terr = ConfigureAuth(cli, \"\", \"\", &authConfig, isDefaultRegistry)\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\treturn registrytypes.EncodeAuthConfig(authConfig)\n\t}\n}",
"func Unauthorizedf(format string, args ...interface{}) error {\n\treturn &unauthorized{wrap(nil, format, \"\", args...)}\n}",
"func DefaultEndpointAuthorizationsForHelpDeskRole(volumeBrowsingAuthorizations bool) portainer.Authorizations {\n\tauthorizations := map[portainer.Authorization]bool{\n\t\tportainer.OperationDockerContainerArchiveInfo: true,\n\t\tportainer.OperationDockerContainerList: true,\n\t\tportainer.OperationDockerContainerChanges: true,\n\t\tportainer.OperationDockerContainerInspect: true,\n\t\tportainer.OperationDockerContainerTop: true,\n\t\tportainer.OperationDockerContainerLogs: true,\n\t\tportainer.OperationDockerContainerStats: true,\n\t\tportainer.OperationDockerImageList: true,\n\t\tportainer.OperationDockerImageSearch: true,\n\t\tportainer.OperationDockerImageGetAll: true,\n\t\tportainer.OperationDockerImageGet: true,\n\t\tportainer.OperationDockerImageHistory: true,\n\t\tportainer.OperationDockerImageInspect: true,\n\t\tportainer.OperationDockerNetworkList: true,\n\t\tportainer.OperationDockerNetworkInspect: true,\n\t\tportainer.OperationDockerVolumeList: true,\n\t\tportainer.OperationDockerVolumeInspect: true,\n\t\tportainer.OperationDockerSwarmInspect: true,\n\t\tportainer.OperationDockerNodeList: true,\n\t\tportainer.OperationDockerNodeInspect: true,\n\t\tportainer.OperationDockerServiceList: true,\n\t\tportainer.OperationDockerServiceInspect: true,\n\t\tportainer.OperationDockerServiceLogs: true,\n\t\tportainer.OperationDockerSecretList: true,\n\t\tportainer.OperationDockerSecretInspect: true,\n\t\tportainer.OperationDockerConfigList: true,\n\t\tportainer.OperationDockerConfigInspect: true,\n\t\tportainer.OperationDockerTaskList: true,\n\t\tportainer.OperationDockerTaskInspect: true,\n\t\tportainer.OperationDockerTaskLogs: true,\n\t\tportainer.OperationDockerPluginList: true,\n\t\tportainer.OperationDockerDistributionInspect: true,\n\t\tportainer.OperationDockerPing: true,\n\t\tportainer.OperationDockerInfo: true,\n\t\tportainer.OperationDockerVersion: true,\n\t\tportainer.OperationDockerEvents: true,\n\t\tportainer.OperationDockerSystem: true,\n\t\tportainer.OperationDockerAgentPing: true,\n\t\tportainer.OperationDockerAgentList: true,\n\t\tportainer.OperationDockerAgentHostInfo: true,\n\t\tportainer.OperationPortainerStackList: true,\n\t\tportainer.OperationPortainerStackInspect: true,\n\t\tportainer.OperationPortainerStackFile: true,\n\t\tportainer.OperationPortainerWebhookList: true,\n\t\tportainer.EndpointResourcesAccess: true,\n\t}\n\n\tif volumeBrowsingAuthorizations {\n\t\tauthorizations[portainer.OperationDockerAgentBrowseGet] = true\n\t\tauthorizations[portainer.OperationDockerAgentBrowseList] = true\n\t}\n\n\treturn authorizations\n}",
"func (o *HighLoadCup2020API) Authorizer() runtime.Authorizer {\n\treturn nil\n}",
"func NewAuthorizationHandler(\n\tmodel *acl.Model,\n\tsecretResolver acl.SecretResolverFunc,\n\tpolicyResolver acl.PolicyResolverFunc) AuthorizationHandler {\n\n\taclResolver, _ := acl.NewResolver(&acl.ResolverConfig{\n\t\tModel: model,\n\t\tSecretResolver: secretResolver,\n\t\tPolicyResolver: policyResolver,\n\t})\n\n\treturn &authorizationHandler{\n\t\tresolver: aclResolver,\n\t}\n}",
"func PersistAuthorization(persistAuthorization bool) func(*Config) {\n\treturn func(c *Config) {\n\t\tc.PersistAuthorization = persistAuthorization\n\t}\n}",
"func PersistAuthorization(persistAuthorization bool) func(*Config) {\n\treturn func(c *Config) {\n\t\tc.PersistAuthorization = persistAuthorization\n\t}\n}",
"func (c *Cfg) Authorizer(resource string) autorest.Authorizer {\n\tc.mu.Lock()\n\tdefer c.mu.Unlock()\n\tauthz := c.authz[resource]\n\tif authz == nil {\n\t\tauthz = c.newAuthz(resource)\n\t\tif c.authz == nil {\n\t\t\tc.authz = make(map[string]autorest.Authorizer)\n\t\t}\n\t\tc.authz[resource] = authz\n\t}\n\treturn authz\n}",
"func AuthFunc(ctx context.Context) (context.Context, error) {\n\treturn ctx, nil\n}",
"func withAuth(node *Auth) authOption {\n\treturn func(m *AuthMutation) {\n\t\tm.oldValue = func(context.Context) (*Auth, error) {\n\t\t\treturn node, nil\n\t\t}\n\t\tm.id = &node.ID\n\t}\n}",
"func requireBasicAuth(\n\tcheckFunc func(context.Context, *http.Request, string, string, string) error,\n\toptional bool,\n) func(next http.Handler) http.Handler {\n\treturn func(next http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tauthenticated := false\n\n\t\t\tif checkFunc != nil {\n\t\t\t\taps := chi.URLParam(r, apsParamName)\n\t\t\t\tusername, password, _ := r.BasicAuth()\n\t\t\t\tif err := checkFunc(r.Context(), r, aps, username, password); err == nil {\n\t\t\t\t\tauthenticated = true\n\t\t\t\t}\n\t\t\t} else if optional {\n\t\t\t\tauthenticated = true\n\t\t\t}\n\n\t\t\tif !authenticated {\n\t\t\t\treqHost := r.Host\n\t\t\t\tif host, _, err := net.SplitHostPort(reqHost); err == nil {\n\t\t\t\t\treqHost = host\n\t\t\t\t}\n\n\t\t\t\tw.Header().Set(wwwAuthenticateHeader, fmt.Sprintf(`Basic realm=\"estserver@%s\"`,\n\t\t\t\t\turl.QueryEscape(reqHost)))\n\t\t\t\terrAuthRequired.Write(w)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tnext.ServeHTTP(w, r)\n\t\t})\n\t}\n}",
"func (a authorizer) Authorize(ctx context.Context, id bakery.Identity, ops []bakery.Op) (allowed []bool, caveats []checkers.Caveat, err error) {\n\tallowed = make([]bool, len(ops))\n\tfor i := range allowed {\n\t\tallowed[i] = true\n\t}\n\tcaveats = []checkers.Caveat{{\n\t\tLocation: a.thirdPartyLocation,\n\t\tCondition: \"access-allowed\",\n\t}}\n\treturn\n}",
"func New(opts ...Option) (Authorizerd, error) {\n\tvar (\n\t\tprov = &authorizer{\n\t\t\tcache: gache.New(),\n\t\t}\n\t\terr error\n\n\t\tpubkeyProvider pubkey.Provider\n\t\tjwkProvider jwk.Provider\n\t)\n\n\tfor _, opt := range append(defaultOptions, opts...) {\n\t\tif err = opt(prov); err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"error creating authorizerd\")\n\t\t}\n\t}\n\n\tif !prov.disablePubkeyd {\n\t\tif prov.pubkeyd, err = pubkey.New(\n\t\t\tpubkey.WithAthenzURL(prov.athenzURL),\n\t\t\tpubkey.WithSysAuthDomain(prov.pubkeySysAuthDomain),\n\t\t\tpubkey.WithEtagExpTime(prov.pubkeyEtagExpTime),\n\t\t\tpubkey.WithEtagFlushDuration(prov.pubkeyEtagFlushDur),\n\t\t\tpubkey.WithRefreshDuration(prov.pubkeyRefreshDuration),\n\t\t\tpubkey.WithErrRetryInterval(prov.pubkeyErrRetryInterval),\n\t\t\tpubkey.WithHTTPClient(prov.client),\n\t\t); err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"error create pubkeyd\")\n\t\t}\n\n\t\tpubkeyProvider = prov.pubkeyd.GetProvider()\n\t}\n\n\tif !prov.disablePolicyd {\n\t\tif prov.policyd, err = policy.New(\n\t\t\tpolicy.WithExpireMargin(prov.policyExpireMargin),\n\t\t\tpolicy.WithEtagFlushDuration(prov.policyEtagFlushDur),\n\t\t\tpolicy.WithEtagExpTime(prov.policyEtagExpTime),\n\t\t\tpolicy.WithAthenzURL(prov.athenzURL),\n\t\t\tpolicy.WithAthenzDomains(prov.athenzDomains...),\n\t\t\tpolicy.WithRefreshDuration(prov.policyRefreshDuration),\n\t\t\tpolicy.WithErrRetryInterval(prov.policyErrRetryInterval),\n\t\t\tpolicy.WithHTTPClient(prov.client),\n\t\t\tpolicy.WithPubKeyProvider(prov.pubkeyd.GetProvider()),\n\t\t); err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"error create policyd\")\n\t\t}\n\t}\n\n\tif !prov.disableJwkd {\n\t\tif prov.jwkd, err = jwk.New(\n\t\t\tjwk.WithAthenzURL(prov.athenzURL),\n\t\t\tjwk.WithRefreshDuration(prov.jwkRefreshDuration),\n\t\t\tjwk.WithErrRetryInterval(prov.jwkErrRetryInterval),\n\t\t\tjwk.WithHTTPClient(prov.client),\n\t\t); err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"error create jwkd\")\n\t\t}\n\n\t\tjwkProvider = prov.jwkd.GetProvider()\n\t}\n\n\tprov.roleProcessor = role.New(\n\t\trole.WithPubkeyProvider(pubkeyProvider),\n\t\trole.WithJWKProvider(jwkProvider))\n\n\treturn prov, nil\n}",
"func (o *ExportUsingGETParams) SetAuthorization(authorization string) {\n\to.Authorization = authorization\n}",
"func (*OpenconfigOfficeAp_System_Aaa_Authorization_Config_AuthorizationMethod_Union_String) Is_OpenconfigOfficeAp_System_Aaa_Authorization_Config_AuthorizationMethod_Union() {\n}",
"func withAuthID(id int) authOption {\n\treturn func(m *AuthMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *Auth\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Auth, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = fmt.Errorf(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Auth.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}",
"func WithAuth(token string) RequestModifier {\n\treturn func(req *Request) {\n\t\treq.Header[\"authorization\"] = token\n\t}\n}",
"func (c *Admission) authorize(ctx context.Context, review *admission.AdmissionReview) error {\n\t// @check if the review is for something we can process\n\tobject, err := decodeObject(review.Request.Kind.Kind, review)\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"error\": err.Error(),\n\t\t\t\"id\": utils.GetTRX(ctx),\n\t\t\t\"name\": review.Request.Name,\n\t\t\t\"namespace\": review.Request.Namespace,\n\t\t}).Errorf(\"unable to decode object for review\")\n\n\t\treturn err\n\t}\n\n\t// @step: attempt to get the object authorized\n\tdenied, reason, err := c.authorizeResource(ctx, object, review.Request.Kind)\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"error\": err.Error(),\n\t\t\t\"id\": utils.GetTRX(ctx),\n\t\t\t\"name\": review.Request.Name,\n\t\t\t\"namespace\": review.Request.Namespace,\n\t\t}).Errorf(\"unable to handle admission review\")\n\n\t\treturn err\n\t}\n\n\t// @check if the object was rejected\n\tif denied {\n\t\tadmissionTotalMetric.WithLabelValues(actionDenied).Inc()\n\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"error\": reason,\n\t\t\t\"group\": review.Request.Kind.Group,\n\t\t\t\"id\": utils.GetTRX(ctx),\n\t\t\t\"kind\": review.Request.Kind.Kind,\n\t\t\t\"name\": review.Request.Name,\n\t\t\t\"namespace\": review.Request.Namespace,\n\t\t\t\"uid\": review.Request.UserInfo.UID,\n\t\t\t\"user\": review.Request.UserInfo.Username,\n\t\t\t\"version\": review.Request.Kind.Version,\n\t\t}).Warn(\"authorization for object execution denied\")\n\n\t\treview.Response = &admission.AdmissionResponse{\n\t\t\tAllowed: false,\n\t\t\tResult: &metav1.Status{\n\t\t\t\tCode: http.StatusForbidden,\n\t\t\t\tMessage: reason,\n\t\t\t\tReason: metav1.StatusReasonForbidden,\n\t\t\t\tStatus: metav1.StatusFailure,\n\t\t\t},\n\t\t}\n\n\t\t// @step: log the denial is required\n\t\tgo c.events.Send(&api.Event{\n\t\t\tDetail: reason,\n\t\t\tObject: object,\n\t\t\tReview: review.Request,\n\t\t})\n\n\t\treturn nil\n\t}\n\n\tadmissionTotalMetric.WithLabelValues(actionAccepted).Inc()\n\n\treview.Response = &admission.AdmissionResponse{Allowed: true}\n\n\tlog.WithFields(log.Fields{\n\t\t\"group\": review.Request.Kind.Group,\n\t\t\"id\": utils.GetTRX(ctx),\n\t\t\"kind\": review.Request.Kind.Kind,\n\t\t\"name\": review.Request.Name,\n\t\t\"namespace\": review.Request.Namespace,\n\t\t\"uid\": review.Request.UserInfo.UID,\n\t\t\"user\": review.Request.UserInfo.Username,\n\t\t\"version\": review.Request.Kind.Version,\n\t}).Info(\"object has been authorized for execution\")\n\n\treturn nil\n}",
"func (a *authorizer) Authorize(method string, r model.Role) bool {\n\tswitch method {\n\tcase \"/pipe.api.service.webservice.WebService/AddEnvironment\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/UpdateEnvironmentDesc\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/RegisterPiped\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/RecreatePipedKey\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/EnablePiped\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/DisablePiped\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/AddApplication\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/EnableApplication\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/DisableApplication\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/UpdateProjectStaticAdmin\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/EnableStaticAdmin\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/DisableStaticAdmin\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/UpdateProjectSSOConfig\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/UpdateProjectRBACConfig\":\n\t\treturn isAdmin(r)\n\tcase \"/pipe.api.service.webservice.WebService/SyncApplication\":\n\t\treturn isAdmin(r) || isEditor(r)\n\tcase \"/pipe.api.service.webservice.WebService/CancelDeployment\":\n\t\treturn isAdmin(r) || isEditor(r)\n\tcase \"/pipe.api.service.webservice.WebService/ApproveStage\":\n\t\treturn isAdmin(r) || isEditor(r)\n\tcase \"/pipe.api.service.webservice.WebService/GenerateApplicationSealedSecret\":\n\t\treturn isAdmin(r) || isEditor(r)\n\tcase \"/pipe.api.service.webservice.WebService/GetApplicationLiveState\":\n\t\treturn isAdmin(r) || isEditor(r) || isViewer(r)\n\tcase \"/pipe.api.service.webservice.WebService/GetProject\":\n\t\treturn isAdmin(r) || isEditor(r) || isViewer(r)\n\tcase \"/pipe.api.service.webservice.WebService/GetCommand\":\n\t\treturn isAdmin(r) || isEditor(r) || isViewer(r)\n\tcase \"/pipe.api.service.webservice.WebService/ListDeploymentConfigTemplates\":\n\t\treturn isAdmin(r) || isEditor(r) || isViewer(r)\n\tcase \"/pipe.api.service.webservice.WebService/ListEnvironments\":\n\t\treturn isAdmin(r) || isEditor(r) || isViewer(r)\n\tcase \"/pipe.api.service.webservice.WebService/ListPipeds\":\n\t\treturn isAdmin(r) || isEditor(r) || isViewer(r)\n\tcase \"/pipe.api.service.webservice.WebService/GetPiped\":\n\t\treturn isAdmin(r) || isEditor(r) || isViewer(r)\n\tcase \"/pipe.api.service.webservice.WebService/ListApplications\":\n\t\treturn isAdmin(r) || isEditor(r) || isViewer(r)\n\tcase \"/pipe.api.service.webservice.WebService/GetApplication\":\n\t\treturn isAdmin(r) || isEditor(r) || isViewer(r)\n\tcase \"/pipe.api.service.webservice.WebService/ListDeployments\":\n\t\treturn isAdmin(r) || isEditor(r) || isViewer(r)\n\tcase \"/pipe.api.service.webservice.WebService/GetDeployment\":\n\t\treturn isAdmin(r) || isEditor(r) || isViewer(r)\n\tcase \"/pipe.api.service.webservice.WebService/GetStageLog\":\n\t\treturn isAdmin(r) || isEditor(r) || isViewer(r)\n\tcase \"/pipe.api.service.webservice.WebService/GetMe\":\n\t\treturn isAdmin(r) || isEditor(r) || isViewer(r)\n\t}\n\treturn false\n}",
"func (i *Influx) RetrieveAuthorization(authID string) (auth *protocol.Authorization, err error) {\n\tres, err := i.HTTPInstance.Get(context.TODO(), i.HTTPClient, i.GetBasicURL()+\"/authorizations\", map[string]string{\n\t\t\"authID\": authID,\n\t}, nil)\n\n\terr = json.Unmarshal(res, &auth)\n\n\treturn\n}",
"func unauthorized(rw http.ResponseWriter, r *http.Request) {\n\n}",
"func makeAuthFunc(schemes map[string]bool) func(ctx context.Context, input *AuthenticationInput) error {\n\treturn func(ctx context.Context, input *AuthenticationInput) error {\n\t\t// If the scheme is valid and present in the schemes\n\t\tvalid, present := schemes[input.SecuritySchemeName]\n\t\tif valid && present {\n\t\t\treturn nil\n\t\t}\n\n\t\t// If the scheme is present in che schemes\n\t\tif present {\n\t\t\t// Return an unmet scheme error\n\t\t\treturn fmt.Errorf(\"security scheme for %q wasn't met\", input.SecuritySchemeName)\n\t\t}\n\t\t// Return an unknown scheme error\n\t\treturn fmt.Errorf(\"security scheme for %q is unknown\", input.SecuritySchemeName)\n\t}\n}",
"func (m *User) GetAuthorizationInfo()(AuthorizationInfoable) {\n return m.authorizationInfo\n}",
"func TestHttpClient_Authorization(t *testing.T) {\n\n\tcfg := NewConfig(accessKeyID, accessKeySecret, host)\n\tc := New(cfg)\n\n\tparams := map[string]string{\n\t\t\"query\": `config=format:json,start:0,hit:20,rerank_size:200&&query=create_time:[1579449600000,1582127999000]&&filter=content_type=3 AND in(status,\"1|2\") AND create_time >=1579449600000 AND create_time<=1582127999000&&sort=-pv`,\n\t\t\"fetch_fields\": \"id;source_id;source_name\",\n\t}\n\n\tresp, err := c.Request(appName, params, map[string]string{})\n\tt.Log(err, resp)\n}",
"func (o *ShortenerAPI) Authorizer() runtime.Authorizer {\n\n\treturn o.APIAuthorizer\n\n}",
"func Authorized(c *gin.Context) {\n\t_, exists := c.Get(\"user\")\n\tif !exists {\n\t\tc.AbortWithStatusJSON(401, gin.H{\n\t\t\t\"status\": false,\n\t\t\t\"message\": \"Unauthorization!!\",\n\t\t})\n\n\t\treturn\n\t}\n}",
"func (o *CloudTidesAPI) Authorizer() runtime.Authorizer {\n\treturn nil\n}",
"func DefaultEndpointAuthorizationsForReadOnlyUserRole(volumeBrowsingAuthorizations bool) portainer.Authorizations {\n\tauthorizations := map[portainer.Authorization]bool{\n\t\tportainer.OperationDockerContainerArchiveInfo: true,\n\t\tportainer.OperationDockerContainerList: true,\n\t\tportainer.OperationDockerContainerChanges: true,\n\t\tportainer.OperationDockerContainerInspect: true,\n\t\tportainer.OperationDockerContainerTop: true,\n\t\tportainer.OperationDockerContainerLogs: true,\n\t\tportainer.OperationDockerContainerStats: true,\n\t\tportainer.OperationDockerImageList: true,\n\t\tportainer.OperationDockerImageSearch: true,\n\t\tportainer.OperationDockerImageGetAll: true,\n\t\tportainer.OperationDockerImageGet: true,\n\t\tportainer.OperationDockerImageHistory: true,\n\t\tportainer.OperationDockerImageInspect: true,\n\t\tportainer.OperationDockerNetworkList: true,\n\t\tportainer.OperationDockerNetworkInspect: true,\n\t\tportainer.OperationDockerVolumeList: true,\n\t\tportainer.OperationDockerVolumeInspect: true,\n\t\tportainer.OperationDockerSwarmInspect: true,\n\t\tportainer.OperationDockerNodeList: true,\n\t\tportainer.OperationDockerNodeInspect: true,\n\t\tportainer.OperationDockerServiceList: true,\n\t\tportainer.OperationDockerServiceInspect: true,\n\t\tportainer.OperationDockerServiceLogs: true,\n\t\tportainer.OperationDockerSecretList: true,\n\t\tportainer.OperationDockerSecretInspect: true,\n\t\tportainer.OperationDockerConfigList: true,\n\t\tportainer.OperationDockerConfigInspect: true,\n\t\tportainer.OperationDockerTaskList: true,\n\t\tportainer.OperationDockerTaskInspect: true,\n\t\tportainer.OperationDockerTaskLogs: true,\n\t\tportainer.OperationDockerPluginList: true,\n\t\tportainer.OperationDockerDistributionInspect: true,\n\t\tportainer.OperationDockerPing: true,\n\t\tportainer.OperationDockerInfo: true,\n\t\tportainer.OperationDockerVersion: true,\n\t\tportainer.OperationDockerEvents: true,\n\t\tportainer.OperationDockerSystem: true,\n\t\tportainer.OperationDockerAgentPing: true,\n\t\tportainer.OperationDockerAgentList: true,\n\t\tportainer.OperationDockerAgentHostInfo: true,\n\t\tportainer.OperationPortainerStackList: true,\n\t\tportainer.OperationPortainerStackInspect: true,\n\t\tportainer.OperationPortainerStackFile: true,\n\t\tportainer.OperationPortainerWebhookList: true,\n\t}\n\n\tif volumeBrowsingAuthorizations {\n\t\tauthorizations[portainer.OperationDockerAgentBrowseGet] = true\n\t\tauthorizations[portainer.OperationDockerAgentBrowseList] = true\n\t}\n\n\treturn authorizations\n}",
"func (_RandomBeacon *RandomBeaconFilterer) FilterAuthorizationIncreased(opts *bind.FilterOpts, stakingProvider []common.Address, operator []common.Address) (*RandomBeaconAuthorizationIncreasedIterator, error) {\n\n\tvar stakingProviderRule []interface{}\n\tfor _, stakingProviderItem := range stakingProvider {\n\t\tstakingProviderRule = append(stakingProviderRule, stakingProviderItem)\n\t}\n\tvar operatorRule []interface{}\n\tfor _, operatorItem := range operator {\n\t\toperatorRule = append(operatorRule, operatorItem)\n\t}\n\n\tlogs, sub, err := _RandomBeacon.contract.FilterLogs(opts, \"AuthorizationIncreased\", stakingProviderRule, operatorRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &RandomBeaconAuthorizationIncreasedIterator{contract: _RandomBeacon.contract, event: \"AuthorizationIncreased\", logs: logs, sub: sub}, nil\n}",
"func (*OpenconfigOfficeAp_System_Aaa_Authorization_State_AuthorizationMethod_Union_String) Is_OpenconfigOfficeAp_System_Aaa_Authorization_State_AuthorizationMethod_Union() {\n}",
"func (srv *targetServiceHandler) auth(h http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {\n\t\tctx := httpbakery.ContextWithRequest(context.TODO(), req)\n\t\tops, err := opsForRequest(req)\n\t\tif err != nil {\n\t\t\tfail(w, http.StatusInternalServerError, \"%v\", err)\n\t\t\treturn\n\t\t}\n\t\tauthChecker := srv.checker.Auth(httpbakery.RequestMacaroons(req)...)\n\t\tif _, err = authChecker.Allow(ctx, ops...); err != nil {\n\t\t\thttpbakery.WriteError(ctx, w, srv.oven.Error(ctx, req, err))\n\t\t\treturn\n\t\t}\n\t\th.ServeHTTP(w, req)\n\t})\n}",
"func Auth(service auth.Service, optional ...bool) fiber.Handler {\n\treturn func(c *fiber.Ctx) error {\n\t\th := c.Get(\"Authorization\")\n\n\t\tif len(optional) > 0 {\n\t\t\tif h == \"\" {\n\t\t\t\treturn c.Next()\n\t\t\t}\n\n\t\t\t// Split the header\n\t\t\tchunks := strings.Split(h, \" \")\n\n\t\t\t// If header signature is not like `Bearer <token>`, then throw\n\t\t\t// This is also required, otherwise chunks[1] will throw out of bound error\n\t\t\tif len(chunks) < 2 {\n\t\t\t\treturn c.Next()\n\t\t\t}\n\n\t\t\t// Verify the token which is in the chunks\n\t\t\tuser, err := jwt.Verify(chunks[1])\n\n\t\t\tif err != nil {\n\t\t\t\treturn c.Next()\n\t\t\t}\n\n\t\t\tif isActive := service.IsUserActiveByUsername(user.Username); !isActive {\n\t\t\t\treturn c.Next()\n\t\t\t}\n\n\t\t\tc.Locals(\"UserId\", user.ID)\n\t\t\tc.Locals(\"User\", user.Username)\n\n\t\t\treturn c.Next()\n\t\t}\n\n\t\tif h == \"\" {\n\t\t\treturn utils.ErrUnauthorized\n\t\t}\n\n\t\t// Split the header\n\t\tchunks := strings.Split(h, \" \")\n\n\t\t// If header signature is not like `Bearer <token>`, then throw\n\t\t// This is also required, otherwise chunks[1] will throw out of bound error\n\t\tif len(chunks) < 2 {\n\t\t\treturn utils.ErrUnauthorized\n\t\t}\n\n\t\t// Verify the token which is in the chunks\n\t\tuser, err := jwt.Verify(chunks[1])\n\n\t\tif err != nil {\n\t\t\treturn utils.ErrUnauthorized\n\t\t}\n\n\t\tif isActive := service.IsUserActiveByUsername(user.Username); !isActive {\n\t\t\treturn utils.ErrUnauthorized\n\t\t}\n\n\t\tc.Locals(\"UserId\", user.ID)\n\t\tc.Locals(\"User\", user.Username)\n\n\t\treturn c.Next()\n\t}\n}",
"func (st *Store) Authorized(r *http.Request) (t *Token, err error) {\n\tvar v = r.Context().Value(st.ctxKey)\n\tvar ok bool\n\n\tif nil == v {\n\t\treturn nil, errors.New(\"Authorization Unknown/Not Processed\")\n\t}\n\n\tif t, ok = v.(*Token); ok {\n\t\treturn\n\t}\n\n\tif err, ok = v.(error); ok {\n\t\treturn\n\t}\n\n\treturn\n}",
"func (a *apiServer) authorizePipelineOp(ctx context.Context, operation pipelineOperation, input *pps.Input, projectName, outputName string) error {\n\treturn a.txnEnv.WithReadContext(ctx, func(txnCtx *txncontext.TransactionContext) error {\n\t\treturn a.authorizePipelineOpInTransaction(ctx, txnCtx, operation, input, projectName, outputName)\n\t})\n}",
"func (*System_Aaa_Authorization_AuthorizationMethod_Union_String) Is_System_Aaa_Authorization_AuthorizationMethod_Union() {\n}",
"func Auth(fn func(ctx *system.Context)) func(ctx *system.Context) {\n\treturn func(ctx *system.Context) {\n\t\tgconfig, err := ctx.System.DB.CreateGuildIfNotExists(ctx.Msg.GuildID)\n\t\tif err != nil {\n\t\t\tctx.ReplyError(\"Error getting guild configuration: \", err)\n\t\t\treturn\n\t\t}\n\n\t\tisAdmin, err := ctx.IsAdmin()\n\t\tif err != nil {\n\t\t\tctx.ReplyError(\"Error checking administrator status: \", err)\n\t\t\treturn\n\t\t}\n\n\t\tif !isAdmin {\n\t\t\tctx.ReplyError(\"You need to be an administrator or own the guild to configure guild settings\")\n\t\t\treturn\n\t\t}\n\n\t\tctx.Set(\"gconfig\", gconfig)\n\t\tfn(ctx)\n\t}\n}",
"func Authorizer(userService userService, jwtService jwtService) gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\taccessToken := extractToken(c)\n\t\tif accessToken == EmptyToken {\n\t\t\tabort(c, http.StatusBadRequest, \"Authorization header is missing or empty\")\n\t\t} else {\n\t\t\tparseJwt, err := jwtService.ParseJwt(accessToken)\n\n\t\t\tif err != nil {\n\t\t\t\tabort(c, http.StatusBadRequest, err.Error())\n\t\t\t} else if err := userVerification(c, parseJwt, userService); err != nil {\n\t\t\t\tabort(c, http.StatusUnauthorized, \"Unauthorized\")\n\t\t\t}\n\t\t}\n\t}\n}",
"func (o *ExportUsingGETParams) WithAuthorization(authorization string) *ExportUsingGETParams {\n\to.SetAuthorization(authorization)\n\treturn o\n}",
"func (svc *Service) CheckAuthorization(ctx context.Context, httMethod, httpRoute string) error {\n\tif ok, err := svc.IsAdmin(ctx); err != nil {\n\t\treturn err\n\t} else if ok {\n\t\treturn nil\n\t}\n\tprivs, err := svc.getPrivileges(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor i := range privs {\n\t\tif privs[i].Method == httMethod && privs[i].Route == httpRoute {\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn baseerrors.ErrPrivilege\n}",
"func (a *AuthService) CompleteAuthorization(res http.ResponseWriter, req *http.Request) {\n\texpectedState := a.getState(req)\n\tstate := req.FormValue(\"state\")\n\tif state != expectedState {\n\t\thttp.Error(res, ErrBadState.Error(), http.StatusForbidden)\n\t\treturn\n\t}\n\tcode := req.FormValue(\"code\")\n\ttok, err := a.OAuthConfig.Exchange(req.Context(), code)\n\tif err != nil {\n\t\thttp.Error(res, err.Error(), http.StatusForbidden)\n\t\treturn\n\t}\n\tclient := a.OAuthConfig.Client(req.Context(), tok)\n\tresp, err := client.Get(\"https://discordapp.com/api/users/@me\")\n\tif err != nil {\n\t\thttp.Error(res, err.Error(), http.StatusForbidden)\n\t\treturn\n\t}\n\tdefer resp.Body.Close()\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\thttp.Error(res, err.Error(), http.StatusForbidden)\n\t\treturn\n\t}\n\tvar du discordgo.User\n\terr = json.Unmarshal(body, &du)\n\tif err != nil {\n\t\thttp.Error(res, err.Error(), http.StatusForbidden)\n\t\treturn\n\t}\n\tuid, _ := strconv.ParseInt(du.ID, 10, 64)\n\tuser := &domains.User{\n\t\tID: uid,\n\t\tName: fmt.Sprintf(\"%s#%s\", du.Username, du.Discriminator),\n\t}\n\tsession := a.getSession(req)\n\tuserStr, err := json.Marshal(user)\n\tif err != nil {\n\t\thttp.Error(res, err.Error(), http.StatusForbidden)\n\t\treturn\n\t}\n\tsession.Values[userValue] = userStr\n\tsession.Save(req, res)\n}",
"func (s *NamespaceWebhook) authorized(request admissionctl.Request) admissionctl.Response {\n\tvar ret admissionctl.Response\n\n\t// Picking OldObject or Object will suffice for most validation concerns\n\tns, err := s.renderNamespace(request)\n\tif err != nil {\n\t\tlog.Error(err, \"Couldn't render a Namespace from the incoming request\")\n\t\treturn admissionctl.Errored(http.StatusBadRequest, err)\n\t}\n\t// service accounts making requests will include their name in the group\n\tfor _, group := range request.UserInfo.Groups {\n\t\tif privilegedServiceAccountsRe.Match([]byte(group)) {\n\t\t\tret = admissionctl.Allowed(\"Privileged service accounts may access\")\n\t\t\tret.UID = request.AdmissionRequest.UID\n\t\t\treturn ret\n\t\t}\n\t}\n\t// This must be prior to privileged namespace check\n\tif utils.SliceContains(layeredProductAdminGroupName, request.UserInfo.Groups) &&\n\t\tlayeredProductNamespaceRe.Match([]byte(ns.GetName())) {\n\t\tret = admissionctl.Allowed(\"Layered product admins may access\")\n\t\tret.UID = request.AdmissionRequest.UID\n\t\treturn ret\n\t}\n\n\t// L64-73\n\tif hookconfig.IsPrivilegedNamespace(ns.GetName()) {\n\n\t\tif amIAdmin(request) {\n\t\t\tret = admissionctl.Allowed(\"Cluster and SRE admins may access\")\n\t\t\tret.UID = request.AdmissionRequest.UID\n\t\t\treturn ret\n\t\t}\n\t\tlog.Info(\"Non-admin attempted to access a privileged namespace matching a regex from this list\", \"list\", hookconfig.PrivilegedNamespaces, \"request\", request.AdmissionRequest)\n\t\tret = admissionctl.Denied(fmt.Sprintf(\"Prevented from accessing Red Hat managed namespaces. Customer workloads should be placed in customer namespaces, and should not match an entry in this list of regular expressions: %v\", hookconfig.PrivilegedNamespaces))\n\t\tret.UID = request.AdmissionRequest.UID\n\t\treturn ret\n\t}\n\tif BadNamespaceRe.Match([]byte(ns.GetName())) {\n\n\t\tif amIAdmin(request) {\n\t\t\tret = admissionctl.Allowed(\"Cluster and SRE admins may access\")\n\t\t\tret.UID = request.AdmissionRequest.UID\n\t\t\treturn ret\n\t\t}\n\t\tlog.Info(\"Non-admin attempted to access a potentially harmful namespace (eg matching this regex)\", \"regex\", badNamespace, \"request\", request.AdmissionRequest)\n\t\tret = admissionctl.Denied(fmt.Sprintf(\"Prevented from creating a potentially harmful namespace. Customer namespaces should not match this regular expression, as this would impact DNS resolution: %s\", badNamespace))\n\t\tret.UID = request.AdmissionRequest.UID\n\t\treturn ret\n\t}\n\t// Check labels.\n\tunauthorized, err := s.unauthorizedLabelChanges(request)\n\tif !amIAdmin(request) && unauthorized {\n\t\tret = admissionctl.Denied(fmt.Sprintf(\"Denied. Err %+v\", err))\n\t\tret.UID = request.AdmissionRequest.UID\n\t\treturn ret\n\t}\n\t// L75-L77\n\tret = admissionctl.Allowed(\"RBAC allowed\")\n\tret.UID = request.AdmissionRequest.UID\n\treturn ret\n}",
"func NewAuthorization(orgID string, permissions []Permission) *Authorization {\n\tthis := Authorization{}\n\tvar status string = \"active\"\n\tthis.Status = &status\n\tthis.OrgID = orgID\n\tthis.Permissions = permissions\n\treturn &this\n}",
"func requestChimeraAuthorization(t *testing.T, authURL string, client *http.Client) *url.URL {\n\treq, err := http.NewRequest(http.MethodGet, authURL, nil)\n\trequire.NoError(t, err)\n\tresp := assertRespStatus(t, client, req, http.StatusFound)\n\n\tlocation, err := resp.Location()\n\trequire.NoError(t, err)\n\n\treturn location\n}"
] | [
"0.64171565",
"0.61032623",
"0.606701",
"0.5921552",
"0.5781001",
"0.57581466",
"0.563823",
"0.560635",
"0.54691887",
"0.5381755",
"0.5379844",
"0.5237179",
"0.52248925",
"0.51815754",
"0.51697916",
"0.5129923",
"0.5112342",
"0.50532514",
"0.50527006",
"0.5043038",
"0.5032289",
"0.5011199",
"0.50074095",
"0.49813113",
"0.49799597",
"0.49775502",
"0.49769634",
"0.49652854",
"0.49552235",
"0.4941093",
"0.49339676",
"0.49317947",
"0.49246582",
"0.48961112",
"0.4895537",
"0.48760283",
"0.48626846",
"0.485739",
"0.48430765",
"0.48324934",
"0.4825197",
"0.4820046",
"0.48186874",
"0.48084897",
"0.48073018",
"0.4806338",
"0.48016456",
"0.47997764",
"0.4790006",
"0.47605944",
"0.47589213",
"0.47587165",
"0.47579163",
"0.47555298",
"0.47548518",
"0.4754751",
"0.47374415",
"0.4736997",
"0.47035176",
"0.4689572",
"0.46818185",
"0.46796075",
"0.46767607",
"0.46767607",
"0.46723506",
"0.4657645",
"0.46450478",
"0.46441233",
"0.46367988",
"0.4636704",
"0.46352407",
"0.46343595",
"0.46188495",
"0.46094865",
"0.46078104",
"0.45919055",
"0.459034",
"0.4590051",
"0.45866832",
"0.45792472",
"0.45729166",
"0.45720938",
"0.4568988",
"0.4565431",
"0.45629606",
"0.45621422",
"0.45549583",
"0.455283",
"0.4549741",
"0.4546012",
"0.454274",
"0.45406196",
"0.45258322",
"0.45155537",
"0.45079866",
"0.4506773",
"0.4503296",
"0.44992325",
"0.44918504",
"0.44914836"
] | 0.69239974 | 0 |
start the Gin server and load the application's Swagger documentation annotations | func main() {
// load config
config.Init()
// services
services.Init()
// start gin server
router.RunGin()
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func main() {\n\n\t// This will pack config-files folder inside binary\n\t// you need rice utility for it\n\tbox := rice.MustFindBox(\"config-files\")\n\n\tinitErr := initializer.InitAll(box)\n\tif initErr != nil {\n\t\tlog.Fatalln(initErr)\n\t}\n\tr := gin.Default()\n\tpprof.Register(r)\n\tr.GET(\"/doc/*any\", ginSwagger.WrapHandler(swaggerFiles.Handler))\n\tdocs.SwaggerInfo.Host = \"\"\n\tr.GET(\"/\", func(c *gin.Context) {\n\t\tc.JSON(http.StatusOK, gin.H{\n\t\t\t\"message\": \"Server is up and running!\",\n\t\t})\n\t})\n\tr.NoRoute(func(c *gin.Context) {\n\t\tc.JSON(404, gin.H{\"code\": \"RouteNotFound\"})\n\t})\n\tapi.InitAPI(r)\n\tgo func() {\n\t\terr := r.Run(\":9050\")\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t}()\n\tstorage.GetStorageInstance().Close()\n}",
"func main() {\n\tdocs.SwaggerInfo.Schemes = []string{\"http\", \"https\"}\n\tappRegistry := registry.NewAppRegistry()\n\tappRegistry.StartServer()\n}",
"func init() {\n\n\t// Starts a new Gin instance with no middle-ware\n\tr := gin.New()\n\n\t// Define your handlers\n\tr.GET(\"/\", func(c *gin.Context) {\n\t\tc.String(200, \"Hello World!\")\n\t})\n\n\tr.POST(\"/vocab\", addVocab)\n\tr.PUT(\"/vocab\", updateVocab)\n\tr.DELETE(\"/vocab/:word\", deleteVocab)\n\tr.GET(\"/vocab/:word\", getVocab)\n\n\tr.POST(\"/scores\", addScore)\n\tr.PUT(\"/scores\", updateScore)\n\tr.GET(\"/scores/:word\", getScore)\n\n\tr.GET(\"/card\", getCard)\n\tr.PUT(\"/card\", updateCard)\n\n\tr.POST(\"/admin/datastore/data\", loadData)\n\tr.DELETE(\"/admin/datastore/data\", deleteData)\n\tr.GET(\"/admin/datastore/data\", downloadData)\n\n\t// Handle all requests using net/http\n\thttp.Handle(\"/\", r)\n}",
"func (c Routes) StartGin() {\n\tr := gin.Default()\n\tapi := r.Group(\"/api\")\n\t{\n\t\tapi.GET(\"/\", welcome)\n\t\tapi.GET(\"/users\", user.GetAllUsers)\n\t\tapi.POST(\"/users\", user.CreateUser)\n\t}\n\tr.Run(\":8000\")\n}",
"func (c Routes) StartGin() {\n\tr := gin.Default()\n\tr.Use(cors.Default())\n\tapi := r.Group(\"/api\")\n\t{\n\t\tapi.GET(\"/\", welcome)\n\t\tapi.GET(tasksResource, task.GetTasks)\n\t\tapi.GET(taskResource, task.GetTask)\n\t\tapi.POST(taskResource, task.CreateTask)\n\t\tapi.PATCH(taskResource, task.UpdateTaskStatus)\n\t\tapi.DELETE(taskResource, task.DeleteTask)\n\t}\n\n\tr.Run(\":8000\")\n}",
"func main() {\n\tgenerateSwaggerJSON()\n}",
"func main() {\n\tapp := newApp()\n\tcontrollers.Setup(app, \"/api\")\n\tapp.Run(ion.Addr(\":8081\"))\n}",
"func init() {\n\n// Run App at 'release' mode in production.\n gin.SetMode(gin.ReleaseMode)\n\n// Starts a new Gin instance with no middle-ware\n route := gin.New()\n \n // Define your handlers\n route.GET(\"/\", func(ctx *gin.Context) {\n ctx.String(http.StatusOK, \"Hello World!\")\n })\n route.GET(\"/ping\", func(ctx *gin.Context) {\n ctx.String(http.StatusOK, \"pong\")\n })\n\n route.GET(\"/kinds\", KindsList)\n\n // Handle all requests using net/http\n http.Handle(\"/\", route)\n}",
"func (s *server) init() {\n\t// Using middlewares on group.\n\ts.gin.Use(middleware.Recovery())\n\t// use AccessLog to log panic error with zap\n\ts.gin.Use(middleware.AccessLog())\n\ts.gin.Use(cors.Default())\n\n\tif config.Profile {\n\t\ts.logger.Info(\"/debug/pprof is enabled\")\n\t\tpprof.Register(s.gin)\n\t\ts.logger.Info(\"/debug/fgprof is enabled\")\n\t\ts.gin.GET(\"/debug/fgprof\", gin.WrapH(fgprof.Handler()))\n\t}\n\n\tif config.Doc {\n\t\t// swagger-ui: http://localhost:port/swagger/index.html\n\t\tip, _ := hostutil.GetHostIP()\n\t\ts.gin.GET(\"/swagger/*any\", ginSwagger.WrapHandler(swaggerFiles.Handler,\n\t\t\tginSwagger.URL(fmt.Sprintf(\"http://%s:%d/swagger/doc.json\", ip, s.cfg.Port)),\n\t\t\tginSwagger.DefaultModelsExpandDepth(-1)))\n\t}\n\tif s.staticResource {\n\t\t// server static file\n\t\tstaticFS, err := fs.Sub(lindb.StaticContent, \"web/static\")\n\t\tstaticHome := \"/console\"\n\t\tif err != nil {\n\t\t\ts.logger.Error(\"cannot find static resource\", logger.Error(err))\n\t\t} else {\n\t\t\ts.gin.StaticFS(staticHome, http.FS(staticFS))\n\t\t\t// redirects to admin console\n\t\t\ts.gin.GET(\"/\", func(c *gin.Context) {\n\t\t\t\tc.Request.URL.Path = staticHome\n\t\t\t\ts.gin.HandleContext(c)\n\t\t\t})\n\t\t}\n\t}\n}",
"func main() {\r\n\tconfig.InitViper()\r\n\tlogger.Logger().Printf(\"notebook server, startPort: %s\", os.Getenv(\"START_PORT\"))\r\n\tlogger.Logger().Printf(\"notebook server, endPort: %s\", os.Getenv(\"END_PORT\"))\r\n\r\n\tswaggerSpec, err := loads.Embedded(restapi.SwaggerJSON, restapi.FlatSwaggerJSON)\r\n\tif err != nil {\r\n\t\tlogger.Logger().Fatalln(err)\r\n\t}\r\n\r\n\tapi := operations.NewJupyterServerAPI(swaggerSpec)\r\n\tserver := restapi.NewServer(api)\r\n\tdefer server.Shutdown()\r\n\r\n\tserver.Port = viper.GetInt(config.PortKey)\r\n\tserver.ConfigureAPI()\r\n\r\n\tmux := http.NewServeMux()\r\n\tmux.Handle(\"/\", server.GetHandler())\r\n\tmux.HandleFunc(\"/health\", restapi.GetHealth)\r\n\r\n\taddress := fmt.Sprintf(\":%d\", server.Port)\r\n\r\n\tlogger.Logger().Printf(\"Jupyter Notebook Service REST API v1 serving on %s\", address)\r\n\r\n\terr = graceful.RunWithErr(address, 10*time.Second, mux)\r\n\tif err != nil {\r\n\t\tlogger.Logger().Fatalln(err)\r\n\t}\r\n}",
"func main() {\n\n\t//init api\n\tserver.Init()\n}",
"func StartApp() {\n\turlMappings()\n\trouter.Run(\"localhost:8080\")\n}",
"func main() {\n\t//\tif beego.RunMode == \"dev\" {\n\t//\t\tbeego.DirectoryIndex = true\n\t//\t\tbeego.StaticDir[\"/swagger\"] = \"swagger\"\n\t//\t}\n\n\tbeego.Run()\n}",
"func main() {\n\tserverStats := appMiddleware.NewStats()\n\t// Echo instance\n\te := echo.New()\n\n\t// Middleware\n\te.Use(middleware.Logger())\n\te.Use(middleware.Recover())\n\te.Use(middleware.CORSWithConfig(middleware.CORSConfig{\n\t\tAllowOrigins: []string{\"http://localhost\"},\n\t}))\n\te.Use(serverStats.Process)\n\n\te.GET(\"/swagger/*\", echoSwagger.WrapHandler)\n\n\t// Stats\n\tstatsRoute := &routes.StatsRoute{Stats: serverStats}\n\tstatsRoute.BindStatsRoute(e)\n\n\t// Api\n\tvar group = e.Group(\"/api\")\n\troutes.BindRoute(group)\n\troutes.BindQueryRoute(group)\n\troutes.BindRouteBlock(group)\n\n\t// Start server\n\te.Logger.Fatal(e.Start(\":\" + os.Getenv(\"API_PORT\")))\n}",
"func main() {\n\trouter := gin.Default()\n\trouter.GET(\"/ping\", func(c *gin.Context) {\n\t\tc.JSON(200, gin.H{\n\t\t\t\"message\": \"pong\",\n\t\t})\n\t})\n\trouter.Run() // listen and serve on 0.0.0.0:8080\n\t//router.Run(\":8080\")\t\n}",
"func (api *API) Run() {\n\tr := gin.Default()\n\n\tapi.configRoutes(r)\n\n\tr.Run() // listen and serve on 0.0.0.0:8080 (for windows \"localhost:8080\")\n}",
"func main() {\n\tr := gin.New()\n\tr.Use(cors.Default())\n\n\t//r.GET(\"/email\", ctrl.GenEmail)\n\tr.GET(\"/gentax\", ctrl.GenTaxData)\n\n\tr.Run(\":8099\")\n}",
"func main() {\n\n\tcfgPath := flag.String(\"p\", \"./cmd/api/conf.local.yaml\", \"Path to config file\")\n\tflag.Parse()\n\n\tconfig, err := config.Load(*cfgPath)\n\tcheckErr(err)\n\n\tcheckErr(app.Start(config))\n}",
"func Run(addr string) error {\n\te := echo.New()\n\n\te.HideBanner = true\n\n\te.Use(middleware.RequestID())\n\te.Use(middleware.Recover())\n\te.Use(middleware.Logger())\n\n\te.GET(\"/v3/api-docs\", GetSpec)\n\te.GET(\"/swagger-ui\", echo.WrapHandler(http.RedirectHandler(\"/swagger-ui/\", http.StatusMovedPermanently)))\n\te.GET(\"/swagger-ui/*\", echo.WrapHandler(third_party.SwaggerUIHandler(e)))\n\n\tp := prometheus.NewPrometheus(\"echo\", nil)\n\tp.Use(e)\n\n\tsvr := &server{}\n\tapi.RegisterHandlers(e, svr)\n\n\treturn e.Start(addr)\n}",
"func StartApplicatin() {\n\tmapUrls()\n\trouter.Run(\":8080\")\n}",
"func main() {\n\tgodotenv.Load()\n\n\tport := os.Getenv(\"REST_PORT\")\n\tif port == \"\" {\n\t\tport = \"8080\"\n\t}\n\n\tserver, err := start(port)\n\tif err != nil {\n\t\tlog.Println(\"err:\", err)\n\t\treturn\n\t}\n\n\terr = stopServer(server)\n\tif err != nil {\n\t\tlog.Println(\"err:\", err)\n\t\treturn\n\t}\n\n\treturn\n}",
"func HTTPInitDocumentation(r *gin.Engine) {\n\tbaseURL, ok := os.LookupEnv(\"HTTP_DOMAIN\")\n\tif !ok {\n\t\tpanic(\"http-docs-error\")\n\t}\n\turl := ginSwagger.URL(fmt.Sprintf(\"%s/swagger/doc.json\", baseURL))\n\tr.GET(\"/swagger/*any\", ginSwagger.WrapHandler(swaggerFiles.Handler, url))\n}",
"func main() {\n\trouter := gin.Default()\n\trouter.GET(\"/puppy\", handlePuppy)\n\trouter.Run() // listen and serve on 0.0.0.0:8080 (for windows \"localhost:8080\")\n}",
"func main() {\n\tconfigPath := flag.String(\"config\", \"./config.toml\", \"Path to config.toml file\")\n\tflag.Parse()\n\n\tapiConfig, err := config.LoadConfig(*configPath)\n\n\tif err != nil {\n\t\tlog.Fatal(\"Config loading error: \" + err.Error())\n\t\treturn\n\t}\n\n\tserve(apiConfig)\n}",
"func Init() {\n\tr := gin.Default()\n\tgin.DebugPrintRouteFunc = func(httpMethod, absolutePath, handlerName string, nuHandlers int) {\n\t\tlog.Printf(\"endpoint %v %v %v %v\\n\", httpMethod, absolutePath, handlerName, nuHandlers)\n\t}\n\n\tr.GET(\"/tests\", testcontroller.Tests)\n\tr.GET(\"/tests/:id/questions/\", testcontroller.TestQuestions)\n\tr.POST(\"/tests\", testcontroller.AddTest)\n\tr.PUT(\"/tests/:id\", testcontroller.UpdateTest)\n\tr.DELETE(\"/tests/:id\", testcontroller.DeleteTest)\n\n\tr.GET(\"/question/options/:id\", questioncontroller.QuestionOptions)\n\tr.POST(\"/question\", questioncontroller.AddQuestion)\n\tr.PUT(\"/question/:id\", questioncontroller.UpdateQuestion)\n\tr.DELETE(\"/questions/:id\", questioncontroller.DeleteQuestion)\n\n\tr.POST(\"/option\", optioncontroller.AddOption)\n\tr.PUT(\"/option/:id\", optioncontroller.UpdateOption)\n\tr.DELETE(\"/option/:id\", optioncontroller.DeleteOption)\n\n\t// Listen and Server in http://0.0.0.0:8080\n\tr.Run()\n}",
"func Run(){\n\tRestApiImplPtr.RegisterApi()\n\tRestServerImplPtr.StartRestServer();\n}",
"func main() {\n\n\tfmt.Println(\"Starting Restful services...\")\n\tfmt.Println(\"Using port:8080\")\n\thandleRequests()\n}",
"func StartServer() {\n\tr := gin.Default()\n\n\tcorsCfg := cors.DefaultConfig()\n\tcorsCfg.AllowOrigins = []string{\"http://localhost:1234\"}\n\tr.Use(cors.New(corsCfg))\n\n\tapi := r.Group(\"/api\")\n\t{\n\t\tapi.Any(\"/graphql\", graphQL)\n\t\tapi.GET(\"/players\", players)\n\t\tapi.GET(\"/player_datas\", playerDatas)\n\t}\n\n\tport := os.Getenv(\"PORT\")\n\tif len(port) == 0 {\n\t\tport = \"8080\"\n\t}\n\tr.Run(fmt.Sprintf(\":%s\", port))\n}",
"func main() {\n\t//gin.SetMode(gin.ReleaseMode)\n\tr := gin.New()\n\n\tr.GET(\"/\", func(c *gin.Context) {\n\t\tc.String(200, \"hello\")\n\t})\n\n\t// curl -X POST https://[project].com/data\n\tr.POST(\"/data\", AddData)\n\t// curl https://[project].com/uuid2/d05040b2-423d-4f91-a958-11f580e156ef\n\tr.GET(\"/uuid2/:uuid1\", GetUUID2)\n\t// curl https://[project].com/exists/9b0aea2b-70fa-4ce3-87f0-bb1391edad49\n\tr.GET(\"/exists/:uuid2\", ExistsUUID2)\n\n\thttp.Handle(\"/\", r)\n\tappengine.Main()\n}",
"func main() {\n\tfmt.Println(\"APPLICATION BEGIN\")\n\twebserver := new(service.Webserver)\n\tregisterConfig()\n\tregisterErrors()\n\tregisterAllApis()\n\tregisterInitFunc()\n\toverrideConfByEnvVariables()\n\twebserver.Start()\n}",
"func bootup (w http.ResponseWriter, r *http.Request ) {\n\tfmt.Fprintf( w, \"<h1>%s</h1>\\n\", \"Starting app for translating gophers language...\" )\n\tfmt.Fprintf( w, \"<h4>%s</h4>\\n\", \"We currently support two POST endpoints /word and /sentence\" )\n}",
"func InitRequestHandler() {\n\tr := gin.Default()\n\n\tv1 := r.Group(\"/api/v1\")\n\t{\n\n\t\tv1.GET(\"/healthz\", func(c *gin.Context) {\n\t\t\tc.String(200, \"OK\")\n\t\t})\n\n\t\tv1.POST(\"/payments\", func(c *gin.Context) {\n\t\t\tc.JSON(200, gin.H{\"message\": \"pong\"})\n\t\t})\n\n\t}\n\n\tr.Run()\n}",
"func Start(bootstrappers ...IBootstrapper) {\n\tvar err error\n\n\t// load application configurations\n\tAppConfig = initAppConfig()\n\thttpHeaderAppId = AppConfig.GetString(\"api.http.header_app_id\")\n\thttpHeaderAccessToken = AppConfig.GetString(\"api.http.header_access_token\")\n\tAppVersion = AppConfig.GetString(\"app.version\")\n\tAppVersionNumber = utils.VersionToNumber(AppVersion)\n\n\t// setup api-router\n\tApiRouter = itineris.NewApiRouter()\n\n\t// initialize \"Location\"\n\tutils.Location, err = time.LoadLocation(AppConfig.GetString(\"timezone\"))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// bootstrapping\n\tif bootstrappers != nil {\n\t\tfor _, b := range bootstrappers {\n\t\t\tlog.Println(\"Bootstrapping\", b)\n\t\t\terr := b.Bootstrap()\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(err)\n\t\t\t}\n\t\t}\n\t}\n\n\t// initialize and start gRPC server\n\tinitGrpcServer()\n\n\t// initialize and start echo server\n\tinitEchoServer()\n}",
"func (a *DocRouter) RegisterAPI(app *gin.Engine) {\n\tapp.GET(\"/swagger/*any\", ginSwagger.WrapHandler(swaggerFiles.Handler))\n}",
"func initGin(ginEngine *gin.Engine) {\n\tginEngine.Use(logrusLogger())\n\tginEngine.POST(\"/assignment\", putAssignment)\n\tginEngine.POST(\"/submission\", putSubmission)\n\tginEngine.GET(\"/plugin/langs\", getSupportedLangs)\n\tginEngine.GET(\"/debug/vars\", expvarGin.Handler())\n\tginEngine.GET(\"/health\", healthCheck)\n}",
"func main() {\n\tconfig := readFlags()\n\tapp := webapp.NewApp(config)\n\n\tapp.Router.Handle(\"/api/1.0/\", &api.Index{App: app})\n\tapp.Router.Handle(\"/api/1.0/search\", &api.Search{App: app})\n\n\tcloseChannel := make(chan int)\n\n\t// Starts listening.\n\tapp.Start(closeChannel)\n}",
"func StartApp() {\n\t// Start the controller\n\tcontrollers.InitUserController()\n\t// Now listen to gRPC calls\n\tlis, err := net.Listen(\"tcp\", port)\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to listen : %v\", err)\n\t}\n\ts := grpc.NewServer()\n\tapi.RegisterUserServiceServer(s, &server{})\n\treflection.Register(s) // Needed for reflection API, only then grpCui finds it\n\t//https://github.com/grpc/grpc-go/blob/master/Documentation/server-reflection-tutorial.md\n\tif err := s.Serve(lis); err != nil {\n\t\tlog.Fatalf(\"Failed to serve : %v\", err)\n\t}\n}",
"func main() {\n\t// load config and construct the server shared environment\n\tcfg := common.LoadConfig()\n\tlog := services.NewLogger(cfg)\n\n\t// create repository\n\trepo, err := repository.NewRepository(cfg, log)\n\tif err != nil {\n\t\tlog.Fatalf(\"Can not create application data repository. Terminating!\")\n\t}\n\n\t// setup GraphQL API handler\n\thttp.Handle(\"/api\", handlers.ApiHandler(cfg, repo, log))\n\n\t// show the server opening info and start the server with DefaultServeMux\n\tlog.Infof(\"Welcome to Fantom Rocks API server on [%s]\", cfg.BindAddr)\n\tlog.Fatal(http.ListenAndServe(cfg.BindAddr, nil))\n}",
"func main() {\n\n\tr := gin.Default()\n\tr.LoadHTMLGlob(\"templates/*\")\n\tr.GET(\"/\", indexPage)\n\tr.GET(\"/next\", nextPage)\n\tr.POST(\"/submit\", submitProcess)\n\t// r.GET(\"/show\", conDB)\n\n\tr.Run()\n\n}",
"func main() {\n\n\t// Handle command-line flags.\n\t// TODO: extend this to support environment variables as well.\n\tconfig := Config{}\n\terr := envconfig.Process(\"jsonator\", &config)\n\tif err != nil {\n\t\tlog.Fatal(err.Error())\n\t}\n\n\t// Configure logging to make debugging easy and provide a single,\n\t// consistent way to get log info out of the app.\n\tlogfile, _ := os.Create(config.LogPath)\n\tgin.DefaultWriter = io.MultiWriter(logfile)\n\tlog.SetOutput(gin.DefaultWriter)\n\n\t// Redirect stderr to stdout for container happieness\n\tdev_null, _ := os.Open(\"/dev/stdout\")\n\tsyscall.Dup2(int(dev_null.Fd()), 2)\n\n\t// Get a new, bells-and-whistles-included gin.Router.\n\trouter := gin.Default()\n\n\t// Use our middleware to make our shared state availible to request\n\t// handlers.\n\tstore := NewStore()\n\trouter.Use(StoreMiddleware(\"store\", &store))\n\n\t// Use NewRelic middleware for request stats.\n\tif config.NewRelicKey != \"\" && config.NewRelicName != \"\" {\n\t\tlog.Printf(\"Using NewRelic\")\n\n\t\tconfig := newrelic.NewConfig(config.NewRelicName, config.NewRelicKey)\n\t\tapp, err := newrelic.NewApplication(config)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\n\t\trouter.Use(NewRelicMiddleware(app))\n\n\t\tapp.RecordCustomEvent(\"AppStarted\", map[string]interface{}{\n\t\t\t\"cpus\": runtime.NumCPU(),\n\t\t\t\"version\": runtime.Version(),\n\t\t})\n\n\t\tdefer app.RecordCustomEvent(\"AppStopped\", map[string]interface{}{\n\t\t\t\"cpus\": runtime.NumCPU(),\n\t\t\t\"version\": runtime.Version(),\n\t\t})\n\t}\n\n\t// Set up routes.\n\trouter.GET(\"/status\", GetStatus)\n\trouter.GET(\"/stats\", GetStats)\n\trouter.GET(\"/count\", GetCount)\n\trouter.GET(\"/keys\", GetKeys)\n\trouter.GET(\"/doc\", GetAll)\n\trouter.PUT(\"/doc\", PutBatch)\n\trouter.GET(\"/doc/:key\", GetDoc)\n\trouter.PUT(\"/doc/:key\", PutDoc)\n\trouter.DELETE(\"/doc/:key\", DeleteDoc)\n\n\t// Start the service. If this returns, it's an exceptional case.\n\terr = router.Run(config.BindAddr)\n\tlog.Printf(err.Error())\n\tpanic(err)\n\n}",
"func init() {\n\t// Configure and start the API\n\tgo func() {\n\t\tapp := igcinfo.App{\n\t\t\tListenPort: listenPort}\n\t\tapp.StartServer()\n\t}()\n\n\t// Ensure server is started before continuing\n\ttime.Sleep(1000 * time.Millisecond)\n}",
"func (s Server) Run() {\n\tlog.Printf(\"[INFO] activate rest server\")\n\n\tgin.SetMode(gin.ReleaseMode)\n\trouter := gin.New()\n\trouter.Use(gin.Recovery())\n\trouter.Use(s.limiterMiddleware())\n\trouter.Use(s.loggerMiddleware())\n\n\tv1 := router.Group(\"/v1\")\n\t{\n\t\tv1.POST(\"/message\", s.saveMessageCtrl)\n\t\tv1.GET(\"/message/:key/:pin\", s.getMessageCtrl)\n\t\tv1.GET(\"/params\", s.getParamsCtrl)\n\t\tv1.GET(\"/ping\", func(c *gin.Context) { c.String(200, \"pong\") })\n\t}\n\n\tlog.Fatal(router.Run(\":8080\"))\n}",
"func serve(app *App) *gin.Engine {\n\t// Set gin mode.\n\tsetRuntimeMode(app.config.Core.Mode)\n\n\t// Setup the app\n\thandler := router.Load(\n\t\t// Services\n\t\tapp.service,\n\n\t\t// Middlwares\n\t\tmiddleware.RequestId(),\n\t)\n\n\treturn handler\n}",
"func SwaggerServer() {\n\n\tserver := martini.Classic()\n\tserver.Use(martini.Recovery())\n server.Use(martini.Static(\"./assets/swagger\",martini.StaticOptions{Prefix:\"/swagger\"}))\n\t//server.(\"/swagger\", http.Dir(\"./assets/swagger\"))\n\tserver.Use(martini.Static(\"./assets/swagger/favicon.ico\",martini.StaticOptions{Prefix:\"/favicon.ico\"}))\n\t//server.StaticFile(\"/favicon.ico\", \"../assets/swagger/favicon.ico\")\n\n\tserver.RunOnAddr(SWAGGER_PORT)\n\n}",
"func main() {\n\tfmt.Println(\"################################\")\n\tfmt.Println(\"#### Hello from MyAppStatus ####\")\n\tfmt.Println(\"################################\")\n\n\tapp.StartServer()\n}",
"func Start() {\n\tr := gin.Default()\n\tr.GET(\"/ping\", func(c *gin.Context) {\n\t\tc.JSON(200, gin.H{\n\t\t\t\"message\": \"pong\",\n\t\t})\n\t})\n\tr.POST(\"/registry\", controllers.InsertRegistry)\n\tr.GET(\"/registry\", controllers.GetRegistryAll)\n\tr.GET(\"/registry/:id/\", controllers.GetRegistry)\n\tr.DELETE(\"/registry/:id/\", controllers.DeleteRegistry)\n\tr.PUT(\"/registry/:id/\", controllers.PutRegistry)\n\tr.Run(\":8080\") // listen and serve on 0.0.0.0:8080\n}",
"func GinServer() {\n\t// Set Gin to production mode\n\tgin.SetMode(gin.ReleaseMode)\n\n\t// Set the router as the default one provided by Gin\n\trouter = gin.Default()\n\n\t// Process the templates at the start so that they don't have to be loaded\n\t// from the disk again. This makes serving HTML pages very fast.\n\trouter.LoadHTMLGlob(\"static/templates/*\")\n\n\t// Initialize the routes\n\tinitializeRoutes()\n\n\thttp.Handle(\"/\", router)\n}",
"func (s *Server) Run(options ...func(*gin.RouterGroup)) {\n\tlog.Printf(\"[INFO] activate rest server\")\n\n\trouter := gin.New()\n\n\trouter.Use(gin.Recovery())\n\n\trouter.Use(s.loggerMiddleware())\n\n\trouter.GET(\"/ping\", s.pingCtrl)\n\n\tv1 := router.Group(\"/v1\")\n\n\t// Cors headers\n\tconfigCors := cors.DefaultConfig()\n\tconfigCors.AllowAllOrigins = true\n\tconfigCors.AllowHeaders = []string{\"Origin\", \"Content-Length\", \"Content-Type\", \"Authorization\"}\n\n\tv1.Use(cors.New(configCors))\n\n\t// Set Authorization if we have ENV settings\n\tif len(s.BasicAuthUser) > 0 {\n\t\tv1.Use(gin.BasicAuth(gin.Accounts{\n\t\t\ts.BasicAuthUser: s.BasicAuthPWD,\n\t\t}))\n\t}\n\n\tfor _, op := range options {\n\t\tif op != nil {\n\t\t\top(v1)\n\t\t}\n\t}\n\n\tlog.Fatal(router.Run(\":\" + s.ServerPort))\n}",
"func runAPIServer() {\n\trouter := gin.Default()\n\trouter.POST(\"/alert\", alert)\n\trouter.Run(\":\" + strconv.Itoa(config.APIPort))\n}",
"func Load(s server.Controller, middleware ...gin.HandlerFunc) http.Handler {\n\te := gin.New()\n\tgin.SetMode(gin.ReleaseMode)\n\n\te.Use(gin.Recovery())\n\n\te.Use(header.NoCache)\n\te.Use(header.Options)\n\n\te.Use(middleware...)\n\te.Use(logger.Logger())\n\n\te.NoRoute(func(c *gin.Context) {\n\t\tc.JSON(http.StatusNotFound, gin.H{\n\t\t\t\"code\": 404,\n\t\t\t\"message\": \"request not found\",\n\t\t})\n\t})\n\n\tapp := e.Group(\"/api/v1\")\n\t{\n\t\tapp.GET(\"/application/:name\", s.GetApplication)\n\t\tapp.GET(\"/applications\", s.ListApplications)\n\t\tapp.POST(\"/application\", s.CreateApplication)\n\t\tapp.DELETE(\"/application/:name\", s.DeleteApplication)\n\n\t\tapp.GET(\"/resource/:name\", s.GetResource)\n\t\tapp.DELETE(\"/resource/:name\", s.DeleteResource)\n\t\tapp.GET(\"/resources\", s.ListResource)\n\t\tapp.GET(\"/resources/timeframe/:name\", s.ListTimeframeResource)\n\t\tapp.DELETE(\"/resources/timeframe/:name\", s.DeleteTimeframeResource)\n\t\tapp.GET(\"/resources/timeframe/:name/:appName\", s.GetTimeframeResource)\n\t\t// app.POST(\"/resource\", s.CreateResource)\n\n\t\tapp.POST(\"/timeframe\", s.CreateTimeframe)\n\t\tapp.GET(\"/timeframes\", s.ListTimeframes)\n\t\tapp.GET(\"/timeframe/:name\", s.GetTimeframe)\n\t\tapp.PUT(\"/timeframe\", s.UpdateTimeframe)\n\t\tapp.DELETE(\"/timeframe/:name\", s.DeleteTimeframe)\n\t}\n\n\te.GET(\"/version\", versionCtrl)\n\treturn e\n}",
"func startApp(cfg *Config) error {\n\tapp, err := app.NewApplication(cfg.App())\n\tif err != nil {\n\t\treturn err\n\t}\n\tserver, err := server.NewServer(cfg.Listen, cfg.Protocol, app)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = server.Start()\n\tif err != nil {\n\t\treturn err\n\t}\n\tlogger.Info(\"start app\", \"msg\", log.NewLazySprintf(\"Server listening on %v (%v protocol)\", cfg.Listen, cfg.Protocol))\n\treturn nil\n}",
"func InitApi(app *iris.Application) {\n\t// var getAPI router.Party\n\n\tif config.Conf.Debug {\n\t\tppApi := app.Party(\"/debug\")\n\t\tppApi.Get(\"/pprof\", pprofHandler(pprof.Index))\n\t\tppApi.Get(\"/cmdline\", pprofHandler(pprof.Cmdline))\n\t\tppApi.Get(\"/profile\", pprofHandler(pprof.Profile))\n\t\tppApi.Post(\"/symbol\", pprofHandler(pprof.Symbol))\n\t\tppApi.Get(\"/symbol\", pprofHandler(pprof.Symbol))\n\t\tppApi.Get(\"/trace\", pprofHandler(pprof.Trace))\n\t\tppApi.Get(\"/block\", pprofHandler(pprof.Handler(\"block\").ServeHTTP))\n\t\tppApi.Get(\"/goroutine\", pprofHandler(pprof.Handler(\"goroutine\").ServeHTTP))\n\t\tppApi.Get(\"/allocs\", pprofHandler(pprof.Handler(\"allocs\").ServeHTTP))\n\t\tppApi.Get(\"/heap\", pprofHandler(pprof.Handler(\"heap\").ServeHTTP))\n\t\tppApi.Get(\"/mutex\", pprofHandler(pprof.Handler(\"mutex\").ServeHTTP))\n\t\tppApi.Get(\"/threadcreate\", pprofHandler(pprof.Handler(\"threadcreate\").ServeHTTP))\n\n\t\tgetAPI := app.Party(\"/get\")\n\t\tgetAPI.Get(\"/crontab\", ShowCrontab)\n\t}\n}",
"func main() {\n\tstories, err := reader.ReadJsonStory(\"./static/story/default.json\")\n\tif err != nil {\n\t\tlog.Panicln(err)\n\t}\n\n\tweb.Start(stories)\n}",
"func (env *Env) startAPI() {\n\tserver := registerRoutes(env)\n\terr := server.ListenAndServe()\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n}",
"func StartApplication() {\r\n\tmapUrls()\r\n\trouter.Run(\":8080\")\r\n}",
"func main() {\n\tfmt.Println(\"server is up and running!!\")\n\truntime.GOMAXPROCS(4)\n\n\tapp := gin.Default()\n\n\tsearch.RouterMain(app)\n\n\terr := app.Run(\"0.0.0.0:5000\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tfmt.Println(\"server got fired!!!!\")\n}",
"func (app *Application) Run() error {\n\n\tr := gin.Default()\n\tr.LoadHTMLGlob(\"templates/*\")\n\n\t// inject Config into context\n\n\tr.Use(func(c *gin.Context) {\n\t\tc.Set(\"cfg\", app.Config)\n\t\tc.Next()\n\t})\n\n\t// CSRF\n\tr.Use(wraphh.WrapHH(nosurf.NewPure))\n\n\tr.Static(app.Options.StaticURL, app.Options.StaticDir)\n\n\tr.GET(\"/\", indexPage)\n\n\tapi := r.Group(\"/api\")\n\n\tapi.GET(\"/all/\", getMovies)\n\tapi.GET(\"/\", getRandomMovie)\n\tapi.POST(\"/\", addMovie)\n\tapi.GET(\"/suggest\", suggest)\n\tapi.GET(\"/movie/:id\", getMovie)\n\tapi.DELETE(\"/movie/:id\", deleteMovie)\n\tapi.PATCH(\"/seen/:id\", markSeen)\n\n\tr.Run()\n\treturn nil\n}",
"func Setup() *gin.Engine {\n\tapp := gin.New()\n\n\t// Logging to a file.\n\tf, _ := os.Create(\"log/api.log\")\n\tgin.DisableConsoleColor()\n\tgin.DefaultWriter = io.MultiWriter(f)\n\n\t// Middlewares\n\tapp.Use(gin.LoggerWithFormatter(func(param gin.LogFormatterParams) string {\n\t\treturn fmt.Sprintf(\"%s - - [%s] \\\"%s %s %s %d %s \\\" \\\" %s\\\" \\\" %s\\\"\\n\",\n\t\t\tparam.ClientIP,\n\t\t\tparam.TimeStamp.Format(\"02/Jan/2006:15:04:05 -0700\"),\n\t\t\tparam.Method,\n\t\t\tparam.Path,\n\t\t\tparam.Request.Proto,\n\t\t\tparam.StatusCode,\n\t\t\tparam.Latency,\n\t\t\tparam.Request.UserAgent(),\n\t\t\tparam.ErrorMessage,\n\t\t)\n\t}))\n\tapp.Use(gin.Recovery())\n\tapp.NoRoute(middlewares.NoRouteHandler())\n\n\t// Routes\n\tapp.GET(\"/api/contributions\", controllers.GetContributionsChart)\n\n\treturn app\n}",
"func serve(config *config.Config) {\n\trouter := gin.Default()\n\n\t// Set the config in our handlers to give them access to server configuration\n\thandlers.SetConfig(config)\n\n\t// Initialize our routes to point to our handlers\n\tapi := router.Group(config.Server.APIPrefix)\n\tapi.GET(\"/ping\", handlers.PingHandler)\n\tapi.GET(\"/posts\", handlers.PostsHandler)\n\n\t// Configure the HTTP server\n\tserver := &http.Server {\n\t\tAddr: config.Server.Address,\n\t\tHandler: router,\n\t}\n\n\t// Start the HTTP server\n\tlog.Println(\"Starting HatchWays API Server\")\n\tif err := server.ListenAndServe(); err != nil {\n\t\tlog.Fatal(\"Error starting HatchWays API Server: \" + err.Error())\n\t}\n\n}",
"func main() {\n\tclientset, err := NewClientSet()\n\tif err != nil {\n\t\tlog.Fatalf(\"clientset failed to load: %v\", err)\n\t}\n\n\tc := NewController(clientset)\n\n\tstop := make(chan struct{})\n\tdefer close(stop)\n\n\tif err = c.Run(1, stop); err != nil {\n\t\tlog.Fatalf(\"Error running controller: %s\", err.Error())\n\t}\n}",
"func Swagger(c *gin.Context) {\n\tc.Header(\"Content-Type\", \"application/x-yaml\")\n}",
"func main() {\n\tflag.Parse()\n\tconfig := createConfig()\n\twebServices := rest.AggregatedAPIs()\n\tswagger, err := builder.BuildOpenAPISpec(webServices, config)\n\tif err != nil {\n\t\tlog.Fatal(err.Error())\n\t}\n\tjsonBytes, err := json.MarshalIndent(swagger, \"\", \" \")\n\tif err != nil {\n\t\tlog.Fatal(err.Error())\n\t}\n\tif err := ioutil.WriteFile(*outputFile, jsonBytes, 0644); err != nil {\n\t\tlog.Fatal(err.Error())\n\t}\n}",
"func main() {\n\tgob.Register(&model.User{})\n\tgwf.Start(os.Args[1:], Commands, Controllers, Services, middleware.Middleware{}, Models)\n}",
"func main() {\n\tc := webapi.Config{\n\t\tBaseUrl: os.Getenv(\"WEB_API_BASE_URL\"),\n\t\tPort: os.Getenv(\"WEB_API_PORT\"),\n\t\tEnv: os.Getenv(\"APP_ENV\"),\n\t}\n\n\tdbctx := context.Background()\n\tdbC, err := db.NewClient(dbctx, db.Config{\n\t\tURI: os.Getenv(\"MONGO_URI\"),\n\t\tDB: os.Getenv(\"MONGO_DB\"),\n\t})\n\n\tdefer dbC.Disconnect(dbctx)\n\n\tif err != nil {\n\t\tlog.Fatal(fmt.Sprintf(\"Can not get Configuration value from ENV : %s\", err))\n\t}\n\n\tif err := dbC.Health(context.Background()); err != nil {\n\t\tlog.Fatal(fmt.Sprintf(\"An error occured while instanciating db dependency : %s\", err))\n\t}\n\tam := webapi.NewAPIMiddleware(dbC)\n\th := webapi.NewAPIHandler(dbC)\n\ts := webapi.Server{\n\t\tConf: c,\n\t\tDBClient: dbC,\n\t\tMiddleware: am,\n\t\tHandler: h,\n\t}\n\t// todo v1.1: inject a logger, and monitoring agent in server\n\ts.InitServer()\n\tif err != nil {\n\t\tlog.Fatal(fmt.Sprintf(\"An error occured during the web Api init : %s\", err))\n\t}\n\n\terr = s.Router.Run(s.Conf.BaseUrl + \":\" + s.Conf.Port)\n\tif err != nil {\n\t\tlog.Fatal(fmt.Sprintf(\"Can not start server : %s\", err))\n\t}\n\n}",
"func RegisterGin(r *gin.Engine) {\n\tonce.Do(func() { sdkClient(r) })\n}",
"func Start() {\n\te := echo.New()\n\tlogger = e.Logger\n\n\t// custom context\n\te.Use(func(h echo.HandlerFunc) echo.HandlerFunc {\n\t\treturn func(c echo.Context) error {\n\t\t\tcc := &MatchContext{c}\n\t\t\treturn h(cc)\n\t\t}\n\t})\n\n\te.Use(middleware.Recover())\n\te.Logger.SetLevel(func() log.Lvl {\n\t\tif Verbose {\n\t\t\treturn log.DEBUG\n\t\t}\n\t\treturn log.INFO\n\t}())\n\tif l, ok := e.Logger.(*log.Logger); ok {\n\t\tl.SetHeader(\"${time_rfc3339} ${level}\")\n\t}\n\n\tsetupRoutes(e)\n\te.GET(\"/swagger/*\", echoSwagger.WrapHandler)\n\n\te.Logger.Fatal(e.Start(\":8000\"))\n}",
"func App() *buffalo.App {\n\tif app == nil {\n\t\tcorsConfig := cors.New(cors.Options{\n\t\t\tAllowedOrigins: []string{\"*\"},\n\t\t\tAllowedHeaders: []string{\"Origin\", \"Accept\", \"Content-Type\", \"X-Requested-With\", \"Authorization\"},\n\t\t\tExposedHeaders: []string{\"access-token\", \"expiry\", \"token-type\", \"uid\", \"client\", \"latest-version\"},\n\t\t\tAllowedMethods: []string{\"POST\", \"PUT\", \"GET\", \"PATCH\", \"OPTIONS\", \"HEAD\", \"DELETE\"},\n\t\t\tDebug: true,\n\t\t})\n\t\tapp = buffalo.New(buffalo.Options{\n\t\t\tEnv: ENV,\n\t\t\tSessionStore: sessions.Null{},\n\t\t\tPreWares: []buffalo.PreWare{\n\t\t\t\tcorsConfig.Handler,\n\t\t\t},\n\t\t\tSessionName: \"_mnm_sim_session\",\n\t\t})\n\n\t\tg := app.Group(\"/api/v1\")\n\n\t\tapp1 := app.Group(\"/api/v1\")\n\t\t// Automatically redirect to SSL\n\t\tapp1.Use(forceSSL())\n\n\t\t// Log request parameters (filters apply).\n\t\tg.Use(paramlogger.ParameterLogger)\n\t\tapp1.Use(paramlogger.ParameterLogger)\n\n\t\t// Set the request content type to JSON\n\t\tg.Use(contenttype.Set(\"application/json\"))\n\t\tapp1.Use(contenttype.Set(\"application/json\"))\n\n\t\t// Wraps each request in a transaction.\n\t\t// c.Value(\"tx\").(*pop.Connection)\n\t\t// Remove to disable this.\n\t\tg.Use(popmw.Transaction(models.DB))\n\t\tapp1.Use(popmw.Transaction(models.DB))\n\n\t\tapp1.GET(\"/\", HomeHandler)\n\t\tg.GET(\"/swagger/{doc:.*}\", buffaloSwagger.WrapHandler(swaggerFiles.Handler))\n\n\t\tapp1.Use(SetCurrentUser)\n\t\tapp1.Use(Authorize)\n\n\t\tg.GET(\"/users/new\", UsersNew)\n\t\tg.POST(\"/users\", UsersCreate)\n\t\tapp1.GET(\"/signin\", AuthNew)\n\t\tapp1.POST(\"/signin\", AuthCreate)\n\t\tapp1.POST(\"/auth/sign_in\", AuthCreate)\n\t\tapp1.DELETE(\"/signout\", AuthDestroy)\n\t\tapp1.Resource(\"/phases\", PhasesResource{})\n\t\tapp1.Middleware.Skip(Authorize, HomeHandler, UsersNew, UsersCreate, AuthNew, AuthCreate)\n\t\tapp1.Resource(\"/builds\", BuildsResource{})\n\t}\n\n\treturn app\n}",
"func Start() {\n\trouter := gin.Default()\n\trouter.SetFuncMap(map[string]interface{}{\n\t\t\"formatAsTimeAgo\": formatAsTimeAgo,\n\t})\n\trouter.LoadHTMLGlob(\"templates/*\")\n\t// Mount favicon\n\trouter.Use(favicon.New(\"./favicon.ico\"))\n\n\t// Not found\n\trouter.NoRoute(func(c *gin.Context) {\n\t\tc.HTML(404, \"404.html\", gin.H{})\n\t})\n\n\t// Mount controllers\n\tMountIndexController(router)\n\n\trouter.GET(\"/ping\", func(c *gin.Context) {\n\t\tc.JSON(200, gin.H{\n\t\t\t\"message\": \"pong\",\n\t\t})\n\t})\n\n\trouter.Run()\n}",
"func main() {\n\tcmd := cli.New(&cli.CLI{\n\t\tName: \"Starshipyard\",\n\t\tVersion: cli.Version{Major: 0, Minor: 1, Patch: 1},\n\t\tDescription: \"A command-line tool for controling the starshipyard server, scaffolding boilerplate code, and executing developer defined commands\",\n\t\tGlobalFlags: []cli.Flag{\n\t\t\tcli.Flag{\n\t\t\t\tName: \"env\",\n\t\t\t\tAlias: \"e\",\n\t\t\t\tDefault: \"development\",\n\t\t\t\tDescription: \"Specify the server environment\",\n\t\t\t},\n\t\t\tcli.Flag{\n\t\t\t\tName: \"address\",\n\t\t\t\tAlias: \"a\",\n\t\t\t\tDefault: \"0.0.0.0\",\n\t\t\t\tDescription: \"Specify the address for the HTTP server to listen\",\n\t\t\t},\n\t\t\tcli.Flag{\n\t\t\t\tName: \"port\",\n\t\t\t\tAlias: \"p\",\n\t\t\t\tDefault: \"3000\",\n\t\t\t\tDescription: \"Specify the listening port for the HTTP server\",\n\t\t\t},\n\t\t},\n\t\tCommands: []cli.Command{\n\t\t\t{\n\t\t\t\tName: \"server\",\n\t\t\t\tAlias: \"s\",\n\t\t\t\tDescription: \"Options for controlling starshipyard HTTP server\",\n\t\t\t\tSubcommands: []cli.Command{\n\t\t\t\t\t{\n\t\t\t\t\t\tName: \"start\",\n\t\t\t\t\t\tDescription: \"Start the starship yard http server\",\n\t\t\t\t\t\tFlags: []cli.Flag{\n\t\t\t\t\t\t\tcli.Flag{\n\t\t\t\t\t\t\t\tName: \"daemonize\",\n\t\t\t\t\t\t\t\tAlias: \"d\",\n\t\t\t\t\t\t\t\tDescription: \"Daemonize the http server\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAction: func(c *cli.Context) error {\n\t\t\t\t\t\t\tfmt.Println(\"Starting the starship yard http server...\")\n\t\t\t\t\t\t\tconfig, err := framework.LoadConfig(\"config/app.yaml\")\n\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\tfmt.Println(\"[starship] missing 'config/app.yaml' starship app configuration\")\n\t\t\t\t\t\t\t\tconfig = framework.DefaultConfig()\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tconfig.Address = c.Flag(\"address\").String()\n\t\t\t\t\t\t\tconfig.Port = c.Flag(\"port\").Int()\n\n\t\t\t\t\t\t\ts := framework.Init(config)\n\n\t\t\t\t\t\t\tdaemonize := c.Flag(\"daemonize\").Bool()\n\t\t\t\t\t\t\tif daemonize {\n\t\t\t\t\t\t\t\tfmt.Println(\"[starship] launching in daemon mode...\")\n\t\t\t\t\t\t\t\tfmt.Println(\"[starship] not currently implemented, work on this functionality is in progress\")\n\t\t\t\t\t\t\t\t//s.StartAsDaemon()\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tfmt.Println(\"[starship] launching with terminal attached to server\")\n\t\t\t\t\t\t\t\ts.Start()\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\treturn nil\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tName: \"generate\",\n\t\t\t\tAlias: \"g\",\n\t\t\t\tDescription: \"Generate new go source code for models, controllers, and views\",\n\t\t\t\tSubcommands: []cli.Command{\n\t\t\t\t\t{\n\t\t\t\t\t\tName: \"model\",\n\t\t\t\t\t\tDescription: \"Build a model template with the specified object data\",\n\t\t\t\t\t\tAction: func(c *cli.Context) error {\n\t\t\t\t\t\t\tfmt.Println(\"[starship] code generation functionality is not implemented yet\")\n\t\t\t\t\t\t\tfmt.Println(\"[starship] test code has been built, it just needs to be migrated into the base and will be available shortly\")\n\t\t\t\t\t\t\treturn nil\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tName: \"controller\",\n\t\t\t\t\t\tDescription: \"Build a controller template with the specified object data\",\n\t\t\t\t\t\tAction: func(c *cli.Context) error {\n\t\t\t\t\t\t\tfmt.Println(\"[starship] code generation functionality is not implemented yet\")\n\t\t\t\t\t\t\tfmt.Println(\"[starship] test code has been built, it just needs to be migrated into the base and will be 
available shortly\")\n\t\t\t\t\t\t\treturn nil\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tName: \"view\",\n\t\t\t\t\t\tDescription: \"Build a view template with the specified object data\",\n\t\t\t\t\t\tAction: func(c *cli.Context) error {\n\t\t\t\t\t\t\tfmt.Println(\"[starship] code generation functionality is not implemented yet\")\n\t\t\t\t\t\t\tfmt.Println(\"[starship] test code has been built, it just needs to be migrated into the base and will be available shortly\")\n\t\t\t\t\t\t\treturn nil\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tName: \"job\",\n\t\t\t\t\t\tDescription: \"Build a job template with the specified object data\",\n\t\t\t\t\t\tAction: func(c *cli.Context) error {\n\t\t\t\t\t\t\tfmt.Println(\"[starship] code generation functionality is not implemented yet\")\n\t\t\t\t\t\t\tfmt.Println(\"[starship] test code has been built, it just needs to be migrated into the base and will be available shortly\")\n\t\t\t\t\t\t\treturn nil\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tName: \"model\",\n\t\t\t\t\t\tDescription: \"Build a model template with the specified object data\",\n\t\t\t\t\t\tAction: func(c *cli.Context) error {\n\t\t\t\t\t\t\tfmt.Println(\"[starship] code generation functionality is not implemented yet\")\n\t\t\t\t\t\t\tfmt.Println(\"[starship] test code has been built, it just needs to be migrated into the base and will be available shortly\")\n\t\t\t\t\t\t\treturn nil\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tName: \"new\",\n\t\t\t\tAlias: \"n\",\n\t\t\t\tDescription: \"Create a new starship project\",\n\t\t\t\tAction: func(c *cli.Context) error {\n\t\t\t\t\tfmt.Println(\"Building a new starship project directory:\")\n\n\t\t\t\t\tfmt.Println(\"[CREATE] README.md\")\n\t\t\t\t\tfmt.Println(\"[CREATE] Taskfile\")\n\t\t\t\t\tfmt.Println(\"[CREATE] Dependencies\")\n\t\t\t\t\tfmt.Println(\"[CREATE] app\")\n\t\t\t\t\tfmt.Println(\"[CREATE] app/assets\")\n\t\t\t\t\tfmt.Println(\"[CREATE] app/assets/stylesheets\")\n\t\t\t\t\tfmt.Println(\"[CREATE] app/controllers\")\n\t\t\t\t\tfmt.Println(\"[CREATE] app/models\")\n\t\t\t\t\tfmt.Println(\"[CREATE] app/views\")\n\t\t\t\t\tfmt.Println(\"[CREATE] bin\")\n\t\t\t\t\tfmt.Println(\"[CREATE] bin/task\")\n\t\t\t\t\tfmt.Println(\"[CREATE] bin/starship\")\n\t\t\t\t\tfmt.Println(\"[CREATE] bin/dep\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/database.yml\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/application.yml\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/environments\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/environments/development.yml\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/environments/production.yml\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/environments/test.yml\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/initializers\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/initializers/mime_types.go\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/initializers/inflections.go\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/initializers/cors.go\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/initializers/cookie_serialization.go\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/initializers/content_security_policy.go\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/initializers/backtrace_silencers.go\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/initializers/assets.go\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/locales\")\n\t\t\t\t\tfmt.Println(\"[CREATE] config/boot.go\")\n\t\t\t\t\tfmt.Println(\"[CREATE] db\")\n\t\t\t\t\tfmt.Println(\"[CREATE] 
db/seed.go\")\n\t\t\t\t\tfmt.Println(\"[CREATE] log\")\n\t\t\t\t\tfmt.Println(\"[CREATE] public\")\n\t\t\t\t\tfmt.Println(\"[CREATE] public/404.html\")\n\t\t\t\t\tfmt.Println(\"[CREATE] public/422.html\")\n\t\t\t\t\tfmt.Println(\"[CREATE] public/500.html\")\n\t\t\t\t\tfmt.Println(\"[CREATE] public/apple-touch-icon-precomposed.png\")\n\t\t\t\t\tfmt.Println(\"[CREATE] public/apple-touch-icon.png\")\n\t\t\t\t\tfmt.Println(\"[CREATE] public/favicon.ico\")\n\t\t\t\t\tfmt.Println(\"[CREATE] tmp\")\n\t\t\t\t\tfmt.Println(\"[CREATE] tmp/cache\")\n\t\t\t\t\tfmt.Println(\"[CREATE] tmp/cache/assets\")\n\t\t\t\t\tfmt.Println(\"[CREATE] test\")\n\t\t\t\t\tfmt.Println(\"[CREATE] test/fixtures\")\n\t\t\t\t\tfmt.Println(\"[CREATE] .gitignore\")\n\n\t\t\t\t\treturn nil\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tName: \"console\",\n\t\t\t\tAlias: \"c\",\n\t\t\t\tDescription: \"Start the starship yard console interface\",\n\t\t\t\tAction: func(c *cli.Context) error {\n\t\t\t\t\tfmt.Println(\"[starship][CONSOLE] console interface is not implemented yes\")\n\t\t\t\t\treturn nil\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t})\n\n\t_, err := cmd.Parse(os.Args)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}",
"func main() {\n\t// Create a new empty YARF server\n\ty := yarf.New()\n\n\t// Add route/resource\n\ty.Add(\"/\", new(Hello))\n\n\t// Start server listening on port 8080\n\ty.Start(\":8080\")\n}",
"func main() {\n\tapp.StartApp()\n}",
"func main() {\n\tr := mux.NewRouter()\n\tconf := config.GetConfig()\n\tSPJ := &app.SPJ{}\n\tSPJ.Initialize(conf,r)\n\terr := http.ListenAndServe(\":8080\", r)\n\tif err != nil {\n\t\tlog.Print(\"App is not running\")\n\t}else{\n\t\tlog.Print(\"App is not running\")\n\t}\n}",
"func Load(middleware ...gin.HandlerFunc) http.Handler {\n\te := gin.New()\n\te.Use(gin.Recovery())\n\n\tfs := http.FileServer(http.Dir(\"/fileserver\"))\n\te.GET(\"/static/*filepath\", func(c *gin.Context) {\n\t\tfs.ServeHTTP(c.Writer, c.Request)\n\t})\n\n\te.Use(middleware...)\n\te.Use(session.SetUser())\n\te.Use(token.Refresh)\n\n\te.GET(\"/login\", api.ShowLogin)\n\te.GET(\"/logout\", api.GetLogout)\n\n\tauth := e.Group(\"/authorize\")\n\t{\n\t\tauth.GET(\"\", api.GetLogin)\n\t\tauth.POST(\"\", api.GetLogin)\n\t\tauth.POST(\"/token\", api.GetLoginToken)\n\t}\n\n\tdisconf := e.Group(\"/api/disconf\")\n\t{\n\t\tdisconf.Use(session.MustAdmin())\n\t\tdisconf.POST(\"/push\", api.DisConfPush)\n\t\tdisconf.GET(\"/pull\", api.DisConfPull)\n\t\tdisconf.GET(\"/search\", api.DisConfList)\n\t\tdisconf.DELETE(\"/delete\", api.DisConfDel)\n\t}\n\n\tbuildpack := e.Group(\"/api/buildpack\")\n\t{\n\t\tbuildpack.Use(session.MustAdmin())\n\t\tbuildpack.POST(\"/import\", api.BuildpackImport)\n\t\tbuildpack.GET(\"/search\", api.BuildpackList)\n\t\tbuildpack.DELETE(\"/delete\", api.BuildpackDel)\n\t\tbuildpack.POST(\"/dockerfile/push\", api.BuildpackDockerfilePush)\n\t\tbuildpack.GET(\"/dockerfile/pull\", api.BuildpackDockerfilePull)\n\t\tbuildpack.GET(\"/image/push\", api.BuildpackImagePush)\n\t}\n\treturn e\n}",
"func main() {\n\t// initialize all flags\n\tflag.Parse()\n\n\t// create an instance of defaultMux()\n\tmux := defaultMux()\n\n\t// Build the MapHandler using the mux as the fallback\n\tpathsToUrls := map[string]string{\n\t\t\"/urlshort-godoc\": \"https://godoc.org/github.com/gophercises/urlshort\",\n\t\t\"/yaml-godoc\": \"https://godoc.org/gopkg.in/yaml.v2\",\n\t}\n\tmapHandler := gUS.MapHandler(pathsToUrls, mux)\n\tfmt.Println(\"\\n==== ==== ==== ====\")\n\tfmt.Println(\"Starting the server on :8080\")\n\tlog.Fatal(errors.Wrap(http.ListenAndServe(\":8080\", selectFlagHandler(mapHandler)), \"Failed to start WebServer\"))\n}",
"func init() {\n\n// Run App at 'release' mode in production.\n gin.SetMode(gin.ReleaseMode)\n\n// Starts a new Gin instance with no middle-ware\n router := gin.New()\n v1 := router.Group(\"/v1\")\n v1.GET(\"/orders/:UsrId/list\", OrdersListV1)\n v1.POST(\"/orders/:UsrId/place\", CreateOrderV1)\n v1.PUT(\"/orders/:OrdId/tranxn\", OrderTransactionV1)\n v1.PUT(\"/orders/:OrdId/ordstatus\", ChangeOrderStatusV1)\n v1.DELETE(\"/orders/:OrdId/remove\", DeleteOrderV1)\n\n v1.POST(\"/items/:UsrId\", CreateItemV1)\n v1.GET(\"/items/:UsrId/usrlist\", UserItemsV1)\n v1.PUT(\"/items/:UsrId\", UpdateItemsV1)\n v1.DELETE(\"/items/:ItmId\", DeleteItemV1)\n v1.GET(\"/orditems/:OrdId/ordlist\", OrderItemsV1)\n\n // Handle all requests using net/http\n http.Handle(\"/\", router)\n}",
"func InitApp(engine *gin.Engine, db *sql.DB, loggerTrace *log.LoggerTrace, duration time.Duration) error {\n\tarticleRepository := mysql.NewMysqlArticleRepository(db, loggerTrace)\n\tauthorRepository := mysql2.NewMysqlAuthorRepository(db, loggerTrace)\n\tarticleUsecase := usecase.NewArticleUsecase(articleRepository, authorRepository, duration, loggerTrace)\n\terror2 := http.NewArticleHandler(engine, articleUsecase, loggerTrace)\n\treturn error2\n}",
"func Init() {\n\t// r := router.Get()\n\t// r.GET(\"/publishOffers\", func(ctx *gin.Context) {\n\t// \tctx.JSON(200, offersController.PublishOffers(ctx))\n\t// })\n}",
"func main() {\n\tRun(&LoadgenService{})\n}",
"func (s *APIServer) Start() error {\n\t// Init new app\n\tapp := fiber.New()\n\n\t// App host config\n\thost := s.config.Server.Host + \":\" + s.config.Server.Port\n\n\t// Static files\n\tif s.config.Static.Prefix != \"\" {\n\t\tapp.Static(s.config.Static.Prefix, s.config.Static.Path)\n\t} else {\n\t\tapp.Static(s.config.Static.Path)\n\t}\n\n\t// Middlewares\n\tapp.Use(func(c *fiber.Ctx) {\n\t\t// Log each request\n\t\ts.logger.Info(\n\t\t\t\"fetch URL\",\n\t\t\tzap.String(\"method\", c.Method()),\n\t\t\tzap.String(\"path\", c.Path()),\n\t\t)\n\n\t\t// Go to next middleware\n\t\tc.Next()\n\t})\n\n\t// App routes\n\tapp.Get(\"/\", IndexHandler)\n\n\t// Start server\n\tif err := app.Listen(host); err != nil {\n\t\ts.logger.Info(\n\t\t\t\"error\",\n\t\t\tzap.Error(err),\n\t\t)\n\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func main() {\n\ta := App{}\n\ta.Initialize()\n\ta.Run(\":8000\")\n}",
"func main() {\n\t//json loading\n\tjsonFile, err := ioutil.ReadFile(\"book_tags.json\")\n\tif err != nil {\n\t\tfmt.Print(err)\n\t}\n\terr = json.Unmarshal(jsonFile, &Tags)\n\tif err != nil {\n\t\tfmt.Println(\"error:\", err)\n\t}\n\n\t// api logic\n\trouter := mux.NewRouter()\n\trouter.HandleFunc(\"/\", func(w http.ResponseWriter, r *http.Request) {\n\t\thttp.ServeFile(w, r, r.URL.Path[1:])\n\t})\n\trouter.HandleFunc(\"/tags/{id}\", GetTag).Methods(\"GET\")\n\trouter.HandleFunc(\"/tags-all\", GetAll).Methods(\"GET\")\n\tlog.Fatal(http.ListenAndServe(\":8000\", router))\n}",
"func init() {\n\tnewApp := core.NewSimpleApplication()\n\n\t// Now, enjoy your web!\n\tnewApp.StartApp(wtsMap)\n}",
"func StartServer() {\n\n\tlogger.Log.Info(\"Starting watermark service server\")\n\n\trouter := gin.Default()\n\n\t// No Authorization required\n\trouter.GET(\"/health\", func(c *gin.Context) {\n\t\tc.JSON(200, gin.H{\n\t\t\t\"Status\": \"ok\",\n\t\t})\n\t})\n\n\t// Login Endpoint: For authentication\n\trouter.POST(\"/login\", func(ctx *gin.Context) {\n\t\ttoken := controller.LoginController(ctx)\n\t\tif token != \"\" {\n\t\t\tctx.JSON(http.StatusOK, gin.H{\n\t\t\t\t\"token\": token,\n\t\t\t})\n\t\t} else {\n\t\t\tctx.JSON(http.StatusUnauthorized, \"Unauthorized\")\n\t\t}\n\t})\n\n\t// JWT Authorization required\n\tapiRoutes := router.Group(\"/api\", middlewares.AuthorizeJWT())\n\t{\n\t\tapiRoutes.POST(\"/\", func(c *gin.Context) {\n\t\t\tcontroller.PutWaterMark(c)\n\t\t})\n\t}\n\n\trouter.Run(\":8080\")\n}",
"func main() {\n\tlog.Info(\"Starting up REST interface on port 8000\")\n\n\trouter := mux.NewRouter()\n\t// Instrument the /webhook endpoint for prometheus instrumentation\n\trouter.HandleFunc(\"/webhook\", prometheus.InstrumentHandlerFunc(\"webhook\", SendXrayMessage))\n\trouter.HandleFunc(\"/webhook\", SendXrayMessage).Methods(\"POST\")\n\trouter.Handle(\"/metrics\", promhttp.Handler())\n\tlog.Fatal(http.ListenAndServe(\":8000\", router))\n}",
"func main() {\n\tlog.Info(\"Initialize service...\")\n\n\trouter := httprouter.New()\n\trouter.GET(\"/\", home)\n\n\tlog.Info(\"Service is ready to listen and serve.\")\n\thttp.ListenAndServe(\":8000\", router)\n}",
"func StartAPIServer(rfs *fs.RootFileSystem) {\n\tfilesys = rfs\n\thttp.Handle(\"/\", APIHandler{})\n\tserveString := fmt.Sprintf(\":%d\", config.PORT)\n\tfmt.Printf(\"Serving on %s\\n\", serveString)\n\thttp.ListenAndServe(serveString, nil)\n}",
"func getMain(c *gin.Context) {\n\n\tc.HTML(http.StatusOK, \"index.html\", gin.H{\n\t\t\"module\": biModule,\n\t})\n}",
"func Run() error {\n\tcloseLogger, err := setupLogger()\n\tif err != nil {\n\t\treturn fail.Wrap(err)\n\t}\n\tdefer closeLogger()\n\n\ts := grapiserver.New(\n\t\tgrapiserver.WithGrpcServerUnaryInterceptors(\n\t\t\tgrpc_ctxtags.UnaryServerInterceptor(grpc_ctxtags.WithFieldExtractor(grpc_ctxtags.CodeGenRequestFieldExtractor)),\n\t\t\tgrpc_zap.UnaryServerInterceptor(zap.L()),\n\t\t\tgrpc_zap.PayloadUnaryServerInterceptor(\n\t\t\t\tzap.L(),\n\t\t\t\tfunc(ctx context.Context, fullMethodName string, servingObject interface{}) bool { return true },\n\t\t\t),\n\t\t),\n\t\tgrapiserver.WithGatewayServerMiddlewares(\n\t\t\tgithubEventDispatcher,\n\t\t),\n\t\tgrapiserver.WithServers(\n\t\t\tgithub.NewInstallationEventServiceServer(),\n\t\t),\n\t)\n\treturn s.Serve()\n}",
"func Run(cfg cli.Config) {\n\tstatikFS, _ := fs.New()\n\trouter := mux.NewRouter().StrictSlash(true)\n\troutes := []Route{\n\t\t{\n\t\t\t\"API\",\n\t\t\t\"GET\",\n\t\t\t\"/api/{name}\",\n\t\t\tAPIWrapper(API, cfg),\n\t\t},\n\t}\n\n\tfor _, route := range routes {\n\t\trouter.\n\t\t\tPath(route.Path).\n\t\t\tMethods(route.Method).\n\t\t\tName(route.Name).\n\t\t\tHandler(route.HandlerFunc)\n\t}\n\trouter.PathPrefix(\"/\").Handler(http.FileServer(statikFS))\n\terr := http.ListenAndServe(fmt.Sprintf(\"%s:%d\", cfg.Web.Address, cfg.Web.Port), router)\n\tif err != nil {\n\t\tprintln(err.Error())\n\t}\n}",
"func init() {\n\t// In this example, we will hard code the port. Later the environment\n\t// will dictate.\n\tport = 7718\n\t// Set up the heartbeat ticker.\n\theartbeat = time.NewTicker(60 * time.Second)\n\n\t// Setup the service router.\n\tgin.SetMode(gin.ReleaseMode)\n\trouter = gin.New()\n\n\t// Make sure we are still alive.\n\trouter.GET(\"/stats/:pgm\", GetCircuitStats)\n\n\t// These are the services we will be listening for.\n\trouter.POST(\"/add/:word\", ReceiveWrapper)\n\t// Get the number of heartbeats put out by the application (also in real-time).\n\trouter.GET(\"/beats\", GetHeartbeatCount)\n\t// Make sure we are still alive.\n\trouter.GET(\"/ping\", PingTheAPI)\n}",
"func main() {\n\n\tswaggerSpec, err := loads.Embedded(restapi.SwaggerJSON, restapi.FlatSwaggerJSON)\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\n\tapi := operations.NewExternalTypesDemoAPI(swaggerSpec)\n\tserver := restapi.NewServer(api)\n\tdefer server.Shutdown()\n\n\tparser := flags.NewParser(server, flags.Default)\n\tparser.ShortDescription = \"external types imports: external anonymous types\"\n\tparser.LongDescription = \"This sample specification exercises external types, with both x-go-type in definitions and inlined.\\n\\nIt demonstrates how to use the x-go-type extension to plug external type definitions in the generated code,\\nfor models (e.g. for properties, arrays or maps) or operations.\\n\\nNotice that x-go-type works for schemas and is not supported for simple swagger types,\\nused for response headers and query & path parameters.\\n\"\n\tserver.ConfigureFlags()\n\tfor _, optsGroup := range api.CommandLineOptionsGroups {\n\t\t_, err := parser.AddGroup(optsGroup.ShortDescription, optsGroup.LongDescription, optsGroup.Options)\n\t\tif err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\t}\n\n\tif _, err := parser.Parse(); err != nil {\n\t\tcode := 1\n\t\tif fe, ok := err.(*flags.Error); ok {\n\t\t\tif fe.Type == flags.ErrHelp {\n\t\t\t\tcode = 0\n\t\t\t}\n\t\t}\n\t\tos.Exit(code)\n\t}\n\n\tserver.ConfigureAPI()\n\n\tif err := server.Serve(); err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\n}",
"func Run() (err error) {\n\n\terr = sm.Run()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = as.Run()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Start Swagger API Manager (provider)\n\terr = apiMgr.Start(true, false)\n\tif err != nil {\n\t\tlog.Error(\"Failed to start Swagger API Manager with error: \", err.Error())\n\t\treturn err\n\t}\n\tlog.Info(\"Swagger API Manager started\")\n\n\t// Add module Swagger APIs\n\terr = apiMgr.AddApis()\n\tif err != nil {\n\t\tlog.Error(\"Failed to add Swagger APIs with error: \", err.Error())\n\t\treturn err\n\t}\n\tlog.Info(\"Swagger APIs successfully added\")\n\n\t// Register Message Queue handler\n\thandler := mq.MsgHandler{Handler: msgHandler, UserData: nil}\n\thandlerId, err = mqLocal.RegisterHandler(handler)\n\tif err != nil {\n\t\tlog.Error(\"Failed to register local Msg Queue listener: \", err.Error())\n\t\treturn err\n\t}\n\tlog.Info(\"Registered local Msg Queue listener\")\n\n\t// Initalize metric store\n\tupdateStoreName()\n\n\treturn nil\n}",
"func main() {\n\tprocDir, err := filepath.Abs(filepath.Dir(os.Args[0]))\n\n\tflag.StringVar(&bindAddr, \"a\", \":9090\", \"Address to listen on.\")\n\tflag.StringVar(&bcApiUrl, \"bcapi\", \"http://localhost:8080/beancounter-platform/rest\",\n\t\t\"Base URL of Beancounter Platform API.\")\n\tflag.StringVar(&assetsDir, \"assets\", filepath.Join(procDir, \"static\"),\n\t\t\"Static assets directory.\")\n\tflag.StringVar(&bcApiKey, \"apikey\", \"\",\n\t\t\"BC API key. This will be removed in the near future.\")\n\tflag.Parse()\n\n\tvar bcClient bc.Client\n\tvar api *bo.Api\n\n\tlog.Printf(\"Beancounter API base URL: %s\", bcApiUrl)\n\tbcClient, err = bc.NewClient(bcApiUrl)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tapi, err = bo.NewApi(PATH_API_V1, bcClient, bcApiKey)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tmux := http.DefaultServeMux\n\tmux.Handle(PATH_API_V1, api)\n\n\tif assetsDir != \"\" {\n\t\tlog.Printf(\"Serving static assets from: %s\", assetsDir)\n\t\tfileServer := http.FileServer(http.Dir(assetsDir))\n\t\tmux.Handle(PATH_STATIC, http.StripPrefix(PATH_STATIC, fileServer))\n\t\t// TODO: make a favicon\n\t\tmux.HandleFunc(\"/favicon.ico\", http.NotFound)\n\t\tmux.HandleFunc(\"/\", indexHandler)\n\t}\n\n\tlog.Printf(\"Listening on %s\", bindAddr)\n\terr = http.ListenAndServe(bindAddr, logHandler(mux))\n\tif err != nil {\n\t\tlog.Fatal(\"ListenAndServe: \", err)\n\t}\n}",
"func API(shutdown chan os.Signal, log *log.Logger) http.Handler {\n\n\t// Construct the web.App which holds all routes as well as common Middleware.\n\tapp := web.NewApp(shutdown, log, webcontext.Env_Dev, mid.Logger(log))\n\n\tapp.Handle(\"GET\", \"/swagger/\", saasSwagger.WrapHandler)\n\tapp.Handle(\"GET\", \"/swagger/*\", saasSwagger.WrapHandler)\n\n\t/*\n\t\tOr can use SaasWrapHandler func with configurations.\n\t\turl := saasSwagger.URL(\"http://localhost:1323/swagger/doc.json\") //The url pointing to API definition\n\t\te.GET(\"/swagger/*\", saasSwagger.SaasWrapHandler(url))\n\t*/\n\n\treturn app\n}",
"func main() {\n\t// load environment settings for environment\n\terr := config.ReadConfig(\"config/config.yml\")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\t// Load client *service.Client\n\ts := services.NewClient()\n\n\t// load Usecase with client and services\n\tu := usecase.NewUsecase(s)\n\n\t// load controller using usecases\n\tc := controller.NewController(u)\n\n\t// router using controller\n\trouter.NewRouter(c)\n}",
"func main() {\n\tenv, err := plugins.NewEnvironment()\n\tenv.RespondAndExitIfError(err)\n\n\tvar stats *statistics.DocumentStatistics\n\n\tfor _, model := range env.Request.Models {\n\t\tswitch model.TypeUrl {\n\t\tcase \"openapi.v2.Document\":\n\t\t\tdocumentv2 := &openapiv2.Document{}\n\t\t\terr = proto.Unmarshal(model.Value, documentv2)\n\t\t\tif err == nil {\n\t\t\t\t// Analyze the API document.\n\t\t\t\tstats = statistics.NewDocumentStatistics(env.Request.SourceName, documentv2)\n\t\t\t}\n\t\tcase \"openapi.v3.Document\":\n\t\t\tdocumentv3 := &openapiv3.Document{}\n\t\t\terr = proto.Unmarshal(model.Value, documentv3)\n\t\t\tif err == nil {\n\t\t\t\t// Analyze the API document.\n\t\t\t\tstats = statistics.NewDocumentStatisticsV3(env.Request.SourceName, documentv3)\n\t\t\t}\n\t\t}\n\t}\n\n\tif stats != nil {\n\t\t// Return the analysis results with an appropriate filename.\n\t\t// Results are in files named \"summary.json\" in the same relative\n\t\t// locations as the description source files.\n\t\tfile := &plugins.File{}\n\t\tfile.Name = strings.Replace(stats.Name, path.Base(stats.Name), \"summary.json\", -1)\n\t\tfile.Data, err = json.MarshalIndent(stats, \"\", \" \")\n\t\tfile.Data = append(file.Data, []byte(\"\\n\")...)\n\t\tenv.RespondAndExitIfError(err)\n\t\tenv.Response.Files = append(env.Response.Files, file)\n\t}\n\n\tenv.RespondAndExit()\n}",
"func main() {\n\tws := new(restful.WebService)\n\tws.Route(ws.GET(\"/\").To(home))\n\trestful.Add(ws)\n\tprint(\"open browser on http://localhost:8080/\\n\")\n\tlog.Fatal(http.ListenAndServe(\":8080\", nil))\n}",
"func main() {\n\tConfigurationFilesImplPtr = new(ConfigurationFilesImpl);\n\tArgsInit();\n\tStorageBackendImplPtr = new(ElasticsearchStorageBackendImp);\n\tStorageBackendImplPtr.init(ConfigurationFilesImplPtr);\n\tRestApiImplPtr = new(CyberBullyingEntryPointRestApiImpl);\n\tRestApiImplPtr.init(ConfigurationFilesImplPtr, StorageBackendImplPtr);\n\tRestServerImplPtr = new(RestServer);\n\tRestServerImplPtr.init(ConfigurationFilesImplPtr, RestApiImplPtr.GetApi());\n Run(); \n}",
"func init() {\n\tfmt.Printf(\"short url generator has version %s built from %s on %s\\n\", app.Version, app.Commit, app.BuildTime)\n\tprepareConfig()\n\t// logging is initialized\n\tctx := cu.BuildContext(context.Background(), cu.SetContextOperation(\"00.init\"), errs.SetDefaultErrsSeverity(errs.SeverityCritical))\n\tlogging.Msg(ctx).Infof(\"pid:%d user:%d(%d) group:%d(%d)\", os.Getpid(), os.Getuid(), os.Geteuid(), os.Getgid(), os.Getegid())\n\tif appCfg.Server == nil {\n\t\tlogging.LogError(ctx, errs.KindServer, \"no server config\")\n\t\tlog.Exit(1)\n\t}\n\tif appCfg.Router == nil {\n\t\tlogging.LogError(ctx, errs.KindRouter, \"no router config\")\n\t\tlog.Exit(1)\n\t}\n\tvar err error\n\tappCfg.Router.WebPath, err = fsutils.ResolvePath(appCfg.Router.WebPath, usr)\n\tif err != nil {\n\t\tlogging.LogError(ctx, errs.KindRouter, \"invalid router web path\")\n\t\tlog.Exit(1)\n\t}\n\tif currConfigSaveToPath != \"\" {\n\t\tif currConfigSaveToPath, err = fsutils.SafeParentResolvePath(currConfigSaveToPath, usr, 0700); err != nil {\n\t\t\tlogging.LogError(ctx, errs.KindInvalidValue, fmt.Errorf(\"invalid path [%s] to save config: %w\", currConfigSaveToPath, err))\n\t\t\tlog.Exit(1)\n\t\t}\n\t}\n\tvar tokenizer app.Tokenizer\n\tif appCfg.Tokenizer != nil && appCfg.Tokenizer.Hashid != nil {\n\t\ttokenizer, err = hashid_tokenizer.NewHashidTokenizer(appCfg.Tokenizer.Hashid)\n\t\tif err != nil {\n\t\t\tlogging.LogError(ctx, errs.KindTokenizer, fmt.Errorf(\"init tokenizer failed: %w\", err))\n\t\t\tlog.Exit(1)\n\t\t}\n\t} else {\n\t\tlogging.LogError(ctx, errs.KindTokenizer, \"no tokenizer config\")\n\t\tlog.Exit(1)\n\t}\n\tvar store app.LinkStore\n\tif appCfg.Store != nil { // todo: add store loader\n\t\tif appCfg.Store.Bolt != nil {\n\t\t\tif appCfg.Store.Bolt.FilePath, err = fsutils.SafeParentResolvePath(appCfg.Store.Bolt.FilePath, usr, 0700); err == nil {\n\t\t\t\tstore, err = bolt_store.NewBoltLinkStore(ctx, *appCfg.Store.Bolt)\n\t\t\t}\n\t\t\tif err != nil {\n\t\t\t\tlogging.LogError(ctx, errs.KindStore, fmt.Errorf(\"init bolt store failed: %w\", err))\n\t\t\t\tlog.Exit(1)\n\t\t\t}\n\t\t} else if appCfg.Store.Mem != nil {\n\t\t\tif appCfg.Store.Mem.FilePath != \"\" {\n\t\t\t\tif appCfg.Store.Mem.FilePath, err = fsutils.SafeParentResolvePath(appCfg.Store.Mem.FilePath, usr, 0700); err != nil {\n\t\t\t\t\tlogging.LogError(ctx, errs.KindStore, fmt.Errorf(\"init mem store failed: %w\", err))\n\t\t\t\t\tlog.Exit(1)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif store, err = mem_store.NewMemStore(ctx, *appCfg.Store.Mem); err != nil {\n\t\t\t\tlogging.LogError(ctx, errs.KindStore, fmt.Errorf(\"init mem store failed: %w\", err))\n\t\t\t\tlog.Exit(1)\n\t\t\t}\n\t\t}\n\t} else {\n\t\tlogging.LogError(ctx, errs.KindStore, \"no store config\")\n\t\tlog.Exit(1)\n\t}\n\tif currConfigSaveToPath != \"\" {\n\t\tif err = viper.WriteConfigAs(currConfigSaveToPath); err != nil {\n\t\t\tlogging.LogError(ctx, errs.KindIO, fmt.Errorf(\"saving config to file [%s] failed: %w\", currConfigSaveToPath, err))\n\t\t\t// no exit (or close store)\n\t\t}\n\t}\n\n\ta = app.NewApp(store, tokenizer)\n}",
"func main() {\n\tapp := cli.NewApp()\n\tapp.Name = \"logging-k8s-controller\"\n\tapp.Version = VERSION\n\tapp.Usage = \"You need help!\"\n\tapp.Action = startServer\n\tapp.Flags = []cli.Flag{\n\t\tcli.BoolFlag{\n\t\t\tName: \"debug\",\n\t\t\tUsage: fmt.Sprintf(\n\t\t\t\t\"Set true to get debug logs\",\n\t\t\t),\n\t\t},\n\t\tcli.StringFlag{\n\t\t\tName: \"listen\",\n\t\t\tValue: \":8090\",\n\t\t\tUsage: fmt.Sprintf(\n\t\t\t\t\"Address to listen to (TCP)\",\n\t\t\t),\n\t\t},\n\t\tcli.StringFlag{\n\t\t\tName: \"k8s-config-path\",\n\t\t\tUsage: \"k8s config path\",\n\t\t},\n\t}\n\tapp.Run(os.Args)\n}"
] | [
"0.65022033",
"0.64835835",
"0.6172741",
"0.61727226",
"0.6147869",
"0.61119086",
"0.6090787",
"0.6024575",
"0.6022573",
"0.6004785",
"0.5878896",
"0.58692914",
"0.58187157",
"0.57975346",
"0.57680064",
"0.57562286",
"0.5727846",
"0.5694507",
"0.5693325",
"0.56817377",
"0.5650376",
"0.56501627",
"0.5646108",
"0.55865866",
"0.55568004",
"0.5544059",
"0.5541259",
"0.5540429",
"0.5524111",
"0.55108625",
"0.5502324",
"0.5494768",
"0.54924893",
"0.545546",
"0.545007",
"0.54247123",
"0.5420528",
"0.5411441",
"0.5411356",
"0.54080826",
"0.54076236",
"0.5398851",
"0.53900266",
"0.5385027",
"0.5349377",
"0.5344767",
"0.5334324",
"0.5302656",
"0.5284087",
"0.5277973",
"0.5277467",
"0.5259455",
"0.52568156",
"0.52514166",
"0.5247575",
"0.5241389",
"0.52342224",
"0.5233718",
"0.521399",
"0.51801795",
"0.5172657",
"0.5167269",
"0.5165817",
"0.51482433",
"0.51475936",
"0.5146328",
"0.5139179",
"0.51341873",
"0.5131406",
"0.5126829",
"0.51042086",
"0.5098101",
"0.5096342",
"0.5093511",
"0.5077725",
"0.5076253",
"0.5059296",
"0.5056479",
"0.5043914",
"0.5043277",
"0.50279224",
"0.5024104",
"0.5023546",
"0.5022461",
"0.50205725",
"0.5016784",
"0.5015546",
"0.50062686",
"0.49966186",
"0.49923542",
"0.49880373",
"0.49876705",
"0.49861178",
"0.49784175",
"0.49642769",
"0.49636185",
"0.4962717",
"0.49612993",
"0.49603677",
"0.49584097"
] | 0.5927891 | 10 |
ExpectEqual is the helper function for test each case | func ExpectEqual(alert func(format string, args ...interface{}),
expected interface{}, actual interface{}) bool {
expectedValue, actualValue := reflect.ValueOf(expected), reflect.ValueOf(actual)
equal := false
switch {
case expected == nil && actual == nil:
return true
case expected != nil && actual == nil:
equal = expectedValue.IsNil()
case expected == nil && actual != nil:
equal = actualValue.IsNil()
default:
if actualType := reflect.TypeOf(actual); actualType != nil {
if expectedValue.IsValid() && expectedValue.Type().ConvertibleTo(actualType) {
equal = reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), actual)
}
}
}
if !equal {
_, file, line, _ := runtime.Caller(1)
alert("%s:%d: missmatch, expect %v but %v", file, line, expected, actual)
return false
}
return true
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func expectEqual(actual interface{}, extra interface{}, explain ...interface{}) {\n\tgomega.ExpectWithOffset(1, actual).To(gomega.Equal(extra), explain...)\n}",
"func expectEqual(value, expected interface{}) {\n\tif value != expected {\n\t\tfmt.Printf(\"Fehler: %v bekommen, erwartet war aber %v.\\n\", value, expected)\n\t} else {\n\t\tfmt.Printf(\"OK: %v bekommen, erwartet war aber %v.\\n\", value, expected)\n\t}\n}",
"func ExpectEqual(actual interface{}, extra interface{}, explain ...interface{}) {\n\tgomega.ExpectWithOffset(1, actual).To(gomega.Equal(extra), explain...)\n}",
"func ExpectEqual(actual interface{}, extra interface{}, explain ...interface{}) {\n\tgomega.ExpectWithOffset(1, actual).To(gomega.Equal(extra), explain...)\n}",
"func (t *T) ExpectEqual(a, b interface{}) {\n\tif !reflect.DeepEqual(a, b) {\n\t\tlog.Printf(\"Expectation failed:\\n\\t%#v\\n\\ndoes not equal\\n\\n\\t%#v\", a, b)\n\t\tt.Failed = true\n\t}\n}",
"func assertEquals(t *testing.T, actual, want interface{}) {\n\tt.Helper()\n\n\tif !reflect.DeepEqual(actual, want) {\n\t\t// t.Helper()を呼ばないと↓のファイル名、行番号が出力されてしまう\n\t\tt.Errorf(\"not equals; actual:%v, want:%v\", actual, want)\n\t}\n}",
"func expectSame(t *T, expected, actual interface{}) bool {\n\tif !reflect.DeepEqual(expected, actual) {\n\t\texpectf(t, expected, actual)\n\t\treturn false\n\t}\n\n\treturn true\n}",
"func assertEquals(t *testing.T, res, std interface{}) {\n\tt.Helper()\n\tif !reflect.DeepEqual(res, std) {\n\t\tlogUnexpected(t, std, res)\n\t}\n}",
"func requireEqual(expected interface{}) require.ValueAssertionFunc {\n\treturn func(t require.TestingT, actual interface{}, msgAndArgs ...interface{}) {\n\t\trequire.Equal(t, expected, actual, msgAndArgs...)\n\t}\n}",
"func Expect(t *testing.T, v, m interface{}) {\n\tvt, vok := v.(Equaler)\n\tmt, mok := m.(Equaler)\n\n\tvar state bool\n\tif vok && mok {\n\t\tstate = vt.Equal(mt)\n\t} else {\n\t\tstate = reflect.DeepEqual(v, m)\n\t}\n\n\tif state {\n\t\tflux.FatalFailed(t, \"Value %+v and %+v are not a match\", v, m)\n\t\treturn\n\t}\n\tflux.LogPassed(t, \"Value %+v and %+v are a match\", v, m)\n}",
"func assertEqual(t *testing.T, expected, actual interface{}) bool {\n\n\tif assert.ObjectsAreEqual(expected, actual) {\n\t\treturn true\n\t}\n\n\tmessage := fmt.Sprintf(\n\t\t\"Not equal: \\nexpected: %s\\nactual : %s\",\n\t\texpected,\n\t\tactual,\n\t)\n\n\treturn assert.Fail(t, message)\n}",
"func assertEqual(t *testing.T, expected, actual interface{}) bool {\n\n\tif assert.ObjectsAreEqual(expected, actual) {\n\t\treturn true\n\t}\n\n\tmessage := fmt.Sprintf(\n\t\t\"Not equal: \\nexpected: %s\\nactual : %s\",\n\t\texpected,\n\t\tactual,\n\t)\n\n\treturn assert.Fail(t, message)\n}",
"func equal(t *testing.T, expected, actual interface{}) {\n\tif !reflect.DeepEqual(expected, actual) {\n\t\tt.Errorf(\"Expected %v (type %v) - Got %v (type %v)\", expected, reflect.TypeOf(expected), actual, reflect.TypeOf(actual))\n\t}\n}",
"func Equal(t *testing.T, expected, actual interface{}) {\n\tt.Helper()\n\n\tif expected != actual {\n\t\tt.Errorf(`%s: expected \"%v\" actual \"%v\"`, t.Name(), expected, actual)\n\t}\n}",
"func assertEqual(t *testing.T, expected interface{}, actual interface{}) {\n\tt.Helper()\n\n\tif !reflect.DeepEqual(expected, actual) {\n\t\tt.Errorf(\"Assertion failed:\\nexpected: %#v\\n actual: %#v\", expected, actual)\n\t}\n}",
"func equals(tb testing.TB, got, want interface{}) {\n\ttb.Helper()\n\tif !reflect.DeepEqual(got, want) {\n\t\ttb.Fatalf(\"\\033[31m\\n\\n\\tgot: %#v\\n\\n\\twant: %#v\\033[39m\\n\\n\", got, want)\n\t}\n}",
"func assertBytesEqual(t *testing.T, expected, actual []byte, format string, args ...interface{}) {\n\tmatch := true\n\tmismatchIndex := 0\n\tif len(expected) == len(actual) {\n\t\tfor i := 0; i < len(expected); i++ {\n\t\t\tif expected[i] != actual[i] {\n\t\t\t\tmatch = false\n\t\t\t\tmismatchIndex = i\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t} else {\n\t\tmatch = false\n\t\tt.Errorf(\"Lengths don't match Expected=%d Actual=%d\", len(expected), len(actual))\n\t}\n\tif !match {\n\t\tt.Errorf(\"Mismatch at index %d \", mismatchIndex)\n\t\tt.Errorf(\"\\tActual String = %s\", string(actual))\n\t\tt.Errorf(\"\\tExpected String = %s\", string(expected))\n\t\tt.Errorf(\"\\tActual = %v\", actual)\n\t\tt.Errorf(\"\\tExpected = %v\", expected)\n\t\tt.Errorf(format, args)\n\t}\n}",
"func Equal[T any](t testing.TB, expected, actual T, msgAndArgs ...interface{}) {\n\tif objectsAreEqual(expected, actual) {\n\t\treturn\n\t}\n\tt.Helper()\n\tmsg := formatMsgAndArgs(\"Expected values to be equal:\", msgAndArgs...)\n\tt.Fatalf(\"%s\\n%s\", msg, diff(expected, actual))\n}",
"func TestAssertEqualBytes(t *testing.T) {\n\tdata := []byte{9, 9, 1, 1, 1, 9, 9}\n\tassertBytesEqual(t, data, data, \"Self\")\n\tassertBytesEqual(t, data[1:4], data[1:4], \"Self\")\n\tassertBytesEqual(t, []byte{1, 1}, []byte{1, 1}, \"Simple match\")\n\tassertBytesEqual(t, []byte{1, 2, 3}, []byte{1, 2, 3}, \"content mismatch\")\n\tassertBytesEqual(t, []byte{1, 1, 1}, data[2:5], \"slice match\")\n}",
"func equalFunc(t *testing.T) func(got, want interface{}) {\n\treturn func(a, b interface{}) {\n\t\tt.Helper()\n\t\tif !reflect.DeepEqual(a, b) {\n\t\t\tt.Fatalf(\"mismatch: got %v, wanted %v\", a, b)\n\t\t}\n\t}\n}",
"func (a Assert) Equal(want interface{}, have interface{}) {\n\tequal(a.t, want, have)\n}",
"func Equal(t *testing.T, expected, actual interface{}, message ...string) {\n\tif !compareEquality(expected, actual) {\n\t\tt.Errorf(\"%v\\nExpected \\n\\t[%#v]\\nto be\\n\\t[%#v]\\n%v \", message, actual, expected, callerInfo(2 +callStackAdjust))\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tlog.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func Equal(t *testing.T, expected, result interface{}) {\n\tif !reflect.DeepEqual(result, expected) {\n\t\tt.Errorf(\"should be %v instead of %v\", expected, result)\n\t}\n}",
"func assertEqual(t *testing.T, expected, actual interface{}, name string) {\n\tif expected != actual {\n\t\tt.Fatalf(\"\\nexpected %s: %+v\\nactual %s: %+v\", name, expected, name, actual)\n\t}\n}",
"func assertEqual(t *testing.T, a interface{}, b interface{}) bool {\n\tm := getMessage()\n\tif a != b {\n\t\tt.Errorf(\"Expected to be equal. %v != %v. %v\", a, b, m)\n\t\treturn false\n\t}\n\treturn true\n}",
"func AssertEqual(t *testing.T, msg string, a interface{}, b interface{}) {\n\tif a == b {\n\t\treturn\n\t}\n\t// debug.PrintStack()\n\tt.Errorf(\"%s was incorrect, received %v, expected %v.\", msg, a, b)\n}",
"func assertEq(expected string, actual string, t *testing.T) {\n\tt.Helper()\n\tif actual != expected {\n\t\tt.Error(\"expected: \", expected, \" but got: \", actual)\n\t}\n}",
"func (tc TestCases) expect() {\n\tfmt.Println(cnt)\n\tcnt++\n\tif !reflect.DeepEqual(tc.resp, tc.respExp) {\n\t\ttc.t.Error(fmt.Sprintf(\"\\nRequested: \", tc.req, \"\\nExpected: \", tc.respExp, \"\\nFound: \", tc.resp))\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%str:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.FailNow()\n\t}\n}",
"func checkEqual(t *testing.T, a interface{}, b interface{}, messagePrefix string) {\n\tif a == b {\n\t\treturn\n\t}\n\tmessage := fmt.Sprintf(\"%v != %v\", a, b)\n\tif len(messagePrefix) != 0 {\n\t\tmessage = messagePrefix + \": \" + message\n\t}\n\tt.Error(message)\n}",
"func AssertEqual(t *testing.T, a interface{}, b interface{}) {\n if a != b {\n t.Fatalf(\"%s != %s\", a, b)\n }\n}",
"func Equal(values ...interface{}) (failureMessage string) {\n\tif values[0] != values[1] {\n\t\tfailureMessage = fmt.Sprintf(\"Expected `%v` to equal `%v`\", values[0], values[1])\n\t}\n\treturn\n}",
"func equals(tb testing.TB, exp, act interface{}) {\n\tif !reflect.DeepEqual(exp, act) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texp: %#v\\n\\n\\tgot: %#v\\033[39m\\n\\n\", filepath.Base(file), line, exp, act)\n\t\ttb.Fail()\n\t}\n}",
"func AssertEqual(expected interface{}, result interface{}) {\n\tAssertType(expected, result)\n\tif expected == nil && result == nil {\n\t\treturn\n\t}\n\tswitch result.(type) {\n\tcase string, uint, uint64, int, int64, error, bool:\n\t\tif expected != result {\n\t\t\tpanic(fmt.Sprintf(\"Error: [] Mismatched Values\\nExpected value: %v\\nResult: %v\", expected, result))\n\t\t}\n\tdefault:\n\t\tpanic(\"Error: AssertEqual doesn't handles this type yet.\")\n\t}\n\n}",
"func (s *Suite) Equal(exp, act interface{}, message ...string) bool {\n\ts.setup()\n\tif exp != act {\n\t\tif len(message) > 0 {\n\t\t\treturn s.Status.failWithCustomMsg(message[0], s.callerInfo)\n\t\t}\n\t\treturn s.Status.fail(exp, act, s.callerInfo)\n\t}\n\treturn s.Status.pass()\n}",
"func EqualAssert(v1,v2 int) bool{\n\tif v1 == v2 {\n\t\treturn true\n\t} else {\n\t\tif show == true {\n\t\t\tfmt.Printf(\"Failed! %d and %d are not equal : \\n\",v1,v2)\n\t\t}\n\t\treturn false\n\t}\n}",
"func checkEqual(t *testing.T, expected, effective interface{}) {\n\tif !reflect.DeepEqual(expected, effective) {\n\t\tt.Errorf(\"FAIL:\\n expected: %#v\\neffective: %#v\", expected, effective)\n\t}\n}",
"func StrictExpect(t *testing.T, v, m interface{}) {\n\tvt, vok := v.(Equaler)\n\tmt, mok := m.(Equaler)\n\n\tvar state bool\n\tif vok && mok {\n\t\tstate = vt.Equal(mt)\n\t} else {\n\t\tstate = (v == m)\n\t}\n\n\tif state {\n\t\tflux.FatalFailed(t, \"Value %+v and %+v are not a match\", v, m)\n\t\treturn\n\t}\n\tflux.LogPassed(t, \"Value %+v and %+v are a match\", v, m)\n}",
"func AssertEqual(t *testing.T, a interface{}, b interface{}) {\n\tif a == b {\n\t\treturn\n\t}\n\t// debug.PrintStack()\n\tt.Errorf(\"Received %v (type %v), expected %v (type %v)\", a, reflect.TypeOf(a), b, reflect.TypeOf(b))\n}",
"func (s *HelpersS) TestCheckEqualFailing(c *gocheck.C) {\n log := \"helpers_test.go:[0-9]+ > helpers_test.go:[0-9]+:\\n\" +\n \"\\\\.+ CheckEqual\\\\(obtained, expected\\\\):\\n\" +\n \"\\\\.+ Obtained \\\\(int\\\\): 10\\n\" +\n \"\\\\.+ Expected \\\\(int\\\\): 20\\n\\n\"\n testHelperFailure(c, \"CheckEqual(10, 20)\", false, false, log,\n func() interface{} {\n return c.CheckEqual(10, 20)\n })\n}",
"func (a *Assertions) Equal(expected interface{}, actual interface{}, userMessageComponents ...interface{}) bool {\n\ta.assertion()\n\tif didFail, message := shouldBeEqual(expected, actual); didFail {\n\t\treturn a.fail(message, userMessageComponents...)\n\t}\n\treturn true\n}",
"func equals(t testing.TB, got, exp interface{}) {\n\tif !cmp.Equal(exp, got) {\n\t\tt.Fatalf(\"\\n\\tgot: %#v\\n\\texp: %#v\\n\", got, exp)\n\t}\n}",
"func (t *T) Equal(got, want interface{}) bool {\n\tt.Helper()\n\n\teq := cmp.Equal(got, want)\n\tif eq {\n\t\tt.Logf(\"%s: got %+v\", caller(), got)\n\t\treturn eq\n\t}\n\n\tgotStr, gotStrOK := got.(string)\n\twantStr, wantStrOK := want.(string)\n\tif gotStrOK && wantStrOK {\n\t\tdiff := difflib.UnifiedDiff{\n\t\t\tA: difflib.SplitLines(gotStr),\n\t\t\tB: difflib.SplitLines(wantStr),\n\t\t\tFromFile: \"got\",\n\t\t\tToFile: \"want\",\n\t\t\tContext: 3,\n\t\t}\n\t\ttext, _ := difflib.GetUnifiedDiffString(diff)\n\t\tt.Errorf(\"%s:\\n%s\", caller(), text)\n\t\treturn eq\n\t}\n\tif displayDumpDiff(got, want) {\n\t\tspew := spew.ConfigState{\n\t\t\tIndent: \" \",\n\t\t\tDisableMethods: true,\n\t\t\tDisablePointerAddresses: true,\n\t\t\tDisableCapacities: true,\n\t\t\tSortKeys: true,\n\t\t\tSpewKeys: true,\n\t\t}\n\t\tgotDump := spew.Sdump(got)\n\t\twantDump := spew.Sdump(want)\n\t\tdiff := difflib.UnifiedDiff{\n\t\t\tA: difflib.SplitLines(gotDump),\n\t\t\tB: difflib.SplitLines(wantDump),\n\t\t\tFromFile: \"got\",\n\t\t\tToFile: \"want\",\n\t\t\tContext: 3,\n\t\t}\n\t\ttext, _ := difflib.GetUnifiedDiffString(diff)\n\t\tt.Errorf(\"%s:\\n%s\", caller(), text)\n\t\treturn eq\n\t}\n\n\tt.Errorf(\"%s: got %+v, want %+v\", caller(), got, want)\n\treturn eq\n}",
"func Equal(t Testing, expected, actual interface{}, formatAndArgs ...interface{}) bool {\n\tif !AreEqualObjects(expected, actual) {\n\t\treturn Fail(t,\n\t\t\tfmt.Sprintf(\n\t\t\t\t\"Expected values are NOT equal.%s\",\n\t\t\t\tdiffValues(expected, actual),\n\t\t\t),\n\t\t\tformatAndArgs...)\n\t}\n\n\treturn true\n}",
"func shouldEqual(value, expected string, t *testing.T) {\n\tif value != expected {\n\t\tt.Errorf(\"Value is '%s', expected '%s'\", value, expected)\n\t}\n}",
"func Equal(t testing.TB, expected, actual interface{}, msgAndArgs ...interface{}) bool {\n\tif err := validateEqualArgs(expected, actual); err != nil {\n\t\treturn failTest(t, 1, fmt.Sprintf(\"Equal: invalid operation `%#v` == `%#v` (%v)\", expected, actual, err), msgAndArgs...)\n\t}\n\n\tif !IsObjectEqual(expected, actual) {\n\t\treturn failTest(t, 1, fmt.Sprintf(\"Equal: expected `%#v`, actual `%#v`\", expected, actual), msgAndArgs...)\n\t}\n\n\treturn true\n}",
"func Equal(t TestingT, expected, actual interface{}, extras ...interface{}) bool {\n\tif !DeepEqual(expected, actual) {\n\t\treturn Errorf(t, \"Expect to be equal\", []labeledOutput{\n\t\t\t{\n\t\t\t\tlabel: labelMessages,\n\t\t\t\tcontent: formatExtras(extras...),\n\t\t\t},\n\t\t\t{\n\t\t\t\tlabel: \"Diff\",\n\t\t\t\tcontent: diff(expected, actual),\n\t\t\t},\n\t\t})\n\t}\n\n\treturn true\n}",
"func assertEquals(t testing.TB, expected, actual interface{}) {\n\tif !reflect.DeepEqual(expected, actual) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\tfmt.Printf(\"\\033[31m%s:%d:\\n\\n\\texpected: %#v\\n\\n\\tactual: %#v\\033[39m\\n\\n\", filepath.Base(file), line, expected, actual)\n\t\tt.FailNow()\n\t}\n}",
"func SameAssert(v1,v2 string) bool{\n\tif v1 == v2 {\n\t\treturn true\n\t} else {\n\t\tif show == true {\n\t\t\tfmt.Printf(\"Failed! %s and %s are not same : \\n\",v1,v2)\n\t\t}\n\t\treturn false\n\t}\n}",
"func TestEqual(t *testing.T) {\n\ttables := []struct {\n\t\tx []string\n\t\ty []string\n\t\texpected bool\n\t}{\n\t\t{[]string{}, []string{}, true},\n\t\t{[]string{}, []string{\"\"}, false},\n\t\t{[]string{\"\"}, []string{\"\"}, true},\n\t\t{[]string{\"\"}, []string{\"a\"}, false},\n\t\t{[]string{\"a\"}, []string{\"a\", \"a\"}, false},\n\t\t{[]string{\"b\"}, []string{\"a\"}, false},\n\t\t{[]string{\"\", \"\", \"\"}, []string{\"\", \"\", \"\"}, true},\n\t\t{[]string{\"a\", \"b\", \"c\"}, []string{\"a\", \"b\", \"e\"}, false},\n\t}\n\n\tfor _, table := range tables {\n\t\tresult := Equal(table.x, table.y)\n\t\tif result != table.expected {\n\t\t\tt.Errorf(\"Match failed for (%s, %s). Expected %t, got %t\",\n\t\t\t\ttable.x, table.y, table.expected, result)\n\t\t}\n\t}\n}",
"func AssertEqual(a interface{}, b interface{}) {\n\tif a != b {\n\t\tpanic(\"Assertion Fail!\")\n\t}\n}",
"func AssertIntEqual(t *testing.T, expect, actual, errMsg string) {\n\tif expect != actual {\n\t\tt.Errorf(\"%s, expect:%s, actual:%s\", errMsg, expect, actual)\n\t}\n}",
"func AssertEqual(t *testing.T, actual interface{}, expected interface{}) error {\n\tif !reflect.DeepEqual(expected, actual) {\n\t\tmsg := fmt.Sprintf(\"\\n(%v) Not Equal:\\n\"+\" - Expected: %#v\\n - Received: %#v\\n\", t.Name(), expected, actual)\n\t\tfmt.Println(msg)\n\t\tt.Fail()\n\t\treturn errors.New(msg)\n\t}\n\treturn nil\n}",
"func AssertEquals(expected string, actual string, t *testing.T) {\n\tif expected != actual {\n\t\tt.Errorf(\"\\nE: %s\\nA: %s\", strconv.Quote(expected), strconv.Quote(actual))\n\t}\n}",
"func Equal(t *testing.T, a, b interface{}) {\n\tif a != b && !reflect.DeepEqual(a, b) {\n\t\tt.Errorf(\"%v Not Equal: %v == %v\", line(), a, b)\n\t}\n}",
"func AssertJSONEquals(expected string, given string, t *testing.T) {\n\ta := []byte(expected)\n\tb := []byte(given)\n\tAssertJSONEqualsBytes(a, b, t)\n}",
"func AssertEqualWithDiff(t *testing.T, expected, actual interface{}) {\n\tif !assert.Equal(t, expected, actual) {\n\t\t// the maximum levels of a struct to recurse into\n\t\t// this prevents infinite recursion from circular references\n\t\tdeep.MaxDepth = 100\n\n\t\tdiff := deep.Equal(expected, actual)\n\n\t\tif len(diff) != 0 {\n\t\t\ts := strings.Builder{}\n\n\t\t\tfor i, d := range diff {\n\t\t\t\tif i == 0 {\n\t\t\t\t\ts.WriteString(\"diff : \")\n\t\t\t\t} else {\n\t\t\t\t\ts.WriteString(\" \")\n\t\t\t\t}\n\n\t\t\t\ts.WriteString(d)\n\t\t\t\ts.WriteString(\"\\n\")\n\t\t\t}\n\n\t\t\tt.Errorf(\"Not equal: \\n\"+\n\t\t\t\t\"expected: %s\\n\"+\n\t\t\t\t\"actual : %s\\n\\n\"+\n\t\t\t\t\"%s\", expected, actual, s.String(),\n\t\t\t)\n\t\t}\n\t}\n}",
"func AssertJSONEqualsBytes(expected []byte, given []byte, t *testing.T) {\n\teq, err := jsonBytesEqual(expected, given)\n\n\tif err != nil {\n\t\tt.Fatalf(\"Internal compare failure:%s\", err)\n\t}\n\tif !eq {\n\n\t\tt.Fatalf(\"JSON differs:\\nExpected:\\n-----\\n%s\\n-----\\nGot :\\n-----\\n%s\\n-----\\n\", string(expected), string(given))\n\t}\n}",
"func AssertFilterEqual(t *testing.T, v1, v2 interface{}, ignoreTypes []interface{}) {\n\tt.Helper()\n\n\tdiff := cmp.Diff(v1, v2, cmpopts.IgnoreTypes(ignoreTypes...),\n\t\tcmp.Exporter(func(reflect.Type) bool { return true }))\n\tif diff != \"\" {\n\t\tt.Errorf(\"Not equal:\\n%s\", diff)\n\t}\n}",
"func (s *StorageSuite) TestServersEquality(c *check.C) {\n\tservers := Servers{{\n\t\tAdvertiseIP: \"192.168.1.1\",\n\t\tHostname: \"node-1\",\n\t\tRole: \"worker\",\n\t}}\n\ttestCases := []struct {\n\t\tservers Servers\n\t\tresult bool\n\t\tcomment string\n\t}{\n\t\t{\n\t\t\tservers: Servers{{\n\t\t\t\tAdvertiseIP: \"192.168.1.1\",\n\t\t\t\tHostname: \"node-1\",\n\t\t\t\tRole: \"worker\",\n\t\t\t}},\n\t\t\tresult: true,\n\t\t\tcomment: \"Servers should be equal\",\n\t\t},\n\t\t{\n\t\t\tservers: Servers{\n\t\t\t\t{\n\t\t\t\t\tAdvertiseIP: \"192.168.1.1\",\n\t\t\t\t\tHostname: \"node-1\",\n\t\t\t\t\tRole: \"worker\",\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAdvertiseIP: \"192.168.1.2\",\n\t\t\t\t\tHostname: \"node-2\",\n\t\t\t\t\tRole: \"worker\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: false,\n\t\t\tcomment: \"Servers should not be equal: different number of servers\",\n\t\t},\n\t\t{\n\t\t\tservers: Servers{{\n\t\t\t\tAdvertiseIP: \"192.168.1.2\",\n\t\t\t\tHostname: \"node-1\",\n\t\t\t\tRole: \"worker\",\n\t\t\t}},\n\t\t\tresult: false,\n\t\t\tcomment: \"Servers should not be equal: different IPs\",\n\t\t},\n\t\t{\n\t\t\tservers: Servers{{\n\t\t\t\tAdvertiseIP: \"192.168.1.1\",\n\t\t\t\tHostname: \"node-2\",\n\t\t\t\tRole: \"worker\",\n\t\t\t}},\n\t\t\tresult: false,\n\t\t\tcomment: \"Servers should not be equal: different hostnames\",\n\t\t},\n\t\t{\n\t\t\tservers: Servers{{\n\t\t\t\tAdvertiseIP: \"192.168.1.1\",\n\t\t\t\tHostname: \"node-1\",\n\t\t\t\tRole: \"db\",\n\t\t\t}},\n\t\t\tresult: false,\n\t\t\tcomment: \"Servers should not be equal: different roles\",\n\t\t},\n\t}\n\tfor _, tc := range testCases {\n\t\tc.Assert(servers.IsEqualTo(tc.servers), check.Equals, tc.result,\n\t\t\tcheck.Commentf(tc.comment))\n\t}\n}",
"func assertEQ(a, b interface{}, t *testing.T) {\n\tif a != b {\n\t\tdebug.PrintStack()\n\t\tt.Fatal(\"assertEQ failed: \", a, \"!=\", b, \"\\n\")\n\t}\n}",
"func AssertEqual(t Failable, fieldName string, expected, actual interface{}) {\n\tt.Helper()\n\n\tfail := func() {\n\t\tt.Helper()\n\n\t\tdiff, err := kmp.SafeDiff(expected, actual)\n\t\tif err == nil {\n\t\t\tt.Fatalf(\"%s: expected %s to be equal expected: %#v actual: %#v diff (-expected, +actual): %s\", t.Name(), fieldName, expected, actual, diff)\n\t\t} else {\n\t\t\tt.Fatalf(\"%s: expected %s to be equal expected: %#v actual: %#v\", t.Name(), fieldName, expected, actual)\n\t\t}\n\t}\n\n\tv1, v2 := reflect.ValueOf(expected), reflect.ValueOf(actual)\n\tif v1.Kind() != v2.Kind() {\n\t\tfail()\n\t}\n\n\t// If the type has a length and can be nil, ensure one isn't nil.\n\t// DeepEqual would return a false positive.\n\tswitch v1.Kind() {\n\tcase reflect.Array, reflect.Chan, reflect.Map, reflect.Slice:\n\t\tif v1.Len() == 0 && v2.Len() == 0 {\n\t\t\treturn\n\t\t}\n\t}\n\n\tif !reflect.DeepEqual(expected, actual) {\n\t\tfail()\n\t}\n}",
"func Equalf(t *testing.T, exp, act interface{}, msg string, v ...interface{}) {\n\tif diff := diff(exp, act); diff != \"\" {\n\t\tt.Fatalf(msg+\": %v\", append(v, diff)...)\n\t}\n}",
"func assertEquals(t *testing.T, a, b int) {\n\tif a != b {\n\t\tt.Error(\"Assertion failed.\")\n\t}\n}",
"func (t *T) Equal(have, want interface{}, desc ...string) {\n\tt.EqualWithIgnores(have, want, nil, desc...)\n}",
"func equals(tb testing.TB, want, have interface{}) {\n\tif !reflect.DeepEqual(want, have) {\n\t\t_, file, line, _ := runtime.Caller(1)\n\t\ttb.Errorf(\"%s:%d: want %#v, have %#v\", filepath.Base(file), line, want, have)\n\t}\n}",
"func AssertEqualFiles(t *testing.T, fn0, fn1 string) {\n\tcmp := equalfile.New(nil, equalfile.Options{}) // compare using single mode\n\tok, err := cmp.CompareFile(fn0, fn1)\n\tassert.Nil(t, err)\n\tassert.True(t, ok)\n}",
"func Equal(t t, want interface{}, have interface{}) {\n\tequal(t, want, have)\n}",
"func assertEquals(t *testing.T, fixtureFilename string, actual []byte) {\n\tt.Helper()\n\tfixturePathAbs, err := filepath.Abs(fixturePath(fixtureFilename))\n\tth.AssertNoErr(t, err)\n\tactualPathAbs := fixturePathAbs + \".actual\"\n\terr = os.WriteFile(actualPathAbs, actual, 0o600)\n\tth.AssertNoErr(t, err)\n\n\texpected, err := os.ReadFile(fixturePathAbs)\n\tth.AssertNoErr(t, err)\n\tth.AssertEquals(t, string(expected), string(actual))\n}",
"func ExpectBodyEquals(expected string) HTTPResponseBodyExpectation {\n\treturn HTTPResponseBodyExpectation{\n\t\tExpected: expected,\n\t\tExactMatch: true,\n\t}\n}",
"func IsEqual(t *testing.T, val1, val2 interface{}) bool {\n\tv1 := reflect.ValueOf(val1)\n\tv2 := reflect.ValueOf(val2)\n\n\tif v1.Kind() == reflect.Ptr {\n\t\tv1 = v1.Elem()\n\t}\n\n\tif v2.Kind() == reflect.Ptr {\n\t\tv2 = v2.Elem()\n\t}\n\n\tif !v1.IsValid() && !v2.IsValid() {\n\t\treturn true\n\t}\n\n\tswitch v1.Kind() {\n\tcase reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:\n\t\tif v1.IsNil() {\n\t\t\tv1 = reflect.ValueOf(nil)\n\t\t}\n\t}\n\n\tswitch v2.Kind() {\n\tcase reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:\n\t\tif v2.IsNil() {\n\t\t\tv2 = reflect.ValueOf(nil)\n\t\t}\n\t}\n\n\tv1Underlying := reflect.Zero(reflect.TypeOf(v1)).Interface()\n\tv2Underlying := reflect.Zero(reflect.TypeOf(v2)).Interface()\n\n\tif v1 == v1Underlying {\n\t\tif v2 == v2Underlying {\n\t\t\tgoto CASE4\n\t\t} else {\n\t\t\tgoto CASE3\n\t\t}\n\t} else {\n\t\tif v2 == v2Underlying {\n\t\t\tgoto CASE2\n\t\t} else {\n\t\t\tgoto CASE1\n\t\t}\n\t}\n\nCASE1:\n\t// fmt.Println(\"CASE 1\")\n\treturn reflect.DeepEqual(v1.Interface(), v2.Interface())\nCASE2:\n\t// fmt.Println(\"CASE 2\")\n\treturn reflect.DeepEqual(v1.Interface(), v2)\nCASE3:\n\t// fmt.Println(\"CASE 3\")\n\treturn reflect.DeepEqual(v1, v2.Interface())\nCASE4:\n\t// fmt.Println(\"CASE 4\")\n\treturn reflect.DeepEqual(v1, v2)\n}",
"func AssertEquals(expected string, given string, t *testing.T) {\n\tif expected != given {\n\n\t\tt.Fatalf(\"Strings differ:\\nExpected:\\n-----\\n%s\\n-----\\nGot :\\n-----\\n%s\\n-----\\n\", expected, given)\n\t}\n}",
"func (mmEquals *mDigestHolderMockEquals) Expect(other DigestHolder) *mDigestHolderMockEquals {\n\tif mmEquals.mock.funcEquals != nil {\n\t\tmmEquals.mock.t.Fatalf(\"DigestHolderMock.Equals mock is already set by Set\")\n\t}\n\n\tif mmEquals.defaultExpectation == nil {\n\t\tmmEquals.defaultExpectation = &DigestHolderMockEqualsExpectation{}\n\t}\n\n\tmmEquals.defaultExpectation.params = &DigestHolderMockEqualsParams{other}\n\tfor _, e := range mmEquals.expectations {\n\t\tif minimock.Equal(e.params, mmEquals.defaultExpectation.params) {\n\t\t\tmmEquals.mock.t.Fatalf(\"Expectation set by When has same params: %#v\", *mmEquals.defaultExpectation.params)\n\t\t}\n\t}\n\n\treturn mmEquals\n}",
"func Equals(tb testing.TB, expected, actual interface{}) {\n\ttb.Helper()\n\tif !reflect.DeepEqual(expected, actual) {\n\t\ttb.Fatalf(\"expected: %#[1]v (%[1]T) but got: %#[2]v (%[2]T)\\n\", expected, actual)\n\t}\n}",
"func Equal[T comparable](val, want T, a ...any) {\n\tif want != val {\n\t\tdefMsg := fmt.Sprintf(assertionMsg+\": got '%v', want '%v'\", val, want)\n\t\tDefault().reportAssertionFault(defMsg, a...)\n\t}\n}",
"func assertEquals(t *testing.T, expected, result int) {\n\tif expected != result {\n\t\tt.Errorf(\"Expected %d but got %d\", expected, result)\n\t}\n}",
"func assert_equal(value, expected int) {\n\tif value != expected {\n\t\tpanic(fmt.Sprintf(\"expected %v, got %v\\n\", expected, value))\n\t}\n}",
"func actEqualsExp(t *testing.T, id, gfName string, actVal, expVal []byte) bool {\n\tt.Helper()\n\n\tif bytes.Equal(actVal, expVal) {\n\t\treturn true\n\t}\n\n\tt.Log(id)\n\tt.Log(\"\\t: Expected\\n\" + string(expVal))\n\tt.Log(\"\\t: Actual\\n\" + string(actVal))\n\tt.Errorf(\"\\t: The value given differs from the golden file value: %q\",\n\t\tgfName)\n\treturn false\n}",
"func (m *MockValues) Bytes() []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Bytes\")\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}",
"func CheckEqual(v1 interface{}, v2 interface{}) {\n\tif !deepComparion(v1, v2) {\n\t\tif currentT.o != nil {\n\t\t\tcurrentT.o.Errorf(\"CheckEqual failed: expected v1 == v2, but v1 != v2; v1 = %v, v2 = %v\",\n\t\t\t\tv1, v2)\n\t\t} else {\n\t\t\tfmt.Println(fmt.Errorf(\"The global testing object is nil! \" +\n\t\t\t\t\"Use SetT(*testing.T) function in your test function before all the tests.\"))\n\t\t}\n\t}\n}",
"func eq(t *testing.T, s, exp string) {\n\tt.Helper()\n\tif s != exp {\n\t\tt.Errorf(\"got %q, expected %q\", s, exp)\n\t}\n}",
"func assertEquals(expected string, actual string, t *testing.T) {\n\tif expected != actual {\n\t\tt.Fatalf(\"Expected: %s Actual: %s\", expected, actual)\n\t}\n}",
"func Equals(tb testing.TB, exp, act interface{}) {\n\ttb.Helper()\n\tif !reflect.DeepEqual(exp, act) {\n\t\ttb.Fatalf(\"\\nexp:\\t%[1]v (%[1]T)\\ngot:\\t%[2]v (%[2]T)\", exp, act)\n\t}\n}",
"func eq(args ...interface{}) bool {\n\tif len(args) == 0 {\n\t\treturn false\n\t}\n\tx := args[0]\n\tswitch x := x.(type) {\n\tcase string, int, int64, byte, float32, float64:\n\t\tfor _, y := range args[1:] {\n\t\t\tif x == y {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t\treturn false\n\t}\n\tfor _, y := range args[1:] {\n\t\tif reflect.DeepEqual(x, y) {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}"
] | [
"0.73963916",
"0.7357671",
"0.72004604",
"0.72004604",
"0.68299437",
"0.6658801",
"0.6617247",
"0.6568662",
"0.65523875",
"0.64350367",
"0.63778836",
"0.63778836",
"0.6365881",
"0.63602674",
"0.632675",
"0.63203734",
"0.6273329",
"0.6267964",
"0.6252295",
"0.6248969",
"0.6248666",
"0.62359023",
"0.62246543",
"0.6208942",
"0.62072265",
"0.6205351",
"0.61987036",
"0.6197023",
"0.6191756",
"0.61877775",
"0.61877775",
"0.61877775",
"0.61877775",
"0.61877775",
"0.61877775",
"0.61877775",
"0.61877775",
"0.61877775",
"0.61877775",
"0.61877775",
"0.61877775",
"0.61877775",
"0.61876976",
"0.61795986",
"0.6168041",
"0.61675906",
"0.61610746",
"0.61291",
"0.6117953",
"0.6090188",
"0.60865754",
"0.60670036",
"0.606547",
"0.6047694",
"0.6046454",
"0.60400057",
"0.6029338",
"0.6017417",
"0.60145575",
"0.59948075",
"0.5990289",
"0.5985215",
"0.59843355",
"0.59511566",
"0.5944786",
"0.59420866",
"0.59286666",
"0.5902044",
"0.59019774",
"0.5901364",
"0.5898625",
"0.5875551",
"0.5858377",
"0.5855027",
"0.58459705",
"0.5834934",
"0.5796385",
"0.57749796",
"0.5773854",
"0.57659554",
"0.5756125",
"0.5753775",
"0.57476604",
"0.5731589",
"0.57260215",
"0.5722968",
"0.5706832",
"0.5705105",
"0.57010025",
"0.5692924",
"0.5682962",
"0.56743205",
"0.56710505",
"0.5662603",
"0.5661329",
"0.5654962",
"0.561842",
"0.5613585"
] | 0.7185996 | 6 |
HELPERS Gets the attacker chance after accounting for modifiers | func (c *Conflict) GetModAttackerChance() int32 {
// TODO: Return modified attacker chance
return c.BaseChance()
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func doesHit(attacker *pet, defender pet) (bool){\n\tchanceToHit := float64(attacker.EffectiveACC) - defender.EffectiveEVA\n\t\n\tif float64(rand.Intn(100)) < chanceToHit {\n\t\treturn true\n\t}\n\t\n\t//fmt.Println(attacker.PetUser.Username, \" miss!\")\n\tattacker.MissCount++\n\treturn false\n}",
"func (p *Player) MobAttack(e *Enemy, l *Log){\n fmt.Printf(\"The %s is attempting to attack\", e.Name)\n TypedText(\"...\", 300)\n roll := RandomNumber(20)\n if roll > p.Armor {\n damage := RandomNumber(e.Attack)\n p.Health -= damage\n fmt.Printf(\"The attack lands and %s takes %d damage! ♥:%d \\n\", p.Name, damage, p.Health)\n\t} else {\n fmt.Printf(\"%s managed to defend the attack!\\n\", p.Name)\n }\n}",
"func getDamage(attacker *pet, defender pet) (float64) {\n\tif doesCrit(attacker) {\n\t\treturn 2*(attacker.EffectiveATK * (100.00/(defender.EffectiveDEF + 100.00)))\n\t}\n\t\n\treturn attacker.EffectiveATK * (100.00/(defender.EffectiveDEF + 100.00))\n}",
"func weapon(char core.Character, c *core.Core, r int, param map[string]int) {\n\n\texpiry := 0\n\tper := 0.03 + 0.01*float64(r)\n\tstacks := 0\n\ticd := 0\n\n\tc.Events.Subscribe(core.OnDamage, func(args ...interface{}) bool {\n\n\t\tds := args[1].(*core.Snapshot)\n\n\t\tif ds.ActorIndex != char.CharIndex() {\n\t\t\treturn false\n\t\t}\n\t\tif ds.AttackTag != core.AttackTagNormal && ds.AttackTag != core.AttackTagExtra {\n\t\t\treturn false\n\t\t}\n\t\tif icd > c.F {\n\t\t\treturn false\n\t\t}\n\t\ticd = c.F + 18\n\t\tif expiry < c.F {\n\t\t\tstacks = 0\n\t\t}\n\t\tstacks++\n\t\tif stacks > 4 {\n\t\t\tstacks = 4\n\t\t}\n\t\texpiry = c.F + 360\n\t\treturn false\n\t}, fmt.Sprintf(\"prototype-rancour-%v\", char.Name()))\n\n\tval := make([]float64, core.EndStatType)\n\tchar.AddMod(core.CharStatMod{\n\t\tKey: \"prototype\",\n\t\tExpiry: -1,\n\t\tAmount: func(a core.AttackTag) ([]float64, bool) {\n\t\t\tif expiry < c.F {\n\t\t\t\tstacks = 0\n\t\t\t\treturn nil, false\n\t\t\t}\n\t\t\tval[core.ATKP] = per * float64(stacks)\n\t\t\tval[core.DEFP] = per * float64(stacks)\n\t\t\treturn val, true\n\t\t},\n\t})\n\n}",
"func (g Gun)calculateDamage(bodyPart BodyArmor,attacker *Character, defender *Character)float64{\n\tcalibreIdx := getCalibreIdxFromCalibre(g.Calibre)\n\tdmgModifier := BODY_DAMAGE_MODIFIERS[bodyPart]\n\tdistance := calculateDistance(attacker.Location,defender.Location,attacker.isIndoor())\n\tdistanceModifier := math.Pow(CALIBRE_DAMAGE_FALLOFF[calibreIdx],distance/BULLET_DROPOFF_DISTANCE)\n\tbaseDamage := g.MaxDamage\n\tarmorDurability := defender.Armor[bodyPart].Durability/100\n\tbulletproofModifier := (100-defender.Armor[bodyPart].Bulletproof*(armorDurability))/100\n\tbulletAppropriateModifier := math.Abs(g.LoadedMagazine.ArmorPiercing-bulletproofModifier*100)/100\n\tbulletproofModifier = bulletproofModifier+bulletAppropriateModifier*BULLETPROOF_APPROPRIATE_MODIFIER\n\tdamage := baseDamage*dmgModifier*distanceModifier*bulletproofModifier\n\tdamage = pickRandomVariedAround(damage,DAMAGE_RANDOMNESS)\n\tif damage<0{\n\t\tdamage = 0\n\t}\n\tif LOG_MODE>=1{\n\t\tfmt.Printf(\"%s did %f damage to %s\\n\",attacker.Name,damage,defender.Name)\n\t}\n\tif LOG_MODE==DEBUG{\n\t\tfmt.Printf(\"Body part damage modifier: %f\\n\",dmgModifier)\n\t\tfmt.Printf(\"Distance: %f\\n\",distance)\n\t\tfmt.Printf(\"Distance modifier: %f\\n\",distanceModifier)\n\t\tfmt.Printf(\"Base damage: %f\\n\",baseDamage)\n\t\tfmt.Printf(\"Armor durability: %f\\n\",armorDurability)\n\t\tfmt.Printf(\"Bulletproof modifier: %f\\n\",bulletproofModifier)\n\t\tfmt.Printf(\"Bullet appropriate modifier: %f\\n\",bulletAppropriateModifier)\n\t}\n\n\treturn damage\n}",
"func DoDamage(a *Agent, d *Agent, odds int) string {\n\n\tvar damage int\n\tvar textOut string\n\n\t// Damage and max damage if it's a critical\n\tif Roll(1, 100) > a.TotalCritical() {\n\t\tdamage = Roll(2, a.TotalDamage())\n\t} else {\n\t\tdamage = a.TotalDamage()\n\t\ttextOut = fmt.Sprintf(CyanU(\"Critical\") + \" \")\n\t}\n\n\t// If damage is greater than the damage resist then subtract\n\tif damage > d.TotalResist() {\n\t\tdamage = damage - d.TotalResist()\n\t\t//if unlocked, damage = damage - TotalResist()\n\t\td.AdjHealth(0 - damage)\n\t\ttextOut = textOut + fmt.Sprintf(\"for %s damage. \", Red(strconv.Itoa(damage)))\n\t\ttextOut = textOut + fmt.Sprintf(\"%s's health = %s.\\n\", d.Name, Red(strconv.Itoa(d.Health.Val)))\n\t\t//else don't adjust\n\t} else {\n\t\tdamage = 0\n\t\ttextOut = textOut + fmt.Sprintf(\"%s! for %s damage. \", YellowU(\"Resist\"), Red(strconv.Itoa(damage)))\n\t\ttextOut = textOut + fmt.Sprintf(\"%s's health = %s.\\n\", d.Name, Red(strconv.Itoa(d.Health.Val)))\n\t}\n\n\t// Experience Reward\n\t// if this is a dead monster\n\t// Monster agents don't have a save file set\n\tif d.File == \"\" && d.Dead == true {\n\n\t\t// reverse the percentage\n\t\tmods := 100 - odds\n\n\t\tpercentage := float32(mods) * .01\n\n\t\t// reduce the drop by the reverse percentage\n\t\texp := float32(d.ExpDrop()) * percentage\n\n\t\t// int to float conversion rounds down towards zero by dropping\n\t\t// everything after the decimal point. So I add 1 to the exp here\n\t\t// so the player never gets 0 exp reward\n\t\texp++\n\n\t\t// exp is a float32 so do math with exp as an int\n\t\ta.Exp = a.Exp + int(exp)\n\n\t\ttextOut = textOut + fmt.Sprintf(Green(\"\\nYou gain %d experience.\\n\"), int(exp))\n\t\ta.Save()\n\t}\n\td.Save()\n\treturn textOut\n}",
"func chance(line *base.Line) {\n\tstr := line.Args[1]\n\tvar chance float64\n\n\tif strings.HasSuffix(str, \"%\") {\n\t\t// Handle 'chance of that is \\d+%'\n\t\tif i, err := strconv.Atoi(str[:len(str)-1]); err != nil {\n\t\t\tbot.ReplyN(line, \"'%s' didn't look like a % chance to me.\", str)\n\t\t\treturn\n\t\t} else {\n\t\t\tchance = float64(i) / 100\n\t\t}\n\t} else {\n\t\t// Assume the chance is a floating point number.\n\t\tif c, err := strconv.ParseFloat(str, 64); err != nil {\n\t\t\tbot.ReplyN(line, \"'%s' didn't look like a chance to me.\", str)\n\t\t\treturn\n\t\t} else {\n\t\t\tchance = c\n\t\t}\n\t}\n\n\t// Make sure the chance we've parsed lies in (0.0,1.0]\n\tif chance > 1.0 || chance <= 0.0 {\n\t\tbot.ReplyN(line, \"'%s' was outside possible chance ranges.\", str)\n\t\treturn\n\t}\n\n\t// Retrieve last seen ObjectId, replace with \"\"\n\tls := LastSeen(line.Args[0], \"\")\n\t// ok, we're good to update the chance.\n\tif fact := fc.GetById(ls); fact != nil {\n\t\t// Store the old chance, update with the new\n\t\told := fact.Chance\n\t\tfact.Chance = chance\n\t\t// Update the Modified field\n\t\tfact.Modify(line.Storable())\n\t\t// And store the new factoid data\n\t\tif err := fc.Update(bson.M{\"_id\": ls}, fact); err == nil {\n\t\t\tbot.ReplyN(line, \"'%s' was at %.0f%% chance, now is at %.0f%%.\",\n\t\t\t\tfact.Key, old*100, chance*100)\n\t\t} else {\n\t\t\tbot.ReplyN(line, \"I failed to replace '%s': %s\", fact.Key, err)\n\t\t}\n\t} else {\n\t\tbot.ReplyN(line, \"Whatever that was, I've already forgotten it.\")\n\t}\n}",
"func (g Gun)attack(bodyPart BodyArmor,attacker *Character,defender *Character){\n\tif g.LoadedMagazine.Rounds==0{\n\t\t//need to reload!\n\t\treturn\n\t}\n\thitChance := g.estimateHitChance(bodyPart,attacker,defender)\n\troll := r1.Float64()\n\tif roll<=hitChance{\n\t\tdamage := g.calculateDamage(bodyPart,attacker,defender)\n\t\tg.LoadedMagazine.Rounds--\n\t\tdefender.Health-=damage\n\t}else{\n\t\tif LOG_MODE>=1{\n\t\t\tfmt.Printf(\"Missed shot with %f probability\\n\",hitChance)\n\t\t}\n\t}\n}",
"func doesCrit(attacker *pet) (bool) {\n\tcritRand := float64(rand.Intn(100))\n\t\n\tif critRand < attacker.EffectiveCRI {\n\t\t//fmt.Println(attacker.PetUser.Username, \" rolled a\", critRand, \" crit!\")\n\t\tattacker.CritCount++\n\t\treturn true\n\t}\n\t\n\treturn false\n\t\n}",
"func RandomMob(witch *Enemy, ghoul *Enemy, slime *Enemy, goblin *Enemy, gator *Enemy) *Enemy {\n roll := RandomNumber(5)\n mob := ghoul\n if roll == 3 {\n mob = witch\n } else if roll == 2 {\n mob = ghoul\n } else if roll == 1 {\n mob = slime\n } else if roll == 4 {\n mob = goblin\n } else if roll == 5 {\n mob = gator\n }\n mob.Health = mob.MaxHealth\n return mob\n}",
"func (p *Player) BossBattle(e *Enemy, l *Log, item *Item, d *Death) int {\n t := time.Now()\n fmt.Printf(\"%s has encountered a strong foe!\\n\\nName:%s\\n♥:%d\\nAtk:%d\\nDef:%d \\n\\nIt doesn't seem to notice. Want to come back another time[1] or fight[2]? \", p.Name, e.Name, e.Health, e.Attack, e.Armor)\n var choice int\n fmt.Scanln(&choice)\n var first bool\n switch choice {\n case 1:\n sucessString := fmt.Sprintf(\"%s snuck away from a %s without it noticing.\", p.Name, e.Name)\n l.AddAction(t.Format(\"3:04:05 \") + sucessString)\n default:\n TypedText(\"You failed to select one of the options given to you.\\n\", 50)\n TypedText(\"You're just going to have to fight it\", 50)\n TypedText(\"...\\n\", 200)\n fallthrough\n case 2:\n var simulate int\n fmt.Printf(\"Would you like to view battle[1] or simulate[2]? \")\n fmt.Scanln(&simulate)\n switch simulate {\n case 1:\n userI := RandomNumber(20)\n compI := RandomNumber(20)\n fmt.Printf(\"Rolling for initiative\")\n TypedText(\"...\", 300)\n fmt.Printf(\"%s rolled a %d\\n\", p.Name, userI)\n fmt.Printf(\"The %s rolled a %d\\n\", e.Name, compI)\n if compI > userI {\n fmt.Printf(\"The %s rolled higher, they will attack first.\\n\", e.Name)\n } else {\n fmt.Printf(\"%s rolled higher, %s will attack first.\\n\", p.Name, p.Name)\n first = true\n }\n for p.Health > 0 && e.Health > 0 {\n if first == true {\n p.UserAttack(e, l)\n time.Sleep(2500 * time.Millisecond)\n if e.Health > 0 {\n p.MobAttack(e, l)\n time.Sleep(2500 * time.Millisecond)\n } else {\n break\n }\n } else {\n p.MobAttack(e, l)\n time.Sleep(2500 * time.Millisecond)\n if p.Health > 0 {\n p.UserAttack(e, l)\n } else {\n break\n }\n }\n }\n default:\n TypedText(\"Since you didn't enter a valid selection the battle will be simulated.\", 50)\n fallthrough\n case 2:\n userI := RandomNumber(20)\n compI := RandomNumber(20)\n fmt.Printf(\"Rolling for initiative\")\n fmt.Printf(\".\")\n fmt.Printf(\".\")\n fmt.Printf(\".\\n\")\n fmt.Printf(\"%s rolled a %d\\n\", p.Name, userI)\n fmt.Printf(\"The %s rolled a %d\\n\", e.Name, compI)\n if compI > userI {\n fmt.Printf(\"The %s rolled higher, they will attack first.\\n\", e.Name)\n } else {\n fmt.Printf(\"%s rolled higher, %s will attack first.\\n\", p.Name, p.Name)\n first = true\n }\n for p.Health > 0 && e.Health > 0 {\n if first == true {\n p.UserAttack(e, l)\n if e.Health > 0 {\n p.MobAttack(e, l)\n } else {\n break\n }\n } else {\n p.MobAttack(e, l)\n if p.Health > 0 {\n p.UserAttack(e, l)\n } else {\n break\n }\n }\n }\n } \n }\n if p.Health <= 0 {\n d.Death()\n } else if e.Health <= 0 && p.Health >= 0 {\n winString := fmt.Sprintf(\"%s successfully defeated a %s!\", p.Name, e.Name)\n l.AddAction(t.Format(\"3:04:05 \")+winString)\n fmt.Printf(\"The %s dropped gold.\", e.Name)\n p.Gold += e.Gold\n lootRoll := RandomNumber(100)\n if lootRoll <= 25 {\n loot := RandomItem(p, item)\n fmt.Printf(\"The %s dropped loot [%s].\", e.Name, loot)\n itemString := fmt.Sprintf(\"%s obtained a %s and %d gold from a %s.\", p.Name, loot, e.Gold, e.Name)\n l.AddAction(t.Format(\"3:04:05 \")+itemString)\n } else {\n goldString := fmt.Sprintf(\"%s gained %d gold from a %s.\", p.Name, e.Gold, e.Name)\n l.AddAction(t.Format(\"3:04:05 \")+goldString)\n }\n if p.Key == false {\n fmt.Printf(\"The %s dropped a giant key.\", e.Name)\n l.AddAction(t.Format(\"3:04:05 \")+p.Name+\" found a giant key.\")\n p.Key = true\n }\n fmt.Printf(\"\\nPosting results\")\n TypedText(\"...\", 300)\n }\n return choice\n}",
"func weapon(char core.Character, c *core.Core, r int, param map[string]int) {\n\tdmg := 0.16 + float64(r)*0.04\n\n\tc.Events.Subscribe(core.OnAttackWillLand, func(args ...interface{}) bool {\n\t\tds := args[1].(*core.Snapshot)\n\t\tt := args[0].(core.Target)\n\t\tif ds.ActorIndex != char.CharIndex() {\n\t\t\treturn false\n\t\t}\n\t\t// if t.AuraType() == def.Hydro {\n\t\t// \tds.Stats[def.DmgP] += dmg\n\t\t// \tc.Log.Debugw(\"dragonbane\", \"frame\", c.F, \"event\", def.LogCalc, \"final dmg%\", ds.Stats[def.DmgP])\n\t\t// }\n\t\tif t.AuraContains(core.Hydro, core.Pyro) {\n\t\t\tds.Stats[core.DmgP] += dmg\n\t\t\tc.Log.Debugw(\"dragonbane\", \"frame\", c.F, \"event\", core.LogCalc, \"final dmg%\", ds.Stats[core.DmgP])\n\t\t}\n\t\treturn false\n\t}, fmt.Sprintf(\"dragonbane-%v\", char.Name()))\n\n}",
"func (e Enemy) Damage() int {\n\treturn 10 * e.Level\n}",
"func weildWeapon() (string, int) {\n\tlottery := random(1, 5)\n\tvar weapon string\n\tvar weapondie int\n\tswitch lottery {\n\tcase 1:\n\t\tweapon = \"fist\"\n\t\tweapondie = 3\n\tcase 2:\n\t\tweapon = \"dagger\"\n\t\tweapondie = 4\n\tcase 3:\n\t\tweapon = \"short sword\"\n\t\tweapondie = 6\n\tcase 4:\n\t\tweapon = \"longsword\"\n\t\tweapondie = 8\n\tcase 5:\n\t\tweapon = \"greataxe\"\n\t\tweapondie = 12 // At this case, the Greataxe will deal random damage from 1 point to 12 points, a 12-side die.\n\t}\n\treturn weapon, weapondie\n}",
"func (p *Player) UserAttack (e *Enemy, l *Log){\n roll := RandomNumber(20)\n fmt.Printf(\"Rolling for attack\")\n TypedText(\"...\", 300)\n fmt.Printf(\"%s rolled a %d \\n\", p.Name, roll)\n\tif roll >= e.Armor {\n\t\tdamage := RandomNumber(p.Attack)\n e.Health -= damage\n fmt.Printf(\"%ss attack dealt %d damage to the %s! ♥:%d \\n\", p.Name, damage, e.Name, e.Health)\n\t} else {\n fmt.Printf(\"%ss attack missed!\\n\", p.Name)\n\t}\n}",
"func (c *Conflict) GetModDefenderChance() int32 {\n\t// TODO: Return modifier defender chance\n\treturn c.BaseChance()\n}",
"func entropy() {}",
"func enchantarmor() {\n\tif c[WEAR] < 0 {\n\t\tif c[SHIELD] < 0 {\n\t\t\tcursors()\n\t\t\tbeep()\n\t\t\tlprcat(\"\\nYou feel a sense of loss\")\n\t\t\treturn\n\t\t} else {\n\t\t\ttmp := iven[c[SHIELD]]\n\t\t\tif tmp != OSCROLL {\n\t\t\t\tif tmp != OPOTION {\n\t\t\t\t\tivenarg[c[SHIELD]]++\n\t\t\t\t\tbottomline()\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\ttmp := iven[c[WEAR]]\n\tif tmp != OSCROLL {\n\t\tif tmp != OPOTION {\n\t\t\tivenarg[c[WEAR]]++\n\t\t\tbottomline()\n\t\t}\n\t}\n}",
"func stealsomething() int {\n\tj := 100\n\tfor {\n\t\ti := rund(26)\n\t\tif iven[i] != 0 {\n\t\t\tif c[WEAR] != i {\n\t\t\t\tif c[WIELD] != i {\n\t\t\t\t\tif c[SHIELD] != i {\n\t\t\t\t\t\tsrcount = 0\n\t\t\t\t\t\tshow3(i)\n\t\t\t\t\t\tadjustcvalues(iven[i], ivenarg[i])\n\t\t\t\t\t\tiven[i] = 0\n\t\t\t\t\t\treturn 1\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tj--\n\t\tif j <= 0 {\n\t\t\treturn 0\n\t\t}\n\t}\n\tpanic(\"unreachable\")\n}",
"func (p *Player) Battle(e *Enemy, l *Log, i *Item, d *Death) int {\n t := time.Now()\n\n fmt.Printf(\"%s encountered an enemy!\\n\\n\", p.Name)\n e.EnemyHud()\n fmt.Printf(\"Would you like to try and escape[1] or fight[2]? \")\n\n var choice string\n fmt.Scanln(&choice)\n\n switch choice{\n case \"1\":\n roll := RandomNumber(20)\n fmt.Printf(\"Rolling a d20 in attempt to escape\")\n TypedText(\"...\", 300)\n if roll > 5 {\n sucessString := fmt.Sprintf(\"%s successfully escaped from a %s unscathed.\", p.Name, e.Name)\n l.AddAction(t.Format(\"3:04:05 \")+sucessString)\n } else {\n damage := RandomNumber(e.Attack)\n p.Health -= damage\n l.AddAction(t.Format(\"3:04:05 \") +p.Name+\" failed a clean getaway.\")\n failString := fmt.Sprintf(\"The %s dealt %d damage during the escape.\", e.Name, damage)\n l.AddAction(t.Format(\"3:04:05 \")+failString)\n }\n return 1\n default:\n TypedText(\"You failed to select one of the options given to you.\\n\", 50)\n TypedText(\"You're just going to have to fight it\", 50)\n TypedText(\"...\\n\", 200)\n fallthrough\n\n case \"2\":\n var simulate int\n fmt.Printf(\"Would you like to view battle[1] or simulate[2]? \")\n fmt.Scanln(&simulate)\n\n if simulate == 1 {\n userI := RandomNumber(20)\n compI := RandomNumber(20)\n\n fmt.Printf(\"Rolling for initiative\")\n TypedText(\"...\", 300)\n\n fmt.Printf(\"%s rolled a %d\\n\", p.Name, userI)\n fmt.Printf(\"The %s rolled a %d\\n\", e.Name, compI)\n\n var first bool\n if compI > userI {\n fmt.Printf(\"The %s rolled higher, they will attack first.\\n\", e.Name)\n } else {\n fmt.Printf(\"%s rolled higher, %s will attack first.\\n\", p.Name, p.Name)\n first = true\n }\n\n for p.Health > 0 && e.Health > 0 {\n if first == true {\n p.UserAttack(e, l)\n if e.Health > 0 {\n p.MobAttack(e, l)\n time.Sleep(2500 * time.Millisecond)\n } else {\n break\n }\n } else {\n p.MobAttack(e, l)\n time.Sleep(2500 * time.Millisecond)\n if p.Health > 0 {\n p.UserAttack(e, l)\n time.Sleep(2500 * time.Millisecond)\n } else {\n break\n }\n }\n }\n } else {\n userI := RandomNumber(20)\n compI := RandomNumber(20)\n fmt.Printf(\"Rolling for initiative...\\n\")\n\n fmt.Printf(\"%s rolled a %d\\n\", p.Name, userI)\n fmt.Printf(\"The %s rolled a %d\\n\", e.Name, compI)\n\n var first bool\n if compI > userI {\n fmt.Printf(\"The %s rolled higher, they will attack first.\\n\", e.Name)\n } else {\n fmt.Printf(\"%s rolled higher, %s will attack first.\\n\", p.Name, p.Name)\n first = true\n }\n\n for p.Health > 0 && e.Health > 0 {\n if first == true {\n p.UserAttack(e, l)\n if e.Health > 0 {\n p.MobAttack(e, l)\n } else {\n break\n }\n } else {\n p.MobAttack(e, l)\n if p.Health > 0 {\n p.UserAttack(e, l)\n } else {\n break\n }\n }\n }\n }\n }\n if p.Health <= 0 {\n d.Death()\n }\n if e.Health <= 0 && p.Health >= 0 {\n winString := fmt.Sprintf(\"%s successfully defeated a %s!\", p.Name, e.Name)\n l.AddAction(t.Format(\"3:04:05 \")+winString)\n fmt.Printf(\"The %s dropped gold.\", e.Name)\n p.Gold += e.Gold\n lootRoll := RandomNumber(100)\n//25% item drop chance currently\n if lootRoll <= 25 {\n loot := RandomItem(p, i)\n if loot == \"none\" {\n fmt.Printf(\"The %s dropped an item.\", e.Name)\n heavyString := fmt.Sprintf(\"%s found an item but is already carrying max weight limit so %s left it behind.\", p.Name, p.Name)\n l.AddAction(t.Format(\"3:04:05 \")+heavyString)\n } else {\n fmt.Printf(\"The %s dropped loot [%s].\", e.Name, loot)\n itemString := fmt.Sprintf(\"%s obtained a %s and %d gold from a %s.\", p.Name, loot, e.Gold, e.Name)\n 
l.AddAction(t.Format(\"3:04:05 \")+itemString)\n }\n } else {\n goldString := fmt.Sprintf(\"%s gained %d gold from a %s.\", p.Name, e.Gold, e.Name)\n l.AddAction(t.Format(\"3:04:05 \")+goldString)\n }\n if lootRoll == 9 && p.Key == false {\n p.Key = true\n fmt.Printf(\"The %s dropped a giant key. Wonder what it is for...\", e.Name)\n lootKey := fmt.Sprintf(\"%s found a giant key dropped by a %s\", p.Name, e.Name)\n l.AddAction(t.Format(\"3:04:05 \")+lootKey)\n }\n fmt.Printf(\"\\nPosting results\")\n TypedText(\"...\", 300)\n }\n return 2\n}",
"func (su *Superman) Attack() {\n\tfmt.Println(\"Attach with laser\")\n}",
"func Attack(a *Agent, d *Agent) (*Agent, *Agent, string) {\n\n\tvar outText string\n\n\t// roll for attacker and defender\n\tar := Roll(2, 100)\n\tdr := Roll(2, 100)\n\t// bonuses\n\t//arB := a.Str.Val + a.Weap.Attack\n\tarB := a.TotalAttack()\n\t// subtract the dodge percentage hit from armor\n\t//drB := int(float64(d.Dex.Val) - float64(d.Dex.Val)*(float64(d.Armor.Dodge)*.01))\n\tdrB := d.TotalDodge()\n\n\t// totals\n\taT := ar + arB\n\tdT := dr + drB\n\n\t// uncomment here to 'show your work'\n\t//outText = fmt.Sprintf(Black(\"Attack roll: %d plus Bonus: %d for Total: %d\\n\"), ar, arB, aT)\n\t//outText = outText + fmt.Sprintf(Black(\"Resist roll: %d plus Bonus: %d for Total: %d\\n\"), dr, drB, dT)\n\n\t// Attack wins if greater than Resist\n\t// But a tie goes to the Resist\n\tif aT > dT {\n\t\toutText = outText + fmt.Sprintf(Green(\"%s hits with %s!\\n \"), a.Name, a.Weap.Name)\n\t\treturn a, d, outText\n\t} else {\n\t\toutText = outText + fmt.Sprintf(Red(\"%s misses!\\n\"), a.Name)\n\t\treturn d, a, outText\n\t}\n\n}",
"func (GuitarBass) DiezelHerbertAmplifier(){}",
"func rnd(context *Context) {\n x := context.opcode & 0x0F00 >> 8\n b := byte(context.opcode & 0xFF)\n context.cpu.v[x] = b & byte(rand.Intn(256))\n context.cpu.pc += 2\n}",
"func PronounPossessive() string { return pronounPossessive(globalFaker.Rand) }",
"func Insult(s *discordgo.Session, m *discordgo.MessageCreate) {\n if len(m.Mentions) == 0 {\n s.ChannelMessageSend(m.ChannelID, \">>> Please include your target(s).\\n**Usage:** !insult @victim.\")\n return\n }\n rand.Seed(time.Now().UnixNano())\n word := start[rand.Intn(len(start))]\n state := Markov[word]\n insult := word\n for {\n if state == nil {\n break\n }\n tMat := transMatrix(state)\n r := rand.Float64()\n for i := range state {\n if r < tMat[i]{\n word = state[i].wrd\n insult = insult + \" \" + word\n break\n }\n }\n state = Markov[word]\n }\n s.ChannelMessageSend(m.ChannelID, (\"> **\" + m.Mentions[0].Username + \"** \" + insult))\n}",
"func (m *Monster) BaseDamage() int {\n\tswitch m.id {\n\tcase Bat: // bats deal a base of 1 always\n\t\treturn 1\n\tdefault:\n\t\td := m.Info.Dmg\n\t\tif d < 1 {\n\t\t\td++\n\t\t} else {\n\t\t\td += rand.Intn(d)\n\t\t}\n\t\td += m.Info.Lvl\n\t\treturn d\n\t}\n}",
"func (th *Thor) Attack() {\n\tfmt.Println(\"Attach with Hammer\")\n}",
"func (t *Thor) Attack() {\n\tfmt.Println(\"Attack with Hammer\")\n}",
"func CalculateExperience(attackerName string){\n\treturn\n}",
"func wearArmor(dexterity int) (string, int) {\n\tlottery := random(1, 5)\n\tvar armorname string\n\tvar armorBonus, dexBonus int\n\tdexBonus = attrModifier(dexterity)\n\tswitch lottery {\n\tcase 1:\n\t\tarmorname = \"Leather Armor\"\n\t\tarmorBonus = 2\n\t\tif dexBonus > 8 { // Every armor has a limit of how many dexterity bonus points can be added.\n\t\t\tdexBonus = 8\n\t\t}\n\tcase 2:\n\t\tarmorname = \"Chain Shirt\"\n\t\tarmorBonus = 4\n\t\tif dexBonus > 4 {\n\t\t\tdexBonus = 4\n\t\t}\n\tcase 3:\n\t\tarmorname = \"Scale Mail\"\n\t\tarmorBonus = 4\n\t\tif dexBonus > 4 {\n\t\t\tdexBonus = 4\n\t\t}\n\tcase 4:\n\t\tarmorname = \"Breastplate\"\n\t\tarmorBonus = 5\n\t\tif dexBonus > 3 {\n\t\t\tdexBonus = 3\n\t\t}\n\tcase 5:\n\t\tarmorname = \"Full Plate Armor\"\n\t\tarmorBonus = 8\n\t\tif dexBonus > 1 {\n\t\t\tdexBonus = 1\n\t\t}\n\t}\n\treturn armorname, 10 + armorBonus + dexBonus\n}",
"func boostChance(data *BoostCheckData) (int, []string) {\n\treasons := make([]string, 0)\n\tchance := 0\n\n\t// get region with highest winrate\n\thighestWr := 0\n\tvar highestWrRegion string\n\tfor region, wr := range data.Winrates {\n\t\tif wr > highestWr {\n\t\t\thighestWr = wr\n\t\t\thighestWrRegion = region\n\t\t}\n\t}\n\n\twrOnMostPlayedServer := data.Winrates[data.MostPlayedServer]\n\twrDiff := float64(highestWr - wrOnMostPlayedServer)\n\tchance = int(math.Min(wrDiff*3, 99.0))\n\twrDifference := \"Winrate on most played region (\" + data.MostPlayedServer + \", \" + strconv.Itoa(wrOnMostPlayedServer) + \"%) was \" + strconv.FormatFloat(wrDiff, 'f', 1, 64) + \" less than the highest winrate (\" + highestWrRegion + \", \" + strconv.Itoa(highestWr) + \"%)\"\n\treasons = append(reasons, wrDifference)\n\n\treturn chance, reasons\n}",
"func raisemspells(x int) {\n\tc[SPELLMAX] += x\n\tc[SPELLS] += x\n}",
"func rnd() float64 {\n\tss := *g_seed\n\tss += ss\n\tss ^= 1\n\tif int32(ss) < 0 {\n\t\tss ^= 0x88888eef\n\t}\n\t*g_seed = ss\n\treturn float64(*g_seed%95) / float64(95)\n}",
"func rnd() float64 {\n\tss := *g_seed\n\tss += ss\n\tss ^= 1\n\tif int32(ss) < 0 {\n\t\tss ^= 0x88888eef\n\t}\n\t*g_seed = ss\n\treturn float64(*g_seed%95) / float64(95)\n}",
"func enchweapon() {\n\tif c[WIELD] < 0 {\n\t\tcursors()\n\t\tbeep()\n\t\tlprcat(\"\\nYou feel a sense of loss\")\n\t\treturn\n\t}\n\ttmp := iven[c[WIELD]]\n\tif tmp != OSCROLL {\n\t\tif tmp != OPOTION {\n\t\t\tivenarg[c[WIELD]]++\n\t\t\tif tmp == OCLEVERRING {\n\t\t\t\tc[INTELLIGENCE]++\n\t\t\t} else if tmp == OSTRRING {\n\t\t\t\tc[STREXTRA]++\n\t\t\t} else if tmp == ODEXRING {\n\t\t\t\tc[DEXTERITY]++\n\t\t\t}\n\t\t\tbottomline()\n\t\t}\n\t}\n}",
"func RandomPlayer(pos Position) Intersection {\n\n\tvar chosenIntn Intersection\n\tfor {\n\t\tif 0 == rand.Intn(50) {\n\t\t\treturn PASS\n\t\t}\n\t\ti := rand.Intn(int(SIZE))\n\t\tj := rand.Intn(int(SIZE))\n\t\tchosenIntn = Intersection{uint8(i), uint8(j)}\n\t\tif pos.isLegal(chosenIntn) {\n\t\t\treturn chosenIntn\n\t\t}\n\t}\n}",
"func raptorRand(x, i, m uint32) uint32 {\n\tv0 := v0table[(x+i)%256]\n\tv1 := v1table[((x/256)+i)%256]\n\treturn (v0 ^ v1) % m\n}",
"func (g *Game) getSpareBonus(rollIndex int) int {\n\treturn g.rolls[rollIndex+2]\n}",
"func (g *AllergyGenerator) randomReaction() string {\n\treturn g.reactions[rand.Intn(len(g.reactions))]\n}",
"func (d *Degradater) RandomPercent() int {\n\treturn defaultSafeRander.Intn(101)\n}",
"func (p *Password) Entropy() float64 {\n\tp.generatePool()\n\tpoolLength := len(p.pool)\n\tpoolLength += len(p.Include)\n\treturn math.Log2(math.Pow(float64(poolLength), float64(p.Length)))\n}",
"func raiseexperience(x int) {\n\ti := c[LEVEL]\n\tc[EXPERIENCE] += x\n\tfor c[EXPERIENCE] >= skill[c[LEVEL]] && c[LEVEL] < MAXPLEVEL {\n\t\ttmp := (c[CONSTITUTION] - c[HARDGAME]) >> 1\n\t\tc[LEVEL]++\n\t\traisemhp(rnd(3) + rnd(icond(tmp > 0, tmp, 1)))\n\t\traisemspells(rund(3))\n\t\tif c[LEVEL] < 7-c[HARDGAME] {\n\t\t\traisemhp(c[CONSTITUTION] >> 2)\n\t\t}\n\t}\n\tif c[LEVEL] != i {\n\t\tcursors()\n\t\tbeep()\n\t\tlprintf(\"\\nWelcome to level %d\", c[LEVEL]) /* if we changed levels\t */\n\t}\n\tbottomline()\n}",
"func (bool shadower) shadowThem() {}",
"func (s *Superman) Attack() {\n\tfmt.Println(\"Attack with laser\")\n}",
"func RandomFortune(mod string) (*Fortune, error) {\n\tconn := Pool.Get()\n\tdefer conn.Close()\n\n\t// ensure the specified module exists\n\tif mod != \"\" {\n\t\tmember, err := redis.Bool(conn.Do(\"SISMEMBER\", MODS_KEY, mod))\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif member == false {\n\t\t\treturn nil, errors.New(fmt.Sprintf(\"module '%s' not found\", mod))\n\t\t}\n\t}\n\n\tif mod == \"\" {\n\t\tmod2, err := redis.String(conn.Do(\"SRANDMEMBER\", MODS_KEY))\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tmod = mod2\n\t}\n\n\tfid, err := redis.Int(conn.Do(\"SRANDMEMBER\", modKey(mod)))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\ttext, err := redis.String(conn.Do(\"GET\", fortuneKey(fid)))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Fortune{mod: mod, id: fid, text: text}, nil\n}",
"func combat(health, damage float64) float64 {\n\tif math.Floor(damage) == 100.0 {\n\t\treturn 0\n\t}\n\n\thealth = health * damage / 100.0\n\tif health < 1.1 {\n\t\treturn 0\n\t}\n\n\treturn health\n}",
"func (f *Faker) PronounPossessive() string { return pronounPossessive(f.Rand) }",
"func CheckForWhom(level string) int {\r\n\r\n\tfmt.Println(level)\r\n\tif level == \"all\" {\r\n\t\t// fmt.Println(\">>>> AD-checked! and 0 returned\")\r\n\t\treturn 0\r\n\t} else if level == \"judges\" {\r\n\t\treturn 1\r\n\t} else if level == \"opponents\" {\r\n\t\treturn 2\r\n\t}\r\n\treturn -1\r\n}",
"func Hobby() string { return hobby(globalFaker.Rand) }",
"func Random() string {\n\trand.Seed(time.Now().UnixNano())\n\tmin := 0\n\tmax := len(superheroes) - 1\n\treturn superheroes[rand.Intn(max-min+1)+min]\n}",
"func (d *Dungeon) MobSpawner(l *Log, p *Player) {\n t := time.Now()\n roll := RandomNumber(100)\n for i :=0; i < 25; i++ {\n if d.Room[i].Exit == false && d.Room[i].Heal == false {\n if d.Room[i].Empty == true && d.Room[i].Boss == false {\n if roll < 45 && roll > 30 {\n d.Room[i].Empty = false\n d.Room[i].Enemy = true\n }\n if roll >= 75 && d.Boss == false && d.Room[i].Chest == false && d.Room[i].OpenChest == false && d.Room[i].Exit == false && d.Room[i].X != p.X && d.Room[i].Y != p.Y {\n d.Room[i].Empty = false\n d.Room[i].Enemy = false\n d.Room[i].Boss = true\n d.Boss = true\n spawn := fmt.Sprintf(\"A strong enemy just appeared somewhere on the map. Maybe he has something neat.\")\n l.AddAction(t.Format(\"3:04:05 \")+spawn)\n }\n }\n }\n }\n}",
"func Random() string {\n\treturn proverbs[rand.Intn(len(proverbs)-1)]\n}",
"func getLevels(attacker *pet, defender *pet, won bool, ctx context) {\n\texp := calcExperience(*defender)\n\taLevelReq := 10.00 * math.Pow(float64(attacker.Level), 1.2)\n\tdLevelReq := 10.00 * math.Pow(float64(defender.Level), 1.2)\n\n\tif won {\n\t\tattacker.Experience += exp[0]\n\t\tdefender.Experience += exp[1]\n\t} else {\n\t\tattacker.Experience += exp[1]\n\t\tdefender.Experience += exp[0]\n\t}\n\n\tif attacker.Experience >= aLevelReq {\n\t\tattacker.Experience -= aLevelReq\n\t\tattacker.Level += 1\n\t\terr := doPetLevelUp(*attacker)\n\t\tif err != nil {\n\t\t\tctx.Session.ChannelMessageSend(ctx.Msg.ChannelID, \"Result storage failed, battle will not be counted.\")\n\t\t\treturn\n\t\t}\n\t\tlevelPM(*attacker, ctx)\n\t}\n\n\tif defender.Experience >= dLevelReq {\n\t\tdefender.Experience -= dLevelReq\n\t\tdefender.Level += 1\n\t\terr := doPetLevelUp(*defender)\n\t\tif err != nil {\n\t\t\tctx.Session.ChannelMessageSend(ctx.Msg.ChannelID, \"Result storage failed, battle will not be counted.\")\n\t\t\treturn\n\t\t}\n\t\tlevelPM(*defender, ctx)\n\t}\n\n\ttx, err := DataStore.Begin()\n\tif err != nil {\n\t\tctx.Session.ChannelMessageSend(ctx.Msg.ChannelID, \"Result storage failed, battle will not be counted.\")\n\t\treturn\n\t}\n\tdefer tx.Rollback()\n\n\tstmt, err := tx.Prepare(\"UPDATE pettable SET Experience = ? WHERE UserID = ?\")\n\tif err != nil {\n\t\tctx.Session.ChannelMessageSend(ctx.Msg.ChannelID, \"Result storage failed, battle will not be counted.\")\n\t}\n\tdefer stmt.Close()\n\n\t_, err = stmt.Exec(attacker.Experience, attacker.ID)\n\tif err != nil {\n\t\tctx.Session.ChannelMessageSend(ctx.Msg.ChannelID, \"Result storage failed, battle will not be counted.\")\n\t}\n\n\t_, err = stmt.Exec(defender.Experience, defender.ID)\n\tif err != nil {\n\t\tctx.Session.ChannelMessageSend(ctx.Msg.ChannelID, \"Result storage failed, battle will not be counted.\")\n\t}\n\n\ttx.Commit()\n\treturn\n}",
"func (a Energy) RandomGain(rnd *rand.Rand) Energy {\n\tgreatest := geom.Vector3(a).Greatest()\n\tif rnd.Float64() > greatest {\n\t\treturn Energy{}\n\t}\n\treturn a.Amplified(1 / greatest)\n}",
"func (attacker Player) Attack(defender Player) error {\n\tif attacker.Stamina < 5 {\n\t\treturn fmt.Errorf(\"Attacker %d is out of stamina\", attacker.id)\n\t}\n\tif defender.Health < 5 {\n\t\treturn fmt.Errorf(\"Defender %d is iced\", defender.id)\n\t}\n\tattacker.Stamina, defender.Health = attacker.Stamina-1, defender.Health-1\n\treturn nil\n}",
"func (a *attacker) incrementMaliciousRequests() {\n\ta.maliciousRequests++\n}",
"func PronounDemonstrative() string { return pronounDemonstrative(globalFaker.Rand) }",
"func RandomItem (p *Player, item *Item) string{\n switch RandomNumber(6) {\n case 1:\n item.Name = \"Rusty Sword\"\n if p.AddItem(NewItem(\"Rusty Sword\", 3)) == false {\n return \"none\"\n }\n case 2:\n item.Name = \"Small Shield\"\n if p.AddItem(NewItem(\"Small Shield\", 3)) == false {\n return \"none\"\n }\n case 3:\n item.Name = \"Battle Armor\"\n if p.AddItem(NewItem(\"Battle Armor\", 3)) == false {\n return \"none\"\n }\n case 4:\n item.Name = \"Heavy Armor\"\n if p.AddItem(NewItem(\"Heavy Armor\", 5)) == false {\n return \"none\"\n }\n case 5:\n item.Name = \"Lost Sword\"\n if p.AddItem(NewItem(\"Lost Sword\", 5)) == false {\n return \"none\"\n }\n case 6:\n item.Name = \"Large Shield\"\n if p.AddItem(NewItem(\"Large Shield\", 5)) == false {\n return \"none\"\n }\n }\n return item.Name\n}",
"func GetTotalCreaturePower(cards []Card) int {\n\tpower := 0\n\n\tfor _, card := range cards {\n\t\tpower += card.Power\n\t}\n\n\treturn power\n}",
"func DoAttack(sh SuperHero) {\n\tsh.Attack()\n}",
"func randLetter() rune {\n\treturn rune(byte(rand.Intn(26)) + 'a')\n}",
"func (mg *MoveGen) generatePawnRightAttack(pawns uint64) uint64 {\n\tarea := uint64(0xfefefefefefe00)\n\tvar attacks uint64 // TODO: en passant\n\tvar attackDirection int\n\tif mg.isWhite() {\n\t\tattackDirection = -7 // promotions\n\t\tattacks = (pawns & area) << 7\n\t\tattacks &= mg.state.colours[0]\n\t} else {\n\t\tattackDirection = 9 // promotions\n\t\tattacks = (pawns & area) >> 9\n\t\tattacks &= mg.state.colours[1]\n\t}\n\tcache := attacks\n\n\t// promotions\n\tattacks ^= mg.generatePromotions(attackDirection, attacks)\n\n\t// capture, 0b0100\n\tmg.mover.SetFlags(4)\n\tfor i := LSB(attacks); i != 64; i = NLSB(&attacks, i) {\n\t\tmg.mover.SetFrom(uint16(i + attackDirection))\n\t\tmg.mover.SetTo(uint16(i))\n\t\tmg.moves[mg.index] = mg.mover.GetMove()\n\t\tmg.index++\n\t}\n\n\treturn cache\n}",
"func (m *Monster) Damage(dmg int) (int, bool) {\n\tif dmg >= m.Info.Hitpoints { // check if damage would drop hp to 0\n\t\tdealt := m.Info.Hitpoints\n\t\tm.Info.Hitpoints = 0\n\t\treturn dealt, true\n\t}\n\n\t// Deal damage\n\tm.Info.Hitpoints -= dmg\n\treturn dmg, false\n}",
"func GetProbability(l uint) float64 {\n\treturn 1.0 / math.Pow(2.0, float64(l))\n}",
"func weightedRand(max int, talentCo float64) float64 {\n\tnum := posWeightedRand(max)\n\n\tif rand.Float64() > talentCo {\n\t\treturn num - (2 * num)\n\t}\n\n\treturn num\n}",
"func randomPowerStone(portal *sworld.Portal) sworld.Item {\n\treturn &sworld.PortalStone{\n\t\tLevel: 10,\n\t\tDuration: 10 * time.Minute,\n\t\tZone: portal.PortalStone.Zone,\n\t}\n}",
"func bruiser(lastGame game) bool {\n\tif lastGame.Statistics.TotalDamageDone >= 500 {\n\t\treturn true\n\t}\n\treturn false\n}",
"func (f *Faker) Hobby() string { return hobby(f.Rand) }",
"func (c *Libp2pPubSub) AttackVictim() {\n\tc.victim = true\n\tc.makeVictimNotGossip()\n}",
"func GenerateVeryStrongPassword(length int) *Password {\n\tfor {\n\t\tp := GeneratePassword(length)\n\t\tp.ProcessPassword()\n\t\tif p.Score == 4 {\n\t\t\treturn p\n\t\t}\n\t}\n}",
"func (b BaseDefender) GetWeaponPower(researches Researches) int64 {\n\treturn int64(float64(b.WeaponPower) * (1 + float64(researches.WeaponsTechnology)*0.1))\n}",
"func randomEnergy() float64 {\n\treturn math.Round(rand.Float64() * 100)\n}",
"func init() {\n\tpkg.AddScript(\"Giving\",\n\t\t// for summarily ( client side ) rejecting items\n\t\tThe(\"actors\", AreEither(\"items receiver\").Or(\"items rejector\")),\n\n\t\tThe(\"actors\",\n\t\t\tCan(\"acquire it\").And(\"acquiring it\").RequiresOnly(\"prop\"),\n\t\t\tTo(\"acquire it\", g.ReflectToTarget(\"be acquired\"))),\n\n\t\tThe(\"props\",\n\t\t\tCan(\"be acquired\").And(\"being acquired\").RequiresOnly(\"actor\"),\n\t\t\tTo(\"be acquired\",\n\t\t\t\tAssignParent{g.The(\"prop\"), Owner{}, g.The(\"actor\")},\n\t\t\t)),\n\n\t\t// 1. source\n\t\tThe(\"actors\",\n\t\t\tCan(\"give it to\").And(\"giving it to\").RequiresOne(\"actor\").AndOne(\"prop\"),\n\t\t\tTo(\"give it to\", g.ReflectWithContext(\"report give\")),\n\t\t\t// \"convert give to yourself to examine\"\n\t\t\tBefore(\"giving it to\").Always(\n\t\t\t\tChoose{\n\t\t\t\t\tIf: g.The(\"action.Source\").Equals(g.The(\"action.Target\")),\n\t\t\t\t\tTrue: g.Go(\n\t\t\t\t\t\tg.Say(\"You can't give to yourself.\"),\n\t\t\t\t\t\tg.StopHere(),\n\t\t\t\t\t),\n\t\t\t\t}),\n\t\t\t// \"can't give clothes being worn\"\n\t\t\tBefore(\"giving it to\").Always(\n\t\t\t\tChoose{\n\t\t\t\t\tIf: g.The(\"action.Context\").Object(\"wearer\").Exists(),\n\t\t\t\t\tTrue: g.Go(\n\t\t\t\t\t\tg.Say(\"You can't give worn clothing.\"),\n\t\t\t\t\t\t// FIX: try taking off the noun\n\t\t\t\t\t\tg.StopHere(),\n\t\t\t\t\t),\n\t\t\t\t}),\n\t\t\t// \"you can't give what you haven't got\"\n\t\t\tBefore(\"giving it to\").Always(\n\t\t\t\tChoose{\n\t\t\t\t\tIf: Carrier(g.The(\"prop\")).Equals(g.The(\"action.Source\")),\n\t\t\t\t\tFalse: g.Go(\n\t\t\t\t\t\tg.Say(\"You aren't holding\", g.The(\"prop\").Lower(), \".\"),\n\t\t\t\t\t\tg.StopHere(),\n\t\t\t\t\t),\n\t\t\t\t}),\n\t\t),\n\t\t// 2. receiver\n\t\tThe(\"actors\",\n\t\t\tCan(\"report give\").And(\"reporting give\").RequiresOne(\"prop\").AndOne(\"actor\"),\n\t\t\tTo(\"report give\",\n\t\t\t\tg.ReflectWithContext(\"report gave\"))),\n\t\t// 3. context\n\t\tThe(\"props\",\n\t\t\tCan(\"report gave\").And(\"reporting gave\").RequiresTwo(\"actor\"),\n\t\t\tTo(\"report gave\",\n\t\t\t\tg.The(\"action.Context\").Go(\"impress\"))),\n\t\t// input\n\t\tUnderstand(\"give|pay|offer|feed {{something}} {{something else}}\").\n\t\t\tAnd(\"give|pay|offer|feed {{something else}} to {{something}}\").\n\t\t\tAs(\"give it to\"),\n\t)\n\n\t// MARS: move all tests to a sub-directory.\n\tpkg.AddTest(\"Giving\",\n\t\ttest.Setup(\n\t\t\tThe(\"actor\", Called(\"the player\"), Exists()),\n\t\t\tThe(\"actor\", Called(\"the firefighter\"), Exists()),\n\t\t\tThe(\"prop\", Called(\"the cat\"), Exists()),\n\t\t).Try(\"giving when not having\",\n\t\t\ttest.Parse(\"give the cat to the firefighter\").\n\t\t\t\tMatch(\"You aren't holding the cat.\"),\n\t\t),\n\t\ttest.Setup(\n\t\t\tThe(\"actor\", Called(\"the player\"), Exists()),\n\t\t\tThe(\"actor\", Called(\"the firefighter\"), Exists()),\n\t\t\tThe(\"prop\", Called(\"the cat\"), Exists()),\n\t\t\tThe(\"prop\", Called(\"the hat\"), Exists()),\n\t\t\tThe(\"player\", Possesses(\"the cat\")),\n\t\t\tThe(\"player\", Wears(\"the hat\")),\n\t\t).Try(\"giving while having failures\",\n\t\t\ttest.Parse(\"give the cat to the player\").\n\t\t\t\tMatch(\"You can't give to yourself.\"),\n\t\t\ttest.Parse(\"give the hat to the firefighter\").\n\t\t\t\tMatch(\"You can't give worn clothing.\"),\n\t\t\ttest.Parse(\"give the cat to the firefighter\").\n\t\t\t\tMatch(\"The firefighter is unimpressed.\"),\n\t\t),\n\t)\n}",
"func RandomGoodBetMessage(message string) string {\n\trand.Seed(time.Now().UnixNano())\n\tstr := []string{\"Близко к правде!\", message + \"! Уже слышу как Соловьёв кричит этот счёт после финального свистка.\",\n\t\tmessage + \"!:ok:\", \"Были бы у меня ноги, поставил бы также.\", \"Принято.\", \"Мой любимый счет!\",\n\t\t\"Твоя жизнь - твои прогнозы.\", \"Как-то слабо верится, но принято!\", \"Популярный счет.\", \"Записал!\", \"В лучших традициях!\"}\n\tstr2 := str[rand.Intn(len(str))]\n\treturn str2\n}",
"func DoWear(pp *PlayerChar, verb string, dobj thing.Thing,\n prep string, iobj thing.Thing, text string) {\n \n if dobj == nil {\n pp.QWrite(\"Wear what?\")\n return\n }\n \n bod := pp.Body()\n if !bod.IsHolding(dobj) {\n pp.QWrite(\"You are not holding %s.\", dobj.Normal(name.DEF_ART))\n return\n }\n \n if wt, ok := dobj.(thing.Wearable); ok {\n slot := wt.Slot()\n var slots_worn byte = 0\n var can_wear byte\n can_wear, _ = bod.WornSlots(slot)\n var already_worn = make([]string, 0, 0)\n for _, t := range pp.Inventory.Things {\n if wit, wok := t.(thing.Wearable); wok {\n if !bod.IsHolding(t) {\n if wit.Slot() == slot {\n slots_worn++\n already_worn = append(already_worn, t.Normal(0))\n }\n }\n }\n }\n \n if slots_worn < can_wear {\n if rh, _ := bod.HeldIn(\"right_hand\"); rh == dobj {\n bod.SetHeld(\"right_hand\", nil)\n } else {\n bod.SetHeld(\"left_hand\", nil)\n }\n f1p := map[string]interface{} { \"subj\": \"You\",\n \"verb\": \"put\",\n \"pp\": \"your\",\n \"dobj\": dobj.Normal(0), }\n f3p := map[string]interface{} { \"subj\": util.Cap(pp.Normal(0)),\n \"verb\": \"puts\",\n \"pp\": pp.PossPronoun(),\n \"dobj\": f1p[\"dobj\"], }\n var templ string\n slotName := bod.WornSlotName(slot)\n if slotName == \"\" {\n templ = \"{subj} {verb} on {dobj}.\"\n } else {\n templ = fmt.Sprintf(\"{subj} {verb} {dobj} %s.\", slotName)\n }\n \n m := msg.New(\"txt\", gstring.Sprintm(templ, f3p))\n m.Add(pp, \"txt\", gstring.Sprintm(templ, f1p))\n pp.where.Place.(*room.Room).Deliver(m)\n } else {\n f1p := map[string]interface{} { \"pp\": \"your\" }\n slot_str := gstring.Sprintm(bod.WornSlotName(slot), f1p)\n pp.QWrite(\"You are already wearing %s %s.\", util.EnglishList(already_worn), slot_str)\n }\n } else {\n pp.QWrite(\"You cannot wear %s.\", dobj.Normal(0))\n }\n}",
"func (g GumbelRight) Entropy() float64 {\n\treturn math.Log(g.Beta) + eulerMascheroni + 1\n}",
"func RandomFood() rune {\n\temoji := []rune{\n\t\t'R', // Favourite dish, extra points!!!\n\t\t'👿',\n\t\t'🍍',\n\t\t'🍑',\n\t\t'🍇',\n\t\t'🍏',\n\t\t'🍌',\n\t\t'🍫',\n\t\t'🍭',\n\t\t'🍕',\n\t\t'🍩',\n\t\t'🍗',\n\t\t'🍖',\n\t\t'🍬',\n\t\t'🍤',\n\t\t'🍪',\n\t\t'S', // You do not want to eat the skull\n\t}\n\n\trand.Seed(time.Now().UnixNano())\n\n\treturn emoji[rand.Intn(len(emoji))]\n}",
"func CalculateProbability(s string, caseSensitive bool) int64 {\n\tvar nonAlphaProbability, alphaProbability int64\n\talphaProbability = 26 + 10 + 2\n\tnonAlphaProbability = 26 + 26 + 10 + 2\n\tif caseSensitive {\n\t\talphaProbability = nonAlphaProbability\n\t}\n\tascii := \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n\tvar p int64\n\tp = 1\n\n\tfor _, char := range s {\n\t\tif !strings.Contains(ascii, string(char)) {\n\t\t\tp = p * nonAlphaProbability\n\t\t} else {\n\t\t\tp = p * alphaProbability\n\t\t}\n\t}\n\n\treturn p\n}",
"func randomLetter() string {\n\treturn string('A' + rune(rand.Intn(26)))\n}",
"func damageResolve (team teamL/*[]lookup*/, teamD []teamDamage, dmg []float64, msg *orbs) (res []teamDamage) {\n\t//NOTE, dmg = multiplier for that colour.\n\tvar comboMulti float64\n\tvar comboCount float64\n\t\n\tcomboMulti, comboCount=0,0\n\n\ttpacount := []float64{0,0,0,0,0}\n\n\t//two prong, ugh\n\tfor x, y := range msg.Orbs {\n\t\tfor _, z := range y {\n\t\t\tif x != 5 && z[0] == 4 { //heal orbs can't tpa silly\n\t\t\t\ttpacount[x]++\n\t\t\t\tcomboCount ++\n\t\t\t} else {\n\t\t\t\tif z[0] > 2 {\n\t\t\t\t\tcomboCount ++\n\t\t\t\t\tfmt.Println(z)\n\t\t\t\t\tdmg[x] += 1 + (( z[0] -3) * 0.25) + (0.06 * z[1])\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif comboCount >= 1 {\n\t\tcomboMulti = 1 + (( comboCount - 1)*0.25)\n\t} else {\n\t\tcomboMulti = 0\n\t}\n\tfmt.Println(\"Combo Multiplier:\", comboMulti)\n\tfmt.Println(\"TPA counter:\", tpacount)\n\t//factor in leaderskill last\n\tvar subMulti float64\n\t//var lead, friend bool\n\tsubMulti = 0\n\tvar numTpAwk float64\n\t\n\tfor x,_ := range teamD {\n\t\t//Main attribute\n\t\t//lead,friend = false, false\n\t\tnumTpAwk = 0\n\t\tteamD[x].Damage[0].Element = team.Team[x].Element\n\t\tfor _, awk := range team.Team[x].Awakenings {\n\t\t\tif awk == twoProng {\n\t\t\t\tnumTpAwk ++\n\t\t\t}\n\t\t}\n\t\tif team.Team[x].Element != nil {\n\t\t\t//(# of 4 matches * (1.25 * 1.5^#num tpa awakenings) + multiplier without all the tpa's) * atk * combo multiplier\n\t\t\t//Get TPA out of the way\n\t\t\tteamD[x].Damage[0].Value = 0\n\t\t\tfor _, y := range msg.Orbs[*team.Team[x].Element] {\n\t\t\t\t//Find the TPA matches\n\t\t\t\tif y[0] == 4 && numTpAwk > 0 {\n\t\t\t\t\tteamD[x].Damage[0].Value += (1.25 * math.Pow(1.5, numTpAwk) + (0.06 * y[1]))\n\t\t\t\t} else if y[0] == 4 && numTpAwk == 0 {\n\t\t\t\t\tteamD[x].Damage[0].Value += (1 + ((y[0] - 3) * 0.25 )) + (0.06 * y[1])\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tteamD[x].Damage[0].Value += dmg[*team.Team[x].Element] // remember, dmg is the multiplier\n\t\t\tteamD[x].Damage[0].Value *= float64(team.Team[x].Stats.ATK) * comboMulti\n\t\t}\n\t\t//Sub attribute\n\t\tif team.Team[x].Element2 != nil{\n\t\t\tteamD[x].Damage[1].Element = team.Team[x].Element2\n\t\t} else {\n\t\t\tteamD[x].Damage[1].Element = nil\n\t\t}\n\n\t\t//Multiplier for sub element, 0.1 for same element, 0.3 if they differ.\n\t\tif team.Team[x].Element2 != nil{\n\t\t\tif *team.Team[x].Element == *team.Team[x].Element2 {\n\t\t\t\tsubMulti = 0.10\n\t\t\t} else { subMulti = 0.30 }\n\n\t\t\tteamD[x].Damage[1].Value = 0\n\t\t\tfor _, y := range msg.Orbs[*team.Team[x].Element2] {\n\t\t\t\t//Find the TPA matches\n\t\t\t\tif y[0] == 4 && numTpAwk > 0 {\n\t\t\t\t\tteamD[x].Damage[1].Value += (1.25 * math.Pow(1.5, numTpAwk) + (0.06 * y[1]))\n\t\t\t\t} else if y[0] == 4 && numTpAwk == 0 {\n\t\t\t\t\tteamD[x].Damage[1].Value += (1 + ((y[0] - 3) * 0.25 )) + (0.06 * y[1])\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tteamD[x].Damage[1].Value += dmg[*team.Team[x].Element2]\n\t\t\tteamD[x].Damage[1].Value *= ( float64(team.Team[x].Stats.ATK) * subMulti ) * comboMulti\n\n\t\t}\n\t\t//Heal\n\t\ttemp := 5\n\t\tteamD[x].Damage[2].Element = &temp\n\t\tteamD[x].Damage[2].Value = dmg[5] * float64(team.Team[x].Stats.RCV) * comboMulti\n\t\t//teamD\n\t\t//team[x]\n\n\t\t//Leader Skill\n\t\t//Re-implementing leaderskill\n\t\tswitch msg.LeaderSkill.Condition[0].(string) {\n\t\tcase \"type\":\n\t\t\tif msg.LeaderSkill.Condition[1].(float64) == float64(team.Team[x].Type) || msg.LeaderSkill.Condition[1].(float64) == float64(team.Team[x].Type2) {\n\t\t\t\tteamD[x].Damage[0].Value *= 
msg.LeaderSkill.ATK\n\t\t\t\tteamD[x].Damage[1].Value *= msg.LeaderSkill.ATK\n\t\t\t\tteamD[x].Damage[2].Value *= msg.LeaderSkill.RCV\n\t\t\t}\n\t\t\t\n\t\tcase \"elem\":\n\t\t\tif teamD[x].Damage[0].Element != nil{\n\t\t\t\tif msg.LeaderSkill.Condition[1].(float64) == float64(*teamD[x].Damage[0].Element) {\n\t\t\t\t\tteamD[x].Damage[0].Value *= msg.LeaderSkill.ATK\n\t\t\t\t}\n\t\t\t}\n\t\t\tif teamD[x].Damage[1].Element != nil{\n\t\t\t\tif msg.LeaderSkill.Condition[1].(float64) == float64(*teamD[x].Damage[1].Element) {\n\t\t\t\t\tteamD[x].Damage[1].Value *= msg.LeaderSkill.ATK\n\t\t\t\t}\n\t\t\t}\n\t\tcase \"all\", \"default\":\n\t\t\tteamD[x].Damage[0].Value *= msg.LeaderSkill.ATK\n\t\t\tteamD[x].Damage[1].Value *= msg.LeaderSkill.ATK\n\t\t\tteamD[x].Damage[2].Value *= msg.LeaderSkill.RCV\t\t\t\n\t\t}\n\n\t\t\t\t//Friend leader skill!\n\t\tswitch msg.FLeaderSkill.Condition[0].(string) {\n\t\tcase \"type\":\n\t\t\tif msg.FLeaderSkill.Condition[1].(float64) == float64(team.Team[x].Type) || msg.FLeaderSkill.Condition[1].(float64) == float64(team.Team[x].Type2) {\n\t\t\t\tteamD[x].Damage[0].Value *= msg.FLeaderSkill.ATK\n\t\t\t\tteamD[x].Damage[1].Value *= msg.FLeaderSkill.ATK\n\t\t\t\tteamD[x].Damage[2].Value *= msg.FLeaderSkill.RCV\n\t\t\t}\n\t\tcase \"elem\":\n\t\t\tif teamD[x].Damage[0].Element != nil{\n\t\t\t\tif msg.FLeaderSkill.Condition[1].(float64) == float64(*teamD[x].Damage[0].Element) {\n\t\t\t\t\tteamD[x].Damage[0].Value *= msg.FLeaderSkill.ATK\n\t\t\t\t}\n\t\t\t}\n\t\t\tif teamD[x].Damage[1].Element != nil{\n\t\t\t\tif msg.FLeaderSkill.Condition[1].(float64) == float64(*teamD[x].Damage[1].Element) {\n\t\t\t\t\tteamD[x].Damage[1].Value *= msg.FLeaderSkill.ATK\n\t\t\t\t}\n\t\t\t}\n\t\tcase \"all\", \"default\":\n\t\t\tteamD[x].Damage[0].Value *= msg.FLeaderSkill.ATK\n\t\t\tteamD[x].Damage[1].Value *= msg.FLeaderSkill.ATK\n\t\t\tteamD[x].Damage[2].Value *= msg.FLeaderSkill.RCV\t\t\t\n\t\t}\n\t\t\n\t}\n\n\t//Row Multipliers\n\t//( 1 + ( 0.1 * n * r)) n = # rows, r = num awakenings\n\tfor x, _ := range teamD {\n\t\t//for each in teamD, figure out how much the row multiplier affects., #rows = msg.Rows[element]\n\t\t//msg.Rows[teamD[x].Damage[0].Element] for main att // 1 for sub att\n\t\tif teamD[x].Damage[0].Element != nil{\n\t\t\tteamD[x].Damage[0].Value *= (1 + (0.1 * float64(msg.Rows[*teamD[x].Damage[0].Element]) * float64(team.Rows[*teamD[x].Damage[0].Element])))}\n\t\tif teamD[x].Damage[1].Element != nil{\n\t\t\tteamD[x].Damage[1].Value *= (1 + (0.1 * float64(msg.Rows[*teamD[x].Damage[1].Element]) * float64(team.Rows[*teamD[x].Damage[0].Element])))\n\t\t}\n\t} //test\n\n\t//Active skill multiplier. (Strict multiplier.. 
if no active skill, put [ \"type/elem\", 1, 1 ]\n\tfor x, _ := range teamD {\n\t\tswitch msg.Active[0].(string) {\n\t\tcase \"type\":\n\t\t\tif team.Team[x].Type == int(msg.Active[1].(float64)) || team.Team[x].Type2 == int(msg.Active[1].(float64)) {\n\t\t\t\tteamD[x].Damage[0].Value *= float64(msg.Active[2].(float64))\n\t\t\t\tteamD[x].Damage[1].Value *= float64(msg.Active[2].(float64))\n\t\t\t}\n\t\tcase \"elem\":\n\t\t\tif teamD[x].Damage[0].Element != nil{\n\t\t\t\tif *teamD[x].Damage[0].Element == int(msg.Active[1].(float64)){\n\t\t\t\t\tteamD[x].Damage[0].Value *= float64(msg.Active[2].(float64))\n\t\t\t\t}\n\t\t\t}\n\t\t\tif teamD[x].Damage[1].Element != nil{\n\t\t\t\tif *teamD[x].Damage[1].Element == int(msg.Active[1].(float64)){\n\t\t\t\t\tteamD[x].Damage[1].Value *= float64(msg.Active[2].(float64))\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tres = teamD\n\treturn\n}",
"func getSecretPhrase() (secret string) {\n\tsecret = \"LORAXSNUGGLEGEORGEICE\"\n\treturn\n}",
"func (p *Player) Chest(c Chest, l *Log, item *Item, death *Death) {\n t := time.Now()\n roll := RandomNumber(36)\n name := p.Name\n if roll > 0 && roll <= 6 {\n gold := roll * 10\n goldText := fmt.Sprintf(\"%s found %d gold.\", p.Name, gold)\n l.AddAction(t.Format(\"3:04:05 \")+goldText)\n p.Gold = p.Gold + gold\n } else if roll > 6 && roll < 13 {\n p.Health = p.Health - roll\n loseHealth := fmt.Sprintf(\"It contained a trap! %s lost %d HP.\", p.Name, roll)\n l.AddAction(t.Format(\"3:04:05 \")+loseHealth)\n if p.Health <= 0 {\n death.Death()\n }\n } else if roll >= 13 && roll < 20 {\n if p.Health + roll >= p.MaxHealth {\n p.Health = p.MaxHealth\n } else {\n p.Health = p.Health + roll\n }\n gainHealth := fmt.Sprintf(\"%s found a healing potion and restored %d HP.\", p.Name, roll)\n l.AddAction(t.Format(\"3:04:05 \")+gainHealth)\n } else if roll >= 20 && roll <= 31 {\n loot := RandomItem(p, item)\n if loot == \"none\" {\n heavyString := fmt.Sprintf(\"%s found an item inside a chest. But already at max weight limit so %s leaves it behind.\", p.Name, p.Name)\n l.AddAction(t.Format(\"3:04:05 \")+heavyString)\n } else {\n itemString := fmt.Sprintf(\"%s found loot [%s] inside a chest.\", p.Name, loot)\n l.AddAction(t.Format(\"3:04:05 \")+itemString)\n }\n } else if roll > 32 {\n c.Empty = true;\n l.AddAction(t.Format(\"3:04:05 \")+name+\" found just a dusty old chest :( \")\n }\n if roll == 32 && p.Key == false {\n p.Key = true\n l.AddAction(t.Format(\"3:04:05 \")+name+\" found a giant key! Wonder what it is for...\")\n }\n}",
"func RandHeteroglyphs(n int) string {\n\tvar letters = []rune(\"abcdefghkmnpqrstwxyz\")\n\tlenLetters := len(letters)\n\tb := make([]rune, n)\n\tfor i := range b {\n\t\tb[i] = letters[rand.Intn(lenLetters)]\n\t}\n\treturn string(b)\n}",
"func (c *Creature) DetermineDamageBonus() *Attribute {\n\n\tdamageBonus := &Attribute{\n\t\tName: \"Damage Bonus\",\n\t\tMax: 21,\n\t\tDice: 1,\n\t}\n\n\tstr := c.Statistics[\"STR\"]\n\tsiz := c.Statistics[\"SIZ\"]\n\n\tstr.UpdateStatistic()\n\tsiz.UpdateStatistic()\n\n\tdb := siz.Total + str.Total\n\n\tswitch {\n\tcase db < 13:\n\t\tdamageBonus.Base = -4\n\t\tdamageBonus.Text = \"-1D4\"\n\tcase db < 25:\n\t\tdamageBonus.Base = 0\n\t\tdamageBonus.Text = \"-\"\n\tcase db < 33:\n\t\tdamageBonus.Base = 4\n\t\tdamageBonus.Text = \"+1D4\"\n\tcase db < 41:\n\t\tdamageBonus.Base = 6\n\t\tdamageBonus.Text = \"+1D6\"\n\tcase db < 57:\n\t\tdamageBonus.Base = 6\n\t\tdamageBonus.Dice = 2\n\t\tdamageBonus.Text = \"+2D6\"\n\tcase db > 56:\n\t\tdamageBonus.Base = 6\n\t\tdamageBonus.Dice = ((db - 56) / 16) + 2\n\t\tdamageBonus.Text = fmt.Sprintf(\"+%dD%d\",\n\t\t\tdamageBonus.Dice,\n\t\t\tdamageBonus.Base,\n\t\t)\n\t}\n\n\treturn damageBonus\n}",
"func philosopherPonderanceGoroutine(id int) {\n\tfor {\n\t\tif rand.Float64() < PHILSWITCHCHANCE {\n\t\t\tphilIn[id] <- 2\n\t\t\tisEating := <- philOut[id] == 1\n\t\t\t// Switch: Thinking <-> Eating.\n\t\t\tif isEating {\n\t\t\t\t// Drop forks and return to positing on the nature of the universe.\n\t\t\t\tphilIn[id] <- 1\n\t\t\t} else {\n\t\t\t\t// Attempt to begin eating. Return to postulating, if missing fork.\n\t\t\t\tphilIn[id] <- 0\n\t\t\t}\n\t\t\t<- philOut[id]\n\t\t}\n\t}\n}",
"func Int() int { return globalRand.Int() }",
"func Int() int { return globalRand.Int() }",
"func Int() int { return globalRand.Int() }",
"func Gender() string { return gender(globalFaker.Rand) }",
"func PronounReflective() string { return pronounReflective(globalFaker.Rand) }",
"func Bonus(e *Employee, percent int) int {\n\treturn e.Salary * percent / 100\n}",
"func PlayerDeadPercent(player Player) int {\n\tif playerIsDead(player) {\n\t\treturn 100\n\t}\n\n\t// Figure out percentage from attacks\n\tattacksOnMe := make([]observation, 0, len(observations))\n\ttotalUniverses := 0\n\ttotalDeaths := 0\n\n\tfor _, o := range observations {\n\t\ttarget, _ := o.getTarget()\n\t\tif !o.getPending() && o.getType() == \"AttackObservation\" && target == player.Num {\n\t\t\tattacksOnMe = append(attacksOnMe, o)\n\t\t}\n\t}\n\n\tfor _, v := range Multiverse.Universes {\n\t\ttotalUniverses++\n\t\tfor _, o := range attacksOnMe {\n\t\t\tif AttackTarget(v, o.getSubject(), player.Num) {\n\t\t\t\ttotalDeaths++\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\n\treturnPercent := (totalDeaths * 100) / totalUniverses\n\tif returnPercent >= 100 && totalDeaths < totalUniverses {\n\t\treturnPercent = 99\n\t}\n\tif returnPercent <= 0 && totalDeaths > 0 {\n\t\treturnPercent = 1\n\t}\n\treturn returnPercent\n}",
"func losemspells(x int) {\n\tc[SPELLMAX] -= x\n\tif c[SPELLMAX] < 0 {\n\t\tc[SPELLMAX] = 0\n\t}\n\tc[SPELLS] -= x\n\tif c[SPELLS] < 0 {\n\t\tc[SPELLS] = 0\n\t}\n}",
"func (d *Dungeon) WorldGenerator(p *Player){\n for i := 0; i < 25; i++ {\n roll := RandomNumber(100)\n if roll <= 20 {\n d.Room[i].Empty = false\n d.Room[i].Chest = true\n } else if roll >= 21 && roll <= 60 {\n d.Room[i].Empty = false\n d.Room[i].Enemy = true\n } else {\n d.Room[i].Empty = true\n }\n }\n//ExitRoom places dungeon exit in a random room\n//If/else statement to prevent exit from being placed in player spawn room\n ExitRoom := RandomNumber(24)\n if d.Room[ExitRoom].X != p.X && d.Room[ExitRoom].Y != p.Y {\n d.Room[ExitRoom].Empty = true\n d.Room[ExitRoom].Exit = true\n } else {\n ExitRoom = RandomNumber(24)\n if d.Room[ExitRoom].X != p.X && d.Room[ExitRoom].Y != p.Y {\n d.Room[ExitRoom].Chest = false\n d.Room[ExitRoom].Empty = true\n d.Room[ExitRoom].Exit = true\n }\n }\n HealRoom := RandomNumber(24)\n if d.Room[HealRoom].X != p.X && d.Room[HealRoom].Y != p.Y && d.Room[HealRoom] != d.Room[ExitRoom] {\n d.Room[HealRoom].Empty = true\n d.Room[HealRoom].Heal = true\n } else {\n HealRoom = RandomNumber(24)\n d.Room[HealRoom].Empty = true\n d.Room[HealRoom].Heal = true\n }\n// this exit location and heal location print is only used for testing purposes to locate exit spawn \n// fmt.Printf(\"\\nHeal located at [%d][%d]\",d.Room[HealRoom].X, d.Room[HealRoom].Y)\n// time.Sleep(1 * time.Second)\n// fmt.Printf(\"\\nExit located at [%d][%d]\",d.Room[ExitRoom].X, d.Room[ExitRoom].Y)\n// time.Sleep(1 * time.Second)\n//loop empties player spawn room\n for i := 0; i < 25; i++ {\n if d.Room[i].X == p.X && d.Room[i].Y == p.Y {\n d.Room[i].Empty = true\n }\n }\n}",
"func (s *skillResult) calculateAttackBuff(effects []effect) {\n\ttotalBuff := 1.0\n\tfor i := range effects {\n\t\tif len(effects[i].OffensiveBuffs) > DamageDealt {\n\t\t\tbuff := 100.0 + float64(effects[i].OffensiveBuffs[DamageDealt])\n\t\t\tbuff = buff / 100\n\t\t\ttotalBuff = totalBuff * buff\n\t\t}\n\t}\n\n\ts.DamageBuff = totalBuff\n}",
"func (b *Brain) RandomAction() int {\r\n\tif b.RandomActionDistribution == nil {\r\n\t\treturn b.Rand.Intn(b.NumActions)\r\n\t}\r\n\r\n\t// okay, lets do some fancier sampling:\r\n\tp := b.Rand.Float64()\r\n\tcumprob := 0.0\r\n\r\n\tfor k := 0; k < b.NumActions; k++ {\r\n\t\tcumprob += b.RandomActionDistribution[k]\r\n\r\n\t\tif p < cumprob {\r\n\t\t\treturn k\r\n\t\t}\r\n\t}\r\n\r\n\t// rounding error\r\n\treturn b.NumActions - 1\r\n}",
"func (b BaseDefender) GetShieldPower(researches Researches) int64 {\n\treturn int64(float64(b.ShieldPower) * (1 + float64(researches.ShieldingTechnology)*0.1))\n}",
"func (list *SkipList) randLevel() int {\n\tvar level int = 1\n\tfor ((rand.Int63()>>32)&0xffff < DefaultProbability) && (level < list.maxLevel) {\n\t\tlevel++\n\t}\n\treturn level\n}",
"func (e *Enemy)EnemyHud() {\n fmt.Printf(\"%s ♥: %d Attack: %d Armor: %d \\n\\n\", e.Name, e.Health, e.Attack, e.Armor)\n}"
] | [
"0.63353246",
"0.60036635",
"0.5936208",
"0.5760439",
"0.572633",
"0.5621206",
"0.56166786",
"0.5572953",
"0.5570117",
"0.54906464",
"0.542936",
"0.5409767",
"0.5389871",
"0.538468",
"0.5332171",
"0.53146607",
"0.5296809",
"0.5295871",
"0.5279863",
"0.5268397",
"0.5248359",
"0.52475053",
"0.51882607",
"0.51777565",
"0.5160935",
"0.51345956",
"0.51310074",
"0.5123602",
"0.51074517",
"0.50706905",
"0.50548756",
"0.5015766",
"0.500734",
"0.50055057",
"0.50055057",
"0.49914154",
"0.4982932",
"0.4979149",
"0.4930312",
"0.49127224",
"0.4897439",
"0.48820177",
"0.48747018",
"0.48728657",
"0.48712072",
"0.48662287",
"0.48633316",
"0.4857651",
"0.4854639",
"0.4841419",
"0.48413047",
"0.48399734",
"0.48198926",
"0.47992536",
"0.4798728",
"0.47968704",
"0.478914",
"0.47871304",
"0.47717384",
"0.47562686",
"0.47272012",
"0.47174275",
"0.47157693",
"0.47039282",
"0.47021458",
"0.4692815",
"0.46912724",
"0.46869713",
"0.46855763",
"0.46845478",
"0.4679084",
"0.46715292",
"0.46685034",
"0.46681497",
"0.46651796",
"0.46622956",
"0.4649331",
"0.46453226",
"0.46443155",
"0.46407428",
"0.46390003",
"0.46374127",
"0.46341756",
"0.46281424",
"0.46249092",
"0.46206954",
"0.4615976",
"0.4615976",
"0.4615976",
"0.46126047",
"0.46102425",
"0.4598688",
"0.45982924",
"0.45835286",
"0.4578327",
"0.45770904",
"0.45767128",
"0.4574939",
"0.45729482",
"0.4570943"
] | 0.58057666 | 3 |
Gets the defender chance after accounting for modifiers | func (c *Conflict) GetModDefenderChance() int32 {
	// TODO: Return modified defender chance
return c.BaseChance()
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func getDamage(attacker *pet, defender pet) (float64) {\n\tif doesCrit(attacker) {\n\t\treturn 2*(attacker.EffectiveATK * (100.00/(defender.EffectiveDEF + 100.00)))\n\t}\n\t\n\treturn attacker.EffectiveATK * (100.00/(defender.EffectiveDEF + 100.00))\n}",
"func (c *Conflict) GetModAttackerChance() int32 {\n\t// TODO: Return modified attacker chance\n\treturn c.BaseChance()\n}",
"func (g Gun)calculateDamage(bodyPart BodyArmor,attacker *Character, defender *Character)float64{\n\tcalibreIdx := getCalibreIdxFromCalibre(g.Calibre)\n\tdmgModifier := BODY_DAMAGE_MODIFIERS[bodyPart]\n\tdistance := calculateDistance(attacker.Location,defender.Location,attacker.isIndoor())\n\tdistanceModifier := math.Pow(CALIBRE_DAMAGE_FALLOFF[calibreIdx],distance/BULLET_DROPOFF_DISTANCE)\n\tbaseDamage := g.MaxDamage\n\tarmorDurability := defender.Armor[bodyPart].Durability/100\n\tbulletproofModifier := (100-defender.Armor[bodyPart].Bulletproof*(armorDurability))/100\n\tbulletAppropriateModifier := math.Abs(g.LoadedMagazine.ArmorPiercing-bulletproofModifier*100)/100\n\tbulletproofModifier = bulletproofModifier+bulletAppropriateModifier*BULLETPROOF_APPROPRIATE_MODIFIER\n\tdamage := baseDamage*dmgModifier*distanceModifier*bulletproofModifier\n\tdamage = pickRandomVariedAround(damage,DAMAGE_RANDOMNESS)\n\tif damage<0{\n\t\tdamage = 0\n\t}\n\tif LOG_MODE>=1{\n\t\tfmt.Printf(\"%s did %f damage to %s\\n\",attacker.Name,damage,defender.Name)\n\t}\n\tif LOG_MODE==DEBUG{\n\t\tfmt.Printf(\"Body part damage modifier: %f\\n\",dmgModifier)\n\t\tfmt.Printf(\"Distance: %f\\n\",distance)\n\t\tfmt.Printf(\"Distance modifier: %f\\n\",distanceModifier)\n\t\tfmt.Printf(\"Base damage: %f\\n\",baseDamage)\n\t\tfmt.Printf(\"Armor durability: %f\\n\",armorDurability)\n\t\tfmt.Printf(\"Bulletproof modifier: %f\\n\",bulletproofModifier)\n\t\tfmt.Printf(\"Bullet appropriate modifier: %f\\n\",bulletAppropriateModifier)\n\t}\n\n\treturn damage\n}",
"func (m *Monster) BaseDamage() int {\n\tswitch m.id {\n\tcase Bat: // bats deal a base of 1 always\n\t\treturn 1\n\tdefault:\n\t\td := m.Info.Dmg\n\t\tif d < 1 {\n\t\t\td++\n\t\t} else {\n\t\t\td += rand.Intn(d)\n\t\t}\n\t\td += m.Info.Lvl\n\t\treturn d\n\t}\n}",
"func chance(line *base.Line) {\n\tstr := line.Args[1]\n\tvar chance float64\n\n\tif strings.HasSuffix(str, \"%\") {\n\t\t// Handle 'chance of that is \\d+%'\n\t\tif i, err := strconv.Atoi(str[:len(str)-1]); err != nil {\n\t\t\tbot.ReplyN(line, \"'%s' didn't look like a % chance to me.\", str)\n\t\t\treturn\n\t\t} else {\n\t\t\tchance = float64(i) / 100\n\t\t}\n\t} else {\n\t\t// Assume the chance is a floating point number.\n\t\tif c, err := strconv.ParseFloat(str, 64); err != nil {\n\t\t\tbot.ReplyN(line, \"'%s' didn't look like a chance to me.\", str)\n\t\t\treturn\n\t\t} else {\n\t\t\tchance = c\n\t\t}\n\t}\n\n\t// Make sure the chance we've parsed lies in (0.0,1.0]\n\tif chance > 1.0 || chance <= 0.0 {\n\t\tbot.ReplyN(line, \"'%s' was outside possible chance ranges.\", str)\n\t\treturn\n\t}\n\n\t// Retrieve last seen ObjectId, replace with \"\"\n\tls := LastSeen(line.Args[0], \"\")\n\t// ok, we're good to update the chance.\n\tif fact := fc.GetById(ls); fact != nil {\n\t\t// Store the old chance, update with the new\n\t\told := fact.Chance\n\t\tfact.Chance = chance\n\t\t// Update the Modified field\n\t\tfact.Modify(line.Storable())\n\t\t// And store the new factoid data\n\t\tif err := fc.Update(bson.M{\"_id\": ls}, fact); err == nil {\n\t\t\tbot.ReplyN(line, \"'%s' was at %.0f%% chance, now is at %.0f%%.\",\n\t\t\t\tfact.Key, old*100, chance*100)\n\t\t} else {\n\t\t\tbot.ReplyN(line, \"I failed to replace '%s': %s\", fact.Key, err)\n\t\t}\n\t} else {\n\t\tbot.ReplyN(line, \"Whatever that was, I've already forgotten it.\")\n\t}\n}",
"func (d *Decoder) Probability() int32 {\n\treturn pocketsphinx.GetProb(d.dec)\n}",
"func (d *Degradater) RandomPercent() int {\n\treturn defaultSafeRander.Intn(101)\n}",
"func doesHit(attacker *pet, defender pet) (bool){\n\tchanceToHit := float64(attacker.EffectiveACC) - defender.EffectiveEVA\n\t\n\tif float64(rand.Intn(100)) < chanceToHit {\n\t\treturn true\n\t}\n\t\n\t//fmt.Println(attacker.PetUser.Username, \" miss!\")\n\tattacker.MissCount++\n\treturn false\n}",
"func GetProbability(l uint) float64 {\n\treturn 1.0 / math.Pow(2.0, float64(l))\n}",
"func (c *Cell) ApplyChance(total uint64) {\n\tif total < c.count {\n\t\treturn\n\t}\n\tc.chance = (float64(c.count) / float64(total)) * 100.0\n}",
"func (c *Creature) DetermineHealingRate() *Attribute {\n\n\thealingRate := &Attribute{\n\t\tName: \"Healing Rate\",\n\t\tMax: 21,\n\t}\n\n\tcon := c.Statistics[\"CON\"]\n\n\tcon.UpdateStatistic()\n\ttCon := con.Total\n\n\tswitch {\n\tcase tCon < 7:\n\t\thealingRate.Base = 1\n\tcase tCon < 13:\n\t\thealingRate.Base = 2\n\tcase tCon < 19:\n\t\thealingRate.Base = 3\n\tcase tCon > 18:\n\t\thealingRate.Base = ((tCon - 18) / 6) + 3\n\t}\n\thealingRate.Total = healingRate.Base + healingRate.Value\n\treturn healingRate\n}",
"func (e *Explosion) MaxDamage() float64 {\n\treturn 100 + e.Strength*10\n}",
"func (e Enemy) Damage() int {\n\treturn 10 * e.Level\n}",
"func (c *Chance) Gender() string {\n\tif c.Bool() {\n\t\treturn \"male\"\n\t} else {\n\t\treturn \"female\"\n\t}\n}",
"func boostChance(data *BoostCheckData) (int, []string) {\n\treasons := make([]string, 0)\n\tchance := 0\n\n\t// get region with highest winrate\n\thighestWr := 0\n\tvar highestWrRegion string\n\tfor region, wr := range data.Winrates {\n\t\tif wr > highestWr {\n\t\t\thighestWr = wr\n\t\t\thighestWrRegion = region\n\t\t}\n\t}\n\n\twrOnMostPlayedServer := data.Winrates[data.MostPlayedServer]\n\twrDiff := float64(highestWr - wrOnMostPlayedServer)\n\tchance = int(math.Min(wrDiff*3, 99.0))\n\twrDifference := \"Winrate on most played region (\" + data.MostPlayedServer + \", \" + strconv.Itoa(wrOnMostPlayedServer) + \"%) was \" + strconv.FormatFloat(wrDiff, 'f', 1, 64) + \" less than the highest winrate (\" + highestWrRegion + \", \" + strconv.Itoa(highestWr) + \"%)\"\n\treasons = append(reasons, wrDifference)\n\n\treturn chance, reasons\n}",
"func (c *Creature) DetermineDamageBonus() *Attribute {\n\n\tdamageBonus := &Attribute{\n\t\tName: \"Damage Bonus\",\n\t\tMax: 21,\n\t\tDice: 1,\n\t}\n\n\tstr := c.Statistics[\"STR\"]\n\tsiz := c.Statistics[\"SIZ\"]\n\n\tstr.UpdateStatistic()\n\tsiz.UpdateStatistic()\n\n\tdb := siz.Total + str.Total\n\n\tswitch {\n\tcase db < 13:\n\t\tdamageBonus.Base = -4\n\t\tdamageBonus.Text = \"-1D4\"\n\tcase db < 25:\n\t\tdamageBonus.Base = 0\n\t\tdamageBonus.Text = \"-\"\n\tcase db < 33:\n\t\tdamageBonus.Base = 4\n\t\tdamageBonus.Text = \"+1D4\"\n\tcase db < 41:\n\t\tdamageBonus.Base = 6\n\t\tdamageBonus.Text = \"+1D6\"\n\tcase db < 57:\n\t\tdamageBonus.Base = 6\n\t\tdamageBonus.Dice = 2\n\t\tdamageBonus.Text = \"+2D6\"\n\tcase db > 56:\n\t\tdamageBonus.Base = 6\n\t\tdamageBonus.Dice = ((db - 56) / 16) + 2\n\t\tdamageBonus.Text = fmt.Sprintf(\"+%dD%d\",\n\t\t\tdamageBonus.Dice,\n\t\t\tdamageBonus.Base,\n\t\t)\n\t}\n\n\treturn damageBonus\n}",
"func (f *Faker) Gender() string { return gender(f.Rand) }",
"func Gender() string { return gender(globalFaker.Rand) }",
"func (g *AllergyGenerator) randomReaction() string {\n\treturn g.reactions[rand.Intn(len(g.reactions))]\n}",
"func DoDamage(a *Agent, d *Agent, odds int) string {\n\n\tvar damage int\n\tvar textOut string\n\n\t// Damage and max damage if it's a critical\n\tif Roll(1, 100) > a.TotalCritical() {\n\t\tdamage = Roll(2, a.TotalDamage())\n\t} else {\n\t\tdamage = a.TotalDamage()\n\t\ttextOut = fmt.Sprintf(CyanU(\"Critical\") + \" \")\n\t}\n\n\t// If damage is greater than the damage resist then subtract\n\tif damage > d.TotalResist() {\n\t\tdamage = damage - d.TotalResist()\n\t\t//if unlocked, damage = damage - TotalResist()\n\t\td.AdjHealth(0 - damage)\n\t\ttextOut = textOut + fmt.Sprintf(\"for %s damage. \", Red(strconv.Itoa(damage)))\n\t\ttextOut = textOut + fmt.Sprintf(\"%s's health = %s.\\n\", d.Name, Red(strconv.Itoa(d.Health.Val)))\n\t\t//else don't adjust\n\t} else {\n\t\tdamage = 0\n\t\ttextOut = textOut + fmt.Sprintf(\"%s! for %s damage. \", YellowU(\"Resist\"), Red(strconv.Itoa(damage)))\n\t\ttextOut = textOut + fmt.Sprintf(\"%s's health = %s.\\n\", d.Name, Red(strconv.Itoa(d.Health.Val)))\n\t}\n\n\t// Experience Reward\n\t// if this is a dead monster\n\t// Monster agents don't have a save file set\n\tif d.File == \"\" && d.Dead == true {\n\n\t\t// reverse the percentage\n\t\tmods := 100 - odds\n\n\t\tpercentage := float32(mods) * .01\n\n\t\t// reduce the drop by the reverse percentage\n\t\texp := float32(d.ExpDrop()) * percentage\n\n\t\t// int to float conversion rounds down towards zero by dropping\n\t\t// everything after the decimal point. So I add 1 to the exp here\n\t\t// so the player never gets 0 exp reward\n\t\texp++\n\n\t\t// exp is a float32 so do math with exp as an int\n\t\ta.Exp = a.Exp + int(exp)\n\n\t\ttextOut = textOut + fmt.Sprintf(Green(\"\\nYou gain %d experience.\\n\"), int(exp))\n\t\ta.Save()\n\t}\n\td.Save()\n\treturn textOut\n}",
"func combat(health, damage float64) float64 {\n\tif math.Floor(damage) == 100.0 {\n\t\treturn 0\n\t}\n\n\thealth = health * damage / 100.0\n\tif health < 1.1 {\n\t\treturn 0\n\t}\n\n\treturn health\n}",
"func weapon(char core.Character, c *core.Core, r int, param map[string]int) {\n\n\texpiry := 0\n\tper := 0.03 + 0.01*float64(r)\n\tstacks := 0\n\ticd := 0\n\n\tc.Events.Subscribe(core.OnDamage, func(args ...interface{}) bool {\n\n\t\tds := args[1].(*core.Snapshot)\n\n\t\tif ds.ActorIndex != char.CharIndex() {\n\t\t\treturn false\n\t\t}\n\t\tif ds.AttackTag != core.AttackTagNormal && ds.AttackTag != core.AttackTagExtra {\n\t\t\treturn false\n\t\t}\n\t\tif icd > c.F {\n\t\t\treturn false\n\t\t}\n\t\ticd = c.F + 18\n\t\tif expiry < c.F {\n\t\t\tstacks = 0\n\t\t}\n\t\tstacks++\n\t\tif stacks > 4 {\n\t\t\tstacks = 4\n\t\t}\n\t\texpiry = c.F + 360\n\t\treturn false\n\t}, fmt.Sprintf(\"prototype-rancour-%v\", char.Name()))\n\n\tval := make([]float64, core.EndStatType)\n\tchar.AddMod(core.CharStatMod{\n\t\tKey: \"prototype\",\n\t\tExpiry: -1,\n\t\tAmount: func(a core.AttackTag) ([]float64, bool) {\n\t\t\tif expiry < c.F {\n\t\t\t\tstacks = 0\n\t\t\t\treturn nil, false\n\t\t\t}\n\t\t\tval[core.ATKP] = per * float64(stacks)\n\t\t\tval[core.DEFP] = per * float64(stacks)\n\t\t\treturn val, true\n\t\t},\n\t})\n\n}",
"func RandomMob(witch *Enemy, ghoul *Enemy, slime *Enemy, goblin *Enemy, gator *Enemy) *Enemy {\n roll := RandomNumber(5)\n mob := ghoul\n if roll == 3 {\n mob = witch\n } else if roll == 2 {\n mob = ghoul\n } else if roll == 1 {\n mob = slime\n } else if roll == 4 {\n mob = goblin\n } else if roll == 5 {\n mob = gator\n }\n mob.Health = mob.MaxHealth\n return mob\n}",
"func (s *SoundGroup) MaxAudibleBehavior(behavior *C.FMOD_SOUNDGROUP_BEHAVIOR) error {\n\t//FMOD_RESULT F_API FMOD_SoundGroup_GetMaxAudibleBehavior (FMOD_SOUNDGROUP *soundgroup, FMOD_SOUNDGROUP_BEHAVIOR *behavior);\n\treturn ErrNoImpl\n}",
"func (t *SBF) Probability() float64 {\n return t.probability\n}",
"func rnd() float64 {\n\tss := *g_seed\n\tss += ss\n\tss ^= 1\n\tif int32(ss) < 0 {\n\t\tss ^= 0x88888eef\n\t}\n\t*g_seed = ss\n\treturn float64(*g_seed%95) / float64(95)\n}",
"func rnd() float64 {\n\tss := *g_seed\n\tss += ss\n\tss ^= 1\n\tif int32(ss) < 0 {\n\t\tss ^= 0x88888eef\n\t}\n\t*g_seed = ss\n\treturn float64(*g_seed%95) / float64(95)\n}",
"func randomGender() string {\n\trand.Seed(time.Now().UnixNano())\n\tcoinFlip := rand.Intn(2)\n\tif coinFlip > 0 {\n\t\treturn \"female\"\n\t}\n\treturn \"male\"\n\n}",
"func (q *Question) Probability(a *Option) float32 {\n\ttotalCount := 0\n\n\tfor _, o := range q.Options {\n\t\ttotalCount += o.ResultCount\n\t}\n\n\tlogrus.WithField(\"total_count\", totalCount).Debug(\"Total Results\")\n\n\tif totalCount <= 0 {\n\t\treturn 0\n\t}\n\n\treturn (float32(a.ResultCount) / float32(totalCount)) * 100\n}",
"func (f *Faker) PronounPossessive() string { return pronounPossessive(f.Rand) }",
"func PronounPossessive() string { return pronounPossessive(globalFaker.Rand) }",
"func (model *Way) normalProbability(cor Coordinate) float64 {\n\tvar NormalMean float64\n\tvar NormalDeviation float64 = 20 //Amount of expected GPS error\n\tprojection := model.FindProjection(cor)\n\tdistanceMeter := projection.Distance * float64(distanceMeterFactor)\n\t//Normal distribution formula\n\tprobability := 1 / (math.Sqrt(2*math.Pi) * NormalDeviation) * math.Exp(-(math.Pow(distanceMeter-NormalMean, 2) / (2 * math.Pow(NormalDeviation, 2))))\n\treturn probability\n}",
"func Bonus(e *Employee, percent int) int {\n\treturn e.Salary * percent / 100\n}",
"func GetBronzeBaseDefensePower() int {\n\treturn BronzeBaseDefensePower\n}",
"func (s *ClampDirectionOffset) Restitution() float64 {\n\treturn s.restitution\n}",
"func DuckReturnProb(a float64, t float64) float64 {\n\treturn (a*math.Exp(-t) +\n\t\t(4-3*a)*math.Exp(-2*t) +\n\t\t(2*a-4)*math.Exp(-4*t))\n}",
"func (GuitarBass) DiezelHerbertAmplifier(){}",
"func (list *SkipList) randLevel() int {\n\tvar level int = 1\n\tfor ((rand.Int63()>>32)&0xffff < DefaultProbability) && (level < list.maxLevel) {\n\t\tlevel++\n\t}\n\treturn level\n}",
"func DefuseScore(g *Game, p *Player) (c *ScoreCard) {\n\n\tc = new(ScoreCard)\n\tc.Player = p\n\n\tif p.Dead {\n\t\tc.Score = 0\n\t\treturn\n\t}\n\n\tc.Score = uint64(p.Duration / time.Second)\n\n\t// The player that defused the bomb gets 5 minutes bonus!\n\tif p.Defused {\n\t\tc.Score = c.Score + 60*5\n\t}\n\n\treturn\n}",
"func (prices *pricesVal) PriceChance(price int) float64 {\n\tswitch {\n\tcase price == prices.maxPrice:\n\t\treturn prices.maxChance\n\tcase price == prices.guaranteedPrice:\n\t\treturn prices.minChance\n\tdefault:\n\t\treturn prices.midChance\n\t}\n}",
"func (g Gun)attack(bodyPart BodyArmor,attacker *Character,defender *Character){\n\tif g.LoadedMagazine.Rounds==0{\n\t\t//need to reload!\n\t\treturn\n\t}\n\thitChance := g.estimateHitChance(bodyPart,attacker,defender)\n\troll := r1.Float64()\n\tif roll<=hitChance{\n\t\tdamage := g.calculateDamage(bodyPart,attacker,defender)\n\t\tg.LoadedMagazine.Rounds--\n\t\tdefender.Health-=damage\n\t}else{\n\t\tif LOG_MODE>=1{\n\t\t\tfmt.Printf(\"Missed shot with %f probability\\n\",hitChance)\n\t\t}\n\t}\n}",
"func (we WeightedElement) GetProbability() float64 {\n\treturn we.Probability\n}",
"func (b BaseDefender) GetWeaponPower(researches Researches) int64 {\n\treturn int64(float64(b.WeaponPower) * (1 + float64(researches.WeaponsTechnology)*0.1))\n}",
"func (p *Player) MobAttack(e *Enemy, l *Log){\n fmt.Printf(\"The %s is attempting to attack\", e.Name)\n TypedText(\"...\", 300)\n roll := RandomNumber(20)\n if roll > p.Armor {\n damage := RandomNumber(e.Attack)\n p.Health -= damage\n fmt.Printf(\"The attack lands and %s takes %d damage! ♥:%d \\n\", p.Name, damage, p.Health)\n\t} else {\n fmt.Printf(\"%s managed to defend the attack!\\n\", p.Name)\n }\n}",
"func (b *Being) RandomizeGender() {\n\tb.Gender = b.Species.GetGenders()[randomizer.Intn(len(b.Species.GetGenders()))]\n}",
"func (difficulty *Difficulty) Backoff() float64 {\n\tdifficulty.Lock()\n\tdefer difficulty.Unlock()\n\n\tswitch {\n\tcase difficulty.modifier < 1:\n\t\tdifficulty.modifier = 1\n\tcase difficulty.modifier > 19:\n\t\tdifficulty.modifier = 19\n\tdefault:\n\t\tdifficulty.modifier += 1\n\t}\n\n\t// return difficulty.internalSetPdiff(difficulty.pdiff * math.Log10(10-0.5*float64(difficulty.modifier)))\n\n\t// disble backoff\n\treturn difficulty.pdiff\n}",
"func (n *NormalProposer) Propose() float64 {\n\treturn n.Random()\n}",
"func randomEnergy() float64 {\n\treturn math.Round(rand.Float64() * 100)\n}",
"func (c *Creature) DetermineDexStrikeRank() *Attribute {\n\n\tdexSR := &Attribute{\n\t\tName: \"DEX Strike Rank\",\n\t\tMax: 5,\n\t}\n\n\tdex := c.Statistics[\"DEX\"]\n\n\tdex.UpdateStatistic()\n\n\tswitch {\n\tcase dex.Total < 6:\n\t\tdexSR.Base = 5\n\tcase dex.Total < 9:\n\t\tdexSR.Base = 4\n\tcase dex.Total < 13:\n\t\tdexSR.Base = 3\n\tcase dex.Total < 16:\n\t\tdexSR.Base = 2\n\tcase dex.Total < 19:\n\t\tdexSR.Base = 1\n\tcase dex.Total > 18:\n\t\tdexSR.Base = 0\n\t}\n\treturn dexSR\n}",
"func (a Energy) RandomGain(rnd *rand.Rand) Energy {\n\tgreatest := geom.Vector3(a).Greatest()\n\tif rnd.Float64() > greatest {\n\t\treturn Energy{}\n\t}\n\treturn a.Amplified(1 / greatest)\n}",
"func (f *Faker) Pronoun() string { return pronoun(f.Rand) }",
"func poissonDelay(uniformRnd float64, rate float64) float64 {\n\treturn -math.Log(1.0-uniformRnd) / rate\n}",
"func (m *Monster) Damage(dmg int) (int, bool) {\n\tif dmg >= m.Info.Hitpoints { // check if damage would drop hp to 0\n\t\tdealt := m.Info.Hitpoints\n\t\tm.Info.Hitpoints = 0\n\t\treturn dealt, true\n\t}\n\n\t// Deal damage\n\tm.Info.Hitpoints -= dmg\n\treturn dmg, false\n}",
"func (p *Player) Gender() gender.Gender {\n\treturn p.gender\n}",
"func (n *BetaProposer) Propose() float64 {\n\treturn n.Random()\n}",
"func (m *em) breedRandom() {\n\t// Do nothing if there are no creatures.\n\tif len(m.creatures) == 0 {\n\t\treturn\n\t}\n\n\t// Get two random indices which are not the same.\n\tix1 := rand.Intn(len(m.creatures))\n\tix2 := ix1\n\tfor ix2 == ix1 && len(m.creatures) > 1 {\n\t\tix2 = rand.Intn(len(m.creatures))\n\t}\n\n\t// Ensure they are in order.\n\t// This allows us to break out of the creature loop early.\n\tif ix2 < ix1 {\n\t\tix1, ix2 = ix2, ix1\n\t}\n\n\t// Resolve indices into creatures.\n\t// Because of the random nature of iterating over a map,\n\t// A particular index will not map to the same creature\n\t// each time. However we don't really care, so long as\n\t// it's random, and we get 2 different creatures to breed.\n\tvar mother, father *creature.Creature\n\tii := 0\n\tfor c, _ := range m.creatures {\n\t\tswitch ii {\n\t\tcase ix1:\n\t\t\tmother = c.(*creature.Creature)\n\t\tcase ix2:\n\t\t\tfather = c.(*creature.Creature)\n\t\t\tbreak\n\t\t}\n\t\tii++\n\t}\n\n\t// Breed the creatures.\n\tchild := mother.Breed(father)\n\n\tm.creatures[child] = struct{}{}\n}",
"func (jobImpl *JobImpl) Damage() int64 {\n\treturn jobImpl.damageImpl\n}",
"func (d *Detector) FailureProbability(now ...int64) float64 {\n\td.mu.RLock()\n\tdefer d.mu.RUnlock()\n\n\tif d.inited == 0 {\n\t\treturn 0\n\t}\n\n\tvar tt int64\n\tif len(now) == 0 {\n\t\ttt = time.Now().Sub(time.Unix(0, 0)).Milliseconds()\n\t} else {\n\t\ttt = now[0]\n\t}\n\n\tvar (\n\t\tt = float64(tt-d.freshness) * d.factor\n\t\tcount float64\n\t\ttotal = min(d.nextIndex, d.mask+1)\n\t)\n\n\tfor i := uint64(0); i < total; i++ {\n\t\tif d.samples[i] <= t {\n\t\t\tcount++\n\t\t}\n\t}\n\n\treturn count / float64(max(1, total))\n}",
"func weildWeapon() (string, int) {\n\tlottery := random(1, 5)\n\tvar weapon string\n\tvar weapondie int\n\tswitch lottery {\n\tcase 1:\n\t\tweapon = \"fist\"\n\t\tweapondie = 3\n\tcase 2:\n\t\tweapon = \"dagger\"\n\t\tweapondie = 4\n\tcase 3:\n\t\tweapon = \"short sword\"\n\t\tweapondie = 6\n\tcase 4:\n\t\tweapon = \"longsword\"\n\t\tweapondie = 8\n\tcase 5:\n\t\tweapon = \"greataxe\"\n\t\tweapondie = 12 // At this case, the Greataxe will deal random damage from 1 point to 12 points, a 12-side die.\n\t}\n\treturn weapon, weapondie\n}",
"func (f *Faker) PronounRelative() string { return pronounRelative(f.Rand) }",
"func GenerateNPC() *NPC {\n\tnpc := NPC{}\n\n\trand.Seed(time.Now().UTC().UnixNano())\n\n\tnpc.genderIsFemale = rand.Int()%2 == 0\n\tnpc.GenName()\n\tnpc.age = lists.Ages[rand.Int()%len(lists.Ages)]\n\tnpc.race = lists.Races[rand.Int()%len(lists.Races)]\n\tnpc.nation = lists.Nations[rand.Int()%len(lists.Nations)]\n\tnpc.voice = lists.Voices[rand.Int()%len(lists.Voices)]\n\tnpc.character = lists.Characters[rand.Int()%len(lists.Characters)]\n\tnpc.GenAspect()\n\tnpc.GenStats()\n\n\treturn &npc\n}",
"func (b *Body) Stats() menu.LevelStats {\n\tcleared := 0.0\n\ttotal := 0.0\n\tfor _, col := range b.infectionPattern {\n\t\tfor _, o := range col {\n\t\t\tif b.graph[o].DiseaseLevel() <= 0 {\n\t\t\t\tcleared++\n\t\t\t}\n\t\t\ttotal++\n\t\t}\n\t}\n\tcleared /= total\n\treturn menu.LevelStats{\n\t\tScore: -1,\n\t\tTime: time.Now().Sub(b.startTime).Nanoseconds(),\n\t\tCleared: cleared,\n\t\tLevel: b.level,\n\t}\n}",
"func (m *ChatMessage) GetImportance()(*ChatMessageImportance) {\n return m.importance\n}",
"func (n *LogNormalProposer) Propose() float64 {\n\treturn n.Random()\n}",
"func (f *Faker) PronounDemonstrative() string { return pronounDemonstrative(f.Rand) }",
"func Random() (string, Gender) {\n\tn := r.Int31n(200)\n\treturn All[n], n%2 == 0\n}",
"func (n *ExponentialProposer) Propose() float64 {\n\treturn n.Random()\n}",
"func RandomMale() string {\n\treturn Males[r.Int31n(100)]\n}",
"func gandalf(lastGame game) bool {\n\tif lastGame.Statistics.TotalSpellDamage*100/lastGame.Statistics.TotalDamageDone > 50 {\n\t\treturn true\n\t}\n\treturn false\n}",
"func (b *Brain) RandomAction() int {\r\n\tif b.RandomActionDistribution == nil {\r\n\t\treturn b.Rand.Intn(b.NumActions)\r\n\t}\r\n\r\n\t// okay, lets do some fancier sampling:\r\n\tp := b.Rand.Float64()\r\n\tcumprob := 0.0\r\n\r\n\tfor k := 0; k < b.NumActions; k++ {\r\n\t\tcumprob += b.RandomActionDistribution[k]\r\n\r\n\t\tif p < cumprob {\r\n\t\t\treturn k\r\n\t\t}\r\n\t}\r\n\r\n\t// rounding error\r\n\treturn b.NumActions - 1\r\n}",
"func (o *InlineObject871) GetProbability() AnyOfobject {\n\tif o == nil || o.Probability == nil {\n\t\tvar ret AnyOfobject\n\t\treturn ret\n\t}\n\treturn *o.Probability\n}",
"func weightedRand(max int, talentCo float64) float64 {\n\tnum := posWeightedRand(max)\n\n\tif rand.Float64() > talentCo {\n\t\treturn num - (2 * num)\n\t}\n\n\treturn num\n}",
"func (self *PhysicsP2) Restitution() int{\n return self.Object.Get(\"restitution\").Int()\n}",
"func GetDifficulty(p float64) uint {\n\treturn uint(math.Ceil(math.Log2(1.0 / p)))\n}",
"func (b Backoff) jitter() float64 {\n\n\t// jitter is a random value on [0.0, 1.0), so subtract 0.5 and multiply by\n\t// 2 to move it to the interval [-1.0, 1.0), which is more suitable for\n\t// jitter, as we want equal probabilities on either side of the basic\n\t// exponential backoff.\n\treturn b.Jitter * (rand.Float64() - 0.5) * 2\n}",
"func weapon(char core.Character, c *core.Core, r int, param map[string]int) {\n\tdmg := 0.16 + float64(r)*0.04\n\n\tc.Events.Subscribe(core.OnAttackWillLand, func(args ...interface{}) bool {\n\t\tds := args[1].(*core.Snapshot)\n\t\tt := args[0].(core.Target)\n\t\tif ds.ActorIndex != char.CharIndex() {\n\t\t\treturn false\n\t\t}\n\t\t// if t.AuraType() == def.Hydro {\n\t\t// \tds.Stats[def.DmgP] += dmg\n\t\t// \tc.Log.Debugw(\"dragonbane\", \"frame\", c.F, \"event\", def.LogCalc, \"final dmg%\", ds.Stats[def.DmgP])\n\t\t// }\n\t\tif t.AuraContains(core.Hydro, core.Pyro) {\n\t\t\tds.Stats[core.DmgP] += dmg\n\t\t\tc.Log.Debugw(\"dragonbane\", \"frame\", c.F, \"event\", core.LogCalc, \"final dmg%\", ds.Stats[core.DmgP])\n\t\t}\n\t\treturn false\n\t}, fmt.Sprintf(\"dragonbane-%v\", char.Name()))\n\n}",
"func (p *Player) BossBattle(e *Enemy, l *Log, item *Item, d *Death) int {\n t := time.Now()\n fmt.Printf(\"%s has encountered a strong foe!\\n\\nName:%s\\n♥:%d\\nAtk:%d\\nDef:%d \\n\\nIt doesn't seem to notice. Want to come back another time[1] or fight[2]? \", p.Name, e.Name, e.Health, e.Attack, e.Armor)\n var choice int\n fmt.Scanln(&choice)\n var first bool\n switch choice {\n case 1:\n sucessString := fmt.Sprintf(\"%s snuck away from a %s without it noticing.\", p.Name, e.Name)\n l.AddAction(t.Format(\"3:04:05 \") + sucessString)\n default:\n TypedText(\"You failed to select one of the options given to you.\\n\", 50)\n TypedText(\"You're just going to have to fight it\", 50)\n TypedText(\"...\\n\", 200)\n fallthrough\n case 2:\n var simulate int\n fmt.Printf(\"Would you like to view battle[1] or simulate[2]? \")\n fmt.Scanln(&simulate)\n switch simulate {\n case 1:\n userI := RandomNumber(20)\n compI := RandomNumber(20)\n fmt.Printf(\"Rolling for initiative\")\n TypedText(\"...\", 300)\n fmt.Printf(\"%s rolled a %d\\n\", p.Name, userI)\n fmt.Printf(\"The %s rolled a %d\\n\", e.Name, compI)\n if compI > userI {\n fmt.Printf(\"The %s rolled higher, they will attack first.\\n\", e.Name)\n } else {\n fmt.Printf(\"%s rolled higher, %s will attack first.\\n\", p.Name, p.Name)\n first = true\n }\n for p.Health > 0 && e.Health > 0 {\n if first == true {\n p.UserAttack(e, l)\n time.Sleep(2500 * time.Millisecond)\n if e.Health > 0 {\n p.MobAttack(e, l)\n time.Sleep(2500 * time.Millisecond)\n } else {\n break\n }\n } else {\n p.MobAttack(e, l)\n time.Sleep(2500 * time.Millisecond)\n if p.Health > 0 {\n p.UserAttack(e, l)\n } else {\n break\n }\n }\n }\n default:\n TypedText(\"Since you didn't enter a valid selection the battle will be simulated.\", 50)\n fallthrough\n case 2:\n userI := RandomNumber(20)\n compI := RandomNumber(20)\n fmt.Printf(\"Rolling for initiative\")\n fmt.Printf(\".\")\n fmt.Printf(\".\")\n fmt.Printf(\".\\n\")\n fmt.Printf(\"%s rolled a %d\\n\", p.Name, userI)\n fmt.Printf(\"The %s rolled a %d\\n\", e.Name, compI)\n if compI > userI {\n fmt.Printf(\"The %s rolled higher, they will attack first.\\n\", e.Name)\n } else {\n fmt.Printf(\"%s rolled higher, %s will attack first.\\n\", p.Name, p.Name)\n first = true\n }\n for p.Health > 0 && e.Health > 0 {\n if first == true {\n p.UserAttack(e, l)\n if e.Health > 0 {\n p.MobAttack(e, l)\n } else {\n break\n }\n } else {\n p.MobAttack(e, l)\n if p.Health > 0 {\n p.UserAttack(e, l)\n } else {\n break\n }\n }\n }\n } \n }\n if p.Health <= 0 {\n d.Death()\n } else if e.Health <= 0 && p.Health >= 0 {\n winString := fmt.Sprintf(\"%s successfully defeated a %s!\", p.Name, e.Name)\n l.AddAction(t.Format(\"3:04:05 \")+winString)\n fmt.Printf(\"The %s dropped gold.\", e.Name)\n p.Gold += e.Gold\n lootRoll := RandomNumber(100)\n if lootRoll <= 25 {\n loot := RandomItem(p, item)\n fmt.Printf(\"The %s dropped loot [%s].\", e.Name, loot)\n itemString := fmt.Sprintf(\"%s obtained a %s and %d gold from a %s.\", p.Name, loot, e.Gold, e.Name)\n l.AddAction(t.Format(\"3:04:05 \")+itemString)\n } else {\n goldString := fmt.Sprintf(\"%s gained %d gold from a %s.\", p.Name, e.Gold, e.Name)\n l.AddAction(t.Format(\"3:04:05 \")+goldString)\n }\n if p.Key == false {\n fmt.Printf(\"The %s dropped a giant key.\", e.Name)\n l.AddAction(t.Format(\"3:04:05 \")+p.Name+\" found a giant key.\")\n p.Key = true\n }\n fmt.Printf(\"\\nPosting results\")\n TypedText(\"...\", 300)\n }\n return choice\n}",
"func (g *Deal) outcome() [PLAYERS] int {\n\treturn g.state.goal(g.players)\n}",
"func (g GumbelRight) Prob(x float64) float64 {\n\treturn math.Exp(g.LogProb(x))\n}",
"func PronounDemonstrative() string { return pronounDemonstrative(globalFaker.Rand) }",
"func (b *OGame) GetDefense(celestialID ogame.CelestialID, options ...Option) (ogame.DefensesInfos, error) {\n\treturn b.WithPriority(taskRunner.Normal).GetDefense(celestialID, options...)\n}",
"func noConflictProbability(slots, eventSlots, events int64) float64 {\n\t// can't have a conflict without events\n\tif events <= 0 {\n\t\treturn 1\n\t}\n\n\t// no more free slots, we definitely have a conflict\n\tif slots <= 0 {\n\t\treturn 0\n\t}\n\n\n\tn := float64(slots)\n\teT := float64(eventSlots)\n\te := float64(events)\n\treturn math.Pow((n - eT) / n, e - float64(1)) * noConflictProbability(slots - eventSlots, eventSlots, events - 1)\n}",
"func (m *SalaryMutation) Bonus() (r float64, exists bool) {\n\tv := m._Bonus\n\tif v == nil {\n\t\treturn\n\t}\n\treturn *v, true\n}",
"func (b BaseDefender) GetShieldPower(researches Researches) int64 {\n\treturn int64(float64(b.ShieldPower) * (1 + float64(researches.ShieldingTechnology)*0.1))\n}",
"func (p Piece) Damage() int {\n\treturn p.damage\n}",
"func (n *GammaProposer) Propose() float64 {\n\treturn n.Random()\n}",
"func (e Exponential) Rand() float64 {\n\tvar rnd float64\n\tif e.Src == nil {\n\t\trnd = rand.ExpFloat64()\n\t} else {\n\t\trnd = rand.New(e.Src).ExpFloat64()\n\t}\n\treturn rnd / e.Rate\n}",
"func (baseAccountReputation BaseAccountReputation) GetRating() int64 { return 100 }",
"func (obj *Facility) GenerateAdvance() int {\n\tadvance := obj.Interval\n\tif obj.Modificator > 0 {\n\t\tadvance += utils.GetRandom(-obj.Modificator, obj.Modificator)\n\t}\n\treturn advance\n}",
"func probability(word string) float64 {\n\treturn float64(wordFreq[word]) / float64(wordTotal)\n}",
"func (ant *Ant) getProbability() []float64 {\n\tp := make([]float64, 0)\n\tvar sum float64\n\tfor i, l := range ant.visited[ant.position] {\n\t\tif l != 0 {\n\t\t\td := math.Pow((float64(1)/float64(l)), ant.env.alpha) * math.Pow(ant.env.pheromon[ant.position][i], ant.env.betta)\n\t\t\tp = append(p, d)\n\t\t\tsum += d\n\t\t} else {\n\t\t\tp = append(p, 0)\n\t\t}\n\t}\n\tfor _, l := range p {\n\t\tl = l / sum\n\t}\n\treturn p\n}",
"func def_guard(bt *yboter,b *game.Board, r *game.Robot) game.Action {\n\tnearby_count := count_enermies_adj(b,r)\n\tswitch {\n\t\tcase (bt.self_prevHP[r.ID] - r.Health > 15):\n\t\t\tif r.Health >= nearby_count*10 {\n\t\t\t\treturn game.Action{Kind: game.Guard}\n\t\t\t}\n\t}\n return game.Action{Kind: game.Wait}\n}",
"func (player *Player) GetSkillAdjust(maxAdjust int) int {\n\trand.Seed(time.Now().UnixNano())\n\tmaxSkilllevel := math.Min(4, float64(player.SkillLevel))\n\tlargestOffset := math.Min(5-maxSkilllevel, float64(maxAdjust))\n\tlargestOffset = math.Max(largestOffset, 0)\n\treturn rand.Intn(int(largestOffset))\n}",
"func RandomFood() rune {\n\temoji := []rune{\n\t\t'R', // Favourite dish, extra points!!!\n\t\t'👿',\n\t\t'🍍',\n\t\t'🍑',\n\t\t'🍇',\n\t\t'🍏',\n\t\t'🍌',\n\t\t'🍫',\n\t\t'🍭',\n\t\t'🍕',\n\t\t'🍩',\n\t\t'🍗',\n\t\t'🍖',\n\t\t'🍬',\n\t\t'🍤',\n\t\t'🍪',\n\t\t'S', // You do not want to eat the skull\n\t}\n\n\trand.Seed(time.Now().UnixNano())\n\n\treturn emoji[rand.Intn(len(emoji))]\n}",
"func (g *Game) getSpareBonus(rollIndex int) int {\n\treturn g.rolls[rollIndex+2]\n}",
"func (*Modifier) Descriptor() ([]byte, []int) {\n\treturn file_FillerGame_proto_rawDescGZIP(), []int{6}\n}",
"func doesCrit(attacker *pet) (bool) {\n\tcritRand := float64(rand.Intn(100))\n\t\n\tif critRand < attacker.EffectiveCRI {\n\t\t//fmt.Println(attacker.PetUser.Username, \" rolled a\", critRand, \" crit!\")\n\t\tattacker.CritCount++\n\t\treturn true\n\t}\n\t\n\treturn false\n\t\n}",
"func (u Uniform) Rand() float64 {\n\treturn rand.Float64()*(u.Max-u.Min) + u.Min\n}",
"func GetReward(a Action, feedback Action) float64 {\n\tif a == feedback {\n\t\treturn 1\n\t}\n\treturn -1\n}",
"func PronounReflective() string { return pronounReflective(globalFaker.Rand) }"
] | [
"0.6194889",
"0.61754656",
"0.61005557",
"0.5939384",
"0.5802302",
"0.5753514",
"0.5746806",
"0.54749453",
"0.54497707",
"0.54424155",
"0.5411699",
"0.5332017",
"0.53062475",
"0.5304713",
"0.5244101",
"0.52316695",
"0.5213608",
"0.5160024",
"0.51429135",
"0.51058805",
"0.5092784",
"0.50700694",
"0.504548",
"0.50270003",
"0.5016573",
"0.501293",
"0.501293",
"0.4997118",
"0.49211985",
"0.49058512",
"0.49024987",
"0.49010783",
"0.48994538",
"0.48978147",
"0.48868895",
"0.48692095",
"0.48570213",
"0.48445344",
"0.48397794",
"0.4836708",
"0.4827345",
"0.4819298",
"0.48146203",
"0.48134777",
"0.47888654",
"0.4782919",
"0.47799775",
"0.4768897",
"0.47606418",
"0.4760067",
"0.47530943",
"0.47501495",
"0.4742268",
"0.47369936",
"0.4736104",
"0.4726622",
"0.47265443",
"0.469431",
"0.46936807",
"0.46925497",
"0.4688263",
"0.46821815",
"0.46803886",
"0.4661155",
"0.46540493",
"0.46443135",
"0.46396583",
"0.46381614",
"0.46363962",
"0.46292657",
"0.46269763",
"0.46207958",
"0.4620653",
"0.46169996",
"0.46066177",
"0.46053967",
"0.46014908",
"0.4597888",
"0.45973983",
"0.45956412",
"0.4589991",
"0.4581464",
"0.45718783",
"0.4568697",
"0.4568649",
"0.45666203",
"0.4565859",
"0.4564297",
"0.45635986",
"0.45631155",
"0.45532474",
"0.4548743",
"0.45433933",
"0.4540085",
"0.4536816",
"0.4522471",
"0.45169845",
"0.45158377",
"0.4511514",
"0.45113477"
] | 0.7181684 | 0 |
QUERIES Checks if province is the location of a conflict | func (s *State) IsSiteOfConflict(id pb.ProvinceId) bool {
for _, c := range s.Conflicts {
for _, l := range c.Locations() {
if l == id {
return true
}
}
}
return false
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func checkProvinceValid(citizenNo []byte) bool {\n\tprovinceCode := make([]byte, 0)\n\tprovinceCode = append(provinceCode, citizenNo[:2]...)\n\tprovinceStr := string(provinceCode)\n\n\t// 判断省份/地区是否合规\n\tif _, ok := validProvince[provinceStr]; ok {\n\t\treturn true\n\t}\n\treturn false\n}",
"func (w *Worker) In(region string) bool { return region == \"\" || region == w.Zone }",
"func verifyProvinceKTP(customer *models.Customer) bool {\n\tsliceProv, _ := strconv.Atoi(customer.Ktp[0:2])\n\n\tvar provPermitted = map[int]string{\n\t\t12: \"Sumatera Utara\",\n\t\t31: \"DKI Jakarta\",\n\t\t32: \"Jawa Barat\",\n\t\t35: \"Jawa Timur\",\n\t}\n\n\t_, isExist := provPermitted[sliceProv]\n\n\treturn isExist\n}",
"func (m CrossOrderCancelReplaceRequest) HasStateOrProvinceOfIssue() bool {\n\treturn m.Has(tag.StateOrProvinceOfIssue)\n}",
"func (db *Db) SearchCityProvince(query string) ([]*locationpb.SearchCityCountry, error) {\n\tvar cities []*locationpb.SearchCityCountry\n\t_, err := db.Client.Query(&cities, `SELECT search_result, city_id FROM \"public\".search_city_province(?)`, query)\n\treturn cities, err\n}",
"func checkresult(action string, photolng float64, citylng float64) bool{\n\treturn (photolng <= citylng && action == \"West\") || (photolng >= citylng && action == \"East\")\n}",
"func (s *State) GetConflict(location pb.ProvinceId) *Conflict {\n\treturn s.Conflicts[location]\n}",
"func addressIn(location map[string]interface{}, country string) bool {\n\tprops, ok := location[\"props\"]\n\tif !ok {\n\t\treturn false\n\t}\n\n\tc, ok := (props.(map[string]interface{}))[\"country\"]\n\tif !ok {\n\t\treturn false\n\t}\n\n\tif c == country {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func (ctl *AddressAPIController) ShowProvince(c echo.Context) (err error) {\n\tmodel := models.Province{}\n\tif err := ctl.DB(nil).Select([]string{\"CH_ID\", \"CHANGWAT_E\", \"CHANGWAT_T\"}).\n\t\tWhere(\"CH_ID = ?\", c.Param(\"id\")).\n\t\tGroup(\"CH_ID\").\n\t\tFirst(&model).Error; gorm.IsRecordNotFoundError(err) {\n\t\treturn &exceptions.ErrorException{\n\t\t\tMessage: \"Not found.\",\n\t\t\tErrorKey: \"not-found\",\n\t\t\tCode: http.StatusNotFound,\n\t\t}\n\t}\n\treturn c.JSON(http.StatusOK, map[string]interface{}{\n\t\t\"data\": model,\n\t})\n}",
"func (l *Location) IsValid() bool {\n\tif len(l.Country) == 0 {\n\t\treturn false\n\t}\n\tif GetCountry(string(l.Country)) == l.Country {\n\t\treturn true\n\t}\n\treturn false\n}",
"func (europ europeDeprecatedTimeZones) Isle_of_Man() string { return \"Europe/London\" }",
"func (uc *userConfig) IfConflict(maxReplicas int) bool {\n\tret := false\n\t// move_leaders\n\tfor i, l1 := range uc.cfg.Leaders.Leader {\n\t\tfor j, l2 := range uc.cfg.Leaders.Leader {\n\t\t\tif i < j {\n\t\t\t\tif (l1.KeyStart <= l2.KeyStart && l1.KeyEnd > l2.KeyStart) ||\n\t\t\t\t\t(l2.KeyStart <= l1.KeyStart && l2.KeyEnd > l1.KeyStart) {\n\t\t\t\t\tif ((l1.StartTime.Before(l2.StartTime) || l1.StartTime.Equal(l2.StartTime)) && \n\t\t\t\t\t\t\tl1.EndTime.After(l2.StartTime)) || \n\t\t\t\t\t\t((l2.StartTime.Before(l1.StartTime) || l2.StartTime.Equal(l1.StartTime)) && \n\t\t\t\t\t\t\tl2.EndTime.After(l1.StartTime)) {\n\t\t\t\t\t\tlog.Error(\"Key Range Conflict\", zap.Ints(\"Config Move-Leader Nums\", []int{i, j}))\n\t\t\t\t\t\tret = true\n\t\t\t\t\t}\n\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\t// move_regions\n\tfor i, r1 := range uc.cfg.Regions.Region {\n\t\tfor j, r2 := range uc.cfg.Regions.Region {\n\t\t\tif i < j {\n\t\t\t\tif (r1.KeyStart <= r2.KeyStart && r1.KeyEnd > r2.KeyStart) ||\n\t\t\t\t\t(r2.KeyStart <= r1.KeyStart && r2.KeyEnd > r1.KeyStart) {\n\t\t\t\t\tif ((r1.StartTime.Before(r2.StartTime) || r1.StartTime.Equal(r2.StartTime)) &&\n\t\t\t\t\t\t\tr1.EndTime.After(r2.StartTime)) ||\n\t\t\t\t\t\t((r2.StartTime.Before(r1.StartTime) || r2.StartTime.Equal(r1.StartTime)) &&\n\t\t\t\t\t\t\tr2.EndTime.After(r1.StartTime)) {\n\t\t\t\t\t\tlog.Error(\"Key Range Conflict\", zap.Ints(\"Config Move-Region Nums\", []int{i, j}))\n\t\t\t\t\t\tret = true\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\t// store nums > max replicas\n\tfor i, r := range uc.cfg.Regions.Region {\n\t\tif len(r.Stores) > maxReplicas {\n\t\t\tlog.Error(\"the number of stores is beyond the max replicas\", zap.Int(\"Config Move-Region Nums\", i))\n\t\t\tret = true\n\t\t}\n\t}\n\treturn ret\n}",
"func (crs LambertConformalConic2SP) Contains(lon, lat float64) bool {\n\tif crs.Area != nil && !crs.Area.Contains(lon, lat) {\n\t\treturn false\n\t}\n\tif crs.GeodeticDatum != nil && !crs.GeodeticDatum.Contains(lon, lat) {\n\t\treturn false\n\t}\n\tif math.Abs(lon) > 180 || math.Abs(lat) > 90 {\n\t\treturn false\n\t}\n\treturn true\n}",
"func location(city string) (string, string) {\n\tvar region, continent string\n\tswitch city {\n\tcase \"Indore\", \"Bhopal\", \"Ratlam\":\n\t\tregion, continent = \"India\", \"Asia\"\n\tcase \"Melbourne\":\n\t\tregion, continent = \"Victoria\", \"Australia\"\n\tdefault:\n\t\tregion, continent = \"unknown\", \"unknown\"\n\t}\n\treturn region, continent\n}",
"func (data *Invasion) AnyCitiesLeft() bool {\n return len(data.AllCities()) != 0\n}",
"func getValidRegion(value string) string {\n\tif len(value) == 3 {\n\t\treturn validRegions[value]\n\t}\n\tif len(value) != 4 {\n\t\treturn \"\"\n\t}\n\tfor _, dc := range validRegions {\n\t\tif value == dc {\n\t\t\treturn value\n\t\t}\n\t}\n\treturn \"\"\n}",
"func isCountry(str string) bool {\n\tfor _, entry := range govalidator.ISO3166List {\n\t\tif str == entry.EnglishShortName {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}",
"func HasProvinceID() predicate.Province {\n\treturn predicate.Province(func(s *sql.Selector) {\n\t\tstep := sqlgraph.NewStep(\n\t\t\tsqlgraph.From(Table, FieldID),\n\t\t\tsqlgraph.To(ProvinceIDTable, FieldID),\n\t\t\tsqlgraph.Edge(sqlgraph.O2M, false, ProvinceIDTable, ProvinceIDColumn),\n\t\t)\n\t\tsqlgraph.HasNeighbors(s, step)\n\t})\n}",
"func (crs TransverseMercator) Contains(lon, lat float64) bool {\n\tif crs.Area != nil && !crs.Area.Contains(lon, lat) {\n\t\treturn false\n\t}\n\tif crs.GeodeticDatum != nil && !crs.GeodeticDatum.Contains(lon, lat) {\n\t\treturn false\n\t}\n\tif math.Abs(lon) > 180 || math.Abs(lat) > 90 {\n\t\treturn false\n\t}\n\treturn true\n}",
"func (ameri americaDeprecatedTimeZones) Louisville() string { return \"America/Kentucky/Louisville\" }",
"func (m *CountryNamedLocation) GetIncludeUnknownCountriesAndRegions()(*bool) {\n val, err := m.GetBackingStore().Get(\"includeUnknownCountriesAndRegions\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*bool)\n }\n return nil\n}",
"func (usaTz uSATimeZones) VirginIslands() string {return \"America/Port_of_Spain\" }",
"func ProvinceNameContainsFold(v string) predicate.Province {\n\treturn predicate.Province(func(s *sql.Selector) {\n\t\ts.Where(sql.ContainsFold(s.C(FieldProvinceName), v))\n\t})\n}",
"func (o *Workloadv1Location) GetRegionOk() (*string, bool) {\n\tif o == nil || o.Region == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Region, true\n}",
"func ProvinceNameContains(v string) predicate.Province {\n\treturn predicate.Province(func(s *sql.Selector) {\n\t\ts.Where(sql.Contains(s.C(FieldProvinceName), v))\n\t})\n}",
"func (m SecurityListRequest) HasStateOrProvinceOfIssue() bool {\n\treturn m.Has(tag.StateOrProvinceOfIssue)\n}",
"func (kentu kentuckyTimeZones) Louisville() string {return \"America/Kentucky/Louisville\" }",
"func (s *WeatherData) verifyCause(APIstub shim.ChaincodeStubInterface, polyId string ) sc.Response {\n\t\n\tfmt.Println(\"============= START : Fetching polygon details by the polygon id =============\")\n\tvar datasource = \"URL\" // Setting the Oraclize datasource\n\tvar query = \"json(http://api.agromonitoring.com/agro/1.0/weather?polyid=\" + polyId + \"&appid=\" + API_KEY + \")\" // Setting the query\n\tresult, proof := oraclizeapi.OraclizeQuery_sync(APIstub, datasource, query, oraclizeapi.TLSNOTARY)\n\tfmt.Printf(\"proof: %s\", proof)\n\tfmt.Printf(\"\\nresult: %s\\n\", result)\n\tvar response = {\n\t\t\"result\": result,\n\t\t\"proof\": proof\n\t}",
"func (m CrossOrderCancelReplaceRequest) HasCountryOfIssue() bool {\n\treturn m.Has(tag.CountryOfIssue)\n}",
"func WarrantyProvinceValidation(province string) infra.Validation {\n\tnotSupportedProvinces := getNotSupportedProvinces()\n\tfor _, notSupportedProvince := range notSupportedProvinces {\n\t\tif province == notSupportedProvince {\n\t\t\treturn infra.Validation{Err: fmt.Errorf(\"warranty province %s not supported\", province)}\n\t\t}\n\t}\n\treturn infra.Validation{Err: nil}\n}",
"func (crs WebMercator) Contains(lon, lat float64) bool {\n\tif crs.Area != nil && !crs.Area.Contains(lon, lat) {\n\t\treturn false\n\t}\n\tif crs.GeodeticDatum != nil && !crs.GeodeticDatum.Contains(lon, lat) {\n\t\treturn false\n\t}\n\tif math.Abs(lon) > 180 || math.Abs(lat) > 85.06 {\n\t\treturn false\n\t}\n\treturn true\n}",
"func InArea(key, set string) bool {\n\tconn := db.Pool.Get()\n\tdefer conn.Close()\n\n\tresult, err := db.Sismember(conn, set, key)\n\tif err != nil {\n\t\treturn false\n\t}\n\treturn result\n}",
"func IsValidRegion(promisedLand string) bool {\n\tpartitions := endpoints.DefaultResolver().(endpoints.EnumPartitions).Partitions()\n\tfor _, p := range partitions {\n\t\tfor region := range p.Regions() {\n\t\t\tif promisedLand == region {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t}\n\treturn false\n}",
"func GetProvince(subject string) string {\n\treturn getRegexMatch(\"ST=([^(,|+)]+)\", subject)\n}",
"func (pacif pacificTimeZones) Auckland() string {return \"Pacific/Auckland\" }",
"func complyWithCookieLaws(ip string) bool {\n\tipThreeOctets := getThreeOctets(ip)\n\n\tif appconfig.Instance.GeoResolver.Type() != geo.MaxmindType {\n\t\treturn false\n\t}\n\n\tdata, err := appconfig.Instance.GeoResolver.Resolve(ipThreeOctets)\n\tif err != nil {\n\t\tlogging.SystemErrorf(\"Error resolving IP %q into geo data: %v\", ipThreeOctets, err)\n\t\treturn false\n\t}\n\n\tif _, ok := geo.EUCountries[data.Country]; ok || data.Country == geo.UKCountry {\n\t\treturn false\n\t}\n\n\treturn true\n}",
"func LocationExist(locationPrefix, location, env string) (retorno bool) {\n\tdb, err := sql.Open(\"mysql\", UserDB+\":\"+PassDB+\"@tcp(\"+HostDB+\":\"+PortDB+\")/\"+DatabaseDB+\"?charset=utf8\")\n\tcheckErr(err)\n\n\tdefer db.Close()\n\n\t// query\n\trows, err := db.Query(\"SELECT COUNT(id_mapeamento) FROM mapeamentos WHERE location_prefix='\" + locationPrefix + \"' AND location='\" + location + \"' AND env='\" + env + \"'\")\n\tcheckErr(err)\n\n\tfor rows.Next() {\n\t\tvar count int\n\t\terr = rows.Scan(&count)\n\t\tcheckErr(err)\n\n\t\tif count >= 1 {\n\t\t\treturn true\n\t\t} else {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn retorno\n}",
"func (me TviewRefreshModeEnumType) IsOnRegion() bool { return me == \"onRegion\" }",
"func (pacif pacificTimeZones) Bougainville() string {return \"Pacific/Bougainville\" }",
"func (c *Country) ExistsThree(text string) bool {\n\tif strings.Contains(text, c.codeThree) {\n\t\treturn true\n\t}\n\treturn false\n\n}",
"func PostalcodeContainsFold(v string) predicate.Location {\n\treturn predicate.Location(func(s *sql.Selector) {\n\t\ts.Where(sql.ContainsFold(s.C(FieldPostalcode), v))\n\t})\n}",
"func (m Message) GetStateOrProvinceOfIssue(f *field.StateOrProvinceOfIssueField) quickfix.MessageRejectError {\n\treturn m.Body.Get(f)\n}",
"func (m Message) GetStateOrProvinceOfIssue(f *field.StateOrProvinceOfIssueField) quickfix.MessageRejectError {\n\treturn m.Body.Get(f)\n}",
"func (m Message) GetStateOrProvinceOfIssue(f *field.StateOrProvinceOfIssueField) quickfix.MessageRejectError {\n\treturn m.Body.Get(f)\n}",
"func (m Message) GetStateOrProvinceOfIssue(f *field.StateOrProvinceOfIssueField) quickfix.MessageRejectError {\n\treturn m.Body.Get(f)\n}",
"func (europ europeTimeZones) London() string {return \"Europe/London\" }",
"func (s *State) IsAtWar(id pb.ProvinceId) bool {\n\tfor _, c := range s.Conflicts {\n\t\tfor _, a := range c.Attackers() {\n\t\t\tif a == id {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t\tfor _, d := range c.Defenders() {\n\t\t\tif d == id {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t}\n\treturn false\n}",
"func HasProvinceIDWith(preds ...predicate.User) predicate.Province {\n\treturn predicate.Province(func(s *sql.Selector) {\n\t\tstep := sqlgraph.NewStep(\n\t\t\tsqlgraph.From(Table, FieldID),\n\t\t\tsqlgraph.To(ProvinceIDInverseTable, FieldID),\n\t\t\tsqlgraph.Edge(sqlgraph.O2M, false, ProvinceIDTable, ProvinceIDColumn),\n\t\t)\n\t\tsqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {\n\t\t\tfor _, p := range preds {\n\t\t\t\tp(s)\n\t\t\t}\n\t\t})\n\t})\n}",
"func (m Message) StateOrProvinceOfIssue() (*field.StateOrProvinceOfIssueField, quickfix.MessageRejectError) {\n\tf := &field.StateOrProvinceOfIssueField{}\n\terr := m.Body.Get(f)\n\treturn f, err\n}",
"func (m Message) StateOrProvinceOfIssue() (*field.StateOrProvinceOfIssueField, quickfix.MessageRejectError) {\n\tf := &field.StateOrProvinceOfIssueField{}\n\terr := m.Body.Get(f)\n\treturn f, err\n}",
"func (m Message) StateOrProvinceOfIssue() (*field.StateOrProvinceOfIssueField, quickfix.MessageRejectError) {\n\tf := &field.StateOrProvinceOfIssueField{}\n\terr := m.Body.Get(f)\n\treturn f, err\n}",
"func (m Message) StateOrProvinceOfIssue() (*field.StateOrProvinceOfIssueField, quickfix.MessageRejectError) {\n\tf := &field.StateOrProvinceOfIssueField{}\n\terr := m.Body.Get(f)\n\treturn f, err\n}",
"func (o *Workloadv1Location) GetCountryOk() (*string, bool) {\n\tif o == nil || o.Country == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Country, true\n}",
"func (mt *GoaLocalCouncillor) Validate() (err error) {\n\tif mt.FirstName == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"firstName\"))\n\t}\n\tif mt.SecondName == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"secondName\"))\n\t}\n\tif mt.Area == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"area\"))\n\t}\n\tif mt.Image == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"image\"))\n\t}\n\tif mt.Phone == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"phone\"))\n\t}\n\tif mt.Email == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"email\"))\n\t}\n\tif mt.Party == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"party\"))\n\t}\n\tif mt.Address == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"address\"))\n\t}\n\tif mt.County == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"county\"))\n\t}\n\tif mt.UserID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"userID\"))\n\t}\n\n\treturn\n}",
"func CLUBELOCATIONADDRESSEqualFold(v string) predicate.Location {\n\treturn predicate.Location(func(s *sql.Selector) {\n\t\ts.Where(sql.EqualFold(s.C(FieldCLUBELOCATIONADDRESS), v))\n\t})\n}",
"func (c *Country) ExistsTwo(text string) bool {\n\tif strings.Contains(text, c.codeTwo) {\n\t\treturn true\n\t}\n\n\treturn false\n\n}",
"func (r *Region) IsOnline() bool {\n\treturn r != nil && r.Status != \"OFFLINE\" // only this specific word takes us offline\n}",
"func (c *Country) ExistsFull(text string) bool {\n\tif strings.Contains(text, strings.ToLower(c.full)) {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func (m *CountryNamedLocation) SetIncludeUnknownCountriesAndRegions(value *bool)() {\n err := m.GetBackingStore().Set(\"includeUnknownCountriesAndRegions\", value)\n if err != nil {\n panic(err)\n }\n}",
"func (o *DisplayInfo) GetRegionOk() (*string, bool) {\n\tif o == nil || o.Region == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Region, true\n}",
"func Get_country_long(ipaddress string) IP2Locationrecord {\n\treturn handleError(defaultDB.query(ipaddress, countrylong))\n}",
"func (db *Database) Area(ip string) string {\n\tdefer func() {\n\t\t_ = recover()\n\t}()\n\trecord, err := db.geo.City(net.ParseIP(ip))\n\tif err != nil {\n\t\treturn \"\"\n\t}\n\n\tcountry := record.Country.Names[\"en\"]\n\tcity := record.City.Names[\"en\"]\n\tif city == \"\" {\n\t\tcity = record.Location.TimeZone\n\t}\n\treturn fmt.Sprintf(\"%s %s\", country, city)\n}",
"func (o *TransactionSplit) GetSepaCountryOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SepaCountry.Get(), o.SepaCountry.IsSet()\n}",
"func (bc *BasicLineGraph) PreCheckPutRegion(region *RegionInfo) (*RegionInfo, error) {\n\tbc.RLock()\n\torigin := bc.Regions.GetRegion(region.GetID())\n\tif origin == nil || !bytes.Equal(origin.GetRootKey(), region.GetRootKey()) || !bytes.Equal(origin.GetEndKey(), region.GetEndKey()) {\n\t\tfor _, item := range bc.Regions.GetOverlaps(region) {\n\t\t\tif region.GetRegionEpoch().GetVersion() < item.GetRegionEpoch().GetVersion() {\n\t\t\t\tbc.RUnlock()\n\t\t\t\treturn nil, ErrRegionIsStale(region.GetMeta(), item.GetMeta())\n\t\t\t}\n\t\t}\n\t}\n\tbc.RUnlock()\n\tif origin == nil {\n\t\treturn nil, nil\n\t}\n\tr := region.GetRegionEpoch()\n\to := origin.GetRegionEpoch()\n\t// Region meta is stale, return an error.\n\tif r.GetVersion() < o.GetVersion() || r.GetConfVer() < o.GetConfVer() {\n\t\treturn origin, ErrRegionIsStale(region.GetMeta(), origin.GetMeta())\n\t}\n\treturn origin, nil\n}",
"func (o *Workloadv1Location) HasRegionCode() bool {\n\tif o != nil && o.RegionCode != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func (antar antarcticaTimeZones) Syowa() string {return \"Antarctica/Syowa\" }",
"func RegionContainsFold(v string) predicate.Location {\n\treturn predicate.Location(func(s *sql.Selector) {\n\t\ts.Where(sql.ContainsFold(s.C(FieldRegion), v))\n\t})\n}",
"func queryIPToCountry(ipAddr string) string {\n\t//db, err := geoip2.Open(\"GeoIP2-City.mmdb\")\n\tdb, err := geoip2.Open(\"GeoLite2-Country.mmdb\")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tdefer db.Close()\n\t// If you are using strings that may be invalid, check that ip is not nil\n\t// ip := net.ParseIP(\"45.32.47.180\")\n\tip := net.ParseIP(ipAddr)\n\trecord, err := db.City(ip)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// fmt.Println(record)\n\t//fmt.Printf(\"%+v\\n\", record.Country.Names[\"en\"])\n\treturn record.Country.Names[\"en\"]\n}",
"func (c *causality) detectConflict(keys [][]byte) (bool, int) {\n\tif len(keys) == 0 {\n\t\treturn false, 0\n\t}\n\n\tfirstIdx := -1\n\tfor _, key := range keys {\n\t\tif idx, ok := c.relations[string(key)]; ok {\n\t\t\tif firstIdx == -1 {\n\t\t\t\tfirstIdx = idx\n\t\t\t} else if firstIdx != idx {\n\t\t\t\treturn true, -1\n\t\t\t}\n\t\t}\n\t}\n\n\treturn firstIdx != -1, firstIdx\n}",
"func hasValidCountryCallingCode(countryCallingCode int) bool {\n\t_, containsKey := countryCodeToRegion[countryCallingCode]\n\treturn containsKey\n}",
"func checkAbbreviation(field string) (bool, string) {\n\tvar expectedAbbreviations = map[string]string{\n\t\t\"configuration\": \"config\",\n\t\t\"identifier\": \"id\",\n\t\t\"information\": \"info\",\n\t\t\"specification\": \"spec\",\n\t\t\"statistics\": \"stats\",\n\t}\n\n\tif suggestion, exists := expectedAbbreviations[field]; exists {\n\t\treturn true, suggestion\n\t}\n\treturn false, field\n}",
"func (o *Workloadv1Location) GetRegionCodeOk() (*string, bool) {\n\tif o == nil || o.RegionCode == nil {\n\t\treturn nil, false\n\t}\n\treturn o.RegionCode, true\n}",
"func (crs AlbersEqualAreaConic) Contains(lon, lat float64) bool {\n\tif crs.Area != nil && !crs.Area.Contains(lon, lat) {\n\t\treturn false\n\t}\n\tif crs.GeodeticDatum != nil && !crs.GeodeticDatum.Contains(lon, lat) {\n\t\treturn false\n\t}\n\tif math.Abs(lon) > 180 || math.Abs(lat) > 90 {\n\t\treturn false\n\t}\n\treturn true\n}",
"func (antar antarcticaDeprecatedTimeZones) South_Pole() string { return \"Pacific/Auckland\" }",
"func (data *Invasion) ValidateRoads() error {\n for _, city := range data.Map {\n for _, direction := range AllRoads(city) {\n toCityName, toExists := city.Roads[direction]\n if !toExists {\n continue\n }\n\n toCity, toCityOk := data.Map[toCityName];\n if !toCityOk {\n return errors.New(fmt.Sprintf(\"Map validation error: road to %s from %s, but %s not found on map\", toCityName, city.Name, toCityName))\n }\n\n if toCity.Roads[oppositeDirection(direction)] != city.Name {\n return errors.New(fmt.Sprintf(\"Map validation error: no back-road to %s from %s, but should be\", toCity.Roads[oppositeDirection(direction)], city.Name))\n }\n }\n }\n\n return nil\n}",
"func (p *simpleParser) parseConflictClause(r reporter) (clause *ast.ConflictClause) {\n\tclause = &ast.ConflictClause{}\n\n\tnext, ok := p.optionalLookahead(r)\n\tif !ok {\n\t\treturn\n\t}\n\n\t// ON\n\tif next.Type() == token.KeywordOn {\n\t\tclause.On = next\n\t\tp.consumeToken()\n\t} else {\n\t\t// if there's no 'ON' token, the empty production is assumed, which is\n\t\t// why no error is reported here.\n\t\treturn\n\t}\n\n\t// CONFLICT\n\tnext, ok = p.lookahead(r)\n\tif !ok {\n\t\treturn\n\t}\n\tif next.Type() == token.KeywordConflict {\n\t\tclause.Conflict = next\n\t\tp.consumeToken()\n\t} else {\n\t\tr.unexpectedToken(token.KeywordConflict)\n\t\treturn\n\t}\n\n\t// ROLLBACK, ABORT, FAIL, IGNORE, REPLACE\n\tnext, ok = p.lookahead(r)\n\tif !ok {\n\t\treturn\n\t}\n\tswitch next.Type() {\n\tcase token.KeywordRollback:\n\t\tclause.Rollback = next\n\t\tp.consumeToken()\n\tcase token.KeywordAbort:\n\t\tclause.Abort = next\n\t\tp.consumeToken()\n\tcase token.KeywordFail:\n\t\tclause.Fail = next\n\t\tp.consumeToken()\n\tcase token.KeywordIgnore:\n\t\tclause.Ignore = next\n\t\tp.consumeToken()\n\tcase token.KeywordReplace:\n\t\tclause.Replace = next\n\t\tp.consumeToken()\n\tdefault:\n\t\tr.unexpectedToken(token.KeywordRollback, token.KeywordAbort, token.KeywordFail, token.KeywordIgnore, token.KeywordReplace)\n\t}\n\treturn\n}",
"func stateExists(state int, at map[int]map[uint8]int)bool {\n _, ok := at[state]\n if (!ok || state == -1 || at[state] == nil) {\n return false\n }\n return true\n}",
"func (o *Workloadv1Location) HasCountryCode() bool {\n\tif o != nil && o.CountryCode != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func ProvinceName(v string) predicate.Province {\n\treturn predicate.Province(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldProvinceName), v))\n\t})\n}",
"func (ameri americaDeprecatedTimeZones) St_Vincent() string { return \"America/Port_of_Spain\" }",
"func (m *Manager) GetProvinceCities(province string) []City {\n\tvar res []City\n\tq := fmt.Sprintf(\"SELECT %s FROM %s AS c WHERE province=?\",\n\t\tGetSelectFields(CityTableFull, \"c\"),\n\t\tCityTableFull,\n\t)\n\t_, err := m.GetRDbMap().Select(&res, q, province)\n\tassert.Nil(err)\n\treturn res\n}",
"func (d *DB) Get_country_long(ipaddress string) (IP2Locationrecord, error) {\n\treturn d.query(ipaddress, countrylong)\n}",
"func (canad canadaDeprecatedTimeZones) Newfoundland() string { return \"America/St_Johns\" }",
"func (ameri americaDeprecatedTimeZones) Lower_Princes() string { return \"America/Curacao\" }",
"func (m *Country) Unknown() bool {\n\treturn m.ID == \"\" || m.ID == UnknownCountry.ID\n}",
"func (c *ConcurrentPreviousSet) ExistsConflict(message types.Message) bool {\n\tc.mutex.Lock()\n\tdefer c.mutex.Unlock()\n\n\tfor _, t := range c.values {\n\t\tif c.relationship.Conflict(message, t) {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}",
"func cityContains(cities [] *model.City, city *model.City) bool {\n\tfor _, c := range cities {\n\t\tif c == city {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}",
"func (o *Workloadv1Location) HasCityCode() bool {\n\tif o != nil && o.CityCode != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func (g *Geocodio) GeocodeAndReturnCongressionalDistrict(address string) (GeocodeResult, error) {\n\treturn g.GeocodeReturnFields(address, \"cd\")\n}",
"func (self *AgentState) HasConflict(potentialJobName string, potentialConflicts []string) (bool, string) {\n\t// Iterate through each existing Job, asserting two things:\n\tfor existingJobName, existingConflicts := range self.conflicts {\n\n\t\t// 1. Each tracked Job does not conflict with the potential conflicts\n\t\tfor _, pc := range potentialConflicts {\n\t\t\tif globMatches(pc, existingJobName) {\n\t\t\t\treturn true, existingJobName\n\t\t\t}\n\t\t}\n\n\t\t// 2. The new Job does not conflict with any of the tracked confclits\n\t\tfor _, ec := range existingConflicts {\n\t\t\tif globMatches(ec, potentialJobName) {\n\t\t\t\treturn true, existingJobName\n\t\t\t}\n\t\t}\n\t}\n\n\treturn false, \"\"\n}",
"func ProvinceNameHasSuffix(v string) predicate.Province {\n\treturn predicate.Province(func(s *sql.Selector) {\n\t\ts.Where(sql.HasSuffix(s.C(FieldProvinceName), v))\n\t})\n}",
"func (europ europeTimeZones) Paris() string {return \"Europe/Paris\" }",
"func (ameri americaTimeZones) St_Vincent() string {return \"America/St_Vincent\" }",
"func Test_Client_MapBySubregion(t *testing.T) {\n\tcountriesInSEA := []string{\n\t\t\"Brunei\",\n\t\t\"Cambodia\",\n\t\t\"Indonesia\",\n\t\t\"Laos\",\n\t\t\"Malaysia\",\n\t\t\"Myanmar\",\n\t\t\"Philippines\",\n\t\t\"Singapore\",\n\t\t\"Thailand\",\n\t\t\"Timor-Leste\",\n\t\t\"Vietnam\",\n\t}\n\tret := mockClient.MapBySubregion(\"South-Eastern Asia\")\n\tfor _, row := range ret {\n\t\tassert.Contains(t, countriesInSEA, row.Name)\n\t}\n}",
"func (cr *Core) ProvinceList() (res ProvinceListResponse, err error) {\n\turlPath := \"province\"\n\theaders := map[string]string{\n\t\t\"key\": cr.Client.APIKey,\n\t}\n\terr = cr.CallPro(fasthttp.MethodGet, urlPath, headers, nil, &res)\n\tif err != nil {\n\t\treturn\n\t}\n\treturn\n}",
"func (c *causality) detectConflict(keys []string) bool {\n\tif len(keys) == 0 {\n\t\treturn false\n\t}\n\n\tvar existedRelation string\n\tfor _, key := range keys {\n\t\tif val, ok := c.relations[key]; ok {\n\t\t\tif existedRelation != \"\" && val != existedRelation {\n\t\t\t\treturn true\n\t\t\t}\n\t\t\texistedRelation = val\n\t\t}\n\t}\n\n\treturn false\n}",
"func CityContainsFold(v string) predicate.Location {\n\treturn predicate.Location(func(s *sql.Selector) {\n\t\ts.Where(sql.ContainsFold(s.C(FieldCity), v))\n\t})\n}",
"func (crs Projection) Contains(lon, lat float64) bool {\n\tif crs.Area != nil && !crs.Area.Contains(lon, lat) {\n\t\treturn false\n\t}\n\tif crs.GeodeticDatum != nil && !crs.GeodeticDatum.Contains(lon, lat) {\n\t\treturn false\n\t}\n\tif math.Abs(lon) > 180 || math.Abs(lat) > 90 {\n\t\treturn false\n\t}\n\treturn true\n}",
"func isPostcodeByIso3166Alpha2(fl FieldLevel) bool {\n\tfield := fl.Field()\n\tparam := fl.Param()\n\n\treg, found := postCodeRegexDict[param]\n\tif !found {\n\t\treturn false\n\t}\n\n\treturn reg.MatchString(field.String())\n}",
"func (db *DB) GetCountryLong(ip string) (*Record, error) { return db.query(ip, ModeCountryLong) }"
] | [
"0.5658489",
"0.53894514",
"0.5341391",
"0.5287556",
"0.5286399",
"0.52390695",
"0.5008304",
"0.49755126",
"0.49598086",
"0.49070632",
"0.4902826",
"0.48903996",
"0.48770154",
"0.48651946",
"0.48634878",
"0.48171234",
"0.48066315",
"0.47969204",
"0.47834495",
"0.4744221",
"0.47427574",
"0.47348478",
"0.4726006",
"0.47104025",
"0.4692802",
"0.46860912",
"0.46736917",
"0.46688822",
"0.4666491",
"0.46479413",
"0.46478713",
"0.4632101",
"0.4620035",
"0.46190983",
"0.46134654",
"0.46079597",
"0.46072537",
"0.46050823",
"0.4603379",
"0.4590619",
"0.4589922",
"0.45889246",
"0.45889246",
"0.45889246",
"0.45889246",
"0.45857406",
"0.4577913",
"0.45694157",
"0.45552245",
"0.45552245",
"0.45552245",
"0.45552245",
"0.45279586",
"0.45196274",
"0.4519156",
"0.4513878",
"0.4511051",
"0.450764",
"0.4506468",
"0.45063955",
"0.44990057",
"0.4498288",
"0.44741488",
"0.44579124",
"0.4453701",
"0.44532496",
"0.44506618",
"0.44373122",
"0.44295916",
"0.44217235",
"0.44112724",
"0.44031447",
"0.44001567",
"0.43949363",
"0.43917206",
"0.43866053",
"0.43852445",
"0.43837002",
"0.43780062",
"0.4376932",
"0.43745238",
"0.43712947",
"0.43667182",
"0.43644443",
"0.4364208",
"0.43611848",
"0.4356814",
"0.4354761",
"0.4347757",
"0.43460438",
"0.434555",
"0.43437418",
"0.43373904",
"0.43351468",
"0.43314093",
"0.43264213",
"0.43221012",
"0.43209937",
"0.43194923",
"0.43181965"
] | 0.61279434 | 0 |
Checks if province is at war (not necessarily location of conflict) | func (s *State) IsAtWar(id pb.ProvinceId) bool {
for _, c := range s.Conflicts {
for _, a := range c.Attackers() {
if a == id {
return true
}
}
for _, d := range c.Defenders() {
if d == id {
return true
}
}
}
return false
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (s *State) IsSiteOfConflict(id pb.ProvinceId) bool {\n\tfor _, c := range s.Conflicts {\n\t\tfor _, l := range c.Locations() {\n\t\t\tif l == id {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t}\n\treturn false\n}",
"func checkProvinceValid(citizenNo []byte) bool {\n\tprovinceCode := make([]byte, 0)\n\tprovinceCode = append(provinceCode, citizenNo[:2]...)\n\tprovinceStr := string(provinceCode)\n\n\t// 判断省份/地区是否合规\n\tif _, ok := validProvince[provinceStr]; ok {\n\t\treturn true\n\t}\n\treturn false\n}",
"func (s *State) NewCivilWar(target pb.ProvinceId) bool { // TODO: Error return\n\tif s.IsAtWar(target) || s.IsSiteOfConflict(target) {\n\t\treturn false\n\t}\n\tc := &Conflict{\n\t\tname: \"Civil War\", // TODO\n\t\tlength: 0,\n\t\tattackers: Faction{\n\t\t\trebels: *(s.Get(target).Dissidents()),\n\t\t\tprogress: 0,\n\t\t},\n\t\tdefenders: Faction{\n\t\t\tmembers: []pb.ProvinceId{target},\n\t\t\tprogress: 0,\n\t\t},\n\t\tgoal: s.Settings().GetConflictGoal(pb.ConflictType_CIVIL_WAR),\n\t\tbase_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_CIVIL_WAR),\n\t\tlocations: []pb.ProvinceId{target},\n\t\tconflict_type: pb.ConflictType_CIVIL_WAR,\n\t}\n\ts.Conflicts[target] = c\n\treturn true\n}",
"func (w *Worker) In(region string) bool { return region == \"\" || region == w.Zone }",
"func (s *State) NewColonialWar(target pb.ProvinceId) bool { // TODO: Error return\n\tif s.IsAtWar(target) || s.IsSiteOfConflict(target) || s.Get(target).Occupier() != pb.ProvinceId_NONE {\n\t\treturn false\n\t}\n\tc := &Conflict{\n\t\tname: \"Colonial War\", // TODO\n\t\tlength: 0,\n\t\tattackers: Faction{\n\t\t\t// Dissidents\n\t\t\tprogress: 0,\n\t\t},\n\t\tdefenders: Faction{\n\t\t\tmembers: []pb.ProvinceId{s.Get(target).Occupier()},\n\t\t\tprogress: 0,\n\t\t},\n\t\tgoal: s.Settings().GetConflictGoal(pb.ConflictType_COLONIAL_WAR),\n\t\tbase_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_COLONIAL_WAR),\n\t\tlocations: []pb.ProvinceId{target},\n\t\tconflict_type: pb.ConflictType_COLONIAL_WAR,\n\t}\n\ts.Conflicts[target] = c\n\treturn true\n}",
"func (chile chileDeprecatedTimeZones) EasterIsland() string { return \"Pacific/Easter\" }",
"func (s *State) NewConventionalWar(defenders []pb.ProvinceId, attackers []pb.ProvinceId, locations []pb.ProvinceId) bool { // TODO: Error return\n\tfor _, d := range defenders {\n\t\tif s.IsAtWar(d) || s.IsSiteOfConflict(d) {\n\t\t\treturn false\n\t\t}\n\t}\n\tfor _, a := range attackers {\n\t\tif s.IsAtWar(a) || s.IsSiteOfConflict(a) {\n\t\t\treturn false\n\t\t}\n\t}\n\tfor _, l := range locations {\n\t\tif s.IsAtWar(l) || s.IsSiteOfConflict(l) {\n\t\t\treturn false\n\t\t}\n\t}\n\t// TODO: Logic for joining wars?\n\tc := &Conflict{\n\t\tname: \"War!\", // TODO\n\t\tlength: 0,\n\t\tattackers: Faction{\n\t\t\tmembers: attackers,\n\t\t\tprogress: 0,\n\t\t},\n\t\tdefenders: Faction{\n\t\t\tmembers: defenders,\n\t\t\tprogress: 0,\n\t\t},\n\t\tgoal: s.Settings().GetConflictGoal(pb.ConflictType_CONVENTIONAL_WAR),\n\t\tbase_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_CONVENTIONAL_WAR),\n\t\tlocations: locations,\n\t\tconflict_type: pb.ConflictType_CONVENTIONAL_WAR,\n\t}\n\t// For now it maps only to the first location\n\ts.Conflicts[locations[0]] = c\n\treturn true\n}",
"func (europ europeDeprecatedTimeZones) Isle_of_Man() string { return \"Europe/London\" }",
"func (atlan atlanticTimeZones) St_Helena() string {return \"Atlantic/St_Helena\" }",
"func verifyProvinceKTP(customer *models.Customer) bool {\n\tsliceProv, _ := strconv.Atoi(customer.Ktp[0:2])\n\n\tvar provPermitted = map[int]string{\n\t\t12: \"Sumatera Utara\",\n\t\t31: \"DKI Jakarta\",\n\t\t32: \"Jawa Barat\",\n\t\t35: \"Jawa Timur\",\n\t}\n\n\t_, isExist := provPermitted[sliceProv]\n\n\treturn isExist\n}",
"func (usaTz uSATimeZones) VirginIslands() string {return \"America/Port_of_Spain\" }",
"func (antar antarcticaDeprecatedTimeZones) South_Pole() string { return \"Pacific/Auckland\" }",
"func (w Workspace) IsBehind(ctx context.Context) bool {\n\tnodes := GetModelContext(ctx).Nodes\n\n\tfor _, id := range w.ProjectIDs {\n\t\tnode := nodes.MustLoadProject(id)\n\t\tif node.IsBehind {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}",
"func (ctl *AddressAPIController) ShowProvince(c echo.Context) (err error) {\n\tmodel := models.Province{}\n\tif err := ctl.DB(nil).Select([]string{\"CH_ID\", \"CHANGWAT_E\", \"CHANGWAT_T\"}).\n\t\tWhere(\"CH_ID = ?\", c.Param(\"id\")).\n\t\tGroup(\"CH_ID\").\n\t\tFirst(&model).Error; gorm.IsRecordNotFoundError(err) {\n\t\treturn &exceptions.ErrorException{\n\t\t\tMessage: \"Not found.\",\n\t\t\tErrorKey: \"not-found\",\n\t\t\tCode: http.StatusNotFound,\n\t\t}\n\t}\n\treturn c.JSON(http.StatusOK, map[string]interface{}{\n\t\t\"data\": model,\n\t})\n}",
"func (data *Invasion) AnyCitiesLeft() bool {\n return len(data.AllCities()) != 0\n}",
"func (atlan atlanticDeprecatedTimeZones) St_Helena() string { return \"Africa/Abidjan\" }",
"func (m CrossOrderCancelReplaceRequest) HasStateOrProvinceOfIssue() bool {\n\treturn m.Has(tag.StateOrProvinceOfIssue)\n}",
"func (ameri americaDeprecatedTimeZones) Louisville() string { return \"America/Kentucky/Louisville\" }",
"func (w Winding) IsColinear() bool { return w == Colinear }",
"func checkresult(action string, photolng float64, citylng float64) bool{\n\treturn (photolng <= citylng && action == \"West\") || (photolng >= citylng && action == \"East\")\n}",
"func (brazi brazilDeprecatedTimeZones) West() string { return \"America/Manaus\" }",
"func (europ europeTimeZones) Isle_of_Man() string {return \"Europe/Isle_of_Man\" }",
"func (pacif pacificTimeZones) Auckland() string {return \"Pacific/Auckland\" }",
"func (ameri americaDeprecatedTimeZones) Lower_Princes() string { return \"America/Curacao\" }",
"func (pacif pacificTimeZones) Bougainville() string {return \"Pacific/Bougainville\" }",
"func (m SecurityListRequest) HasStateOrProvinceOfIssue() bool {\n\treturn m.Has(tag.StateOrProvinceOfIssue)\n}",
"func (austr australiaDeprecatedTimeZones) West() string { return \"Australia/Perth\" }",
"func (atlan atlanticTimeZones) South_Georgia() string {return \"Atlantic/South_Georgia\" }",
"func (pacif pacificDeprecatedTimeZones) Midway() string { return \"Pacific/Pago_Pago\" }",
"func (r *Region) IsOnline() bool {\n\treturn r != nil && r.Status != \"OFFLINE\" // only this specific word takes us offline\n}",
"func outsideOfMap(m boundaries, x, y int) bool {\n\tif x < m.minX || x > m.maxX {\n\t\treturn true\n\t}\n\n\tif y < m.minY || y > m.maxY {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func (kentu kentuckyTimeZones) Louisville() string {return \"America/Kentucky/Louisville\" }",
"func (antar antarcticaTimeZones) Syowa() string {return \"Antarctica/Syowa\" }",
"func GetProvince(subject string) string {\n\treturn getRegexMatch(\"ST=([^(,|+)]+)\", subject)\n}",
"func (atlan atlanticTimeZones) Canary() string {return \"Atlantic/Canary\" }",
"func (me TviewRefreshModeEnumType) IsOnRegion() bool { return me == \"onRegion\" }",
"func HasProvinceID() predicate.Province {\n\treturn predicate.Province(func(s *sql.Selector) {\n\t\tstep := sqlgraph.NewStep(\n\t\t\tsqlgraph.From(Table, FieldID),\n\t\t\tsqlgraph.To(ProvinceIDTable, FieldID),\n\t\t\tsqlgraph.Edge(sqlgraph.O2M, false, ProvinceIDTable, ProvinceIDColumn),\n\t\t)\n\t\tsqlgraph.HasNeighbors(s, step)\n\t})\n}",
"func checkLocalTimezone() {\n\ttzName, tzOffset := time.Now().Zone()\n\tif time.Duration(tzOffset)*time.Second != tzBeijing {\n\t\tlog.Warn().Msgf(\n\t\t\t\"expected Beijing Timezone (UTC+08), but found %s (UTC%s)\",\n\t\t\ttzName, time.Now().Format(\"-07\"),\n\t\t)\n\t}\n}",
"func (o *W2) HasStateAndLocalWages() bool {\n\tif o != nil && o.StateAndLocalWages != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func WarrantyProvinceValidation(province string) infra.Validation {\n\tnotSupportedProvinces := getNotSupportedProvinces()\n\tfor _, notSupportedProvince := range notSupportedProvinces {\n\t\tif province == notSupportedProvince {\n\t\t\treturn infra.Validation{Err: fmt.Errorf(\"warranty province %s not supported\", province)}\n\t\t}\n\t}\n\treturn infra.Validation{Err: nil}\n}",
"func (afric africaTimeZones) Windhoek() string {return \"Africa/Windhoek\" }",
"func stateExists(state int, at map[int]map[uint8]int)bool {\n _, ok := at[state]\n if (!ok || state == -1 || at[state] == nil) {\n return false\n }\n return true\n}",
"func CheckIfBattle(img gocv.Mat, percentage float64, options Options) (bool, error) {\n\n\tvar asset AreaLocation\n\tasset = options.Provider.GetAreaLocation(\"battle_mode_area\")\n\ttopCorner := asset.Bounds.Lower\n\tbottomCorner := asset.Bounds.Upper\n\n\tif bottomCorner == (image.Point{}) {\n\t\treturn false, errors.New(\"invalid bottom point\")\n\t}\n\t// use image.Rect in case data is wrong and the lower and upper bounds are switched\n\troi := img.Region(image.Rect(topCorner.X, topCorner.Y, bottomCorner.X, bottomCorner.Y))\n\twhiteMin := gocv.NewScalar(250, 250, 250, 0)\n\twhiteMax := gocv.NewScalar(255, 255, 255, 255)\n\n\twhiteQuery := gocv.NewMat()\n\tgocv.InRangeWithScalar(roi, whiteMin, whiteMax, &whiteQuery)\n\n\tdefer roi.Close()\n\tdefer whiteQuery.Close()\n\n\tif gocv.CountNonZero(whiteQuery) > int(float64(roi.Rows()*roi.Cols())*percentage) {\n\t\treturn true, nil\n\t}\n\treturn false, nil\n}",
"func (g *G1) isValidProjective() bool { return (g.x.IsZero() & g.y.IsZero() & g.z.IsZero()) != 1 }",
"func (europ europeTimeZones) Warsaw() string {return \"Europe/Warsaw\" }",
"func getValidTopology(topologyMap map[string][]string) ([]string, []string) {\n\tvar regionValues []string\n\tvar zoneValues []string\n\tfor region, zones := range topologyMap {\n\t\tregionValues = append(regionValues, region)\n\t\tzoneValues = append(zoneValues, zones...)\n\t}\n\treturn regionValues, zoneValues\n}",
"func (ameri americaTimeZones) Creston() string {return \"America/Creston\" }",
"func allowedInCity(lastDigit int, dateTime time.Time) bool {\n\tallowed := true\n\n\tweekday := dateTime.Weekday()\n\tdateString := dateTime.Format(\"2006-01-02\")\n\n\tmorningRestrictionStart, _ := time.Parse(time.RFC3339, dateString + \"T\" + \"07:00:00-05:00\")\n\tmorningRestrictionEnd, _ := time.Parse(time.RFC3339, dateString + \"T\" + \"09:30:00-05:00\")\n\n\teveningRestrictionStart, _ := time.Parse(time.RFC3339, dateString + \"T\" + \"16:00:00-05:00\")\n\teveningRestrictionEnd, _ := time.Parse(time.RFC3339, dateString + \"T\" + \"19:30:00-05:00\")\n\n\trestrictedMorningTime := dateTime.After(morningRestrictionStart) && dateTime.Before(morningRestrictionEnd)\n\trestrictedEveningTime := dateTime.After(eveningRestrictionStart) && dateTime.Before(eveningRestrictionEnd)\n\n\tif restrictedMorningTime || restrictedEveningTime {\n\t\tswitch weekday {\n\t\tcase time.Monday:\n\t\t\tif lastDigit == 1 || lastDigit == 2 {\n\t\t\t\tallowed = false\n\t\t\t}\n\t\tcase time.Tuesday:\n\t\t\tif lastDigit == 3 || lastDigit == 4 {\n\t\t\t\tallowed = false\n\t\t\t}\n\t\tcase time.Wednesday:\n\t\t\tif lastDigit == 5 || lastDigit == 6 {\n\t\t\t\tallowed = false\n\t\t\t}\n\t\tcase time.Thursday:\n\t\t\tif lastDigit == 7 || lastDigit == 8 {\n\t\t\t\tallowed = false\n\t\t\t}\n\t\tcase time.Friday:\n\t\t\tif lastDigit == 9 || lastDigit == 0 {\n\t\t\t\tallowed = false\n\t\t\t}\n\t\t}\n\t}\n\n\treturn allowed\n}",
"func (ameri americaTimeZones) Whitehorse() string {return \"America/Whitehorse\" }",
"func (ameri americaTimeZones) Winnipeg() string {return \"America/Winnipeg\" }",
"func (europ europeTimeZones) Jersey() string {return \"Europe/Jersey\" }",
"func IsWestern(char rune) bool {\n\treturn (char > '\\u0040' && char < '\\u005B') ||\n\t\t (char > '\\u0060' && char < '\\u007B') ||\n\t\t (char > '\\u00BF' && char < '\\u00D7') ||\n\t\t (char > '\\u00D7' && char < '\\u02B0') ||\n\t\t (char > '\\u036F' && char < '\\u0590')\n}",
"func (ameri americaTimeZones) St_Vincent() string {return \"America/St_Vincent\" }",
"func (ameri americaTimeZones) Los_Angeles() string {return \"America/Los_Angeles\" }",
"func (ameri americaTimeZones) PortMinusauMinusPrince() string {return \"America/Port-au-Prince\" }",
"func (ameri americaDeprecatedTimeZones) Virgin() string { return \"America/Port_of_Spain\" }",
"func (austr australiaTimeZones) Melbourne() string {return \"Australia/Melbourne\" }",
"func (pacif pacificTimeZones) Midway() string {return \"Pacific/Midway\" }",
"func (ameri americaDeprecatedTimeZones) St_Barthelemy() string { return \"America/Port_of_Spain\" }",
"func (europ europeTimeZones) London() string {return \"Europe/London\" }",
"func (bf *boardFilter) atStation(tpl string) bool {\n\tfor _, s := range bf.res.Station {\n\t\tif s == tpl {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}",
"func (austr australiaTimeZones) Broken_Hill() string {return \"Australia/Broken_Hill\" }",
"func (pacif pacificDeprecatedTimeZones) Johnston() string { return \"Pacific/Honolulu\" }",
"func (europ europeTimeZones) Andorra() string {return \"Europe/Andorra\" }",
"func (ameri americaTimeZones) St_Lucia() string {return \"America/St_Lucia\" }",
"func (pacif pacificTimeZones) Enderbury() string {return \"Pacific/Enderbury\" }",
"func (europ europeDeprecatedTimeZones) Jersey() string { return \"Europe/London\" }",
"func (atlan atlanticTimeZones) Stanley() string {return \"Atlantic/Stanley\" }",
"func (ameri americaDeprecatedTimeZones) St_Lucia() string { return \"America/Port_of_Spain\" }",
"func IsValidRegion(promisedLand string) bool {\n\tpartitions := endpoints.DefaultResolver().(endpoints.EnumPartitions).Partitions()\n\tfor _, p := range partitions {\n\t\tfor region := range p.Regions() {\n\t\t\tif promisedLand == region {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t}\n\treturn false\n}",
"func (europ europeTimeZones) Guernsey() string {return \"Europe/Guernsey\" }",
"func (me TAttlistDescriptorNameType) IsGeographic() bool { return me.String() == \"Geographic\" }",
"func isIso3166Alpha2(fl FieldLevel) bool {\n\tval := fl.Field().String()\n\treturn iso3166_1_alpha2[val]\n}",
"func (ameri americaDeprecatedTimeZones) St_Vincent() string { return \"America/Port_of_Spain\" }",
"func (antar antarcticaTimeZones) Vostok() string {return \"Antarctica/Vostok\" }",
"func (uSDep uSDeprecatedTimeZones) Arizona() string { return \"America/Phoenix\" }",
"func (europ europeTimeZones) Oslo() string {return \"Europe/Oslo\" }",
"func isDST(t time.Time) bool {\n\tname, _ := t.In(locNewYork).Zone()\n\treturn name == \"EDT\"\n}",
"func (pacif pacificTimeZones) Johnston() string {return \"Pacific/Johnston\" }",
"func (europ europeTimeZones) Belfast() string {return \"Europe/Belfast\" }",
"func getValidRegion(value string) string {\n\tif len(value) == 3 {\n\t\treturn validRegions[value]\n\t}\n\tif len(value) != 4 {\n\t\treturn \"\"\n\t}\n\tfor _, dc := range validRegions {\n\t\tif value == dc {\n\t\t\treturn value\n\t\t}\n\t}\n\treturn \"\"\n}",
"func (ameri americaTimeZones) Yellowknife() string {return \"America/Yellowknife\" }",
"func (canad canadaDeprecatedTimeZones) Newfoundland() string { return \"America/St_Johns\" }",
"func (o *Workloadv1Location) HasCityCode() bool {\n\tif o != nil && o.CityCode != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func (ameri americaDeprecatedTimeZones) Shiprock() string { return \"America/Denver\" }",
"func (europ europeDeprecatedTimeZones) Guernsey() string { return \"Europe/London\" }",
"func (europ europeTimeZones) Paris() string {return \"Europe/Paris\" }",
"func (austr australiaDeprecatedTimeZones) North() string { return \"Australia/Darwin\" }",
"func (europ europeTimeZones) Prague() string {return \"Europe/Prague\" }",
"func (e Environment) IsProduction() bool {\n\treturn e == EnvironmentProduction\n}",
"func IsStaging() bool {\n\treturn strings.ToLower(env) == staging\n}",
"func (ameri americaDeprecatedTimeZones) Coral_Harbour() string { return \"America/Atikokan\" }",
"func ProvinceNameContainsFold(v string) predicate.Province {\n\treturn predicate.Province(func(s *sql.Selector) {\n\t\ts.Where(sql.ContainsFold(s.C(FieldProvinceName), v))\n\t})\n}",
"func (ameri americaTimeZones) Halifax() string {return \"America/Halifax\" }",
"func (ameri americaTimeZones) Thunder_Bay() string {return \"America/Thunder_Bay\" }",
"func (ameri americaDeprecatedTimeZones) Fort_Wayne() string { return \"America/Indiana/Indianapolis\" }",
"func IsProd() bool {\n\treturn strings.ToLower(env) == prod\n}",
"func (antar antarcticaTimeZones) Casey() string {return \"Antarctica/Casey\" }",
"func (ameri americaTimeZones) Phoenix() string {return \"America/Phoenix\" }",
"func (e *Env) IsProd() bool {\n\treturn *e == Prod\n}"
] | [
"0.57962584",
"0.55487424",
"0.5342499",
"0.53356093",
"0.5323124",
"0.52885604",
"0.52777106",
"0.51641357",
"0.5091826",
"0.50897366",
"0.5077543",
"0.5004437",
"0.4968343",
"0.49320322",
"0.4922909",
"0.49174032",
"0.48947048",
"0.4862266",
"0.48564965",
"0.48328453",
"0.48234984",
"0.48220938",
"0.48134443",
"0.48007292",
"0.47968975",
"0.4774853",
"0.4754206",
"0.47528034",
"0.4738043",
"0.47317868",
"0.47308373",
"0.47108492",
"0.47015202",
"0.47003347",
"0.46902275",
"0.46893838",
"0.46612567",
"0.4655619",
"0.46429396",
"0.45964277",
"0.45938367",
"0.45828658",
"0.45811158",
"0.458022",
"0.45801643",
"0.45699772",
"0.45588917",
"0.4545636",
"0.45392868",
"0.45294103",
"0.4526522",
"0.45257062",
"0.4520775",
"0.45165378",
"0.45112103",
"0.45064336",
"0.4504261",
"0.4498675",
"0.44985884",
"0.4491675",
"0.44741604",
"0.4472736",
"0.4470905",
"0.4467719",
"0.4467015",
"0.44669983",
"0.4450153",
"0.44327918",
"0.4430086",
"0.44294557",
"0.44251367",
"0.4419691",
"0.4417782",
"0.44007948",
"0.43984035",
"0.43862662",
"0.43766013",
"0.43699917",
"0.43646237",
"0.43626574",
"0.43590373",
"0.43588188",
"0.435785",
"0.43501437",
"0.43453637",
"0.43399656",
"0.4335958",
"0.43347314",
"0.43343237",
"0.433278",
"0.4329386",
"0.43275934",
"0.43269977",
"0.43260166",
"0.4313718",
"0.4309028",
"0.43071812",
"0.43057945",
"0.43032187",
"0.43004486"
] | 0.6892222 | 0 |
Gets a conflict by location | func (s *State) GetConflict(location pb.ProvinceId) *Conflict {
return s.Conflicts[location]
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (b *BranchDAG) Conflict(conflictID ConflictID) *CachedConflict {\n\treturn &CachedConflict{CachedObject: b.conflictStorage.Load(conflictID.Bytes())}\n}",
"func NewConflictResolver(\n\tconfig Config, fbo *folderBranchOps) *ConflictResolver {\n\t// make a logger with an appropriate module name\n\tbranchSuffix := \"\"\n\tif fbo.branch() != data.MasterBranch {\n\t\tbranchSuffix = \" \" + string(fbo.branch())\n\t}\n\ttlfStringFull := fbo.id().String()\n\tlog := config.MakeLogger(\n\t\tfmt.Sprintf(\"CR %s%s\", tlfStringFull[:8], branchSuffix))\n\n\tcr := &ConflictResolver{\n\t\tconfig: config,\n\t\tfbo: fbo,\n\t\tprepper: folderUpdatePrepper{\n\t\t\tconfig: config,\n\t\t\tfolderBranch: fbo.folderBranch,\n\t\t\tblocks: &fbo.blocks,\n\t\t\tlog: log,\n\t\t\tvlog: config.MakeVLogger(log),\n\t\t},\n\t\tlog: traceLogger{log},\n\t\tdeferLog: traceLogger{log.CloneWithAddedDepth(1)},\n\t\tmaxRevsThreshold: crMaxRevsThresholdDefault,\n\t\tcurrInput: conflictInput{\n\t\t\tunmerged: kbfsmd.RevisionUninitialized,\n\t\t\tmerged: kbfsmd.RevisionUninitialized,\n\t\t},\n\t}\n\n\tif fbo.bType == standard && config.Mode().ConflictResolutionEnabled() {\n\t\tcr.startProcessing(libcontext.BackgroundContextWithCancellationDelayer())\n\t}\n\treturn cr\n}",
"func Conflict(msg string) Error {\n\te := err{msg: msg, code: conflictCode, group: generic, kind: conflict}\n\treturn &e\n}",
"func (r *Reply) Conflict() *Reply {\n\treturn r.Status(http.StatusConflict)\n}",
"func Conflict(id Identifier) Constraint {\n\treturn conflict(id)\n}",
"func (service *ResultService) ResolveConflict(in *proto_job.ResultRequest) (*proto_job.ResultReply, error) {\n\tresult, err := service.accessor.GetByID(uint(in.Id))\n\n\tif err != nil {\n\t\treturn nil, err\n\t} else if result.ID == 0 {\n\t\tlog.Fatal(\"Conflict not found in SetResultState\")\n\t}\n\n\tresult.State = \"RESOLVED\"\n\tresult.TaxonID = uint(in.TaxonId)\n\terr = service.accessor.Save(result)\n\n\treturn converters.ResultModelToProto(result), err\n}",
"func (s *Service) Conflicts() []string {\n\treturn s.conflicts\n}",
"func (b Bucket) GetLocation(args ...Params) (string, error) {\n\theader, query := getHeaderQuery(args)\n\tquery.Set(\"location\", \"\")\n\tvar location string\n\terr := b.Do(\"GET\", \"\", nil, &location, header, query)\n\treturn location, err\n}",
"func NewGetBannersConflict() *GetBannersConflict {\n\treturn &GetBannersConflict{}\n}",
"func (rc ResponseController) GetLocation(w http.ResponseWriter, r *http.Request, p httprouter.Params) {\n\tid := p.ByName(\"id\")\n\tfmt.Println(\"GET Request: ID:\", id)\n\n\tresp, err := getDBData(id, rc)\n\tif err != nil {\n\t w.WriteHeader(404)\n\t\tfmt.Println(\"Response: 404 Not Found\")\n\t\treturn\n\t}\n\n\tjsonOut, _ := json.Marshal(resp)\n\thttpResponse(w, jsonOut, 200)\n\tfmt.Println(\"Response:\", string(jsonOut), \" 200 OK\")\n}",
"func Conflict(message string, args ...interface{}) *Failure {\n\treturn NewWithStatus(fmt.Sprintf(message, args...), http.StatusConflict)\n}",
"func (d *dirInode) lookUpConflicting(\n\tctx context.Context,\n\tname string) (result LookUpResult, err error) {\n\tstrippedName := strings.TrimSuffix(name, ConflictingFileNameSuffix)\n\n\t// In order to a marked name to be accepted, we require the conflicting\n\t// directory to exist.\n\tvar dirResult LookUpResult\n\tdirResult, err = d.lookUpChildDir(ctx, strippedName)\n\tif err != nil {\n\t\terr = fmt.Errorf(\"lookUpChildDir for stripped name: %v\", err)\n\t\treturn\n\t}\n\n\tif !dirResult.Exists() {\n\t\treturn\n\t}\n\n\t// The directory name exists. Find the conflicting file.\n\tresult, err = d.lookUpChildFile(ctx, strippedName)\n\tif err != nil {\n\t\terr = fmt.Errorf(\"lookUpChildFile for stripped name: %v\", err)\n\t\treturn\n\t}\n\n\treturn\n}",
"func LocationGet(l int) Location {\n\ti := Locationmap[l]\n\treturn i\n}",
"func Conflict(message ...interface{}) Err {\n\treturn Boomify(http.StatusConflict, message...)\n}",
"func NewGetSecurityGroupConflict() *GetSecurityGroupConflict {\n\treturn &GetSecurityGroupConflict{}\n}",
"func (m *CarserviceMutation) Location() (r string, exists bool) {\n\tv := m.location\n\tif v == nil {\n\t\treturn\n\t}\n\treturn *v, true\n}",
"func (c *ConflictResolver) Resolve(conflict Conflict) (winner Body, resolutionType ConflictResolutionType, err error) {\n\n\twinner, err = c.crf(conflict)\n\tif err != nil {\n\t\treturn winner, \"\", err\n\t}\n\n\twinningRev, ok := winner[BodyRev]\n\tif !ok {\n\t\tc.stats.ConflictResultMergeCount.Add(1)\n\t\treturn winner, ConflictResolutionMerge, nil\n\t}\n\n\tlocalRev, ok := conflict.LocalDocument[BodyRev]\n\tif ok && localRev == winningRev {\n\t\tc.stats.ConflictResultLocalCount.Add(1)\n\t\treturn winner, ConflictResolutionLocal, nil\n\t}\n\n\tremoteRev, ok := conflict.RemoteDocument[BodyRev]\n\tif ok && remoteRev == winningRev {\n\t\tc.stats.ConflictResultRemoteCount.Add(1)\n\t\treturn winner, ConflictResolutionRemote, nil\n\t}\n\n\tbase.InfofCtx(context.Background(), base.KeyReplicate, \"Conflict resolver returned non-empty revID (%s) not matching local (%s) or remote (%s), treating result as merge.\", winningRev, localRev, remoteRev)\n\tc.stats.ConflictResultMergeCount.Add(1)\n\treturn winner, ConflictResolutionMerge, err\n}",
"func (c *SeaterController) Conflictf(format string, args ...interface{}) {\n\tc.TraceConflictf(nil, format, args...)\n}",
"func Conflict(err error) Response {\n\tmessage := \"already exists\"\n\tif err != nil {\n\t\tmessage = err.Error()\n\t}\n\treturn &errorResponse{\n\t\tcode: http.StatusConflict,\n\t\tmsg: message,\n\t}\n}",
"func NewGetBucketsConflict() *GetBucketsConflict {\n\treturn &GetBucketsConflict{}\n}",
"func NewGetBadgesConflict() *GetBadgesConflict {\n\treturn &GetBadgesConflict{}\n}",
"func Conflict(message string, errors []Error) {\n\tresponse := Response{\n\t\tStatus: http.StatusConflict,\n\t\tMessage: message,\n\t\tData: nil,\n\t\tErrors: errors,\n\t}\n\tpanic(response)\n}",
"func NewGetVariableRegistryUpdateRequestConflict() *GetVariableRegistryUpdateRequestConflict {\n\treturn &GetVariableRegistryUpdateRequestConflict{}\n}",
"func (service *ResultService) UnresolveConflict(in *proto_job.ResultRequest) (*proto_job.ResultReply, error) {\n\tresult, err := service.accessor.GetByID(uint(in.Id))\n\n\tif err != nil {\n\t\treturn nil, err\n\t} else if result.ID == 0 {\n\t\tlog.Fatal(\"Conflict not found in SetResultState\")\n\t}\n\n\tresult.State = \"NOT_FOUND\"\n\tresult.TaxonID = uint(in.TaxonId)\n\terr = service.accessor.Save(result)\n\n\treturn converters.ResultModelToProto(result), err\n}",
"func (s *State) IsSiteOfConflict(id pb.ProvinceId) bool {\n\tfor _, c := range s.Conflicts {\n\t\tfor _, l := range c.Locations() {\n\t\t\tif l == id {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t}\n\treturn false\n}",
"func (l Location) Locate() Location {\n\treturn l\n}",
"func (o JobStatusErrorOutput) Location() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobStatusError) *string { return v.Location }).(pulumi.StringPtrOutput)\n}",
"func (o LookupGroupResultOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupGroupResult) string { return v.Location }).(pulumi.StringOutput)\n}",
"func NewGetPayportConflict() *GetPayportConflict {\n\treturn &GetPayportConflict{}\n}",
"func (o LookupSharedImageResultOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupSharedImageResult) string { return v.Location }).(pulumi.StringOutput)\n}",
"func RenderConflict(w http.ResponseWriter, message ...interface{}) {\n\tRender(w, Conflict(message...))\n}",
"func (o JobStatusErrorResultOutput) Location() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobStatusErrorResult) *string { return v.Location }).(pulumi.StringPtrOutput)\n}",
"func (o SchedulingResponseOutput) LocationHint() pulumi.StringOutput {\n\treturn o.ApplyT(func(v SchedulingResponse) string { return v.LocationHint }).(pulumi.StringOutput)\n}",
"func ClientLocationGet(cll models.ClientListLocation, m *models.Message) {\n\tif cll.ID <= 0 {\n\t\tm.Code = http.StatusBadRequest\n\t\tm.Message = \"especifique localizacion\"\n\t\treturn\n\t}\n\tdb := configuration.GetConnection()\n\tdefer db.Close()\n\terr := getClientLocation(&cll, db)\n\tif err != nil {\n\t\tm.Code = http.StatusBadRequest\n\t\tm.Message = \"no se encotro descripcion de ubicacion\"\n\t\treturn\n\t}\n\tm.Code = http.StatusOK\n\tm.Message = \"descripcion de ubicacion creado\"\n\tm.Data = cll\n}",
"func NewGetConfigurationConflict() *GetConfigurationConflict {\n\treturn &GetConfigurationConflict{}\n}",
"func (o TlsInspectionPolicyOutput) Location() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *TlsInspectionPolicy) pulumi.StringPtrOutput { return v.Location }).(pulumi.StringPtrOutput)\n}",
"func (o ConflictResolutionPolicyResponseOutput) ConflictResolutionPath() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ConflictResolutionPolicyResponse) *string { return v.ConflictResolutionPath }).(pulumi.StringPtrOutput)\n}",
"func (o ExprResponseOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ExprResponse) string { return v.Location }).(pulumi.StringOutput)\n}",
"func (o ExprResponseOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ExprResponse) string { return v.Location }).(pulumi.StringOutput)\n}",
"func (o ExprResponseOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ExprResponse) string { return v.Location }).(pulumi.StringOutput)\n}",
"func (o ExprResponseOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ExprResponse) string { return v.Location }).(pulumi.StringOutput)\n}",
"func (o ExprResponseOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ExprResponse) string { return v.Location }).(pulumi.StringOutput)\n}",
"func (o ExprResponseOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ExprResponse) string { return v.Location }).(pulumi.StringOutput)\n}",
"func (p *simpleParser) parseConflictClause(r reporter) (clause *ast.ConflictClause) {\n\tclause = &ast.ConflictClause{}\n\n\tnext, ok := p.optionalLookahead(r)\n\tif !ok {\n\t\treturn\n\t}\n\n\t// ON\n\tif next.Type() == token.KeywordOn {\n\t\tclause.On = next\n\t\tp.consumeToken()\n\t} else {\n\t\t// if there's no 'ON' token, the empty production is assumed, which is\n\t\t// why no error is reported here.\n\t\treturn\n\t}\n\n\t// CONFLICT\n\tnext, ok = p.lookahead(r)\n\tif !ok {\n\t\treturn\n\t}\n\tif next.Type() == token.KeywordConflict {\n\t\tclause.Conflict = next\n\t\tp.consumeToken()\n\t} else {\n\t\tr.unexpectedToken(token.KeywordConflict)\n\t\treturn\n\t}\n\n\t// ROLLBACK, ABORT, FAIL, IGNORE, REPLACE\n\tnext, ok = p.lookahead(r)\n\tif !ok {\n\t\treturn\n\t}\n\tswitch next.Type() {\n\tcase token.KeywordRollback:\n\t\tclause.Rollback = next\n\t\tp.consumeToken()\n\tcase token.KeywordAbort:\n\t\tclause.Abort = next\n\t\tp.consumeToken()\n\tcase token.KeywordFail:\n\t\tclause.Fail = next\n\t\tp.consumeToken()\n\tcase token.KeywordIgnore:\n\t\tclause.Ignore = next\n\t\tp.consumeToken()\n\tcase token.KeywordReplace:\n\t\tclause.Replace = next\n\t\tp.consumeToken()\n\tdefault:\n\t\tr.unexpectedToken(token.KeywordRollback, token.KeywordAbort, token.KeywordFail, token.KeywordIgnore, token.KeywordReplace)\n\t}\n\treturn\n}",
"func (c *withNameAndCode) Location() []parser.Range {\n\treturn c.location\n}",
"func ErrConflictf(format string, arguments ...interface{}) *Status {\n\treturn &Status{Code: http.StatusConflict, Text: fmt.Sprintf(format, arguments...)}\n}",
"func ConflictFromMarshalUtil(marshalUtil *marshalutil.MarshalUtil) (conflict Conflict, err error) {\n\treadStartOffset := marshalUtil.ReadOffset()\n\n\tconflict = Conflict{}\n\tbytesID, err := marshalUtil.ReadBytes(int(ledgerstate.TransactionIDLength))\n\tif err != nil {\n\t\terr = errors.Errorf(\"failed to parse ID from conflict: %w\", err)\n\t\treturn\n\t}\n\tconflict.ID, _, err = ledgerstate.TransactionIDFromBytes(bytesID)\n\tif err != nil {\n\t\terr = errors.Errorf(\"failed to parse ID from bytes: %w\", err)\n\t\treturn\n\t}\n\n\tconflict.Opinion, err = OpinionFromMarshalUtil(marshalUtil)\n\tif err != nil {\n\t\terr = errors.Errorf(\"failed to parse opinion from conflict: %w\", err)\n\t\treturn\n\t}\n\n\t// return the number of bytes we processed\n\tparsedBytes := marshalUtil.ReadOffset() - readStartOffset\n\tif parsedBytes != ConflictLength {\n\t\terr = errors.Errorf(\"parsed bytes (%d) did not match expected size (%d): %w\", parsedBytes, ConflictLength, cerrors.ErrParseBytesFailed)\n\t\treturn\n\t}\n\n\treturn\n}",
"func (t *Table) GetConflicts(ctx context.Context) (conflict.ConflictSchema, durable.ConflictIndex, error) {\n\tif t.Format() == types.Format_DOLT {\n\t\tpanic(\"should use artifacts\")\n\t}\n\n\treturn t.table.GetConflicts(ctx)\n}",
"func NewGetBicsConflict() *GetBicsConflict {\n\treturn &GetBicsConflict{}\n}",
"func Conflict(w http.ResponseWriter, message ...interface{}) {\n\tboom(w, 409, message...)\n}",
"func (o LookupAccountResultOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupAccountResult) string { return v.Location }).(pulumi.StringOutput)\n}",
"func resolveConflict(ctx context.Context, localObject, remoteObject *GenericObject, local, remote Storage) *Change {\n\t// local object is older than remote object. We preserve this object\n\tif localObject.Modified.After(remoteObject.Modified) {\n\t\tfmt.Printf(\"We should add local [%s] to remote\\n\", remoteObject.ID)\n\t\treturn &Change{\n\t\t\tType: ChangeTypeSet,\n\t\t\tObject: localObject,\n\t\t\tStore: remote,\n\t\t\tSyncStatus: &SyncStatus{\n\t\t\t\tID: localObject.ID,\n\t\t\t\tLocalHash: localObject.Hash,\n\t\t\t\tRemoteHash: localObject.Hash,\n\t\t\t}}\n\t}\n\t// remote object is older than local object. Preserve older object.\n\tfmt.Printf(\"We should add remote [%s] to local\\n\", remoteObject.ID)\n\treturn &Change{\n\t\tType: ChangeTypeSet,\n\t\tObject: remoteObject,\n\t\tStore: local,\n\t\tSyncStatus: &SyncStatus{\n\t\t\tID: remoteObject.ID,\n\t\t\tLocalHash: remoteObject.Hash,\n\t\t\tRemoteHash: remoteObject.Hash,\n\t\t}}\n}",
"func DefaultConflictResolver(conflict Conflict) (result Body, err error) {\n\tlocalDeleted, _ := conflict.LocalDocument[BodyDeleted].(bool)\n\tremoteDeleted, _ := conflict.RemoteDocument[BodyDeleted].(bool)\n\tif localDeleted && !remoteDeleted {\n\t\treturn conflict.LocalDocument, nil\n\t}\n\tif remoteDeleted && !localDeleted {\n\t\treturn conflict.RemoteDocument, nil\n\t}\n\n\tlocalRevID, _ := conflict.LocalDocument[BodyRev].(string)\n\tremoteRevID, _ := conflict.RemoteDocument[BodyRev].(string)\n\tif compareRevIDs(localRevID, remoteRevID) >= 0 {\n\t\treturn conflict.LocalDocument, nil\n\t} else {\n\t\treturn conflict.RemoteDocument, nil\n\t}\n}",
"func (c *Corpus) gitLocation(v []byte) *time.Location {\n\tif loc, ok := c.zoneCache[string(v)]; ok {\n\t\treturn loc\n\t}\n\ts := string(v)\n\th, _ := strconv.Atoi(s[1:3])\n\tm, _ := strconv.Atoi(s[3:5])\n\teast := 1\n\tif v[0] == '-' {\n\t\teast = -1\n\t}\n\tloc := time.FixedZone(s, east*(h*3600+m*60))\n\tif c.zoneCache == nil {\n\t\tc.zoneCache = map[string]*time.Location{}\n\t}\n\tc.zoneCache[s] = loc\n\treturn loc\n}",
"func (o ConflictResolutionPolicyOutput) ConflictResolutionPath() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ConflictResolutionPolicy) *string { return v.ConflictResolutionPath }).(pulumi.StringPtrOutput)\n}",
"func (l *LogStore) findConflict(entries []*pb.Entry) uint64 {\n\t// TODO: 会有第0个冲突么?\n\tfor _, ne := range entries {\n\t\tif !l.matchTerm(ne.Index, ne.Term) {\n\t\t\tif ne.Index <= l.lastIndex() {\n\t\t\t\tl.logger.Info(\"log found conflict\",\n\t\t\t\t\tzap.Uint64(\"conflictIndex\", ne.Index),\n\t\t\t\t\tzap.Uint64(\"conflictTerm\", ne.Term),\n\t\t\t\t\tzap.Uint64(\"existTerm\", l.termOrPanic(l.term(ne.Index))))\n\t\t\t}\n\t\t\treturn ne.Index\n\t\t}\n\t}\n\treturn 0\n}",
"func (d *dht) GetLocation(key string) (leader *SlotAndNode, follower *SlotAndNode, err error) {\n\td.mu.RLock()\n\tdefer d.mu.RUnlock()\n\n\tif len(d.slotVsNodes) == 0 {\n\t\treturn nil, nil, ErrNotInitialised\n\t}\n\n\tslot1 := SlotID(d.hashSlot(key))\n\tnode1 := d.slotVsNodes[slot1]\n\tsn1 := &SlotAndNode{\n\t\tSlotID: slot1,\n\t\tNodeID: node1.NodeID,\n\t}\n\n\tslot2 := d.replicaSlot(slot1)\n\tnode2 := d.slotVsNodes[slot2]\n\tsn2 := &SlotAndNode{\n\t\tSlotID: slot2,\n\t\tNodeID: node2.NodeID,\n\t}\n\n\tif node1.SlotState == Leader {\n\t\treturn sn1, sn2, nil // sn1 is the leader\n\t} else {\n\t\treturn sn2, sn1, nil // sn2 is the leader\n\t}\n}",
"func (c *workflowsServiceV2BetaRESTClient) GetLocation(ctx context.Context, req *locationpb.GetLocationRequest, opts ...gax.CallOption) (*locationpb.Location, error) {\n\tbaseUrl, err := url.Parse(c.endpoint)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tbaseUrl.Path += fmt.Sprintf(\"/v2beta/%v\", req.GetName())\n\n\tparams := url.Values{}\n\tparams.Add(\"$alt\", \"json;enum-encoding=int\")\n\n\tbaseUrl.RawQuery = params.Encode()\n\n\t// Build HTTP headers from client and context metadata.\n\thds := []string{\"x-goog-request-params\", fmt.Sprintf(\"%s=%v\", \"name\", url.QueryEscape(req.GetName()))}\n\n\thds = append(c.xGoogHeaders, hds...)\n\thds = append(hds, \"Content-Type\", \"application/json\")\n\theaders := gax.BuildHeaders(ctx, hds...)\n\topts = append((*c.CallOptions).GetLocation[0:len((*c.CallOptions).GetLocation):len((*c.CallOptions).GetLocation)], opts...)\n\tunm := protojson.UnmarshalOptions{AllowPartial: true, DiscardUnknown: true}\n\tresp := &locationpb.Location{}\n\te := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {\n\t\tif settings.Path != \"\" {\n\t\t\tbaseUrl.Path = settings.Path\n\t\t}\n\t\thttpReq, err := http.NewRequest(\"GET\", baseUrl.String(), nil)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\thttpReq = httpReq.WithContext(ctx)\n\t\thttpReq.Header = headers\n\n\t\thttpRsp, err := c.httpClient.Do(httpReq)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer httpRsp.Body.Close()\n\n\t\tif err = googleapi.CheckResponse(httpRsp); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tbuf, err := io.ReadAll(httpRsp.Body)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif err := unm.Unmarshal(buf, resp); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn nil\n\t}, opts...)\n\tif e != nil {\n\t\treturn nil, e\n\t}\n\treturn resp, nil\n}",
"func LocalWinsConflictResolver(conflict Conflict) (winner Body, err error) {\n\treturn conflict.LocalDocument, nil\n}",
"func (r *Response) Conflict(v interface{}) {\n\tr.writeResponse(http.StatusConflict, v)\n}",
"func (r Response) Conflict(code string, payload Payload, header ...ResponseHeader) {\n\tr.Response(code, http.Conflict, payload, header...)\n}",
"func NewConflict(parameters ...wparams.ParamStorer) Error {\n\treturn newGenericError(nil, DefaultConflict, wparams.NewParamStorer(parameters...))\n}",
"func ThirdPartyInfoForLocation(ctx context.Context, client httprequest.Doer, url string) (bakery.ThirdPartyInfo, error) {\n\tdclient := newDischargeClient(url, client)\n\tinfo, err := dclient.DischargeInfo(ctx, &dischargeInfoRequest{})\n\tif err == nil {\n\t\treturn bakery.ThirdPartyInfo{\n\t\t\tPublicKey: *info.PublicKey,\n\t\t\tVersion: info.Version,\n\t\t}, nil\n\t}\n\tderr, ok := errgo.Cause(err).(*httprequest.DecodeResponseError)\n\tif !ok || derr.Response.StatusCode != http.StatusNotFound {\n\t\treturn bakery.ThirdPartyInfo{}, errgo.Mask(err)\n\t}\n\t// The new endpoint isn't there, so try the old one.\n\tpkResp, err := dclient.PublicKey(ctx, &publicKeyRequest{})\n\tif err != nil {\n\t\treturn bakery.ThirdPartyInfo{}, errgo.Mask(err)\n\t}\n\treturn bakery.ThirdPartyInfo{\n\t\tPublicKey: *pkResp.PublicKey,\n\t\tVersion: bakery.Version1,\n\t}, nil\n}",
"func (o BucketOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Bucket) pulumi.StringOutput { return v.Location }).(pulumi.StringOutput)\n}",
"func attemptLoadImpteamAndConflict(ctx context.Context, g *libkb.GlobalContext, impTeamName keybase1.ImplicitTeamDisplayName,\n\tnameWithoutConflict string, preResolveDisplayName string, skipCache bool) (conflicts []keybase1.ImplicitTeamConflictInfo, teamID keybase1.TeamID, hitCache bool, err error) {\n\n\tdefer g.CTrace(ctx,\n\t\tfmt.Sprintf(\"attemptLoadImpteamAndConflict(impName=%q,woConflict=%q,preResolve=%q,skipCache=%t)\", impTeamName, nameWithoutConflict, preResolveDisplayName, skipCache),\n\t\t&err)()\n\timp, hitCache, err := loadImpteam(ctx, g, nameWithoutConflict, impTeamName.IsPublic, skipCache)\n\tif err != nil {\n\t\treturn conflicts, teamID, hitCache, err\n\t}\n\tif len(imp.Conflicts) > 0 {\n\t\tg.Log.CDebugf(ctx, \"LookupImplicitTeam found %v conflicts\", len(imp.Conflicts))\n\t}\n\t// We will use this team. Changed later if we selected a conflict.\n\tvar foundSelectedConflict bool\n\tteamID = imp.TeamID\n\t// We still need to iterate over Conflicts because we are returning parsed\n\t// conflict list. So even if caller is not requesting a conflict team, go\n\t// through this loop.\n\tfor i, conflict := range imp.Conflicts {\n\t\tg.Log.CDebugf(ctx, \"| checking conflict: %+v (iter %d)\", conflict, i)\n\t\tconflictInfo, err := conflict.parse()\n\t\tif err != nil {\n\t\t\t// warn, don't fail\n\t\t\tg.Log.CDebugf(ctx, \"LookupImplicitTeam got conflict suffix: %v\", err)\n\t\t\tcontinue\n\t\t}\n\t\tif conflictInfo == nil {\n\t\t\tg.Log.CDebugf(ctx, \"| got unexpected nil conflictInfo (iter %d)\", i)\n\t\t\tcontinue\n\t\t}\n\t\tconflicts = append(conflicts, *conflictInfo)\n\n\t\tg.Log.CDebugf(ctx, \"| parsed conflict into conflictInfo: %+v\", *conflictInfo)\n\n\t\tif impTeamName.ConflictInfo != nil {\n\t\t\tmatch := libkb.FormatImplicitTeamDisplayNameSuffix(*impTeamName.ConflictInfo) == libkb.FormatImplicitTeamDisplayNameSuffix(*conflictInfo)\n\t\t\tif match {\n\t\t\t\tteamID = conflict.TeamID\n\t\t\t\tfoundSelectedConflict = true\n\t\t\t\tg.Log.CDebugf(ctx, \"| found conflict suffix match: %v\", teamID)\n\t\t\t} else {\n\t\t\t\tg.Log.CDebugf(ctx, \"| conflict suffix didn't match (teamID %v)\", conflict.TeamID)\n\t\t\t}\n\t\t}\n\t}\n\tif impTeamName.ConflictInfo != nil && !foundSelectedConflict {\n\t\t// We got the team but didn't find the specific conflict requested.\n\t\treturn conflicts, teamID, hitCache, NewTeamDoesNotExistError(\n\t\t\timpTeamName.IsPublic, \"could not find team with suffix: %v\", preResolveDisplayName)\n\t}\n\treturn conflicts, teamID, hitCache, nil\n}",
"func (self *Chromosome) Get(i int) (Location, error) {\n\tif i < len(self.Locations) {\n\t\treturn self.Locations[i], nil\n\t}\n\n\treturn Location{0, 0, \"error\", 0}, errors.New(\"Index out of boounds\")\n}",
"func NewGetActionConflict() *GetActionConflict {\n\treturn &GetActionConflict{}\n}",
"func NewGetBacsConflict() *GetBacsConflict {\n\treturn &GetBacsConflict{}\n}",
"func ResolveNameConflict(rawName string, used func(string) bool) string {\n\tname := rawName\n\tok := used(name)\n\tfor idx := 0; ok; idx++ {\n\t\tname = fmt.Sprintf(\"%s%d\", rawName, idx)\n\t\tok = used(name)\n\t}\n\treturn name\n}",
"func (l *Location) GetLocation() string {\n\tchkParsed(l)\n\treturn l.host + \"/\" + l.group + \"/\" + l.name + \":\" + l.version\n}",
"func (o RegistryGeoreplicationOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v RegistryGeoreplication) string { return v.Location }).(pulumi.StringOutput)\n}",
"func (m *Message) LOC() (*LOC, error) {\n\tps, err := m.Parse(\"LOC\")\n\tpst, ok := ps.(*LOC)\n\tif ok {\n\t\treturn pst, err\n\t}\n\treturn nil, err\n}",
"func (r *Repository) Location() borges.Location {\n\treturn r.location\n}",
"func fromProtocolLocation(ctx context.Context, v *cache.View, loc protocol.Location) (source.Range, error) {\n\tsourceURI, err := fromProtocolURI(loc.URI)\n\tif err != nil {\n\t\treturn source.Range{}, err\n\t}\n\tf, err := v.GetFile(ctx, sourceURI)\n\tif err != nil {\n\t\treturn source.Range{}, err\n\t}\n\ttok := f.GetToken(ctx)\n\treturn fromProtocolRange(tok, loc.Range), nil\n}",
"func getClientLocation(cll *models.ClientListLocation, db *gorm.DB) error {\n\terr := db.Select(\"id,created_at,updated_at,cod_collection,descrip\").First(cll).GetErrors()\n\tif len(err) != 0 {\n\t\treturn errors.New(\"no se encuentra\")\n\t}\n\treturn nil\n}",
"func GetDosenLocation(w http.ResponseWriter, r *http.Request) {\n\tvar lokasi StatusLocation\n\tuserID := pat.Param(r, \"id\")\n\n\tquery := `SELECT users.id as UserID, users.nama, status.posisi, status.last_update as LastUpdate\n\tFROM users JOIN status\n\tWHERE users.id = ? AND users.id = status.user_id;`\n\n\tif err := models.Dbm.SelectOne(&lokasi, query, userID); err != nil {\n\t\terrors.NewError(\"Can't fetch location\", http.StatusInternalServerError).WriteTo(w)\n\t\treturn\n\t}\n\n\tjson.NewEncoder(w).Encode(lokasi)\n}",
"func (c Conflict) String() string {\n\tstructBuilder := stringify.StructBuilder(\"Conflict:\")\n\tstructBuilder.AddField(stringify.StructField(\"ID\", c.ID.String()))\n\tstructBuilder.AddField(stringify.StructField(\"Opinion\", c.Opinion))\n\n\treturn structBuilder.String()\n}",
"func (r *Rook) Location() location.Location {\n\treturn r.loc\n}",
"func (m *LocationManager) GetLocation(ctx context.Context, locationID int) (*Location, error) {\n\t// Magic numbers here are sourced from:\n\t// - http://eveonline-third-party-documentation.readthedocs.io/en/latest/xmlapi/character/char_assetlist.html\n\t// - https://oldforums.eveonline.com/?a=topic&threadID=667487\n\tconst offsetOfficeIDToStationID = 6000001\n\tconst legacyOutpostStart = 60014861\n\tconst legacyOutpostEndInclusive = 60014928\n\tloc := &Location{}\n\tvar corpID int\n\tif c, ok := authContextFromContext(ctx); ok {\n\t\tcorpID = c.CorporationID()\n\t}\n\tvar err error\n\tswitch {\n\tcase locationID < 60000000:\n\t\t// locationID is a SystemID.\n\t\tloc.System, err = m.evedb.GetSystem(locationID)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\tcase locationID < 61000000:\n\t\t// locationID is a Station or legacy outpost.\n\t\tif locationID >= legacyOutpostStart && locationID <= legacyOutpostEndInclusive {\n\t\t\t// Conquerable outpost pre-dating player outposts. Not yet supported.\n\t\t\treturn nil, errors.Errorf(\"unable to determine details for locationID %d, conquerable outposts are not supported\", locationID)\n\t\t}\n\t\t// Not a legacy outpost, must be a station.\n\t\tloc.Station, err = m.evedb.GetStation(locationID)\n\n\tcase locationID < 66000000:\n\t\t// locationID is a conquerable outpost. Not yet supported.\n\t\treturn nil, errors.Errorf(\"unable to determine details for locationID %d, conquerable outposts are not supported\", locationID)\n\n\tcase locationID < 67000000:\n\t\t// locationID is a rented office.\n\t\tloc.Station, err = m.evedb.GetStation(locationID - offsetOfficeIDToStationID)\n\n\tdefault:\n\t\t// locationID might be a citadel.\n\t\ts, err := m.structure.GetStructure(ctx, locationID)\n\t\tif err == nil {\n\t\t\tloc.Structure = s\n\t\t\tbreak\n\t\t}\n\t\t// locationID is in a container somewhere.\n\t\tif corpID != 0 {\n\t\t\t// Corporation is opted-in, we can query for asset information.\n\t\t\tif ca, err := m.asset.GetCorporationAsset(ctx, corpID, locationID); err == nil {\n\t\t\t\t// Found an asset with the given locationID, call GetLocation on the asset's location.\n\t\t\t\tif loc, err = m.GetLocation(ctx, ca.LocationID); err != nil {\n\t\t\t\t\treturn nil, errors.Wrapf(err, \"unable to determine details for locationID %d\", locationID)\n\t\t\t\t}\n\t\t\t\tbreak\n\t\t\t} else if err != ErrCorpNotRegistered {\n\t\t\t\treturn nil, errors.Wrapf(err, \"unable to determine details for locationID %d\", locationID)\n\t\t\t}\n\t\t}\n\n\t}\n\n\t// Second round of resolution; ensure that loc.System gets populated.\n\tswitch {\n\tcase loc.System != nil:\n\t\t// do nothing\n\n\tcase loc.Structure != nil:\n\t\tif loc.System, err = m.evedb.GetSystem(int(loc.Structure.SystemID)); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\tcase loc.Station != nil:\n\t\tif loc.System, err = m.evedb.GetSystem(loc.Station.SystemID); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\tdefault:\n\t\treturn nil, errors.Errorf(\"unable to determine details for locationID %d\", locationID)\n\t}\n\n\t// Finally, populate the Constellation and Region information.\n\tif loc.Constellation, err = m.evedb.GetConstellation(loc.System.ConstellationID); err != nil {\n\t\treturn nil, err\n\t}\n\tif loc.Region, err = m.evedb.GetRegion(loc.System.RegionID); err != nil {\n\t\treturn nil, err\n\t}\n\tloc.LocationID = locationID\n\treturn loc, nil\n}",
"func (e *NotFoundError) GetCause() error { return e.cause }",
"func NewGetSepainstantConflict() *GetSepainstantConflict {\n\treturn &GetSepainstantConflict{}\n}",
"func Rlocation(s string) (string, error) {\n\tr, err := g.get()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn r.Rlocation(s)\n}",
"func (sys *System) GetLocation(ctx *Context, name string) (*Location, error) {\n\treturn sys.findLocation(ctx, name, false)\n}",
"func (o LookupProvisioningConfigResultOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupProvisioningConfigResult) string { return v.Location }).(pulumi.StringOutput)\n}",
"func (c *Client) Location(ip string) (*Location, error) {\n\tbody, err := c.getbody(\"geo\", ip)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tl := &Location{}\n\tif err := json.Unmarshal(body, l); err != nil {\n\t\treturn nil, err\n\t}\n\treturn l, nil\n}",
"func (i definitionsIndex) get(name, definitionFrom string) (*statusVariableDefinition, error) {\n\t// If no variable with this name exists return an error.\n\tif _, ok := i[name]; !ok {\n\t\treturn nil, errors.Errorf(\"no definitions found for variable %q\", name)\n\t}\n\n\t// If the definition exists for the specific definitionFrom, return it.\n\tif def, ok := i[name][definitionFrom]; ok {\n\t\treturn def, nil\n\t}\n\n\t// If definitionFrom is empty and there are no conflicts return a definition with an emptyDefinitionFrom.\n\tif definitionFrom == emptyDefinitionFrom {\n\t\tfor _, def := range i[name] {\n\t\t\tif !def.Conflicts {\n\t\t\t\treturn &statusVariableDefinition{\n\t\t\t\t\tName: def.Name,\n\t\t\t\t\tConflicts: def.Conflicts,\n\t\t\t\t\tClusterClassStatusVariableDefinition: &clusterv1.ClusterClassStatusVariableDefinition{\n\t\t\t\t\t\t// Return the definition with an empty definitionFrom. This ensures when a user gets\n\t\t\t\t\t\t// a definition with an emptyDefinitionFrom, the return value also has emptyDefinitionFrom.\n\t\t\t\t\t\t// This is used in variable defaulting to ensure variables that only need one value for multiple\n\t\t\t\t\t\t// definitions have an emptyDefinitionFrom.\n\t\t\t\t\t\tFrom: emptyDefinitionFrom,\n\t\t\t\t\t\tRequired: def.Required,\n\t\t\t\t\t\tSchema: def.Schema,\n\t\t\t\t\t},\n\t\t\t\t}, nil\n\t\t\t}\n\t\t\treturn nil, errors.Errorf(\"variable %q has conflicting definitions. It requires a non-empty `definitionFrom`\", name)\n\t\t}\n\t}\n\treturn nil, errors.Errorf(\"no definitions found for variable %q from %q\", name, definitionFrom)\n}",
"func (o GetRestorableDatabaseAccountsResultOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v GetRestorableDatabaseAccountsResult) string { return v.Location }).(pulumi.StringOutput)\n}",
"func getLocation(name string) (int, error) {\n\tquery := fmt.Sprintf(\"https://www.metaweather.com/api/location/search/?query=%s\", name)\n\tresp, errGet := http.Get(query)\n\tif errGet != nil {\n\t\treturn 0, errGet\n\t}\n\tdefer resp.Body.Close()\n\tbody, errBody := ioutil.ReadAll(resp.Body)\n\tif errBody != nil {\n\t\treturn 0, errBody\n\t}\n\tlocations := []Location{}\n\terrJson := json.Unmarshal(body, &locations)\n\t// I feel lucky!\n\tif len(locations) < 1 {\n\t\treturn 0, fmt.Errorf(\"no location %s\", name)\n\t}\n\tlocaton := locations[0].Woeid\n\tfmt.Printf(\"%v\\n\", locations[0])\n\treturn locaton, errJson\n}",
"func (pkg *Package) Loc() *Location {\n\treturn pkg.Location\n}",
"func (c *restClient) GetLocation(ctx context.Context, req *locationpb.GetLocationRequest, opts ...gax.CallOption) (*locationpb.Location, error) {\n\tbaseUrl, err := url.Parse(c.endpoint)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tbaseUrl.Path += fmt.Sprintf(\"/v2beta3/%v\", req.GetName())\n\n\tparams := url.Values{}\n\tparams.Add(\"$alt\", \"json;enum-encoding=int\")\n\n\tbaseUrl.RawQuery = params.Encode()\n\n\t// Build HTTP headers from client and context metadata.\n\thds := []string{\"x-goog-request-params\", fmt.Sprintf(\"%s=%v\", \"name\", url.QueryEscape(req.GetName()))}\n\n\thds = append(c.xGoogHeaders, hds...)\n\thds = append(hds, \"Content-Type\", \"application/json\")\n\theaders := gax.BuildHeaders(ctx, hds...)\n\topts = append((*c.CallOptions).GetLocation[0:len((*c.CallOptions).GetLocation):len((*c.CallOptions).GetLocation)], opts...)\n\tunm := protojson.UnmarshalOptions{AllowPartial: true, DiscardUnknown: true}\n\tresp := &locationpb.Location{}\n\te := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {\n\t\tif settings.Path != \"\" {\n\t\t\tbaseUrl.Path = settings.Path\n\t\t}\n\t\thttpReq, err := http.NewRequest(\"GET\", baseUrl.String(), nil)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\thttpReq = httpReq.WithContext(ctx)\n\t\thttpReq.Header = headers\n\n\t\thttpRsp, err := c.httpClient.Do(httpReq)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer httpRsp.Body.Close()\n\n\t\tif err = googleapi.CheckResponse(httpRsp); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tbuf, err := io.ReadAll(httpRsp.Body)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif err := unm.Unmarshal(buf, resp); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn nil\n\t}, opts...)\n\tif e != nil {\n\t\treturn nil, e\n\t}\n\treturn resp, nil\n}",
"func (o LookupSnapshotPolicyResultOutput) Location() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupSnapshotPolicyResult) string { return v.Location }).(pulumi.StringOutput)\n}",
"func (m *CarserviceMutation) OldLocation(ctx context.Context) (v string, err error) {\n\tif !m.op.Is(OpUpdateOne) {\n\t\treturn v, fmt.Errorf(\"OldLocation is allowed only on UpdateOne operations\")\n\t}\n\tif m.id == nil || m.oldValue == nil {\n\t\treturn v, fmt.Errorf(\"OldLocation requires an ID field in the mutation\")\n\t}\n\toldValue, err := m.oldValue(ctx)\n\tif err != nil {\n\t\treturn v, fmt.Errorf(\"querying old value for OldLocation: %w\", err)\n\t}\n\treturn oldValue.Location, nil\n}",
"func Location() (string, error) {\n\tslice, err := loader(\"locations\")\n\tcheckErr(err)\n\treturn random(slice), nil\n}",
"func (e ToolEdges) LocationOrErr() (*Location, error) {\n\tif e.loadedTypes[0] {\n\t\tif e.Location == nil {\n\t\t\t// The edge location was loaded in eager-loading,\n\t\t\t// but was not found.\n\t\t\treturn nil, &NotFoundError{label: location.Label}\n\t\t}\n\t\treturn e.Location, nil\n\t}\n\treturn nil, &NotLoadedError{edge: \"location\"}\n}",
"func (b *fakeBosClient) GetBucketLocation(bucket string) (string, error) {\n\treturn \"\", fmt.Errorf(\"test\")\n}",
"func (o AllocationSpecificSKUAllocationReservedInstancePropertiesResponseOutput) LocationHint() pulumi.StringOutput {\n\treturn o.ApplyT(func(v AllocationSpecificSKUAllocationReservedInstancePropertiesResponse) string {\n\t\treturn v.LocationHint\n\t}).(pulumi.StringOutput)\n}",
"func (imp *Import) Loc() *Location {\n\treturn imp.Location\n}",
"func (c StubClient) GetLookup(ctx context.Context, host string) api.Lookup {\n\tlookup := api.Lookup{Name: host}\n\n\tf, err := os.Open(c.File)\n\tif err != nil {\n\t\tlookup.Error = err\n\t\treturn lookup\n\t}\n\tdefer f.Close()\n\n\tlookup.Error = json.NewDecoder(f).Decode(&lookup.Domain)\n\n\treturn lookup\n}",
"func ErrConflict(err error) APIError {\n\treturn APIError{http.StatusConflict, err}\n}",
"func NewConflict(msg string) error {\n\treturn &ELBError{\n\t\tmsg: msg,\n\t\tCode: http.StatusConflict,\n\t}\n}"
] | [
"0.576138",
"0.5587241",
"0.5540109",
"0.54999095",
"0.5465813",
"0.5445901",
"0.5393411",
"0.52668756",
"0.524657",
"0.52306247",
"0.5203719",
"0.5173703",
"0.51696897",
"0.5167564",
"0.5155827",
"0.5137515",
"0.51233417",
"0.5119585",
"0.51193464",
"0.50692385",
"0.50082576",
"0.49725062",
"0.49606735",
"0.49527866",
"0.4952338",
"0.49466145",
"0.49432567",
"0.4936743",
"0.4925249",
"0.49201784",
"0.49195373",
"0.49121574",
"0.48998454",
"0.489861",
"0.48632082",
"0.48553732",
"0.4840901",
"0.48406553",
"0.48406553",
"0.48406553",
"0.48406553",
"0.48406553",
"0.48406553",
"0.483933",
"0.48390692",
"0.48317042",
"0.48257223",
"0.48230937",
"0.48203087",
"0.48188394",
"0.4815585",
"0.4814598",
"0.47921497",
"0.47820818",
"0.47815967",
"0.47781765",
"0.4776742",
"0.4768354",
"0.47642612",
"0.47571987",
"0.4740483",
"0.47391585",
"0.47336942",
"0.47277853",
"0.47216395",
"0.4719286",
"0.47166932",
"0.47129267",
"0.4705047",
"0.47006062",
"0.46964848",
"0.46958202",
"0.46892458",
"0.46807188",
"0.46793944",
"0.4672927",
"0.4669653",
"0.46657076",
"0.46635187",
"0.46622568",
"0.46586406",
"0.46572685",
"0.46555147",
"0.46520537",
"0.46379435",
"0.4636901",
"0.4635786",
"0.4633057",
"0.4631931",
"0.46299544",
"0.46295658",
"0.4624225",
"0.46152678",
"0.46085542",
"0.46008283",
"0.4599468",
"0.4596737",
"0.45942947",
"0.45934615",
"0.4590905"
] | 0.6948077 | 0 |
ACTIONS Creates a new conventional war | func (s *State) NewConventionalWar(defenders []pb.ProvinceId, attackers []pb.ProvinceId, locations []pb.ProvinceId) bool { // TODO: Error return
for _, d := range defenders {
if s.IsAtWar(d) || s.IsSiteOfConflict(d) {
return false
}
}
for _, a := range attackers {
if s.IsAtWar(a) || s.IsSiteOfConflict(a) {
return false
}
}
for _, l := range locations {
if s.IsAtWar(l) || s.IsSiteOfConflict(l) {
return false
}
}
// TODO: Logic for joining wars?
c := &Conflict{
name: "War!", // TODO
length: 0,
attackers: Faction{
members: attackers,
progress: 0,
},
defenders: Faction{
members: defenders,
progress: 0,
},
goal: s.Settings().GetConflictGoal(pb.ConflictType_CONVENTIONAL_WAR),
base_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_CONVENTIONAL_WAR),
locations: locations,
conflict_type: pb.ConflictType_CONVENTIONAL_WAR,
}
// For now it maps only to the first location
s.Conflicts[locations[0]] = c
return true
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (act *CreateAction) Do() error {\n\tif err := act.genAppID(); err != nil {\n\t\treturn act.Err(pbcommon.ErrCode_E_BS_SYSTEM_UNKONW, err.Error())\n\t}\n\n\t// create app.\n\tif errCode, errMsg := act.create(); errCode != pbcommon.ErrCode_E_OK {\n\t\treturn act.Err(errCode, errMsg)\n\t}\n\n\t// create default cluster/zone.\n\tif err := act.genClusterID(); err != nil {\n\t\treturn act.Err(pbcommon.ErrCode_E_BS_SYSTEM_UNKONW, err.Error())\n\t}\n\tif errCode, errMsg := act.createDefaultCluster(); errCode != pbcommon.ErrCode_E_OK {\n\t\treturn act.Err(errCode, errMsg)\n\t}\n\n\tif err := act.genZoneID(); err != nil {\n\t\treturn act.Err(pbcommon.ErrCode_E_BS_SYSTEM_UNKONW, err.Error())\n\t}\n\tif errCode, errMsg := act.createDefaultZone(); errCode != pbcommon.ErrCode_E_OK {\n\t\treturn act.Err(errCode, errMsg)\n\t}\n\treturn nil\n}",
"func (wds *WeaponAISystem) New(w *ecs.World) {\n\n}",
"func (s *State) NewColonialWar(target pb.ProvinceId) bool { // TODO: Error return\n\tif s.IsAtWar(target) || s.IsSiteOfConflict(target) || s.Get(target).Occupier() != pb.ProvinceId_NONE {\n\t\treturn false\n\t}\n\tc := &Conflict{\n\t\tname: \"Colonial War\", // TODO\n\t\tlength: 0,\n\t\tattackers: Faction{\n\t\t\t// Dissidents\n\t\t\tprogress: 0,\n\t\t},\n\t\tdefenders: Faction{\n\t\t\tmembers: []pb.ProvinceId{s.Get(target).Occupier()},\n\t\t\tprogress: 0,\n\t\t},\n\t\tgoal: s.Settings().GetConflictGoal(pb.ConflictType_COLONIAL_WAR),\n\t\tbase_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_COLONIAL_WAR),\n\t\tlocations: []pb.ProvinceId{target},\n\t\tconflict_type: pb.ConflictType_COLONIAL_WAR,\n\t}\n\ts.Conflicts[target] = c\n\treturn true\n}",
"func (s *State) NewCivilWar(target pb.ProvinceId) bool { // TODO: Error return\n\tif s.IsAtWar(target) || s.IsSiteOfConflict(target) {\n\t\treturn false\n\t}\n\tc := &Conflict{\n\t\tname: \"Civil War\", // TODO\n\t\tlength: 0,\n\t\tattackers: Faction{\n\t\t\trebels: *(s.Get(target).Dissidents()),\n\t\t\tprogress: 0,\n\t\t},\n\t\tdefenders: Faction{\n\t\t\tmembers: []pb.ProvinceId{target},\n\t\t\tprogress: 0,\n\t\t},\n\t\tgoal: s.Settings().GetConflictGoal(pb.ConflictType_CIVIL_WAR),\n\t\tbase_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_CIVIL_WAR),\n\t\tlocations: []pb.ProvinceId{target},\n\t\tconflict_type: pb.ConflictType_CIVIL_WAR,\n\t}\n\ts.Conflicts[target] = c\n\treturn true\n}",
"func CreateAction(r *Raptor) *Action {\n\treturn &Action{\n\t\tRaptor: r,\n\t}\n}",
"func Create(w http.ResponseWriter, r *http.Request) {\n\tc := flight.Context(w, r)\n\tnow := time.Now()\n\n\tv := c.View.New(\"code/create\")\n\tv.Vars[\"curdate\"] = now.Format(\"2006-01-02\")\n\t//c.Repopulate(v.Vars, \"name\")\n\tv.Render(w, r)\n}",
"func createTeam(w http.ResponseWriter, r *http.Request) {\n\tteam := models.NewTeam(\"\")\n\tskue.Create(view, team, w, r)\n}",
"func Create(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {\n\tfmt.Fprint(w, \"Welcome!\\n\")\n}",
"func CreateProject(w http.ResponseWriter, r *http.Request) {\n\t// Get incoming data, content n' stuff\n\t// Pass those data and create em'\n\t// Return new project and response\n}",
"func AddNewAction(typ int,uuid string, key string, description string){\n\n\telem, ok := ActionBuffer[typ].Description[uuid]\n if !ok {\n elem = make(map[string]string)\n\t if(ActionBuffer[typ].Description == nil){\n\t\t\tActionBuffer[typ].Description=make(map[string]map[string]string)\n\t }\n ActionBuffer[typ].Description[uuid] = elem\n }\n\tActionBuffer[typ].Description[uuid][key]=description\n}",
"func create(w http.ResponseWriter, req *http.Request) {\n\tresponse := \"\"\n\tswitch req.RequestURI {\n\tcase \"/get/accounts\":\n\t\tmapD := map[string]int{\"apple\": 5, \"lettuce\": 7}\n\t\tmapB, _ := json.Marshal(mapD)\n\t\tresponse = string(mapB)\n\t\tbreak\n\tdefault:\n\t\tr, _ := json.Marshal(\"Request not found\")\n\t\tresponse = string(r)\n\t\tbreak\n\t}\n\n\tcontext := Context{Title: response}\n\trender(w, \"api\", context)\n}",
"func CreateNewSchool(c echo.Context) error {\n\n\tdb, ok := c.Get(\"db\").(*gorm.DB)\n\n\tif !ok {\n\t\treturn c.NoContent(http.StatusInternalServerError)\n\t}\n\n\tvar modelview view.CreateNewSchoolModelView\n\n\tc.Bind(&modelview)\n\n\tcanteens := make([]canteen.Canteen, len(modelview.Canteens))\n\n\tfor index := range modelview.Canteens {\n\n\t\tlocation := canteen.Location{}\n\n\t\tlocation.Latitude = modelview.Canteens[index].Location.Latitude\n\n\t\tlocation.Longitude = modelview.Canteens[index].Location.Longitude\n\n\t\tcanteen, cerr := canteen.New(modelview.Canteens[index].Name, location)\n\t\tif cerr != nil {\n\n\t\t\tmodelview := customerrorview.UsingFieldErrorToErrorMessageModelView(*cerr)\n\n\t\t\treturn c.JSON(http.StatusBadRequest, modelview)\n\t\t}\n\t\tcanteens[index] = canteen\n\t}\n\n\tschool, serr := model.New(modelview.Acronym, modelview.Name, canteens)\n\n\tif serr != nil {\n\n\t\tmodelview := customerrorview.UsingFieldErrorToErrorMessageModelView(*serr)\n\n\t\treturn c.JSON(http.StatusBadRequest, modelview)\n\t}\n\n\tvar existingSchool model.School\n\n\t// Finds if school with same acronym already exists\n\n\terr := db.Where(map[string]interface{}{\"acronym\": modelview.Acronym}).First(&existingSchool).Error\n\n\tif err == nil {\n\n\t\tcerr := customerrormodel.FieldError{Field: \"acronym\", Model: \"school\", Explanation: \"a school with the same acronym already exists\"}\n\n\t\tmodelview := customerrorview.UsingFieldErrorToErrorMessageModelView(cerr)\n\n\t\treturn c.JSON(http.StatusBadRequest, modelview)\n\t}\n\n\t// Creates school\n\tdb.Create(&school)\n\n\tmodelviewres := view.ToGetDetailedSchoolInformationModelView(school)\n\n\treturn c.JSON(http.StatusCreated, modelviewres)\n\n}",
"func (c *WorkitemtypeController) Create(ctx *app.CreateWorkitemtypeContext) error {\n\treturn application.Transactional(c.db, func(appl application.Application) error {\n\t\tvar fields = map[string]app.FieldDefinition{}\n\n\t\tfor key, fd := range ctx.Payload.Fields {\n\t\t\tfields[key] = *fd\n\t\t}\n\t\twit, err := appl.WorkItemTypes().Create(ctx.Context, ctx.Payload.ExtendedTypeName, ctx.Payload.Name, fields)\n\n\t\tif err != nil {\n\t\t\tjerrors, httpStatusCode := jsonapi.ErrorToJSONAPIErrors(err)\n\t\t\treturn ctx.ResponseData.Service.Send(ctx.Context, httpStatusCode, jerrors)\n\t\t}\n\t\tctx.ResponseData.Header().Set(\"Location\", app.WorkitemtypeHref(wit.Name))\n\t\treturn ctx.Created(wit)\n\t})\n}",
"func doCreate(enviro env.Project, appJson, rootDir, appName, vendorDir, constraints string) error {\n\tfmt.Printf(\"Creating initial project structure, this might take a few seconds ... \\n\")\n\tdescriptor, err := ParseAppDescriptor(appJson)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif appName != \"\" {\n\t\t// override the application name\n\n\t\taltJson := strings.Replace(appJson, `\"`+descriptor.Name+`\"`, `\"`+appName+`\"`, 1)\n\t\taltDescriptor, err := ParseAppDescriptor(altJson)\n\n\t\t//see if we can get away with simple replace so we don't reorder the existing json\n\t\tif err == nil && altDescriptor.Name == appName {\n\t\t\tappJson = altJson\n\t\t} else {\n\t\t\t//simple replace didn't work so we have to unmarshal & re-marshal the supplied json\n\t\t\tvar appObj map[string]interface{}\n\t\t\terr := json.Unmarshal([]byte(appJson), &appObj)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tappObj[\"name\"] = appName\n\n\t\t\tupdApp, err := json.MarshalIndent(appObj, \"\", \" \")\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tappJson = string(updApp)\n\t\t}\n\n\t\tdescriptor.Name = appName\n\t} else {\n\t\tappName = descriptor.Name\n\t\trootDir = filepath.Join(rootDir, appName)\n\t}\n\n\terr = enviro.Init(rootDir)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = enviro.Create(false, \"\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = fgutil.CreateFileFromString(filepath.Join(rootDir, \"flogo.json\"), appJson)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// create initial structure\n\tappDir := filepath.Join(enviro.GetSourceDir(), descriptor.Name)\n\tos.MkdirAll(appDir, os.ModePerm)\n\n\t// Validate structure\n\terr = enviro.Open()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Create the dep manager\n\tdepManager := &dep.DepManager{Env: enviro}\n\n\t// Initialize the dep manager\n\terr = depManager.Init()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Create initial files\n\tdeps, err := config.ExtractAllDependencies(appJson)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcreateMainGoFile(appDir, \"\")\n\tcreateImportsGoFile(appDir, deps)\n\n\t// Add constraints\n\tif len(constraints) > 0 {\n\t\tnewConstraints := []string{\"-add\"}\n\t\tnewConstraints = append(newConstraints, strings.Split(constraints, \",\")...)\n\t\terr = depManager.Ensure(newConstraints...)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tensureArgs := []string{}\n\n\tif len(vendorDir) > 0 {\n\t\t// Copy vendor directory\n\t\tfgutil.CopyDir(vendorDir, enviro.GetVendorDir())\n\t\t// Do not touch vendor folder when ensuring\n\t\tensureArgs = append(ensureArgs, \"-no-vendor\")\n\t}\n\n\t// Sync up\n\terr = depManager.Ensure(ensureArgs...)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func (t tApp) New(w http.ResponseWriter, r *http.Request, ctr, act string) *contr.App {\n\tc := &contr.App{}\n\tc.Controllers = Controllers.New(w, r, ctr, act)\n\treturn c\n}",
"func (o *DesktopApp) Create() (*restapi.SliceResponse, error) {\n\tvar queryArg = make(map[string]interface{})\n\n\tqueryArg[\"ID\"] = []string{o.TemplateName}\n\tLogD.Printf(\"Generated Map for Create(): %+v\", queryArg)\n\n\tresp, err := o.client.CallSliceAPI(o.apiCreate, queryArg)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif !resp.Success {\n\t\treturn nil, errors.New(resp.Message)\n\t}\n\n\treturn resp, nil\n}",
"func CreateDeploy(w http.ResponseWriter, r *http.Request) {\n\tdeploy := models.Deploy{}\n\terr := json.NewDecoder(r.Body).Decode(&deploy)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\t// Todo validate requirement id\n\n\terr = models.InsertDeploy(deploy)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tw.WriteHeader(200)\n\terr = json.NewEncoder(w).Encode(deploy)\n\tif err != nil {\n\t\tpanic(error(err))\n\t}\n}",
"func CreateApplication() *Alpha {\n app := &Alpha{}\n app.Request = &Request{}\n app.Response = &Response{}\n app.init()\n return app\n}",
"func (app *application) createBoard(w http.ResponseWriter, r *http.Request) {\n\tplayerID := app.session.GetInt(r, \"authenticatedPlayerID\")\n\t// POST /create/board\n\terr := r.ParseForm()\n\tif err != nil {\n\t\tapp.clientError(w, http.StatusBadRequest)\n\t\treturn\n\t}\n\t\n\t// Create a new forms.Form struct containing the POSTed data\n\t// - Use the validation methods to check the content\n\tform := forms.New(r.PostForm)\n\tform.Required(\"boardName\")\n\tform.MaxLength(\"boardName\", 35)\n\n\t// Before returning to the caller, let's check the validity of the ship coordinates\n\t// - If anything is amiss, we can send those errors back as well\n\tvar carrier []string\n\tcInd := 0\n\tvar battleship []string\n\tbInd := 0\n\tvar cruiser []string\n\trInd := 0\n\tvar submarine []string\n\tsInd := 0\n\tvar destroyer []string\n\tdInd := 0\n\t// Loop through the POSTed data, checking for their values\n\t// - Add coordinates to a given ship's array\n for row := 1; row < 11; row++ {\n\t\trowStr := strconv.Itoa(row)\n \t\tfor _, col := range \"ABCDEFGHIJ\" {\n\t\t\tcolStr := string(col)\n\t\t\tshipXY := form.Get(\"shipXY\"+rowStr+colStr)\n\t\t\tif shipXY != \"\" {\n\t\t\t\t// Only I, the program, should be permitted to update this as a player enters a game\n\t\t\t\t//battleID := r.URL.Query().Get(\"battleID\")\n\t\t\t\t// playerID should be gotten from somewhere else\n\t\t\t\t//playerID = r.PostForm(\"playerID\")\n\n\t\t\t\t// Upper the values to simplify testing\n\t\t\t\t// - Build the slices containing the submitted coordinates\n\t\t\t\tswitch strings.ToUpper(shipXY) {\n\t\t\t\tcase \"C\":\n\t\t\t\t\tcarrier = append(carrier, rowStr+\",\"+colStr)\n\t\t\t\t\tcInd += 1\n\t\t\t\tcase \"B\":\n\t\t\t\t\tbattleship = append(battleship, rowStr+\",\"+colStr)\n\t\t\t\t\tbInd += 1\n\t\t\t\tcase \"R\":\n\t\t\t\t\tcruiser = append(cruiser, rowStr+\",\"+colStr)\n\t\t\t\t\trInd += 1\n\t\t\t\tcase \"S\":\n\t\t\t\t\tsubmarine = append(submarine, rowStr+\",\"+colStr)\n\t\t\t\t\tsInd += 1\n\t\t\t\tcase \"D\":\n\t\t\t\t\tdestroyer = append(destroyer, rowStr+\",\"+colStr)\n\t\t\t\t\tdInd += 1\n\t\t\t\tdefault:\n\t\t\t\t\t// Add this to Form's error object?\n\t\t\t\t\t// - I don't think it helps to tell the user this info\n\t\t\t\t\t// unless they're struggling to build the board\n\t\t\t\t\tfmt.Println(\"Unsupported character:\", shipXY)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Test our numbers, update .Valid property of our Form object\n\tform.RequiredNumberOfItems(\"carrier\", 5, cInd)\n\tform.RequiredNumberOfItems(\"battleship\", 4, bInd)\n\tform.RequiredNumberOfItems(\"cruiser\", 3, rInd)\n\tform.RequiredNumberOfItems(\"submarine\", 3, sInd)\n\tform.RequiredNumberOfItems(\"destroyer\", 2, dInd)\n\n\tform.ValidNumberOfItems(carrier, \"carrier\")\n\tform.ValidNumberOfItems(battleship, \"battleship\")\n\tform.ValidNumberOfItems(cruiser, \"cruiser\")\n\tform.ValidNumberOfItems(submarine, \"submarine\")\n\tform.ValidNumberOfItems(destroyer, \"destroyer\")\n\n\t// If our validation has failed anywhere along the way\n\t// - Take the user back to their board\n\tif !form.Valid() {\n\t\t// helper\n\t\tapp.renderBoard(w, r, \"create.board.page.tmpl\", &templateDataBoard{Form: form})\n\t\treturn\n\t}\n\n\t// If we've made it to here, then we have a good set of coordinates for a ship\n\t// - We have a boardID, playerID, shipName, and a bunch of coordinates\n\n\t// Create a new board, return boardID\n\tboardID, _ := app.boards.Create(playerID, form.Get(\"boardName\"))\n\n\t// Carrier\n\t_, err = app.boards.Insert(playerID, 
boardID, \"carrier\", carrier)\n\tif err != nil {\n\t\tapp.serverError(w, err)\n\t\treturn\n\t}\n\n\t// Battleship\n\t_, err = app.boards.Insert(playerID, boardID, \"battleship\", battleship)\n\tif err != nil {\n\t\tapp.serverError(w, err)\n\t\treturn\n\t}\n\n\t// Cruiser\n\t_, err = app.boards.Insert(playerID, boardID, \"cruiser\", cruiser)\n\tif err != nil {\n\t\tapp.serverError(w, err)\n\t\treturn\n\t}\n\n\t// Submarine\n\t_, err = app.boards.Insert(playerID, boardID, \"submarine\", submarine)\n\tif err != nil {\n\t\tapp.serverError(w, err)\n\t\treturn\n\t}\n\n\t// Destroyer\n\t_, err = app.boards.Insert(playerID, boardID, \"destroyer\", destroyer)\n\tif err != nil {\n\t\tapp.serverError(w, err)\n\t\treturn\n\t}\n\n\t// Add status message to session data; create new if one doesn't exist\n\tapp.session.Put(r, \"flash\", \"Board successfully created!\")\n\t// Send user back to list of boards\n\thttp.Redirect(w, r, \"/board/list\", http.StatusSeeOther)\n}",
"func Create (appName string) {\n\n checkGopath ()\n checkContainer (appName)\n\n app := Application { Name: appName }\n\n app.createContainer ()\n\n err := app.copyFileTree (\n GOPATH + slash + applicationTemplatesPath,\n GOPATH_SRC + app.Name,\n )\n\n if err != nil {\n log.Fatal (err)\n }\n}",
"func (l *ActionList) Create(doc Document) *ActionList {\n\treturn l.add(&Action{kind: driver.Create, doc: doc})\n}",
"func (s *server) handleStoryboardCreate() http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tuserID := r.Context().Value(contextKeyUserID).(string)\n\t\tvars := mux.Vars(r)\n\n\t\tbody, bodyErr := ioutil.ReadAll(r.Body) // check for errors\n\t\tif bodyErr != nil {\n\t\t\tlog.Println(\"error in reading request body: \" + bodyErr.Error() + \"\\n\")\n\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\n\t\tvar keyVal struct {\n\t\t\tStoryboardName string `json:\"storyboardName\"`\n\t\t}\n\t\tjson.Unmarshal(body, &keyVal) // check for errors\n\n\t\tnewStoryboard, err := s.database.CreateStoryboard(userID, keyVal.StoryboardName)\n\t\tif err != nil {\n\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\n\t\t// if storyboard created with team association\n\t\tTeamID, ok := vars[\"teamId\"]\n\t\tif ok {\n\t\t\tOrgRole := r.Context().Value(contextKeyOrgRole)\n\t\t\tDepartmentRole := r.Context().Value(contextKeyDepartmentRole)\n\t\t\tTeamRole := r.Context().Value(contextKeyTeamRole).(string)\n\t\t\tvar isAdmin bool\n\t\t\tif DepartmentRole != nil && DepartmentRole.(string) == \"ADMIN\" {\n\t\t\t\tisAdmin = true\n\t\t\t}\n\t\t\tif OrgRole != nil && OrgRole.(string) == \"ADMIN\" {\n\t\t\t\tisAdmin = true\n\t\t\t}\n\n\t\t\tif isAdmin == true || TeamRole != \"\" {\n\t\t\t\terr := s.database.TeamAddStoryboard(TeamID, newStoryboard.StoryboardID)\n\n\t\t\t\tif err != nil {\n\t\t\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\ts.respondWithJSON(w, http.StatusOK, newStoryboard)\n\t}\n}",
"func createWorld() {\n\tspace = chipmunk.NewSpace()\n\tspace.Gravity = vect.Vect{0, -900}\n\n\tstaticBody := chipmunk.NewBodyStatic()\n\tstaticLines = []*chipmunk.Shape{\n\t\tchipmunk.NewSegment(vect.Vect{0, -600}, vect.Vect{800.0, -600}, 0),\n\t\tchipmunk.NewSegment(vect.Vect{0, -600}, vect.Vect{0, 0}, 0),\n\t\tchipmunk.NewSegment(vect.Vect{800, -600}, vect.Vect{800.0, 0}, 0),\n\t}\n\tfor _, segment := range staticLines {\n\t\t// segment.SetElasticity(0.6)\n\t\tstaticBody.AddShape(segment)\n\t}\n\tspace.AddBody(staticBody)\n}",
"func CreateWare(c *server.Context) error {\n\tvar (\n\t\terr error\n\t\taddReq ware.Ware\n\t\tconn orm.Connection\n\t)\n\n\tisAdmin := c.Request().Context().Value(\"user\").(jwtgo.MapClaims)[util.IsAdmin].(bool)\n\tif !isAdmin {\n\t\tlogger.Error(\"You don't have access\")\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrToken, nil)\n\t}\n\n\terr = c.JSONBody(&addReq)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInvalidParam, nil)\n\t}\n\n\terr = c.Validate(addReq)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInvalidParam, nil)\n\t}\n\n\tif len(addReq.Avatar) > 0 {\n\t\taddReq.Avatar, err = util.SavePicture(addReq.Avatar, \"ware/\")\n\t\tif err != nil {\n\t\t\tlogger.Error(err)\n\t\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInternalServerError, nil)\n\t\t}\n\t}\n\tif len(addReq.Image) > 0 {\n\t\taddReq.Image, err = util.SavePicture(addReq.Image, \"ware/\")\n\t\tif err != nil {\n\t\t\tlogger.Error(err)\n\t\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInternalServerError, nil)\n\t\t}\n\t}\n\tif len(addReq.DetailPic) > 0 {\n\t\taddReq.DetailPic, err = util.SavePicture(addReq.DetailPic, \"wareIntro/\")\n\t\tif err != nil {\n\t\t\tlogger.Error(err)\n\t\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInternalServerError, nil)\n\t\t}\n\t}\n\n\tconn, err = mysql.Pool.Get()\n\tdefer mysql.Pool.Release(conn)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrMysql, nil)\n\t}\n\n\terr = ware.Service.CreateWare(conn, &addReq)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\tif (len(addReq.Avatar) > 0 && !util.DeletePicture(addReq.Avatar)) ||\n\t\t\t(len(addReq.Image) > 0 && !util.DeletePicture(addReq.Image)) ||\n\t\t\t(len(addReq.DetailPic) > 0 && !util.DeletePicture(addReq.DetailPic)) {\n\t\t\tlogger.Error(errors.New(\"create ware failed and delete it's pictures go wrong, please delete picture manually\"))\n\t\t}\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrMysql, nil)\n\t}\n\n\tlogger.Info(\"create ware\", addReq.Name, \"success\")\n\treturn core.WriteStatusAndDataJSON(c, constants.ErrSucceed, nil)\n}",
"func (g *projectGateway) CreateProjectAction(params project.CreateProjectParams) middleware.Responder {\n\trsp, err := g.projectClient.Create(context.TODO(), &proto.CreateRequest{\n\t\tName: params.Body.Name,\n\t\tDescription: params.Body.Description,\n\t})\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn project.NewCreateProjectInternalServerError()\n\t}\n\n\tif uint32(codes.OK) == rsp.Status {\n\t\tfmt.Println(fmt.Sprintf(\"project.client: ok. Id = %v\", rsp.Uuid))\n\t} else {\n\t\tfmt.Println(\"project.client: create fail. \")\n\t}\n\n\treadRsp, err := g.projectClient.Read(context.TODO(), &proto.ReadRequest{\n\t\tUuid: rsp.Uuid,\n\t})\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn project.NewCreateProjectInternalServerError()\n\t}\n\n\tpr := &models.Project{\n\t\tUUID: strfmt.UUID(readRsp.Project.Uuid),\n\t\tName: readRsp.Project.Name,\n\t\tDescription: readRsp.Project.Description,\n\t}\n\n\treturn project.NewCreateProjectCreated().WithPayload(pr)\n}",
"func (t tApp) newC(w http.ResponseWriter, r *http.Request, ctr, act string) *contr.App {\n\t// Allocate a new controller. Set values of special fields, if necessary.\n\tc := &contr.App{}\n\n\t// Allocate its parents. Make sure controller of every type\n\t// is allocated just once, then reused.\n\tc.Controllers = &contr.Controllers{}\n\tc.Controllers.Templates = c.Controllers.Errors.Templates\n\tc.Controllers.Errors = &c5.Errors{}\n\tc.Controllers.Static = &c3.Static{}\n\tc.Controllers.Sessions = &c2.Sessions{\n\n\t\tRequest: r,\n\n\t\tResponse: w,\n\t}\n\tc.Controllers.Requests = &c1.Requests{\n\n\t\tRequest: r,\n\n\t\tResponse: w,\n\t}\n\tc.Controllers.Global = &c0.Global{\n\n\t\tCurrentAction: act,\n\n\t\tCurrentController: ctr,\n\t}\n\tc.Controllers.Errors.Templates = &c4.Templates{}\n\tc.Controllers.Errors.Templates.Requests = c.Controllers.Requests\n\tc.Controllers.Errors.Templates.Global = c.Controllers.Global\n\tc.Controllers.Templates.Requests = c.Controllers.Requests\n\tc.Controllers.Templates.Global = c.Controllers.Global\n\n\treturn c\n}",
"func Create(writer http.ResponseWriter, request *http.Request) {\n\ttemplate_html.ExecuteTemplate(writer, \"Create\", nil)\n}",
"func CreateAction(req *http.Request) (interface{}, error) {\n\tparam, err := newCreateParam4Create(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn createActionProcess(req, param)\n}",
"func Create(\n\tcontext contexts.Contextable,\n\tlogger *logger.Logger,\n\tconnection *golastic.Connection,\n\tqueue *notifications.Queue,\n\tctx context.Context,\n) (Actionable, error) {\n\taction, err := build(context.Action())\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif err := action.Init(context, logger, connection, queue, ctx); err != nil {\n\t\treturn nil, err\n\t}\n\n\tif err := action.ApplyOptions().ApplyFilters(); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn action, nil\n}",
"func DeclareWar(sender *betypes.Clan, receiver *betypes.Clan, bot *tgbotapi.BotAPI, update tgbotapi.Update) {\n\tvar AttackButton = tgbotapi.NewReplyKeyboard(\n\t\ttgbotapi.NewKeyboardButtonRow(\n\t\t\ttgbotapi.NewKeyboardButton(\"Attack\"),\n\t\t),\n\t)\n\tcounterR := 0\n\tcounterS := 0\n\tgo func() {\n\t\tfor _, user := range receiver.Users {\n\t\t\tmsg := tgbotapi.NewMessage(user.Id, fmt.Sprintf(\"Clan #%d has just declared you a war!\", sender.Number))\n\t\t\tbot.Send(msg)\n\t\t\tmsg.ReplyMarkup = AttackButton\n\t\t\tif update.CallbackQuery != nil {\n\t\t\t\tswitch update.CallbackQuery.Data {\n\t\t\t\tcase \"Attack\":\n\t\t\t\t\tcounterR++\n\t\t\t\t\tif counterR < len(receiver.Users) {\n\t\t\t\t\t\tmsg.Text = \"Wait for other members to accept war...\"\n\t\t\t\t\t\tbot.Send(msg)\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}()\n\tif counterR < len(receiver.Users) {\n\t\tgo func() {\n\t\t\tfor _, user := range receiver.Users {\n\t\t\t\tbot.Send(tgbotapi.NewMessage(user.Id, fmt.Sprintln(\"Unfortunately, your clan's memebers decided not to attack.\")))\n\t\t\t}\n\t\t}()\n\t} else {\n\t\tgo func() {\n\t\t\tfor _, user := range receiver.Users {\n\t\t\t\tbot.Send(tgbotapi.NewMessage(user.Id, fmt.Sprintf(\"The war begins!💥\")))\n\t\t\t\tmsg := tgbotapi.NewMessage(user.Id, \"\")\n\t\t\t\tif update.CallbackQuery.Data == \"Attack\" {\n\t\t\t\t\tmsg.Text = \"The attack begins!\"\n\t\t\t\t\tbot.Send(msg)\n\t\t\t\t\tdamage := rand.Intn(counterR % 10)\n\t\t\t\t\tif Attack(sender, bot, damage) == true {\n\t\t\t\t\t\tmsg.Text = fmt.Sprintf(\"Your clan has just attacked with damage of %d\", damage)\n\t\t\t\t\t\tbot.Send(msg)\n\t\t\t\t\t} else {\n\t\t\t\t\t\tmsg.Text = \"You can't attack no more.\"\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}()\n\t}\n\tgo func() {\n\t\tfor _, user := range sender.Users {\n\t\t\tmsg := tgbotapi.NewMessage(user.Id, \"\")\n\t\t\tmsg.ReplyMarkup = AttackButton\n\t\t\tif update.CallbackQuery != nil {\n\t\t\t\tswitch update.CallbackQuery.Data {\n\t\t\t\tcase \"Attack\":\n\t\t\t\t\tcounterS++\n\t\t\t\t\tif counterS < len(sender.Users) {\n\t\t\t\t\t\tmsg.Text = \"Wait for other members to start attack...\"\n\t\t\t\t\t\tbot.Send(msg)\n\t\t\t\t\t}\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}()\n\tif counterS < len(sender.Users) {\n\t\tgo func() {\n\t\t\tfor _, user := range sender.Users {\n\t\t\t\tbot.Send(tgbotapi.NewMessage(user.Id, fmt.Sprintln(\"Unfortunately, your clan's memebers decided not to attack.\")))\n\t\t\t}\n\t\t}()\n\t} else {\n\t\tgo func() {\n\t\t\tfor _, user := range sender.Users {\n\t\t\t\tbot.Send(tgbotapi.NewMessage(user.Id, fmt.Sprintf(\"The war begins!💥\")))\n\t\t\t\tmsg := tgbotapi.NewMessage(user.Id, \"\")\n\t\t\t\tif update.CallbackQuery.Data == \"Attack\" {\n\t\t\t\t\tmsg.Text = \"The attack begins!\"\n\t\t\t\t\tbot.Send(msg)\n\t\t\t\t\tdamage := rand.Intn(counterS % 10)\n\t\t\t\t\tif Attack(receiver, bot, damage) == true {\n\t\t\t\t\t\tmsg.Text = fmt.Sprintf(\"Your clan has just attacked with damage of %d\", damage)\n\t\t\t\t\t\tbot.Send(msg)\n\t\t\t\t\t} else {\n\t\t\t\t\t\tmsg.Text = \"You can't attack no more.\"\n\t\t\t\t\t\tbot.Send(msg)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}()\n\t}\n}",
"func (w *RandomWorld) CreateCarnivores(quantity int) {\n\t// Initialize each being to a random one\n\tfor i := 0; i < quantity; i++ {\n\t\t// Create random being and place it into the map\n\t\tb := w.CreateRandomCarnivore()\n\t\tw.BeingList[b.ID.String()] = b\n\t}\n}",
"func (act *CreateAction) Do() error {\n\t// business sharding db.\n\tsd, err := act.smgr.ShardingDB(act.req.BizId)\n\tif err != nil {\n\t\treturn act.Err(pbcommon.ErrCode_E_DM_ERR_DBSHARDING, err.Error())\n\t}\n\tact.sd = sd\n\tact.tx = act.sd.DB().Begin()\n\n\t// query config template.\n\tif errCode, errMsg := act.queryConfigTemplate(); errCode != pbcommon.ErrCode_E_OK {\n\t\tact.tx.Rollback()\n\t\treturn act.Err(errCode, errMsg)\n\t}\n\n\t// create config template bind relation.\n\tif errCode, errMsg := act.createTemplateBind(); errCode != pbcommon.ErrCode_E_OK {\n\t\tact.tx.Rollback()\n\t\treturn act.Err(errCode, errMsg)\n\t}\n\n\t// create config.\n\tif errCode, errMsg := act.createBindConfig(); errCode != pbcommon.ErrCode_E_OK {\n\t\tact.tx.Rollback()\n\t\treturn act.Err(errCode, errMsg)\n\t}\n\n\t// commit tx.\n\tif err := act.tx.Commit().Error; err != nil {\n\t\tact.tx.Rollback()\n\t\treturn act.Err(pbcommon.ErrCode_E_DM_SYSTEM_UNKNOWN, err.Error())\n\t}\n\n\treturn nil\n}",
"func (w *RandomWorld) CreateFlyers(quantity int) {\n\t// Initialize each being to a random one\n\tfor i := 0; i < quantity; i++ {\n\t\t// Create random being and place it into the map\n\t\tb := w.CreateRandomFlyer()\n\t\tw.BeingList[b.ID.String()] = b\n\t}\n}",
"func CreateAction(\n\tcmd, keyB, id, secretKey string,\n\targs ...interface{}) *types.Action {\n\n\tmac := hmac.New(sha1.New, []byte(secretKey))\n\tmac.Write([]byte(cmd))\n\tmac.Write([]byte(keyB))\n\tmac.Write([]byte(id))\n\tsum := mac.Sum(nil)\n\tsumhex := hex.EncodeToString(sum)\n\n\treturn &types.Action{\n\t\tCommand: cmd,\n\t\tStorageKey: keyB,\n\t\tArgs: args,\n\t\tId: id,\n\t\tSecret: sumhex,\n\t}\n}",
"func applyToWorld (world *World, actions []Action) {\n for _, action := range(actions) {\n action.Do(DummyPlayerApi{}, world)\n }\n}",
"func (a *LaborApiService) CreateShift(ctx context.Context, body CreateShiftRequest) (CreateShiftResponse, *http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Post\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\tlocalVarReturnValue CreateShiftResponse\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/v2/labor/shifts\"\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &body\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarReturnValue, localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHttpResponse, err\n\t}\n\n\tif localVarHttpResponse.StatusCode < 300 {\n\t\t// If we succeed, return the data, otherwise pass on to decode error.\n\t\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get(\"Content-Type\"));\n\t\tif err == nil { \n\t\t\treturn localVarReturnValue, localVarHttpResponse, err\n\t\t}\n\t}\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericSwaggerError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\tif localVarHttpResponse.StatusCode == 200 {\n\t\t\tvar v CreateShiftResponse\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get(\"Content-Type\"));\n\t\t\t\tif err != nil {\n\t\t\t\t\tnewErr.error = err.Error()\n\t\t\t\t\treturn localVarReturnValue, localVarHttpResponse, newErr\n\t\t\t\t}\n\t\t\t\tnewErr.model = v\n\t\t\t\treturn localVarReturnValue, localVarHttpResponse, newErr\n\t\t}\n\t\treturn localVarReturnValue, localVarHttpResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHttpResponse, nil\n}",
"func CreateAction(action func(*cli.Context) error) func(*cli.Context) error {\n\treturn func(c *cli.Context) error {\n\t\terr := action(c)\n\t\tif err != nil {\n\t\t\tiocli.Error(\"%s\", err)\n\t\t}\n\n\t\treturn nil\n\t}\n}",
"func Create() http.Handler {\n\trouter := httprouter.New()\n\n\trouter.Handle(\"GET\", \"/\", middle.ResponseHandler(Hello))\n\trouter.Handle(\"POST\", \"/post\", middle.ResponseHandler(Hello))\n\trouter.Handle(\"GET\", \"/error\", middle.ResponseHandler(ErrorRoute))\n\trouter.Handle(\"GET\", \"/user-error\", middle.ResponseHandler(UserErrorRoute))\n\trouter.Handle(\"GET\", \"/multi-error\", middle.ResponseHandler(MultiErrorRoute))\n\trouter.Handle(\"GET\", \"/panic\", middle.ResponseHandler(Panic))\n\trouter.Handle(\"GET\", \"/version\", Version)\n\n\treturn alice.New(\n\t\tmiddle.RecoveryHandler,\n\t\tmiddle.FrameHandler,\n\t\tmiddle.RequestIDHandler,\n\t\tmiddle.RequestPathHandler,\n\t\tmiddle.BodyHandler).\n\t\tThen(router)\n}",
"func createClass(w http.ResponseWriter, r *http.Request) {\n\treqBody, _ := ioutil.ReadAll(r.Body)\n\n\tvar classRequest ClassRequest\n\terr := json.Unmarshal(reqBody, &classRequest)\n\tif err != nil {\n\t\terr = errorResponse(w, InvalidJSON, http.StatusBadRequest)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\treturn\n\t}\n\n\tvar classes []Class\n\tstartDate, err := time.Parse(layoutISO, classRequest.StartDate)\n\tif err != nil {\n\t\terr = errorResponse(w, InvalidDate, http.StatusBadRequest)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\treturn\n\t}\n\tendDate, err := time.Parse(layoutISO, classRequest.EndDate)\n\tif err != nil {\n\t\terr = errorResponse(w, InvalidDate, http.StatusBadRequest)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\treturn\n\t}\n\n\tfor days := 0; days <= int(endDate.Sub(startDate).Hours()/24); days++ {\n\t\tclass := Class{\n\t\t\tId: createID(),\n\t\t\tName: classRequest.Name,\n\t\t\tDate: startDate.Add(time.Hour * 24 * time.Duration(days)),\n\t\t\tCapacity: classRequest.Capacity,\n\t\t}\n\t\tclasses = append(classes, class)\n\t}\n\tDBClasses = append(DBClasses, classes...)\n\n\tw.WriteHeader(http.StatusCreated)\n\terr = json.NewEncoder(w).Encode(classes)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n}",
"func CreateUserAction(context *web.AppContext) *web.AppError {\n\n\tdb := context.MDB\n\tvar input model.CayUserAction\n\tjson.NewDecoder(context.Body).Decode(&input)\n\n\tinput.ID = bson.NewObjectId()\n\tinput.Date = time.Now()\n\tif input.Release == \"\" {\n\t\tinput.Release = \"0.1.0\"\n\t}\n\terr := db.DB.C(model.CayUserActions).Insert(input)\n\tif err != nil {\n\t\tmessage := fmt.Sprintf(\"Error creating user-action [%s]\", err)\n\t\treturn &web.AppError{err, message, http.StatusInternalServerError}\n\t}\n\n\treturn nil\n}",
"func ApplicationCreate(w http.ResponseWriter, r *http.Request) {\n\tdb, err := database.Connect()\n\tdefer db.Close()\n\tif err != nil {\n\t\tlog.Printf(\"Database error: '%s'\\n\", err)\n\t\thttp.Error(w, \"there was an error when attempting to connect to the database\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tvar createForm struct {\n\t\tName string\n\t}\n\tdecoder := json.NewDecoder(r.Body)\n\terr = decoder.Decode(&createForm)\n\tdefer r.Body.Close()\n\tif err != nil {\n\t\tlog.Printf(\"decoding error: '%s'\\n\", err)\n\t\thttp.Error(w, \"there was an error when attempting to parse the form\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tapp := resources.Application{\n\t\tName: createForm.Name,\n\t}\n\t_, err = resources.CreateApplication(db, &app)\n\t// @todo handle failed save w/out error?\n\tif err != nil {\n\t\tlog.Printf(\"Error when retrieving application: '%s'\\n\", err)\n\t\thttp.Error(w, \"there was an error when retrieving application\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// @todo return some sort of content?\n\tw.WriteHeader(http.StatusCreated)\n\treturn\n}",
"func (s *CreateMapping) Run() error {\n\tam := &acApi.ApplicationMapping{\n\t\tTypeMeta: metav1.TypeMeta{Kind: \"ApplicationMapping\", APIVersion: acApi.SchemeGroupVersion.String()},\n\t\tObjectMeta: metav1.ObjectMeta{Name: s.name},\n\t}\n\n\t_, err := s.mappings.Create(am)\n\treturn err\n}",
"func (sh *Shift) Create() error {\n\tvalidator := validatorimpl.NewDefaultValidator()\n\terrs := validator.Verify(sh)\n\tif len(errs) != 0 {\n\t\treturn fmt.Errorf(\"Save the shift failed due to content errors: %v\", errs)\n\t}\n\tshiftRepo := repoimpl.GetShiftRepo()\n\tfindCtx, findCancel := utils.GetDefaultCtx()\n\tdefer findCancel()\n\trst := shiftRepo.FindOne(findCtx, bson.M{\"projectId\": sh.ProjectID})\n\tif rst.Err() == nil {\n\t\treturn DuplicateShiftError{}\n\t}\n\tctxInsert, cancelInsert := utils.GetDefaultCtx()\n\tdefer cancelInsert()\n\t_, err := shiftRepo.InsertOne(ctxInsert, sh)\n\treturn err\n}",
"func doCreate(constructor func() base.IGameObject2D, isActive *bool) base.IGameObject2D {\r\n\tobj := constructor()\r\n\tobj.Obj().SetIGameObject2D(obj)\r\n\tapp.registerChannel <- resourceAccessRequest{\r\n\t\tpayload: obj,\r\n\t\tisActive: isActive,\r\n\t}\r\n\treturn obj\r\n}",
"func (h *Handler) serveCreateShardSpace(w http.ResponseWriter, r *http.Request) {}",
"func (h *Handler) serveCreateShard(w http.ResponseWriter, r *http.Request) {}",
"func (g *Game) getActions() *Actions {\n\n\tactions := make([]Action, 0)\n\n\tif g.Phase == Development {\n\t\tfor _, loco := range g.Locos {\n\t\t\tif g.isLocoAvailableForDevelopment(loco) {\n\t\t\t\tabbr := fmt.Sprintf(\"D:%s\", loco.Key)\n\t\t\t\tactions = append(actions, Action{\n\t\t\t\t\tAbbr: abbr,\n\t\t\t\t\tVerb: \"Develop\",\n\t\t\t\t\tNoun: loco.Name,\n\t\t\t\t\tCost: loco.DevelopmentCost,\n\t\t\t\t\tLoco: loco})\n\t\t\t}\n\t\t}\n\t}\n\n\t// I think you can always pass.\n\tactions = append(actions, Action{Abbr: \"P\", Verb: \"Pass\"})\n\tphase := Phases[g.Phase-1]\n\treturn &Actions{Phase: phase, Actions: actions}\n}",
"func CreateWarrior(WarriorName string) (*Warrior, error) {\n\tnewID, _ := uuid.NewUUID()\n\tid := newID.String()\n\n\tvar WarriorID string\n\te := db.QueryRow(`INSERT INTO warriors (id, name) VALUES ($1, $2) RETURNING id`, id, WarriorName).Scan(&WarriorID)\n\tif e != nil {\n\t\tlog.Println(e)\n\t\treturn nil, errors.New(\"Unable to create new warrior\")\n\t}\n\n\treturn &Warrior{WarriorID: WarriorID, WarriorName: WarriorName}, nil\n}",
"func (a *appHandler) CreateApp(w http.ResponseWriter, r *http.Request) {\n\tvar app model.App\n\terr := json.NewDecoder(r.Body).Decode(&app)\n\tif err != nil {\n\t\ta.httpUtil.WriteJSONBadRequestResponse(w, err)\n\t\treturn\n\t}\n\n\t// TODO : Create\n\n\tjsonR, err := json.Marshal(app)\n\tif err != nil {\n\t\ta.httpUtil.WriteJSONInternalServerErrorResponse(w, err)\n\t}\n\n\ta.httpUtil.WriteJSONSuccessResponse(w, jsonR)\n}",
"func HandlerCreateArboard(w http.ResponseWriter, r *http.Request) {\n\tdb := models.DB\n\tcreated, err := models.CreateArtboard(db)\n\n\tif err != nil {\n\t\terrorResponse(r, w, err)\n\t\treturn\n\t}\n\n\trespond(r, w, http.StatusOK, created)\n}",
"func createHandler(w http.ResponseWriter, r *http.Request) {\n\n\tlog.Printf(\"CreateHandler Called...\\n\")\n\tw.Header().Add(\"Content-Type\", \"application/json\")\n\tparams := mux.Vars(r)\n\tgameName := params[\"gameName\"]\n\tplayerName := params[\"playerName\"]\n\tdieNum, _ := strconv.Atoi(params[\"dieNum\"])\n\tgameId := storewrapper.GreateUniqueGameId(game_db)\n\tplayerId := storewrapper.GreateUniquePlayerId(player_db, gameId)\n\tgame := NewDiceChessGame(gameId, gameName, playerName, \"\", dieNum)\n\tvalue, err := json.Marshal(game)\n\tif err != nil {\n\t\tlog.Println(\"Failed to parse inputs.\")\n\t\tReturnFailure(w, \"Failed to parse inputs.\")\n\t\treturn\n\t}\n\tgame_db.Set(gameId, string(value), 0)\n\tresp := createResponse{RESPONSE_CREATE, gameId, playerId, gameName, playerName, int(WAITING), strconv.Itoa(dieNum)}\n\tjson.NewEncoder(w).Encode(resp)\n}",
"func initAction(destination string) {\n\n\tvar (\n\t\tFileName = map[string]string{\n\t\t\t\"cfg\": \"/todo.cfg\",\n\t\t\t\"todo\": \"/todo.txt\",\n\t\t\t\"done\": \"/done.txt\",\n\t\t\t\"report\": \"/report.txt\",\n\t\t}\n\n\t\tFileTemplate = map[string]string{\n\t\t\t\"cfg\": `\n# === FILE LOCATIONS ===\n\n# Your todo.txt directory\n#export TODO_DIR=\"$HOME/todo\"\nexport TODO_DIR=\".\"\n\n# Your todo/done/report.txt locations\nexport TODO_FILE=\"$TODO_DIR/todo.txt\"\nexport DONE_FILE=\"$TODO_DIR/done.txt\"\nexport REPORT_FILE=\"$TODO_DIR/report.txt\"\n\n# You can customize your actions directory location\n#export TODO_ACTIONS_DIR=\"$HOME/.todo.actions.d\"\n\n# === APP OPTIONS ===\n\n# is same as option -t (1)/-T (0)\nexport TODOTXT_DATE_ON_ADD=0\n\n# is same as option -f\nexport TODOTXT_FORCE=0\n`,\n\t\t\t\"todo\": \"\",\n\t\t\t\"done\": \"\",\n\t\t\t\"report\": \"\",\n\t\t}\n\n\t\tinitiated = false\n\t\tmessage = \"Initialized a new\"\n\t)\n\n\t// try to guess if the destination is an existing and\n\t// pre-configured todo.txt structure.\n\tfor _, filename := range FileName {\n\t\t// sanitize the absolute path of the file\n\t\tfilePath, err := filepath.Abs(destination + filename)\n\t\t//fmt.Printf(\"absolute path: %s\\n\", cfgFilePath)\n\t\tutils.Check(err)\n\n\t\tret, _ := utils.Exists(filePath)\n\t\tif ret {\n\t\t\tinitiated = true\n\t\t\tbreak\n\t\t}\n\t}\n\n\t// if the destination is an existing todo.txt structure then\n\t// change the message accordingly\n\tif initiated {\n\t\tmessage = \"Reinitialized an existing\"\n\t}\n\n\t// sanitize the absolute path of the destination\n\tfilePath, err := filepath.Abs(destination)\n\tutils.Check(err)\n\n\t// print first line of the action's summary\n\tfmt.Printf(\"%s todo.txt structure in %s\\n\", message, filePath)\n\n\t// create the missing files of the todo.txt structure\n\tfor k, filename := range FileName {\n\t\t// sanitize the absolute path of the file\n\t\tfilePath, err := filepath.Abs(destination + FileName[k])\n\t\tutils.Check(err)\n\t\t//fmt.Printf(\"absolute path: %s\\n\", filePath)\n\n\t\t// Open file\n\t\tfile, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)\n\t\tdefer file.Close()\n\n\t\t// if there aren't errors, write a new file with default values\n\t\tif err == nil {\n\t\t\tsize, err := file.WriteString(FileTemplate[k])\n\t\t\tutils.Check(err)\n\n\t\t\t// sync / flush file\n\t\t\tfile.Sync()\n\n\t\t\t// print a small summary\n\t\t\tfmt.Printf(\"%s [%s] (%d bytes)\\n\", filename, \"new\", size)\n\t\t\tcontinue\n\t\t}\n\n\t\t// file exists, there is nothing to write\n\t\tif os.IsExist(err) {\n\t\t\t// print a small summary\n\t\t\tfmt.Printf(\"%s [%s]\\n\", filename, \"exists\")\n\t\t\tcontinue\n\t\t}\n\t}\n}",
"func (act *CreateAction) Do() error {\n\tif err := act.genStrategyID(); err != nil {\n\t\treturn act.Err(pbcommon.ErrCode_E_CS_SYSTEM_UNKNOWN, err.Error())\n\t}\n\n\t// query app.\n\tif errCode, errMsg := act.queryApp(); errCode != pbcommon.ErrCode_E_OK {\n\t\treturn act.Err(errCode, errMsg)\n\t}\n\n\t// create strategy.\n\tif errCode, errMsg := act.create(); errCode != pbcommon.ErrCode_E_OK {\n\t\treturn act.Err(errCode, errMsg)\n\t}\n\treturn nil\n}",
"func Create(w http.ResponseWriter, r *http.Request) {\n\tc := flight.Context(w, r)\n\n\tv := c.View.New(\"summary/create\")\n\tc.Repopulate(v.Vars, \"name\")\n\tv.Render(w, r)\n}",
"func create_site() {\n\n\tcreate_dirs()\n\tcreate_theme_files()\n}",
"func createPlayer(w http.ResponseWriter, r *http.Request) {\n\tplayer := models.NewPlayer(\"\")\n\tskue.Create(view, player, w, r)\n}",
"func (h Handler) Create(res http.ResponseWriter, req *http.Request) {\n\tdefer req.Body.Close()\n\tvar body []byte\n\t_, readErr := req.Body.Read(body)\n\tif readErr != nil {\n\t\tres.WriteHeader(400)\n\t\tres.Write([]byte(\"400 Bad Request\"))\n\t\tlog.Println(readErr.Error())\n\t\treturn\n\t}\n\tvar bodyMap map[string]interface{}\n\tmarshErr := json.Unmarshal(body, bodyMap)\n\tif marshErr != nil {\n\t\tres.WriteHeader(400)\n\t\tres.Write([]byte(\"400 Bad Request\"))\n\t\tlog.Println(marshErr.Error())\n\t}\n\tvar key string\n\tif bodyMap[\"Name\"] != nil {\n\t\tkey = serviceStateKey(bodyMap)\n\t} else {\n\t\tkey = projectStateKey(bodyMap)\n\t}\n\terrChan := make(chan error)\n\th.Store.Save(key, bodyMap, func(err error) {\n\t\terrChan <- err\n\t})\n\tsaveErr := <-errChan\n\tif saveErr != nil {\n\t\tres.WriteHeader(500)\n\t\tres.Write([]byte(\"500 Internal Error\"))\n\t\treturn\n\t}\n\tres.WriteHeader(201)\n\tres.Write([]byte(\"201 Created\"))\n\th.Running.CheckIn(bodyMap[\"Project\"].(string), bodyMap[\"Branch\"].(string))\n}",
"func (r *Router) Create() error {\n\tfmt.Printf(emoji.Sprintf(\":gear:\")+\" Generating \"+aurora.Yellow(\"%s\").String()+\" router\\n\\n\", r.Name)\n\n\tisNewRouter := true\n\toverwriteRouter := true\n\toverwriteController := true\n\n\t// check if router exists and if user wants to overwrite it\n\tif _, err := os.Stat(fmt.Sprintf(\"%s/src/routes/%s.ts\", r.Project.AbsolutePath, r.Name)); err == nil {\n\t\toverwriteRouter = util.AskForConfirmation(fmt.Sprintf(aurora.Yellow(\" src/routes/%s.ts already exists. Would you like to overwrite it?\").String(), r.Name))\n\t\tisNewRouter = false\n\t}\n\tif overwriteRouter {\n\t\tfmt.Print(\" src/routes/\")\n\t\terr := util.CreateFile(r, r.Name+\".ts\", r.Project.AbsolutePath+\"/src/routes\", string(tpl.RouterTemplate()), 0)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// check if controller exists and if user wants to overwrite it\n\tif _, err := os.Stat(fmt.Sprintf(\"%s/%s.ts\", r.Project.AbsolutePath+\"/src/controllers\", r.Name)); err == nil {\n\t\toverwriteController = util.AskForConfirmation(fmt.Sprintf(aurora.Yellow(\" src/controllers/%s.ts already exists. Would you like to overwrite it?\").String(), r.Name))\n\t}\n\tif overwriteController {\n\t\tfmt.Print(\" src/controllers/\")\n\t\terr := util.CreateFile(r, r.Name+\".ts\", r.Project.AbsolutePath+\"/src/controllers\", string(tpl.ControllerTemplate()), 0)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// Update the app if it's a new router\n\tif isNewRouter {\n\t\tfmt.Printf(\" \" + aurora.Cyan(\"Updating \").String() + \"src/app.ts\\n\")\n\t\tappFile, err := ioutil.ReadFile(fmt.Sprintf(\"%s/src/app.ts\", r.Project.AbsolutePath))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tappFileLines := strings.Split(string(appFile), \"\\n\")\n\t\tuseStr := fmt.Sprintf(\"app.use(%sRouter);\", r.Name)\n\t\timportStr := fmt.Sprintf(\"import { %sRouter } from \\\"./routes/%s\\\";\", r.Name, r.Name)\n\t\tlinesToAdd := []string{useStr, importStr}\n\t\tfor i, line := range appFileLines {\n\t\t\tif strings.Contains(line, \"import cors from\") {\n\t\t\t\tappFileLines = append(appFileLines, \"\")\n\t\t\t\tcopy(appFileLines[i+2:], appFileLines[i+1:])\n\t\t\t\tappFileLines[i+1] = linesToAdd[1]\n\t\t\t}\n\t\t\tif strings.Contains(line, \"app.use((req: Request\") {\n\t\t\t\tappFileLines = append(appFileLines, \"\")\n\t\t\t\tcopy(appFileLines[i+2:], appFileLines[i+1:])\n\t\t\t\tappFileLines[i+1] = linesToAdd[0]\n\t\t\t}\n\t\t}\n\n\t\toutput := strings.Join(appFileLines, \"\\n\")\n\t\terr = ioutil.WriteFile(fmt.Sprintf(\"%s/src/app.ts\", r.Project.AbsolutePath), []byte(output), 0644)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tfmt.Println(\"\\n\" + emoji.Sprintf(\":party_popper:\") + \"Done\")\n\n\treturn nil\n}",
"func (_obj *DataService) CreateApply(wx_id string, club_id string, _opt ...map[string]string) (ret int32, err error) {\n\n\tvar length int32\n\tvar have bool\n\tvar ty byte\n\t_os := codec.NewBuffer()\n\terr = _os.Write_string(wx_id, 1)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\terr = _os.Write_string(club_id, 2)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tvar _status map[string]string\n\tvar _context map[string]string\n\tif len(_opt) == 1 {\n\t\t_context = _opt[0]\n\t} else if len(_opt) == 2 {\n\t\t_context = _opt[0]\n\t\t_status = _opt[1]\n\t}\n\t_resp := new(requestf.ResponsePacket)\n\ttarsCtx := context.Background()\n\n\terr = _obj.s.Tars_invoke(tarsCtx, 0, \"createApply\", _os.ToBytes(), _status, _context, _resp)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\t_is := codec.NewReader(tools.Int8ToByte(_resp.SBuffer))\n\terr = _is.Read_int32(&ret, 0, true)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tif len(_opt) == 1 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t} else if len(_opt) == 2 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t\tfor k := range _status {\n\t\t\tdelete(_status, k)\n\t\t}\n\t\tfor k, v := range _resp.Status {\n\t\t\t_status[k] = v\n\t\t}\n\n\t}\n\t_ = length\n\t_ = have\n\t_ = ty\n\treturn ret, nil\n}",
"func CreateRedirect(c echo.Context) error {\n r := new(Redirection)\n if err := c.Bind(r); err != nil {\n badRequestMessage := &Response{Message: \"Bad Request\"}\n return c.JSON(http.StatusBadRequest, badRequestMessage)\n }\n urlKey, err := Save(r.URL)\n if err != nil {\n return c.JSON(http.StatusInternalServerError, &Response{Message: \"Internal Error\"})\n }\n success := &Response{Message: urlKey}\n return c.JSON(http.StatusCreated, success)\n}",
"func generateAction(c *cli.Context) error {\n\tif c.NArg() != 1 {\n\t\treturn cli.NewExitError(fmt.Sprintf(\"%v %v requires exactly 1 argument\", c.App.Name, c.Command.Name), 1)\n\t}\n\n\tfilename, err := builder.CreateSkeletonFile(c.Args().First())\n\tif err != nil {\n\t\treturn cli.NewExitError(err, 1)\n\t}\n\n\tfmt.Printf(\"successfully created template file %v\\n\", filename)\n\n\treturn nil\n}",
"func createNewFact(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t \n\tvar fact Fact\n\tvar stmt *sql.Stmt\n\tjson.NewDecoder(r.Body).Decode(&fact)\n\tif fact.ID == \"\" {\n\t\tstmt, err = db.Prepare(\"INSERT INTO Facts(FactType, Content) VALUES(?, ?)\")\n\t} else {\n\t\tstmt, err = db.Prepare(\"INSERT INTO Facts(ID, FactType, Content) VALUES(?, ?, ?)\")\n\t}\n\tif err != nil {\n\t\tflushResponseWriter(w, 0)\n \tpanic(err.Error())\n \t}\n\n\tif fact.ID == \"\" {\n\t\t_, err = stmt.Exec(fact.FactType, fact.Content)\n\t} else {\n\t\t_, err = stmt.Exec(fact.ID, fact.FactType, fact.Content)\n\t}\n\tif err != nil {\n\t\tflushResponseWriter(w, 1)\n \tpanic(err.Error())\n \t}\n\t\n\tw.WriteHeader(http.StatusCreated) // 201 to client\n\tjson.NewEncoder(w).Encode(fact)\n}",
"func CreateMenu(w http.ResponseWriter, r *http.Request) {\n\tdefer r.Body.Close()\n\tvar menu Menu\n\tif err := json.NewDecoder(r.Body).Decode(&menu); err != nil {\n\t\tRespondWithError(w, http.StatusBadRequest, \"Invalid request payload\")\n\t\treturn\n\t}\n\tmenu.ID = bson.NewObjectId()\n\n\t// set Date for every entry\n\tdays := []string{\"Mon\", \"Tue\", \"Wed\", \"Thr\", \"Fri\", \"Sat\", \"Sun\"}\n\tdates := WholeWeekDates(time.Now().AddDate(0, 0, 7))\n\n\tfor i := range days {\n\t\treflect.ValueOf(&menu.MessUP).Elem().FieldByName(days[i]).FieldByName(\"Date\").Set(reflect.ValueOf(dates[i]))\n\t\treflect.ValueOf(&menu.MessDown).Elem().FieldByName(days[i]).FieldByName(\"Date\").Set(reflect.ValueOf(dates[i]))\n\t}\n\n\tif err := mdao.Insert(menu); err != nil {\n\t\tRespondWithError(w, http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\n\tRespondWithJSON(w, http.StatusCreated, menu)\n}",
"func NewAction(app *buffalo.App) *Action {\n\tas := &Action{\n\t\tApp: app,\n\t\tModel: NewModel(),\n\t}\n\treturn as\n}",
"func NewHTTPAction(a map[interface{}]interface{}, dflt config.Default, playbook *config.TestDef) (HTTPAction, bool) {\n\tlog.Debugf(\"NewhttpAction=%v\", a)\n\tvalid := true\n\n\tif a[\"url\"] == \"\" || a[\"url\"] == nil {\n\t\tlog.Error(\"HttpAction must define a URL.\")\n\t\ta[\"url\"] = \"\"\n\t\tvalid = false\n\t} else {\n\t\t// Try to substitute already known variables: needed if variables are used\n\t\t// protocol://in the user:auth@server:port/ part of the URL\n\t\t// (cannot use SubstParams() here)\n\t\t// TODO: why here and not in DoHTTPRequest ? (same question for Mongo, SQL, etc...)\n\t\ttextData := a[\"url\"].(string)\n\t\tif strings.ContainsAny(textData, \"${\") {\n\t\t\tres := re.FindAllStringSubmatch(textData, -1)\n\t\t\tfor _, v := range res {\n\t\t\t\tlog.Debugf(\"playbook.Variables[%s]=%s\", v[1], playbook.Variables[v[1]])\n\t\t\t\tif _, err := playbook.Variables[v[1]]; !err {\n\t\t\t\t\tlog.Debugf(\"Variable ${%s} not set\", v[1])\n\t\t\t\t} else {\n\t\t\t\t\ttextData = strings.Replace(textData, \"${\"+v[1]+\"}\", url.QueryEscape(playbook.Variables[v[1]].Values[0]), 1) // TODO array\n\t\t\t\t}\n\t\t\t}\n\t\t\ta[\"url\"] = textData\n\t\t}\n\t\tvalid = setDefaultURL(a, dflt)\n\t\tlog.Debugf(\"setDefaultURL returned %v\", a)\n\t}\n\n\tif a[\"method\"] == nil || a[\"method\"] == \"\" {\n\t\tif dflt.Method == \"\" {\n\t\t\tlog.Error(\"Action has no Method and no default Method specified\")\n\t\t\ta[\"method\"] = \"\"\n\t\t\tvalid = false\n\t\t} else {\n\t\t\ta[\"method\"] = dflt.Method\n\t\t}\n\t} else if !config.IsValidHTTPMethod(a[\"method\"].(string)) {\n\t\tlog.Errorf(\"HttpAction must specify a valid HTTP method: GET, POST, PUT, HEAD or DELETE: %s\", a[\"method\"].(string))\n\t\tvalid = false\n\t}\n\tif a[\"title\"] == nil || a[\"title\"] == \"\" {\n\t\tlog.Error(\"HttpAction must define a title.\")\n\t\ta[\"title\"] = \"\"\n\t\tvalid = false\n\t}\n\tif a[\"use_http2\"] == nil {\n\t\ta[\"use_http2\"] = false\n\t} else {\n\t\tif _, ok := a[\"use_http2\"].(bool); !ok {\n\t\t\tlog.Error(\"use_http2 value must be a boolean (true or false)\")\n\t\t\ta[\"use_http2\"] = false\n\t\t\tvalid = false\n\t\t}\n\t}\n\n\t// Check formdatas\n\tnu := 0\n\tif a[\"body\"] != nil {\n\t\tnu++\n\t}\n\tif a[\"template\"] != nil {\n\t\taddEmbeddedFilename(a[\"template\"].(string))\n\t\tnu++\n\t}\n\tif a[\"upload_file\"] != nil {\n\t\taddEmbeddedFilename(a[\"upload_file\"].(string))\n\t\tnu++\n\t}\n\tif a[\"formdata\"] != nil {\n\t\tnu++\n\t}\n\tif nu > 1 {\n\t\tlog.Error(\"A HttpAction can contain a single 'body' or a 'template' or a 'formdata' or an 'upload_file'.\")\n\t\tvalid = false\n\t}\n\n\tvar storeCookie string\n\tif a[\"store_cookie\"] != nil && a[\"store_cookie\"].(string) != \"\" {\n\t\tstoreCookie = a[\"store_cookie\"].(string)\n\t}\n\n\theaders := make(map[string]string, 20)\n\tif a[\"headers\"] != nil {\n\t\t// Check the type : otherwise crashes if headers content is a list instead of a map...\n\t\tswitch v := a[\"headers\"].(type) {\n\t\tcase map[interface{}]interface{}:\n\t\t\t//for hdr, value := range a[\"headers\"].(map[interface{}]interface{}) {\n\t\t\tfor hdr, value := range v {\n\t\t\t\tlog.Debugf(\"Header Key=%s / Value=%s\", hdr.(string), value.(string))\n\t\t\t\theaders[strings.ToLower(hdr.(string))] = value.(string)\n\t\t\t}\n\t\tdefault:\n\t\t\tlog.Fatalf(\"headers format is invalid: it should be a map (you probably set it as a list ?)\")\n\t\t}\n\t}\n\n\t// Set the Accept header if not set in Playbook\n\tif _, ok := headers[\"accept\"]; !ok 
{\n\t\theaders[\"accept\"] = \"text/html,application/json,application/xhtml+xml,application/xml,text/plain\"\n\t}\n\t// Set the User-Agent header if not set in Playbook\n\tif _, ok := headers[\"user-agent\"]; !ok {\n\t\tif is_daemon_mode {\n\t\t\theaders[\"user-agent\"] = \"chaingun-\" + injector_id\n\t\t} else {\n\t\t\theaders[\"user-agent\"] = \"chaingun\"\n\t\t}\n\t}\n\n\tformdatas, validData := NewFormDatas(a)\n\tresponseHandlers, validResp := NewResponseHandlers(a)\n\ttemplate, validTempl := getTemplate(a)\n\tbody, validBody := getBody(a)\n\tupload, validUpload := getFileToPUT(a)\n\n\tif !valid || !validResp || !validData || !validTempl || !validBody || !validUpload {\n\t\tlog.Errorf(\"Your YAML Playbook contains an invalid HTTPAction, see errors listed above.\")\n\t\tvalid = false\n\t}\n\n\thttpAction := HTTPAction{\n\t\tMethod: a[\"method\"].(string),\n\t\tUseHTTP2: a[\"use_http2\"].(bool),\n\t\tURL: a[\"url\"].(string),\n\t\tBody: body,\n\t\tTemplate: template,\n\t\tFormDatas: formdatas,\n\t\tHeaders: headers,\n\t\tTitle: a[\"title\"].(string),\n\t\tUploadFile: upload,\n\t\tStoreCookie: storeCookie,\n\t\tResponseHandlers: responseHandlers,\n\t}\n\n\tlog.Debugf(\"HTTPAction: %v\", httpAction)\n\n\treturn httpAction, valid\n}",
"func Create(engine *leader.Leader, port int) *http.Server {\n\tgame.InitGames()\n\tlgger := logger.Init(\"BattleSnake Web\", true, false, ioutil.Discard)\n\tvar host string\n\tif os.Getenv(\"ENV\") == \"dev\" {\n\t\thost = \"localhost\"\n\t} else {\n\t\thost = \"\"\n\t}\n\treturn &http.Server{\n\t\tAddr: fmt.Sprintf(\"%s:%d\", host, port),\n\t\tHandler: web.NewRouter(engine, lgger),\n\t\tReadTimeout: time.Duration(500) * time.Millisecond, // TODO remove hardcoding\n\t\tWriteTimeout: time.Duration(500) * time.Millisecond, // TODO remove hardcoding\n\t}\n}",
"func CreateFolderAction(w http.ResponseWriter, r *http.Request) {\n\n\tpageVars := PageVars{}\n\taddPageVars(r, &pageVars)\n\n\tfolderName := r.FormValue(\"folderName\")\n\n\tif len(pageVars.BName) <= 0 {\n\t\thttp.Redirect(w, r, \"/objectlist?bucketName=\"+pageVars.BName+\"&prefix=\"+pageVars.Prefix+\"&errorM=Invalid bucket name\", http.StatusSeeOther)\n\t} else if len(folderName) <= 0 {\n\t\thttp.Redirect(w, r, \"/objectlist?bucketName=\"+pageVars.BName+\"&prefix=\"+pageVars.Prefix+\"&errorM=Invalid folder name\", http.StatusSeeOther)\n\t} else {\n\t\tbucket := aws.String(pageVars.BName)\n\n\t\tif (len(pageVars.Prefix) > 0) && strings.HasSuffix(pageVars.Prefix, \"/\") {\n\t\t\tfolderName = pageVars.Prefix + folderName\n\t\t}\n\n\t\tsvc := s3.New(sess)\n\n\t\t_, err := svc.PutObject(&s3.PutObjectInput{\n\t\t\tBucket: bucket,\n\t\t\tKey: aws.String(folderName + \"/\"),\t\n\t\t\tContentLength: aws.Int64(0),\n\t\t})\n\n\t\tif err != nil {\n\t\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\t\thttp.Redirect(w, r, \"/objectlist?bucketName=\"+pageVars.BName+\"&prefix=\"+pageVars.Prefix+\"&errorM=\"+awsErr.Message(), http.StatusSeeOther)\n\t\t\t} else {\n\t\t\t\thttp.Redirect(w, r, \"/objectlist?bucketName=\"+pageVars.BName+\"&prefix=\"+pageVars.Prefix+\"&errorM=Error in creating folder\", http.StatusSeeOther)\n\t\t\t}\n\t\t} else {\n\t\t\thttp.Redirect(w, r, \"/objectlist?bucketName=\"+pageVars.BName+\"&prefix=\"+pageVars.Prefix+\"&successM=Successfully created\", http.StatusSeeOther)\n\t\t}\n\t}\n\n}",
"func Create(cfg map[string]interface{}) {\r\n\tgo func(c map[string]interface{}) {\r\n\t\tserver.CreateSingle(\"ws\", c, serveWs)\r\n\t}(cfg)\r\n\tgo func(c map[string]interface{}) {\r\n\t\tserver.CreateSingle(\"wss\", c, serveWs)\r\n\t}(cfg)\r\n}",
"func (s *Server) CreateHandler(w http.ResponseWriter, r *http.Request) {\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), 500)\n\t\treturn\n\t}\n\tdefer r.Body.Close()\n\n\tvar workoutReq storage.WorkoutReq\n\terr = json.Unmarshal(body, &workoutReq)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), 500)\n\t\treturn\n\t}\n\n\terr = s.DataRepository.CreateWorkout(&workoutReq)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), 500)\n\t\treturn\n\t}\n}",
"func (planetDeliveryRest *PlanetDeliveryRest) Create(w http.ResponseWriter, r *http.Request) {\n\tvar planet entity.Planet\n\n\terr := json.NewDecoder(r.Body).Decode(&planet)\n\tif err != nil {\n\t\tError(w, \"Failed to decode JSON\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tplanetToInsert := *entity.NewPlanet(planet.Name, planet.Climate, planet.Terrain)\n\n\tnewPlanet, err := planetDeliveryRest.planetUsecase.Create(r.Context(), planetToInsert)\n\tif err != nil {\n\t\tError(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tJSON(w, newPlanet, http.StatusCreated)\n}",
"func (c *Controller) Create(w http.ResponseWriter, r *http.Request) {\n\tvar err error\n\ttoken := r.FormValue(\"token\")\n\tmaxPlayers := r.FormValue(\"maxPlayers\")\n\tname := r.FormValue(\"name\")\n\tservice, err := createDSTService(token, maxPlayers, name)\n\tif c.CheckError(err, http.StatusBadRequest, w) {\n\t\treturn\n\t}\n\tc.SendJSON(\n\t\tw,\n\t\tr,\n\t\tservice,\n\t\thttp.StatusOK,\n\t)\n}",
"func (v ToursResource) Create(c buffalo.Context) error {\n\t// Allocate an empty Tour\n\ttour := &models.Tour{}\n\n\t// Bind tour to the html form elements\n\tif err := c.Bind(tour); err != nil {\n\t\treturn err\n\t}\n\n\t// Get the DB connection from the context\n\ttx, ok := c.Value(\"tx\").(*pop.Connection)\n\tif !ok {\n\t\treturn errors.New(\"no transaction found\")\n\t}\n\n\t// Validate the data from the html form\n\tverrs, err := tx.ValidateAndCreate(tour)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif verrs.HasAny() {\n\t\t// Make the errors available inside the html template\n\t\tc.Set(\"errors\", verrs)\n\n\t\t// Render again the new.html template that the user can\n\t\t// correct the input.\n\t\treturn c.Render(422, r.Auto(c, tour))\n\t}\n\n\t// If there are no errors set a success message\n\tc.Flash().Add(\"success\", T.Translate(c, \"tour.created.success\"))\n\t// and redirect to the tours index page\n\treturn c.Render(201, r.Auto(c, tour))\n}",
"func ProjectCreate(w http.ResponseWriter, r *http.Request) {\n\n\t// Init output\n\toutput := []byte(\"\")\n\n\t// Add content type header to the response\n\tcontentType := \"application/json\"\n\tcharset := \"utf-8\"\n\tw.Header().Add(\"Content-Type\", fmt.Sprintf(\"%s; charset=%s\", contentType, charset))\n\n\t// Grab url path variables\n\turlVars := mux.Vars(r)\n\turlProject := urlVars[\"project\"]\n\n\t// Grab context references\n\trefStr := gorillaContext.Get(r, \"str\").(stores.Store)\n\trefUserUUID := gorillaContext.Get(r, \"auth_user_uuid\").(string)\n\n\t// Read POST JSON body\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\terr := APIErrorInvalidRequestBody()\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Parse pull options\n\tpostBody, err := projects.GetFromJSON(body)\n\tif err != nil {\n\t\terr := APIErrorInvalidArgument(\"Project\")\n\t\trespondErr(w, err)\n\t\tlog.Error(string(body[:]))\n\t\treturn\n\t}\n\n\tuuid := uuid.NewV4().String() // generate a new uuid to attach to the new project\n\tcreated := time.Now().UTC()\n\t// Get Result Object\n\n\tres, err := projects.CreateProject(uuid, urlProject, created, refUserUUID, postBody.Description, refStr)\n\n\tif err != nil {\n\t\tif err.Error() == \"exists\" {\n\t\t\terr := APIErrorConflict(\"Project\")\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Output result to JSON\n\tresJSON, err := res.ExportJSON()\n\tif err != nil {\n\t\terr := APIErrExportJSON()\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Write response\n\toutput = []byte(resJSON)\n\trespondOK(w, output)\n\n}",
"func (c *SpaceIterationsController) Create(ctx *app.CreateSpaceIterationsContext) error {\n\tcurrentUser, err := login.ContextIdentity(ctx)\n\tif err != nil {\n\t\treturn jsonapi.JSONErrorResponse(ctx, goa.ErrUnauthorized(err.Error()))\n\t}\n\t// Validate Request\n\tif ctx.Payload.Data == nil {\n\t\treturn jsonapi.JSONErrorResponse(ctx, errors.NewBadParameterError(\"data\", nil).Expected(\"not nil\"))\n\t}\n\treqIter := ctx.Payload.Data\n\tif reqIter.Attributes.Name == nil {\n\t\treturn jsonapi.JSONErrorResponse(ctx, errors.NewBadParameterError(\"data.attributes.name\", nil).Expected(\"not nil\"))\n\t}\n\n\treturn application.Transactional(c.db, func(appl application.Application) error {\n\t\ts, err := appl.Spaces().Load(ctx, ctx.SpaceID)\n\t\tif err != nil {\n\t\t\treturn jsonapi.JSONErrorResponse(ctx, goa.ErrNotFound(err.Error()))\n\t\t}\n\t\tif !uuid.Equal(*currentUser, s.OwnerID) {\n\t\t\tlog.Warn(ctx, map[string]interface{}{\n\t\t\t\t\"space_id\": ctx.SpaceID,\n\t\t\t\t\"space_owner\": s.OwnerID,\n\t\t\t\t\"current_user\": *currentUser,\n\t\t\t}, \"user is not the space owner\")\n\t\t\treturn jsonapi.JSONErrorResponse(ctx, errors.NewForbiddenError(\"user is not the space owner\"))\n\t\t}\n\t\t// Put iteration under root iteration\n\t\trootIteration, err := appl.Iterations().Root(ctx, ctx.SpaceID)\n\t\tif err != nil {\n\t\t\treturn jsonapi.JSONErrorResponse(ctx, goa.ErrNotFound(err.Error()))\n\t\t}\n\t\tchildPath := append(rootIteration.Path, rootIteration.ID)\n\t\tnewItr := iteration.Iteration{\n\t\t\tSpaceID: ctx.SpaceID,\n\t\t\tName: *reqIter.Attributes.Name,\n\t\t\tStartAt: reqIter.Attributes.StartAt,\n\t\t\tEndAt: reqIter.Attributes.EndAt,\n\t\t\tPath: childPath,\n\t\t}\n\t\tif reqIter.Attributes.Description != nil {\n\t\t\tnewItr.Description = reqIter.Attributes.Description\n\t\t}\n\t\terr = appl.Iterations().Create(ctx, &newItr)\n\t\tif err != nil {\n\t\t\treturn jsonapi.JSONErrorResponse(ctx, err)\n\t\t}\n\t\t// For create, count will always be zero hence no need to query\n\t\t// by passing empty map, updateIterationsWithCounts will be able to put zero values\n\t\twiCounts := make(map[string]workitem.WICountsPerIteration)\n\t\tlog.Info(ctx, map[string]interface{}{\n\t\t\t\"iteration_id\": newItr.ID,\n\t\t\t\"wiCounts\": wiCounts,\n\t\t}, \"wicounts for created iteration %s -> %v\", newItr.ID.String(), wiCounts)\n\n\t\tvar responseData *app.Iteration\n\t\tif newItr.Path.IsEmpty() == false {\n\t\t\tallParentsUUIDs := newItr.Path\n\t\t\titerations, error := appl.Iterations().LoadMultiple(ctx, allParentsUUIDs)\n\t\t\tif error != nil {\n\t\t\t\treturn jsonapi.JSONErrorResponse(ctx, err)\n\t\t\t}\n\t\t\titrMap := make(iterationIDMap)\n\t\t\tfor _, itr := range iterations {\n\t\t\t\titrMap[itr.ID] = itr\n\t\t\t}\n\t\t\tresponseData = ConvertIteration(ctx.Request, newItr, parentPathResolver(itrMap), updateIterationsWithCounts(wiCounts))\n\t\t} else {\n\t\t\tresponseData = ConvertIteration(ctx.Request, newItr, updateIterationsWithCounts(wiCounts))\n\t\t}\n\t\tres := &app.IterationSingle{\n\t\t\tData: responseData,\n\t\t}\n\t\tctx.ResponseData.Header().Set(\"Location\", rest.AbsoluteURL(ctx.Request, app.IterationHref(res.Data.ID)))\n\t\treturn ctx.Created(res)\n\t})\n}",
"func newWoc() *wfOperationCtx {\n\twf := &wfv1.Workflow{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: \"test-wf\",\n\t\t\tNamespace: \"default\",\n\t\t},\n\t}\n\twoc := wfOperationCtx{\n\t\twf: wf,\n\t\torig: wf.DeepCopyObject().(*wfv1.Workflow),\n\t\tupdated: false,\n\t\tlog: log.WithFields(log.Fields{\n\t\t\t\"workflow\": wf.ObjectMeta.Name,\n\t\t\t\"namespace\": wf.ObjectMeta.Namespace,\n\t\t}),\n\t\tcontroller: &WorkflowController{\n\t\t\tConfig: WorkflowControllerConfig{\n\t\t\t\tExecutorImage: \"executor:latest\",\n\t\t\t},\n\t\t\tclientset: fake.NewSimpleClientset(),\n\t\t},\n\t\tcompletedPods: make(map[string]bool),\n\t}\n\treturn &woc\n}",
"func NewApp(root string) *App {\n\n CheckEnv()\n\n // Use negroni for middleware\n ne := negroni.New()\n\n // Use gorilla/mux for routing\n ro := mux.NewRouter()\n\n // Use Render for template. Pass in path to templates folder\n // as well as asset helper functions.\n re := render.New(render.Options{\n Directory: filepath.Join(root, \"templates\"),\n Layout: \"layouts/layout\",\n Extensions: []string{\".html\"},\n Funcs: []template.FuncMap{\n\t\t\tAssetHelpers(root),\n\t\t},\n })\n qre := render.New(render.Options{\n Directory: filepath.Join(root, \"templates\"),\n Layout: \"layouts/message\",\n Extensions: []string{\".html\"},\n Funcs: []template.FuncMap{\n\t\t\tAssetHelpers(root),\n\t\t},\n })\n\n // Establish connection to DB as specificed in database.go\n db := NewDB()\n\n // Add middleware to the stack\n ne.Use(negroni.NewRecovery())\n ne.Use(negroni.NewLogger())\n ne.Use(NewAssetHeaders())\n ne.Use(negroni.NewStatic(http.Dir(\"public\")))\n ne.UseHandler(ro)\n\n train.Config.SASS.DebugInfo = true\n train.Config.SASS.LineNumbers = true\n train.Config.Verbose = true\n train.Config.BundleAssets = true\n //ZZZtrain.ConfigureHttpHandler(ro)\n\n // Return a new App struct with all these things.\n return &App{ne, ro, re, qre, db}\n}",
"func (c *Client) CreateIrActionsActWindows(iaas []*IrActionsActWindow) ([]int64, error) {\n\tvar vv []interface{}\n\tfor _, v := range iaas {\n\t\tvv = append(vv, v)\n\t}\n\treturn c.Create(IrActionsActWindowModel, vv)\n}",
"func CreateGame(w http.ResponseWriter, r *http.Request) {\n\t//----------------------------------------------------------------------------\n\t// Initialize an empty Game model\n\t//----------------------------------------------------------------------------\n\tdata := model.Game{}\n\t\n\t//----------------------------------------------------------------------------\n\t// Parse the body into a Game model structure\n\t//----------------------------------------------------------------------------\n\tutils.ParseBody(r, data)\n\n\t//----------------------------------------------------------------------------\n\t// Delegate to the Game data access object to create\n\t//----------------------------------------------------------------------------\n\trequestResult := GameDAO.CreateGame( data )\n\t\n\t//----------------------------------------------------------------------------\n\t// Marshal the model into a JSON object\n\t//----------------------------------------------------------------------------\n\tres,_ := json.Marshal(requestResult)\n\n\tw.WriteHeader(http.StatusOK)\n\tw.Write(res)\n}",
"func New() *WarmerImpl {\n\treturn &WarmerImpl{}\n}",
"func (ah *AppHandler) CreateMovie(w http.ResponseWriter, r *http.Request) {\n\tlogger := *hlog.FromRequest(r)\n\n\t// Initialize the MovieController\n\tmc := moviecontroller.NewMovieController(ah.App)\n\n\t// Send the request context and request struct to the controller\n\t// Receive a response or error in return\n\tresponse, err := mc.CreateMovie(r)\n\tif err != nil {\n\t\terrs.HTTPErrorResponse(w, logger, err)\n\t\treturn\n\t}\n\n\t// Encode response struct to JSON for the response body\n\terr = json.NewEncoder(w).Encode(*response)\n\tif err != nil {\n\t\terrs.HTTPErrorResponse(w, logger, errs.E(errs.Internal, err))\n\t\treturn\n\t}\n}",
"func (c *Controller) CreateMembership(w http.ResponseWriter, req *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\n\t//connect to database\n\tdb, err := c.Session.Connect()\n\tif err != nil {\n\t\terror := models.RespError{Error: \"Failed to connect, cannot reach database\"}\n\t\tresp, _ := json.Marshal(error)\n\t\thttp.Error(w, string(resp), 400)\n\t\tc.Logger.Logging(req, 400)\n\t\treturn\n\t}\n\tdefer db.Close()\n\n\tauth := models.Authentication{Decoded: context.Get(req, \"decoded\")}\n\tok, err := auth.Authorize(db, 3)\n\tif err != nil {\n\t\terror := models.RespError{Error: \"Failed to authorize, error during authorization\"}\n\t\tresp, _ := json.Marshal(error)\n\t\thttp.Error(w, string(resp), 400)\n\t\tc.Logger.Logging(req, 400)\n\t\treturn\n\t}\n\tif !ok {\n\t\terror := models.RespError{Error: \"Failed to authorize, error during authorization. Make sure you have permissions to use this route.\"}\n\t\tresp, _ := json.Marshal(error)\n\t\thttp.Error(w, string(resp), 401)\n\t\tc.Logger.Logging(req, 401)\n\t\treturn\n\t}\n\n\tparams := mux.Vars(req)\n\tid, err := strconv.Atoi(params[\"id\"])\n\tif err != nil {\n\t\terror := models.RespError{Error: \"Id is required in route\"}\n\t\tresp, _ := json.Marshal(error)\n\t\thttp.Error(w, string(resp), 400)\n\t\tc.Logger.Logging(req, 400)\n\t\treturn\n\t}\n\n\ttranstype := req.URL.Query().Get(\"type\")\n\tif transtype == \"\" {\n\t\ttranstype = \"Unknown\"\n\t}\n\n\tvar membership models.Membership\n\terr = json.NewDecoder(req.Body).Decode(&membership)\n\tif err != nil {\n\t\terror := models.RespError{Error: \"Failed to parse request. Please make sure request is valid format\"}\n\t\tresp, _ := json.Marshal(error)\n\t\thttp.Error(w, string(resp), 404)\n\t\tc.Logger.Logging(req, 404)\n\t\treturn\n\t}\n\n\tplayer := models.Player{Id: id}\n\terr = player.GetPlayer(db)\n\tif err != nil {\n\t\terror := models.RespError{Error: \"Failed to find player\"}\n\t\tresp, _ := json.Marshal(error)\n\t\thttp.Error(w, string(resp), 404)\n\t\tc.Logger.Logging(req, 404)\n\t\treturn\n\t}\n\tmembership.EmployeeID = auth.EmployeeID\n\tmembership.PlayerID = player.Id\n\tmembership.PlayTime = (membership.Amount / 10) * 3600\n\tmembership.ActiveDate = time.Now().Format(time.RFC3339)\n\tmembership.DeactiveDate = time.Now().AddDate(20, 0, 0).Format(time.RFC3339)\n\tmembership.Active = true\n\n\terr = membership.CreateMembership(db, transtype)\n\tif err != nil {\n\t\terror := models.RespError{Error: \"Failed to create membership for player\"}\n\t\tresp, _ := json.Marshal(error)\n\t\thttp.Error(w, string(resp), 404)\n\t\tc.Logger.Logging(req, 404)\n\t\treturn\n\t}\n\tw.WriteHeader(http.StatusOK)\n\tc.Logger.Logging(req, 200)\n\tjson.NewEncoder(w).Encode(membership)\n\treturn\n}",
"func Generate(categoryName string, area geography.Area, originCulture culture.Culture) (Town, error) {\n\tvar newProducers []profession.Profession\n\tvar producers []profession.Profession\n\tvar newResources []resource.Resource\n\n\ttown := Town{}\n\n\tif categoryName == \"random\" {\n\t\ttownCategory, err := getRandomWeightedCategory()\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(townGenerationError, err)\n\t\t\treturn Town{}, err\n\t\t}\n\t\ttown.Category = townCategory\n\t} else {\n\t\ttown.Category = getCategoryByName(categoryName)\n\t}\n\n\ttown.Geography = area\n\ttown.Culture = originCulture\n\n\tname, err := town.Culture.Language.RandomTownName()\n\tif err != nil {\n\t\terr = fmt.Errorf(townGenerationError, err)\n\t\treturn Town{}, err\n\t}\n\ttown.Name = name\n\n\ttown.Population = generateRandomPopulation(town.Category)\n\n\ttown.BuildingStyle = town.Culture.BuildingStyle\n\n\tmayor, err := town.generateMayor()\n\tif err != nil {\n\t\terr = fmt.Errorf(townGenerationError, err)\n\t\treturn Town{}, err\n\t}\n\ttown.Mayor = mayor\n\n\tresources := area.GetResources()\n\n\tfor i := 0; i < town.Category.ProductionIterations; i++ {\n\t\tnewProducers, err = getProducers(town.Population, resources)\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(townGenerationError, err)\n\t\t\treturn Town{}, err\n\t\t}\n\t\tnewResources, err = goods.Produce(newProducers, resources)\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(townGenerationError, err)\n\t\t\treturn Town{}, err\n\t\t}\n\t\tresources = append(resources, newResources...)\n\t\tproducers = append(producers, newProducers...)\n\t}\n\n\ttown.Resources = resources\n\ttown.NotableProducers = producers\n\n\ttown.Exports = town.generateRandomExports()\n\timports, err := town.generateRandomImports()\n\tif err != nil {\n\t\terr = fmt.Errorf(townGenerationError, err)\n\t\treturn Town{}, err\n\t}\n\ttown.Imports = imports\n\n\treturn town, nil\n}",
"func MakeController(datastore datastore.DataStore) (*Controller, error) {\n\n // Check that we can retrieve an inventory from the datastore\n _, error := datastore.GetInventory()\n if error != nil {\n return nil, error\n }\n\n controller := Controller{\n datastore: datastore,\n register: MakeRegister(),\n }\n return &controller, nil\n}",
"func (a *Agent) startNewAction() {\n\tactionTypes := a.mind.actionTypes()\n\n\thighestValue := 0.0\n\tvar bestActionTypes []actionType\n\tfor _, t := range actionTypes {\n\t\tisActive := false\n\t\t// if we currently have an active action, we do not want to start a new action\n\t\tfor _, ac := range a.activity.activeActions {\n\t\t\tif ac.getState() == actionStateActive {\n\t\t\t\tisActive = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\tif isActive {\n\t\t\treturn\n\t\t}\n\n\t\t// TODO what if an action cannot be started\n\t\t// highest value is to eat an apple, but there is no apple, we should somehow start thinking\n\t\t// about how to obtain an apple\n\n\t\tv := actionTypeValue(t)\n\t\tif v >= highestValue {\n\t\t\tcanStart := true\n\t\t\tfor startCond := range t.getConditions()[actionConditionTypeStart] {\n\t\t\t\tif !startCond.isSatisfied(a) {\n\t\t\t\t\tcanStart = false\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif canStart {\n\t\t\t\tif v > highestValue {\n\t\t\t\t\thighestValue = v\n\t\t\t\t\tbestActionTypes = []actionType{}\n\t\t\t\t}\n\t\t\t\tbestActionTypes = append(bestActionTypes, t)\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(bestActionTypes) == 0 {\n\t\treturn\n\t}\n\n\tbestActionType := bestActionTypes[rand.Intn(len(bestActionTypes))]\n\tfor startCondition := range bestActionType.getConditions()[actionConditionTypeStart] {\n\t\tif !startCondition.isSatisfied(a) {\n\t\t\treturn\n\t\t}\n\t}\n\n\tnewAction := bestActionType.instantiate().(action)\n\ta.activity.activeActions = append(a.activity.activeActions, newAction)\n\ta.mind.addItem(bestActionType, 1.0)\n\n\t// add pre-action conditions for hypothesis training\n\tfor cond := range a.getConditions() {\n\t\tpreActionConditions := newAction.getType().getConditions()[actionConditionTypeObservedAtStart]\n\t\tpreActionConditions[cond] = true\n\t\tnewAction.getPreConditions()[cond] = true\n\t}\n}",
"func Index(w http.ResponseWriter, r *http.Request) {\n\ttmpl.ExecuteTemplate(w, \"New\", nil)\n}",
"func Create(deployment *Deployment) (*Deployment, error) {\n\targs := []string{\n\t\t\"deployment-manager\",\n\t\t\"deployments\",\n\t\t\"create\",\n\t\tdeployment.config.Name,\n\t\t\"--config\",\n\t\tdeployment.configFile,\n\t\t\"--project\",\n\t\tdeployment.config.Project,\n\t}\n\t_, err := runGCloud(args...)\n\tif err != nil {\n\t\tlog.Printf(\"Failed to create deployment: %v, error: %v\", deployment, err)\n\t\treturn nil, err\n\t}\n\toutputs, err := GetOutputs(deployment.config.Name, deployment.config.Project)\n\tif err != nil {\n\t\tlog.Printf(\"Failed to get outputs for deployment: %v, error: %v\", deployment, err)\n\t\treturn nil, err\n\t}\n\tdeployment.Outputs = outputs\n\treturn deployment, nil\n}",
"func (h *Handler) NewAction(act action.Action, settings map[string]interface{}) *Action {\n\n\tvalue := reflect.ValueOf(act)\n\tvalue = value.Elem()\n\tref := value.Type().PkgPath()\n\n\tnewAct := &Action{ref: ref, settings: settings}\n\th.actions = append(h.actions, newAct)\n\n\treturn newAct\n}",
"func NewCreateGoalController(cgtRepos *persistence.Services, logger *log.Logger, authorizationService authorization.JwtService) Controller {\n\tcreateGoalUsecase := usecase.NewCreateGoalUsecase(&cgtRepos.Achiever, &cgtRepos.Goal, authorizationService)\n\n\tctrl := &createGoalController{\n\t\tUsecase: createGoalUsecase,\n\t\tLogger: logger,\n\t\tAuthorization: authorizationService,\n\t}\n\treturn ctrl\n}",
"func (w *RandomWorld) CreateRandomFlyer() *GoWorld.Being {\n\t// Create an empty being\n\tbeing := &GoWorld.Being{ID: uuid.New()}\n\tbeing.Type = \"Flying\"\n\n\t// Give the being the basic necessities\n\tbeing.Hunger = hungerRange.randomFloat()\n\tbeing.Thirst = thirstRange.randomFloat()\n\tbeing.WantsChild = wantsChildRange.randomFloat()\n\n\t// Shape the being\n\tbeing.LifeExpectancy = lifeExpectancyRange.randomFloat()\n\tbeing.VisionRange = visionRange.randomFloat()\n\tbeing.Speed = speedRange.randomFloat()\n\tbeing.Durability = durabilityRange.randomFloat()\n\tbeing.Stress = stressRange.randomFloat()\n\tbeing.Size = sizeRange.randomFloat()\n\tbeing.Gender = randomGender()\n\tbeing.Fertility = fertilityRange.randomFloat()\n\tbeing.MutationRate = mutationRange.randomFloat()\n\n\t// Flying beings 'feel' home in the forest, but can spawn anywhere\n\t// Create some random coordinates within the world limits\n\trX := rand.Intn(w.Width)\n\trY := rand.Intn(w.Height)\n\toverflow := 0\n\t// If no being present at location set it as the spawn point\n\tfor w.TerrainSpots[rX][rY].Being != uuid.Nil {\n\t\trX = rand.Intn(w.Width)\n\t\trY = rand.Intn(w.Height)\n\t\t// Recover somehow if we look for a location for too long\n\t\toverflow++\n\t\tif overflow > 100000 {\n\t\t\t// Todo handle the infinite loop a little nicer than panicking\n\t\t\tpanic(\"error placing flying being: tried 100k random spots and all occupied\")\n\t\t}\n\t}\n\tbeing.Position.X = rX\n\tbeing.Position.Y = rY\n\tbeing.Habitat = Surfaces[2].ID\n\n\treturn being\n}",
"func (srv *Server) Create(w http.ResponseWriter, r *http.Request) {\n\tif r.Method != \"PUT\" {\n\t\thttp.Error(w, \"Please use a PUT request to create an application.\", http.StatusBadRequest)\n\t\treturn\n\t}\n\t// Read in body\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\thttp.Error(w, \"Failed to read body of request\", http.StatusInternalServerError)\n\t}\n\t// Try to parse the metadata content\n\tmetadata := &types.ApplicationMetadata{}\n\terr = yaml.Unmarshal(body, metadata)\n\tif err != nil {\n\t\thttp.Error(w, \"Failed to parse YAML input. This likely indicates malformed request body. Verify the payload fields and parameter types are correct.\", http.StatusBadRequest)\n\t\tlog.Info(\"YAML parse error\")\n\t\treturn\n\t}\n\n\t// Validate input\n\terr = srv.Validate.Struct(metadata)\n\tif err != nil {\n\t\t// If we fail to validate, automatically return 400\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\tw.Write([]byte(\"Failed to validate input of the following parameters:\\n\"))\n\n\t\t// Be helpful and tell users what fails in their request\n\t\tfor _, err := range err.(validator.ValidationErrors) {\n\t\t\tfmt.Fprintf(w, \"%s has invalid value %s\\n\", err.Namespace(), err.Value())\n\t\t}\n\t\tlog.Info(\"Rejected invalid input.\")\n\t\treturn\n\t}\n\n\t// Check if a conflicting application already exists\n\tif util.CheckTitle(srv.Applications, metadata.Title) {\n\t\tw.WriteHeader(http.StatusConflict)\n\t\tfmt.Fprintf(w, \"An application with title %s already exists, please use a unique title.\", metadata.Title)\n\t\treturn\n\t}\n\n\tw.WriteHeader(http.StatusCreated)\n\tsrv.Applications = append(srv.Applications, metadata)\n\tlog.WithFields(log.Fields{\"name\": metadata.Title}).Info(\"Object added\")\n\treturn\n}",
"func New(w http.ResponseWriter, r *http.Request) {\r\n\ttmpl.ExecuteTemplate(w, \"New\", nil)\r\n}",
"func New(w http.ResponseWriter, r *http.Request) {\r\n\ttmpl.ExecuteTemplate(w, \"New\", nil)\r\n}",
"func CreateProject(w http.ResponseWriter, r *http.Request) {\n\tvar p models.Project\n\n\tu := mw.GetUser(r.Context())\n\tif u == nil || !u.IsAdmin {\n\t\tw.WriteHeader(403)\n\t\tw.Write(apiError(\"you must be logged in as a system administrator to create a project\"))\n\t\treturn\n\t}\n\n\tdecoder := json.NewDecoder(r.Body)\n\terr := decoder.Decode(&p)\n\tif err != nil {\n\t\tw.WriteHeader(400)\n\t\tw.Write(apiError(\"invalid body\"))\n\t\tlog.Println(err)\n\t\treturn\n\t}\n\n\terr = Store.Projects().New(&p)\n\tif err != nil {\n\t\tw.WriteHeader(400)\n\t\tw.Write(apiError(err.Error()))\n\t\tlog.Println(err)\n\t\treturn\n\t}\n\n\tsendJSON(w, p)\n}",
"func NewActionStats() ActionStats {\n stats := ActionStats{}\n stats.stats = make(map[string]*actionData)\n return stats\n}",
"func (j *JDB) Create(w http.ResponseWriter, r *http.Request) {\n\tpath := mux.Vars(r)[\"host\"]\n\tcol := mux.Vars(r)[\"col\"]\n\tid := mux.Vars(r)[\"slug\"]\n\tdata := post.Post{}\n\terr := r.ParseForm()\n\tif err != nil {\n\t\t// Handle error\n\t}\n\tvar post post.Post\n\t// r.PostForm is a map of our POST form values\n\terr = decoder.Decode(&post, r.PostForm)\n\tif err != nil {\n\t\t// Handle error\n\t}\n\n\tif err := j.db.Write(path+\"/\"+col, id, data); err != nil {\n\t\tfmt.Println(\"Error\", err)\n\t}\n}",
"func newDeploymentForCR(cr *tesseractv1alpha1.OutgoingPortal) *appsv1.Deployment {\n\tvar replicas int32 = 1\n\n\treturn &appsv1.Deployment{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: cr.Name + \"-portal\",\n\t\t\tNamespace: cr.Namespace,\n\t\t\tLabels: map[string]string{\n\t\t\t\t\"app\": cr.Name + \"-portal\",\n\t\t\t},\n\t\t},\n\t\tSpec: appsv1.DeploymentSpec{\n\t\t\tReplicas: &replicas,\n\t\t\tSelector: &metav1.LabelSelector{\n\t\t\t\tMatchLabels: map[string]string{\n\t\t\t\t\t\"app\": cr.Name + \"-portal\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tTemplate: corev1.PodTemplateSpec{\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tLabels: map[string]string{\n\t\t\t\t\t\t\"app\": cr.Name + \"-portal\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tSpec: corev1.PodSpec{\n\t\t\t\t\tContainers: []corev1.Container{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"busybox\",\n\t\t\t\t\t\t\tImage: \"envoyproxy/envoy:v1.10.0\",\n\t\t\t\t\t\t\tPorts: []corev1.ContainerPort{\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\tName: \"proxy\",\n\t\t\t\t\t\t\t\t\tContainerPort: 80,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\tName: \"admin\",\n\t\t\t\t\t\t\t\t\tContainerPort: 8001,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tCommand: []string{\n\t\t\t\t\t\t\t\t\"/usr/local/bin/envoy\",\n\t\t\t\t\t\t\t\t\"-c\",\n\t\t\t\t\t\t\t\t\"/config/envoy.yaml\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\tName: \"config\",\n\t\t\t\t\t\t\t\t\tMountPath: \"/config\",\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\tName: \"secret\",\n\t\t\t\t\t\t\t\t\tMountPath: \"/secret\",\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tVolumes: []corev1.Volume{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"config\",\n\t\t\t\t\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\t\t\t\t\tConfigMap: &corev1.ConfigMapVolumeSource{\n\t\t\t\t\t\t\t\t\tLocalObjectReference: corev1.LocalObjectReference{\n\t\t\t\t\t\t\t\t\t\tName: cr.Name + \"-portal\",\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"secret\",\n\t\t\t\t\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\t\t\t\t\tSecret: &corev1.SecretVolumeSource{\n\t\t\t\t\t\t\t\t\tSecretName: cr.Name + \"-portal\",\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}",
"func (v ProspectsResource) Create(c buffalo.Context) error {\n\t// Allocate an empty Prospect\n\tprospect := &models.Prospect{}\n\n\t// Bind prospect to the html form elements\n\tif err := c.Bind(prospect); err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\n\t// Get the DB connection from the context\n\ttx := c.Value(\"tx\").(*pop.Connection)\n\n\t// Validate the data from the html form\n\tverrs, err := tx.ValidateAndCreate(prospect)\n\tif err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\n\tif verrs.HasAny() {\n\t\t// Make prospect available inside the html template\n\t\tc.Set(\"prospect\", prospect)\n\n\t\t// Make the errors available inside the html template\n\t\tc.Set(\"errors\", verrs)\n\n\t\t// Render again the new.html template that the user can\n\t\t// correct the input.\n\t\treturn c.Render(422, r.HTML(\"prospects/new.html\"))\n\t}\n\n\t// If there are no errors set a success message\n\tc.Flash().Add(\"success\", \"Prospect was created successfully\")\n\n\t// and redirect to the prospects index page\n\treturn c.Redirect(302, \"/prospects/%s\", prospect.ID)\n}",
"func (router *router) createProject(request *restful.Request, response *restful.Response) {\n\tproject := &model.Project{}\n\tcreatedProject, err := router.projectManager.CreateProject(project)\n\tif err != nil {\n\t\tlog.Fatalf(\"create Project failed,err msg%s\", err)\n\t}\n\tresponse.WriteHeaderAndEntity(http.StatusCreated, createdProject)\n}",
"func Create(logger *zap.SugaredLogger, tfDir, bucket, attackTag string) error {\n\terr := InitIfNeeded(logger, tfDir, bucket, attackTag)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlogger.Info(\"Running terraform plan\")\n\t_, err = Terraform(tfDir, \"plan\", bucket)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlogger.Info(\"Running terraform apply\")\n\t_, err = Terraform(tfDir, \"apply\", bucket)\n\treturn err\n}",
"func (h *MovieHandler) create(w http.ResponseWriter, r *http.Request) {\n\t// Parse the page form values.\n\terr := r.ParseForm()\n\tif err != nil {\n\t\t// Render an error response and set status code.\n\t\thttp.Error(w, \"Unprocessable Entity\", http.StatusUnprocessableEntity)\n\t\tlog.Println(\"Error:\", err)\n\t\treturn\n\t}\n\n\t// Create a temporary movie struct to unmarshal the request body into.\n\tmovie := &service.Movie{\n\t\tTitle: r.FormValue(\"title\"),\n\t\tImdbID: r.FormValue(\"imdb_id\"),\n\t}\n\n\t// Call the CreateMovie to add the new movie to the database.\n\tid, err := h.MovieService.CreateMovie(movie)\n\tif err != nil {\n\t\t// Render an error response and set status code.\n\t\thttp.Error(w, \"Internal Server Error\", http.StatusInternalServerError)\n\t\tlog.Println(\"Error:\", err)\n\t\treturn\n\t}\n\n\t// Call GetMovie to get the movie from the database.\n\tif _, err := h.MovieService.GetMovie(id); err != nil {\n\t\t// Render an error response and set status code.\n\t\thttp.Error(w, \"Not Found\", http.StatusNotFound)\n\t\tlog.Println(\"Error:\", err)\n\t} else {\n\t\thttp.Redirect(w, r, \"/movies/\"+strconv.FormatInt(id, 10), http.StatusCreated)\n\t\treturn\n\t}\n}"
] | [
"0.5422324",
"0.53584725",
"0.53448",
"0.5287516",
"0.52773815",
"0.5246244",
"0.52251047",
"0.51807857",
"0.51713514",
"0.511613",
"0.51064235",
"0.5069214",
"0.5046266",
"0.5044224",
"0.5038556",
"0.50373405",
"0.5035873",
"0.49775773",
"0.49586117",
"0.49527675",
"0.49415317",
"0.49260935",
"0.48839775",
"0.4879449",
"0.487664",
"0.4876575",
"0.48728073",
"0.48627123",
"0.48540422",
"0.48482248",
"0.4839693",
"0.4825478",
"0.48160395",
"0.48157406",
"0.48063716",
"0.47945446",
"0.4788718",
"0.47877687",
"0.47648484",
"0.47609606",
"0.47530872",
"0.47421438",
"0.4738749",
"0.4735902",
"0.4734885",
"0.4733615",
"0.47287306",
"0.47281215",
"0.4719085",
"0.4712222",
"0.47030133",
"0.46924695",
"0.46792898",
"0.46712968",
"0.46640536",
"0.46633357",
"0.46374828",
"0.46366334",
"0.46361682",
"0.46278286",
"0.46223396",
"0.46158147",
"0.461352",
"0.4613381",
"0.46125767",
"0.46084318",
"0.46071407",
"0.4601797",
"0.45857993",
"0.45853883",
"0.45853636",
"0.4573249",
"0.45653477",
"0.45634767",
"0.45560482",
"0.45454943",
"0.45425376",
"0.45330223",
"0.4526887",
"0.45258814",
"0.45208877",
"0.4518464",
"0.4513581",
"0.45122558",
"0.45092782",
"0.4505428",
"0.45053384",
"0.45037583",
"0.45028228",
"0.44942087",
"0.4493451",
"0.4493451",
"0.44910187",
"0.448767",
"0.44850838",
"0.44829783",
"0.4482326",
"0.44755626",
"0.44755405",
"0.44691163"
] | 0.57780945 | 0 |
Creates a new civil war | func (s *State) NewCivilWar(target pb.ProvinceId) bool { // TODO: Error return
if s.IsAtWar(target) || s.IsSiteOfConflict(target) {
return false
}
c := &Conflict{
name: "Civil War", // TODO
length: 0,
attackers: Faction{
rebels: *(s.Get(target).Dissidents()),
progress: 0,
},
defenders: Faction{
members: []pb.ProvinceId{target},
progress: 0,
},
goal: s.Settings().GetConflictGoal(pb.ConflictType_CIVIL_WAR),
base_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_CIVIL_WAR),
locations: []pb.ProvinceId{target},
conflict_type: pb.ConflictType_CIVIL_WAR,
}
s.Conflicts[target] = c
return true
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (s *State) NewColonialWar(target pb.ProvinceId) bool { // TODO: Error return\n\tif s.IsAtWar(target) || s.IsSiteOfConflict(target) || s.Get(target).Occupier() != pb.ProvinceId_NONE {\n\t\treturn false\n\t}\n\tc := &Conflict{\n\t\tname: \"Colonial War\", // TODO\n\t\tlength: 0,\n\t\tattackers: Faction{\n\t\t\t// Dissidents\n\t\t\tprogress: 0,\n\t\t},\n\t\tdefenders: Faction{\n\t\t\tmembers: []pb.ProvinceId{s.Get(target).Occupier()},\n\t\t\tprogress: 0,\n\t\t},\n\t\tgoal: s.Settings().GetConflictGoal(pb.ConflictType_COLONIAL_WAR),\n\t\tbase_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_COLONIAL_WAR),\n\t\tlocations: []pb.ProvinceId{target},\n\t\tconflict_type: pb.ConflictType_COLONIAL_WAR,\n\t}\n\ts.Conflicts[target] = c\n\treturn true\n}",
"func (s *State) NewConventionalWar(defenders []pb.ProvinceId, attackers []pb.ProvinceId, locations []pb.ProvinceId) bool { // TODO: Error return\n\tfor _, d := range defenders {\n\t\tif s.IsAtWar(d) || s.IsSiteOfConflict(d) {\n\t\t\treturn false\n\t\t}\n\t}\n\tfor _, a := range attackers {\n\t\tif s.IsAtWar(a) || s.IsSiteOfConflict(a) {\n\t\t\treturn false\n\t\t}\n\t}\n\tfor _, l := range locations {\n\t\tif s.IsAtWar(l) || s.IsSiteOfConflict(l) {\n\t\t\treturn false\n\t\t}\n\t}\n\t// TODO: Logic for joining wars?\n\tc := &Conflict{\n\t\tname: \"War!\", // TODO\n\t\tlength: 0,\n\t\tattackers: Faction{\n\t\t\tmembers: attackers,\n\t\t\tprogress: 0,\n\t\t},\n\t\tdefenders: Faction{\n\t\t\tmembers: defenders,\n\t\t\tprogress: 0,\n\t\t},\n\t\tgoal: s.Settings().GetConflictGoal(pb.ConflictType_CONVENTIONAL_WAR),\n\t\tbase_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_CONVENTIONAL_WAR),\n\t\tlocations: locations,\n\t\tconflict_type: pb.ConflictType_CONVENTIONAL_WAR,\n\t}\n\t// For now it maps only to the first location\n\ts.Conflicts[locations[0]] = c\n\treturn true\n}",
"func (wds *WeaponAISystem) New(w *ecs.World) {\n\n}",
"func createWorld() {\n\tspace = chipmunk.NewSpace()\n\tspace.Gravity = vect.Vect{0, -900}\n\n\tstaticBody := chipmunk.NewBodyStatic()\n\tstaticLines = []*chipmunk.Shape{\n\t\tchipmunk.NewSegment(vect.Vect{0, -600}, vect.Vect{800.0, -600}, 0),\n\t\tchipmunk.NewSegment(vect.Vect{0, -600}, vect.Vect{0, 0}, 0),\n\t\tchipmunk.NewSegment(vect.Vect{800, -600}, vect.Vect{800.0, 0}, 0),\n\t}\n\tfor _, segment := range staticLines {\n\t\t// segment.SetElasticity(0.6)\n\t\tstaticBody.AddShape(segment)\n\t}\n\tspace.AddBody(staticBody)\n}",
"func createTeam(w http.ResponseWriter, r *http.Request) {\n\tteam := models.NewTeam(\"\")\n\tskue.Create(view, team, w, r)\n}",
"func (w *RandomWorld) CreateRandomCarnivore() *GoWorld.Being {\n\t// Create an empty being\n\tbeing := &GoWorld.Being{ID: uuid.New()}\n\tbeing.Type = \"Carnivore\"\n\n\t// Give the being the basic necessities\n\tbeing.Hunger = hungerRange.randomFloat()\n\tbeing.Thirst = thirstRange.randomFloat()\n\tbeing.WantsChild = wantsChildRange.randomFloat()\n\n\t// Shape the being\n\tbeing.LifeExpectancy = lifeExpectancyRange.randomFloat()\n\tbeing.VisionRange = visionRange.randomFloat()\n\tbeing.Speed = speedRange.randomFloat()\n\tbeing.Durability = durabilityRange.randomFloat()\n\tbeing.Stress = stressRange.randomFloat()\n\tbeing.Size = sizeRange.randomFloat()\n\tbeing.Gender = randomGender()\n\tbeing.Fertility = fertilityRange.randomFloat()\n\tbeing.MutationRate = mutationRange.randomFloat()\n\n\t// Pick a random (valid) position and check which habitat it is\n\tw.ThrowBeing(being)\n\n\treturn being\n}",
"func CreateBattle(LeaderID string, BattleName string) (*Battle, error) {\n\tnewID, _ := uuid.NewUUID()\n\tid := newID.String()\n\n\tvar b = &Battle{\n\t\tBattleID: id,\n\t\tLeaderID: LeaderID,\n\t\tBattleName: BattleName,\n\t\tWarriors: make([]*Warrior, 0),\n\t\tPlans: make([]*Plan, 0),\n\t\tVotingLocked: true,\n\t\tActivePlanID: \"\",\n\t}\n\n\te := db.QueryRow(`INSERT INTO battles (id, leader_id, name) VALUES ($1, $2, $3) RETURNING id`, id, LeaderID, BattleName).Scan(&b.BattleID)\n\tif e != nil {\n\t\tlog.Println(e)\n\t\treturn nil, errors.New(\"Error Creating Battle\")\n\t}\n\n\treturn b, nil\n}",
"func CreateWarrior(WarriorName string) (*Warrior, error) {\n\tnewID, _ := uuid.NewUUID()\n\tid := newID.String()\n\n\tvar WarriorID string\n\te := db.QueryRow(`INSERT INTO warriors (id, name) VALUES ($1, $2) RETURNING id`, id, WarriorName).Scan(&WarriorID)\n\tif e != nil {\n\t\tlog.Println(e)\n\t\treturn nil, errors.New(\"Unable to create new warrior\")\n\t}\n\n\treturn &Warrior{WarriorID: WarriorID, WarriorName: WarriorName}, nil\n}",
"func makeNewGame(name string, playerNames []string) *Game {\n\tvar g = new(Game)\n\tid, err := uuid.GenUUID()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tg.ID = id\n\tg.Name = name\n\tg.Messages.Capacity = 500\n\tg.Phase = Development\n\tGames[g.ID] = g\n\tg.addMessage(fmt.Sprintf(\"Created game %s...\", g.Name))\n\tg.loadLocos()\n\tg.prepareLocos()\n\tg.initPlayers(playerNames)\n\tg.determineTurnOrder()\n\n\treturn g\n}",
"func (w *RandomWorld) CreateCarnivores(quantity int) {\n\t// Initialize each being to a random one\n\tfor i := 0; i < quantity; i++ {\n\t\t// Create random being and place it into the map\n\t\tb := w.CreateRandomCarnivore()\n\t\tw.BeingList[b.ID.String()] = b\n\t}\n}",
"func CreateWare(c *server.Context) error {\n\tvar (\n\t\terr error\n\t\taddReq ware.Ware\n\t\tconn orm.Connection\n\t)\n\n\tisAdmin := c.Request().Context().Value(\"user\").(jwtgo.MapClaims)[util.IsAdmin].(bool)\n\tif !isAdmin {\n\t\tlogger.Error(\"You don't have access\")\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrToken, nil)\n\t}\n\n\terr = c.JSONBody(&addReq)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInvalidParam, nil)\n\t}\n\n\terr = c.Validate(addReq)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInvalidParam, nil)\n\t}\n\n\tif len(addReq.Avatar) > 0 {\n\t\taddReq.Avatar, err = util.SavePicture(addReq.Avatar, \"ware/\")\n\t\tif err != nil {\n\t\t\tlogger.Error(err)\n\t\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInternalServerError, nil)\n\t\t}\n\t}\n\tif len(addReq.Image) > 0 {\n\t\taddReq.Image, err = util.SavePicture(addReq.Image, \"ware/\")\n\t\tif err != nil {\n\t\t\tlogger.Error(err)\n\t\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInternalServerError, nil)\n\t\t}\n\t}\n\tif len(addReq.DetailPic) > 0 {\n\t\taddReq.DetailPic, err = util.SavePicture(addReq.DetailPic, \"wareIntro/\")\n\t\tif err != nil {\n\t\t\tlogger.Error(err)\n\t\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInternalServerError, nil)\n\t\t}\n\t}\n\n\tconn, err = mysql.Pool.Get()\n\tdefer mysql.Pool.Release(conn)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrMysql, nil)\n\t}\n\n\terr = ware.Service.CreateWare(conn, &addReq)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\tif (len(addReq.Avatar) > 0 && !util.DeletePicture(addReq.Avatar)) ||\n\t\t\t(len(addReq.Image) > 0 && !util.DeletePicture(addReq.Image)) ||\n\t\t\t(len(addReq.DetailPic) > 0 && !util.DeletePicture(addReq.DetailPic)) {\n\t\t\tlogger.Error(errors.New(\"create ware failed and delete it's pictures go wrong, please delete picture manually\"))\n\t\t}\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrMysql, nil)\n\t}\n\n\tlogger.Info(\"create ware\", addReq.Name, \"success\")\n\treturn core.WriteStatusAndDataJSON(c, constants.ErrSucceed, nil)\n}",
"func (planetDeliveryRest *PlanetDeliveryRest) Create(w http.ResponseWriter, r *http.Request) {\n\tvar planet entity.Planet\n\n\terr := json.NewDecoder(r.Body).Decode(&planet)\n\tif err != nil {\n\t\tError(w, \"Failed to decode JSON\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tplanetToInsert := *entity.NewPlanet(planet.Name, planet.Climate, planet.Terrain)\n\n\tnewPlanet, err := planetDeliveryRest.planetUsecase.Create(r.Context(), planetToInsert)\n\tif err != nil {\n\t\tError(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tJSON(w, newPlanet, http.StatusCreated)\n}",
"func (s WashingtonPostScraper) CreateNewWashingtonPostScraper() *WashingtonPostScraper {\n\tc := colly.NewCollector()\n\t// c := colly.NewCollector(colly.Debugger(&debug.LogDebugger{}))\n\tc.UserAgent = s.UserAgent()\n\tc.IgnoreRobotsTxt = false\n\n\t// Adding this wait so AJAX can load, might need to look at https://github.com/chromedp/chromedp in the future\n\tc.Limit(&colly.LimitRule{\n\t\tDelay: 5 * time.Second,\n\t})\n\n\tscraper := WashingtonPostScraper{\n\t\tcollector: c,\n\t}\n\treturn &scraper\n}",
"func CreateProject(w http.ResponseWriter, r *http.Request) {\n\t// Get incoming data, content n' stuff\n\t// Pass those data and create em'\n\t// Return new project and response\n}",
"func New() *WarmerImpl {\n\treturn &WarmerImpl{}\n}",
"func (c *CaptainClient) CreateFormation(name string, flightID, CPU, RAM, disk int, baseName, domain string, targetCount int, preflightPlaybook string) (Formation, error) {\n\tresult, err := c.restPOST(\"formation\", map[string]interface{}{\n\t\t\"FlightID\": flightID,\n\t\t\"Name\": name,\n\t\t\"CPU\": CPU,\n\t\t\"RAM\": RAM,\n\t\t\"Disk\": disk,\n\t\t\"BaseName\": baseName,\n\t\t\"Domain\": domain,\n\t\t\"TargetCount\": targetCount,\n\t\t\"PreflightPlaybook\": preflightPlaybook,\n\t})\n\tif err != nil {\n\t\treturn Formation{}, fmt.Errorf(\"unable to create Formation:\\n%w\", err)\n\t}\n\tvar formation Formation\n\terr = json.Unmarshal(result, &formation)\n\tif err != nil {\n\t\treturn Formation{}, fmt.Errorf(\"unable to format response as Formation:\\n%w\", err)\n\t}\n\treturn formation, nil\n}",
"func NewWorldCup(w http.ResponseWriter, r *http.Request, u *mdl.User) error {\n\tc := appengine.NewContext(r)\n\tdesc := \"New World Cup Handler:\"\n\n\tif r.Method == \"POST\" {\n\t\ttournament, err := mdl.CreateWorldCup2018(c, u.Id)\n\t\tif err != nil {\n\t\t\tlog.Errorf(c, \"%s error when trying to create a tournament: %v\", desc, err)\n\t\t\treturn &helpers.InternalServerError{Err: errors.New(helpers.ErrorCodeTournamentCannotCreate)}\n\t\t}\n\n\t\treturn templateshlp.RenderJSON(w, c, tournament)\n\t}\n\treturn &helpers.BadRequest{Err: errors.New(helpers.ErrorCodeNotSupported)}\n}",
"func Generate(categoryName string, area geography.Area, originCulture culture.Culture) (Town, error) {\n\tvar newProducers []profession.Profession\n\tvar producers []profession.Profession\n\tvar newResources []resource.Resource\n\n\ttown := Town{}\n\n\tif categoryName == \"random\" {\n\t\ttownCategory, err := getRandomWeightedCategory()\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(townGenerationError, err)\n\t\t\treturn Town{}, err\n\t\t}\n\t\ttown.Category = townCategory\n\t} else {\n\t\ttown.Category = getCategoryByName(categoryName)\n\t}\n\n\ttown.Geography = area\n\ttown.Culture = originCulture\n\n\tname, err := town.Culture.Language.RandomTownName()\n\tif err != nil {\n\t\terr = fmt.Errorf(townGenerationError, err)\n\t\treturn Town{}, err\n\t}\n\ttown.Name = name\n\n\ttown.Population = generateRandomPopulation(town.Category)\n\n\ttown.BuildingStyle = town.Culture.BuildingStyle\n\n\tmayor, err := town.generateMayor()\n\tif err != nil {\n\t\terr = fmt.Errorf(townGenerationError, err)\n\t\treturn Town{}, err\n\t}\n\ttown.Mayor = mayor\n\n\tresources := area.GetResources()\n\n\tfor i := 0; i < town.Category.ProductionIterations; i++ {\n\t\tnewProducers, err = getProducers(town.Population, resources)\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(townGenerationError, err)\n\t\t\treturn Town{}, err\n\t\t}\n\t\tnewResources, err = goods.Produce(newProducers, resources)\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(townGenerationError, err)\n\t\t\treturn Town{}, err\n\t\t}\n\t\tresources = append(resources, newResources...)\n\t\tproducers = append(producers, newProducers...)\n\t}\n\n\ttown.Resources = resources\n\ttown.NotableProducers = producers\n\n\ttown.Exports = town.generateRandomExports()\n\timports, err := town.generateRandomImports()\n\tif err != nil {\n\t\terr = fmt.Errorf(townGenerationError, err)\n\t\treturn Town{}, err\n\t}\n\ttown.Imports = imports\n\n\treturn town, nil\n}",
"func (w *RandomWorld) CreateRandomFlyer() *GoWorld.Being {\n\t// Create an empty being\n\tbeing := &GoWorld.Being{ID: uuid.New()}\n\tbeing.Type = \"Flying\"\n\n\t// Give the being the basic necessities\n\tbeing.Hunger = hungerRange.randomFloat()\n\tbeing.Thirst = thirstRange.randomFloat()\n\tbeing.WantsChild = wantsChildRange.randomFloat()\n\n\t// Shape the being\n\tbeing.LifeExpectancy = lifeExpectancyRange.randomFloat()\n\tbeing.VisionRange = visionRange.randomFloat()\n\tbeing.Speed = speedRange.randomFloat()\n\tbeing.Durability = durabilityRange.randomFloat()\n\tbeing.Stress = stressRange.randomFloat()\n\tbeing.Size = sizeRange.randomFloat()\n\tbeing.Gender = randomGender()\n\tbeing.Fertility = fertilityRange.randomFloat()\n\tbeing.MutationRate = mutationRange.randomFloat()\n\n\t// Flying beings 'feel' home in the forest, but can spawn anywhere\n\t// Create some random coordinates within the world limits\n\trX := rand.Intn(w.Width)\n\trY := rand.Intn(w.Height)\n\toverflow := 0\n\t// If no being present at location set it as the spawn point\n\tfor w.TerrainSpots[rX][rY].Being != uuid.Nil {\n\t\trX = rand.Intn(w.Width)\n\t\trY = rand.Intn(w.Height)\n\t\t// Recover somehow if we look for a location for too long\n\t\toverflow++\n\t\tif overflow > 100000 {\n\t\t\t// Todo handle the infinite loop a little nicer than panicking\n\t\t\tpanic(\"error placing flying being: tried 100k random spots and all occupied\")\n\t\t}\n\t}\n\tbeing.Position.X = rX\n\tbeing.Position.Y = rY\n\tbeing.Habitat = Surfaces[2].ID\n\n\treturn being\n}",
"func createPokemon(n string, d int, c bool, cp villains) pokemon {\n\tnewPokemon := pokemon{n, d, c, cp}\n\treturn newPokemon\n}",
"func Create(logger *zap.SugaredLogger, tfDir, bucket, attackTag string) error {\n\terr := InitIfNeeded(logger, tfDir, bucket, attackTag)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlogger.Info(\"Running terraform plan\")\n\t_, err = Terraform(tfDir, \"plan\", bucket)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlogger.Info(\"Running terraform apply\")\n\t_, err = Terraform(tfDir, \"apply\", bucket)\n\treturn err\n}",
"func NewTrainCar() TrainCar {\n c := TrainCar{name: \"TrainCar\", vehicle: \"TrainCar\", speed: 30, capacity: 30, railway: \"CNR\"}\n return c\n}",
"func CreateNewSchool(c echo.Context) error {\n\n\tdb, ok := c.Get(\"db\").(*gorm.DB)\n\n\tif !ok {\n\t\treturn c.NoContent(http.StatusInternalServerError)\n\t}\n\n\tvar modelview view.CreateNewSchoolModelView\n\n\tc.Bind(&modelview)\n\n\tcanteens := make([]canteen.Canteen, len(modelview.Canteens))\n\n\tfor index := range modelview.Canteens {\n\n\t\tlocation := canteen.Location{}\n\n\t\tlocation.Latitude = modelview.Canteens[index].Location.Latitude\n\n\t\tlocation.Longitude = modelview.Canteens[index].Location.Longitude\n\n\t\tcanteen, cerr := canteen.New(modelview.Canteens[index].Name, location)\n\t\tif cerr != nil {\n\n\t\t\tmodelview := customerrorview.UsingFieldErrorToErrorMessageModelView(*cerr)\n\n\t\t\treturn c.JSON(http.StatusBadRequest, modelview)\n\t\t}\n\t\tcanteens[index] = canteen\n\t}\n\n\tschool, serr := model.New(modelview.Acronym, modelview.Name, canteens)\n\n\tif serr != nil {\n\n\t\tmodelview := customerrorview.UsingFieldErrorToErrorMessageModelView(*serr)\n\n\t\treturn c.JSON(http.StatusBadRequest, modelview)\n\t}\n\n\tvar existingSchool model.School\n\n\t// Finds if school with same acronym already exists\n\n\terr := db.Where(map[string]interface{}{\"acronym\": modelview.Acronym}).First(&existingSchool).Error\n\n\tif err == nil {\n\n\t\tcerr := customerrormodel.FieldError{Field: \"acronym\", Model: \"school\", Explanation: \"a school with the same acronym already exists\"}\n\n\t\tmodelview := customerrorview.UsingFieldErrorToErrorMessageModelView(cerr)\n\n\t\treturn c.JSON(http.StatusBadRequest, modelview)\n\t}\n\n\t// Creates school\n\tdb.Create(&school)\n\n\tmodelviewres := view.ToGetDetailedSchoolInformationModelView(school)\n\n\treturn c.JSON(http.StatusCreated, modelviewres)\n\n}",
"func MakeWorley(shaderpath string) Worley {\n\tcomputeshader, err := shader.MakeCompute(shaderpath + \"/noise/worley.comp\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t//create random seed\n\trandomdata := createRandom(1024 * 1024 * 4)\n\tnoisetexture, err := texture.MakeFromData(randomdata, 1024, 1024, gl.RGBA32F, gl.RGBA)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn Worley{\n\t\tcomputeshader: computeshader,\n\t\tnoisetexture: noisetexture,\n\n\t\twidth: 1024,\n\t\theight: 1024,\n\t\tresolution: 32,\n\t\toctaves: 1,\n\t\tradius: 40.0,\n\t\tradiusscale: 1,\n\n\t\tbrightness: 1.0,\n\t\tcontrast: 1.0,\n\t}\n}",
"func (ff *fftag) Create(eng vu.Eng, s *vu.State) {\n\trand.Seed(time.Now().UTC().UnixNano())\n\n\t// create the overlay\n\tff.top = eng.Root().NewPov()\n\tview := ff.top.NewView()\n\tview.SetUI()\n\tff.cam = view.Cam()\n\tff.mmap = ff.top.NewPov().SetScale(10, 10, 0)\n\tff.mmap.SetLocation(30, 30, 0)\n\n\t// populate the map\n\tff.msize = 69\n\tff.plan = grid.New(grid.ROOMS_SKIRMISH)\n\tff.plan.Generate(ff.msize, ff.msize)\n\twidth, height := ff.plan.Size()\n\tfor x := 0; x < width; x++ {\n\t\tfor y := 0; y < height; y++ {\n\t\t\tif ff.plan.IsOpen(x, y) {\n\t\t\t\tblock := ff.mmap.NewPov()\n\t\t\t\tblock.SetLocation(float64(x), float64(y), 0)\n\t\t\t\tblock.NewModel(\"uv\").LoadMesh(\"icon\").AddTex(\"wall\")\n\t\t\t\tff.spots = append(ff.spots, ff.id(x, y))\n\t\t\t}\n\t\t}\n\t}\n\n\t// populate chasers and a goal.\n\tnumChasers := 30\n\tfor cnt := 0; cnt < numChasers; cnt++ {\n\t\tchaser := ff.mmap.NewPov()\n\t\tchaser.NewModel(\"uv\").LoadMesh(\"icon\").AddTex(\"token\")\n\t\tff.chasers = append(ff.chasers, chaser)\n\t}\n\tff.goal = ff.mmap.NewPov()\n\tff.goal.NewModel(\"uv\").LoadMesh(\"icon\").AddTex(\"goal\")\n\tff.flow = grid.NewFlow(ff.plan) // flow field for the given plan.\n\tff.resetLocations()\n\n\t// set non default engine state.\n\teng.SetColor(0.15, 0.15, 0.15, 1)\n\tff.resize(s.W, s.H)\n}",
"func (c *SpaceClient) Create(ctx context.Context, r *resource.SpaceCreate) (*resource.Space, error) {\n\tvar space resource.Space\n\t_, err := c.client.post(ctx, \"/v3/spaces\", r, &space)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &space, nil\n}",
"func createPlayer(fname, lname, pos string) *Athelete {\n\n\tplayer1 := &Athelete{\n\n\t\tFirstname: fname,\n\t\tLastname: lname,\n\t\tLMType: \"player\",\n\t\tTeamName: \"Free Agent\",\n\t\tEligible: &Eligible{\n\t\t\tReason: \"\",\n\t\t\tSlips: make([]*Slip, 10, 30),\n\t\t\tLMActive: true,\n\t\t\tReturnDate: 0,\n\t\t},\n\t\tAtti: Attributes{\n\t\t\tPosition: pos,\n\t\t},\n\t}\n\n\treturn player1\n}",
"func createHyperShiftVPC() (*HyperShiftVPC, error) {\n\tctx := context.Background()\n\n\tvar vpc HyperShiftVPC\n\tworkingDir := viper.GetString(config.ReportDir)\n\n\ttf, err := terraform.New(workingDir)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdefer func() {\n\t\t_ = tf.Uninstall(ctx)\n\t}()\n\n\tlog.Println(\"Creating ROSA HyperShift aws vpc\")\n\n\terr = copyFile(\"terraform/setup-vpc.tf\", fmt.Sprintf(\"%s/setup-vpc.tf\", workingDir))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = tf.Init(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = callAndSetAWSSession(func() error {\n\t\terr := tf.Plan(\n\t\t\tctx,\n\t\t\ttfexec.Var(fmt.Sprintf(\"aws_region=%s\", viper.GetString(config.AWSRegion))),\n\t\t\ttfexec.Var(fmt.Sprintf(\"cluster_name=%s\", viper.GetString(config.Cluster.Name))),\n\t\t)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\terr = tf.Apply(ctx)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\toutput, err := tf.Output(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvpc.PrivateSubnet = strings.ReplaceAll(string(output[\"cluster-private-subnet\"].Value), \"\\\"\", \"\")\n\tvpc.PublicSubnet = strings.ReplaceAll(string(output[\"cluster-public-subnet\"].Value), \"\\\"\", \"\")\n\tvpc.NodePrivateSubnet = strings.ReplaceAll(string(output[\"node-private-subnet\"].Value), \"\\\"\", \"\")\n\n\tlog.Println(\"ROSA HyperShift aws vpc created!\")\n\n\treturn &vpc, nil\n}",
"func CreateDeploy(w http.ResponseWriter, r *http.Request) {\n\tdeploy := models.Deploy{}\n\terr := json.NewDecoder(r.Body).Decode(&deploy)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\t// Todo validate requirement id\n\n\terr = models.InsertDeploy(deploy)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tw.WriteHeader(200)\n\terr = json.NewEncoder(w).Encode(deploy)\n\tif err != nil {\n\t\tpanic(error(err))\n\t}\n}",
"func Create(w http.ResponseWriter, r *http.Request) {\n\tc := flight.Context(w, r)\n\tnow := time.Now()\n\n\tv := c.View.New(\"code/create\")\n\tv.Vars[\"curdate\"] = now.Format(\"2006-01-02\")\n\t//c.Repopulate(v.Vars, \"name\")\n\tv.Render(w, r)\n}",
"func newPlane(mk, mdl string) *plane {\n\tp := &plane{}\n\tp.make = mk\n\tp.model = mdl\n\treturn p\n}",
"func createPlayer(w http.ResponseWriter, r *http.Request) {\n\tplayer := models.NewPlayer(\"\")\n\tskue.Create(view, player, w, r)\n}",
"func CreateResidence(o *entities.Player, x float64, y float64) (*entities.Residence, error) {\n\tif o.Level != entities.Admin {\n\t\treturn nil, fmt.Errorf(\"no permission\")\n\t}\n\n\tr := Model.NewResidence(x, y)\n\tr.Name = \"NoName\"\n\n\tStartRouting()\n\tAddOpLog(\"CreateResidence\", o, r)\n\treturn r, nil\n}",
"func Create(engine *leader.Leader, port int) *http.Server {\n\tgame.InitGames()\n\tlgger := logger.Init(\"BattleSnake Web\", true, false, ioutil.Discard)\n\tvar host string\n\tif os.Getenv(\"ENV\") == \"dev\" {\n\t\thost = \"localhost\"\n\t} else {\n\t\thost = \"\"\n\t}\n\treturn &http.Server{\n\t\tAddr: fmt.Sprintf(\"%s:%d\", host, port),\n\t\tHandler: web.NewRouter(engine, lgger),\n\t\tReadTimeout: time.Duration(500) * time.Millisecond, // TODO remove hardcoding\n\t\tWriteTimeout: time.Duration(500) * time.Millisecond, // TODO remove hardcoding\n\t}\n}",
"func NewProgramControl()(*ProgramControl) {\n m := &ProgramControl{\n Entity: *NewEntity(),\n }\n return m\n}",
"func (d *Database) CreateBattle(LeaderID string, BattleName string, PointValuesAllowed []string, Plans []*model.Plan, AutoFinishVoting bool, PointAverageRounding string) (*model.Battle, error) {\n\tvar pointValuesJSON, _ = json.Marshal(PointValuesAllowed)\n\n\tvar b = &model.Battle{\n\t\tName: BattleName,\n\t\tUsers: make([]*model.BattleUser, 0),\n\t\tPlans: make([]*model.Plan, 0),\n\t\tVotingLocked: true,\n\t\tPointValuesAllowed: PointValuesAllowed,\n\t\tAutoFinishVoting: AutoFinishVoting,\n\t\tLeaders: make([]string, 0),\n\t}\n\tb.Leaders = append(b.Leaders, LeaderID)\n\n\te := d.db.QueryRow(\n\t\t`SELECT battleId FROM create_battle($1, $2, $3, $4, $5);`,\n\t\tLeaderID,\n\t\tBattleName,\n\t\tstring(pointValuesJSON),\n\t\tAutoFinishVoting,\n\t\tPointAverageRounding,\n\t).Scan(&b.Id)\n\tif e != nil {\n\t\tlog.Println(e)\n\t\treturn nil, errors.New(\"error creating battle\")\n\t}\n\n\tfor _, plan := range Plans {\n\t\tplan.Votes = make([]*model.Vote, 0)\n\n\t\te := d.db.QueryRow(\n\t\t\t`INSERT INTO plans (battle_id, name, type, reference_id, link, description, acceptance_criteria) VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING id`,\n\t\t\tb.Id,\n\t\t\tplan.Name,\n\t\t\tplan.Type,\n\t\t\tplan.ReferenceId,\n\t\t\tplan.Link,\n\t\t\tplan.Description,\n\t\t\tplan.AcceptanceCriteria,\n\t\t).Scan(&plan.Id)\n\t\tif e != nil {\n\t\t\tlog.Println(e)\n\t\t}\n\t}\n\n\tb.Plans = Plans\n\n\treturn b, nil\n}",
"func (s *workspaces) Create(ctx context.Context, organization string, options WorkspaceCreateOptions) (*Workspace, error) {\n\tif !validStringID(&organization) {\n\t\treturn nil, ErrInvalidOrg\n\t}\n\tif err := options.valid(); err != nil {\n\t\treturn nil, err\n\t}\n\n\tu := fmt.Sprintf(\"organizations/%s/workspaces\", url.QueryEscape(organization))\n\treq, err := s.client.NewRequest(\"POST\", u, &options)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tw := &Workspace{}\n\terr = req.Do(ctx, w)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn w, nil\n}",
"func (w *RandomWorld) CreateRandomFish() *GoWorld.Being {\n\t// Create an empty being\n\tbeing := &GoWorld.Being{ID: uuid.New()}\n\tbeing.Type = \"Water\"\n\n\t// Give the being the basic necessities\n\tbeing.Hunger = hungerRange.randomFloat()\n\tbeing.Thirst = thirstRange.randomFloat()\n\tbeing.WantsChild = wantsChildRange.randomFloat()\n\n\t// Shape the being\n\tbeing.LifeExpectancy = lifeExpectancyRange.randomFloat()\n\tbeing.VisionRange = visionRange.randomFloat()\n\tbeing.Speed = speedRange.randomFloat()\n\tbeing.Durability = durabilityRange.randomFloat()\n\tbeing.Stress = stressRange.randomFloat()\n\tbeing.Size = sizeRange.randomFloat()\n\tbeing.Gender = randomGender()\n\tbeing.Fertility = fertilityRange.randomFloat()\n\tbeing.MutationRate = mutationRange.randomFloat()\n\n\t// Water beings should spawn in water\n\trX := rand.Intn(w.Width)\n\trY := rand.Intn(w.Height)\n\toverflow := 0\n\t// If no being present at location set it as the spawn point\n\tfor w.TerrainSpots[rX][rY].Surface.CommonName != \"Water\" && w.TerrainSpots[rX][rY].Being == uuid.Nil {\n\t\trX = rand.Intn(w.Width)\n\t\trY = rand.Intn(w.Height)\n\t\t// Recover somehow if we look for a location for too long\n\t\toverflow++\n\t\tif overflow > 100000 {\n\t\t\t// Todo handle the infinite loop a little nicer than panicking\n\t\t\tpanic(\"error placing water being: tried 100k random spots and all occupied / not water\")\n\t\t}\n\t}\n\tbeing.Position.X = rX\n\tbeing.Position.Y = rY\n\t// Should always be water ID\n\tbeing.Habitat = w.TerrainSpots[rX][rY].Surface.ID\n\n\treturn being\n}",
"func Create (w http.ResponseWriter, r *http.Request) {\n\t/* This is an SBC */\n\tif CREATED == false {\n\t\t/* Move the checking of ID up first to confirm this is allowed */\n\t\t/* Do most of start. Just don't download because that would be downloading from self */\n\t\t/* Get address and ID */\n\t\t/* Get port number and set that to ID */\n\t\t/* Save localhost as Addr */\n\t\tsplitHostPort := strings.Split(r.Host, \":\")\n\t\ti, err := strconv.ParseInt(splitHostPort[1], 10, 32)\n\t\tif err != nil {\n\t\t\tw.WriteHeader(500)\n\t\t\tpanic(err)\n\t\t}\n\t\t/* ID is now port number. Address is now correct Address */\n\t\tID = int32(i)\n\t\tSELF_ADDR = r.Host\n\t\t/* Check if ID is allowed in ALLOWED_IDs */\n\t\tif _, ok := ALLOWED_IDS[ID]; ok {\n\t\t\tnewBlockChain := data.NewBlockChain()\n\n\t\t\tmpt1 := p1.MerklePatriciaTrie{}\n\t\t\tmpt1.Initial()\n\t\t\tmpt1.Insert(\"1\", \"Origin\")\n\n\t\t\tmpt2 := p1.MerklePatriciaTrie{}\n\t\t\tmpt2.Initial()\n\t\t\tmpt2.Insert(\"1\", \"Decoy1\")\n\n\t\t\tmpt3 := p1.MerklePatriciaTrie{}\n\t\t\tmpt3.Initial()\n\t\t\tmpt3.Insert(\"1\", \"Decoy2\")\n\n\t\t\tmpt4 := p1.MerklePatriciaTrie{}\n\t\t\tmpt4.Initial()\n\t\t\tmpt4.Insert(\"1\", \"Decoy3\")\n\n\t\t\thexPubKey := hexutil.Encode(signature_p.PUBLIC_KEY)\n\t\t\tnewBlockChain.GenBlock(mpt1, hexPubKey)\n\t\t\tnewBlockChain.GenBlock(mpt2, hexPubKey)\n\t\t\tnewBlockChain.GenBlock(mpt3, hexPubKey)\n\t\t\tnewBlockChain.GenBlock(mpt4, hexPubKey)\n\t\t\t/* Set Global variable SBC to be this new blockchain */\n\t\t\tSBC = newBlockChain\n\t\t\t/* Generate Multiple Blocks Initially */\n\t\t\t\t\n\t\t\tblockChainJson, _ := SBC.BlockChainToJson()\n\t\t\t/* Write this to the server */\n\t\t\tw.Write([]byte(blockChainJson))\n\n\t\t\t/* Need to instantiate the peer list */\n\t\t\tPeers = data.NewPeerList(ID, 32)\n\t\t\tBALLOT = ReadDataFromBallot()\n\t\t\tCREATED = true\n\t\t}\n\t}\n}",
"func newResources(clusterID, clusterVPCID string, publicAccessFW publicAccessFirewall, gclient *godo.Client) *resources {\n\treturn &resources{\n\t\tclusterID: clusterID,\n\t\tclusterVPCID: clusterVPCID,\n\t\tfirewall: publicAccessFW,\n\n\t\tgclient: gclient,\n\t}\n}",
"func newScenario(name string) *Instruction {\n\treturn &Instruction{\n\t\tType: ScenarioInst,\n\t\tName: name,\n\t\tVersion: &Version{},\n\t}\n}",
"func New(w http.ResponseWriter, r *http.Request) {\r\n\ttmpl.ExecuteTemplate(w, \"New\", nil)\r\n}",
"func New(w http.ResponseWriter, r *http.Request) {\r\n\ttmpl.ExecuteTemplate(w, \"New\", nil)\r\n}",
"func CreateGame(w http.ResponseWriter, r *http.Request) {\n\t//----------------------------------------------------------------------------\n\t// Initialize an empty Game model\n\t//----------------------------------------------------------------------------\n\tdata := model.Game{}\n\t\n\t//----------------------------------------------------------------------------\n\t// Parse the body into a Game model structure\n\t//----------------------------------------------------------------------------\n\tutils.ParseBody(r, data)\n\n\t//----------------------------------------------------------------------------\n\t// Delegate to the Game data access object to create\n\t//----------------------------------------------------------------------------\n\trequestResult := GameDAO.CreateGame( data )\n\t\n\t//----------------------------------------------------------------------------\n\t// Marshal the model into a JSON object\n\t//----------------------------------------------------------------------------\n\tres,_ := json.Marshal(requestResult)\n\n\tw.WriteHeader(http.StatusOK)\n\tw.Write(res)\n}",
"func (v ToursResource) Create(c buffalo.Context) error {\n\t// Allocate an empty Tour\n\ttour := &models.Tour{}\n\n\t// Bind tour to the html form elements\n\tif err := c.Bind(tour); err != nil {\n\t\treturn err\n\t}\n\n\t// Get the DB connection from the context\n\ttx, ok := c.Value(\"tx\").(*pop.Connection)\n\tif !ok {\n\t\treturn errors.New(\"no transaction found\")\n\t}\n\n\t// Validate the data from the html form\n\tverrs, err := tx.ValidateAndCreate(tour)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif verrs.HasAny() {\n\t\t// Make the errors available inside the html template\n\t\tc.Set(\"errors\", verrs)\n\n\t\t// Render again the new.html template that the user can\n\t\t// correct the input.\n\t\treturn c.Render(422, r.Auto(c, tour))\n\t}\n\n\t// If there are no errors set a success message\n\tc.Flash().Add(\"success\", T.Translate(c, \"tour.created.success\"))\n\t// and redirect to the tours index page\n\treturn c.Render(201, r.Auto(c, tour))\n}",
"func createRace(w http.ResponseWriter, r *http.Request) {\n core_module.SetHeaders(&w)\n\n var race DndRace\n decoder := json.NewDecoder(r.Body)\n err := decoder.Decode(&race)\n\n if err != nil {\n w.WriteHeader(http.StatusBadRequest)\n json.NewEncoder(w).Encode(&core_module.CoreException{\n Message: \"Could not decode!\",\n })\n log.Println(err)\n return\n }\n\n _, err = core_module.Db.Model(&race).Insert()\n if err != nil {\n w.WriteHeader(http.StatusInternalServerError)\n json.NewEncoder(w).Encode(&core_module.CoreException{\n Message: \"Could not insert into database!\",\n })\n log.Println(err)\n return\n }\n\n w.WriteHeader(http.StatusAccepted)\n}",
"func CreateProject(w http.ResponseWriter, r *http.Request) {\n\tvar p models.Project\n\n\tu := mw.GetUser(r.Context())\n\tif u == nil || !u.IsAdmin {\n\t\tw.WriteHeader(403)\n\t\tw.Write(apiError(\"you must be logged in as a system administrator to create a project\"))\n\t\treturn\n\t}\n\n\tdecoder := json.NewDecoder(r.Body)\n\terr := decoder.Decode(&p)\n\tif err != nil {\n\t\tw.WriteHeader(400)\n\t\tw.Write(apiError(\"invalid body\"))\n\t\tlog.Println(err)\n\t\treturn\n\t}\n\n\terr = Store.Projects().New(&p)\n\tif err != nil {\n\t\tw.WriteHeader(400)\n\t\tw.Write(apiError(err.Error()))\n\t\tlog.Println(err)\n\t\treturn\n\t}\n\n\tsendJSON(w, p)\n}",
"func doCreate(constructor func() base.IGameObject2D, isActive *bool) base.IGameObject2D {\r\n\tobj := constructor()\r\n\tobj.Obj().SetIGameObject2D(obj)\r\n\tapp.registerChannel <- resourceAccessRequest{\r\n\t\tpayload: obj,\r\n\t\tisActive: isActive,\r\n\t}\r\n\treturn obj\r\n}",
"func createCat() *Cat {\n\treturn NewCat(\"Mike\")\n}",
"func NewWorld(serverUrl string, apiVersion string) *World {\n\treturn &World{\n\t\tserverUrl: serverUrl,\n\t\tapiVersion: apiVersion,\n\t}\n}",
"func NewWorkspace(volume, containerName, imageName string) {\n CreateReadOnlyLayer(imageName)\n CreateWriteLayer(containerName)\n CreateMountPoint(containerName, imageName)\n if volume == \"\" {\n return\n }\n volumeURLs := strings.Split(volume, \":\")\n if len(volumeURLs) != 2 || volumeURLs[0] == \"\" || volumeURLs[1] == \"\" {\n log.Warn(\"Volume argument input is not correct.\")\n return\n }\n MountVolume(containerName, volumeURLs)\n log.Infof(\"Mount volume %q\", volumeURLs)\n}",
"func CreateRogue(name string) *rogue {\n\tr := &rogue{\n\t\tname: name,\n\t\tmaxhp: 50 + Rolld(10),\n\t\tdex: 5 + Rolld(6),\n\t}\n\tr.ap += 10 + r.dex\n\tr.armor += Rolld(6) + r.dex\n\tr.hp = r.maxhp\n\treturn r\n}",
"func (s *Server) NewWorld(addr string, initTrucks int32) (err error) {\n\tworldId, err := s.sim.NewWorld(addr, initTrucks)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = db.WithTx(s.db, func(tx *sql.Tx) (err error) {\n\t\tdb.DestroySchema(tx)\n\t\terr = db.InitSchema(tx)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t\terr = db.SetMeta(tx, \"world_id\", strconv.FormatInt(worldId, 10))\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t\terr = s.initTrucks(tx, initTrucks)\n\t\treturn\n\t})\n\tif err != nil {\n\t\treturn\n\t}\n\tlog.Println(\"created world\", worldId)\n\treturn\n}",
"func New(w http.ResponseWriter, r *http.Request) {\n\tgetTemplates().ExecuteTemplate(w, \"New\", nil)\n}",
"func (u *App) Create(c echo.Context, req *Create) (*model.Course, error) {\n\tif err := u.rbac.EnforceRole(c, model.AdminRole); err != nil {\n\t\treturn nil, err\n\t}\n\n\tid, err := util.GenerateUUID()\n\tif err = zaplog.ZLog(err); err != nil {\n\t\treturn nil, err\n\t}\n\n\tschoolName := \"\"\n\tvar school model.Organization\n\tif err := u.db.Model(&model.Organization{}).Where(\"uuid = ?\", req.School).First(&school).Error; err == nil {\n\t\tschoolName = school.Name\n\t}\n\n\tcourse := model.Course{\n\t\tBase: model.Base{ID: id},\n\t\tName: req.Name,\n\t\tSchool: req.School,\n\t\tSchoolName: schoolName,\n\t\tDepartment: req.Department,\n\t\tDomain: req.Domain,\n\t\tCluster: req.Cluster,\n\t\tType: req.Type,\n\t\tLevel: req.Level,\n\t}\n\n\tvar domain model.CourseDomain\n\tif err := u.db.Model(&model.CourseDomain{}).Where(\"uuid = ?\", req.Domain).First(&domain).Error; err == nil {\n\t\tcourse.DomainName = domain.Name\n\t\tfor _, cluster := range domain.Clusters {\n\t\t\tif course.Cluster == cluster.ID {\n\t\t\t\tcourse.ClusterName = cluster.Name\n\t\t\t}\n\t\t}\n\t}\n\n\treturn u.udb.Create(u.db, course)\n}",
"func (sh *Shift) Create() error {\n\tvalidator := validatorimpl.NewDefaultValidator()\n\terrs := validator.Verify(sh)\n\tif len(errs) != 0 {\n\t\treturn fmt.Errorf(\"Save the shift failed due to content errors: %v\", errs)\n\t}\n\tshiftRepo := repoimpl.GetShiftRepo()\n\tfindCtx, findCancel := utils.GetDefaultCtx()\n\tdefer findCancel()\n\trst := shiftRepo.FindOne(findCtx, bson.M{\"projectId\": sh.ProjectID})\n\tif rst.Err() == nil {\n\t\treturn DuplicateShiftError{}\n\t}\n\tctxInsert, cancelInsert := utils.GetDefaultCtx()\n\tdefer cancelInsert()\n\t_, err := shiftRepo.InsertOne(ctxInsert, sh)\n\treturn err\n}",
"func newcomputer(brand string) *computer {\n\treturn &computer{brand: brand}\n}",
"func (p *Provider) CreateSpace(name string, projectID int, clusterID *int) (int, error) {\n\t// Response struct\n\tresponse := struct {\n\t\tCreateSpace *struct {\n\t\t\tSpaceID int\n\t\t} `json:\"manager_createSpace\"`\n\t}{}\n\n\t// Do the request\n\terr := p.GrapqhlRequest(`\n\t\tmutation($spaceName: String!, $clusterID: Int, $projectID: Int!) {\n\t\t\tmanager_createSpace(spaceName: $spaceName, clusterID: $clusterID, projectID: $projectID) {\n\t\t\t\tSpaceID\n\t\t\t}\n\t\t}\n\t`, map[string]interface{}{\n\t\t\"spaceName\": name,\n\t\t\"projectID\": projectID,\n\t\t\"clusterID\": clusterID,\n\t}, &response)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\t// Check result\n\tif response.CreateSpace == nil {\n\t\treturn 0, errors.New(\"Couldn't create project: returned answer is null\")\n\t}\n\n\treturn response.CreateSpace.SpaceID, nil\n}",
"func (l *LifeSystem) New(w *ecs.World) {\n\tl.world = w\n\tl.isFirstTime = true\n}",
"func createLobby(maxRounds int, startingTeam int, team1Name string, team2Name string) Lobby {\n\n\t// Firstly generate uuids\n\tlobbyUUID := generateUUID()\n\tteam1UIDGenerated := generateUUID()\n\tteam2UIDGenerated := generateUUID()\n\n\tlobby := Lobby{\n\t\tLobbyUID: lobbyUUID,\n\t\tTeam1UID: team1UIDGenerated,\n\t\tTeam1Name: team1Name,\n\t\tTeam2Name: team2Name,\n\t\tTeam2UID: team2UIDGenerated,\n\t\tTeam1link: fmt.Sprintf(`/pv/%v/%v`, lobbyUUID, team1UIDGenerated),\n\t\tTeam2link: fmt.Sprintf(`/pv/%v/%v`, lobbyUUID, team2UIDGenerated),\n\t\tMaxRounds: maxRounds,\n\t\tEnabled: false,\n\t\tStartingTeam: startingTeam,\n\t}\n\n\t// By default whenever a new lobby is generated then a game on nagrand is added.\n\t// With the exception of there being a potential different map for single play.\n\tif maxRounds != 1 {\n\t\tlobby.createDefaultGame(startingTeam)\n\t}\n\n\tif maxRounds == 1 {\n\t\tlobby.createGame(startingTeam, 1)\n\t}\n\n\treturn lobby\n\n}",
"func createBooking(w http.ResponseWriter, r *http.Request) {\n\treqBody, _ := ioutil.ReadAll(r.Body)\n\tvar bookingRequest BookingRequest\n\terr := json.Unmarshal(reqBody, &bookingRequest)\n\tif err != nil {\n\t\terr = errorResponse(w, InvalidJSON, http.StatusBadRequest)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\treturn\n\t}\n\n\tdate, err := time.Parse(layoutISO, bookingRequest.Date)\n\tif err != nil {\n\t\terr = errorResponse(w, InvalidDate, http.StatusBadRequest)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\treturn\n\t}\n\n\tclass, err := findClassReference(bookingRequest.ClassName, date)\n\tif err != nil {\n\t\terr = errorResponse(w, ClassDoesNotExists, http.StatusNotFound)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\treturn\n\t}\n\tbookingRequest.Id = createID()\n\tclass.addBooking(Booking{bookingRequest.MemberName, bookingRequest.Id})\n\tw.WriteHeader(http.StatusCreated)\n\terr = json.NewEncoder(w).Encode(bookingRequest)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n}",
"func NewSpace(t *testing.T, awaitilities wait.Awaitilities, opts ...SpaceOption) *toolchainv1alpha1.Space {\n\tnamePrefix := strings.ToLower(t.Name())\n\t// Remove all invalid characters\n\tnamePrefix = notAllowedChars.ReplaceAllString(namePrefix, \"\")\n\n\t// Trim if the length exceeds 40 chars (63 is the max)\n\tif len(namePrefix) > 40 {\n\t\tnamePrefix = namePrefix[0:40]\n\t}\n\n\tspace := &toolchainv1alpha1.Space{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tNamespace: awaitilities.Host().Namespace,\n\t\t\tGenerateName: namePrefix + \"-\",\n\t\t},\n\t}\n\tfor _, apply := range opts {\n\t\tapply(space)\n\t}\n\treturn space\n}",
"func WorkloadNew(homeDirectory string, org string) {\n\n\t// Verify that env vars are set properly and determine the working directory.\n\tdir, err := VerifyEnvironment(homeDirectory, false, false, \"\")\n\tif err != nil {\n\t\tcliutils.Fatal(cliutils.CLI_INPUT_ERROR, \"'%v %v' %v\", WORKLOAD_COMMAND, WORKLOAD_CREATION_COMMAND, err)\n\t}\n\n\tif org == \"\" && os.Getenv(DEVTOOL_HZN_ORG) == \"\" {\n\t\tcliutils.Fatal(cliutils.CLI_INPUT_ERROR, \"'%v %v' must specify either --org or set the %v environment variable.\", WORKLOAD_COMMAND, WORKLOAD_CREATION_COMMAND, DEVTOOL_HZN_ORG)\n\t}\n\n\t// Create the working directory.\n\tif err := CreateWorkingDir(dir); err != nil {\n\t\tcliutils.Fatal(cliutils.CLI_INPUT_ERROR, \"'%v %v' %v\", WORKLOAD_COMMAND, WORKLOAD_CREATION_COMMAND, err)\n\t}\n\n\t// If there are any horizon metadata files already in the directory then we wont create any files.\n\tcmd := fmt.Sprintf(\"%v %v\", WORKLOAD_COMMAND, WORKLOAD_CREATION_COMMAND)\n\tFileNotExist(dir, cmd, USERINPUT_FILE, UserInputExists)\n\tFileNotExist(dir, cmd, WORKLOAD_DEFINITION_FILE, WorkloadDefinitionExists)\n\t//FileNotExist(dir, cmd, DEPENDENCIES_FILE, DependenciesExists)\n\n\tif org == \"\" {\n\t\torg = os.Getenv(DEVTOOL_HZN_ORG)\n\t}\n\n\t// Create the metadata files.\n\tif err := CreateUserInputs(dir, true, false, org); err != nil {\n\t\tcliutils.Fatal(cliutils.CLI_GENERAL_ERROR, \"'%v %v' %v\", WORKLOAD_COMMAND, WORKLOAD_CREATION_COMMAND, err)\n\t} else if err := CreateWorkloadDefinition(dir, org); err != nil {\n\t\tcliutils.Fatal(cliutils.CLI_GENERAL_ERROR, \"'%v %v' %v\", WORKLOAD_COMMAND, WORKLOAD_CREATION_COMMAND, err)\n\t}\n\t// } else if err := CreateDependencies(dir); err != nil {\n\t// \tcliutils.Fatal(cliutils.CLI_GENERAL_ERROR, \"'%v %v' %v\", WORKLOAD_COMMAND, WORKLOAD_CREATION_COMMAND, err)\n\t// }\n\n\tfmt.Printf(\"Created horizon metadata files in %v. Edit these files to define and configure your new %v.\\n\", dir, WORKLOAD_COMMAND)\n\n}",
"func newPerson(name string,class string, nationality string ) *Person {\n\treturn &Person{name: name,job: class, nationality: nationality}\n\n}",
"func CreateIstanbulPolicy(toolchainID string, policyName string) error {\n\turl := getGateService() + \"/api/v5/toolchainids/\" + toolchainID + \"/policies\"\n\n\t// prepare the payload\n\trule := map[string]interface{}{\n\t\t\"name\": \"testcoveragepercentage\",\n\t\t\"format\": \"istanbul\",\n\t\t\"stage\": \"code\",\n\t\t\"codeCoverage\": 100,\n\t}\n\n\trules := [1]map[string]interface{}{rule}\n\n\tpayload := map[string]interface{}{\n\t\t\"name\": policyName,\n\t\t\"rules\": rules,\n\t}\n\n\t//fmt.Printf(\"%+v\", payload)\n\tbytesRepresentation, marshalerr := json.Marshal(payload)\n\tif marshalerr != nil {\n\t\treturn errors.New(\"Failed to json.Marshal the payload Error: \" + marshalerr.Error())\n\t}\n\n\tstatusCode, body, reqerr := httpRequest(\"POST\", url, bytes.NewBuffer(bytesRepresentation))\n\tif reqerr != nil {\n\t\treturn errors.New(\"Failed to create istanbul policy Error: \" + reqerr.Error())\n\t}\n\tif statusCode != 201 {\n\t\treturn errors.New(\"Failed to create istanbul policy \" + fmt.Sprintf(\"statusCode %v\", statusCode) + \" Error: \" + fmt.Sprintf(\"body: %v\", body))\n\t}\n\treturn nil\n}",
"func CreateWorkload(pce PCE, workload Workload) (Workload, APIResponse, error) {\n\tvar newWL Workload\n\tvar api APIResponse\n\tvar err error\n\n\t// Build the API URL\n\tapiURL, err := url.Parse(\"https://\" + pceSanitization(pce.FQDN) + \":\" + strconv.Itoa(pce.Port) + \"/api/v2/orgs/\" + strconv.Itoa(pce.Org) + \"/workloads\")\n\tif err != nil {\n\t\treturn newWL, api, fmt.Errorf(\"create workload - %s\", err)\n\t}\n\n\t// Call the API\n\tworkloadJSON, err := json.Marshal(workload)\n\tif err != nil {\n\t\treturn newWL, api, fmt.Errorf(\"create workload - %s\", err)\n\t}\n\tapi, err = apicall(\"POST\", apiURL.String(), pce, workloadJSON, false)\n\tif err != nil {\n\t\treturn newWL, api, fmt.Errorf(\"create workload - %s\", err)\n\t}\n\n\t// Marshal JSON\n\tjson.Unmarshal([]byte(api.RespBody), &newWL)\n\n\treturn newWL, api, nil\n}",
"func (obj *MovieTypeController) CreateNewMovieType(c *fiber.Ctx) error {\n\tmoveTypeRequest := new(request.MovieTypeRequest)\n\n\tif err := c.BodyParser(moveTypeRequest); err != nil {\n\t\treturn util.ResponseError(err.Error(), nil)\n\t}\n\n\tmoveType := model.MovieType{\n\t\tName: moveTypeRequest.Name,\n\t\tSlug: moveTypeRequest.Slug,\n\t\tStatus: moveTypeRequest.Status,\n\t}\n\n\tif _, err := obj.movieTypeRepository.SaveMovieType(moveType); err != nil {\n\t\treturn util.ResponseError(err.Error(), nil)\n\t}\n\n\treturn util.ResponseSuccess(\"Thành công\", nil)\n}",
"func doCreate(enviro env.Project, appJson, rootDir, appName, vendorDir, constraints string) error {\n\tfmt.Printf(\"Creating initial project structure, this might take a few seconds ... \\n\")\n\tdescriptor, err := ParseAppDescriptor(appJson)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif appName != \"\" {\n\t\t// override the application name\n\n\t\taltJson := strings.Replace(appJson, `\"`+descriptor.Name+`\"`, `\"`+appName+`\"`, 1)\n\t\taltDescriptor, err := ParseAppDescriptor(altJson)\n\n\t\t//see if we can get away with simple replace so we don't reorder the existing json\n\t\tif err == nil && altDescriptor.Name == appName {\n\t\t\tappJson = altJson\n\t\t} else {\n\t\t\t//simple replace didn't work so we have to unmarshal & re-marshal the supplied json\n\t\t\tvar appObj map[string]interface{}\n\t\t\terr := json.Unmarshal([]byte(appJson), &appObj)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tappObj[\"name\"] = appName\n\n\t\t\tupdApp, err := json.MarshalIndent(appObj, \"\", \" \")\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tappJson = string(updApp)\n\t\t}\n\n\t\tdescriptor.Name = appName\n\t} else {\n\t\tappName = descriptor.Name\n\t\trootDir = filepath.Join(rootDir, appName)\n\t}\n\n\terr = enviro.Init(rootDir)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = enviro.Create(false, \"\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = fgutil.CreateFileFromString(filepath.Join(rootDir, \"flogo.json\"), appJson)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// create initial structure\n\tappDir := filepath.Join(enviro.GetSourceDir(), descriptor.Name)\n\tos.MkdirAll(appDir, os.ModePerm)\n\n\t// Validate structure\n\terr = enviro.Open()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Create the dep manager\n\tdepManager := &dep.DepManager{Env: enviro}\n\n\t// Initialize the dep manager\n\terr = depManager.Init()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Create initial files\n\tdeps, err := config.ExtractAllDependencies(appJson)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcreateMainGoFile(appDir, \"\")\n\tcreateImportsGoFile(appDir, deps)\n\n\t// Add constraints\n\tif len(constraints) > 0 {\n\t\tnewConstraints := []string{\"-add\"}\n\t\tnewConstraints = append(newConstraints, strings.Split(constraints, \",\")...)\n\t\terr = depManager.Ensure(newConstraints...)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tensureArgs := []string{}\n\n\tif len(vendorDir) > 0 {\n\t\t// Copy vendor directory\n\t\tfgutil.CopyDir(vendorDir, enviro.GetVendorDir())\n\t\t// Do not touch vendor folder when ensuring\n\t\tensureArgs = append(ensureArgs, \"-no-vendor\")\n\t}\n\n\t// Sync up\n\terr = depManager.Ensure(ensureArgs...)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func Create (appName string) {\n\n checkGopath ()\n checkContainer (appName)\n\n app := Application { Name: appName }\n\n app.createContainer ()\n\n err := app.copyFileTree (\n GOPATH + slash + applicationTemplatesPath,\n GOPATH_SRC + app.Name,\n )\n\n if err != nil {\n log.Fatal (err)\n }\n}",
"func (c *Controller) Create(w http.ResponseWriter, r *http.Request) {\n\tvar err error\n\ttoken := r.FormValue(\"token\")\n\tmaxPlayers := r.FormValue(\"maxPlayers\")\n\tname := r.FormValue(\"name\")\n\tservice, err := createDSTService(token, maxPlayers, name)\n\tif c.CheckError(err, http.StatusBadRequest, w) {\n\t\treturn\n\t}\n\tc.SendJSON(\n\t\tw,\n\t\tr,\n\t\tservice,\n\t\thttp.StatusOK,\n\t)\n}",
"func NewPlane(name string, client sleepwalker.RESTClient) Plane {\n\tdesc := \"airstrike.NewPlane\"\n\tlog.WithFields(map[string]interface{}{\n\t\t\"name\": name,\n\t\t\"client\": client,\n\t}).Debug(desc)\n\treturn Plane{Name: name, Client: client}\n}",
"func create_site() {\n\n\tcreate_dirs()\n\tcreate_theme_files()\n}",
"func New(width int, visualization string, zeitpunkte []time.Time) (slm sunlightmap) {\n\tslm = sunlightmap{}\n\tslm.Width = width - width%2\n\tslm.Height = slm.Width / 2\n\tslm.visualization = visualization\n\tslm.DaylightImageFilename = \"world_mine_day_solarized_720-360.png\"\n\tslm.NighttimeImageFilename = \"world_mine_night_solarized_720-360.png\"\n\tslm.zeitpunkte = zeitpunkte //[]time.Time{time.Date(2017, 10, 24, 17, 30, 0, 0, time.UTC)}\n\treturn\n}",
"func createClass(w http.ResponseWriter, r *http.Request) {\n\treqBody, _ := ioutil.ReadAll(r.Body)\n\n\tvar classRequest ClassRequest\n\terr := json.Unmarshal(reqBody, &classRequest)\n\tif err != nil {\n\t\terr = errorResponse(w, InvalidJSON, http.StatusBadRequest)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\treturn\n\t}\n\n\tvar classes []Class\n\tstartDate, err := time.Parse(layoutISO, classRequest.StartDate)\n\tif err != nil {\n\t\terr = errorResponse(w, InvalidDate, http.StatusBadRequest)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\treturn\n\t}\n\tendDate, err := time.Parse(layoutISO, classRequest.EndDate)\n\tif err != nil {\n\t\terr = errorResponse(w, InvalidDate, http.StatusBadRequest)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\treturn\n\t}\n\n\tfor days := 0; days <= int(endDate.Sub(startDate).Hours()/24); days++ {\n\t\tclass := Class{\n\t\t\tId: createID(),\n\t\t\tName: classRequest.Name,\n\t\t\tDate: startDate.Add(time.Hour * 24 * time.Duration(days)),\n\t\t\tCapacity: classRequest.Capacity,\n\t\t}\n\t\tclasses = append(classes, class)\n\t}\n\tDBClasses = append(DBClasses, classes...)\n\n\tw.WriteHeader(http.StatusCreated)\n\terr = json.NewEncoder(w).Encode(classes)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n}",
"func (w *RandomWorld) CreateFlyers(quantity int) {\n\t// Initialize each being to a random one\n\tfor i := 0; i < quantity; i++ {\n\t\t// Create random being and place it into the map\n\t\tb := w.CreateRandomFlyer()\n\t\tw.BeingList[b.ID.String()] = b\n\t}\n}",
"func (router *router) createProject(request *restful.Request, response *restful.Response) {\n\tproject := &model.Project{}\n\tcreatedProject, err := router.projectManager.CreateProject(project)\n\tif err != nil {\n\t\tlog.Fatalf(\"create Project failed,err msg%s\", err)\n\t}\n\tresponse.WriteHeaderAndEntity(http.StatusCreated, createdProject)\n}",
"func (s *API) CreateDatabase(req *CreateDatabaseRequest, opts ...scw.RequestOption) (*Database, error) {\n\tvar err error\n\n\tif req.Region == \"\" {\n\t\tdefaultRegion, _ := s.client.GetDefaultRegion()\n\t\treq.Region = defaultRegion\n\t}\n\n\tif fmt.Sprint(req.Region) == \"\" {\n\t\treturn nil, errors.New(\"field Region cannot be empty in request\")\n\t}\n\n\tif fmt.Sprint(req.InstanceID) == \"\" {\n\t\treturn nil, errors.New(\"field InstanceID cannot be empty in request\")\n\t}\n\n\tscwReq := &scw.ScalewayRequest{\n\t\tMethod: \"POST\",\n\t\tPath: \"/rdb/v1/regions/\" + fmt.Sprint(req.Region) + \"/instances/\" + fmt.Sprint(req.InstanceID) + \"/databases\",\n\t\tHeaders: http.Header{},\n\t}\n\n\terr = scwReq.SetBody(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar resp Database\n\n\terr = s.client.Do(scwReq, &resp, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resp, nil\n}",
"func CreateRelease(res http.ResponseWriter, req *http.Request) {\n\tres.Header().Set(\"Content-Type\", \"application/json\")\n\tc := Release{\"relid\", \"http://ispw:8080/ispw/ispw/releases/relid\"}\n\toutgoingJSON, err := json.Marshal(c)\n\tif err != nil {\n\t\tlog.Println(err.Error())\n\t\thttp.Error(res, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tres.WriteHeader(http.StatusCreated)\n\tfmt.Fprint(res, string(outgoingJSON))\n}",
"func NewWeather(closeTapped func()) (view fyne.CanvasObject, viewModel *Weather) {\n\tw := Weather{}\n\tw.city = widget.NewLabel(\"City\")\n\tw.city.Alignment = fyne.TextAlignCenter\n\tw.city.TextStyle.Bold = true\n\n\tw.currentTemperature = widget.NewLabel(\"Current Temperature\")\n\tw.currentTemperature.Alignment = fyne.TextAlignCenter\n\n\tw.clock = widget.NewLabel(\"Clock\")\n\tw.clock.TextStyle.Bold = true\n\n\tw.lastUpdate = widget.NewLabel(\"Last update\")\n\tw.lastUpdate.Alignment = fyne.TextAlignCenter\n\n\tw.background = &canvas.Image{FillMode: canvas.ImageFillStretch}\n\tw.today = newForecast()\n\tw.tomorrow = newForecast()\n\tw.afterTomorrow = newForecast()\n\n\theader := container.New(layout.NewHBoxLayout(),\n\t\tlayout.NewSpacer(),\n\t\tcontainer.New(layout.NewVBoxLayout(),\n\t\t\tw.city,\n\t\t\tw.currentTemperature,\n\t\t),\n\t\tcontainer.NewVBox(),\n\t\tlayout.NewSpacer(),\n\t)\n\tfooter := container.New(layout.NewHBoxLayout(),\n\t\twidget.NewButton(assets.GetLabel(assets.Close), closeTapped),\n\t\tlayout.NewSpacer(),\n\t\tw.clock,\n\t)\n\tcenter := container.New(layout.NewVBoxLayout(),\n\t\tcontainer.New(layout.NewGridLayout(3),\n\t\t\tw.today.layout,\n\t\t\tw.tomorrow.layout,\n\t\t\tw.afterTomorrow.layout,\n\t\t),\n\t\tw.lastUpdate,\n\t)\n\tw.view = container.New(layout.NewMaxLayout(),\n\t\tw.background,\n\t\tcontainer.New(layout.NewVBoxLayout(),\n\t\t\theader,\n\t\t\tlayout.NewSpacer(),\n\t\t\tcenter,\n\t\t\tlayout.NewSpacer(),\n\t\t\tfooter,\n\t\t),\n\t)\n\n\tw.today.header.SetText(assets.GetLabel(assets.Today))\n\tw.tomorrow.header.SetText(assets.GetLabel(assets.Tomorrow))\n\tw.afterTomorrow.header.SetText(assets.GetLabel(assets.AfterTomorrow))\n\tdefaultBackground, _ := assets.GetBackgroundImage(weather.ConditionClear)\n\tw.SetBackground(defaultBackground)\n\n\treturn w.view, &w\n}",
"func New() *World {\n\treturn &World{}\n}",
"func (v ProspectsResource) Create(c buffalo.Context) error {\n\t// Allocate an empty Prospect\n\tprospect := &models.Prospect{}\n\n\t// Bind prospect to the html form elements\n\tif err := c.Bind(prospect); err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\n\t// Get the DB connection from the context\n\ttx := c.Value(\"tx\").(*pop.Connection)\n\n\t// Validate the data from the html form\n\tverrs, err := tx.ValidateAndCreate(prospect)\n\tif err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\n\tif verrs.HasAny() {\n\t\t// Make prospect available inside the html template\n\t\tc.Set(\"prospect\", prospect)\n\n\t\t// Make the errors available inside the html template\n\t\tc.Set(\"errors\", verrs)\n\n\t\t// Render again the new.html template that the user can\n\t\t// correct the input.\n\t\treturn c.Render(422, r.HTML(\"prospects/new.html\"))\n\t}\n\n\t// If there are no errors set a success message\n\tc.Flash().Add(\"success\", \"Prospect was created successfully\")\n\n\t// and redirect to the prospects index page\n\treturn c.Redirect(302, \"/prospects/%s\", prospect.ID)\n}",
"func newBoard(x, y, w, h int) *Board {\n\treturn &Board{\n\t\tDimension: Dimension{\n\t\t\tx,\n\t\t\ty,\n\t\t\tw,\n\t\t\th,\n\t\t},\n\t}\n}",
"func CreateTrade(w http.ResponseWriter, r *http.Request) {\n\tdata := &TradeRequest{}\n\tif err := render.Bind(r, data); err != nil {\n\t\trender.Render(w, r, ErrInvalidRequest(err))\n\t\treturn\n\t}\n\n\t// request auth is from initiator\n\t_, claims, _ := jwtauth.FromContext(r.Context())\n\ttrade, err := NewTrade(data, claims[\"userID\"].(string))\n\tif err != nil {\n\t\trender.Render(w, r, ErrInvalidRequest(err))\n\t\treturn\n\t}\n\n\tschID, err := primitive.ObjectIDFromHex(data.ScheduleID)\n\tif err != nil {\n\t\trender.Render(w, r, ErrServer(err))\n\t\treturn\n\t}\n\tif err = mh.InsertTrade(trade, schID); err != nil {\n\t\trender.Render(w, r, ErrServer(err))\n\t\treturn\n\t}\n\trender.Status(r, http.StatusCreated)\n\trender.Render(w, r, NewTradeResponse(*trade))\n}",
"func NewWorld() *World {\n\treturn &World{\n\t\tCities: make(map[CityName]*City),\n\t\tRoads: make(map[CityName][]*Road),\n\t}\n}",
"func (c *DetaClient) NewProgram(r *NewProgramRequest) (*NewProgramResponse, error) {\n\ti := &requestInput{\n\t\tPath: fmt.Sprintf(\"/%s/\", \"programs\"),\n\t\tMethod: \"POST\",\n\t\tNeedsAuth: true,\n\t\tBody: *r,\n\t}\n\n\to, err := c.request(i)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif o.Status != 200 {\n\t\tmsg := o.Error.Message\n\t\tif msg == \"\" {\n\t\t\tmsg = o.Error.Errors[0]\n\t\t}\n\t\treturn nil, fmt.Errorf(\"failed to create new program: %v\", msg)\n\t}\n\n\tvar resp NewProgramResponse\n\terr = json.Unmarshal(o.Body, &resp)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create new program: %v\", err)\n\t}\n\treturn &resp, nil\n}",
"func New(ctx context.Context, t *testing.T, cfg Config) *Hospital {\n\tt.Helper()\n\treturn WithTime(ctx, t, cfg, now)\n}",
"func newGame(renderer *sdl.Renderer) (*Game, error) {\n\tground, err := ground.NewGrounds(renderer)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\ttrex, err := trex.NewTrex(renderer)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcactus, err := cactus.NewCactus(renderer)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tclouds, err := clouds.NewClouds(renderer)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Game{\n\t\trenderer: renderer,\n\t\tground: ground,\n\t\ttrex: trex,\n\t\tcactus: cactus,\n\t\tclouds: clouds,\n\t}, nil\n}",
"func (u *Usecase) CreateDrone(w http.ResponseWriter, r *http.Request) (interface{}, error) {\n\tdrone := &models.Drone{}\n\tjson.NewDecoder(r.Body).Decode(drone)\n\n\tlog.Println(\"drone \", drone)\n\n\t//check if drone is already created\n\tif _, ok := models.DronesMap[drone.ID]; ok {\n\t\treturn nil, models.CreateAppError(\"drone already exist\", http.StatusConflict)\n\t}\n\n\t//check if sector exists\n\tif _, ok := models.SectorsMap[drone.SectorID]; !ok {\n\t\treturn nil, models.CreateAppError(\"sector doesn't exist\", http.StatusBadRequest)\n\t}\n\n\tmodels.DroneIDCounter++\n\t//set drone ID\n\tdrone.ID = models.DroneIDCounter\n\n\tif drone.Type == \"\" {\n\t\tdrone.Type = \"v1\"\n\t}\n\n\tsector := models.SectorsMap[drone.SectorID]\n\tsector.DroneList = append(sector.DroneList, drone)\n\n\t//insert in global map\n\tmodels.DronesMap[drone.ID] = drone\n\n\treturn drone, nil\n}",
"func (c *PlanetClient) Create() *PlanetCreate {\n\tmutation := newPlanetMutation(c.config, OpCreate)\n\treturn &PlanetCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}\n}",
"func NewReserve(appPath, name string) *Reserve {\n\tdbPath := appPath + \"data/\"\n\tos.MkdirAll(dbPath, 0755)\n\treturn &Reserve{\n\t\tname: name,\n\t\tpath: dbPath + name + \".json\",\n\t}\n}",
"func (repo GymRepository) CreateGym(gym models.GymProfile) models.GymProfile {\n\trepo.db.Create(&gym)\n\treturn gym\n}",
"func (esc *ExtendedSimpleContract) Create(ctx utils.CustomTransactionContextInterface, key string) error {\n\texisting := ctx.GetCallData()\n\n\tif existing != nil {\n\t\treturn fmt.Errorf(\"Cannot create world state pair with key %s. Already exists\", key)\n\t}\n\n\terr := ctx.GetStub().PutState(key, []byte(\"Initialised\"))\n\n\tif err != nil {\n\t\treturn errors.New(\"Unable to interact with world state\")\n\t}\n\n\treturn nil\n}",
"func NewProgram(lessons []*LessonPgm) *Program {\n\treturn &Program{base.WildCardLabel, lessons}\n}",
"func createPerson(w http.ResponseWriter, r *http.Request) {\n\tfmt.Println(\"CREATE HIT\")\n\tstmt, err := db.Prepare(\"INSERT INTO Persons(pAge, pName) VALUES (?,?)\")\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tvar per Person\n\tjson.Unmarshal(body, &per)\n\tage := per.Age\n\tname := per.Name\n\t_, err = stmt.Exec(age, name)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tfmt.Fprintf(w, \"New person was created\")\n}",
"func (app *application) createBoard(w http.ResponseWriter, r *http.Request) {\n\tplayerID := app.session.GetInt(r, \"authenticatedPlayerID\")\n\t// POST /create/board\n\terr := r.ParseForm()\n\tif err != nil {\n\t\tapp.clientError(w, http.StatusBadRequest)\n\t\treturn\n\t}\n\t\n\t// Create a new forms.Form struct containing the POSTed data\n\t// - Use the validation methods to check the content\n\tform := forms.New(r.PostForm)\n\tform.Required(\"boardName\")\n\tform.MaxLength(\"boardName\", 35)\n\n\t// Before returning to the caller, let's check the validity of the ship coordinates\n\t// - If anything is amiss, we can send those errors back as well\n\tvar carrier []string\n\tcInd := 0\n\tvar battleship []string\n\tbInd := 0\n\tvar cruiser []string\n\trInd := 0\n\tvar submarine []string\n\tsInd := 0\n\tvar destroyer []string\n\tdInd := 0\n\t// Loop through the POSTed data, checking for their values\n\t// - Add coordinates to a given ship's array\n for row := 1; row < 11; row++ {\n\t\trowStr := strconv.Itoa(row)\n \t\tfor _, col := range \"ABCDEFGHIJ\" {\n\t\t\tcolStr := string(col)\n\t\t\tshipXY := form.Get(\"shipXY\"+rowStr+colStr)\n\t\t\tif shipXY != \"\" {\n\t\t\t\t// Only I, the program, should be permitted to update this as a player enters a game\n\t\t\t\t//battleID := r.URL.Query().Get(\"battleID\")\n\t\t\t\t// playerID should be gotten from somewhere else\n\t\t\t\t//playerID = r.PostForm(\"playerID\")\n\n\t\t\t\t// Upper the values to simplify testing\n\t\t\t\t// - Build the slices containing the submitted coordinates\n\t\t\t\tswitch strings.ToUpper(shipXY) {\n\t\t\t\tcase \"C\":\n\t\t\t\t\tcarrier = append(carrier, rowStr+\",\"+colStr)\n\t\t\t\t\tcInd += 1\n\t\t\t\tcase \"B\":\n\t\t\t\t\tbattleship = append(battleship, rowStr+\",\"+colStr)\n\t\t\t\t\tbInd += 1\n\t\t\t\tcase \"R\":\n\t\t\t\t\tcruiser = append(cruiser, rowStr+\",\"+colStr)\n\t\t\t\t\trInd += 1\n\t\t\t\tcase \"S\":\n\t\t\t\t\tsubmarine = append(submarine, rowStr+\",\"+colStr)\n\t\t\t\t\tsInd += 1\n\t\t\t\tcase \"D\":\n\t\t\t\t\tdestroyer = append(destroyer, rowStr+\",\"+colStr)\n\t\t\t\t\tdInd += 1\n\t\t\t\tdefault:\n\t\t\t\t\t// Add this to Form's error object?\n\t\t\t\t\t// - I don't think it helps to tell the user this info\n\t\t\t\t\t// unless they're struggling to build the board\n\t\t\t\t\tfmt.Println(\"Unsupported character:\", shipXY)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Test our numbers, update .Valid property of our Form object\n\tform.RequiredNumberOfItems(\"carrier\", 5, cInd)\n\tform.RequiredNumberOfItems(\"battleship\", 4, bInd)\n\tform.RequiredNumberOfItems(\"cruiser\", 3, rInd)\n\tform.RequiredNumberOfItems(\"submarine\", 3, sInd)\n\tform.RequiredNumberOfItems(\"destroyer\", 2, dInd)\n\n\tform.ValidNumberOfItems(carrier, \"carrier\")\n\tform.ValidNumberOfItems(battleship, \"battleship\")\n\tform.ValidNumberOfItems(cruiser, \"cruiser\")\n\tform.ValidNumberOfItems(submarine, \"submarine\")\n\tform.ValidNumberOfItems(destroyer, \"destroyer\")\n\n\t// If our validation has failed anywhere along the way\n\t// - Take the user back to their board\n\tif !form.Valid() {\n\t\t// helper\n\t\tapp.renderBoard(w, r, \"create.board.page.tmpl\", &templateDataBoard{Form: form})\n\t\treturn\n\t}\n\n\t// If we've made it to here, then we have a good set of coordinates for a ship\n\t// - We have a boardID, playerID, shipName, and a bunch of coordinates\n\n\t// Create a new board, return boardID\n\tboardID, _ := app.boards.Create(playerID, form.Get(\"boardName\"))\n\n\t// Carrier\n\t_, err = app.boards.Insert(playerID, 
boardID, \"carrier\", carrier)\n\tif err != nil {\n\t\tapp.serverError(w, err)\n\t\treturn\n\t}\n\n\t// Battleship\n\t_, err = app.boards.Insert(playerID, boardID, \"battleship\", battleship)\n\tif err != nil {\n\t\tapp.serverError(w, err)\n\t\treturn\n\t}\n\n\t// Cruiser\n\t_, err = app.boards.Insert(playerID, boardID, \"cruiser\", cruiser)\n\tif err != nil {\n\t\tapp.serverError(w, err)\n\t\treturn\n\t}\n\n\t// Submarine\n\t_, err = app.boards.Insert(playerID, boardID, \"submarine\", submarine)\n\tif err != nil {\n\t\tapp.serverError(w, err)\n\t\treturn\n\t}\n\n\t// Destroyer\n\t_, err = app.boards.Insert(playerID, boardID, \"destroyer\", destroyer)\n\tif err != nil {\n\t\tapp.serverError(w, err)\n\t\treturn\n\t}\n\n\t// Add status message to session data; create new if one doesn't exist\n\tapp.session.Put(r, \"flash\", \"Board successfully created!\")\n\t// Send user back to list of boards\n\thttp.Redirect(w, r, \"/board/list\", http.StatusSeeOther)\n}",
"func NewLife(w, h int) *Life {\n\ta := NewField(w, h)\n\t// choose random boundary as 0.619 as life.awk\n\tboundary := 0.619\n\tfor i := 0; i < a.w; i++ {\n\t\tfor j := 0; j < a.h; j++ {\n\t\t\ta.Set(i, j, rand.Float64() < boundary)\n\t\t}\n\t}\n\treturn &Life{\n\t\ta: a,\n\t\tb: NewField(w, h),\n\t\tw: w, h: h,\n\t}\n}",
"func Create(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {\n\tfmt.Fprint(w, \"Welcome!\\n\")\n}",
"func (c *Client) ClanWar(tag string) (war CurrentWar, err error) {\n\tvar b []byte\n\tpath := \"/clans/%23\" + strings.ToUpper(tag) + \"/currentwar\"\n\tif b, err = c.get(path, map[string][]string{}); err == nil {\n\t\terr = json.Unmarshal(b, &war)\n\t}\n\treturn\n}",
"func createIceCream(w http.ResponseWriter, r *http.Request) {\r\n\tw.Header().Set(\"Content-Type\", \"application/json\")\r\n\tvar IceCream IceCream\r\n\t_ = json.NewDecoder(r.Body).Decode(&IceCream)\r\n\tIceCream.FlavourId = genNextId()\r\n\tflavours = append(flavours, IceCream)\r\n\tjson.NewEncoder(w).Encode(IceCream)\r\n}",
"func New() {\n\ttypeOfProject()\n}"
] | [
"0.63012815",
"0.6095818",
"0.6035038",
"0.57367706",
"0.5401127",
"0.5373335",
"0.53593504",
"0.5359165",
"0.53430766",
"0.5266468",
"0.5165891",
"0.5091316",
"0.5086812",
"0.5075339",
"0.5065485",
"0.5056614",
"0.50516707",
"0.50405157",
"0.50387293",
"0.5027701",
"0.5026452",
"0.50095874",
"0.50076663",
"0.5003414",
"0.5002449",
"0.4951717",
"0.4941419",
"0.49342835",
"0.49127913",
"0.49066472",
"0.49042675",
"0.48940054",
"0.48722026",
"0.4869868",
"0.48540223",
"0.48318344",
"0.48290372",
"0.48219573",
"0.48207065",
"0.48192993",
"0.48189056",
"0.48044366",
"0.48044366",
"0.48001587",
"0.47922173",
"0.47911018",
"0.47749028",
"0.47746268",
"0.47527862",
"0.47479832",
"0.4746739",
"0.47450224",
"0.47435954",
"0.4730639",
"0.47226387",
"0.47206506",
"0.4715225",
"0.47030997",
"0.47002557",
"0.4695669",
"0.4693703",
"0.46906847",
"0.46885228",
"0.46767715",
"0.4676189",
"0.46737328",
"0.4672926",
"0.46611145",
"0.4660093",
"0.46517426",
"0.46492377",
"0.4649211",
"0.46464846",
"0.4639458",
"0.46384427",
"0.46357584",
"0.46264365",
"0.46260536",
"0.46245608",
"0.4622086",
"0.46215636",
"0.45970324",
"0.4591735",
"0.45857188",
"0.458398",
"0.45837477",
"0.45805496",
"0.45748353",
"0.4574259",
"0.45733583",
"0.4566901",
"0.45635688",
"0.4557349",
"0.45550913",
"0.4537385",
"0.45361787",
"0.45360982",
"0.45343405",
"0.45342785",
"0.45337138"
] | 0.70646715 | 0 |
Creates a new colonial war | func (s *State) NewColonialWar(target pb.ProvinceId) bool { // TODO: Error return
if s.IsAtWar(target) || s.IsSiteOfConflict(target) || s.Get(target).Occupier() != pb.ProvinceId_NONE {
return false
}
c := &Conflict{
name: "Colonial War", // TODO
length: 0,
attackers: Faction{
// Dissidents
progress: 0,
},
defenders: Faction{
members: []pb.ProvinceId{s.Get(target).Occupier()},
progress: 0,
},
goal: s.Settings().GetConflictGoal(pb.ConflictType_COLONIAL_WAR),
base_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_COLONIAL_WAR),
locations: []pb.ProvinceId{target},
conflict_type: pb.ConflictType_COLONIAL_WAR,
}
s.Conflicts[target] = c
return true
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func New() *WarmerImpl {\n\treturn &WarmerImpl{}\n}",
"func (s *State) NewCivilWar(target pb.ProvinceId) bool { // TODO: Error return\n\tif s.IsAtWar(target) || s.IsSiteOfConflict(target) {\n\t\treturn false\n\t}\n\tc := &Conflict{\n\t\tname: \"Civil War\", // TODO\n\t\tlength: 0,\n\t\tattackers: Faction{\n\t\t\trebels: *(s.Get(target).Dissidents()),\n\t\t\tprogress: 0,\n\t\t},\n\t\tdefenders: Faction{\n\t\t\tmembers: []pb.ProvinceId{target},\n\t\t\tprogress: 0,\n\t\t},\n\t\tgoal: s.Settings().GetConflictGoal(pb.ConflictType_CIVIL_WAR),\n\t\tbase_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_CIVIL_WAR),\n\t\tlocations: []pb.ProvinceId{target},\n\t\tconflict_type: pb.ConflictType_CIVIL_WAR,\n\t}\n\ts.Conflicts[target] = c\n\treturn true\n}",
"func createWorld() {\n\tspace = chipmunk.NewSpace()\n\tspace.Gravity = vect.Vect{0, -900}\n\n\tstaticBody := chipmunk.NewBodyStatic()\n\tstaticLines = []*chipmunk.Shape{\n\t\tchipmunk.NewSegment(vect.Vect{0, -600}, vect.Vect{800.0, -600}, 0),\n\t\tchipmunk.NewSegment(vect.Vect{0, -600}, vect.Vect{0, 0}, 0),\n\t\tchipmunk.NewSegment(vect.Vect{800, -600}, vect.Vect{800.0, 0}, 0),\n\t}\n\tfor _, segment := range staticLines {\n\t\t// segment.SetElasticity(0.6)\n\t\tstaticBody.AddShape(segment)\n\t}\n\tspace.AddBody(staticBody)\n}",
"func CreateApplication() *Alpha {\n app := &Alpha{}\n app.Request = &Request{}\n app.Response = &Response{}\n app.init()\n return app\n}",
"func (s *State) NewConventionalWar(defenders []pb.ProvinceId, attackers []pb.ProvinceId, locations []pb.ProvinceId) bool { // TODO: Error return\n\tfor _, d := range defenders {\n\t\tif s.IsAtWar(d) || s.IsSiteOfConflict(d) {\n\t\t\treturn false\n\t\t}\n\t}\n\tfor _, a := range attackers {\n\t\tif s.IsAtWar(a) || s.IsSiteOfConflict(a) {\n\t\t\treturn false\n\t\t}\n\t}\n\tfor _, l := range locations {\n\t\tif s.IsAtWar(l) || s.IsSiteOfConflict(l) {\n\t\t\treturn false\n\t\t}\n\t}\n\t// TODO: Logic for joining wars?\n\tc := &Conflict{\n\t\tname: \"War!\", // TODO\n\t\tlength: 0,\n\t\tattackers: Faction{\n\t\t\tmembers: attackers,\n\t\t\tprogress: 0,\n\t\t},\n\t\tdefenders: Faction{\n\t\t\tmembers: defenders,\n\t\t\tprogress: 0,\n\t\t},\n\t\tgoal: s.Settings().GetConflictGoal(pb.ConflictType_CONVENTIONAL_WAR),\n\t\tbase_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_CONVENTIONAL_WAR),\n\t\tlocations: locations,\n\t\tconflict_type: pb.ConflictType_CONVENTIONAL_WAR,\n\t}\n\t// For now it maps only to the first location\n\ts.Conflicts[locations[0]] = c\n\treturn true\n}",
"func (wds *WeaponAISystem) New(w *ecs.World) {\n\n}",
"func Create(engine *leader.Leader, port int) *http.Server {\n\tgame.InitGames()\n\tlgger := logger.Init(\"BattleSnake Web\", true, false, ioutil.Discard)\n\tvar host string\n\tif os.Getenv(\"ENV\") == \"dev\" {\n\t\thost = \"localhost\"\n\t} else {\n\t\thost = \"\"\n\t}\n\treturn &http.Server{\n\t\tAddr: fmt.Sprintf(\"%s:%d\", host, port),\n\t\tHandler: web.NewRouter(engine, lgger),\n\t\tReadTimeout: time.Duration(500) * time.Millisecond, // TODO remove hardcoding\n\t\tWriteTimeout: time.Duration(500) * time.Millisecond, // TODO remove hardcoding\n\t}\n}",
"func newWorkspace(session *session, root string) *Workspace {\n\treturn &Workspace{\n\t\tsession: session,\n\t\trootPath: root,\n\t}\n}",
"func createLark() *Lark {\n\tvar talk = NewLark(\"Title Prefix\", \"5ff9b6ab-fbe3-490f-8980-71509263efe2\")\n\treturn talk\n}",
"func NewThestralApp(config Config) (app *Thestral, err error) {\n\tif len(config.Downstreams) == 0 {\n\t\terr = errors.New(\"no downstream server defined\")\n\t}\n\tif err == nil && len(config.Upstreams) == 0 {\n\t\terr = errors.New(\"no upstream server defined\")\n\t}\n\n\tapp = &Thestral{\n\t\tdownstreams: make(map[string]ProxyServer),\n\t\tupstreams: make(map[string]ProxyClient),\n\t}\n\n\t// create logger\n\tif err == nil {\n\t\tapp.log, err = CreateLogger(config.Logging)\n\t\tif err != nil {\n\t\t\terr = errors.WithMessage(err, \"failed to create logger\")\n\t\t}\n\t}\n\n\t// init db\n\tif err == nil && config.DB != nil {\n\t\terr = db.InitDB(*config.DB)\n\t}\n\n\t// create downstream servers\n\tif err == nil {\n\t\tdsLogger := app.log.Named(\"downstreams\")\n\t\tfor k, v := range config.Downstreams {\n\t\t\tapp.downstreams[k], err = CreateProxyServer(dsLogger.Named(k), v)\n\t\t\tif err != nil {\n\t\t\t\terr = errors.WithMessage(\n\t\t\t\t\terr, \"failed to create downstream server: \"+k)\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\n\t// create upstream clients\n\tif err == nil {\n\t\tfor k, v := range config.Upstreams {\n\t\t\tapp.upstreams[k], err = CreateProxyClient(v)\n\t\t\tif err != nil {\n\t\t\t\terr = errors.WithMessage(\n\t\t\t\t\terr, \"failed to create upstream client: \"+k)\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tapp.upstreamNames = append(app.upstreamNames, k)\n\t\t}\n\t}\n\n\t// create rule matcher\n\tif err == nil {\n\t\tapp.ruleMatcher, err = NewRuleMatcher(config.Rules)\n\t\tif err != nil {\n\t\t\terr = errors.WithMessage(err, \"failed to create rule matcher\")\n\t\t}\n\t}\n\tif err == nil {\n\t\tfor _, ruleUpstream := range app.ruleMatcher.AllUpstreams {\n\t\t\tif _, ok := app.upstreams[ruleUpstream]; !ok {\n\t\t\t\terr = errors.Errorf(\n\t\t\t\t\t\"undefined upstream '%s' used in the rule set\",\n\t\t\t\t\truleUpstream)\n\t\t\t}\n\t\t}\n\t}\n\n\t// parse other settings\n\tif err == nil {\n\t\tif config.Misc.ConnectTimeout != \"\" {\n\t\t\tapp.connectTimeout, err = time.ParseDuration(\n\t\t\t\tconfig.Misc.ConnectTimeout)\n\t\t\tif err != nil {\n\t\t\t\terr = errors.WithStack(err)\n\t\t\t}\n\t\t\tif err == nil && app.connectTimeout <= 0 {\n\t\t\t\terr = errors.New(\"'connect_timeout' should be greater than 0\")\n\t\t\t}\n\t\t} else {\n\t\t\tapp.connectTimeout = defaultConnectTimeout\n\t\t}\n\t}\n\tif err == nil && config.Misc.EnableMonitor {\n\t\tapp.monitor.Start(config.Misc.MonitorPath)\n\t}\n\n\treturn\n}",
"func NewApp(root string) *App {\n\n CheckEnv()\n\n // Use negroni for middleware\n ne := negroni.New()\n\n // Use gorilla/mux for routing\n ro := mux.NewRouter()\n\n // Use Render for template. Pass in path to templates folder\n // as well as asset helper functions.\n re := render.New(render.Options{\n Directory: filepath.Join(root, \"templates\"),\n Layout: \"layouts/layout\",\n Extensions: []string{\".html\"},\n Funcs: []template.FuncMap{\n\t\t\tAssetHelpers(root),\n\t\t},\n })\n qre := render.New(render.Options{\n Directory: filepath.Join(root, \"templates\"),\n Layout: \"layouts/message\",\n Extensions: []string{\".html\"},\n Funcs: []template.FuncMap{\n\t\t\tAssetHelpers(root),\n\t\t},\n })\n\n // Establish connection to DB as specificed in database.go\n db := NewDB()\n\n // Add middleware to the stack\n ne.Use(negroni.NewRecovery())\n ne.Use(negroni.NewLogger())\n ne.Use(NewAssetHeaders())\n ne.Use(negroni.NewStatic(http.Dir(\"public\")))\n ne.UseHandler(ro)\n\n train.Config.SASS.DebugInfo = true\n train.Config.SASS.LineNumbers = true\n train.Config.Verbose = true\n train.Config.BundleAssets = true\n //ZZZtrain.ConfigureHttpHandler(ro)\n\n // Return a new App struct with all these things.\n return &App{ne, ro, re, qre, db}\n}",
"func newApp(name string) (app *App, err error) {\n\tapp = &App{\n\t\tName: name,\n\t\tID: uuid.NewV5(namespace, \"org.homealone.\"+name).String(),\n\t\thandler: make(map[queue.Topic]message.Handler),\n\t\tdebug: *debug,\n\t\tfilterMessages: true,\n\t}\n\tapp.Log = log.NewLogger().With(log.Fields{\"app\": name, \"id\": app.ID})\n\treturn app, errors.Wrap(err, \"newApp failed\")\n}",
"func newPlane(mk, mdl string) *plane {\n\tp := &plane{}\n\tp.make = mk\n\tp.model = mdl\n\treturn p\n}",
"func New(mws ...Middleware) *Router {\n\tr := &Router{\n\t\tparent: nil,\n\t\thostrm: newHostMatcher(),\n\t\tmiddlewares: Middlewares{},\n\t\tnamedMiddlewares: make(map[string]Middlewares),\n\t\tpool: newCtxPool(),\n\t}\n\tr.Use(mws...)\n\tr.Configure(\n\t\tWithLogger(lionLogger),\n\t\tWithServer(&http.Server{\n\t\t\tReadTimeout: 5 * time.Second,\n\t\t\tWriteTimeout: 10 * time.Second,\n\t\t}),\n\t)\n\treturn r\n}",
"func NewSpace(t *testing.T, awaitilities wait.Awaitilities, opts ...SpaceOption) *toolchainv1alpha1.Space {\n\tnamePrefix := strings.ToLower(t.Name())\n\t// Remove all invalid characters\n\tnamePrefix = notAllowedChars.ReplaceAllString(namePrefix, \"\")\n\n\t// Trim if the length exceeds 40 chars (63 is the max)\n\tif len(namePrefix) > 40 {\n\t\tnamePrefix = namePrefix[0:40]\n\t}\n\n\tspace := &toolchainv1alpha1.Space{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tNamespace: awaitilities.Host().Namespace,\n\t\t\tGenerateName: namePrefix + \"-\",\n\t\t},\n\t}\n\tfor _, apply := range opts {\n\t\tapply(space)\n\t}\n\treturn space\n}",
"func MakeWorley(shaderpath string) Worley {\n\tcomputeshader, err := shader.MakeCompute(shaderpath + \"/noise/worley.comp\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t//create random seed\n\trandomdata := createRandom(1024 * 1024 * 4)\n\tnoisetexture, err := texture.MakeFromData(randomdata, 1024, 1024, gl.RGBA32F, gl.RGBA)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn Worley{\n\t\tcomputeshader: computeshader,\n\t\tnoisetexture: noisetexture,\n\n\t\twidth: 1024,\n\t\theight: 1024,\n\t\tresolution: 32,\n\t\toctaves: 1,\n\t\tradius: 40.0,\n\t\tradiusscale: 1,\n\n\t\tbrightness: 1.0,\n\t\tcontrast: 1.0,\n\t}\n}",
"func Create (appName string) {\n\n checkGopath ()\n checkContainer (appName)\n\n app := Application { Name: appName }\n\n app.createContainer ()\n\n err := app.copyFileTree (\n GOPATH + slash + applicationTemplatesPath,\n GOPATH_SRC + app.Name,\n )\n\n if err != nil {\n log.Fatal (err)\n }\n}",
"func New() *Yam {\n\ty := &Yam{}\n\ty.Config = NewConfig()\n\ty.Root = &Route{yam: y}\n\n\treturn y\n}",
"func New(width int, visualization string, zeitpunkte []time.Time) (slm sunlightmap) {\n\tslm = sunlightmap{}\n\tslm.Width = width - width%2\n\tslm.Height = slm.Width / 2\n\tslm.visualization = visualization\n\tslm.DaylightImageFilename = \"world_mine_day_solarized_720-360.png\"\n\tslm.NighttimeImageFilename = \"world_mine_night_solarized_720-360.png\"\n\tslm.zeitpunkte = zeitpunkte //[]time.Time{time.Date(2017, 10, 24, 17, 30, 0, 0, time.UTC)}\n\treturn\n}",
"func NewBoard() Board {\n\tspaces := make(map[string]map[string]bool)\n\tspaces[\"white\"] = make(map[string]bool)\n\tspaces[\"black\"] = make(map[string]bool)\n\treturn Board{\n\t\tSpaces: spaces,\n\t}\n}",
"func NewApp(host string, port int) (*App, error) {\n\ta := &App{\n\t\tHost: host,\n\t\tPort: port,\n\t}\n\t// setup DB\n\tdb, err := newDB(\"hades.db\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ta.DB = db\n\t// setup Sessions\n\ts, err := newSessions(a)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ta.Sessions = s\n\t// setup Hades\n\th, err := hades.NewHades(db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ta.Hades = h\n\t// setup Listener\n\tln, err := newListener(a)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ta.Listener = ln\n\t// setup Templates\n\tt, err := newTemplates(\"../../templates\")\n\ta.Templates = t\n\t// setup Router\n\tr := mux.NewRouter().StrictSlash(true)\n\t// static file handler\n\tsbox := packr.NewBox(\"../../static\")\n\tfsHandler := http.StripPrefix(\"/static/\", http.FileServer(sbox))\n\tr.PathPrefix(\"/static/\").Handler(fsHandler).Methods(\"GET\")\n\t// application routes\n\tr.HandleFunc(\"/\", a.getIndexHandler).Methods(\"GET\")\n\tr.HandleFunc(\"/error\", a.getErrorHandler).Methods(\"GET\")\n\tr.HandleFunc(\"/login\", a.getLoginHandler).Methods(\"GET\")\n\tr.HandleFunc(\"/login\", a.postLoginHandler).Methods(\"POST\")\n\tr.HandleFunc(\"/logout\", a.getLogoutHandler).Methods(\"POST\")\n\tr.HandleFunc(\"/add\", a.getAddHandler).Methods(\"GET\")\n\tr.HandleFunc(\"/add\", a.postAddHandler).Methods(\"POST\")\n\tr.HandleFunc(\"/{id}/action\", a.postActionHandler).Methods(\"POST\")\n\ta.Router = r\n\treturn a, nil\n}",
"func NewProgram(lessons []*LessonPgm) *Program {\n\treturn &Program{base.WildCardLabel, lessons}\n}",
"func newBoard(x, y, w, h int) *Board {\n\treturn &Board{\n\t\tDimension: Dimension{\n\t\t\tx,\n\t\t\ty,\n\t\t\tw,\n\t\t\th,\n\t\t},\n\t}\n}",
"func createBaseAIR() *diam.Message {\n\tair := diameter.NewProxiableRequest(diam.AuthenticationInformation, diam.TGPP_S6A_APP_ID, dict.Default)\n\tair.NewAVP(avp.OriginHost, avp.Mbit, 0, datatype.DiameterIdentity(\"magma.com\"))\n\tair.NewAVP(avp.OriginRealm, avp.Mbit, 0, datatype.DiameterIdentity(\"magma.com\"))\n\tair.NewAVP(avp.AuthSessionState, avp.Mbit, 0, datatype.Enumerated(1))\n\treturn air\n}",
"func (t tApp) New(w http.ResponseWriter, r *http.Request, ctr, act string) *contr.App {\n\tc := &contr.App{}\n\tc.Controllers = Controllers.New(w, r, ctr, act)\n\treturn c\n}",
"func NewWorkspace(volume, containerName, imageName string) {\n CreateReadOnlyLayer(imageName)\n CreateWriteLayer(containerName)\n CreateMountPoint(containerName, imageName)\n if volume == \"\" {\n return\n }\n volumeURLs := strings.Split(volume, \":\")\n if len(volumeURLs) != 2 || volumeURLs[0] == \"\" || volumeURLs[1] == \"\" {\n log.Warn(\"Volume argument input is not correct.\")\n return\n }\n MountVolume(containerName, volumeURLs)\n log.Infof(\"Mount volume %q\", volumeURLs)\n}",
"func createCat() *Cat {\n\treturn NewCat(\"Mike\")\n}",
"func New(staff repository.IStaffRepo, security security.ISecurity) *router {\n\treturn &router{\n\t\tstaff: staff,\n\t\tsecurity: security,\n\t}\n}",
"func CreateBoard(m, n int) Board {\n\tboard := make([][]rune, m)\n\tfor i := range board {\n\t\tboard[i] = make([]rune, n)\n\t}\n\tinitializeBoard(board, m, n)\n\tvar used []primitives.Attacks\n\treturn Board{m, n, board, used, make(map[string]bool)}\n}",
"func newAtlasKarta(w, h, c, r int) *AtlasKarta {\n\treturn &AtlasKarta{\n\t\t&karta.Karta {\n\t\t\tWidth: w,\n\t\t\tHeight: h,\n\t\t\tUnit: float64(math.Min(float64(w), float64(h)) / 20),\n\t\t\tCells: karta.Cells{},\n\t\t\tDiagram: newKartaDiagram(float64(w), float64(h), c, r),\n\t\t\tNoise: noise.New(rand.Int63n(int64(w * h))),\n\t\t},\n\t}\n}",
"func NewRelwarc() *Relwarc {\n\tctx, cancel := chromedp.NewExecAllocator(context.Background(), defaultExecAllocatorOptions...)\n\treturn &Relwarc{\n\t\tctx: ctx,\n\t\tcancel: cancel,\n\t}\n}",
"func New(db database.Database, st storage.Storage, s *discordgo.Session,\n\tcmd shireikan.Handler, lct *lctimer.LifeCycleTimer, config *config.Config,\n\tpmw *middleware.PermissionsMiddleware, ota *onetimeauth.OneTimeAuth) (ws *WebServer, err error) {\n\n\tws = new(WebServer)\n\n\tif !strings.HasPrefix(config.WebServer.PublicAddr, \"http\") {\n\t\tprotocol := \"http\"\n\t\tif config.WebServer.TLS != nil && config.WebServer.TLS.Enabled {\n\t\t\tprotocol += \"s\"\n\t\t}\n\t\tconfig.WebServer.PublicAddr = fmt.Sprintf(\"%s://%s\", protocol, config.WebServer.PublicAddr)\n\t}\n\n\tif config.WebServer.APITokenKey == \"\" {\n\t\tconfig.WebServer.APITokenKey, err = random.GetRandBase64Str(32)\n\t} else if len(config.WebServer.APITokenKey) < 32 {\n\t\terr = errors.New(\"APITokenKey must have at leats a length of 32 characters\")\n\t}\n\tif err != nil {\n\t\treturn\n\t}\n\n\tws.config = config\n\tws.db = db\n\tws.st = st\n\tws.session = s\n\tws.cmdhandler = cmd\n\tws.pmw = pmw\n\tws.ota = ota\n\tws.rlm = NewRateLimitManager()\n\tws.af = NewAntiForgery()\n\tws.router = routing.New()\n\tws.server = &fasthttp.Server{\n\t\tHandler: ws.router.HandleRequest,\n\t}\n\n\tws.auth, err = NewAuth(db, s, lct, []byte(config.WebServer.APITokenKey))\n\tif err != nil {\n\t\treturn\n\t}\n\n\tws.dcoauth = discordoauth.NewDiscordOAuth(\n\t\tconfig.Discord.ClientID,\n\t\tconfig.Discord.ClientSecret,\n\t\tconfig.WebServer.PublicAddr+endpointAuthCB,\n\t\tws.auth.LoginFailedHandler,\n\t\tws.auth.LoginSuccessHandler,\n\t)\n\n\tws.registerHandlers()\n\n\treturn\n}",
"func newRouter() *Router {\n\treturn &Router{routes: make([]*Route, 0)}\n}",
"func NewWorkspace(name string, environment map[string]string, columns map[string]map[string][]string, inheritEnv bool) *Workspace {\n\tif environment == nil {\n\t\tenvironment = make(map[string]string)\n\t}\n\tws := &Workspace{\n\t\tName: name,\n\t\tEnvironment: environment,\n\t\tTasks: make(map[string]*Task),\n\t\tFunctions: make(map[string]*Function),\n\t\tColumns: columns,\n\t\tInheritEnvironment: inheritEnv,\n\t}\n\tif _, ok := ws.Environment[\"WORKSPACE\"]; !ok {\n\t\tws.Environment[\"WORKSPACE\"] = name\n\t}\n\treturn ws\n}",
"func NewDashing(root string, port string, token string) *Dashing {\n\tbroker := NewBroker()\n\tworker := NewWorker(broker)\n\tserver := NewServer(broker)\n\n\tserver.webroot = root\n\tworker.webroot = root\n\tworker.url = \"http://127.0.0.1:\" + port\n\tworker.token = token\n\n\tif os.Getenv(\"DEV\") != \"\" {\n\t\tserver.dev = true\n\t}\n\n\tserver.dev = true\n\treturn &Dashing{\n\t\tstarted: false,\n\t\tBroker: broker,\n\t\tWorker: worker,\n\t\tServer: server,\n\t}\n}",
"func newDeployment(apployment *appscodev1alpha1.Apployment) *appsv1.Deployment {\n\tlabels := map[string]string{\n\t\t\"app\": \"Appscode\",\n\t\t\"controller\": apployment.Name,\n\t}\n\treturn &appsv1.Deployment{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: apployment.Spec.ApploymentName,\n\t\t\tNamespace: apployment.Namespace,\n\t\t\tOwnerReferences: []metav1.OwnerReference{\n\t\t\t\t*metav1.NewControllerRef(apployment, appscodev1alpha1.SchemeGroupVersion.WithKind(\"Apployment\")),\n\t\t\t},\n\t\t},\n\t\tSpec: appsv1.DeploymentSpec{\n\t\t\tReplicas: apployment.Spec.Replicas,\n\t\t\tSelector: &metav1.LabelSelector{\n\t\t\t\tMatchLabels: labels,\n\t\t\t},\n\t\t\tTemplate: corev1.PodTemplateSpec{\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tLabels: labels,\n\t\t\t\t},\n\t\t\t\tSpec: corev1.PodSpec{\n\t\t\t\t\tContainers: []corev1.Container{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: apployment.Name,\n\t\t\t\t\t\t\tImage: apployment.Spec.Image,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}",
"func NewSpace() *Space {\n\tsp := &Space{}\n\treturn sp\n}",
"func Create(cfg map[string]interface{}) {\r\n\tgo func(c map[string]interface{}) {\r\n\t\tserver.CreateSingle(\"ws\", c, serveWs)\r\n\t}(cfg)\r\n\tgo func(c map[string]interface{}) {\r\n\t\tserver.CreateSingle(\"wss\", c, serveWs)\r\n\t}(cfg)\r\n}",
"func NewAirport(code string) (*Airport, error) {\n\tcode = strings.TrimSpace(code)\n\tc := &Airport{\n\t\tCode: code,\n\t}\n\n\terr := c.Validate()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn c, nil\n}",
"func New(info Info) *WebServer {\n\trouter := bone.New()\n\t// Add more to this later on\n\treturn &WebServer{info.Listen + \":\" + info.Port, router}\n}",
"func newRouter() *router {\n\treturn &router{\n\t\troots: make(map[string]*node),\n\t}\n}",
"func (FinagleFmt) Create(host string, port int) ZKRecord {\n\treturn &FinagleRecord{\n\t\tServiceEndpoint: endpoint{host, port},\n\t\tAdditionalEndpoints: make(map[string]endpoint),\n\t\tShard: 0,\n\t\tStatus: statusAlive,\n\t}\n}",
"func NewPlane(name string, client sleepwalker.RESTClient) Plane {\n\tdesc := \"airstrike.NewPlane\"\n\tlog.WithFields(map[string]interface{}{\n\t\t\"name\": name,\n\t\t\"client\": client,\n\t}).Debug(desc)\n\treturn Plane{Name: name, Client: client}\n}",
"func MakeTopology() Topology {\n\treturn Topology{\n\t\tNodes: map[string]Node{},\n\t}\n}",
"func New(prj *project.Project) *Application {\n\tcli := &Application{\n\t\tLog: log.NewStdout(log.NOTICE),\n\t\tProject: prj,\n\t\tcommands: make(map[string]Command),\n\t\tflags: make(map[int]flags.Interface),\n\t\tflagAliases: make(map[string]int),\n\t\tosArgs: os.Args[1:],\n\t}\n\t// set initial startup time\n\tcli.started = time.Now()\n\tcli.Log.TsDisabled()\n\tif prj.Config.InitTerm {\n\t\tcli.Log.InitTerm()\n\t}\n\tcli.Log.SetPrimaryColor(prj.Config.Color)\n\tcli.Log.SetLogLevel(prj.Config.LogLevel)\n\tcli.addInternalFlags()\n\tif prj.Config.Color != \"\" {\n\t\tcli.Log.Colors()\n\t}\n\n\t// Set log level to debug and lock the log level, but only if --debug\n\t// flag was found before any command. If --debug flag was found later\n\t// then we want to set debugging later for that command only.\n\tif cli.flag(\"debug\").IsGlobal() && cli.flag(\"debug\").Present() {\n\t\tcli.Log.SetLogLevel(log.DEBUG)\n\t\tcli.Log.LockLevel()\n\t\tcli.flag(\"verbose\").Unset()\n\t}\n\n\t// Only lock log level to verbose if no --debug flag was set\n\tif !cli.flag(\"debug\").Present() && cli.flag(\"verbose\").Present() {\n\t\tcli.Log.SetLogLevel(log.INFO)\n\t\tcli.Log.LockLevel()\n\t}\n\n\tcli.Log.Debugf(\"CLI:Create - accepting configuration changes debugging(%t)\",\n\t\tcli.flag(\"debug\").Present())\n\n\t// Add internal commands besides help\n\tcli.AddCommand(cmdAbout())\n\tcli.rootCmd = NewCommand(prj.Name)\n\tcli.Header.Defaults()\n\tcli.Footer.Defaults()\n\treturn cli\n}",
"func New() *Wirer {\n\treturn &Wirer{}\n}",
"func New(svc pb.OrganizationSvcServer) (ws *warden.Server, err error) {\n\t//var (\n\t//\tcfg warden.ServerConfig\n\t//\tct paladin.TOML\n\t//)\n\t//if err = paladin.Get(\"grpc.toml\").Unmarshal(&ct); err != nil {\n\t//\treturn\n\t//}\n\t//if err = ct.Get(\"Server\").UnmarshalTOML(&cfg); err != nil {\n\t//\treturn\n\t//}\n\tws = warden.NewServer(nil)\n\tpb.RegisterOrganizationSvcServer(ws.Server(), svc)\n\tws, err = ws.Start()\n\treturn\n}",
"func createTeam(w http.ResponseWriter, r *http.Request) {\n\tteam := models.NewTeam(\"\")\n\tskue.Create(view, team, w, r)\n}",
"func New(config Config) (spec.Forwarder, error) {\n\tnewForwarder := &forwarder{\n\t\tConfig: config,\n\n\t\tID: id.MustNew(),\n\t\tType: ObjectType,\n\t}\n\n\t// Dependencies.\n\tif newForwarder.FactoryCollection == nil {\n\t\treturn nil, maskAnyf(invalidConfigError, \"factory collection must not be empty\")\n\t}\n\tif newForwarder.Log == nil {\n\t\treturn nil, maskAnyf(invalidConfigError, \"logger must not be empty\")\n\t}\n\tif newForwarder.StorageCollection == nil {\n\t\treturn nil, maskAnyf(invalidConfigError, \"storage collection must not be empty\")\n\t}\n\n\t// Settings.\n\tif newForwarder.MaxSignals == 0 {\n\t\treturn nil, maskAnyf(invalidConfigError, \"maximum signals must not be empty\")\n\t}\n\n\tnewForwarder.Log.Register(newForwarder.GetType())\n\n\treturn newForwarder, nil\n}",
"func (routeObj *Routes)NewRouter() *mux.Router {\n log := logger.GetLoggerInstance()\n router := mux.NewRouter().StrictSlash(true)\n routeObj.CreateAllRoutes()\n for _, route := range routeObj.entries {\n var handler http.Handler\n handler = route.HandlerFunc\n router.\n Methods(route.Method).\n Path(route.Pattern).\n Name(route.Name).\n Handler(handler)\n log.Trace(\"Created route for %s\", route.Name)\n }\n routeObj.controller = new(controller)\n return router\n}",
"func newPixelWand(cpw *C.PixelWand) *PixelWand {\n\tpw := &PixelWand{pw: cpw}\n\truntime.SetFinalizer(pw, Destroy)\n\tpw.IncreaseCount()\n\n\treturn pw\n}",
"func PtrNewSaiyan(name string, power int) *Saiyan {\n\treturn &Saiyan{\n\t\tName: name,\n\t\tPower: power,\n\t}\n}",
"func newDeployment() *appsv1.Deployment {\n\tvar replicas int32 = 1\n\treturn &appsv1.Deployment{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tNamespace: tNs,\n\t\t\tName: tName,\n\t\t\tLabels: map[string]string{\n\t\t\t\tapplicationNameLabelKey: tName,\n\t\t\t},\n\t\t\tOwnerReferences: []metav1.OwnerReference{tOwnerRef},\n\t\t},\n\t\tSpec: appsv1.DeploymentSpec{\n\t\t\tReplicas: &replicas,\n\t\t\tSelector: &metav1.LabelSelector{MatchLabels: map[string]string{applicationNameLabelKey: tName}},\n\t\t\tTemplate: corev1.PodTemplateSpec{\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tLabels: map[string]string{\n\t\t\t\t\t\tdashboardLabelKey: dashboardLabelValue,\n\t\t\t\t\t\teventSourceLabelKey: eventSourceLabelValue,\n\t\t\t\t\t\tapplicationNameLabelKey: tName,\n\t\t\t\t\t\tapplicationLabelKey: tName,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tSpec: corev1.PodSpec{\n\t\t\t\t\tContainers: []corev1.Container{{\n\t\t\t\t\t\tImage: tImg,\n\t\t\t\t\t\tPorts: []corev1.ContainerPort{{\n\t\t\t\t\t\t\tName: portName,\n\t\t\t\t\t\t\tContainerPort: tPort,\n\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tName: metricsPortName,\n\t\t\t\t\t\t\t\tContainerPort: metricsPort,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tName: adapterContainerName,\n\t\t\t\t\t\tEnv: tEnvVars,\n\t\t\t\t\t\tReadinessProbe: &corev1.Probe{\n\t\t\t\t\t\t\tHandler: corev1.Handler{\n\t\t\t\t\t\t\t\tHTTPGet: &corev1.HTTPGetAction{\n\t\t\t\t\t\t\t\t\tPath: adapterHealthEndpoint,\n\t\t\t\t\t\t\t\t\tPort: intstr.FromInt(adapterPort),\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}",
"func createBodies() {\n\tspace = chipmunk.NewSpace()\n\tspace.Gravity = vect.Vect{0, -900}\n\n\tstaticBody := chipmunk.NewBodyStatic()\n\tstaticLines = []*chipmunk.Shape{\n\t\tchipmunk.NewSegment(vect.Vect{111.0, 280.0}, vect.Vect{407.0, 246.0}, 0),\n\t\tchipmunk.NewSegment(vect.Vect{407.0, 246.0}, vect.Vect{407.0, 343.0}, 0),\n\t}\n\tfor _, segment := range staticLines {\n\t\tsegment.SetElasticity(0.6)\n\t\tstaticBody.AddShape(segment)\n\t}\n\tspace.AddBody(staticBody)\n}",
"func (t tApp) newC(w http.ResponseWriter, r *http.Request, ctr, act string) *contr.App {\n\t// Allocate a new controller. Set values of special fields, if necessary.\n\tc := &contr.App{}\n\n\t// Allocate its parents. Make sure controller of every type\n\t// is allocated just once, then reused.\n\tc.Controllers = &contr.Controllers{}\n\tc.Controllers.Templates = c.Controllers.Errors.Templates\n\tc.Controllers.Errors = &c5.Errors{}\n\tc.Controllers.Static = &c3.Static{}\n\tc.Controllers.Sessions = &c2.Sessions{\n\n\t\tRequest: r,\n\n\t\tResponse: w,\n\t}\n\tc.Controllers.Requests = &c1.Requests{\n\n\t\tRequest: r,\n\n\t\tResponse: w,\n\t}\n\tc.Controllers.Global = &c0.Global{\n\n\t\tCurrentAction: act,\n\n\t\tCurrentController: ctr,\n\t}\n\tc.Controllers.Errors.Templates = &c4.Templates{}\n\tc.Controllers.Errors.Templates.Requests = c.Controllers.Requests\n\tc.Controllers.Errors.Templates.Global = c.Controllers.Global\n\tc.Controllers.Templates.Requests = c.Controllers.Requests\n\tc.Controllers.Templates.Global = c.Controllers.Global\n\n\treturn c\n}",
"func newApp() *iris.Application {\n\tapp := iris.New()\n\t// Optionally, add two built'n handlers\n\t// that can recover from any http-relative panics\n\t// and log the requests to the terminal.\n\tapp.Use(recover.New())\n\tapp.Use(logger.New())\n\n\t// Serve a controller based on the root Router, \"/\".\n\tmvc.New(app).Handle(new(comment.CommentsController))\n\tmvc.New(app).Handle(new(comment.OneCommentController))\n\tmvc.New(app).Handle(new(plant.PlantsCtrl))\n\tmvc.New(app).Handle(new(plant.OnePlantCtrl))\n\treturn app\n}",
"func NewTorontoTrip(weight float32, deadline int) *Trip {\n trip := Trip{weight: weight, destination: \"Toronto\", deadline: deadline}\n return &trip\n}",
"func create(s string) (p program) {\n\tre := regexp.MustCompile(`\\w+`)\n\tt := re.FindAllStringSubmatch(s, -1)\n\tp.name = t[0][0]\n\tp.weight, _ = strconv.Atoi(string(t[1][0]))\n\tfor _, r := range t[2:] {\n\t\tp.children = append(p.children, program{r[0], 0, nil})\n\t}\n\treturn\n}",
"func New(host, port string, h http.Handler) *WebServer {\n\tvar ws WebServer\n\n\tws.Addr = net.JoinHostPort(host, port)\n\tws.Handler = h\n\n\treturn &ws\n}",
"func New(host, port string) *Yeelight {\n\ty := &Yeelight{host: host, port: port}\n\treturn y\n}",
"func newRouter() *router {\n\treturn &router{\n\t\troots: make(map[string]*node),\n\t\thandlers: make(map[string]HandlerFunc),\n\t}\n}",
"func NewForth() *Forth {\n\tf := new(Forth)\n\tf.ds = *stack.NewStack()\n\tf.rs = *stack.NewStack()\n\n\tf.dict = make(map[string]word)\n\tf.addBuiltins()\n\treturn f\n}",
"func newBathroom(space uint64) *bathroomSpaces {\n\tb := &bathroomSpaces{\n\t\tavailable: &maxHeap{space},\n\t\tmultiplicity: map[uint64]uint64{\n\t\t\tspace: 1,\n\t\t},\n\t}\n\theap.Init(b.available)\n\n\treturn b\n}",
"func createRouter() *mux.Router {\n\tlog.WithFields(log.Fields{\n\t\t\"environment\": environment,\n\t\t\"address\": addr,\n\t\t\"TLS\": (tlsCertFile != \"\" && tlsKeyFile != \"\"),\n\t}).Info(\"Create server\")\n\trenderer := render.NewRenderer()\n\trouter := mux.NewRouter()\n\trouter.HandleFunc(\"/render\", renderHandler(renderer)).Methods(http.MethodGet)\n\trouter.HandleFunc(\"/statistics\", statisticsHandler(renderer)).Methods(http.MethodGet)\n\trouter.Use(loggingMiddleware)\n\treturn router\n}",
"func CreateForwarder(entry config.AmazonEntry, snsClient ...snsiface.SNSAPI) forwarder.Client {\n\tvar client snsiface.SNSAPI\n\tif len(snsClient) > 0 {\n\t\tclient = snsClient[0]\n\t} else {\n\t\tclient = sns.New(session.Must(session.NewSession()))\n\t}\n\tforwarder := Forwarder{entry.Name, client, entry.Target}\n\tlog.WithField(\"forwarderName\", forwarder.Name()).Info(\"Created forwarder\")\n\treturn forwarder\n}",
"func newTapestry(tap *tapestry.Node, zkAddr string) (*Tapestry, error) {\n\t//TODO: Setup a zookeeper connection and return a Tapestry struct\n\tconn, err := connectZk(zkAddr)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\texists, _, err := conn.Exists(\"/tapestry\")\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error: zookeeper fail to find target, reason is %v\", err)\n\t}\n\tif !exists {\n\t\t_, err = conn.Create(\"/tapestry\", nil, 0, zk.WorldACL(zk.PermAll))\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\t// Tapestry register them in ZooKeeper\n\t// we will simply use file paths as unique IDs for files and directories.\n\terr = createEphSeq(conn, filepath.Join(\"/tapestry\", tap.Addr()), []byte(tap.Addr()))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Tapestry{\n\t\ttap: tap,\n\t\tzk: conn,\n\t}, nil\n}",
"func New(descr string) App {\n\treturn &app{descr: descr}\n}",
"func NewSpace() *cp.Space {\n\tspace := cp.NewSpace()\n\t// rules:\n\tspace.SetDamping(constants.Damping)\n\tspace.SetGravity(cp.Vector{0, 0}) // no gravity\n\treturn space\n}",
"func newRouter() *Router {\n\tr := new(Router)\n\tr.routeMap = make(map[string]*Route)\n\tfor m := range METHODS {\n\t\tr.routeMap[m] = newRoute(\"/\", nil, nil)\n\t}\n\tr.routeNamedMap = make(map[string]string)\n\tr.group = newGroup()\n\treturn r\n}",
"func NewHOTP() *HOTP { return &HOTP{} }",
"func (s WashingtonPostScraper) CreateNewWashingtonPostScraper() *WashingtonPostScraper {\n\tc := colly.NewCollector()\n\t// c := colly.NewCollector(colly.Debugger(&debug.LogDebugger{}))\n\tc.UserAgent = s.UserAgent()\n\tc.IgnoreRobotsTxt = false\n\n\t// Adding this wait so AJAX can load, might need to look at https://github.com/chromedp/chromedp in the future\n\tc.Limit(&colly.LimitRule{\n\t\tDelay: 5 * time.Second,\n\t})\n\n\tscraper := WashingtonPostScraper{\n\t\tcollector: c,\n\t}\n\treturn &scraper\n}",
"func New(view *rom.View) *Server {\n\trouter := vestigo.NewRouter()\n\n\ts := &Server{\n\t\trom: view,\n\t\thttpServer: &http.Server{\n\t\t\tAddr: \"127.0.0.1:8064\",\n\t\t\tReadTimeout: 5 * time.Second,\n\t\t\tWriteTimeout: 10 * time.Second,\n\t\t\tIdleTimeout: 15 * time.Second,\n\t\t\tHandler: router,\n\t\t},\n\t\tstatic: packr.NewBox(\"../front/dist\"),\n\t\trouter: router,\n\t}\n\n\ts.setupRoutes()\n\n\treturn s\n}",
"func newClassy(view Viewer) Classy {\n\treturn classy{\n\t\tchain: alice.New(),\n\t\tpath: \"/\",\n\t\tstructname: getStructName(view),\n\t\tview: view,\n\t}.Name(getViewName(view, true))\n}",
"func CreateAdam(targetPhrase []rune) (*Offspring, error) {\n\tphrase := RandomRuneSlice( len(targetPhrase) )\n\tfitness, err := GetFitness(targetPhrase, phrase)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t\n\treturn &Offspring {\n\t\tParent: nil,\n\t\tPhrase: phrase,\n\t\tTargetPhrase: targetPhrase,\n\t\tGeneration: 0,\n\t\tFitness: fitness,\n\t}, nil\n}",
"func newHomeMVC() *MenuMVC {\n\t// create MenuMVC\n\tm := &MenuMVC{\n\t\tTitle: \"Sia Alpha v3\",\n\t\tMenuWidth: HomeMenuWidth,\n\t\tItems: []string{\n\t\t\t\"Wallets\",\n\t\t\t\"Participants\",\n\t\t\t\"Settings\",\n\t\t},\n\t}\n\n\t// add subviews\n\tm.Windows = []MVC{\n\t\tnewWalletMenuMVC(m),\n\t\tnewParticipantMenuMVC(m),\n\t\tnewSettingsMVC(m),\n\t}\n\n\treturn m\n}",
"func NewTimeConstraint()(*TimeConstraint) {\n m := &TimeConstraint{\n }\n m.backingStore = ie8677ce2c7e1b4c22e9c3827ecd078d41185424dd9eeb92b7d971ed2d49a392e.BackingStoreFactoryInstance();\n m.SetAdditionalData(make(map[string]any))\n return m\n}",
"func NewWorkspace(identifier string) Workspace {\n\treturn Workspace{\n\t\tIdentifier: identifier,\n\t\tCurrentVersion: \"4.0\",\n\t\tCreateVersion: \"4.0\",\n\t}\n}",
"func newWriter(w io.Writer) (*orc.Writer, error) {\n\tschema, err := orc.ParseSchema(entrySchema)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn orc.NewWriter(w, orc.SetSchema(schema))\n}",
"func NewDaddy() *Daddy {\n\treturn &Daddy{\n\t\tSon: make(map[string]*Assassin),\n\t\tSibling: make(map[string]*Sibling),\n\t}\n}",
"func (pc *programCode) createBss() {\n\tbssString := \"\\nsection .bss\\n\"\n\tfor v := range pc.intMap {\n\t\tbssString += \"\\t\" + v + \": resb 8\" + \"\\n\"\n\t}\n\tpc.code += bssString // appends this Assembly code to the end after creating all the functions\n}",
"func makeApp(def pchannel.App, data pchannel.Data) perun.App {\n\treturn perun.App{\n\t\tDef: def,\n\t\tData: data,\n\t}\n}",
"func NewProgram(cfg *client.Config, parentName string) *tea.Program {\n\tm := NewModel(cfg)\n\tm.standalone = true\n\tm.parentName = parentName\n\treturn tea.NewProgram(m)\n}",
"func (d *Director) Construct() {\n\td.builder.makeTitle(\"Greeting\")\n\td.builder.makeString(\"From the morning to the afternoon\")\n\td.builder.makeItems([]string{\"Good morning\", \"Hello\"})\n\td.builder.makeString(\"In the evening\")\n\td.builder.makeItems([]string{\"Good evening\", \"Good night\", \"Good bye\"})\n\td.builder.close()\n}",
"func makeShip(x, y, wx, wy int) ship {\n\t// The waypoint starts 10 units east and 1 unit north relative to the ship.\n\treturn ship{\n\t\tpos: []int{x, y},\n\t\twaypoint: []int{wx, wy},\n\t}\n}",
"func NewCabRouter(\n\tcabController controller.Cab,\n\n) Cab {\n\treturn Cab{\n\n\t\tcabController: cabController,\n\t}\n\n}",
"func (c *SpaceClient) Create(ctx context.Context, r *resource.SpaceCreate) (*resource.Space, error) {\n\tvar space resource.Space\n\t_, err := c.client.post(ctx, \"/v3/spaces\", r, &space)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &space, nil\n}",
"func newSwimmer(x, y int, right bool, moveTimer int) *swimmer {\n\tvar direction int\n\tif right {\n\t\tdirection = 1\n\t} else {\n\t\tdirection = -1\n\t}\n\n\treturn &swimmer{\n\t\txPos: x,\n\t\tyPos: y,\n\t\tmoveDirection: direction,\n\t\tmoveTimer: moveTimer,\n\t}\n}",
"func (f food) create(g grid) {\n}",
"func NewShip(size uint8) Ship {\n\treturn Ship{\n\t\tsize: size,\n\t\thealth: size,\n\t}\n}",
"func newDeployment(name, ns string, replicas int32) *apps.Deployment {\n\treturn &apps.Deployment{\n\t\tTypeMeta: metav1.TypeMeta{\n\t\t\tKind: \"Deployment\",\n\t\t\tAPIVersion: \"apps/v1\",\n\t\t},\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tNamespace: ns,\n\t\t\tName: name,\n\t\t},\n\t\tSpec: apps.DeploymentSpec{\n\t\t\tReplicas: &replicas,\n\t\t\tSelector: &metav1.LabelSelector{MatchLabels: testLabels()},\n\t\t\tStrategy: apps.DeploymentStrategy{\n\t\t\t\tType: apps.RollingUpdateDeploymentStrategyType,\n\t\t\t\tRollingUpdate: new(apps.RollingUpdateDeployment),\n\t\t\t},\n\t\t\tTemplate: v1.PodTemplateSpec{\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tLabels: testLabels(),\n\t\t\t\t},\n\t\t\t\tSpec: v1.PodSpec{\n\t\t\t\t\tContainers: []v1.Container{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: fakeContainerName,\n\t\t\t\t\t\t\tImage: fakeImage,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}",
"func New(w http.ResponseWriter, r *http.Request) {\r\n\ttmpl.ExecuteTemplate(w, \"New\", nil)\r\n}",
"func New(w http.ResponseWriter, r *http.Request) {\r\n\ttmpl.ExecuteTemplate(w, \"New\", nil)\r\n}",
"func New(app *grun.App, newW func() gtk.Widgetter) *Maker {\n\tbaseID := app.ID\n\tif baseID == \"\" {\n\t\tbaseID = \"com.github.gotk4.gtkest.default\"\n\t}\n\treturn &Maker{app: app, newW: newW, baseID: baseID}\n}",
"func NewWarmer(cf registry.ClientFactory, cacheClient Client, burst int) (*Warmer, error) {\n\tif cf == nil || cacheClient == nil || burst <= 0 {\n\t\treturn nil, errors.New(\"arguments must be non-nil (or > 0 in the case of burst)\")\n\t}\n\treturn &Warmer{\n\t\tclientFactory: cf,\n\t\tcache: cacheClient,\n\t\tburst: burst,\n\t}, nil\n}",
"func newBrains() *brains {\n\treturn &brains{\n\t\tk: make(map[string]*brain),\n\t\tn: make(map[string]*brain),\n\t}\n}",
"func Create() *Router {\n\tconfig := cors.DefaultConfig()\n\tconfig.AllowOrigins = []string{\"http://localhost:3000\"}\n\tr := Router{Router: gin.New()}\n\tr.Router.Use(gin.Recovery())\n\tr.Router.Use(cors.New(config))\n\t// SetUserGroup set user group in the router\n\tr.UserGroup = r.Router.Group(\"/users\")\n\tr.RadioGroup = r.Router.Group(\"/radios\")\n\tr.ActionGroup = r.Router.Group(\"/recommendations\")\n\treturn &r\n}",
"func New(address, port string, logger *log.Logger, secure bool) *Web {\n\tw := Web{\n\t\tAddress: address,\n\t\tPort: port,\n\t\tSecure: secure,\n\t}\n\tw.Init(logger)\n\treturn &w\n}",
"func New() *Beeper { return &Beeper{} }",
"func NewProgramControl()(*ProgramControl) {\n m := &ProgramControl{\n Entity: *NewEntity(),\n }\n return m\n}",
"func Create(pools *pools.Pool, port int) *http.Server {\n\tlgger := logger.Init(\"Engine Leader Web\", true, false, ioutil.Discard)\n\tvar host string\n\tif os.Getenv(\"ENV\") == \"dev\" {\n\t\thost = \"localhost\"\n\t} else {\n\t\thost = \"\"\n\t}\n\treturn &http.Server{\n\t\tAddr: fmt.Sprintf(\"%s:%d\", host, port),\n\t\tHandler: NewRouter(pools, lgger),\n\t\tReadTimeout: time.Duration(10000) * time.Millisecond, // TODO remove hardcoding\n\t\tWriteTimeout: time.Duration(10000) * time.Millisecond, // TODO remove hardcoding\n\t}\n}"
] | [
"0.56587833",
"0.55127555",
"0.5402266",
"0.5390911",
"0.5193612",
"0.5152806",
"0.5053274",
"0.5042677",
"0.5010594",
"0.49777117",
"0.4954968",
"0.49475503",
"0.49337056",
"0.49120912",
"0.49102825",
"0.490297",
"0.4892262",
"0.48919317",
"0.48588496",
"0.48468164",
"0.4788776",
"0.47820163",
"0.4779229",
"0.47726655",
"0.47707537",
"0.47670943",
"0.47581547",
"0.47301757",
"0.47272864",
"0.46981868",
"0.4678992",
"0.46712968",
"0.46711916",
"0.4667632",
"0.46616688",
"0.46378997",
"0.46341163",
"0.46314025",
"0.46200302",
"0.46180713",
"0.46099114",
"0.4608946",
"0.46083793",
"0.45921302",
"0.45875707",
"0.45868847",
"0.45787492",
"0.45661885",
"0.4561071",
"0.45571467",
"0.4540855",
"0.45332196",
"0.45294583",
"0.45272195",
"0.45270562",
"0.45218727",
"0.45179453",
"0.4514485",
"0.45090166",
"0.45010528",
"0.4488746",
"0.44863027",
"0.4484949",
"0.4473462",
"0.44688037",
"0.44686738",
"0.44675103",
"0.4460475",
"0.445804",
"0.44527698",
"0.4451695",
"0.44483808",
"0.44483075",
"0.44481668",
"0.44459596",
"0.44427538",
"0.4441739",
"0.44349387",
"0.44340172",
"0.4432979",
"0.4429038",
"0.4428397",
"0.4425246",
"0.44217393",
"0.4413825",
"0.44126767",
"0.44112238",
"0.44095424",
"0.4404405",
"0.44033533",
"0.4398672",
"0.4398672",
"0.43978763",
"0.4390365",
"0.43886614",
"0.43870574",
"0.43833897",
"0.43833616",
"0.4382218",
"0.43821666"
] | 0.6932265 | 0 |
Creates a new military intervention TODO Processes a conflict (rolls to determine progress, handles outcome) Returns false if conflict resolves | func (c *Conflict) Process(p *pseudo.State) WarResult {
c.length = c.length + 1
def_prog := p.Happens(c.GetModDefenderChance())
att_prog := p.Happens(c.GetModAttackerChance())
if att_prog {
// Attackers progress
c.attackers.progress++
}
if def_prog {
// Defenders progress
c.defenders.progress++
}
if att_prog && def_prog {
c.goal++
}
if c.attackers.progress >= c.goal {
return ATTACKER
} else if c.defenders.progress >= c.goal {
return DEFENDER
}
return ONGOING
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func NewConflict(parameters ...wparams.ParamStorer) Error {\n\treturn newGenericError(nil, DefaultConflict, wparams.NewParamStorer(parameters...))\n}",
"func NewConflict(field string) *AppError {\n\treturn NewError(AlreadyExists, field, \"already exists\")\n}",
"func Conflict(id Identifier) Constraint {\n\treturn conflict(id)\n}",
"func Conflict(w http.ResponseWriter, message ...interface{}) {\n\tboom(w, 409, message...)\n}",
"func Conflict(message string, args ...interface{}) *Failure {\n\treturn NewWithStatus(fmt.Sprintf(message, args...), http.StatusConflict)\n}",
"func NewCreateImageConflict() *CreateImageConflict {\n\treturn &CreateImageConflict{}\n}",
"func NewCreateHPCResourceConflict() *CreateHPCResourceConflict {\n\n\treturn &CreateHPCResourceConflict{}\n}",
"func NewCreatePackageConflict() *CreatePackageConflict {\n\treturn &CreatePackageConflict{}\n}",
"func Conflict(message ...interface{}) Err {\n\treturn Boomify(http.StatusConflict, message...)\n}",
"func Conflict(message string, errors []Error) {\n\tresponse := Response{\n\t\tStatus: http.StatusConflict,\n\t\tMessage: message,\n\t\tData: nil,\n\t\tErrors: errors,\n\t}\n\tpanic(response)\n}",
"func NewCreateFlowConflict() *CreateFlowConflict {\n\treturn &CreateFlowConflict{}\n}",
"func NewConflict(msg string) error {\n\treturn &ELBError{\n\t\tmsg: msg,\n\t\tCode: http.StatusConflict,\n\t}\n}",
"func Conflict(msg string) Error {\n\te := err{msg: msg, code: conflictCode, group: generic, kind: conflict}\n\treturn &e\n}",
"func NewOnConflict(target ConflictTarget, action ConflictAction) OnConflict {\n\treturn OnConflict{\n\t\tTarget: target,\n\t\tAction: action,\n\t}\n}",
"func NewCreateLookmlModelConflict() *CreateLookmlModelConflict {\n\treturn &CreateLookmlModelConflict{}\n}",
"func (s *State) NewColonialWar(target pb.ProvinceId) bool { // TODO: Error return\n\tif s.IsAtWar(target) || s.IsSiteOfConflict(target) || s.Get(target).Occupier() != pb.ProvinceId_NONE {\n\t\treturn false\n\t}\n\tc := &Conflict{\n\t\tname: \"Colonial War\", // TODO\n\t\tlength: 0,\n\t\tattackers: Faction{\n\t\t\t// Dissidents\n\t\t\tprogress: 0,\n\t\t},\n\t\tdefenders: Faction{\n\t\t\tmembers: []pb.ProvinceId{s.Get(target).Occupier()},\n\t\t\tprogress: 0,\n\t\t},\n\t\tgoal: s.Settings().GetConflictGoal(pb.ConflictType_COLONIAL_WAR),\n\t\tbase_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_COLONIAL_WAR),\n\t\tlocations: []pb.ProvinceId{target},\n\t\tconflict_type: pb.ConflictType_COLONIAL_WAR,\n\t}\n\ts.Conflicts[target] = c\n\treturn true\n}",
"func NewCreateRuleSetConflict() *CreateRuleSetConflict {\n\treturn &CreateRuleSetConflict{}\n}",
"func NewCreateClaimConflict() *CreateClaimConflict {\n\treturn &CreateClaimConflict{}\n}",
"func NewCreateThemeConflict() *CreateThemeConflict {\n\treturn &CreateThemeConflict{}\n}",
"func NewConflictR(field string, message string, args ...interface{}) *AppError {\n\treturn NewError(AlreadyExists, field, message, args...)\n}",
"func (c *interacts) Create(interact *v1.Interact) (result *v1.Interact, err error) {\n\tresult = &v1.Interact{}\n\terr = c.client.Post().\n\t\tNamespace(c.ns).\n\t\tResource(\"interacts\").\n\t\tBody(interact).\n\t\tDo().\n\t\tInto(result)\n\treturn\n}",
"func (r RuleSet) AddConflict(option, mutuallyExclusiveWith string) {\n\t*r[option].mutuallyExclusiveWith = append(*r[option].mutuallyExclusiveWith, *r[mutuallyExclusiveWith])\n\t*r[mutuallyExclusiveWith].mutuallyExclusiveWith = append(*r[mutuallyExclusiveWith].mutuallyExclusiveWith, *r[option])\n}",
"func NewTeamCreateConflict() *TeamCreateConflict {\n\treturn &TeamCreateConflict{}\n}",
"func NewCreateGUIDConflict() *CreateGUIDConflict {\n\treturn &CreateGUIDConflict{}\n}",
"func (s *State) NewCivilWar(target pb.ProvinceId) bool { // TODO: Error return\n\tif s.IsAtWar(target) || s.IsSiteOfConflict(target) {\n\t\treturn false\n\t}\n\tc := &Conflict{\n\t\tname: \"Civil War\", // TODO\n\t\tlength: 0,\n\t\tattackers: Faction{\n\t\t\trebels: *(s.Get(target).Dissidents()),\n\t\t\tprogress: 0,\n\t\t},\n\t\tdefenders: Faction{\n\t\t\tmembers: []pb.ProvinceId{target},\n\t\t\tprogress: 0,\n\t\t},\n\t\tgoal: s.Settings().GetConflictGoal(pb.ConflictType_CIVIL_WAR),\n\t\tbase_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_CIVIL_WAR),\n\t\tlocations: []pb.ProvinceId{target},\n\t\tconflict_type: pb.ConflictType_CIVIL_WAR,\n\t}\n\ts.Conflicts[target] = c\n\treturn true\n}",
"func CreateContainerConflict(t goatest.TInterface, ctx context.Context, service *goa.Service, ctrl app.ContainerController, command []string, entrypoint []string, env []string, image string, name string, sslRedirect bool, volumes []string, workingDir *string) (http.ResponseWriter, error) {\n\t// Setup service\n\tvar (\n\t\tlogBuf bytes.Buffer\n\t\tresp interface{}\n\n\t\trespSetter goatest.ResponseSetterFunc = func(r interface{}) { resp = r }\n\t)\n\tif service == nil {\n\t\tservice = goatest.Service(&logBuf, respSetter)\n\t} else {\n\t\tlogger := log.New(&logBuf, \"\", log.Ltime)\n\t\tservice.WithLogger(goa.NewLogger(logger))\n\t\tnewEncoder := func(io.Writer) goa.Encoder { return respSetter }\n\t\tservice.Encoder = goa.NewHTTPEncoder() // Make sure the code ends up using this decoder\n\t\tservice.Encoder.Register(newEncoder, \"*/*\")\n\t}\n\n\t// Setup request context\n\trw := httptest.NewRecorder()\n\tquery := url.Values{}\n\t{\n\t\tsliceVal := command\n\t\tquery[\"command\"] = sliceVal\n\t}\n\t{\n\t\tsliceVal := entrypoint\n\t\tquery[\"entrypoint\"] = sliceVal\n\t}\n\t{\n\t\tsliceVal := env\n\t\tquery[\"env\"] = sliceVal\n\t}\n\t{\n\t\tsliceVal := []string{image}\n\t\tquery[\"image\"] = sliceVal\n\t}\n\t{\n\t\tsliceVal := []string{name}\n\t\tquery[\"name\"] = sliceVal\n\t}\n\t{\n\t\tsliceVal := []string{fmt.Sprintf(\"%v\", sslRedirect)}\n\t\tquery[\"sslRedirect\"] = sliceVal\n\t}\n\t{\n\t\tsliceVal := volumes\n\t\tquery[\"volumes\"] = sliceVal\n\t}\n\tif workingDir != nil {\n\t\tsliceVal := []string{*workingDir}\n\t\tquery[\"workingDir\"] = sliceVal\n\t}\n\tu := &url.URL{\n\t\tPath: fmt.Sprintf(\"/api/v2/container/create\"),\n\t\tRawQuery: query.Encode(),\n\t}\n\treq, err := http.NewRequest(\"GET\", u.String(), nil)\n\tif err != nil {\n\t\tpanic(\"invalid test \" + err.Error()) // bug\n\t}\n\tprms := url.Values{}\n\t{\n\t\tsliceVal := command\n\t\tprms[\"command\"] = sliceVal\n\t}\n\t{\n\t\tsliceVal := entrypoint\n\t\tprms[\"entrypoint\"] = sliceVal\n\t}\n\t{\n\t\tsliceVal := env\n\t\tprms[\"env\"] = sliceVal\n\t}\n\t{\n\t\tsliceVal := []string{image}\n\t\tprms[\"image\"] = sliceVal\n\t}\n\t{\n\t\tsliceVal := []string{name}\n\t\tprms[\"name\"] = sliceVal\n\t}\n\t{\n\t\tsliceVal := []string{fmt.Sprintf(\"%v\", sslRedirect)}\n\t\tprms[\"sslRedirect\"] = sliceVal\n\t}\n\t{\n\t\tsliceVal := volumes\n\t\tprms[\"volumes\"] = sliceVal\n\t}\n\tif workingDir != nil {\n\t\tsliceVal := []string{*workingDir}\n\t\tprms[\"workingDir\"] = sliceVal\n\t}\n\tif ctx == nil {\n\t\tctx = context.Background()\n\t}\n\tgoaCtx := goa.NewContext(goa.WithAction(ctx, \"ContainerTest\"), rw, req, prms)\n\tcreateCtx, _err := app.NewCreateContainerContext(goaCtx, req, service)\n\tif _err != nil {\n\t\te, ok := _err.(goa.ServiceError)\n\t\tif !ok {\n\t\t\tpanic(\"invalid test data \" + _err.Error()) // bug\n\t\t}\n\t\treturn nil, e\n\t}\n\n\t// Perform action\n\t_err = ctrl.Create(createCtx)\n\n\t// Validate response\n\tif _err != nil {\n\t\tt.Fatalf(\"controller returned %+v, logs:\\n%s\", _err, logBuf.String())\n\t}\n\tif rw.Code != 409 {\n\t\tt.Errorf(\"invalid response status code: got %+v, expected 409\", rw.Code)\n\t}\n\tvar mt error\n\tif resp != nil {\n\t\tvar _ok bool\n\t\tmt, _ok = resp.(error)\n\t\tif !_ok {\n\t\t\tt.Fatalf(\"invalid response media: got variable of type %T, value %+v, expected instance of error\", resp, resp)\n\t\t}\n\t}\n\n\t// Return results\n\treturn rw, mt\n}",
"func (v *View) AddConflict(c Conflict) {\n\tv.cMutex.Lock()\n\tdefer v.cMutex.Unlock()\n\n\tif _, ok := v.Conflicts[c.ID]; !ok {\n\t\tv.Conflicts[c.ID] = Entry{\n\t\t\tOpinions: Opinions{c.Opinion},\n\t\t\tTimestamp: clock.SyncedTime(),\n\t\t}\n\t\treturn\n\t}\n\n\tentry := v.Conflicts[c.ID]\n\tentry.Opinions = append(entry.Opinions, c.Opinion)\n\tv.Conflicts[c.ID] = entry\n}",
"func IsConflict(err error) bool {\n\treturn errors.Cause(err) == errConflict\n}",
"func (s *State) NewConventionalWar(defenders []pb.ProvinceId, attackers []pb.ProvinceId, locations []pb.ProvinceId) bool { // TODO: Error return\n\tfor _, d := range defenders {\n\t\tif s.IsAtWar(d) || s.IsSiteOfConflict(d) {\n\t\t\treturn false\n\t\t}\n\t}\n\tfor _, a := range attackers {\n\t\tif s.IsAtWar(a) || s.IsSiteOfConflict(a) {\n\t\t\treturn false\n\t\t}\n\t}\n\tfor _, l := range locations {\n\t\tif s.IsAtWar(l) || s.IsSiteOfConflict(l) {\n\t\t\treturn false\n\t\t}\n\t}\n\t// TODO: Logic for joining wars?\n\tc := &Conflict{\n\t\tname: \"War!\", // TODO\n\t\tlength: 0,\n\t\tattackers: Faction{\n\t\t\tmembers: attackers,\n\t\t\tprogress: 0,\n\t\t},\n\t\tdefenders: Faction{\n\t\t\tmembers: defenders,\n\t\t\tprogress: 0,\n\t\t},\n\t\tgoal: s.Settings().GetConflictGoal(pb.ConflictType_CONVENTIONAL_WAR),\n\t\tbase_chance: s.Settings().GetConflictBaseChance(pb.ConflictType_CONVENTIONAL_WAR),\n\t\tlocations: locations,\n\t\tconflict_type: pb.ConflictType_CONVENTIONAL_WAR,\n\t}\n\t// For now it maps only to the first location\n\ts.Conflicts[locations[0]] = c\n\treturn true\n}",
"func (r Response) Conflict(code string, payload Payload, header ...ResponseHeader) {\n\tr.Response(code, http.Conflict, payload, header...)\n}",
"func (sh *Shift) Create() error {\n\tvalidator := validatorimpl.NewDefaultValidator()\n\terrs := validator.Verify(sh)\n\tif len(errs) != 0 {\n\t\treturn fmt.Errorf(\"Save the shift failed due to content errors: %v\", errs)\n\t}\n\tshiftRepo := repoimpl.GetShiftRepo()\n\tfindCtx, findCancel := utils.GetDefaultCtx()\n\tdefer findCancel()\n\trst := shiftRepo.FindOne(findCtx, bson.M{\"projectId\": sh.ProjectID})\n\tif rst.Err() == nil {\n\t\treturn DuplicateShiftError{}\n\t}\n\tctxInsert, cancelInsert := utils.GetDefaultCtx()\n\tdefer cancelInsert()\n\t_, err := shiftRepo.InsertOne(ctxInsert, sh)\n\treturn err\n}",
"func NewServiceCreateConflict() *ServiceCreateConflict {\n\treturn &ServiceCreateConflict{}\n}",
"func NewCreateFnConflict() *CreateFnConflict {\n\treturn &CreateFnConflict{}\n}",
"func (r *Responder) Conflict() { r.write(http.StatusConflict) }",
"func NewConflictResolver(\n\tconfig Config, fbo *folderBranchOps) *ConflictResolver {\n\t// make a logger with an appropriate module name\n\tbranchSuffix := \"\"\n\tif fbo.branch() != data.MasterBranch {\n\t\tbranchSuffix = \" \" + string(fbo.branch())\n\t}\n\ttlfStringFull := fbo.id().String()\n\tlog := config.MakeLogger(\n\t\tfmt.Sprintf(\"CR %s%s\", tlfStringFull[:8], branchSuffix))\n\n\tcr := &ConflictResolver{\n\t\tconfig: config,\n\t\tfbo: fbo,\n\t\tprepper: folderUpdatePrepper{\n\t\t\tconfig: config,\n\t\t\tfolderBranch: fbo.folderBranch,\n\t\t\tblocks: &fbo.blocks,\n\t\t\tlog: log,\n\t\t\tvlog: config.MakeVLogger(log),\n\t\t},\n\t\tlog: traceLogger{log},\n\t\tdeferLog: traceLogger{log.CloneWithAddedDepth(1)},\n\t\tmaxRevsThreshold: crMaxRevsThresholdDefault,\n\t\tcurrInput: conflictInput{\n\t\t\tunmerged: kbfsmd.RevisionUninitialized,\n\t\t\tmerged: kbfsmd.RevisionUninitialized,\n\t\t},\n\t}\n\n\tif fbo.bType == standard && config.Mode().ConflictResolutionEnabled() {\n\t\tcr.startProcessing(libcontext.BackgroundContextWithCancellationDelayer())\n\t}\n\treturn cr\n}",
"func (_pc *PCCreate) OnConflict(opts ...sql.ConflictOption) *PCUpsertOne {\n\t_pc.conflict = opts\n\treturn &PCUpsertOne{\n\t\tcreate: _pc,\n\t}\n}",
"func IsConflict(err error) bool {\n\treturn ReasonForError(err) == http.StatusConflict\n}",
"func RenderConflict(w http.ResponseWriter, message ...interface{}) {\n\tRender(w, Conflict(message...))\n}",
"func NewPerformIncidentActionConflict() *PerformIncidentActionConflict {\n\treturn &PerformIncidentActionConflict{}\n}",
"func NewCreateUserConflict() *CreateUserConflict {\n\n\treturn &CreateUserConflict{}\n}",
"func NewCreateUserConflict() *CreateUserConflict {\n\n\treturn &CreateUserConflict{}\n}",
"func (r *Reply) Conflict() *Reply {\n\treturn r.Status(http.StatusConflict)\n}",
"func (rs *RuleSet) AddConflict(a, b string) {\n\trs.conflicts[a] = append(rs.conflicts[a], b)\n\trs.conflicts[b] = append(rs.conflicts[b], a)\n}",
"func CreateCompetition(competition Competition, competitionRepo ICompetitionRepository,\n\tprovisionRepo IOrganizerProvisionRepository, historyRepo IOrganizerProvisionHistoryRepository) error {\n\t// check if data received is validationErr\n\tif validationErr := competition.validateCreateCompetition(); validationErr != nil {\n\t\treturn validationErr\n\t}\n\n\tif competition.statusID == 0 {\n\t\tcompetition.statusID = CompetitionStatusPreRegistration\n\t}\n\n\t// check if organizer is provisioned with available competitions\n\tprovisions, _ := provisionRepo.SearchOrganizerProvision(SearchOrganizerProvisionCriteria{\n\t\tOrganizerID: competition.CreateUserID,\n\t})\n\tif len(provisions) != 1 {\n\t\treturn errors.New(\"no organizer record is found\")\n\t}\n\tprovision := provisions[0]\n\tif provision.Available < 1 {\n\t\treturn errors.New(\"no available competition slot\")\n\t}\n\n\tnewProvision := provision.updateForCreateCompetition(competition)\n\thistoryEntry := newProvisionHistoryEntry(newProvision, competition)\n\tupdateOrganizerProvision(newProvision, historyEntry, provisionRepo, historyRepo)\n\n\terr := competitionRepo.CreateCompetition(&competition)\n\tif err != nil {\n\t\t// refund competition organizer's provision\n\t\trefundProvision := newProvision\n\t\trefundProvision.Available += 1\n\t\trefundProvision.Hosted -= 1\n\n\t\trefundEntry := OrganizerProvisionHistoryEntry{\n\t\t\tOrganizerRoleID: refundProvision.OrganizerRoleID,\n\t\t\tAmount: 1,\n\t\t\tNote: fmt.Sprintf(\"Refund for failing in creating competition %v %v\", competition.Name, competition.StartDateTime),\n\t\t\tCreateUserID: competition.CreateUserID,\n\t\t\tDateTimeCreated: time.Now(),\n\t\t\tUpdateUserID: competition.UpdateUserID,\n\t\t\tDateTimeUpdated: time.Now(),\n\t\t}\n\t\tupdateOrganizerProvision(refundProvision, refundEntry, provisionRepo, historyRepo)\n\t}\n\treturn err\n}",
"func NewStalemate() Outcome { return Outcome{Winner: Transparent, Reason: stalemate} }",
"func (self *AgentState) HasConflict(potentialJobName string, potentialConflicts []string) (bool, string) {\n\t// Iterate through each existing Job, asserting two things:\n\tfor existingJobName, existingConflicts := range self.conflicts {\n\n\t\t// 1. Each tracked Job does not conflict with the potential conflicts\n\t\tfor _, pc := range potentialConflicts {\n\t\t\tif globMatches(pc, existingJobName) {\n\t\t\t\treturn true, existingJobName\n\t\t\t}\n\t\t}\n\n\t\t// 2. The new Job does not conflict with any of the tracked confclits\n\t\tfor _, ec := range existingConflicts {\n\t\t\tif globMatches(ec, potentialJobName) {\n\t\t\t\treturn true, existingJobName\n\t\t\t}\n\t\t}\n\t}\n\n\treturn false, \"\"\n}",
"func Conflict(err error) Response {\n\tmessage := \"already exists\"\n\tif err != nil {\n\t\tmessage = err.Error()\n\t}\n\treturn &errorResponse{\n\t\tcode: http.StatusConflict,\n\t\tmsg: message,\n\t}\n}",
"func IsConflict(err error) bool {\n\t// TODO(horwitz): This is supposed to be fixed soon. It's a bug in the OCI API that causes a 409 to\n\t// be returned instead of a 412.\n\treturn IsError(err, \"409\", \"Conflict\") || IsError(err, \"412\", \"NoEtagMatch\")\n}",
"func NewCreateUserConflict() *CreateUserConflict {\n\treturn &CreateUserConflict{}\n}",
"func (b *BranchDAG) Conflict(conflictID ConflictID) *CachedConflict {\n\treturn &CachedConflict{CachedObject: b.conflictStorage.Load(conflictID.Bytes())}\n}",
"func NewCreateOctopusPackageMetadataConflict() *CreateOctopusPackageMetadataConflict {\n\treturn &CreateOctopusPackageMetadataConflict{}\n}",
"func (o *UpdateMTOPostCounselingInformationConflict) Code() int {\n\treturn 409\n}",
"func NewCreateSessionConflict() *CreateSessionConflict {\n\treturn &CreateSessionConflict{}\n}",
"func NewCreateInputPortConflict() *CreateInputPortConflict {\n\treturn &CreateInputPortConflict{}\n}",
"func NewCreateServerSwitchingRuleConflict() *CreateServerSwitchingRuleConflict {\n\treturn &CreateServerSwitchingRuleConflict{\n\t\tConfigurationVersion: 0,\n\t}\n}",
"func NewCreateMyProfileConflict() *CreateMyProfileConflict {\n\treturn &CreateMyProfileConflict{}\n}",
"func NewCreateSpoeConflict() *CreateSpoeConflict {\n\n\treturn &CreateSpoeConflict{}\n}",
"func NewPayConflict() *PayConflict {\n\treturn &PayConflict{}\n}",
"func NewCreateDeploymentConflict() *CreateDeploymentConflict {\n\treturn &CreateDeploymentConflict{}\n}",
"func SendConflict(w http.ResponseWriter, opts ...ErrorOpts) {\n\tres := errorResponse{\n\t\tCode: CodeConflict,\n\t\tMessage: \"Conflict\",\n\t}\n\tres.apply(opts)\n\tSendJSON(w, 409, &res)\n}",
"func (rnode *RuleNode) HasConflict() bool {\n\treturn false // TODO\n}",
"func NewCreateTCPCheckConflict() *CreateTCPCheckConflict {\n\n\treturn &CreateTCPCheckConflict{}\n}",
"func (c *SeaterController) Conflictf(format string, args ...interface{}) {\n\tc.TraceConflictf(nil, format, args...)\n}",
"func (o *PcloudVolumeOnboardingPostConflict) IsSuccess() bool {\n\treturn false\n}",
"func (o *CreateUsingPOSTMixin5Conflict) IsCode(code int) bool {\n\treturn code == 409\n}",
"func isRetryableConflict(err *genericarmclient.CloudError) bool {\n\tif err == nil {\n\t\treturn false\n\t}\n\n\t// We retry on this code as ADS may be in the process of being enabled (in the case of parallel deployment)\n\treturn err.Code() == \"VulnerabilityAssessmentADSIsDisabled\"\n}",
"func (service *ResultService) ResolveConflict(in *proto_job.ResultRequest) (*proto_job.ResultReply, error) {\n\tresult, err := service.accessor.GetByID(uint(in.Id))\n\n\tif err != nil {\n\t\treturn nil, err\n\t} else if result.ID == 0 {\n\t\tlog.Fatal(\"Conflict not found in SetResultState\")\n\t}\n\n\tresult.State = \"RESOLVED\"\n\tresult.TaxonID = uint(in.TaxonId)\n\terr = service.accessor.Save(result)\n\n\treturn converters.ResultModelToProto(result), err\n}",
"func NewCreateMergeQueryConflict() *CreateMergeQueryConflict {\n\treturn &CreateMergeQueryConflict{}\n}",
"func (o *UpdateMTOPostCounselingInformationConflict) IsCode(code int) bool {\n\treturn code == 409\n}",
"func NewCreateStat1Conflict() *CreateStat1Conflict {\n\treturn &CreateStat1Conflict{}\n}",
"func NewInitiateReplaceProcessGroupConflict() *InitiateReplaceProcessGroupConflict {\n\treturn &InitiateReplaceProcessGroupConflict{}\n}",
"func (o *PcloudVolumeOnboardingPostConflict) IsCode(code int) bool {\n\treturn code == 409\n}",
"func NewCreateRemoteProcessGroupConflict() *CreateRemoteProcessGroupConflict {\n\treturn &CreateRemoteProcessGroupConflict{}\n}",
"func (bot *DiscordBot) InteractionCreate(s *discordgo.Session, i *discordgo.InteractionCreate) {\n\tif h, ok := commandHandlers[i.ApplicationCommandData().Name]; ok {\n\t\th(s, i)\n\t}\n}",
"func Conflict(data Serializer, logging ...interface{}) Response {\n\tif data == nil {\n\t\tdata = String(\"409 Conflict\")\n\t}\n\treturn Response{\n\t\tStatus: http.StatusConflict,\n\t\tData: data,\n\t\tLogging: logging,\n\t}\n}",
"func NewImportArchiveConflict() *ImportArchiveConflict {\n\treturn &ImportArchiveConflict{}\n}",
"func (b *BranchDAG) CreateConflictBranch(branchID BranchID, parentBranchIDs BranchIDs, conflictIDs ConflictIDs) (cachedConflictBranch *CachedBranch, newBranchCreated bool, err error) {\n\tnormalizedParentBranchIDs, err := b.normalizeBranches(parentBranchIDs)\n\tif err != nil {\n\t\terr = errors.Errorf(\"failed to normalize parent Branches: %w\", err)\n\t\treturn\n\t}\n\n\tcachedConflictBranch, newBranchCreated, err = b.createConflictBranchFromNormalizedParentBranchIDs(branchID, normalizedParentBranchIDs, conflictIDs)\n\treturn\n}",
"func NewConflictNoAction() ConflictNoAction {\n\treturn ConflictNoAction{}\n}",
"func attemptLoadImpteamAndConflict(ctx context.Context, g *libkb.GlobalContext, impTeamName keybase1.ImplicitTeamDisplayName,\n\tnameWithoutConflict string, preResolveDisplayName string, skipCache bool) (conflicts []keybase1.ImplicitTeamConflictInfo, teamID keybase1.TeamID, hitCache bool, err error) {\n\n\tdefer g.CTrace(ctx,\n\t\tfmt.Sprintf(\"attemptLoadImpteamAndConflict(impName=%q,woConflict=%q,preResolve=%q,skipCache=%t)\", impTeamName, nameWithoutConflict, preResolveDisplayName, skipCache),\n\t\t&err)()\n\timp, hitCache, err := loadImpteam(ctx, g, nameWithoutConflict, impTeamName.IsPublic, skipCache)\n\tif err != nil {\n\t\treturn conflicts, teamID, hitCache, err\n\t}\n\tif len(imp.Conflicts) > 0 {\n\t\tg.Log.CDebugf(ctx, \"LookupImplicitTeam found %v conflicts\", len(imp.Conflicts))\n\t}\n\t// We will use this team. Changed later if we selected a conflict.\n\tvar foundSelectedConflict bool\n\tteamID = imp.TeamID\n\t// We still need to iterate over Conflicts because we are returning parsed\n\t// conflict list. So even if caller is not requesting a conflict team, go\n\t// through this loop.\n\tfor i, conflict := range imp.Conflicts {\n\t\tg.Log.CDebugf(ctx, \"| checking conflict: %+v (iter %d)\", conflict, i)\n\t\tconflictInfo, err := conflict.parse()\n\t\tif err != nil {\n\t\t\t// warn, don't fail\n\t\t\tg.Log.CDebugf(ctx, \"LookupImplicitTeam got conflict suffix: %v\", err)\n\t\t\tcontinue\n\t\t}\n\t\tif conflictInfo == nil {\n\t\t\tg.Log.CDebugf(ctx, \"| got unexpected nil conflictInfo (iter %d)\", i)\n\t\t\tcontinue\n\t\t}\n\t\tconflicts = append(conflicts, *conflictInfo)\n\n\t\tg.Log.CDebugf(ctx, \"| parsed conflict into conflictInfo: %+v\", *conflictInfo)\n\n\t\tif impTeamName.ConflictInfo != nil {\n\t\t\tmatch := libkb.FormatImplicitTeamDisplayNameSuffix(*impTeamName.ConflictInfo) == libkb.FormatImplicitTeamDisplayNameSuffix(*conflictInfo)\n\t\t\tif match {\n\t\t\t\tteamID = conflict.TeamID\n\t\t\t\tfoundSelectedConflict = true\n\t\t\t\tg.Log.CDebugf(ctx, \"| found conflict suffix match: %v\", teamID)\n\t\t\t} else {\n\t\t\t\tg.Log.CDebugf(ctx, \"| conflict suffix didn't match (teamID %v)\", conflict.TeamID)\n\t\t\t}\n\t\t}\n\t}\n\tif impTeamName.ConflictInfo != nil && !foundSelectedConflict {\n\t\t// We got the team but didn't find the specific conflict requested.\n\t\treturn conflicts, teamID, hitCache, NewTeamDoesNotExistError(\n\t\t\timpTeamName.IsPublic, \"could not find team with suffix: %v\", preResolveDisplayName)\n\t}\n\treturn conflicts, teamID, hitCache, nil\n}",
"func WrapWithConflict(cause error, parameters ...wparams.ParamStorer) Error {\n\treturn newGenericError(cause, DefaultConflict, wparams.NewParamStorer(parameters...))\n}",
"func (c *client) CreateMilestone(\n\tid interface{},\n\topt *glab.CreateMilestoneOptions,\n\toptions ...glab.RequestOptionFunc,\n) (*glab.Milestone, *glab.Response, error) {\n\treturn c.c.Milestones.CreateMilestone(id, opt, options...)\n}",
"func (m *ManualIntervention) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateApprover(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateCreatedOn(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateModifiedOn(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateRelease(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateReleaseDefinition(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateReleaseEnvironment(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateTaskInstanceID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}",
"func NewCreateACLConflict() *CreateACLConflict {\n\n\treturn &CreateACLConflict{}\n}",
"func (o *CreateUsingPOSTMixin5Conflict) IsSuccess() bool {\n\treturn false\n}",
"func TestConflictResolution(t *testing.T) {\n\tpoolB := mkPool(poolBUID, \"pool-b\", []string{\"10.0.10.0/24\", \"FF::0/48\"})\n\tpoolB.CreationTimestamp = meta_v1.Date(2022, 10, 16, 13, 30, 00, 0, time.UTC)\n\tfixture := mkTestFixture([]*cilium_api_v2alpha1.CiliumLoadBalancerIPPool{\n\t\tmkPool(poolAUID, \"pool-a\", []string{\"10.0.10.0/24\"}),\n\t\tpoolB,\n\t}, true, false, nil)\n\n\tawait := fixture.AwaitPool(func(action k8s_testing.Action) bool {\n\t\tif action.GetResource() != poolResource || action.GetVerb() != \"patch\" {\n\t\t\treturn false\n\t\t}\n\n\t\tpool := fixture.PatchedPool(action)\n\n\t\tif pool.Name != \"pool-b\" {\n\t\t\treturn false\n\t\t}\n\n\t\tif !isPoolConflicting(pool) {\n\t\t\treturn false\n\t\t}\n\n\t\treturn true\n\t}, time.Second)\n\n\tgo fixture.hive.Start(context.Background())\n\tdefer fixture.hive.Stop(context.Background())\n\n\tif await.Block() {\n\t\tt.Fatal(\"Pool B has not been marked conflicting\")\n\t}\n\n\t// All ranges of a conflicting pool must be disabled\n\tpoolBRanges, _ := fixture.lbIPAM.rangesStore.GetRangesForPool(\"pool-b\")\n\tfor _, r := range poolBRanges {\n\t\tif !r.internallyDisabled {\n\t\t\tt.Fatalf(\"Range '%s' from pool B hasn't been disabled\", ipNetStr(r.allocRange.CIDR()))\n\t\t}\n\t}\n\n\t// Phase 2, resolving the conflict\n\n\tawait = fixture.AwaitPool(func(action k8s_testing.Action) bool {\n\t\tif action.GetResource() != poolResource || action.GetVerb() != \"patch\" {\n\t\t\treturn false\n\t\t}\n\n\t\tpool := fixture.PatchedPool(action)\n\n\t\tif pool.Name != \"pool-b\" {\n\t\t\treturn false\n\t\t}\n\n\t\tif isPoolConflicting(pool) {\n\t\t\treturn false\n\t\t}\n\n\t\treturn true\n\t}, time.Second)\n\n\tpoolB, err := fixture.poolClient.Get(context.Background(), \"pool-b\", meta_v1.GetOptions{})\n\tif err != nil {\n\t\tt.Fatal(poolB)\n\t}\n\n\t// Remove the conflicting range\n\tpoolB.Spec.Cidrs = []cilium_api_v2alpha1.CiliumLoadBalancerIPPoolCIDRBlock{\n\t\t{\n\t\t\tCidr: cilium_api_v2alpha1.IPv4orIPv6CIDR(\"FF::0/48\"),\n\t\t},\n\t}\n\n\t_, err = fixture.poolClient.Update(context.Background(), poolB, meta_v1.UpdateOptions{})\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tif await.Block() {\n\t\tt.Fatal(\"Pool b has not de-conflicted\")\n\t}\n}",
"func createMilitaryMachine() EnigmaMachine {\n\tr1 := GenerateRotorI()\n\tr2 := GenerateRotorII()\n\tr3 := GenerateRotorIII()\n\n\tvar rotors RotorSet\n\n\trotors.left = &r1\n\trotors.middle = &r2\n\trotors.right = &r3\n\n\tstraightThroughPlugBoard := Plugboard{map[string]string{}}\n\n\tem, err := CreateEnigmaMachine(rotors, \"AAA\", straightThroughPlugBoard, GenerateReflectorB(), GenerateMilitaryInputRotor())\n\n\tif err != nil {\n\t\tlog.Fatal(\"There was an issue creating the machine: \" + err.Error())\n\t}\n\treturn em\n}",
"func (cr *ConflictResolver) createResolvedMD(ctx context.Context,\n\tlState *kbfssync.LockState, unmergedPaths []data.Path,\n\tunmergedChains, mergedChains *crChains,\n\tmostRecentMergedMD ImmutableRootMetadata) (*RootMetadata, error) {\n\terr := cr.checkDone(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tnewMD, err := mostRecentMergedMD.MakeSuccessor(\n\t\tctx, cr.config.MetadataVersion(), cr.config.Codec(),\n\t\tcr.config.KeyManager(), cr.config.KBPKI(),\n\t\tcr.config.KBPKI(), cr.config, mostRecentMergedMD.MdID(), true)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar newPaths []data.Path\n\tfor original, chain := range unmergedChains.byOriginal {\n\t\tadded := false\n\t\tfor i, op := range chain.ops {\n\t\t\tif cop, ok := op.(*createOp); ok {\n\t\t\t\t// We need to add in any creates that happened\n\t\t\t\t// within newly-created directories (which aren't\n\t\t\t\t// being merged with other newly-created directories),\n\t\t\t\t// to ensure that the overall Refs are correct and\n\t\t\t\t// that future CR processes can check those create ops\n\t\t\t\t// for conflicts.\n\t\t\t\tif unmergedChains.isCreated(original) &&\n\t\t\t\t\t!mergedChains.isCreated(original) {\n\t\t\t\t\t// Shallowly copy the create op and update its\n\t\t\t\t\t// directory to the most recent pointer -- this won't\n\t\t\t\t\t// work with the usual revert ops process because that\n\t\t\t\t\t// skips chains which are newly-created within this\n\t\t\t\t\t// branch.\n\t\t\t\t\tnewCreateOp := *cop\n\t\t\t\t\tnewCreateOp.Dir, err = makeBlockUpdate(\n\t\t\t\t\t\tchain.mostRecent, chain.mostRecent)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\t\t\t\t\tchain.ops[i] = &newCreateOp\n\t\t\t\t\tif !added {\n\t\t\t\t\t\tnewPaths = append(newPaths, data.Path{\n\t\t\t\t\t\t\tFolderBranch: cr.fbo.folderBranch,\n\t\t\t\t\t\t\tPath: []data.PathNode{{\n\t\t\t\t\t\t\t\tBlockPointer: chain.mostRecent}},\n\t\t\t\t\t\t\tChildObfuscator: cr.fbo.makeObfuscator(),\n\t\t\t\t\t\t})\n\t\t\t\t\t\tadded = true\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif cop.Type == data.Dir || len(cop.Refs()) == 0 {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\t// Add any direct file blocks too into each create op,\n\t\t\t\t// which originated in later unmerged syncs.\n\t\t\t\tptr, err :=\n\t\t\t\t\tunmergedChains.mostRecentFromOriginalOrSame(cop.Refs()[0])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\ttrackSyncPtrChangesInCreate(\n\t\t\t\t\tptr, chain, unmergedChains, cop.NewName)\n\t\t\t}\n\t\t}\n\t}\n\tif len(newPaths) > 0 {\n\t\t// Put the new paths at the beginning so they are processed\n\t\t// last in sorted order.\n\t\tunmergedPaths = append(newPaths, unmergedPaths...)\n\t}\n\n\tops, err := cr.makeRevertedOps(\n\t\tctx, lState, unmergedPaths, unmergedChains, mergedChains)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcr.log.CDebugf(ctx, \"Remote notifications: %v\", ops)\n\tfor _, op := range ops {\n\t\tcr.log.CDebugf(ctx, \"%s: refs %v\", op, op.Refs())\n\t\tnewMD.AddOp(op)\n\t}\n\n\t// Add a final dummy operation to collect all of the block updates.\n\tnewMD.AddOp(newResolutionOp())\n\n\treturn newMD, nil\n}",
"func NewCreateAuthTokenConflict() *CreateAuthTokenConflict {\n\treturn &CreateAuthTokenConflict{}\n}",
"func (c *ConflictResolver) Resolve(conflict Conflict) (winner Body, resolutionType ConflictResolutionType, err error) {\n\n\twinner, err = c.crf(conflict)\n\tif err != nil {\n\t\treturn winner, \"\", err\n\t}\n\n\twinningRev, ok := winner[BodyRev]\n\tif !ok {\n\t\tc.stats.ConflictResultMergeCount.Add(1)\n\t\treturn winner, ConflictResolutionMerge, nil\n\t}\n\n\tlocalRev, ok := conflict.LocalDocument[BodyRev]\n\tif ok && localRev == winningRev {\n\t\tc.stats.ConflictResultLocalCount.Add(1)\n\t\treturn winner, ConflictResolutionLocal, nil\n\t}\n\n\tremoteRev, ok := conflict.RemoteDocument[BodyRev]\n\tif ok && remoteRev == winningRev {\n\t\tc.stats.ConflictResultRemoteCount.Add(1)\n\t\treturn winner, ConflictResolutionRemote, nil\n\t}\n\n\tbase.InfofCtx(context.Background(), base.KeyReplicate, \"Conflict resolver returned non-empty revID (%s) not matching local (%s) or remote (%s), treating result as merge.\", winningRev, localRev, remoteRev)\n\tc.stats.ConflictResultMergeCount.Add(1)\n\treturn winner, ConflictResolutionMerge, err\n}",
"func NeverConflict(sourceFiles ...string) (string, error) {\n\treturn \"\", ErrFileConflict\n}",
"func (o *UpdateMTOServiceItemStatusConflict) IsCode(code int) bool {\n\treturn code == 409\n}",
"func (o *AddServerGroupConflict) IsCode(code int) bool {\n\treturn code == 409\n}",
"func NewRegisterUserConflict() *RegisterUserConflict {\n\treturn &RegisterUserConflict{}\n}",
"func (o *AddServerGroupConflict) Code() int {\n\treturn 409\n}",
"func (o *V2ReportMonitoredOperatorStatusConflict) IsCode(code int) bool {\n\treturn code == 409\n}",
"func (fbo *folderBranchOps) forceStuckConflictForTesting(\n\tctx context.Context) (err error) {\n\tstartTime, timer := fbo.startOp(ctx, \"Forcing a stuck conflict\")\n\tdefer func() {\n\t\tfbo.endOp(\n\t\t\tctx, startTime, timer, \"Forcing a stuck conflict done: %+v\", err)\n\t}()\n\n\tlState := makeFBOLockState()\n\tfbo.mdWriterLock.Lock(lState)\n\tdefer fbo.mdWriterLock.Unlock(lState)\n\n\tif fbo.isUnmergedLocked(lState) {\n\t\treturn errors.New(\"Cannot force conflict when already unmerged\")\n\t}\n\n\t// Disable updates.\n\tunpauseUpdatesCh := make(chan struct{})\n\tselect {\n\tcase fbo.updatePauseChan <- unpauseUpdatesCh:\n\tcase <-ctx.Done():\n\t\treturn ctx.Err()\n\t}\n\tdefer func() { unpauseUpdatesCh <- struct{}{} }()\n\n\t// Make a no-op revision with an empty resolutionOp. Wait for it\n\t// to flush to the server.\n\torigHead, _ := fbo.getHead(ctx, lState, mdNoCommit)\n\tmergedGCOp := newGCOp(origHead.data.LastGCRevision)\n\terr = fbo.finalizeGCOpLocked(ctx, lState, mergedGCOp)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tjManager, _ := GetJournalManager(fbo.config)\n\tif jManager != nil {\n\t\terr := fbo.waitForJournalLocked(ctx, lState, jManager)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// Wait for the flush handler to finish, so we don't\n\t\t// accidentally swap in the upcoming MD on the conflict branch\n\t\t// over the \"merged\" one we just flushed, before the pointer\n\t\t// archiving step happens.\n\t\terr = fbo.mdFlushes.Wait(ctx)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// Roll back the local view to the original revision.\n\terr = func() error {\n\t\tfbo.headLock.Lock(lState)\n\t\tdefer fbo.headLock.Unlock(lState)\n\t\terr = fbo.setHeadLocked(ctx, lState, origHead, headTrusted, mdNoCommit)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfbo.setLatestMergedRevisionLocked(\n\t\t\tctx, lState, origHead.Revision(), true)\n\t\treturn nil\n\t}()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Set CR to always fail.\n\toldMode := fbo.cr.getFailModeForTesting()\n\tfbo.cr.setFailModeForTesting(alwaysFailCR)\n\tdefer func() { fbo.cr.setFailModeForTesting(oldMode) }()\n\n\t// Make fake conflicting files to trigger CR. 
Make one for each\n\t// attempt needed to result in stuck CR.\n\thandle := origHead.GetTlfHandle()\n\trootNode, err := fbo.nodeCache.GetOrCreate(\n\t\torigHead.data.Dir.BlockPointer,\n\t\tdata.NewPathPartString(string(handle.GetCanonicalName()),\n\t\t\tfbo.makeObfuscator()),\n\t\tnil, data.Dir)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfor i := 0; i < maxConflictResolutionAttempts+1; i++ {\n\t\tfilename := fmt.Sprintf(\"FILE_FOR_STUCK_CONFLICT_%02d\", i)\n\t\t_, _, err := fbo.createEntryLocked(\n\t\t\tctx, lState, rootNode, rootNode.ChildName(filename), data.File,\n\t\t\tNoExcl)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\terr = fbo.syncAllLocked(ctx, lState, NoExcl)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif jManager != nil && TLFJournalEnabled(fbo.config, fbo.id()) {\n\t\t\t// Can't use fbo.waitForJournalLocked here, since the\n\t\t\t// flushing won't actually complete.\n\t\t\terr := jManager.Wait(ctx, fbo.id())\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tnewHead, _ := fbo.getHead(ctx, lState, mdNoCommit)\n\t\t\tfbo.cr.Resolve(\n\t\t\t\tctx, newHead.Revision(), kbfsmd.RevisionUninitialized)\n\t\t}\n\n\t\terr = fbo.cr.Wait(ctx)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// Make sure we're stuck.\n\tisStuck, err := fbo.cr.isStuck()\n\tif err != nil {\n\t\treturn err\n\t}\n\tif !isStuck {\n\t\treturn errors.New(\"CR not stuck after trying to force conflict\")\n\t}\n\n\treturn nil\n}",
"func NewConflictUpdateAction(set Set) ConflictUpdateAction {\n\treturn ConflictUpdateAction{\n\t\tSet: set,\n\t}\n}",
"func (s *MilestonesService) CreateMilestone(pid interface{}, opt *CreateMilestoneOptions, options ...RequestOptionFunc) (*Milestone, *Response, error) {\n\tproject, err := parseID(pid)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tu := fmt.Sprintf(\"projects/%s/milestones\", PathEscape(project))\n\n\treq, err := s.client.NewRequest(http.MethodPost, u, opt, options)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tm := new(Milestone)\n\tresp, err := s.client.Do(req, m)\n\tif err != nil {\n\t\treturn nil, resp, err\n\t}\n\n\treturn m, resp, nil\n}",
"func hMissionAccepted(json UnstructuredJson) {\n\tmissionId := json[\"MissionID\"].(float64)\n\n\t// check Trade Wing missions\n\tif _, ok := activeTradeMissions[missionId]; !ok {\n\t\tif json[\"Commodity\"] != nil && json[\"Reward\"] != nil {\n\n\t\t\tcommodityName := json[\"Commodity\"].(string)\n\t\t\tcommodityName = strings.ReplaceAll(commodityName, \"$\", \"\")\n\t\t\tcommodityName = strings.ReplaceAll(commodityName, \"_Name;\", \"\")\n\n\t\t\tactiveTradeMissions[missionId] = &TradeMission{\n\t\t\t\tmissionId,\n\t\t\t\tjson[\"Reward\"].(float64),\n\t\t\t\tcommodityName,\n\t\t\t\tjson[\"Count\"].(float64),\n\t\t\t\tcurrentCommanderName,\n\t\t\t}\n\t\t}\n\t\t//fmt.Printf(\"MissionAccepted, %v\\n\", missionId)\n\t}\n\n\t// check PIRATE Wing missions\n\tif _, ok := activePirateMissions[missionId]; !ok {\n\n\t\tif json[\"KillCount\"] != nil && json[\"Faction\"] != nil && json[\"TargetFaction\"] != nil {\n\n\t\t\tfaction := json[\"Faction\"].(string)\n\n\t\t\t//layout := \"2021-04-08T12:08:50Z\"\n\t\t\ttimestamp, _ := time.Parse(time.RFC3339 /*layout*/, json[\"timestamp\"].(string))\n\n\t\t\tactivePirateMissions[missionId] = PirateMission{\n\t\t\t\tmissionId,\n\t\t\t\tjson[\"Reward\"].(float64),\n\t\t\t\tfaction,\n\t\t\t\tint64(json[\"KillCount\"].(float64)),\n\t\t\t\tcurrentCommanderName,\n\t\t\t\tjson[\"TargetFaction\"].(string),\n\t\t\t\ttimestamp.Unix(),\n\t\t\t}\n\t\t}\n\t\t//fmt.Printf(\"MissionAccepted, %v\\n\", json)\n\t}\n\n\t//todo other mission handlers ...\n}",
"func ComplianceStatusPConflict() *ComplianceStatus {\n\tv := ComplianceStatusVConflict\n\treturn &v\n}"
] | [
"0.529359",
"0.5229895",
"0.52119213",
"0.5167439",
"0.51114",
"0.5056564",
"0.49670407",
"0.4965979",
"0.4913384",
"0.48987553",
"0.48867497",
"0.48669258",
"0.4865068",
"0.48591155",
"0.48306325",
"0.48230824",
"0.48123473",
"0.48010728",
"0.4800759",
"0.47991014",
"0.47915033",
"0.4786725",
"0.478373",
"0.47822338",
"0.47700235",
"0.47521314",
"0.4747699",
"0.47447997",
"0.4736805",
"0.47237396",
"0.4722873",
"0.47057706",
"0.46778604",
"0.46550807",
"0.4644458",
"0.46291375",
"0.462795",
"0.46242216",
"0.46051332",
"0.46021",
"0.46021",
"0.45981532",
"0.45801973",
"0.45768568",
"0.457561",
"0.45734307",
"0.45715937",
"0.45639685",
"0.45626432",
"0.45534492",
"0.45523015",
"0.45408118",
"0.45357755",
"0.45314005",
"0.45293283",
"0.4492101",
"0.44890794",
"0.44867817",
"0.4484507",
"0.44803587",
"0.44803432",
"0.44744843",
"0.4470004",
"0.44656655",
"0.44649318",
"0.44606233",
"0.44526735",
"0.44364274",
"0.44269747",
"0.44237828",
"0.44156003",
"0.44085562",
"0.436982",
"0.43636388",
"0.4359898",
"0.4354301",
"0.43431622",
"0.43405852",
"0.43350774",
"0.4326587",
"0.43077013",
"0.4306619",
"0.43065396",
"0.4306401",
"0.43036067",
"0.42986473",
"0.42866",
"0.42745733",
"0.42672512",
"0.4263856",
"0.4260274",
"0.4242043",
"0.42372748",
"0.42319396",
"0.42261764",
"0.4225098",
"0.42249513",
"0.420859",
"0.42079398",
"0.42045212"
] | 0.52392757 | 1 |
I also wanted to implement map, foreach, ... methods, but it's just not possible | func (l List) IsEmpty() bool {
if len(l.elements) == 0 {
return true
} else {
return false
}
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func Map(array []interface{}, iterator ResultIterator) []interface{} {\r\n\tvar result = make([]interface{}, len(array))\r\n\tfor index, data := range array {\r\n\t\tresult[index] = iterator(data, index)\r\n\t}\r\n\treturn result\r\n}",
"func Map[T any, U any](items []T, f func(T) U) []U {\n\toutputItems := make([]U, len(items))\n\tfor index, item := range items {\n\t\toutputItems[index] = f(item)\n\t}\n\treturn outputItems\n}",
"func Map[T any, R any](collection []T, iteratee func(T, int) R) []R {\n\tresult := make([]R, len(collection))\n\n\tfor i, item := range collection {\n\t\tresult[i] = iteratee(item, i)\n\t}\n\n\treturn result\n}",
"func (e Elements) Map(f MapFunc) Elements {\n\tfor i := range e {\n\t\te[i] = f(e[i])\n\t}\n\treturn e\n}",
"func (array *Array) Map(f func(interface{}, int) interface{}) *Array {\n\tnewArray := NewArray()\n\tfor index, object := range array.data {\n\t\tnewArray.Add(f(object, index))\n\t}\n\treturn newArray\n}",
"func TestMap() {\n\txs := []a{1, 2, 3, 4, 5}\n\tbs := gunc.Map(func(x a) b { return x.(int) + 2 }, xs)\n\tfmt.Printf(\"mapped:: %s\", bs)\n\tfor i := range bs {\n\t\tif bs[i].(int) != xs[i].(int)+2 {\n\t\t\tlog.Fatalf(\"mapping failed:: expected %d got %d\", (xs[i].(int) + 2), bs[i].(int))\n\t\t}\n\t}\n\tlog.Println(\"Map succeeded...\")\n}",
"func Map(xs, fn interface{}) interface{} {\n\tvf, vxs := reflect.ValueOf(fn), reflect.ValueOf(xs)\n\txsLen := vxs.Len()\n\tvys := reflect.MakeSlice(reflect.SliceOf(vf.Type().Out(0)), xsLen, xsLen)\n\tfor i := 0; i < xsLen; i++ {\n\t\tif vf.Type().NumIn() == 2 {\n\t\t\tvy := vf.Call([]reflect.Value{vxs.Index(i), reflect.ValueOf(Int(i))})[0]\n\t\t\tvys.Index(i).Set(vy)\n\t\t} else {\n\t\t\tvy := vf.Call([]reflect.Value{vxs.Index(i)})[0]\n\t\t\tvys.Index(i).Set(vy)\n\t\t}\n\t}\n\treturn vys.Interface()\n}",
"func (coll *Collection) Map(f MapFunc) Collection {\n\tresults := make(Collection, 0)\n\tfor _, v := range *coll {\n\t\titem := f(v)\n\t\tresults = append(results, item)\n\t}\n\n\treturn results\n}",
"func Map[In, Out any](s []In, fn func(In) Out) []Out {\n\tif s == nil {\n\t\treturn nil\n\t}\n\n\tout := make([]Out, len(s))\n\tfor i, v := range s {\n\t\tout[i] = fn(v)\n\t}\n\n\treturn out\n}",
"func (a Slice[T]) Map(block func(T) T) Slice[T] {\n\tresult := Slice[T]{}\n\tfor _, o := range a {\n\t\tresult = append(result, block(o))\n\t}\n\treturn result\n}",
"func Map(elements []Value, mapper Mapper) []Value {\n\tresult := make([]Value, len(elements))\n\tfor idx, elem := range elements {\n\t\tresult[idx] = mapper(elem)\n\t}\n\treturn result\n}",
"func (l *list) Map(sample interface{}, fn EachElementCallback) interface{} {\n\ttypeOf := reflect.TypeOf(sample)\n\n\tif typeOf.Kind() != reflect.Ptr {\n\t\tpanic(\"sample must be pointer\")\n\t}\n\n\tvalueOf := reflect.ValueOf(sample)\n\tvalueElem := valueOf.Elem()\n\n\tl.ForEach(func(index int, el interface{}) {\n\t\tres := fn(index, el)\n\t\tif res != nil {\n\t\t\tvalueElem.Set(reflect.Append(valueElem, reflect.ValueOf(res)))\n\t\t}\n\t})\n\n\treturn valueElem.Interface()\n}",
"func MapEcho(itr Iterator) interface{} {\n\tvar values []interface{}\n\n\tfor k, v := itr.Next(); k != -1; k, v = itr.Next() {\n\t\tvalues = append(values, v)\n\t}\n\treturn values\n}",
"func (obj *object) Map(f func(*Term, *Term) (*Term, *Term, error)) (Object, error) {\n\tcpy := newobject(obj.Len())\n\terr := obj.Iter(func(k, v *Term) error {\n\t\tvar err error\n\t\tk, v, err = f(k, v)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tcpy.insert(k, v)\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn cpy, nil\n}",
"func Map[T comparable](tt []T, fn func(T) T) []T {\n\tret := make([]T, len(tt))\n\tfor i, t := range tt {\n\t\tret[i] = fn(t)\n\t}\n\n\treturn ret\n}",
"func (array Array) Map(function MapFunc) Array {\n\tnewArray := New()\n\tfor i, v := range array {\n\t\tnewArray = newArray.Push(function(array, i, v))\n\t}\n\treturn newArray\n}",
"func Map(f func(string) string, s []string) []string {\n\tfor i, e := range s {\n\t\ts[i] = f(e)\n\t}\n\treturn s\n}",
"func (s *SliceOfByte) Map(mapFunc func(byte) byte) *SliceOfByte {\n\tfor index, value := range s.items {\n\t\ts.items[index] = mapFunc(value)\n\t}\n\treturn s\n}",
"func (s *SliceOfInt8) Map(mapFunc func(int8) int8) *SliceOfInt8 {\n\tfor index, value := range s.items {\n\t\ts.items[index] = mapFunc(value)\n\t}\n\treturn s\n}",
"func ForEach(arr interface{}, predicate interface{}) {\n\tif !IsIteratee(arr) {\n\t\tpanic(\"First parameter must be an iteratee\")\n\t}\n\n\tvar (\n\t\tfuncValue = reflect.ValueOf(predicate)\n\t\tarrValue = reflect.ValueOf(arr)\n\t\tarrType = arrValue.Type()\n\t\tfuncType = funcValue.Type()\n\t)\n\n\tif arrType.Kind() == reflect.Slice || arrType.Kind() == reflect.Array {\n\t\tif !IsFunction(predicate, 1, 0) {\n\t\t\tpanic(\"Second argument must be a function with one parameter\")\n\t\t}\n\n\t\tarrElemType := arrValue.Type().Elem()\n\t\tarrElemPointerType := reflect.New(arrElemType).Type()\n\t\tusePointer := arrElemPointerType.ConvertibleTo(funcType.In(0))\n\n\t\t// Checking whether element type is convertible to function's first argument's type.\n\t\tif !arrElemType.ConvertibleTo(funcType.In(0)) && !usePointer {\n\t\t\tpanic(\"Map function's argument is not compatible with type of array.\")\n\t\t}\n\n\t\tfor i := 0; i < arrValue.Len(); i++ {\n\t\t\tif usePointer {\n\t\t\t\tfuncValue.Call([]reflect.Value{arrValue.Index(i).Addr()})\n\t\t\t} else {\n\t\t\t\tfuncValue.Call([]reflect.Value{arrValue.Index(i)})\n\t\t\t}\n\t\t}\n\t}\n\n\tif arrType.Kind() == reflect.Map {\n\t\tif !IsFunction(predicate, 2, 0) {\n\t\t\tpanic(\"Second argument must be a function with two parameters\")\n\t\t}\n\n\t\t// Type checking for Map<key, value> = (key, value)\n\t\tkeyType := arrType.Key()\n\t\tvalueType := arrType.Elem()\n\n\t\tif !keyType.ConvertibleTo(funcType.In(0)) {\n\t\t\tpanic(fmt.Sprintf(\"function first argument is not compatible with %s\", keyType.String()))\n\t\t}\n\n\t\tif !valueType.ConvertibleTo(funcType.In(1)) {\n\t\t\tpanic(fmt.Sprintf(\"function second argument is not compatible with %s\", valueType.String()))\n\t\t}\n\n\t\tfor _, key := range arrValue.MapKeys() {\n\t\t\tfuncValue.Call([]reflect.Value{key, arrValue.MapIndex(key)})\n\t\t}\n\t}\n}",
"func (s Sequence) Map(f func(el El) El) Sequence {\n\tif s.IsConcurrent() {return s.CMap(f)}\n\treturn s.SMap(f)\n}",
"func Map[T, U any](slice []T, init U, fn func(accum U, v T) U) U {\n\tif len(slice) == 0 {\n\t\treturn init\n\t}\n\taccum := init\n\tfor _, v := range slice {\n\t\taccum = fn(accum, v)\n\t}\n\treturn accum\n}",
"func (p *SliceOfMap) Map(mod func(O) O) ISlice {\n\tvar slice ISlice\n\tif p == nil || len(*p) == 0 {\n\t\treturn NewSliceOfMapV()\n\t}\n\tfor i := range *p {\n\t\tv := mod((*p)[i])\n\t\tif slice == nil {\n\t\t\tslice = Slice(v)\n\t\t} else {\n\t\t\tslice.Append(v)\n\t\t}\n\t}\n\treturn slice\n}",
"func Map(ss []string, funcInterface interface{}) []string {\n\tif ss == nil {\n\t\treturn nil\n\t}\n\tif funcInterface == nil {\n\t\treturn ss\n\t}\n\tf := func(i int, s string) string {\n\t\tswitch tf := funcInterface.(type) {\n\t\tcase func(int, string) string:\n\t\t\treturn tf(i, s)\n\t\tcase func(string) string:\n\t\t\treturn tf(s)\n\t\t}\n\t\tpanic(fmt.Sprintf(\"Map cannot understand function type %T\", funcInterface))\n\t}\n\tresult := make([]string, len(ss))\n\tfor i, s := range ss {\n\t\tresult[i] = f(i, s)\n\t}\n\treturn result\n}",
"func Map(series interface{}, f interface{}) (interface{}, error) {\n\tst := reflect.TypeOf(series)\n\tsv := reflect.ValueOf(series)\n\tft := reflect.TypeOf(f)\n\tfv := reflect.ValueOf(f)\n\n\tswitch {\n\tcase st.Kind() != reflect.Slice && st.Kind() != reflect.Array:\n\t\treturn nil, ErrNotArrayOrSlice\n\tcase ft.Kind() != reflect.Func:\n\t\treturn nil, ErrNotFunc\n\tcase ft.NumIn() != 1 || ft.NumOut() != 1:\n\t\treturn nil, ErrFuncParam\n\tcase ft.In(0).Kind() != st.Elem().Kind():\n\t\treturn nil, ErrNotCompatible\n\t}\n\n\tmapped := reflect.MakeSlice(reflect.SliceOf(ft.Out(0)), 0, 0)\n\tfor i := 0; i < sv.Len(); i++ {\n\t\tmapped = reflect.Append(mapped, fv.Call([]reflect.Value{sv.Index(i)})...)\n\t}\n\n\treturn mapped.Interface(), nil\n}",
"func (s *SliceOfString) Map(mapFunc func(string) string) *SliceOfString {\n\tfor index, value := range s.items {\n\t\ts.items[index] = mapFunc(value)\n\t}\n\treturn s\n}",
"func (arr *FloatArray) Map(exec func(el float64) float64) []float64 {\n\treturn arr.Collect(exec)\n}",
"func Map(slice []string, mapping ...MapFunc) []string {\n\tout := make([]string, len(slice))\n\tfor i, s := range slice {\n\t\tfor _, mapping := range mapping {\n\t\t\ts = mapping(s)\n\t\t}\n\t\tout[i] = s\n\t}\n\treturn out\n}",
"func (s *SliceOfUint8) Map(mapFunc func(uint8) uint8) *SliceOfUint8 {\n\tfor index, value := range s.items {\n\t\ts.items[index] = mapFunc(value)\n\t}\n\treturn s\n}",
"func (jz *Jzon) Map(mapFunc func(string, *Jzon) Any) (res Any, err error) {\n\tswitch jz.Type {\n\tcase JzTypeArr:\n\t\treturn jz.AMap(func(j *Jzon) (res Any) { return mapFunc(\"\", j) })\n\n\tcase JzTypeObj:\n\t\treturn jz.OMap(mapFunc)\n\n\tdefault:\n\t\treturn mapFunc(\"\", jz), err\n\t}\n}",
"func (tokens Tokens) Map(fn func(string) string) Tokens {\n\tres := make(Tokens, len(tokens))\n\tfor i := range tokens {\n\t\tres[i] = fn(tokens[i])\n\t}\n\treturn res\n}",
"func MapFn[S ~[]E, E any](list S, fn func(E) E) S {\n\tif list == nil {\n\t\treturn nil\n\t}\n\tdest := make(S, len(list))\n\tfor i, s := range list {\n\t\tdest[i] = fn(s)\n\t}\n\treturn dest\n}",
"func (s *SliceOfInt16) Map(mapFunc func(int16) int16) *SliceOfInt16 {\n\tfor index, value := range s.items {\n\t\ts.items[index] = mapFunc(value)\n\t}\n\treturn s\n}",
"func (w wireFormat) MapBySlice() {}",
"func Map(f func(interface{}) interface{}, l List) List {\n\tif IsEmpty(l) {\n\t\treturn Mzero()\n\t}\n\telem := l.([2]interface{})\n\tvalFunc := elem[0].(func() interface{})\n\tnext := elem[1].(func() List)\n\tmapperFunc := func() interface{} {\n\t\treturn f(valFunc())\n\t}\n\treturn Consf(mapperFunc, Map(f, next()))\n}",
"func mapiterinit(t unsafe.Pointer, m unsafe.Pointer, it *hiter)",
"func (s *SliceOfUint16) Map(mapFunc func(uint16) uint16) *SliceOfUint16 {\n\tfor index, value := range s.items {\n\t\ts.items[index] = mapFunc(value)\n\t}\n\treturn s\n}",
"func Map(v interface{}) map[string]interface{} {\n\treturn New(v).Map()\n}",
"func (s Series) Map(f MapFunction) Series {\n\tmappedValues := make([]Element, s.Len())\n\tfor i := 0; i < s.Len(); i++ {\n\t\tvalue := f(s.elements.Elem(i))\n\t\tmappedValues[i] = value\n\t}\n\treturn New(mappedValues, s.Type(), s.Name)\n}",
"func (s *SliceOfFloat64) Map(mapFunc func(float64) float64) *SliceOfFloat64 {\n\tfor index, value := range s.items {\n\t\ts.items[index] = mapFunc(value)\n\t}\n\treturn s\n}",
"func (t TypeSlice) Map(fn func(Type) Type) TypeSlice {\n\to := make(TypeSlice, len(t))\n\tfor i := range t {\n\t\to[i] = fn(t[i])\n\t}\n\treturn o\n}",
"func (self *Map) Each(fn ItemFunc, tagName ...string) error {\n\ttn := ``\n\n\tif len(tagName) > 0 && tagName[0] != `` {\n\t\ttn = tagName[0]\n\t}\n\n\treturn self.each(fn, IterOptions{\n\t\tTagName: tn,\n\t})\n}",
"func (s *SliceOfUint) Map(mapFunc func(uint) uint) *SliceOfUint {\n\tfor index, value := range s.items {\n\t\ts.items[index] = mapFunc(value)\n\t}\n\treturn s\n}",
"func Map[M map[K]V, K ~int, V string | int](s []V) M {\n// func Map[M map[int]V, V string | int](s []V) M {\n\tvar m = make(M)\n\tfor i, one := range s {\n\t\tm[i] = one\n\t}\n\treturn m\n}",
"func Map(a []string, fn func(string) string) []string {\n\tif len(a) == 0 {\n\t\treturn a\n\t}\n\tsl := make([]string, len(a))\n\tfor i, v := range a {\n\t\tsl[i] = fn(v)\n\t}\n\treturn sl\n}",
"func (s *SliceOfUint64) Map(mapFunc func(uint64) uint64) *SliceOfUint64 {\n\tfor index, value := range s.items {\n\t\ts.items[index] = mapFunc(value)\n\t}\n\treturn s\n}",
"func MapM(f func(interface{}), l List) {\n\tadapter := func(i interface{}) interface{} {\n\t\tf(i)\n\t\treturn nil\n\t}\n\tSeq(Map(adapter, l))\n}",
"func Map(args ...interface{}) dgo.MapType {\n\treturn internal.MapType(args...)\n}",
"func (d *Dump) Map(f func(item Item) error) error {\n\td.mutex.Lock()\n\tdefer d.mutex.Unlock()\n\n\tvar err error\n\tfor _, i := range d.items {\n\t\tif err = f(i); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif d.persist == PERSIST_WRITES {\n\t\treturn d.save()\n\t}\n\n\treturn nil\n}",
"func Map(vs []string, f func(string) string) []string {\n\tvsm := make([]string, len(vs))\n\tfor i, v := range vs {\n\t\tvsm[i] = f(v)\n\t}\n\treturn vsm\n}",
"func Map(vs []string, f func(string) string) []string {\n\tvsm := make([]string, len(vs))\n\tfor i, v := range vs {\n\t\tvsm[i] = f(v)\n\t}\n\treturn vsm\n}",
"func Map(vs []string, f func(string) string) []string {\n\tvsm := make([]string, len(vs))\n\tfor i, v := range vs {\n\t\tvsm[i] = f(v)\n\t}\n\treturn vsm\n}",
"func Map(p ParseTree, f func(e Expr) *Expr) ParseTree {\n\tp2 := make(ParseTree, 0, len(p))\n\tfor _, e := range p {\n\t\tcpy := *e\n\t\te = &cpy\n\t\tif result := f(*e); result != nil {\n\t\t\tp2 = append(p2, result)\n\t\t}\n\t}\n\treturn p2\n}",
"func (p Doc) Map() map[string]string {\n\tm := make(map[string]string)\n\n\tp.Foreach(func(v, k string) bool { m[k] = v; return true })\n\n\treturn m\n}",
"func (builder *streamBuilder[K, F]) Map(mapper Mapper[K, F]) *streamBuilder[K, F] {\n\tbuilder.stream.Mappers = append(builder.stream.Mappers, mapper)\n\treturn builder\n}",
"func (l IntList) Map(fn unaryFunc) IntList {\n\tfor i, v := range l {\n\t\tl[i] = fn(v)\n\t}\n\treturn l\n}",
"func Map[T, R any](it TryNextor[T], mapper func(T) R) TryNextor[R] {\n\treturn pureMap[T, R]{\n\t\tinner: it,\n\t\tmapper: mapper,\n\t}\n}",
"func (s *SliceOfInt) Map(mapFunc func(int) int) *SliceOfInt {\n\tfor index, value := range s.items {\n\t\ts.items[index] = mapFunc(value)\n\t}\n\treturn s\n}",
"func Map(it []string, fn func(string) string) []string {\n\toutSlice := []string{}\n\tfor _, str := range it {\n\t\toutSlice = append(outSlice, fn(str))\n\t}\n\treturn outSlice\n}",
"func (s *SliceOfInt64) Map(mapFunc func(int64) int64) *SliceOfInt64 {\n\tfor index, value := range s.items {\n\t\ts.items[index] = mapFunc(value)\n\t}\n\treturn s\n}",
"func (vind *UTF8cihash) Map(_ VCursor, ids []interface{}) ([][]byte, error) {\n\tout := make([][]byte, 0, len(ids))\n\tfor _, id := range ids {\n\t\tdata, err := getutf8cihash(id)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"utf8cihash.Map :%v\", err)\n\t\t}\n\t\tout = append(out, data)\n\t}\n\treturn out, nil\n}",
"func Map[A, B any](seq Seq[A], f func(A) B) Seq[B] {\n\tif seq == nil {\n\t\treturn nil\n\t}\n\n\treturn fmap[A, B]{Seq: seq, f: f}\n}",
"func (set *AppleSet) Map(f func(Apple) Apple) *AppleSet {\n\tif set == nil {\n\t\treturn nil\n\t}\n\n\tresult := NewAppleSet()\n\tset.s.RLock()\n\tdefer set.s.RUnlock()\n\n\tfor v := range set.m {\n\t\tk := f(v)\n\t\tresult.m[k] = struct{}{}\n\t}\n\n\treturn result\n}",
"func (vind *Varbinary) Map(_ VCursor, ids []interface{}) ([][]byte, error) {\n\tout := make([][]byte, 0, len(ids))\n\tfor _, id := range ids {\n\t\tdata, err := getVarbinaryHash(id)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"VarBinary_hash.Map :%v\", err)\n\t\t}\n\t\tout = append(out, data)\n\t}\n\treturn out, nil\n}",
"func (s *SliceOfFloat32) Map(mapFunc func(float32) float32) *SliceOfFloat32 {\n\tfor index, value := range s.items {\n\t\ts.items[index] = mapFunc(value)\n\t}\n\treturn s\n}",
"func reflect_mapiterinit(rtype unsafe.Pointer, m unsafe.Pointer, hiter unsafe.Pointer)",
"func (s *Stream) Map(f interface{}) *Stream {\n\top, err := unary.MapFunc(f)\n\tif err != nil {\n\t\ts.drainErr(err)\n\t\treturn s\n\t}\n\treturn s.Transform(op)\n}",
"func _map(fn mapfn, chunks []string, c chan dict) {\n\tfor _, chunk := range chunks {\n\t\tgo fn(chunk, c)\n\t}\n}",
"func (a *Array64) Map(f MapFunc) (ret *Array64) {\n\tif a == nil || a.err != nil {\n\t\treturn a\n\t}\n\n\tdefer func() {\n\t\tif r := recover(); r != nil {\n\t\t\tret = a\n\t\t\tret.err = FoldMapError\n\t\t\tret.debug = fmt.Sprint(r)\n\t\t\tif debug {\n\t\t\t\tret.stack = string(stackBuf[:runtime.Stack(stackBuf, false)])\n\t\t\t}\n\t\t}\n\t}()\n\n\tret = newArray64(a.shape...)\n\tfor i := 0; i < a.strides[0]; i++ {\n\t\tret.data[i] = f(a.data[i])\n\t}\n\treturn\n}",
"func itermap(n node) []mapitem {\n\titems := []mapitem{}\n\tif n.Kind != yamlast.MappingNode {\n\t\tpanic(\"expected mapping node\")\n\t}\n\tfor i := 0; i < len(n.Children)-1; i += 2 {\n\t\tk := n.Children[i]\n\t\tv := n.Children[i+1]\n\t\titems = append(items, mapitem{k.Value, v})\n\t}\n\treturn items\n}",
"func Map(f func(float64) float64, x []float64) []float64 {\n\ty := make([]float64, len(x))\n\tfor n, xn := range x {\n\t\ty[n] = f(xn)\n\t}\n\treturn y\n}",
"func (w *SimpleMapReduce) Map(mapFn MapFn) *SimpleMapReduce {\n w.mapFn = mapFn\n return w\n}",
"func (m *mapper) run() {\n\tfor m.itr.NextIterval() {\n\t\tm.fn(m.itr, m)\n\t}\n\tclose(m.c)\n}",
"func (self *Map) Map(key string, tagName ...string) map[typeutil.Variant]typeutil.Variant {\n\tif len(tagName) == 0 {\n\t\ttagName = []string{self.structTagKey}\n\t}\n\n\treturn self.Get(key).Map(tagName...)\n}",
"func Map[I, O any](input <-chan I, mapFunc func(element I) O) <-chan O {\n\toutput := make(chan O)\n\tgo func() {\n\t\tdefer close(output)\n\n\t\tfor element := range input {\n\t\t\toutput <- mapFunc(element)\n\t\t}\n\t}()\n\n\treturn output\n}",
"func (this Pairs) MapExpressions(mapper expression.Mapper) (err error) {\n\tfor _, pair := range this {\n\t\terr = pair.MapExpressions(mapper)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\treturn\n}",
"func (ll *LinkedList) Map(fn func(interface{}, uint) interface{}) *LinkedList {\n\tll.RLock()\n\tdefer ll.RUnlock()\n\n\tnewList := NewLinkedList()\n\n\tcurrentNode := ll.head\n\tcurrentIndex := uint(0)\n\n\tfor currentNode != nil {\n\t\tnewList.PushBack(fn(currentNode.data, currentIndex))\n\t\tcurrentNode = currentNode.next\n\t\tcurrentIndex++\n\t}\n\n\treturn newList\n}",
"func (v Vec) Map(f func(int, float64) float64) Vec {\n\treturn v.Copy().MapBy(f)\n}",
"func (r Resources) Map(f func(Resource)) {\n\tfor _, resource := range r {\n\t\tf(resource)\n\t}\n}",
"func (m AoS) Map(f func(s string) S) AoS {\n\tif m.IsErr() {\n\t\treturn m\n\t}\n\n\txss := make([]string, len(m.just))\n\tfor i, v := range m.just {\n\t\tstr, err := f(v).Unbox()\n\t\tif err != nil {\n\t\t\treturn ErrAoS(err)\n\t\t}\n\t\txss[i] = str\n\t}\n\n\treturn JustAoS(xss)\n}",
"func (list IntList) Map(fn unaryFunc) IntList {\n\tr := []int{}\n\tfor _, e := range list {\n\t\tr = append(r, fn(e))\n\t}\n\treturn IntList(r)\n}",
"func (p *SliceOfMap) Each(action func(O)) ISlice {\n\tif p == nil {\n\t\treturn p\n\t}\n\tfor i := range *p {\n\t\taction((*p)[i])\n\t}\n\treturn p\n}",
"func (s *set) Map(f func(*Term) (*Term, error)) (Set, error) {\n\tset := NewSet()\n\terr := s.Iter(func(x *Term) error {\n\t\tterm, err := f(x)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tset.Add(term)\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn set, nil\n}",
"func (isf intSliceFunctorImpl) Map(fn func(int) int) IntSliceFunctor {\n\tif len(isf.ints) < 100 {\n\t\tisf.ints = serialIntMapper(isf.ints, fn)\n\t\treturn isf\n\t}\n\tisf.ints = concurrentIntMapper(isf.ints, fn)\n\treturn isf\n}",
"func (t *StringSlice) Map(mappers ...func(string) string) *StringSlice {\n\tret := NewStringSlice()\n\tfor _, i := range t.items {\n\t\tval := i\n\t\tfor _, m := range mappers {\n\t\t\tval = m(val)\n\t\t}\n\t\tret.Push(val)\n\t}\n\treturn ret\n}",
"func (f *Map) Call(line int, i *Interpreter, arguments []interface{}) (interface{}, error) {\n\tfun, ok := arguments[0].(Function)\n\tif !ok {\n\t\treturn nil, &executionError{line, \"<map> expects a function as first parameter\"}\n\t}\n\n\tif fun.Arity() != 1 {\n\t\treturn nil, &executionError{line, \"<map> expects a function which accepts one argument\"}\n\t}\n\n\tlist, ok := arguments[1].(List)\n\tif !ok {\n\t\treturn nil, &executionError{line, \"<map> expects a list as second parameter\"}\n\t}\n\n\tvar mappedElements []interface{}\n\n\trestOfList := list\n\n\tfor restOfList.Len() > 0 {\n\t\tvar args []interface{}\n\t\targs = append(args, restOfList.First())\n\n\t\tnewEl, err := fun.Call(line, i, args)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tmappedElements = append(mappedElements, newEl)\n\n\t\trestOfList = restOfList.Rest()\n\t}\n\n\treturn NewArrayList(mappedElements), nil\n}",
"func Map(dst, src []float64, f func(v float64) float64) []float64 {\n\n\tif dst == nil {\n\t\tdst = make([]float64, len(src))\n\t}\n\n\tif len(src) != len(dst) {\n\t\tpanic(errLength)\n\t}\n\n\tfor i, x := range src {\n\t\tdst[i] = f(x)\n\t}\n\treturn dst\n}",
"func (u Vec) Map(f func(float64) float64) Vec {\n\treturn Vec{\n\t\tf(u.X),\n\t\tf(u.Y),\n\t}\n}",
"func (self *Map) each(fn ItemFunc, opts IterOptions) error {\n\tif fn != nil {\n\t\tvar tn []string\n\n\t\tif opts.TagName != `` {\n\t\t\ttn = append(tn, opts.TagName)\n\t\t}\n\n\t\tkeys := self.StringKeys(tn...)\n\n\t\tif opts.SortKeys {\n\t\t\tsort.Strings(keys)\n\t\t}\n\n\t\tfor _, key := range keys {\n\t\t\tif err := fn(key, self.Get(key)); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}",
"func (s *SliceOfUint32) Map(mapFunc func(uint32) uint32) *SliceOfUint32 {\n\tfor index, value := range s.items {\n\t\ts.items[index] = mapFunc(value)\n\t}\n\treturn s\n}",
"func Map_(children ...HTML) HTML {\n return Map(nil, children...)\n}",
"func (s Stream) Map(fn func(r Record) (Record, error)) Stream {\n\treturn s.Pipe(func() func(r Record) (Record, error) {\n\t\treturn fn\n\t})\n}",
"func (t *Table) Map(f TableMapFunc) *Table {\n\ttm := NewTable()\n\tfor term, x := range t.self {\n\t\ttm.self[term] = f(x)\n\t}\n\treturn tm\n}",
"func (jz *Jzon) OMap(itFunc func(key string, g *Jzon) Any) (res []Any, err error) {\n\tif jz.Type != JzTypeObj {\n\t\treturn res, expectTypeOf(JzTypeObj, jz.Type)\n\t}\n\n\tres = make([]Any, 0)\n\n\tfor k, v := range jz.data.(map[string]*Jzon) {\n\t\tres = append(res, itFunc(k, v))\n\t}\n\n\treturn res, nil\n}",
"func Map(vs []int, f func(int) int) []int {\n\tvsm := make([]int, len(vs))\n\tfor i, v := range vs {\n\t\tvsm[i] = f(v)\n\t}\n\treturn vsm\n}",
"func (m *Map) Map(f func(key1 interface{}, value1 interface{}) (interface{}, interface{})) *Map {\n\tnewMap := NewWith(m.keyComparator, m.valueComparator)\n\titerator := m.Iterator()\n\tfor iterator.Next() {\n\t\tkey2, value2 := f(iterator.Key(), iterator.Value())\n\t\tnewMap.Put(key2, value2)\n\t}\n\treturn newMap\n}",
"func (r Result) allMap(columns []string, obj interface{}, list reflect.Value) error {\n\t// TODO support scaning into existing or partially populated slices?\n\t_, ok := obj.(*[]Values)\n\tif !ok {\n\t\treturn fmt.Errorf(\n\t\t\t\"sol: slices of maps must have an element type of sol.Values\",\n\t\t)\n\t}\n\n\t// TODO How to scan directly into values?\n\taddr := make([]interface{}, len(columns))\n\tdest := make([]interface{}, len(columns))\n\tfor i := range addr {\n\t\tdest[i] = &addr[i]\n\t}\n\n\tfor r.Next() {\n\t\tif err := r.Scan(dest...); err != nil {\n\t\t\treturn fmt.Errorf(\"sol: error scanning map slice: %s\", err)\n\t\t}\n\n\t\tvalues := Values{}\n\t\tfor i, name := range columns {\n\t\t\tvalues[name] = addr[i]\n\t\t}\n\n\t\tlist.Set(reflect.Append(list, reflect.ValueOf(values)))\n\t}\n\treturn r.Err() // Check for delayed scan errors\n}",
"func Map(target interface{}, useTag string) (map[string]interface{}, error) {\n\tif nil == target {\n\t\treturn nil, nil\n\t}\n\tv := reflect.ValueOf(target)\n\tfor v.Kind() == reflect.Ptr {\n\t\tv = v.Elem()\n\t}\n\tif v.Kind() != reflect.Struct {\n\t\treturn nil, ErrNoneStructTarget\n\t}\n\tt := v.Type()\n\tresult := make(map[string]interface{})\n\tfor i := 0; i < t.NumField(); i++ {\n\t\tkeyName := getKey(t.Field(i), useTag)\n\t\tif \"\" == keyName {\n\t\t\tcontinue\n\t\t}\n\t\tresult[keyName] = v.Field(i).Interface()\n\t}\n\treturn result, nil\n}",
"func pointyMap() interface{} {\n\treturn map[string]interface{}{\n\t\t\"bar\": map[string]interface{}{\n\t\t\t\"baz\": &b{0, []int{1, 2, 3}},\n\t\t\t\"buzz\": []int{4, 5, 6}},\n\t\t\"baz\": []int{7, 8, 9},\n\t\t\"bazzle\": []string{\"10\", \"11\", \"12\"}}\n}",
"func (set Int64Set) Map(fn func(int64) int64) Int64Set {\n\tresult := NewInt64Set()\n\n\tfor v := range set {\n result[fn(v)] = struct{}{}\n\t}\n\n\treturn result\n}",
"func (grid *SquareGrid) Map() map[Loc]interface{} {\n\treturn grid.Data\n}"
] | [
"0.6765267",
"0.6430332",
"0.6413866",
"0.6319985",
"0.6291821",
"0.6289037",
"0.6270705",
"0.62303656",
"0.6190384",
"0.60611576",
"0.6056677",
"0.60296446",
"0.5927682",
"0.5918407",
"0.59146017",
"0.588991",
"0.5885376",
"0.5875884",
"0.5833732",
"0.5825184",
"0.5819225",
"0.5793214",
"0.5789852",
"0.5788141",
"0.57615674",
"0.57499343",
"0.57497203",
"0.57390386",
"0.5717119",
"0.5714826",
"0.5713009",
"0.56747234",
"0.5659141",
"0.5635202",
"0.56260043",
"0.5605085",
"0.55989146",
"0.5594735",
"0.5567589",
"0.556688",
"0.55636895",
"0.5559595",
"0.5558485",
"0.555332",
"0.55507696",
"0.5549014",
"0.5547073",
"0.5535646",
"0.5528621",
"0.5519428",
"0.5519428",
"0.5519428",
"0.5511101",
"0.5490464",
"0.54779273",
"0.5474529",
"0.5474158",
"0.54719114",
"0.54718816",
"0.54654634",
"0.5462495",
"0.54348373",
"0.5432439",
"0.54122925",
"0.54061633",
"0.54041874",
"0.5398786",
"0.5376233",
"0.53724235",
"0.5364023",
"0.53631127",
"0.5354804",
"0.5352961",
"0.5335578",
"0.5335233",
"0.53314745",
"0.5323568",
"0.5313058",
"0.53037137",
"0.5301207",
"0.5291761",
"0.5290575",
"0.5289762",
"0.5273914",
"0.52715003",
"0.5251225",
"0.52472967",
"0.5242512",
"0.5227343",
"0.52188015",
"0.52044237",
"0.5203632",
"0.51999027",
"0.5197378",
"0.51940674",
"0.5186933",
"0.51856685",
"0.5183175",
"0.51735955",
"0.51679164",
"0.5166542"
] | 0.0 | -1 |
NewDialog is a helper to spawn a new bit of game dialog | func NewDialog(text string, fontSize float32) {
rl.DrawRectangleRec(
rl.NewRectangle(0, 0, float32(rl.GetScreenWidth()), float32(rl.GetScreenHeight()/5)),
rl.Black,
)
rl.DrawRectangleLinesEx(
rl.NewRectangle(0, 0, float32(rl.GetScreenWidth()), float32(rl.GetScreenHeight()/5)),
4,
rl.White,
)
rl.DrawTextRecEx(
rl.GetFontDefault(),
text,
rl.NewRectangle(20, 20, float32(rl.GetScreenWidth()), float32(rl.GetScreenHeight()/5)),
fontSize,
1,
true,
rl.RayWhite,
0,
int32(rl.GetScreenWidth()),
rl.White,
rl.Black,
)
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func NewDialog(text string) *Dialog {\n\treturn &Dialog{\n\t\tStyleName: \"Default\",\n\t\tStart: \"0:00:00.00\", End: \"0:00:05.00\",\n\t\tText: text}\n}",
"func NewDialog(input TL) (d *Dialog) {\n\td = new(Dialog)\n\tif dialog, ok := input.(TL_dialog); ok {\n\t\tswitch pt := dialog.Peer.(type) {\n\t\tcase TL_peerChat:\n\t\t\td.Type = DIALOG_TYPE_CHAT\n\t\t\td.PeerID = pt.Chat_id\n\t\tcase TL_peerUser:\n\t\t\td.Type = DIALOG_TYPE_USER\n\t\t\td.PeerID = pt.User_id\n\t\tcase TL_peerChannel:\n\t\t\td.Type = DIALOG_TYPE_CHANNEL\n\t\t\td.PeerID = pt.Channel_id\n\t\tdefault:\n\t\t\treturn nil\n\t\t}\n\t\td.Pts = dialog.Pts\n\t\td.TopMessageID = dialog.Top_message\n\t\td.UnreadCount = dialog.Unread_count\n\n\t\treturn d\n\t}\n\treturn nil\n\n}",
"func NewDialog(text string) *writer.Dialog {\n\treturn writer.NewDialog(text)\n}",
"func NewDialog() *Dialog {\n\treturn &Dialog{}\n}",
"func (d Client) CreateDialog(name string, filename string, data io.Reader) (string, error) {\n\treturn d.createOrUpdateDialog(\"\", name, filename, data)\n}",
"func newDialogFromNative(obj unsafe.Pointer) interface{} {\n\td := &Dialog{}\n\td.object = C.to_GtkDialog(obj)\n\n\tif gobject.IsObjectFloating(d) {\n\t\tgobject.RefSink(d)\n\t} else {\n\t\tgobject.Ref(d)\n\t}\n\td.Window = newWindowFromNative(obj).(*Window)\n\tdialogFinalizer(d)\n\n\treturn d\n}",
"func (dialog *Dialog) NewSubdialog() *Dialog {\n\treturn &Dialog{\n\t\tdepth: dialog.depth,\n\t\tisSub: true,\n\t}\n}",
"func NewModalDialog(idd uintptr, parent win.HWND, dialogConfig *DialogConfig, cb ModalDialogCallBack) int {\n\tif dialogConfig == nil {\n\t\tdialogConfig = &DialogConfig{}\n\t}\n\tdlg := &Dialog{\n\t\titems: make(map[win.HWND]Widget),\n\t\tiddMap: make(map[uintptr]Widget),\n\t\tconfig: dialogConfig,\n\t\tcb: cb,\n\t}\n\tdlg.idd = idd\n\treturn win.DialogBoxParam(hInstance, win.MAKEINTRESOURCE(idd), parent, syscall.NewCallback(dlg.dialogWndProc), 0)\n}",
"func Dialog(props *DialogProps, children ...Element) *DialogElem {\n\trProps := &_DialogProps{\n\t\tBasicHTMLElement: newBasicHTMLElement(),\n\t}\n\n\tif props != nil {\n\t\tprops.assign(rProps)\n\t}\n\n\treturn &DialogElem{\n\t\tElement: createElement(\"dialog\", rProps, children...),\n\t}\n}",
"func (s *Script) AddDialog(d *Dialog) {\n\tif d.Text != \"\" {\n\t\ts.Dialog = append(s.Dialog, d)\n\t}\n}",
"func makeNewGame(name string, playerNames []string) *Game {\n\tvar g = new(Game)\n\tid, err := uuid.GenUUID()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tg.ID = id\n\tg.Name = name\n\tg.Messages.Capacity = 500\n\tg.Phase = Development\n\tGames[g.ID] = g\n\tg.addMessage(fmt.Sprintf(\"Created game %s...\", g.Name))\n\tg.loadLocos()\n\tg.prepareLocos()\n\tg.initPlayers(playerNames)\n\tg.determineTurnOrder()\n\n\treturn g\n}",
"func newMessageDialogFromNative(obj unsafe.Pointer) interface{} {\n\td := &MessageDialog{}\n\td.object = C.to_GtkMessageDialog(obj)\n\n\tif gobject.IsObjectFloating(d) {\n\t\tgobject.RefSink(d)\n\t} else {\n\t\tgobject.Ref(d)\n\t}\n\td.Dialog = newDialogFromNative(obj).(*Dialog)\n\tmessageDialogFinalizer(d)\n\treturn d\n}",
"func newGameHandle(w http.ResponseWriter, r *http.Request) {\n\t// Use non-blocking send\n\tselect {\n\tcase model.NewGameCh <- 1:\n\tdefault:\n\t}\n}",
"func (d *driver) newGame() func() {\n\tg := game{d}\n\tg.player.Start(g.sender)\n\treturn g.forward\n}",
"func WindowNew(t WindowType) *Window {\n\tid := Candy().Guify(\"gtk_window_new\", t).String()\n\treturn NewWindow(Candy(), id)\n}",
"func (fx *Script) Add(dialog interface{}) {\n\n\tswitch dlg := dialog.(type) {\n\tcase Line:\n\t\td := NewDialog(dlg.Text)\n\t\td.Layer = dlg.Layer\n\t\td.Start = asstime.MStoSSA(dlg.StartTime + fx.Shift)\n\t\td.End = asstime.MStoSSA(dlg.EndTime + fx.Shift)\n\t\td.StyleName = dlg.StyleName\n\t\td.Actor = dlg.Actor\n\t\td.Effect = dlg.Effect\n\t\td.Tags = dlg.Tags\n\t\td.Comment = dlg.Comment\n\t\tfx.scriptOut.AddDialog(d)\n\tcase Syl:\n\t\td := NewDialog(dlg.Text)\n\t\td.Layer = dlg.Layer\n\t\td.Start = asstime.MStoSSA(dlg.StartTime + fx.Shift)\n\t\td.End = asstime.MStoSSA(dlg.EndTime + fx.Shift)\n\t\td.StyleName = dlg.StyleName\n\t\td.Actor = dlg.Actor\n\t\td.Effect = dlg.Effect\n\t\td.Tags = dlg.Tags\n\t\td.Comment = dlg.Comment\n\t\tfx.scriptOut.AddDialog(d)\n\tcase Char:\n\t\td := NewDialog(dlg.Text)\n\t\td.Layer = dlg.Layer\n\t\td.Start = asstime.MStoSSA(dlg.StartTime + fx.Shift)\n\t\td.End = asstime.MStoSSA(dlg.EndTime + fx.Shift)\n\t\td.StyleName = dlg.StyleName\n\t\td.Actor = dlg.Actor\n\t\td.Effect = dlg.Effect\n\t\td.Tags = dlg.Tags\n\t\td.Comment = dlg.Comment\n\t\tfx.scriptOut.AddDialog(d)\n\tdefault:\n\t\tfmt.Println(\"Not admitted object\")\n\t}\n\n}",
"func DialogBox(\n\thInstance HINSTANCE,\n\tTemplateName string,\n\thWndParent HWND,\n\tlpDialogFunc DLGPROC,\n) INT_PTR {\n\tvar ret, _, _ = userDialogBoxParamW.Call(\n\t\tuintptr(hInstance),\n\t\tUintptrFromString(&TemplateName),\n\t\tuintptr(hWndParent),\n\t\tuintptr(lpDialogFunc),\n\t\t0,\n\t)\n\treturn INT_PTR(ret)\n}",
"func newGame() *guessingGame {\n\treturn &guessingGame{\n\t\tnum: rand.Intn(10) + 1,\n\t}\n}",
"func (c Client) newGameRequest() {\n\terr := c.Encoder.Encode(messages.PlayerReq{Action: game.NewGame})\n\tif err != nil {\n\t\tfmt.Fprintf(c.Output, \"unexpected error: %v \\n\", err)\n\t}\n\n\tvar resp messages.GameStateResp\n\terr = c.decodeResponse(&resp)\n\tif err != nil {\n\t\tfmt.Fprintf(c.Output, \"unexpected error: %s \\n\", err)\n\t}\n\n\tif resp.Error != nil {\n\t\tfmt.Fprintln(c.Output, resp.Error)\n\t} else {\n\t\tfmt.Fprintf(c.Output, \"Guess the hero: %s \\n\", resp.State.WordToGuess)\n\t\tfmt.Fprintln(c.Output, drawing.Display[len(resp.State.CharsTried)])\n\t\tfmt.Fprintf(c.Output, \"Characters tried: %s \\n\", strings.Join(resp.State.CharsTried, \" - \"))\n\t}\n}",
"func (game *Game) StartNewGame() {\n\tgame.field = startField\n\tgame.finished = false\n\tgame.turn = \"x\"\n\tgame.winRow = zeroRow\n\tgame.winner = \"\"\n}",
"func NewDialogData(\n\tdynamo *awsutils.DynamoRequest,\n\tenvironmentName,\n\torganization,\n\tdialogRepo ,\n\tdialogFolder ,\n\tdialogCatalog ,\n\tdialogTable ,\n\taliasFolder ,\n\tlearnMoreRepo ,\n\tlearnMoreFolder ,\n\tbuildBranch ,\n\tcultivationBranch ,\n\tmasterBranch string,\n) (rv DialogData) {\n\tif dynamo == nil ||\n\t\tenvironmentName == \"\" ||\n\t\torganization == \"\" ||\n\t\tdialogRepo == \"\" ||\n\t\tdialogFolder == \"\" ||\n\t\tdialogCatalog == \"\" ||\n\t\tdialogTable == \"\" ||\n\t\taliasFolder == \"\" ||\n\t\tlearnMoreRepo == \"\" ||\n\t\tlearnMoreFolder == \"\" ||\n\t\tbuildBranch == \"\" ||\n\t\tcultivationBranch == \"\" ||\n\t\tmasterBranch == \"\"\t{\n\t\tpanic(\"cannot have empty initialization values\")\n\t}\n\n\trv.EnvironmentName = environmentName\n\trv.Organization = organization\n\trv.DialogRepo = dialogRepo\n\trv.DialogFolder = dialogFolder\n\trv.DialogCatalog = dialogCatalog\n\trv.DialogTable = environmentName+dialogTable\n\trv.AliasFolder = aliasFolder\n\trv.LearnMoreRepo = learnMoreRepo\n\trv.LearnMoreFolder = learnMoreFolder\n\trv.BuildBranch = buildBranch\n\trv.CultivationBranch = cultivationBranch\n\trv.MasterBranch = masterBranch\n\trv.Modified = false\n\trv.BuildID = core_utils_go.Uuid()\n\trv.dialogIDs = make(map[string]bool)\n\trv.dynamo = dynamo\n\treturn rv\n}",
"func newGame() *game {\n\trand.Seed(time.Now().UTC().UnixNano())\n\tg := &game{}\n\tg.State = \"lobby\"\n\tg.StateTime = time.Now()\n\tg.Name = \"MafiosoGame\"\n\tg.Winner = \"\"\n\tg.Players = make([]*player, 0)\n\tif g.Id != \"\" {\n\t\tgameList[g.Id] = g\n\t}\n\treturn g\n}",
"func handleNewCommand() {\n\tneoCliRoot := os.Getenv(\"GOPATH\") + \"/src/github.com/ivpusic/neo/cmd/neo\"\n\n\tif len(*templateName) == 0 {\n\t\tlogger.Info(\"Creating Neo project\")\n\t\trunCmd(neoCliRoot+\"/scripts/neo-template\", []string{*projectName})\n\n\t} else {\n\t\tswitch *templateName {\n\t\tcase \"angular\":\n\t\t\tlogger.Info(\"Creating Neo Angular project\")\n\t\t\trunCmd(neoCliRoot+\"/scripts/angular-template\", []string{*projectName})\n\t\tcase \"html\":\n\t\t\tlogger.Info(\"Creating Neo HTML project\")\n\t\t\trunCmd(neoCliRoot+\"/scripts/neo-html-template\", []string{*projectName})\n\t\tdefault:\n\t\t\tlogger.Errorf(\"Unkonown template %s!\", *projectName)\n\t\t}\n\t}\n}",
"func (d MessagesDialogs) construct() MessagesDialogsClass { return &d }",
"func (me TxsdShow) IsNew() bool { return me == \"new\" }",
"func (t *tbfe) MessageDialog(msg string) {\n\tlog4go.Info(msg)\n}",
"func (t *tbfe) MessageDialog(msg string) {\n\tlog4go.Info(msg)\n}",
"func (d MessagesDialogsNotModified) construct() MessagesDialogsClass { return &d }",
"func (d *Dialog) String() string {\n\tvar builder strings.Builder\n\tbuilder.WriteString(\"Dialog(\")\n\tbuilder.WriteString(fmt.Sprintf(\"id=%v\", d.ID))\n\tbuilder.WriteString(\", create_time=\")\n\tbuilder.WriteString(d.CreateTime.Format(time.ANSIC))\n\tbuilder.WriteString(\", update_time=\")\n\tbuilder.WriteString(d.UpdateTime.Format(time.ANSIC))\n\tbuilder.WriteString(\", meta=\")\n\tbuilder.WriteString(fmt.Sprintf(\"%v\", d.Meta))\n\tbuilder.WriteByte(')')\n\treturn builder.String()\n}",
"func newGame(width, height int) *Game {\n\treturn &Game{\n\t\tw: width,\n\t\th: height,\n\t}\n}",
"func (d DialogPeer) construct() DialogPeerClass { return &d }",
"func (o *EditorPlugin) GetScriptCreateDialog() ScriptCreateDialogImplementer {\n\t//log.Println(\"Calling EditorPlugin.GetScriptCreateDialog()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"EditorPlugin\", \"get_script_create_dialog\")\n\n\t// Call the parent method.\n\t// ScriptCreateDialog\n\tretPtr := gdnative.NewEmptyObject()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := newScriptCreateDialogFromPointer(retPtr)\n\n\t// Check to see if we already have an instance of this object in our Go instance registry.\n\tif instance, ok := InstanceRegistry.Get(ret.GetBaseObject().ID()); ok {\n\t\treturn instance.(ScriptCreateDialogImplementer)\n\t}\n\n\t// Check to see what kind of class this is and create it. This is generally used with\n\t// GetNode().\n\tclassName := ret.GetClass()\n\tif className != \"ScriptCreateDialog\" {\n\t\tactualRet := getActualClass(className, ret.GetBaseObject())\n\t\treturn actualRet.(ScriptCreateDialogImplementer)\n\t}\n\n\treturn &ret\n}",
"func newGLWindow(opts *oswin.NewWindowOptions) (*glfw.Window, error) {\n\t_, _, tool, fullscreen := oswin.WindowFlagsToBool(opts.Flags)\n\tglfw.DefaultWindowHints()\n\tglfw.WindowHint(glfw.Resizable, glfw.True)\n\tglfw.WindowHint(glfw.Visible, glfw.True) // needed to position\n\tglfw.WindowHint(glfw.Focused, glfw.True)\n\tglfw.WindowHint(glfw.ContextVersionMajor, 4) // 4.1 is max supported on macos\n\tglfw.WindowHint(glfw.ContextVersionMinor, 1)\n\tglfw.WindowHint(glfw.OpenGLProfile, glfw.OpenGLCoreProfile)\n\tglfw.WindowHint(glfw.OpenGLForwardCompatible, glfw.True)\n\tglfw.WindowHint(glfw.Samples, 0) // don't do multisampling for main window -- only in sub-render\n\tif glosDebug {\n\t\tglfw.WindowHint(glfw.OpenGLDebugContext, glfw.True)\n\t}\n\n\t// todo: glfw.Samples -- multisampling\n\tif fullscreen {\n\t\tglfw.WindowHint(glfw.Maximized, glfw.True)\n\t}\n\tif tool {\n\t\tglfw.WindowHint(glfw.Decorated, glfw.False)\n\t} else {\n\t\tglfw.WindowHint(glfw.Decorated, glfw.True)\n\t}\n\t// todo: glfw.Floating for always-on-top -- could set for modal\n\twin, err := glfw.CreateWindow(opts.Size.X, opts.Size.Y, opts.GetTitle(), nil, nil)\n\tif err != nil {\n\t\treturn win, err\n\t}\n\twin.SetPos(opts.Pos.X, opts.Pos.Y)\n\treturn win, err\n}",
"func newGame(renderer *sdl.Renderer) (*Game, error) {\n\tground, err := ground.NewGrounds(renderer)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\ttrex, err := trex.NewTrex(renderer)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcactus, err := cactus.NewCactus(renderer)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tclouds, err := clouds.NewClouds(renderer)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Game{\n\t\trenderer: renderer,\n\t\tground: ground,\n\t\ttrex: trex,\n\t\tcactus: cactus,\n\t\tclouds: clouds,\n\t}, nil\n}",
"func InputDialog(opt ...interface{}) string {\n b, _ := gtk.BuilderNewFromFile(\"glade/input-dialog.glade\")\n d := GetDialog(b, \"input_dialog\")\n entry := GetEntry(b, \"input_entry\")\n\n for i, v := range(opt) {\n if i % 2 == 0 {\n key := v.(string)\n switch key {\n case \"title\":\n d.SetTitle(opt[i+1].(string))\n case \"label\":\n l := GetLabel(b,\"input_label\")\n l.SetText(opt[i+1].(string))\n case \"password-mask\":\n entry.SetInvisibleChar(opt[i+1].(rune))\n entry.SetVisibility(false)\n case \"default\":\n entry.SetText(opt[i+1].(string))\n }\n }\n }\n\n output := \"\"\n entry.Connect(\"activate\", func (o *gtk.Entry) { d.Response(gtk.RESPONSE_OK) } )\n btok := GetButton(b, \"bt_ok\")\n btok.Connect(\"clicked\", func (b *gtk.Button) { d.Response(gtk.RESPONSE_OK) } )\n\n btcancel := GetButton(b, \"bt_cancel\")\n btcancel.Connect(\"clicked\", func (b *gtk.Button) { d.Response(gtk.RESPONSE_CANCEL) } )\n\n code := d.Run()\n if code == gtk.RESPONSE_OK {\n output, _ = entry.GetText()\n }\n\n d.Destroy()\n return output\n}",
"func createBookListDialog(controls *ControlList, conf *cf.Config) {\n\tcontrols.bookListWindow = ui.AddWindow(0, 0, 12, 7, \"Book list\")\n\tcontrols.bookListWindow.SetPack(ui.Vertical)\n\tcontrols.bookListWindow.SetModal(true)\n\n\tcontrols.bookTable = ui.CreateTableView(controls.bookListWindow, minWidth, minHeight, 1)\n\tcontrols.bookInfoDetail = ui.CreateLabel(controls.bookListWindow, 1, 1, \"\", ui.Fixed)\n\tui.ActivateControl(controls.bookListWindow, controls.bookTable)\n\tcontrols.bookTable.SetShowLines(true)\n\tcontrols.bookTable.SetShowRowNumber(true)\n\tcontrols.bookListWindow.SetMaximized(true)\n\n\tcontrols.bookTable.SetRowCount(len(conf.DbDriver.FilteredBooks()))\n\tcontrols.bookListWindow.SetTitle(fmt.Sprintf(\"Book list [%s]\", conf.DbDriver.Filter()))\n\n\tcols := []ui.Column{\n\t\tui.Column{Title: \"Author\", Width: 16, Alignment: ui.AlignLeft},\n\t\tui.Column{Title: \"Title\", Width: 25, Alignment: ui.AlignLeft},\n\t\tui.Column{Title: \"Done\", Width: 4, Alignment: ui.AlignRight},\n\t\tui.Column{Title: \"Sequence\", Width: 8, Alignment: ui.AlignLeft},\n\t\tui.Column{Title: \"Genre\", Width: 8, Alignment: ui.AlignLeft},\n\t\tui.Column{Title: \"Added\", Width: 20, Alignment: ui.AlignLeft},\n\t\tui.Column{Title: \"Completed\", Width: 20, Alignment: ui.AlignLeft},\n\t\tui.Column{Title: \"FilePath\", Width: 100, Alignment: ui.AlignLeft},\n\t}\n\tcontrols.bookTable.SetColumns(cols)\n\n\t// override OnKeyDown to support incremental search and\n\t// opening selected book by pressing Enter\n\t// Escape closes the dialog without doing anything\n\tcontrols.bookListWindow.OnKeyDown(func(ev ui.Event, data interface {}) bool {\n\t\tif ev.Ch != 0 {\n\t\t\tfilter := conf.DbDriver.Filter() + string(ev.Ch)\n\t\t\tconf.DbDriver.SetFilter(filter)\n\n\t\t\tcontrols.bookTable.SetRowCount(len(conf.DbDriver.FilteredBooks()))\n\t\t\tcontrols.bookListWindow.SetTitle(fmt.Sprintf(\"Book list [%s]\", filter))\n\t\t\treturn true\n\t\t}\n\n\t\tswitch ev.Key {\n\t\tcase term.KeyBackspace:\n\t\t\tfilter := conf.DbDriver.Filter()\n\t\t\tif filter != \"\" {\n\t\t\t\tfilter = xs.Slice(filter, 0, xs.Len(filter)-1)\n\t\t\t\tconf.DbDriver.SetFilter(filter)\n\n\t\t\t\tcontrols.bookTable.SetRowCount(len(conf.DbDriver.FilteredBooks()))\n\t\t\t\tcontrols.bookListWindow.SetTitle(fmt.Sprintf(\"Book list [%s]\", filter))\n\t\t\t}\n\t\t\treturn true\n\t\tcase term.KeyEsc:\n\t\t\tgo ui.PutEvent(ui.Event{Type: ui.EventCloseWindow})\n\t\t\treturn true\n\t\tcase term.KeyEnter:\n\t\t\trow := controls.bookTable.SelectedRow()\n\t\t\tif row != -1 {\n\t\t\t\tbook := conf.DbDriver.FilteredBooks()[row]\n\t\t\t\tif book.FilePath != conf.LastFile {\n\t\t\t\t\tcloseBook(conf)\n\t\t\t\t\tloadBook(controls, conf)\n\t\t\t\t}\n\t\t\t}\n\t\t\tgo ui.PutEvent(ui.Event{Type: ui.EventCloseWindow})\n\t\t\treturn true\n\t\t}\n\t\treturn false\n\t}, nil)\n\n\t// without overriding this function TableView shows empty values\n\tcontrols.bookTable.OnDrawCell(func(info *ui.ColumnDrawInfo) {\n\t\tfiltered := conf.DbDriver.FilteredBooks()\n\t\tif info.Row >= len(filtered) {\n\t\t\treturn\n\t\t}\n\t\tbook := filtered[info.Row]\n\t\tinfo.Text = getBookColumnText(book, info.Col)\n\t})\n\n\t// override onSelect to display full cell text in a 'statusbar' - the\n\t// widget at the bottom of the dialog\n\tcontrols.bookTable.OnSelectCell(func(col, row int) {\n\t\tif col == -1 || row == -1 {\n\t\t\treturn\n\t\t}\n\t\tfiltered := conf.DbDriver.FilteredBooks()\n\t\tbook := filtered[row]\n\t\tcontrols.bookInfoDetail.SetTitle(getBookColumnText(book, 
col))\n\t})\n\n\t// override it to do custom sorting and delete a book from library\n\tcontrols.bookTable.OnAction(func(ev ui.TableEvent) {\n\t\tfiltered := conf.DbDriver.FilteredBooks()\n\t\tif ev.Action == ui.TableActionDelete {\n\t\t\tif ev.Row == -1 {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tbook := filtered[ev.Row]\n\t\t\tcontrols.bookListWindow.SetModal(false)\n\t\t\tcontrols.askLabel.SetTitle(fmt.Sprintf(\"Information about book <c:bright green>'%s'<c:> will be removed from the library. Continue?\", book.Title))\n\t\t\tcontrols.askWindow.SetModal(true)\n\t\t\tcontrols.askWindow.SetVisible(true)\n\t\t\tui.ActivateControl(controls.askWindow, controls.askCancel)\n\n\t\t\tcontrols.askRemove.OnClick(func(evBtn ui.Event) {\n\t\t\t\tconf.DbDriver.DeleteBookByIndex(ev.Row)\n\t\t\t\tcontrols.bookTable.SetRowCount(controls.bookTable.RowCount() - 1)\n\n\t\t\t\tcontrols.askWindow.SetModal(false)\n\t\t\t\tcontrols.askWindow.SetVisible(false)\n\t\t\t\tui.ActivateControl(controls.bookListWindow, controls.bookTable)\n\t\t\t})\n\n\t\t\treturn\n\t\t}\n\n\t\tif ev.Action != ui.TableActionSort {\n\t\t\treturn\n\t\t}\n\n\t\tif ev.Col == -1 {\n\t\t\treturn\n\t\t}\n\t\tfields := []string{\n\t\t\tcommon.FIELD_AUTHOR,\n\t\t\tcommon.FIELD_TITLE,\n\t\t\tcommon.FIELD_PERCENT,\n\t\t\t\"\",\n\t\t\tcommon.FIELD_GENRE,\n\t\t\tcommon.FIELD_ADDED,\n\t\t\tcommon.FIELD_COMPLETED,\n\t\t}\n\n\t\tif ev.Sort == ui.SortNone || ev.Col >= len(fields) {\n\t\t\tconf.DbDriver.SetSortMode(common.FIELD_AUTHOR, true)\n\t\t\treturn\n\t\t}\n\n\t\tfield := fields[ev.Col]\n\t\tif field == \"\" {\n\t\t\tconf.DbDriver.SetSortMode(common.FIELD_AUTHOR, ev.Sort == ui.SortAsc)\n\t\t} else {\n\t\t\tconf.DbDriver.SetSortMode(field, ev.Sort == ui.SortAsc)\n\t\t}\n\t})\n}",
"func newGLWindow(opts *oswin.NewWindowOptions, sc *oswin.Screen) (*glfw.Window, error) {\n\t_, _, tool, fullscreen := oswin.WindowFlagsToBool(opts.Flags)\n\tglfw.DefaultWindowHints()\n\tglfw.WindowHint(glfw.Resizable, glfw.True)\n\tglfw.WindowHint(glfw.Visible, glfw.False) // needed to position\n\tglfw.WindowHint(glfw.Focused, glfw.True)\n\t// glfw.WindowHint(glfw.ScaleToMonitor, glfw.True)\n\tglfw.WindowHint(glfw.ContextVersionMajor, glosGlMajor)\n\tglfw.WindowHint(glfw.ContextVersionMinor, glosGlMinor)\n\tglfw.WindowHint(glfw.OpenGLProfile, glfw.OpenGLCoreProfile)\n\tglfw.WindowHint(glfw.OpenGLForwardCompatible, glfw.True)\n\tglfw.WindowHint(glfw.Samples, 0) // don't do multisampling for main window -- only in sub-render\n\tif glosDebug {\n\t\tglfw.WindowHint(glfw.OpenGLDebugContext, glfw.True)\n\t}\n\n\t// todo: glfw.Samples -- multisampling\n\tif fullscreen {\n\t\tglfw.WindowHint(glfw.Maximized, glfw.True)\n\t}\n\tif tool {\n\t\tglfw.WindowHint(glfw.Decorated, glfw.False)\n\t} else {\n\t\tglfw.WindowHint(glfw.Decorated, glfw.True)\n\t}\n\t// todo: glfw.Floating for always-on-top -- could set for modal\n\tsz := opts.Size // note: this is already in standard window size units!\n\twin, err := glfw.CreateWindow(sz.X, sz.Y, opts.GetTitle(), nil, theApp.shareWin)\n\tif err != nil {\n\t\treturn win, err\n\t}\n\twin.SetPos(opts.Pos.X, opts.Pos.Y)\n\treturn win, err\n}",
"func (me TxsdShow) IsNew() bool { return me.String() == \"new\" }",
"func newWindow(width, height int) (wdeWindow wde.Window, err error) {\n\tw := &sdlWindow{}\n\twdeWindow = w\n\tsdlWrap.Size <- &geom.Coord{float64(width), float64(height)}\n\tw.Surface = <-sdlWrap.Surface\n\tw.events = make(chan interface{}, 16)\n\tgo w.poolSdl()\n\n\tif w.Surface == nil {\n\t\terr = sdlError(sdl.GetError())\n\t\treturn\n\t}\n\n\treturn\n}",
"func FrameNew(label string) *Frame {\n\tid := Candy().Guify(\"gtk_frame_new\", label).String()\n\treturn NewFrame(Candy(), id)\n}",
"func (b *App) NewWindow(url, title string) error {\n\tlog.Println(\"=== gallium.NewWindow ===\")\n\tcerr := newCerr()\n\tdefer cerr.free()\n\tC.GalliumCreateWindow(C.CString(url), C.CString(title), &cerr.c)\n\treturn nil\n}",
"func createComponentWindow(sX, sY, sW, sH float32) *gui.Window {\n\t// create a window for operating on the component file\n\tcomponentWindow := uiman.NewWindow(\"Component\", sX, sY, sW, sH, func(wnd *gui.Window) {\n\t\tloadComponent, _ := wnd.Button(\"componentFileLoadButton\", \"Load\")\n\t\tsaveComponent, _ := wnd.Button(\"componentFileSaveButton\", \"Save\")\n\t\twnd.Editbox(\"componentFileEditbox\", &flagComponentFile)\n\t\tif saveComponent {\n\t\t\terr := doSaveComponent(&theComponent, flagComponentFile)\n\t\t\tif err != nil {\n\t\t\t\tfmt.Printf(\"Failed to save the component.\\n%v\\n\", err)\n\t\t\t} else {\n\t\t\t\tfmt.Printf(\"Saved the component file: %s\\n\", flagComponentFile)\n\t\t\t}\n\t\t}\n\n\t\tif loadComponent {\n\t\t\t// remove all existing mesh windows\n\t\t\tcloseAllMeshWindows()\n\t\t\t// load the component file again and create mesh windows / renderables\n\t\t\tdoLoadComponentFile(flagComponentFile)\n\t\t}\n\n\t\twnd.Separator()\n\t\twnd.RequestItemWidthMin(textWidth)\n\t\twnd.Text(\"Name\")\n\t\twnd.Editbox(\"componentNameEditbox\", &theComponent.Name)\n\n\t\t// do the user interface for mesh windows\n\t\twnd.Separator()\n\t\twnd.RequestItemWidthMin(textWidth)\n\t\twnd.Text(\"Meshes:\")\n\t\taddMesh, _ := wnd.Button(\"componentFileAddMeshButton\", \"Add Mesh\")\n\t\tif addMesh {\n\t\t\tdoAddMesh()\n\t\t}\n\n\t\tmeshesThatSurvive := theComponent.Meshes[:0]\n\t\tfor compMeshIndex, compMesh := range theComponent.Meshes {\n\t\t\twnd.StartRow()\n\t\t\twnd.RequestItemWidthMin(textWidth)\n\t\t\twnd.Text(fmt.Sprintf(\"%s\", compMesh.Name))\n\t\t\tshowMeshWnd, _ := wnd.Button(fmt.Sprintf(\"buttonShowMesh%d\", compMeshIndex), \"Show\")\n\t\t\thideMeshWnd, _ := wnd.Button(fmt.Sprintf(\"buttonHideMesh%d\", compMeshIndex), \"Hide\")\n\t\t\tdeleteMesh, _ := wnd.Button(fmt.Sprintf(\"buttonDeleteMesh%d\", compMeshIndex), \"Delete\")\n\t\t\tif showMeshWnd {\n\t\t\t\tdoShowMeshWindow(compMesh)\n\t\t\t}\n\t\t\tif hideMeshWnd || deleteMesh {\n\t\t\t\tdoHideMeshWindow(compMesh)\n\t\t\t}\n\t\t\tif !deleteMesh {\n\t\t\t\tmeshesThatSurvive = append(meshesThatSurvive, compMesh)\n\t\t\t} else {\n\t\t\t\tdoDeleteMesh(compMesh.Name)\n\t\t\t}\n\n\t\t}\n\t\t// FIXME: not Destroying renderables for meshes that don't survive\n\t\ttheComponent.Meshes = meshesThatSurvive\n\n\t\t// do the user interface for colliders\n\t\twnd.Separator()\n\t\twnd.RequestItemWidthMin(textWidth)\n\t\twnd.Text(\"Colliders: \")\n\t\taddNewCollider, _ := wnd.Button(\"buttonAddCollider\", \"Add Collider\")\n\t\tif addNewCollider {\n\t\t\tdoAddCollider(&theComponent)\n\t\t}\n\n\t\tcollidersThatSurvive := theComponent.Collisions[:0]\n\t\tvisibleCollidersThatSurvive := visibleColliders[:0]\n\t\tfor colliderIndex, collider := range theComponent.Collisions {\n\t\t\twnd.StartRow()\n\t\t\twnd.RequestItemWidthMin(textWidth)\n\t\t\twnd.Text(fmt.Sprintf(\"Collider %d:\", colliderIndex))\n\n\t\t\tdelCollider, _ := wnd.Button(fmt.Sprintf(\"buttonDeleteCollider%d\", colliderIndex), \"X\")\n\t\t\tprevColliderType, _ := wnd.Button(fmt.Sprintf(\"buttonPrevColliderType%d\", colliderIndex), \"<\")\n\t\t\tnextColliderType, _ := wnd.Button(fmt.Sprintf(\"buttonNextColliderType%d\", colliderIndex), \">\")\n\n\t\t\tif !delCollider {\n\t\t\t\tcollidersThatSurvive = append(collidersThatSurvive, collider)\n\n\t\t\t\tif prevColliderType {\n\t\t\t\t\tdoPrevColliderType(collider)\n\t\t\t\t}\n\t\t\t\tif nextColliderType {\n\t\t\t\t\tdoNextColliderType(collider)\n\t\t\t\t}\n\n\t\t\t\tswitch collider.Type {\n\t\t\t\tcase 
component.ColliderTypeAABB:\n\t\t\t\t\twnd.Text(\"Axis Aligned Bounding Box\")\n\t\t\t\t\twnd.StartRow()\n\t\t\t\t\twnd.Space(textWidth)\n\t\t\t\t\twnd.RequestItemWidthMin(width4Col)\n\t\t\t\t\twnd.Text(\"Min\")\n\t\t\t\t\tguiAddDragSliderVec3(wnd, width4Col, \"ColliderMin\", colliderIndex, 0.01, &collider.Min)\n\n\t\t\t\t\twnd.StartRow()\n\t\t\t\t\twnd.Space(textWidth)\n\t\t\t\t\twnd.RequestItemWidthMin(width4Col)\n\t\t\t\t\twnd.Text(\"Max\")\n\t\t\t\t\tguiAddDragSliderVec3(wnd, width4Col, \"ColliderMax\", colliderIndex, 0.01, &collider.Max)\n\n\t\t\t\tcase component.ColliderTypeSphere:\n\t\t\t\t\twnd.Text(\"Sphere\")\n\t\t\t\t\twnd.StartRow()\n\t\t\t\t\twnd.Space(textWidth)\n\t\t\t\t\twnd.RequestItemWidthMin(width4Col)\n\t\t\t\t\twnd.Text(\"Offset\")\n\t\t\t\t\tguiAddDragSliderVec3(wnd, width4Col, \"ColliderOffset\", colliderIndex, 0.01, &collider.Offset)\n\n\t\t\t\t\twnd.StartRow()\n\t\t\t\t\twnd.Space(textWidth)\n\t\t\t\t\twnd.RequestItemWidthMin(width4Col)\n\t\t\t\t\twnd.Text(\"Radius\")\n\t\t\t\t\twnd.DragSliderFloat(fmt.Sprintf(\"ColliderRadius%d\", colliderIndex), 0.01, &collider.Radius)\n\t\t\t\tdefault:\n\t\t\t\t\twnd.Text(fmt.Sprintf(\"Unknown collider (%d)!\", collider.Type))\n\t\t\t\t}\n\n\t\t\t\t// see if we need to update the renderable if it exists already\n\t\t\t\tvisibleColliders = doUpdateVisibleCollider(visibleColliders, collider, colliderIndex)\n\t\t\t\tvisibleCollidersThatSurvive = append(visibleCollidersThatSurvive, visibleColliders[colliderIndex])\n\t\t\t}\n\t\t}\n\t\ttheComponent.Collisions = collidersThatSurvive\n\t\tvisibleColliders = visibleCollidersThatSurvive\n\n\t\twnd.Separator()\n\t\twnd.RequestItemWidthMin(textWidth)\n\t\twnd.Text(\"Child Components:\")\n\t\taddChildComponent, _ := wnd.Button(\"addChildComponent\", \"Add Child\")\n\t\tif addChildComponent {\n\t\t\tdoAddChildReference(&theComponent)\n\t\t}\n\n\t\tchildRefsThatSurvive := theComponent.ChildReferences[:0]\n\t\tfor childRefIndex, childRef := range theComponent.ChildReferences {\n\t\t\twnd.StartRow()\n\t\t\twnd.RequestItemWidthMin(textWidth)\n\t\t\twnd.Text(\"File:\")\n\t\t\tremoveReference, _ := wnd.Button(fmt.Sprintf(\"childRefRemove%d\", childRefIndex), \"X\")\n\t\t\tloadChildReference, _ := wnd.Button(fmt.Sprintf(\"childRefLoad%d\", childRefIndex), \"L\")\n\t\t\twnd.Editbox(fmt.Sprintf(\"childRefFileEditbox%d\", childRefIndex), &childRef.File)\n\n\t\t\twnd.StartRow()\n\t\t\twnd.Space(textWidth)\n\t\t\twnd.RequestItemWidthMin(width4Col)\n\t\t\twnd.Text(\"Offset\")\n\t\t\tguiAddDragSliderVec3(wnd, width4Col, \"childRefLocation\", childRefIndex, 0.01, &childRef.Location)\n\n\t\t\twnd.StartRow()\n\t\t\twnd.Space(textWidth)\n\t\t\twnd.RequestItemWidthMin(width4Col)\n\t\t\twnd.Text(\"Scale\")\n\t\t\tguiAddDragSliderVec3(wnd, width4Col, \"childRefScale\", childRefIndex, 0.01, &childRef.Scale)\n\n\t\t\twnd.StartRow()\n\t\t\twnd.Space(textWidth)\n\t\t\twnd.RequestItemWidthMin(width4Col)\n\t\t\twnd.Text(\"Rot Axis\")\n\t\t\tguiAddDragSliderVec3(wnd, width4Col, \"childRefRotAxis\", childRefIndex, 0.01, &childRef.RotationAxis)\n\n\t\t\twnd.StartRow()\n\t\t\twnd.Space(textWidth)\n\t\t\twnd.RequestItemWidthMin(width4Col)\n\t\t\twnd.Text(\"Rot Deg\")\n\t\t\twnd.DragSliderFloat(fmt.Sprintf(\"childRefRotDeg%d\", childRefIndex), 0.1, &childRef.RotationDegrees)\n\n\t\t\tif !removeReference {\n\t\t\t\tchildRefsThatSurvive = append(childRefsThatSurvive, childRef)\n\t\t\t}\n\t\t\tif loadChildReference {\n\t\t\t\tvar err error\n\t\t\t\tchildComponents, err = doLoadChildComponent(childComponents, 
childRef)\n\t\t\t\tif err != nil {\n\t\t\t\t\tfmt.Printf(\"Failed to load child component.\\n%v\\n\", err)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\ttheComponent.ChildReferences = childRefsThatSurvive\n\n\t\t// remove any visible child components that no longer have a reference\n\t\tchildComponents = removeStaleChildComponents(childComponents, &theComponent, childRefFilenames)\n\t})\n\treturn componentWindow\n}",
"func NewGame() *G {\n\t/* Fill in this Function */\n\treturn &G{}\n}",
"func (gui *Gui) createConfirmationPanel(g *gocui.Gui, currentView *gocui.View, title, prompt string, handleConfirm, handleClose func(*gocui.Gui, *gocui.View) error) error {\n\treturn gui.createPopupPanel(g, currentView, title, prompt, handleConfirm, handleClose)\n}",
"func newWindow(X *xgbutil.XUtil, color uint32) *xwindow.Window {\n\twin, err := xwindow.Generate(X)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\terr = win.CreateChecked(X.RootWin(), 0, 0, 400, 400,\n\t\txproto.CwBackPixel|xproto.CwEventMask,\n\t\tcolor, xproto.EventMaskPointerMotion)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\twin.Map()\n\treturn win\n}",
"func (s *service) NewGame(word string) Game {\n\treturn *NewGame(word)\n}",
"func (lob *Lobby) createGame(startingTeam int, round int) {\n\n\tnewGame := Game{\n\t\tGameUid: shortuuid.New(),\n\t\tStartingTeam: startingTeam,\n\t\tCurrentRound: round,\n\t\tTeam1Ready: false,\n\t\tTeam2Ready: false,\n\t\tGameState: created,\n\t\tTeam1UID: lob.Team1UID,\n\t\tTeam2UID: lob.Team2UID,\n\t}\n\n\tlob.Games = append(lob.Games, newGame)\n\n}",
"func (s *BasemumpsListener) EnterNew_(ctx *New_Context) {}",
"func newCreateCmd() *cobra.Command {\n\tcreateCmd := cobra.Command{\n\t\tUse: \"create\",\n\t\tShort: `Create a new verless object`,\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\treturn cmd.Help()\n\t\t},\n\t}\n\n\tcreateCmd.AddCommand(newCreateProjectCmd())\n\tcreateCmd.AddCommand(newCreateThemeCmd())\n\tcreateCmd.AddCommand(newCreateFile())\n\n\treturn &createCmd\n}",
"func newButton(eng vu.Engine, parent vu.Part, size int, icon string, action vu.Reaction) *button {\n\tbtn := &button{}\n\tbtn.model = parent.AddPart()\n\tbtn.action = action\n\tbtn.w, btn.h = size, size\n\n\t// create the button icon.\n\tbtn.icon = btn.model.AddPart()\n\tbtn.icon.SetFacade(\"icon\", \"uv\").SetMaterial(\"half\")\n\tbtn.icon.SetTexture(icon, 0)\n\tbtn.icon.SetScale(float64(btn.w/2), float64(btn.h/2), 1)\n\n\t// create a hilite that is only shown on mouse over.\n\tbtn.hilite = btn.model.AddPart()\n\tbtn.hilite.SetFacade(\"square\", \"flat\").SetMaterial(\"tblue\")\n\tbtn.hilite.SetScale(float64(btn.w/2.0), float64(btn.h/2.0), 1)\n\tbtn.hilite.SetVisible(false)\n\treturn btn\n}",
"func CreateForm(window *glfw.Window, font Font, screen Screen) Form {\n\treturn Form{window, []Button{}, []Text{}, font, screen, true}\n}",
"func CreateMainWindow() {\n\n\tvBox := tui.NewVBox()\n\tvBox.SetSizePolicy(tui.Minimum, tui.Minimum)\n\tSidebar := tui.NewVBox()\n\tSidebar.SetSizePolicy(tui.Minimum, tui.Minimum)\n\n\tfor _, cmd := range strings.Split(libs.Cmds, \",\") {\n\t\tSidebar.Append(tui.NewLabel(wordwrap.WrapString(cmd, 50)))\n\t}\n\n\tSidebar.SetBorder(true)\n\tSidebar.Prepend(tui.NewLabel(\"***COMMANDS***\"))\n\n\tInput.SetFocused(true)\n\tInput.SetSizePolicy(tui.Expanding, tui.Maximum)\n\n\tinputBox := tui.NewHBox(Input)\n\tinputBox.SetBorder(true)\n\tinputBox.SetSizePolicy(tui.Expanding, tui.Maximum)\n\n\thistoryScroll := tui.NewScrollArea(History)\n\thistoryScroll.SetAutoscrollToBottom(true)\n\thistoryBox := tui.NewVBox(historyScroll)\n\thistoryBox.SetBorder(true)\n\n\tchat := tui.NewVBox(historyBox, inputBox)\n\tchat.SetSizePolicy(tui.Expanding, tui.Expanding)\n\n\t// create root window and add all windows\n\troot := tui.NewHBox(Sidebar, chat)\n\tui, err := tui.New(root)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tui.SetKeybinding(\"Esc\", func() { ui.Quit() })\n\n\tInput.OnSubmit(func(e *tui.Entry) {\n\t\t// this is just to see what command given\n\t\tuserCommand := e.Text()\n\t\tif userCommand == \"\" {\n\t\t\tHistory.Append(tui.NewLabel(\"that is not acceptable command\"))\n\t\t\tHistory.Append(tui.NewLabel(libs.PrintHelp()))\n\t\t} else {\n\t\t\tHistory.Append(tui.NewHBox(\n\t\t\t\ttui.NewLabel(\"Your Command: \" + userCommand),\n\t\t\t))\n\t\t\tHistory.Append(tui.NewHBox(tui.NewLabel(\"\")))\n\n\t\t\tif strings.HasPrefix(userCommand, \"\\\\\") {\n\t\t\t\t// then this is command ..\n\t\t\t\tswitch userCommand {\n\t\t\t\tcase \"\\\\help\":\n\t\t\t\t\tHistory.Append(tui.NewLabel(libs.PrintHelp()))\n\t\t\t\tcase \"\\\\monitor\":\n\t\t\t\t\tHistory.Append(tui.NewLabel(\"Switching to MONITOR mode for device \" + DeviceName))\n\t\t\t\t\tChangeToMonitorMode()\n\t\t\t\tcase \"\\\\managed\":\n\t\t\t\t\tHistory.Append(tui.NewLabel(\"Switching to MANAGED mode for device \" + DeviceName))\n\t\t\t\t\tChangeToManagedMode()\n\t\t\t\tcase \"\\\\exit\":\n\t\t\t\t\tHistory.Append(tui.NewHBox(tui.NewLabel(\"quitting...\")))\n\t\t\t\t\ttime.Sleep(1000 * time.Millisecond)\n\t\t\t\t\t// os.Exit(0)\n\n\t\t\t\t}\n\t\t\t} else if strings.Contains(userCommand, \":\") {\n\t\t\t\t// then this is declaration\n\t\t\t\tcmdSplit := strings.Split(userCommand, \":\")\n\t\t\t\tif cmdSplit[1] == \"\" {\n\t\t\t\t\tHistory.Append(tui.NewLabel(\"that is not acceptable command\"))\n\t\t\t\t\tHistory.Append(tui.NewLabel(libs.PrintHelp()))\n\t\t\t\t} else {\n\t\t\t\t\tswitch cmdSplit[0] {\n\t\t\t\t\tcase \"device\":\n\t\t\t\t\t\tSetDeviceName(cmdSplit[1])\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tHistory.Append(tui.NewLabel(\"there is no such declaration or command\"))\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t} else {\n\t\t\t\tHistory.Append(tui.NewHBox(tui.NewLabel(userCommand + \" is not command or a declaration\")))\n\t\t\t}\n\t\t}\n\t\tInput.SetText(\"\")\n\t})\n\n\tif err := ui.Run(); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}",
"func DialogButton() page.EventI {\n\te := &page.Event{JsEvent: DialogButtonEvent}\n\te.ActionValue(javascript.JsCode(\"ui\"))\n\treturn e\n}",
"func (client *ClientRPC) CreateGame(name string, hostPassword string) bool {\n\t// Tell relay to host game\n\tsuccess := false\n\tdata := GameData{\n\t\tName: name,\n\t\tPassword: hostPassword,\n\t}\n\tfor i := 0; i < 2; i++ {\n\t\terr := client.relay.Call(\"ServerRPCMethods.NewGame\", data, &success)\n\t\tif err == nil {\n\t\t\tbreak\n\t\t}\n\t\tif err == rpc.ErrShutdown {\n\t\t\tif !client.connect() {\n\t\t\t\tlog.Printf(\"ClientRPC: Lost connection to relay and are unable to reconnect\")\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tlog.Printf(\"ClientRPC: Lost connection to relay but was able to reconnect\")\n\t\t} else {\n\t\t\tlog.Printf(\"ClientRPC error: %v\", err)\n\t\t\treturn false\n\t\t}\n\t}\n\treturn success\n}",
"func newGUIComputedQuestion(question interfaces.Question, expr interfaces.Expr, varID interfaces.VarID) *GUIComputedQuestion {\n\tguiQuestion := createDisabledGUIQuestion(question)\n\treturn &GUIComputedQuestion{GUIQuestion: guiQuestion, Expr: expr, VarID: varID}\n}",
"func (a *MockApp) NewWindow(title string) fyne.Window {\n\targs := a.Called(title)\n\treturn args.Get(0).(fyne.Window)\n}",
"func (b *WindowDialogFactory) CreateButton() Button {\n\treturn &WindowButton{}\n}",
"func (lob *Lobby) createDefaultGame(startingTeam int) {\n\n\tnewGame := Game{\n\t\tGameUid: shortuuid.New(),\n\t\tStartingTeam: startingTeam,\n\t\tCurrentRound: 1,\n\t\tTeam1Ready: false,\n\t\tTeam2Ready: false,\n\t\tGameState: selectedMap,\n\t\tTeam1UID: lob.Team1UID,\n\t\tTeam2UID: lob.Team2UID,\n\t\tGameMap: nagrand,\n\t}\n\n\tlob.Games = append(lob.Games, newGame)\n\n}",
"func newGUIInputQuestion(question interfaces.Question, callback func(interfaces.Expr, error)) *GUIInputQuestion {\n\treturn &GUIInputQuestion{GUIQuestion: createEnabledGUIQuestion(question, callback)}\n}",
"func BindNewButton(idd uintptr, dlg *Dialog) (*Button, error) {\n\tb := NewButton(idd)\n\terr := dlg.BindWidgets(b)\n\treturn b, err\n}",
"func NewWindow() *Window {\n\tfile := ui.NewFileWithName(\":/widget.ui\")\n\tloader := ui.NewUiLoader()\n\twidget := loader.Load(file)\n\n\t// Init main window\n\twindow := ui.NewMainWindow()\n\twindow.SetCentralWidget(widget)\n\twindow.SetWindowTitle(\"DFSS Demonstrator v\" + dfss.Version)\n\n\tw := &Window{\n\t\tQMainWindow: window,\n\t\tscene: &Scene{},\n\t}\n\tw.InstallEventFilter(w)\n\n\t// Load dynamic elements from driver\n\tw.logField = ui.NewTextEditFromDriver(widget.FindChild(\"logField\"))\n\tw.graphics = ui.NewGraphicsViewFromDriver(widget.FindChild(\"graphicsView\"))\n\tw.progress = ui.NewLabelFromDriver(widget.FindChild(\"progressLabel\"))\n\n\tw.playButton = ui.NewPushButtonFromDriver(widget.FindChild(\"playButton\"))\n\tw.stopButton = ui.NewPushButtonFromDriver(widget.FindChild(\"stopButton\"))\n\tw.replayButton = ui.NewPushButtonFromDriver(widget.FindChild(\"replayButton\"))\n\n\tw.quantumField = ui.NewSpinBoxFromDriver(widget.FindChild(\"quantumField\"))\n\tw.speedSlider = ui.NewSliderFromDriver(widget.FindChild(\"speedSlider\"))\n\n\t// Load pixmaps\n\tw.pixmaps = map[string]*ui.QPixmap{\n\t\t\"ttp\": ui.NewPixmapWithFilenameFormatFlags(\":/images/server_key.png\", \"\", ui.Qt_AutoColor),\n\t\t\"platform\": ui.NewPixmapWithFilenameFormatFlags(\":/images/server_connect.png\", \"\", ui.Qt_AutoColor),\n\t}\n\n\t// Load icons\n\tw.addIcons()\n\n\t// Add actions\n\tw.addActions()\n\tw.initScene()\n\tw.initTimer()\n\n\tw.StatusBar().ShowMessage(\"Ready\")\n\tw.PrintQuantumInformation()\n\treturn w\n}",
"func (c Threads) ShowNew() revel.Result {\n return c.Render()\n}",
"func DialogInfo(body string) {\n\tif currentWindow == nil {\n\t\treturn\n\t}\n\n\t(*currentWindow).Dispatch(func() {\n\t\t(*currentWindow).Dialog(webview.DialogTypeAlert, webview.DialogFlagInfo,\n\t\t\t\"DTransfer\", body)\n\t})\n}",
"func newGame() Game {\n\t//Create a starting board state\n\tboard := [8][8]int{}\n\n\tboard[3][3] = 1\n\tboard[3][4] = -1\n\tboard[4][3] = -1\n\tboard[4][4] = 1\n\n\t//Initialize game stats\n\tw := 2\n\tb := 2\n\toutcome := 0\n\tturn := -1\n\n\t//Create a game struct\n\tgame := Game{board, w, b, outcome, turn, []Cell{}, []Cell{}, []Cell{}}\n\n\t//Fill placed slice\n\tgame.placed = append(game.placed, Cell{3, 3})\n\tgame.placed = append(game.placed, Cell{3, 4})\n\tgame.placed = append(game.placed, Cell{4, 3})\n\tgame.placed = append(game.placed, Cell{4, 4})\n\n\t//Fill other slices\n\tgame.findAdjacents()\n\tgame.findValidMoves()\n\n\treturn game\n}",
"func CreatePrompt(db *sql.DB) *Prompt {\n\tp := &Prompt{\n\t\tdb: db,\n\t}\n\treturn p\n}",
"func (d DialogPeerFolder) construct() DialogPeerClass { return &d }",
"func (t *Table) NewGame() {\n\tfor _, p := range t.players {\n\t\tp.ResetPassedTo()\n\t\tp.ResetPassedFrom()\n\t\tp.ResetTricks()\n\t\tp.ResetScore()\n\t}\n\tt.trick = make([]*card.Card, len(t.players))\n\tt.NewRound()\n\tt.dir = direction.Right\n}",
"func CreateGame(options Options) Game {\n\tgame := Game{}\n\tgame.deck = deck.NewDeck()\n\tgame.deck.AddDecks(options.NumberOfDecks - 1)\n\tgame.deck.Shuffle()\n\tfor i := 0; i < options.NumberOfAI; i++ {\n\t\tgame.players = append(game.players, createAIPlayer())\n\t}\n\tfor i := 0; i < options.NumberOfHumans; i++ {\n\t\tgame.players = append(game.players, createHumanPlayer())\n\t}\n\tgame.dealer = createDealer()\n\tgame.numberOfHands = options.NumberOfHands\n\tgame.numberOfDecks = options.NumberOfDecks\n\tgame.blackjackPayout = options.BlackjackPayout\n\treturn game\n}",
"func dialogue(conn net.Conn) {\n\tdefer conn.Close()\n\tprocessor := process.Processor{Conn: conn}\n\tprocessor.MainProcess()\n}",
"func (g *GameController) NewGame(user *models.User, name string,\n\tmaxPlayers int, password string) (string, error) {\n\tif g.db.GamesDAO.IsPlayerInActiveGame(user.ID) {\n\t\tlog.Debugf(\"User %v (%v) already participating in a game\\n\", user.Username, user.ID.Hex())\n\t\treturn \"\", ErrAlreadyInGame\n\t}\n\tgame, err := g.db.GamesDAO.CreateGame(user, name, maxPlayers, password)\n\tif err != nil {\n\t\treturn \"\", ErrCreateGame\n\t}\n\n\tg.registerActiveGame(game)\n\n\treturn game.ID.Hex(), nil\n}",
"func (match *Match) NewGame(shuffle bool) error {\n\tmatch.deck = buildDeck(shuffle)\n\terr := match.deal()\n\tmatch.playerOneLed = match.dealerPlayerOne\n\tmatch.buildMeldSlices()\n\treturn err\n}",
"func CreateSingleplayer(g *gocui.Gui) error {\n\ttext, err := game.ChooseText()\n\tif err != nil {\n\t\treturn err\n\t}\n\tstate := game.NewState(text)\n\n\tw, h := g.Size()\n\n\tstatsFrameWi := widgets.NewCollection(\"singleplayer-stats\", \"STATS\", false, 0, 0, w/5, h)\n\n\tstatWis := []*widgets.Text{\n\t\twidgets.NewText(\"singleplayer-stats-wpm\", \"wpm: 0 \", false, false, 2, 1),\n\t\twidgets.NewText(\"singleplayer-stats-time\", \"time: 0s \", false, false, 2, 2),\n\t}\n\n\ttextFrameWi := widgets.NewCollection(\"singleplayer-text\", \"\", false, w/5+1, 0, 4*w/5, 5*h/6+1)\n\n\tpoints := organiseText(state.Words, 4*w/5-2)\n\tvar textWis []*widgets.Text\n\tfor i, p := range points {\n\t\ttextWis = append(textWis, widgets.NewText(\"singleplayer-text-\"+strconv.Itoa(i), state.Words[i], false, false, w/5+1+p.x, p.y))\n\t}\n\n\tvar inputWi *widgets.Input\n\tinputWi = widgets.NewInput(\"singleplayer-input\", true, false, w/5+1, h-h/6, w-w/5-1, h/6, func(v *gocui.View, key gocui.Key, ch rune, mod gocui.Modifier) bool {\n\t\tif key == gocui.KeyEnter || len(v.Buffer()) == 0 && ch == 0 {\n\t\t\treturn false\n\t\t}\n\n\t\tif state.StartTime.IsZero() {\n\t\t\tstate.Start()\n\t\t\tgo func() {\n\t\t\t\tticker := time.NewTicker(100 * time.Millisecond)\n\t\t\t\tfor {\n\t\t\t\t\t<-ticker.C\n\t\t\t\t\tif state.CurrWord == len(state.Words) {\n\t\t\t\t\t\tticker.Stop()\n\t\t\t\t\t\treturn\n\t\t\t\t\t}\n\n\t\t\t\t\tg.Update(func(g *gocui.Gui) error {\n\t\t\t\t\t\terr := statWis[1].ChangeText(\n\t\t\t\t\t\t\tfmt.Sprintf(\"time: %.02fs\", time.Since(state.StartTime).Seconds()),\n\t\t\t\t\t\t)(g)\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\treturn err\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\terr = statWis[0].ChangeText(\n\t\t\t\t\t\t\tfmt.Sprintf(\"wpm: %.0f\", state.Wpm()),\n\t\t\t\t\t\t)(g)\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\treturn err\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn nil\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t}()\n\t\t}\n\n\t\tgocui.DefaultEditor.Edit(v, key, ch, mod)\n\n\t\tb := v.Buffer()[:len(v.Buffer())-1]\n\n\t\tif ch != 0 && (len(b) > len(state.Words[state.CurrWord]) || rune(state.Words[state.CurrWord][len(b)-1]) != ch) {\n\t\t\tstate.IncError()\n\t\t}\n\n\t\tansiWord := state.PaintDiff(b)\n\n\t\tg.Update(textWis[state.CurrWord].ChangeText(ansiWord))\n\n\t\tif b == state.Words[state.CurrWord] {\n\t\t\tstate.NextWord()\n\t\t\tif state.CurrWord == len(state.Words) {\n\t\t\t\tstate.End()\n\t\t\t}\n\t\t\tg.Update(inputWi.ChangeText(\"\"))\n\t\t}\n\n\t\treturn false\n\t})\n\n\tvar wis []gocui.Manager\n\twis = append(wis, statsFrameWi)\n\tfor _, stat := range statWis {\n\t\twis = append(wis, stat)\n\t}\n\twis = append(wis, textFrameWi)\n\tfor _, text := range textWis {\n\t\twis = append(wis, text)\n\t}\n\twis = append(wis, inputWi)\n\n\tg.SetManager(wis...)\n\n\tif err := keybindings(g, CreateWelcome); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func (s *BasemumpsListener) ExitNew_(ctx *New_Context) {}",
"func newCreateProjectCmd() *cobra.Command {\n\tvar (\n\t\toptions core.CreateProjectOptions\n\t)\n\n\tcreateProjectCmd := cobra.Command{\n\t\tUse: \"project NAME\",\n\t\tShort: `Create a new verless project`,\n\t\tArgs: cobra.ExactArgs(1),\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tpath := args[0]\n\t\t\treturn core.CreateProject(path, options)\n\t\t},\n\t}\n\n\tcreateProjectCmd.Flags().BoolVar(&options.Overwrite, \"overwrite\",\n\t\tfalse, `overwrite the directory if it already exists`)\n\n\treturn &createProjectCmd\n}",
"func NewForm1(owner vcl.IComponent) (root *TForm1) {\n vcl.CreateResForm(owner, &root)\n return\n}",
"func (b *BoardInfo) NewGame() error {\n\tsg := SnakeGame{\n\t\tBoardInfo: *b,\n\t\tCurrentRound: 1,\n\t\tScore: 0,\n\t\tGameOver: false,\n\t}\n\n\t// Set food position\n\tsg.SetFood(sg.BoardInfo.FoodPos)\n\n\t// Set snake head position\n\tsg.SetSnake()\n\n\tif err := sg.SetGameState(); err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Printf(\"Created new cli-snake game with board size of %v rows and %v colums.\\n\", b.Size.Rows, b.Size.Columns)\n\n\tsg.RenderBoard()\n\n\treturn nil\n}",
"func OpenDialog(triggerID string) error {\n\ttoken := os.Getenv(\"SLACK_BOT_USER_OAUTH_ACCESS_TOKEN\")\n\n\td, err := getRepoCreateDialog()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"[ERROR] %s\", err.Error())\n\t}\n\n\tdString, err := json.Marshal(&d)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"[ERROR] %s\", err.Error())\n\t}\n\n\todr := &OpenDialogRequest{\n\t\tTriggerID: triggerID,\n\t\tDialog: string(dString),\n\t}\n\n\tpayload, err := json.Marshal(odr)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"[ERROR] %s\", err.Error())\n\t}\n\n\turl := \"https://slack.com/api/dialog.open\"\n\treq, err := http.NewRequest(\"POST\", url, strings.NewReader(string(payload)))\n\tif err != nil {\n\t\treturn fmt.Errorf(\"[ERROR] %s\", err.Error())\n\t}\n\treq.Header.Add(\"Content-Type\", \"application/json; charset=utf-8\")\n\treq.Header.Add(\"Authorization\", fmt.Sprintf(\"Bearer %s\", token))\n\n\tclient := http.Client{}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"[ERROR] %s\", err.Error())\n\t}\n\tdefer resp.Body.Close()\n\n\tb, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"[ERROR] %s\", err.Error())\n\t}\n\n\thasErrors, errMsg, err := ContainsErrors(string(b))\n\tif err != nil {\n\t\treturn fmt.Errorf(\"[ERROR] %s\", err.Error())\n\t}\n\tif hasErrors {\n\t\treturn fmt.Errorf(errMsg)\n\t}\n\treturn nil\n}",
"func NewContext(window interfaces.Window) *Context {\n\tsett := settings.GetSettings()\n\timgui.SetAssertHandler(nil)\n\tcontext := &Context{\n\t\timguiContext: imgui.CreateContext(nil),\n\t\twindow: window,\n\n\t\tisFrame: false,\n\n\t\tviewControls: dialogs.NewViewControls(),\n\t\tviewModels: dialogs.NewViewModels(),\n\t\tviewOptions: dialogs.NewViewOptions(),\n\n\t\tcomponentLog: components.NewComponentLog(),\n\t\tcomponentIDE: components.NewComponentIDE(),\n\t\tcomponentImport: components.NewComponentImport(),\n\t\tcomponentExport: components.NewComponentExport(),\n\t\tcomponentFileSaver: components.NewComponentFileSaver(),\n\t\tcomponentShadertoy: components.NewComponentShadertoy(window),\n\t}\n\n\tcontext.GuiVars.showModels = true\n\tcontext.GuiVars.showControls = true\n\tcontext.GuiVars.showOptions = false\n\n\tcontext.GuiVars.showLog = sett.App.ShowLog\n\n\tcontext.GuiVars.showOpenDialog = false\n\tcontext.GuiVars.showSaveDialog = false\n\n\tcontext.GuiVars.showDemoWindow = false\n\tcontext.GuiVars.showAboutImGui = false\n\tcontext.GuiVars.showAboutKuplung = false\n\tcontext.GuiVars.showMetrics = false\n\n\tcontext.GuiVars.showParsing = false\n\n\tcontext.GuiVars.ParsingPercentage = 0.0\n\n\tcontext.GuiVars.showImageSave = false\n\tcontext.GuiVars.showRendererUI = false\n\n\tcontext.GuiVars.showImporterFile = false\n\tcontext.GuiVars.showExporterFile = false\n\tcontext.GuiVars.dialogImportType = types.ImportExportFormatUNDEFINED\n\tcontext.GuiVars.dialogExportType = types.ImportExportFormatUNDEFINED\n\n\tcontext.GuiVars.showKuplungIDE = false\n\tcontext.GuiVars.showScreenshotWindow = false\n\tcontext.GuiVars.showSceneStats = false\n\tcontext.GuiVars.showSVS = false\n\tcontext.GuiVars.showShadertoy = false\n\n\tcontext.GuiVars.recentFiles = nil\n\tcontext.GuiVars.recentFilesImported = nil\n\tcontext.GuiVars.showRecentFileImportedDoesntExists = false\n\n\terr := context.createDeviceObjects()\n\tif err != nil {\n\t\tcontext.Destroy()\n\t\tcontext = nil\n\t\tsettings.LogError(\"[gui context] Error initialized ImGui Context: %v\", err)\n\t}\n\n\tcontext.setKeyMapping()\n\n\ttrigger.On(types.ActionParsingShow, func() {\n\t\tcontext.GuiVars.showParsing = true\n\t})\n\ttrigger.On(types.ActionParsingHide, func() {\n\t\tcontext.GuiVars.showParsing = false\n\t})\n\n\ttrigger.On(types.ActionFileImportAddToRecentFiles, context.recentFilesAddImported)\n\n\treturn context\n}",
"func CreateUnknownMenu() {\n\tFirstOrCreateMenu(&UnknownMenu)\n}",
"func NewResultWin(c Color) Result {\n\tif c == White {\n\t\treturn WhiteWin\n\t} else if c == Black {\n\t\treturn BlackWin\n\t}\n\treturn Draw\n}",
"func newScene(win wde.Window, e *Engine) *scene {\n\ts := &scene{\n\t\twin: win,\n\t\te: e,\n\t}\n\n\treturn s\n}",
"func (d MessagesDialogsSlice) construct() MessagesDialogsClass { return &d }",
"func NewGame(game Game_Detail) bool {\n\torm := get_DBFront()\n\terr := orm.SetTable(\"game\").Save(&game)\n\tif !check_err(err) {\n\t\tLog(Log_Struct{\"error\", \"DB_Error_Line_423\", err})\n\t\treturn false\n\t}\n\treturn true\n}",
"func NewUi(w *app.Window) *Ui {\n\tu := Ui{\n\t\tw: w,\n\t\tth: material.NewTheme(gofont.Collection()),\n\t\tga: engine.NewGame(),\n\t}\n\tu.th.TextSize = unit.Dp(topMenuPx / 5)\n\tu.ga.ScaleOffset(WidthPx)\n\tu.nameEditor = &widget.Editor{\n\t\tSingleLine: true,\n\t\tSubmit: true,\n\t}\n\tu.menuBtn.pressed = true\n\tu.titleScreen = true\n\treturn &u\n}",
"func newCard() string {\n\treturn \"Five of Diamonds\"\n}",
"func newCard() string {\n\treturn \"Five of Diamonds\"\n}",
"func NewCheckmate(winner Colour) Outcome { return Outcome{Winner: winner, Reason: checkmate} }",
"func ShowDevelopingEvent(s *discordgo.Session, m *discordgo.MessageCreate, channel string, newEvent developingEvent) {\n\tmessage := \"\"\n\n\t// Get channel\n\tc, err := s.Channel(channel)\n\tif err != nil {\n\t\ts.ChannelMessageSend(channel, \"EventsBot had trouble obtaining the channel information :no_mouth:\")\n\t\treturn\n\t}\n\n\t// Get guild variables\n\tgv, ok := guildVarsMap[c.GuildID]\n\tif !ok {\n\t\ts.ChannelMessageSend(channel, \"EventsBot had trouble obtaining the guild information :no_mouth:\")\n\t\treturn\n\t}\n\n\t// Get time zone\n\ttzInfo := \"\"\n\teventLocation := defaultLocation\n\n\tif newEvent.Event.TimeZone != \"\" {\n\t\ttz, ok := gv.tzByAbbr[newEvent.Event.TimeZone]\n\t\tif !ok {\n\t\t\ts.ChannelMessageSend(channel, \"EventsBot had trouble interpreting the time zone information of this event. Are we anywhere near a worm hole perhaps? :no_mouth:\")\n\t\t\treturn\n\t\t}\n\t\ttzInfo = tz.Abbrev\n\t\teventLocation, _ = time.LoadLocation(tz.Location)\n\t} else {\n\t\tif len(gv.timezones) == 1 {\n\t\t\ttz := gv.timezones[0]\n\t\t\ttzInfo = tz.Abbrev\n\t\t\tnewEvent.Event.TimeZone = tz.Abbrev\n\t\t\teventLocation, _ = time.LoadLocation(tz.Location)\n\t\t}\n\t}\n\n\t// Construct message\n\tmessage = \"NEW EVENT\"\n\tmessage = fmt.Sprintf(\"%s\\r\\n**Creator:** %s\", message, newEvent.Event.Creator.Mention())\n\tmessage = fmt.Sprintf(\"%s\\r\\n**Name:** %s\", message, newEvent.Event.Name)\n\tif newEvent.State >= stateNew {\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n**Date:** %s\", message, newEvent.Event.DateTime.In(eventLocation).Format(\"Mon 2 Jan 2006\"))\n\t}\n\tif newEvent.State >= stateTime {\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n**Time:** %s\", message, newEvent.Event.DateTime.In(eventLocation).Format(\"15:04\"))\n\t}\n\tif newEvent.State > stateTimeZone { // note that this is >, not >= as we want to display the time zone only after it has been selected\n\t\tif newEvent.Event.TimeZone != \"\" {\n\t\t\tmessage = fmt.Sprintf(\"%s (%s)\", message, tzInfo)\n\t\t}\n\t}\n\tif newEvent.State >= stateDuration {\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n**Duration:** %d\", message, newEvent.Event.Duration)\n\t}\n\tif newEvent.State >= stateTeamSize {\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n**Team Size:** %d\", message, newEvent.Event.TeamSize)\n\t}\n\tif newEvent.State >= stateDone {\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n\\r\\nDoes the above appear correct?\", message)\n\t}\n\n\t// Add appliccable reaction legend\n\tswitch newEvent.State {\n\tcase stateNew:\n\t\tfallthrough\n\tcase stateDate:\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n\\r\\n⏫ = Increase date by 1 month\", message)\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n🔼 = Increase date by 1 day\", message)\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n🔽 = Decrease date by 1 day\", message)\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n⏬ = Decrease date by 1 month\", message)\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n👍 = Continue\", message)\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n❌ = Cancel\", message)\n\tcase stateTime:\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n\\r\\n⏪ = Decrease time by 1 hour\", message)\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n◀ = Decrease time by 10 minutes\", message)\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n▶ = Increase time by 10 minutes\", message)\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n⏩ = Increase time by 1 hour\", message)\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n👍 = Continue\", message)\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n❌ = Cancel\", message)\n\tcase stateTimeZone:\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n\\r\\n Specify time zone\", message)\n\t\tmessage = 
fmt.Sprintf(\"%s\\r\\n❌ = Cancel\", message)\n\tcase stateDuration:\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n\\r\\n :one: - :nine: Specify duration (in hours)\", message)\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n❌ = Cancel\", message)\n\tcase stateTeamSize:\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n\\r\\nSpecify team size:\", message)\n\t\tif newEvent.Event.TeamSize < 10 {\n\t\t\tmessage = fmt.Sprintf(\"%s\\r\\n\\r\\n :one: - :nine: = 1 - 9\", message)\n\t\t\tmessage = fmt.Sprintf(\"%s\\r\\n ▶ = More than 9\", message)\n\t\t} else {\n\t\t\tmessage = fmt.Sprintf(\"%s\\r\\n\\r\\n :zero: - :nine: = %d0 - %d9\", message, newEvent.Event.TeamSize/10, newEvent.Event.TeamSize/10)\n\t\t\tmessage = fmt.Sprintf(\"%s\\r\\n ◀ = Less than %d0\", message, newEvent.Event.TeamSize/10)\n\t\t\tmessage = fmt.Sprintf(\"%s\\r\\n ▶ = More than %d9\", message, newEvent.Event.TeamSize/10)\n\t\t}\n\t\t//message = fmt.Sprintf(\"%s\\r\\n👍 = Continue\", message)\n\t\tmessage = fmt.Sprintf(\"%s\\r\\n❌ = Cancel\", message)\n\tcase stateDone:\n\t\tEditEvent(s, m, channel, newEvent.MessageID, \"\")\n\t\treturn\n\t\t// message = fmt.Sprintf(\"%s\\r\\n✅ = OK\", message)\n\t\t// message = fmt.Sprintf(\"%s\\r\\n❌ = Cancel\", message)\n\t\t// message = fmt.Sprintf(\"%s\\r\\n🗓 = Back to Date\", message)\n\t\t// message = fmt.Sprintf(\"%s\\r\\n🕑 = Back to Time\", message)\n\t\t// message = fmt.Sprintf(\"%s\\r\\n🌍 = Back to Time Zone\", message)\n\t\t// message = fmt.Sprintf(\"%s\\r\\n⏳ = Back to Duration\", message)\n\t\t// message = fmt.Sprintf(\"%s\\r\\n👬 = Back to Team Size\", message)\n\tdefault:\n\t}\n\n\t// Post or update message\n\tif newEvent.State == stateNew {\n\t\tnewMsg, _ := s.ChannelMessageSend(channel, message)\n\t\tnewEvent.MessageID = newMsg.ID\n\t\tgv.escrowEvents[newMsg.ID] = newEvent\n\t} else {\n\t\ts.ChannelMessageEdit(channel, newEvent.MessageID, \"\")\n\t\ts.ChannelMessageEdit(channel, newEvent.MessageID, message)\n\t}\n\n\t// Add appliccable reactions\n\ts.MessageReactionsRemoveAll(channel, newEvent.MessageID)\n\tswitch newEvent.State {\n\tcase stateNew:\n\t\tfallthrough\n\tcase stateDate:\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"⏫\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"🔼\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"🔽\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"⏬\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"👍\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"❌\")\n\tcase stateTime:\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"⏪\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"◀\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"▶\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"⏩\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"👍\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"❌\")\n\tcase stateTimeZone:\n\t\tfor emoji := range gv.tzByEmoji {\n\t\t\ts.MessageReactionAdd(channel, newEvent.MessageID, emoji)\n\t\t}\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"❌\")\n\tcase stateDuration:\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiOne)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiTwo)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiThree)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiFour)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiFive)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiSix)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, 
EmojiSeven)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiEight)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiNine)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"❌\")\n\tcase stateTeamSize:\n\t\tif newEvent.Event.TeamSize > 9 {\n\t\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"◀\")\n\t\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiZero)\n\t\t}\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiOne)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiTwo)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiThree)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiFour)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiFive)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiSix)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiSeven)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiEight)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, EmojiNine)\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"▶\")\n\t\t//s.MessageReactionAdd(channel, newEvent.MessageID, \"👍\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"❌\")\n\tcase stateDone:\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"✅\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"❌\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"🗓\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"🕑\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"🌍\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"⏳\")\n\t\ts.MessageReactionAdd(channel, newEvent.MessageID, \"👬\")\n\tdefault:\n\t}\n}",
"func CreateGame(hand Hand, trumpCard Card, isOpponentMove bool) Game {\n\tif len(hand) != 6 {\n\t\tpanic(\"player's hand is not complete\")\n\t}\n\n\treturn Game{\n\t\ttrump: trumpCard.Suit,\n\t\tscore: 0,\n\t\topponentScore: 0,\n\t\thand: hand,\n\t\tknownOpponentCards: NewHand(),\n\t\tseenCards: NewPile(),\n\t\tunseenCards: getHiddenCards(hand, trumpCard),\n\t\ttrumpCard: &trumpCard,\n\t\tcardPlayed: nil,\n\t\tisOpponentMove: isOpponentMove,\n\t\tisClosed: false,\n\t\tagent: dummyAgent{},\n\t}\n}",
"func execNew(_ int, p *gop.Context) {\n\targs := p.GetArgs(1)\n\tret := template.New(args[0].(string))\n\tp.Ret(1, ret)\n}",
"func newRoutine(wg *sync.WaitGroup) {\n\tfmt.Println(\"New Routine\")\n\twg.Done()\n}",
"func newCreateFile() *cobra.Command {\n\tvar (\n\t\toptions core.CreateFileOptions\n\t)\n\tcreateFileCmd := cobra.Command{\n\t\tUse: \"file NAME\",\n\t\tShort: `Create a new content file`,\n\t\tArgs: cobra.ExactArgs(1),\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tpath := args[0]\n\t\t\treturn core.CreateFile(path, options)\n\t\t},\n\t}\n\n\tcreateFileCmd.Flags().StringVarP(&options.Project, \"project\", \"p\", \".\", `project path to create file in.`)\n\n\treturn &createFileCmd\n}",
"func (sd *SimpleDialog) Custom(owner walk.Form, widget Widget) (accepted bool, err error) {\n\tvar (\n\t\tdlg *walk.Dialog\n\t)\n\n\tif _, err := (Dialog{\n\t\tAssignTo: &dlg,\n\t\tLayout: VBox{Margins: Margins{}},\n\t\tChildren: []Widget{\n\t\t\twidget,\n\t\t\tComposite{\n\t\t\t\tLayout: HBox{Margins: Margins{}},\n\t\t\t\tChildren: []Widget{\n\t\t\t\t\tPushButton{\n\t\t\t\t\t\tText: i18n.Tr(\"widget.button.ok\"),\n\t\t\t\t\t\tOnClicked: func() {\n\t\t\t\t\t\t\t// some stuff here...\n\t\t\t\t\t\t\tdlg.Close(0)\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tPushButton{\n\t\t\t\t\t\tText: i18n.Tr(\"widget.button.cancel\"),\n\t\t\t\t\t\tOnClicked: func() {\n\t\t\t\t\t\t\tdlg.Close(0)\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTitle: sd.Title,\n\t\tSize: sd.Size,\n\t\tFixedSize: sd.FixedSize,\n\t}).Run(owner); err != nil {\n\t\treturn false, err\n\t}\n\n\treturn\n}",
"func newView() tcell.Screen {\n\ttcell.SetEncodingFallback(tcell.EncodingFallbackASCII)\n\ts, e := tcell.NewScreen()\n\tif e != nil {\n\t\tfmt.Fprintf(os.Stderr, \"%v\\n\", e)\n\t\tos.Exit(1)\n\t}\n\tif e = s.Init(); e != nil {\n\t\tfmt.Fprintf(os.Stderr, \"%v\\n\", e)\n\t\tos.Exit(1)\n\t}\n\ts.Clear()\n\twindowWidth, windowHeight = s.Size()\n\treturn s\n}",
"func BuildModal(p tview.Primitive, width, height int) tview.Primitive {\n\tcpnt := tview.NewFlex().SetDirection(tview.FlexRow).\n\t\tAddItem(nil, 0, 1, false).\n\t\tAddItem(p, height, 1, false).\n\t\tAddItem(nil, 0, 1, false)\n\n\treturn tview.NewFlex().\n\t\tAddItem(nil, 0, 1, false).\n\t\tAddItem(cpnt, width, 1, false).\n\t\tAddItem(nil, 0, 1, false)\n}",
"func (uc *UserCreate) SetDialog(d *Dialog) *UserCreate {\n\treturn uc.SetDialogID(d.ID)\n}",
"func New()(*Window,error){\n\n\n/*8:*/\n\n\n//line goacme.w:147\n\n{\nvar err error\nonce.Do(func(){fsys,err= client.MountService(\"acme\")})\nif err!=nil{\nreturn nil,err\n}\n}\n\n\n\n/*:8*/\n\n\n//line goacme.w:160\n\nf,err:=fsys.Open(\"new/ctl\",plan9.OREAD)\nif err!=nil{\nreturn nil,err\n}\ndefer f.Close()\nvar id int\nif _,err:=fmt.Fscan(f,&id);err!=nil{\nreturn nil,err\n}\nreturn Open(id)\n}",
"func (ptr *Application) onClickMenuAboutNotepad() {\n\tptr.setCurrentMsgBox(MsgBoxAboutNotepad)\n}",
"func NewFileOpen(callback func(fyne.URIReadCloser, error), parent fyne.Window) *FileDialog {\n\tdialog := &FileDialog{callback: callback, parent: parent}\n\treturn dialog\n}",
"func (ptr *Application) onClickMenuFileNew() {\n\n\t// reset the text editor\n\tptr.textEditor.SetText(\"\")\n}"
] | [
"0.6481888",
"0.63578427",
"0.62546325",
"0.6170362",
"0.61563224",
"0.60556924",
"0.5908183",
"0.58998257",
"0.56593597",
"0.5580943",
"0.547547",
"0.5463997",
"0.54177654",
"0.53838277",
"0.53588384",
"0.53536123",
"0.5321696",
"0.52956635",
"0.5292912",
"0.5272295",
"0.5265524",
"0.52388394",
"0.5226447",
"0.52215266",
"0.52052855",
"0.51588076",
"0.51588076",
"0.5150056",
"0.50535655",
"0.50245374",
"0.50193185",
"0.4980593",
"0.49554208",
"0.4930668",
"0.4929701",
"0.49175856",
"0.49119443",
"0.48978937",
"0.48609954",
"0.48494726",
"0.48361838",
"0.48330277",
"0.48290676",
"0.4818578",
"0.47811428",
"0.474087",
"0.47262844",
"0.47181094",
"0.47128645",
"0.4687326",
"0.46811327",
"0.46749482",
"0.46549034",
"0.46525994",
"0.4634257",
"0.46294153",
"0.46248382",
"0.46098003",
"0.460313",
"0.46018943",
"0.46015787",
"0.45804814",
"0.45762604",
"0.45651236",
"0.45549035",
"0.45527524",
"0.4543616",
"0.4520808",
"0.4517616",
"0.45117363",
"0.4511285",
"0.45098603",
"0.4507333",
"0.45061427",
"0.44899186",
"0.44887236",
"0.44884062",
"0.4480568",
"0.44679093",
"0.44539168",
"0.44511002",
"0.44494197",
"0.44465923",
"0.4442768",
"0.44401023",
"0.44401023",
"0.44394824",
"0.44370937",
"0.44301403",
"0.44247487",
"0.44184342",
"0.4417937",
"0.4413831",
"0.4410543",
"0.43921256",
"0.43883187",
"0.43839177",
"0.43749085",
"0.43703318",
"0.43665704"
] | 0.7018586 | 0 |
NewLocalHashMapDBMgr instantiates a new local LRU memmgr. | func NewLocalHashMapDBMgr(pfx string) *LocalHashMapDBMgr {
return &LocalHashMapDBMgr{memMap: make(map[common.Key]interface{}),
policy: common.DBMgrPolicyLocalMap, prefix: pfx, base: Base{common.Key{BPTkey: ""}, 0}}
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func NewLocalManager(ctx context.Context, root string) *LocalManager {\n\treturn &LocalManager{path: root}\n}",
"func (m *LocalManager) New(ctx context.Context, id string) (linker.Storage, error) {\n\tdb, err := NewLocalStorage(ctx, fmt.Sprintf(\"%s/db-%s\", m.path, id))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn db, nil\n}",
"func newHLLCache(size int, wsize int, db *RockDB) (*hllCache, error) {\n\tc := &hllCache{\n\t\tdb: db,\n\t}\n\tvar err error\n\tc.readCache, err = lru.NewWithEvict(size, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tc.dirtyWriteCache, err = lru.NewWithEvict(wsize, c.onEvicted)\n\treturn c, err\n}",
"func newTableManager() (*tableManager, error) {\n\tvar tb *tableManager = new(tableManager)\n\tif tb == nil {\n\t\treturn nil, errors.New(\"Could not create the TableManager\")\n\t}\n\ttb.data = make(map[uint64]*tableEntry)\n\tif tb.data == nil {\n\t\treturn nil, errors.New(\"Could not allocate the map for table.\")\n\t}\n\ttb.dirtyList = make([]uint64, 0, tableSize)\n\tif tb.dirtyList == nil {\n\t\treturn nil, errors.New(\"Could not allocate dirtyList.\")\n\t}\n\ttb.lruHead = new(tableEntry)\n\ttb.lruTail = new(tableEntry)\n\tif tb.lruHead == nil || tb.lruTail == nil {\n\t\treturn nil, errors.New(\"Could not allocate LRU Head/Tail.\")\n\t}\n\n\ttb.lruHead.lruNext = tb.lruTail\n\ttb.lruTail.lruPrev = tb.lruHead\n\treturn tb, nil\n}",
"func CreateNewLocalSecretManager() *LocalSecretManager {\n\treturn &LocalSecretManager{}\n}",
"func NewManager(redisHosts []string, redisPw string, deadChan chan<- string, commitChan chan<- int64) *Mgr {\n\tm := &Mgr{workers: NewMap(), deadChan: deadChan, commitChan: commitChan}\n\tdeadWorkers, err := NewCache(redisHosts, redisPw, \"deadWs24\", 100000)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tm.deadWorkers = deadWorkers\n\tgo m.doPing()\n\treturn m\n}",
"func NewLocal(label string) *Local {\n\treturn &Local{Root: configdir.LocalCache(label)}\n}",
"func NewLocalFS(m map[string]struct{}, basePath string) LocalFS {\n\tpfs := make(map[string]*LocalFile, len(m))\n\tfor k := range m {\n\t\tpfs[k] = &LocalFile{\n\t\t\tpath: k,\n\t\t}\n\t}\n\n\treturn LocalFS{\n\t\tm: pfs,\n\t\tbasePath: basePath,\n\t}\n}",
"func NewLocalLedger() *LocalLedger {\n\tlog.Printf(\"BLURB-LEDGER: Initializing\")\n\treturn &LocalLedger{\n\t\tledger: sync.Map{},\n\t\tbidCounter: 0,\n\t\tbidMutex: sync.Mutex{},\n\n\t\tfeeds: struct {\n\t\t\tcache sync.Map\n\t\t\tlength int\n\t\t\tblurbsPerUser int\n\t\t}{\n\t\t\tcache: sync.Map{},\n\t\t\tlength: 100,\n\t\t\tblurbsPerUser: 10,\n\t\t},\n\t}\n}",
"func NewLocalStore() *LocalStore {\n\treturn &LocalStore{\n\t\tstore: make(map[string]string),\n\t\tlock: &sync.RWMutex{},\n\t}\n}",
"func New(options ...LocalCacheOption) *LocalCache {\n\tc := LocalCache{\n\t\tData: make(map[interface{}]*Value),\n\t\tLRU: NewLRUQueue(),\n\t}\n\tc.Sweeper = async.NewInterval(c.Sweep, 500*time.Millisecond)\n\tfor _, opt := range options {\n\t\topt(&c)\n\t}\n\treturn &c\n}",
"func NewManager(partitioner Partitioner, serviceName, dataDir, myEntryId string) *Manager {\n manager := &Manager{\n table : nil,\n connections : make(map[string]client.Client),\n DataDir : dataDir,\n ServiceName : serviceName,\n MyEntryId : myEntryId,\n }\n //attempt to load from disk\n err := manager.load()\n if err != nil {\n log.Println(err)\n }\n return manager\n}",
"func (cd *Codec) UseLocalCache(maxLen int, expiration time.Duration) {\n\tcd.localCache = lrucache.New(maxLen, expiration)\n}",
"func newLocalStore(address net.Address) (Store, error) {\n\tctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)\n\tdefer cancel()\n\tsession, err := primitive.NewSession(ctx, primitive.Partition{ID: 1, Address: address})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tupdatesName := primitive.Name{\n\t\tNamespace: \"local\",\n\t\tName: primitiveName,\n\t}\n\tupdates, err := _map.New(context.Background(), updatesName, []*primitive.Session{session})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &atomixStore{\n\t\tupdates: updates,\n\t}, nil\n}",
"func NewLocalRegistry(artHome string) *LocalRegistry {\n\tr := &LocalRegistry{\n\t\tRepositories: []*Repository{},\n\t\tArtHome: artHome,\n\t}\n\t// load local registry\n\tr.Load()\n\treturn r\n}",
"func NewManager(local *peer.Local, f LoadMessageFunc, log *logger.Logger) *Manager {\n\tm := &Manager{\n\t\tlocal: local,\n\t\tloadMessageFunc: f,\n\t\tlog: log,\n\t\tevents: Events{\n\t\t\tMessageReceived: events.NewEvent(messageReceived),\n\t\t},\n\t\tneighborsEvents: map[NeighborsGroup]NeighborsEvents{\n\t\t\tNeighborsGroupAuto: NewNeighborsEvents(),\n\t\t\tNeighborsGroupManual: NewNeighborsEvents(),\n\t\t},\n\t\tneighbors: map[identity.ID]*Neighbor{},\n\t\tserver: nil,\n\t}\n\n\tm.messageWorkerPool = workerpool.New(func(task workerpool.Task) {\n\t\tm.processPacketMessage(task.Param(0).([]byte), task.Param(1).(*Neighbor))\n\n\t\ttask.Return(nil)\n\t}, workerpool.WorkerCount(messageWorkerCount), workerpool.QueueSize(messageWorkerQueueSize))\n\n\tm.messageRequestWorkerPool = workerpool.New(func(task workerpool.Task) {\n\t\tm.processMessageRequest(task.Param(0).([]byte), task.Param(1).(*Neighbor))\n\n\t\ttask.Return(nil)\n\t}, workerpool.WorkerCount(messageRequestWorkerCount), workerpool.QueueSize(messageRequestWorkerQueueSize))\n\n\treturn m\n}",
"func NewLevelDbManager(dbRoot string) Manager {\n\treturn levelDbManager(dbRoot)\n}",
"func New(bufferPoolSize int, views map[string]View) *Manager {\n\tif bufferPoolSize <= 0 {\n\t\tpanic(ErrBufferPoolSizeInvalid)\n\t}\n\tm := &Manager{\n\t\tm: &sync.RWMutex{},\n\t\tbuffers: newBufferPool(bufferPoolSize),\n\t\tviews: make(map[string]View),\n\t}\n\tif views != nil {\n\t\tfor name, v := range views {\n\t\t\tif v == nil {\n\t\t\t\tpanic(fmt.Errorf(\"view: View \\\"%s\\\" is nil\", name))\n\t\t\t}\n\t\t\tm.views[name] = v\n\t\t}\n\t}\n\treturn m\n}",
"func New(dir, name string) (mp *MapDB, err error) {\n\tvar m MapDB\n\t// Initialize map\n\tm.m = make(map[string]string)\n\n\t// Encryption middleware\n\tcmw := middleware.NewCryptyMW([]byte(\" encryption key \"), make([]byte, 16))\n\tif cmw == nil {\n\n\t}\n\n\t// Create a new instance of mrT\n\tif m.mrT, err = mrT.New(dir, name); err != nil {\n\t\treturn\n\t}\n\n\tif err = m.mrT.ForEach(m.load); err != nil {\n\t\treturn\n\t}\n\n\t// Assign pointer to our MapDB\n\tmp = &m\n\treturn\n}",
"func New(capacity int64) chainstore.Store {\n\tmemStore := &memStore{\n\t\tdata: make(map[string][]byte, 1000),\n\t}\n\tstore := lrumgr.New(capacity, memStore)\n\treturn store\n}",
"func NewLocalPool(sizePerProc int, newFn func() interface{}, resetFn func(obj interface{})) *LocalPool {\n\tslots := make([]*slot, runtime.GOMAXPROCS(0))\n\tfor i := 0; i < len(slots); i++ {\n\t\tslots[i] = &slot{\n\t\t\tobjs: make([]interface{}, 0, sizePerProc),\n\t\t}\n\t}\n\treturn &LocalPool{\n\t\tsizePerProc: sizePerProc,\n\t\tslots: slots,\n\t\tnewFn: newFn,\n\t\tresetFn: resetFn,\n\t}\n}",
"func NewLRU(cap int) kv.Store {\n\treturn newStore(newLRUStore(cap))\n}",
"func NewLRUMap() *LRUMap {\n\treturn &LRUMap{\n\t\tdict: make(map[int]*keyValNode),\n\t\tfront: nil,\n\t\trear: nil,\n\t\tlen: 0,\n\t}\n}",
"func New(cf Config) cache.Cache {\n\treturn &localCache{\n\t\tcacheEngine: freecache.NewCache(cf.Size),\n\t\tcf: cf,\n\t}\n}",
"func newRouteManager(wg *sync.WaitGroup, logRouteChanges bool, syncPeriod time.Duration) *routeManager {\n\treturn &routeManager{\n\t\tlogRouteChanges: logRouteChanges,\n\t\tsyncPeriod: syncPeriod,\n\t\tstore: make(map[string]routesPerLink),\n\t\taddRouteCh: make(chan routesPerLink, 5),\n\t\tdelRouteCh: make(chan routesPerLink, 5),\n\t\twg: wg,\n\t}\n}",
"func NewLocalNode(db *DB, key *ecdsa.PrivateKey) *LocalNode {\n\tln := &LocalNode{\n\t\tid: PubkeyToIDV4(&key.PublicKey),\n\t\tdb: db,\n\t\tkey: key,\n\t\tentries: make(map[string]qnr.Entry),\n\t\tendpoint4: lnEndpoint{\n\t\t\ttrack: netutil.NewIPTracker(iptrackWindow, iptrackContactWindow, iptrackMinStatements),\n\t\t},\n\t\tendpoint6: lnEndpoint{\n\t\t\ttrack: netutil.NewIPTracker(iptrackWindow, iptrackContactWindow, iptrackMinStatements),\n\t\t},\n\t}\n\tln.seq = db.localSeq(ln.id)\n\tln.invalidate()\n\treturn ln\n}",
"func NewLocalStore(meta Container, storage LocalStorage, pd Prophet) LocalStore {\n\treturn &defaultLocalStore{\n\t\tmeta: meta,\n\t\tdb: &defaultLocalDB{storage: storage},\n\t\tpd: pd,\n\t}\n}",
"func NewMgr(dbPath string, ccInfoProvider ledger.DeployedChaincodeInfoProvider) (*Mgr, error) {\n\tp, err := newDBProvider(dbPath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &Mgr{ccInfoProvider, p}, nil\n}",
"func NewLocalAuthManager() AuthManager {\n\tcache := NewLocalSessionCache()\n\treturn AuthManager{\n\t\tMode: AuthManagerModeJWT,\n\t\tCookieSecure: DefaultCookieSecure,\n\t\tCookieHTTPOnly: DefaultCookieHTTPOnly,\n\t\tCookieSameSite: DefaultCookieSameSite,\n\t\tPersistHandler: cache.PersistHandler,\n\t\tFetchHandler: cache.FetchHandler,\n\t\tRemoveHandler: cache.RemoveHandler,\n\t}\n}",
"func NewHashMap(hashFn func() hash.Hash64, size uint64) *HashMap {\n\thm := &HashMap{\n\t\tnodes: make([]*node, size),\n\t\thasher: sync.Pool{\n\t\t\tNew:func() interface{} {\n\t\t\t\treturn hashFn()\n\t\t\t},\n\t\t},\n\n\t\tsize: size,\n\t}\n\tfor i := range hm.nodes {\n\t\thm.nodes[i] = &node{}\n\t}\n\treturn hm\n}",
"func NewLocalDatabaseProvider(name string, type_ string) *LocalDatabaseProvider {\n\tthis := LocalDatabaseProvider{}\n\tthis.Name = name\n\tthis.Type = type_\n\tvar deviceLimitPerUser int32 = 100\n\tthis.DeviceLimitPerUser = &deviceLimitPerUser\n\tvar adminProvider bool = false\n\tthis.AdminProvider = &adminProvider\n\tvar inactivityTimeoutMinutes int32 = 0\n\tthis.InactivityTimeoutMinutes = &inactivityTimeoutMinutes\n\tvar networkInactivityTimeoutEnabled bool = false\n\tthis.NetworkInactivityTimeoutEnabled = &networkInactivityTimeoutEnabled\n\tvar blockLocalDnsRequests bool = false\n\tthis.BlockLocalDnsRequests = &blockLocalDnsRequests\n\tvar userLockoutThreshold int32 = 5\n\tthis.UserLockoutThreshold = &userLockoutThreshold\n\tvar userLockoutDurationMinutes int32 = 1\n\tthis.UserLockoutDurationMinutes = &userLockoutDurationMinutes\n\tvar minPasswordLength int32 = 0\n\tthis.MinPasswordLength = &minPasswordLength\n\treturn &this\n}",
"func NewLocalStore() (Store, error) {\n\t_, address := utils.StartLocalNode()\n\treturn newLocalStore(address)\n}",
"func createCache(cacheSize int) (glc glcache.Cache) {\n\tconf := glcache.Config{\n\t\tMaxSize: defaultCacheSize,\n\t\tEnableLRU: true,\n\t}\n\n\tif cacheSize > 0 {\n\t\tconf.MaxSize = uint(cacheSize)\n\t}\n\n\treturn glcache.New(conf)\n}",
"func NewManager() Manager {\n\treturn &manager{\n\t\tcontexts: make(map[uint64]*context),\n\t\tglobal: make(Map),\n\t}\n}",
"func NewLocalRedis(p *providers.Provider) (providers.ClowderProvider, error) {\n\tconfig := config.InMemoryDBConfig{}\n\n\tredisProvider := localRedis{Provider: *p, Config: config}\n\n\treturn &redisProvider, nil\n}",
"func NewManager(rawurl string, opts ...Option) (Manager, error) {\n\turl, err := url.Parse(rawurl)\n\tif err != nil {\n\t\treturn nil, ErrInvalidURL\n\t}\n\n\tcache, _ := NewLRUCache(_defaultCacheSize)\n\n\tlogger := zerolog.\n\t\tNew(os.Stderr).With().\n\t\tLogger().\n\t\tLevel(zerolog.Disabled)\n\n\tmng := &manager{\n\t\turl: url,\n\t\tcache: cache,\n\t\tclient: &http.Client{Timeout: _defaultTimeout},\n\t\tlookup: true,\n\t\tretries: _defaultRetries,\n\t\tlogger: logger,\n\t}\n\n\tfor _, opt := range opts {\n\t\topt(mng)\n\t}\n\n\treturn mng, nil\n}",
"func newCache(size int) *cache {\n\treturn &cache{0, make([]byte, size), make([]FileID, 0), make(map[FileID]location)}\n}",
"func initLMDB() {\n\tlmdb, err := db.MakeLMDBHandler(\"./\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\t// Add random proposal to the database\n\tid := uuid.NewV4()\n\tbeego.Info(\"Prop ID: \", id.String())\n\tprop := messages.Proposal{}\n\tprop.FillRandom()\n\tdata, _ := json.Marshal(prop)\n\tlmdb.Write(db.PROPOSALS, id.Bytes(), data)\n\t// Add random chat message to the database\n\tid = uuid.NewV4()\n\n\tchatb := messages.ChatBucket{}\n\tchatb.FillRandom(15)\n\tdata, _ = json.Marshal(chatb)\n\tlmdb.Write(db.CHAT, id.Bytes(), data)\n\n\tprev := id.String()\n\tid = uuid.NewV4()\n\tchatb.FillRandom(20)\n\tchatb.Previous = &prev\n\tdata, _ = json.Marshal(chatb)\n\tlmdb.Write(db.CHAT, id.Bytes(), data)\n\n\tbeego.Info(\"Chat Bucket ID: \", id.String())\n\t// Global database\n\tdb.DB = lmdb\n}",
"func NewLocal(l *util.StandardLogger, basePath string, maxSize int) (*Local, error) {\n\tp, err := filepath.Abs(basePath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Local{l, maxSize, p}, nil\n}",
"func New(cap int) (*HashMap, error) {\n\tif cap <= 0 {\n\t\treturn nil, errors.New(\"Capacity should be greater than 0\")\n\t}\n\n\treturn &HashMap{make([]*mapNode, cap), 0}, nil\n}",
"func New() (hm *HashMap) {\n\thm = new(HashMap)\n\thm.data = make([]*list.List, dataLen)\n\tfor i := uint32(0); i < dataLen; i++ {\n\t\thm.data[i] = list.New()\n\t}\n\treturn\n}",
"func newCache(conf configuration.Conf, fname string) *Cache {\n\tvar err error\n\n\tc := new(Cache)\n\tc.apiKey = conf.APISecretKey\n\tc.DB, err = gorm.Open(\"sqlite3\", fname)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\terr = c.DB.AutoMigrate(&quandl.Record{}, &ref{}).Error\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tc.ref = make(map[string]time.Time)\n\n\t// Restore saved refs if any from the db.\n\t// That will avoid unnecessary refresh, and identify all Tickers.\n\t// Extract tickers from config file as default tickers.\n\tc.restoreRefs(conf.Tickers()...)\n\n\treturn c\n}",
"func newLavaToolsCache(c *lava.Connection, retry *retry, opt Options) (obj *cache, err error) {\n\tobj = &cache{c: c,\n\t\tdeviceList: deviceListCache{DeviceList: []lava.DeviceList{}},\n\t\tdevices: map[string]deviceCache{},\n\t\tdeviceTags: map[string]tagsCache{},\n\t\tdeviceTypeTemplate: map[string]deviceTypeTemplateCache{},\n\t\tpollInterval: opt.PollInterval,\n\t\tinvalidTimeout: opt.InvalidTimeout,\n\t\tretry: retry,\n\t}\n\n\tif opt.BackgroundPrefetching {\n\t\tgo obj.updatePeriodic(opt.BackgroundInterval)\n\t}\n\n\treturn\n}",
"func NewMapLRU(capacity int) LRU {\n\tif capacity <= 0 {\n\t\tcapacity = 1\n\t}\n\n\treturn &mapLRU{\n\t\tcapacity: capacity,\n\t\tcache: make(map[string]*mapLRUItem, capacity),\n\t}\n}",
"func NewHashMap(blockSize int, fn ...HashFunc) (*HashMap, error) {\n\tif blockSize <= 0 {\n\t\treturn nil, errors.New(\"blockSize must be more than 0\")\n\t}\n\tf := hashFunc\n\tif len(fn) > 0 {\n\t\tf = fn[0]\n\t}\n\treturn &HashMap{\n\t\tBlockSize: blockSize,\n\t\tHashFunc: f,\n\t\tBuckets: make([]*Entry, blockSize),\n\t\tEntries: 0,\n\t}, nil\n}",
"func newDbCache(ldb *leveldb.DB, store *blockStore, maxSize uint64, flushIntervalSecs uint32) *dbCache {\n\treturn &dbCache{\n\t\tldb: ldb,\n\t\tstore: store,\n\t\tmaxSize: maxSize,\n\t\tflushInterval: time.Second * time.Duration(flushIntervalSecs),\n\t\tlastFlush: time.Now(),\n\t\tcachedKeys: treap.NewImmutable(),\n\t\tcachedRemove: treap.NewImmutable(),\n\t}\n}",
"func NewLocalAuthManagerFromCache(cache *LocalSessionCache) AuthManager {\n\treturn AuthManager{\n\t\tMode: AuthManagerModeLocal,\n\t\tCookieSecure: DefaultCookieSecure,\n\t\tCookieHTTPOnly: DefaultCookieHTTPOnly,\n\t\tCookieSameSite: DefaultCookieSameSite,\n\t\tPersistHandler: cache.PersistHandler,\n\t\tFetchHandler: cache.FetchHandler,\n\t\tRemoveHandler: cache.RemoveHandler,\n\t}\n}",
"func newLockMap() *lockMap {\n\treturn &lockMap{\n\t\tmutexMap: make(map[string]*sync.Mutex),\n\t}\n}",
"func NewLockBasedTxMgr(initializer *Initializer) (*LockBasedTxMgr, error) {\n\tif initializer.HashFunc == nil {\n\t\treturn nil, errors.New(\"create new lock based TxMgr failed: passed in nil ledger hasher\")\n\t}\n\n\tif err := initializer.DB.Open(); err != nil {\n\t\treturn nil, err\n\t}\n\ttxmgr := &LockBasedTxMgr{\n\t\tledgerid: initializer.LedgerID,\n\t\tdb: initializer.DB,\n\t\tstateListeners: initializer.StateListeners,\n\t\tccInfoProvider: initializer.CCInfoProvider,\n\t\thashFunc: initializer.HashFunc,\n\t}\n\tpvtstatePurgeMgr, err := pvtstatepurgemgmt.InstantiatePurgeMgr(\n\t\tinitializer.LedgerID,\n\t\tinitializer.DB,\n\t\tinitializer.BtlPolicy,\n\t\tinitializer.BookkeepingProvider)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ttxmgr.pvtdataPurgeMgr = &pvtdataPurgeMgr{pvtstatePurgeMgr, false}\n\ttxmgr.commitBatchPreparer = validation.NewCommitBatchPreparer(\n\t\ttxmgr,\n\t\tinitializer.DB,\n\t\tinitializer.CustomTxProcessors,\n\t\tinitializer.HashFunc)\n\treturn txmgr, nil\n}",
"func New(name string, MaxEntries int) *Cache {\n\treturn &Cache{\n\t\tlru: lru.New(MaxEntries),\n\t\tName: name,\n\t}\n}",
"func newlru(what string, maxItems int, new func(epoch uint64) interface{}) *lru {\n\tif maxItems <= 1 {\n\t\tmaxItems = 5\n\t}\n\tcache, _ := simplelru.NewLRU(maxItems, func(key, value interface{}) {\n\t\tlog.Trace(\"Evicted minerva \"+what, \"epoch\", key)\n\t})\n\treturn &lru{what: what, new: new, cache: cache}\n}",
"func New(maxEntries int) *Cache {\n\treturn &Cache{\n\t\tmaxEntries: maxEntries,\n\t\tddl: list.New(),\n\t\tcache: map[Key]*list.Element{},\n\t}\n}",
"func New(size int) *GCache {\n\treturn NewWithType(size, gcache.TYPE_LRU)\n}",
"func NewManagerSeed(partitioner Partitioner, serviceName, dataDir, myEntryId string, seedHttpUrls []string) (*Manager, error) {\n //TODO: can we get the servicename from the routing table?\n\n manager := NewManager(partitioner, serviceName, dataDir, myEntryId)\n var err error\n for _,url := range(seedHttpUrls) {\n\n client := client.NewHttp(url)\n tble, err := RequestRouterTable(client)\n client.Close()\n if err != nil {\n //found a table.. woot\n manager.SetRouterTable(tble)\n return manager, nil\n }\n }\n\n if manager.table != nil {\n //uhh, I guess we sucessfully loaded it elsewhere\n return manager, nil\n }\n //we still return the manager since it is usable just doesnt have a routing table.\n return manager, err\n}",
"func newMemorySliceDB() (*sliceDB, error) {\r\n\tdb, err := leveldb.Open(storage.NewMemStorage(), nil)\r\n\tif err != nil {\r\n\t\treturn nil, err\r\n\t}\r\n\treturn &sliceDB{\r\n\t\tlvl: db,\r\n\t\tquit: make(chan struct{}),\r\n\t}, nil\r\n}",
"func NewLockedLRU(size int, name string, openStat bool,\n\temitCounter func(name string, value interface{}, prefix string, tagkv map[string]string)) *lockedLRU {\n\tif size <= 0 {\n\t\tpanic(\"inmem: must provide a positive size\")\n\t}\n\tlru := &lockedLRU{\n\t\topenStat: openStat,\n\t\tstopSignal: make(chan struct{}),\n\t\tname: name,\n\t\tc: simpleLRU{\n\t\t\tsize: size,\n\t\t\tlru: list.New(),\n\t\t\titems: make(map[interface{}]*list.Element),\n\t\t},\n\t\temitCounter: emitCounter,\n\t}\n\n\tif openStat && emitCounter != nil {\n\t\tgo runStat(lru)\n\t\truntime.SetFinalizer(lru, stopStat)\n\n\t}\n\treturn lru\n}",
"func NewLRUCache(capacity int, highPriorityPoolRatio float64) *Cache {\n\treturn &Cache{\n\t\tc: C.rocksdb_cache_create_lru_with_ratio(\n\t\t\tC.size_t(capacity),\n\t\t\tC.double(highPriorityPoolRatio),\n\t\t),\n\t}\n}",
"func newCacheDB(persistent, memory storage.DataStore) storage.DataStore {\n\treturn &memoryDB{\n\t\tpersistent: persistent,\n\t\tinmem: memory,\n\t\tpersistentAPN: nil,\n\t\tinmemAPN: nil,\n\t}\n}",
"func NewLRUTokenStore(maxOrigins, tokensPerOrigin int) TokenStore {\n\treturn &lruTokenStore{\n\t\tm: make(map[string]*list.Element[*lruTokenStoreEntry]),\n\t\tq: list.New[*lruTokenStoreEntry](),\n\t\tcapacity: maxOrigins,\n\t\tsingleOriginSize: tokensPerOrigin,\n\t}\n}",
"func NewLRUCache(max int) *LRUCache {\n\tc := &LRUCache{max: max}\n\tc.reset()\n\treturn c\n}",
"func NewManager() (*Manager, error) {\n\tsm := &Manager{\n\t\tsessions: make(map[string]*client),\n\t}\n\tsm.updateCondition = sync.NewCond(&sm.mu)\n\treturn sm, nil\n}",
"func InitDB() (*LevelLedger, error) {\n\t// Options struct doc: https://godoc.org/github.com/syndtr/goleveldb/leveldb/opt#Options.\n\topts := &opt.Options{\n\t\tAltFilters: nil,\n\t\tBlockCacher: opt.LRUCacher,\n\t\t// BlockCacheCapacity increased to 32MiB from default 8 MiB.\n\t\t// BlockCacheCapacity defines the capacity of the 'sorted table' block caching.\n\t\tBlockCacheCapacity: 32 * 1024 * 1024,\n\t\tBlockRestartInterval: 16,\n\t\tBlockSize: 4 * 1024,\n\t\tCompactionExpandLimitFactor: 25,\n\t\tCompactionGPOverlapsFactor: 10,\n\t\tCompactionL0Trigger: 4,\n\t\tCompactionSourceLimitFactor: 1,\n\t\tCompactionTableSize: 2 * 1024 * 1024,\n\t\tCompactionTableSizeMultiplier: 1.0,\n\t\tCompactionTableSizeMultiplierPerLevel: nil,\n\t\t// CompactionTotalSize increased to 32MiB from default 10 MiB.\n\t\t// CompactionTotalSize limits total size of 'sorted table' for each level.\n\t\t// The limits for each level will be calculated as:\n\t\t// CompactionTotalSize * (CompactionTotalSizeMultiplier ^ Level)\n\t\tCompactionTotalSize: 32 * 1024 * 1024,\n\t\tCompactionTotalSizeMultiplier: 10.0,\n\t\tCompactionTotalSizeMultiplierPerLevel: nil,\n\t\tComparer: comparer.DefaultComparer,\n\t\tCompression: opt.DefaultCompression,\n\t\tDisableBufferPool: false,\n\t\tDisableBlockCache: false,\n\t\tDisableCompactionBackoff: false,\n\t\tDisableLargeBatchTransaction: false,\n\t\tErrorIfExist: false,\n\t\tErrorIfMissing: false,\n\t\tFilter: nil,\n\t\tIteratorSamplingRate: 1 * 1024 * 1024,\n\t\tNoSync: false,\n\t\tNoWriteMerge: false,\n\t\tOpenFilesCacher: opt.LRUCacher,\n\t\tOpenFilesCacheCapacity: 500,\n\t\tReadOnly: false,\n\t\tStrict: opt.DefaultStrict,\n\t\tWriteBuffer: 16 * 1024 * 1024, // Default is 4 MiB\n\t\tWriteL0PauseTrigger: 12,\n\t\tWriteL0SlowdownTrigger: 8,\n\t}\n\n\tabsPath, err := filepath.Abs(dbDirPath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdb, err := leveldb.OpenFile(absPath, opts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar zeroID record.ID\n\tledger := LevelLedger{\n\t\tldb: db,\n\t\t// FIXME: temporary pulse implementation\n\t\tpulseFn: func() record.PulseNum {\n\t\t\treturn record.PulseNum(time.Now().Unix() / 10)\n\t\t},\n\t\tzeroRef: record.Reference{\n\t\t\tDomain: record.ID{}, // TODO: fill domain\n\t\t\tRecord: zeroID,\n\t\t},\n\t}\n\t_, err = db.Get([]byte(zeroRecordHash), nil)\n\tif err == leveldb.ErrNotFound {\n\t\terr = db.Put([]byte(zeroRecordHash), []byte(zeroRecordBinary), nil)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn &ledger, nil\n\t} else if err != nil {\n\t\treturn nil, err\n\t}\n\treturn &ledger, nil\n}",
"func NewMgr(ctx context.Context, bc lbcf.ConfigSetting) (*GtwyMgr, error) {\n\tpreflight(ctx, bc)\n\n\tif EnvDebugOn {\n\t\tlblog.LogEvent(\"GtwyMgr\", \"NewMgr\", \"info\", \"start\")\n\t}\n\n\tdatastoreClient, err := datastore.NewClient(ctx, bc.GetConfigValue(ctx, \"EnvGtwayGcpProject\"), option.WithGRPCConnectionPool(EnvClientPool))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcm1 := &GtwyMgr{\n\t\tds: datastoreClient,\n\t\tbc: bc,\n\t}\n\n\tif EnvDebugOn {\n\t\tlblog.LogEvent(\"GtwyMgr\", \"NewMgr\", \"info\", \"end\")\n\t}\n\n\treturn cm1, nil\n}",
"func New(gcPercent uint64) *CacheMap {\n\tc := &CacheMap{\n\t\titems: make(map[string]item),\n\t\tgcPercent: gcPercent,\n\t\tnbytes: 0,\n\t}\n\tgo c.GC()\n\treturn c\n}",
"func NewLevelDB(name, dir string) (db dbm.DB, err error) {\n\tbackend := dbm.GoLevelDBBackend\n\tif DBBackend == string(dbm.CLevelDBBackend) {\n\t\tbackend = dbm.CLevelDBBackend\n\t}\n\tdefer func() {\n\t\tif r := recover(); r != nil {\n\t\t\terr = fmt.Errorf(\"couldn't create db: %v\", r)\n\t\t}\n\t}()\n\treturn dbm.NewDB(name, backend, dir), err\n}",
"func NewLRU(maxEntries int) *LRU {\n\treturn &LRU{\n\t\tmaxEntries: maxEntries,\n\t\tll: list.New(),\n\t\tcache: make(map[interface{}]*list.Element),\n\t}\n}",
"func newCache() cache {\n\treturn cache{\n\t\taddressRecords: make(map[addressRecordID]addressRecord),\n\t\tpointerRecords: make(map[serviceInstanceName]pointerRecord),\n\t\tserviceRecords: make(map[serviceInstanceName]serviceRecord),\n\t\ttextRecords: make(map[serviceInstanceName]textRecord),\n\t}\n}",
"func (h *Handle) LocalDB() (IDB, error) {\n\tdb := C.alpm_get_localdb(h.ptr)\n\tif db == nil {\n\t\treturn nil, h.LastError()\n\t}\n\n\treturn &DB{db, *h}, nil\n}",
"func InitDBM() DBMem {\n\treturn DBMem{\n data: make(map[int]Person),\n history: InitHistoryStore(),\n }\n}",
"func New(cb Done, transport http.RoundTripper) *Manager {\n\treturn &Manager{\n\t\tkeys: sets.NewString(),\n\t\tcb: cb,\n\t\ttransport: transport,\n\t}\n}",
"func newLdbCacheIter(snap *dbCacheSnapshot, slice *util.Range) *ldbCacheIter {\n\titer := snap.pendingKeys.Iterator(slice.Start, slice.Limit)\n\treturn &ldbCacheIter{Iterator: iter}\n}",
"func NewManager(storage *storage.Storage, ledgerIndex iotago.MilestoneIndex) (*Manager, error) {\n\tmanager := &Manager{\n\t\tEvents: &Events{\n\t\t\tNextMilestoneUnsupported: event.New1[*iotago.ProtocolParamsMilestoneOpt](),\n\t\t\tCriticalErrors: event.New1[error](),\n\t\t},\n\t\tstorage: storage,\n\t\tcurrent: nil,\n\t\tpending: nil,\n\t}\n\n\tif err := manager.init(ledgerIndex); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn manager, nil\n}",
"func New(size int) *LRU {\r\n\treturn &LRU{\r\n\t\tcache: make(Map),\r\n\t\tlink: list.New(),\r\n\t\tsize: size,\r\n\t}\r\n\r\n}",
"func CreateNewCache(fileName string, maxEntry uint64, save bool) *SimpleCache {\n\tcache := &SimpleCache{\n\t\tData: make(map[string]*queueData),\n\t\tFileName: fileName,\n\t\tMaxEntry: maxEntry,\n\t\tQueue: newPriorityQueue(),\n\t\tTTL: -1,\n\t\tLock: new(sync.Mutex),\n\t\tExpiryChannel: make(chan bool),\n\t\tSaveFile: save,\n\t}\n\twg.Add(1)\n\tgo cache.concurrentProcessChecks()\n\treturn cache\n}",
"func New(maxEntries int) *Cache {\n\treturn &Cache{\n\t\tmaxEntries: maxEntries,\n\t\tlinkedList: list.New(),\n\t\tcacheByKey: make(map[string]*list.Element),\n\t}\n}",
"func New(conf map[string]interface{}) (cache.Cache, error) {\n\tc := &config{}\n\terr := mapstructure.Decode(conf, c)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"lru: error decoding config\")\n\t}\n\tc.init()\n\n\tsvc := &lru{\n\t\tconfig: c,\n\t\tcache: gcache.New(c.Size).LRU().Build(),\n\t}\n\n\treturn svc, nil\n}",
"func New(folder string, cfg *Config) (*DB, error) {\n\tenv, err := lmdb.NewEnv()\n\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"env create failed\")\n\t}\n\n\terr = env.SetMaxDBs(cfg.MaxDBs)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"env config failed\")\n\t}\n\terr = env.SetMapSize(cfg.SizeMbs * 1024 * 1024)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"map size failed\")\n\t}\n\n\tif err = env.SetFlags(cfg.EnvFlags); err != nil {\n\t\treturn nil, errors.Wrap(err, \"set flag\")\n\t}\n\n\tos.MkdirAll(folder, os.ModePerm)\n\terr = env.Open(folder, 0, cfg.Mode)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"open env\")\n\t}\n\n\tvar staleReaders int\n\tif staleReaders, err = env.ReaderCheck(); err != nil {\n\t\treturn nil, errors.Wrap(err, \"reader check\")\n\t}\n\tif staleReaders > 0 {\n\t\tlog.Printf(\"cleared %d reader slots from dead processes\", staleReaders)\n\t}\n\n\tvar dbi lmdb.DBI\n\terr = env.Update(func(txn *lmdb.Txn) (err error) {\n\t\tdbi, err = txn.CreateDBI(\"agg\")\n\t\treturn err\n\t})\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"create DB\")\n\t}\n\n\treturn &DB{env, dbi}, nil\n\n}",
"func NewCache(maxEntries int) *Cache {\r\n\treturn &Cache{\r\n\t\tMaxEntries: maxEntries,\r\n\t\tPurgeInterval: 60,\r\n\t\tll: list.New(),\r\n\t\tcache: make(map[interface{}]*list.Element),\r\n\t}\r\n}",
"func NewGlobalManager(key sdk.StoreKey, holder param.ParamHolder) GlobalManager {\n\treturn GlobalManager{\n\t\tstorage: model.NewGlobalStorage(key),\n\t\tparamHolder: holder,\n\t}\n}",
"func New(logger *log.Logger, root string) (LDCache, error) {\n\tpath := filepath.Join(root, ldcachePath)\n\n\tlogger.Debugf(\"Opening ld.conf at %v\", path)\n\tf, err := os.Open(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer f.Close()\n\n\tfi, err := f.Stat()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\td, err := syscall.Mmap(int(f.Fd()), 0, int(fi.Size()),\n\t\tsyscall.PROT_READ, syscall.MAP_PRIVATE)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcache := &ldcache{\n\t\tdata: d,\n\t\tReader: bytes.NewReader(d),\n\t\troot: root,\n\t\tlogger: logger,\n\t}\n\treturn cache, cache.parse()\n}",
"func NewManager(m project.Manager) *Manager {\n\treturn &Manager{\n\t\tBaseManager: cached.NewBaseManager(cached.ResourceTypeProject),\n\t\tdelegator: m,\n\t\tkeyBuilder: cached.NewObjectKey(cached.ResourceTypeProject),\n\t\tlifetime: time.Duration(config.CacheExpireHours()) * time.Hour,\n\t}\n}",
"func NewDBManager(name, path, secret string, batchMode bool, buckets []string) (dbm *DBManager, err error) {\n\tvar info os.FileInfo\n\tif path != \"\" {\n\t\tinfo, err = os.Stat(path)\n\t\tif err != nil || !info.Mode().IsDir() {\n\t\t\terr = ErrPathInvalid\n\t\t\treturn\n\t\t}\n\t}\n\n\tfullPath := filepath.Join(path, name)\n\tinfo, err = os.Stat(fullPath)\n\tif err == nil && !info.Mode().IsRegular() {\n\t\terr = ErrFileNameInvalid\n\t\treturn\n\t}\n\n\tif err != nil {\n\t\tif Debug {\n\t\t\tLogger.Printf(\"NewDBManager DB file %s does not exist, will be created\", fullPath)\n\t\t}\n\t}\n\n\tdbm = &DBManager{\n\t\tname: name,\n\t\tpath: path,\n\t\tfullPath: fullPath,\n\t\tbatchMode: batchMode,\n\t\tbuckets: buckets,\n\t}\n\n\tif err = dbm.SetSecret(secret); err != nil {\n\t\treturn\n\t}\n\n\tif err = dbm.openDB(); err != nil {\n\t\treturn\n\t}\n\tdefer dbm.closeDB()\n\n\treturn\n}",
"func NewLRUCache(capacity uint64) Cache {\n return ConstructShardedLRUCache(capacity)\n}",
"func newLocalService(config fab.EndpointConfig, mspID string, opts ...coptions.Opt) *LocalService {\n\tlogger.Debug(\"Creating new local discovery service\")\n\n\ts := &LocalService{mspID: mspID}\n\ts.service = newService(config, s.queryPeers, opts...)\n\treturn s\n}",
"func New() *LocalStore {\n\treturn &LocalStore{}\n}",
"func NewLRUClientSessionCache(capacity int) tls.ClientSessionCache",
"func NewCacheL1(executor executor.Executor) executor.Launcher {\n\treturn New(executor, \"stress-ng-cache-l1\", fmt.Sprintf(\"--cache=%d --cache-level=1\", StressngCacheL1ProcessNumber.Value()))\n}",
"func New(tracker *bestblock.Tracker, lag int64, config Config) (*Manager, error) {\n\tm := &Manager{\n\t\tconfig: config,\n\t\tlag: lag,\n\t\ttracker: tracker,\n\t\tclosed: false,\n\t\tstopChan: make(chan bool),\n\t}\n\n\tlog.Info(\"setting up redis connection\")\n\tm.redis = redis.NewClient(&redis.Options{\n\t\tAddr: config.RedisServer,\n\t\tPassword: config.RedisPassword,\n\t\tDB: 0,\n\t\tReadTimeout: time.Second * 1,\n\t})\n\n\terr := m.redis.Ping().Err()\n\tif err != nil {\n\t\tlog.Error(err)\n\t\treturn nil, err\n\t}\n\n\tlog.Info(\"connected to redis successfully\")\n\n\tgo m.watchNewBlocks()\n\n\treturn m, nil\n}",
"func NewManager(ps common.PubsubInterface, primaryCapacity int, overflowCapacity int) *Manager {\n\tshelves := map[string]*primaryShelf{\n\t\t\"hot\": NewPrimaryShelf(primaryCapacity),\n\t\t\"cold\": NewPrimaryShelf(primaryCapacity),\n\t\t\"frozen\": NewPrimaryShelf(primaryCapacity),\n\t}\n\toverflow := NewOverflowShelf(overflowCapacity, []string{\"hot\", \"cold\", \"frozen\"})\n\treturn &Manager{shelves, overflow, ps}\n}",
"func NewLocalLogger(\n\tprefix LogPrefix,\n\tlevel fw.LogLevel,\n\tstdout fw.StdOut,\n\ttimer fw.Timer,\n\tprogramRuntime fw.ProgramRuntime,\n) mdlogger.Local {\n\treturn mdlogger.NewLocal(string(prefix), level, stdout, timer, programRuntime)\n}",
"func NewManager() *Manager {\n\tm := &Manager{\n\t\tPersistenceManager: persistencemanager.NewMapPersistenceManager(),\n\t}\n\n\tm.DeckManager = deck.NewDeckManager(m.PersistenceManager)\n\tm.PileManager = pile.NewPileManager(m.PersistenceManager)\n\n\treturn m\n}",
"func New(opts ...Option) *LRU {\n\tl := &LRU{}\n\tfor _, o := range opts {\n\t\to.apply(l)\n\t}\n\tif l.nshards < 1 {\n\t\tl.nshards = 1\n\t}\n\n\tcap := l.cap / l.nshards\n\tl.shards = make([]*shard, l.nshards)\n\tfor i := 0; i < l.nshards; i++ {\n\t\tl.shards[i] = newShard(cap)\n\t}\n\treturn l\n}",
"func New(ctx context.Context, m map[string]interface{}) (auth.Manager, error) {\n\tvar mgr manager\n\tif err := mgr.Configure(m); err != nil {\n\t\treturn nil, err\n\t}\n\tgw, err := pool.GetGatewayServiceClient(pool.Endpoint(mgr.c.GatewayAddr))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tmgr.gw = gw\n\n\treturn &mgr, nil\n}",
"func NewManager(config config.Config) *Manager {\n\ttm := &Manager{config: config}\n\ttm.tasks = make(map[uint64]*task)\n\treturn tm\n}",
"func NewManager(r *http.Request) *Manager {\n\tsession := db.GetDBSession(r)\n\treturn &Manager{\n\t\tsession: session,\n\t\tcollection: getCollection(session),\n\t}\n}",
"func NewLevelManager(levelFilePath string, seed int64) *LevelManager {\n\tsrc := rand.NewSource(seed)\n\tfmt.Printf(\"Seed: %d\\n\", seed)\n\treturn &LevelManager{\n\t\tfilePath: levelFilePath,\n\t\tlevels: make(map[string]Map),\n\t\trandomSource: rand.New(src),\n\t}\n}",
"func NewLocalService(chroot string, routes []Route) *LocalService {\n\treturn &LocalService{\n\t\troutes: routes,\n\t\tchroot: chroot,\n\t\twriteNotifications: make(chan WriteNotification, 100),\n\t}\n}",
"func NewHashMap(name string, client *Client) HashMap {\n\treturn HashMap{name: name, c: client}\n}",
"func newMapCache(dss map[string]rrd.DataSourcer) *mapCache {\n\tmc := &mapCache{make(map[string]int64), make(map[int64]rrd.DataSourcer)}\n\tvar n int64\n\tfor name, ds := range dss {\n\t\tmc.byName[name] = n\n\t\tmc.byId[n] = ds\n\t\tn++\n\t}\n\treturn mc\n}",
"func NewLocal() build_remote.ExecutionCacheServiceServer {\n\tstore, err := action.NewOnDisk()\n\tif err != nil {\n\t\tlog.Fatalf(\"could not initialise ExecutionCacheService: %v\", err)\n\t}\n\treturn &local{store}\n}"
] | [
"0.61819696",
"0.60259074",
"0.5961291",
"0.5807322",
"0.5721401",
"0.5707583",
"0.5698278",
"0.5694999",
"0.5629836",
"0.56234133",
"0.5574107",
"0.5541237",
"0.55317944",
"0.55115384",
"0.5459979",
"0.54501456",
"0.54334444",
"0.5427154",
"0.5390991",
"0.5389694",
"0.53836066",
"0.5377683",
"0.5375434",
"0.5359582",
"0.5353348",
"0.53482616",
"0.53354025",
"0.532056",
"0.52916515",
"0.5284211",
"0.5267258",
"0.52604157",
"0.5249791",
"0.5228152",
"0.5224863",
"0.5221276",
"0.52120644",
"0.52001995",
"0.5196861",
"0.51852506",
"0.51667",
"0.5150599",
"0.5149123",
"0.5141555",
"0.5139266",
"0.5137381",
"0.5133383",
"0.51201564",
"0.510054",
"0.5093685",
"0.5091284",
"0.50773656",
"0.5074612",
"0.50458926",
"0.50408393",
"0.5040508",
"0.5026618",
"0.5025556",
"0.5025451",
"0.50170404",
"0.5016414",
"0.50094867",
"0.50072575",
"0.50024545",
"0.49993673",
"0.49984893",
"0.4990108",
"0.497377",
"0.4970062",
"0.496397",
"0.4962606",
"0.49600613",
"0.49598375",
"0.49594772",
"0.49562034",
"0.4956087",
"0.49537534",
"0.4952274",
"0.49366185",
"0.49299285",
"0.4926158",
"0.49256203",
"0.4925532",
"0.4925323",
"0.49245605",
"0.4923184",
"0.491709",
"0.49153236",
"0.49091658",
"0.4904808",
"0.49046335",
"0.48970178",
"0.4895718",
"0.48948982",
"0.48942426",
"0.48863786",
"0.48685265",
"0.48678398",
"0.4857144",
"0.48541862"
] | 0.79498154 | 0 |
GetRoot Get root base for the tree. | func (mgr *LocalHashMapDBMgr) GetRoot() (*Base, error) {
if mgr.base.RootKey.IsNil() || mgr.base.Degree == 0 {
return nil, nil
}
return &mgr.base, nil
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (mt *MerkleTree) GetRoot() {\n\tvar concat string\n\t// if no current transactions, set root to ''\n\tif len(mt.TransactionIDs) == 0 {\n\t\troot := \"\"\n\t\tmt.Root = &root\n\t\treturn\n\t}\n\n\tif len(mt.TransactionIDs) == 1 {\n\t\troot := mt.TransactionIDs[0]\n\t\tmt.Root = &root\n\t} else {\n\t\tconcat = mt.hashPair(mt.TransactionIDs[0], mt.TransactionIDs[1])\n\t\tmt.TransactionIDs = append([]string{concat}, mt.TransactionIDs[2:]...)\n\t\tmt.layers = append(mt.layers, mt.TransactionIDs)\n\t\tmt.GetRoot()\n\t}\n}",
"func (t *Tree) Root() *TreeNode {\n\treturn t.root\n}",
"func (bst *Bst) Root() *Node {\n\treturn bst.root\n}",
"func (r Ctx) GetRoot() string {\n\treturn r[RootKey].(string)\n}",
"func (t *tree) Root() *node {\n\treturn t.root\n}",
"func (t *Tree) Root() *node {\n\treturn t.root\n}",
"func (r *RouterEventFilter) getRoot(ctx context.Context, rootId string) *tree.Node {\n\n\tif node, ok := r.RootNodesCache.Get(rootId); ok {\n\t\treturn node.(*tree.Node)\n\t}\n\tresp, e := r.pool.GetTreeClient().ReadNode(ctx, &tree.ReadNodeRequest{Node: &tree.Node{Uuid: rootId}})\n\tif e == nil && resp.Node != nil {\n\t\tr.RootNodesCache.Set(rootId, resp.Node.Clone(), cache.DefaultExpiration)\n\t\treturn resp.Node\n\t}\n\treturn nil\n\n}",
"func (mt *merkleTreeImp) GetRootNode() Node {\n\treturn mt.root\n}",
"func (fs *FS) Root() (fspkg.Node, error) {\n\tte, ok := fs.r.Lookup(\"\")\n\tif !ok {\n\t\treturn nil, errors.New(\"failed to find root in stargz\")\n\t}\n\treturn &node{fs, te}, nil\n}",
"func (b *BinarySearch) Root() *BinarySearchNode {\n\treturn b.root\n}",
"func (c *Client) GetRoot() (*RootOutput, *ErrorOutput) {\n\toutput := new(RootOutput)\n\terrorOutput := c.sendAPIRequest(\"GET\", \"\", nil, output)\n\treturn output, errorOutput\n}",
"func (fs *FS) Root() (fs.Node, error) {\n\tfs.μ.RLock()\n\tdefer fs.μ.RUnlock()\n\treturn fs.rnode, nil\n}",
"func (n *NamespacedMerkleTree) Root() []byte {\n\tif n.rawRoot == nil {\n\t\tn.rawRoot = n.computeRoot(0, len(n.leaves))\n\t}\n\treturn n.rawRoot\n}",
"func (a *AST) GetRoot() AST {\n\tif a.RootToken {\n\t\treturn *a\n\t}\n\n\tif len(a.Children) == 0 {\n\t\treturn AST{}\n\t}\n\n\treturn a.Children[0]\n}",
"func (n *NodeBuilder) Root() *TagNode {\n\treturn n.root\n}",
"func (e *Environment) GetRoot() Namespace {\n\treturn e.Get(RootDomain, func() Namespace {\n\t\treturn e.New(RootDomain)\n\t})\n}",
"func (dfs *DaosFileSystem) Root() *DaosNode {\n\treturn dfs.root\n}",
"func (c *Container) Root() *Root {\n\treturn c.root\n}",
"func (db *Database) Root() *doltdb.RootValue {\n\treturn db.root\n}",
"func (r *Root) Root() (fs.Node, error) {\n\treturn newDir(nil, r.registry), nil\n}",
"func (agent *MerkleAgent) Root() []byte {\n\treturn agent.root\n}",
"func (r *Root) Root() (fs.Node, error) {\n\tdebug.Log(\"Root()\")\n\treturn r, nil\n}",
"func (m *SiteCollection) GetRoot()(Rootable) {\n return m.root\n}",
"func (this *Heap[T]) GetRoot() (data T, ok bool) {\n\tif len(this.datas) > 0 {\n\t\treturn this.datas[0], true\n\t}\n\treturn\n}",
"func (tree *tree) Root() *zipkincore.Span {\n\tnodes := tree.nodes[0]\n\tif len(nodes) == 1 {\n\t\treturn nodes[0]\n\t} else {\n\t\treturn nil\n\t}\n}",
"func (p *ancestorTree) GetRoot(blkID ids.ID) ids.ID {\n\tfor {\n\t\tparentID, ok := p.childToParent[blkID]\n\t\t// this is the furthest parent available, break loop and return blkID\n\t\tif !ok {\n\t\t\treturn blkID\n\t\t}\n\t\t// continue to loop with parentID\n\t\tblkID = parentID\n\t}\n}",
"func (mt *merkleTreeImp) GetRootHash() []byte {\n\tif mt.root == nil {\n\t\treturn EmptyTreeRootHash\n\t}\n\treturn mt.root.getNodeHash()\n\n}",
"func (s *ShortenBlock) GetRootID() string {\n\treturn s.tree.id\n}",
"func (n Node) GetRoot() Node {\n\tif n.Parent == nil {\n\t\treturn n\n\t}\n\n\treturn n.Parent.GetRoot()\n}",
"func (l *Loader) Root() *ecsgen.Root {\n\treturn l.root\n}",
"func (fs HgmFs) Root() (fs.Node, error) {\n\treturn &HgmDir{hgmFs: fs, localDir: \"/\"}, nil\n}",
"func (t *BinaryTree) RootValue() interface{} { return t.root.value }",
"func (db *Database) Root() *Group {\n\treturn db.root\n}",
"func (w *RootWalker) Root() *Root {\n\treturn w.r\n}",
"func (tb *TreeBuilder) Root() (Node, error) {\n\tif len(tb.levels) == 0 {\n\t\treturn nil, nil\n\t}\n\n\tif len(tb.levels[0].Chunks) > 0 {\n\t\tfor i := 0; i < len(tb.levels)-1; i++ {\n\t\t\tvar n Node = tb.levels[i]\n\t\t\tif tb.F != nil {\n\t\t\t\tvar err error\n\t\t\t\tn, err = tb.F(tb.levels[i])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t}\n\t\t\ttb.levels[i+1].Nodes = append(tb.levels[i+1].Nodes, n)\n\t\t\ttb.levels[i] = nil // help the gc reclaim memory sooner, maybe\n\t\t}\n\t}\n\n\t// Don't necessarily return the highest node in tb.levels.\n\t// We can prune any would-be root nodes that have only one child.\n\n\t// If we _are_ going to return tb.levels[len(tb.levels)-1],\n\t// we have to call tb.F on it.\n\t// If we're not, we don't:\n\t// tb.F has already been called on all other nodes.\n\n\tif len(tb.levels) == 1 {\n\t\tvar result Node = tb.levels[0]\n\t\tif tb.F != nil {\n\t\t\treturn tb.F(tb.levels[0])\n\t\t}\n\t\treturn result, nil\n\t}\n\n\ttop := tb.levels[len(tb.levels)-1]\n\tif len(top.Nodes) > 1 {\n\t\tif tb.F != nil {\n\t\t\treturn tb.F(top)\n\t\t}\n\t\treturn top, nil\n\t}\n\n\tvar (\n\t\troot Node = top\n\t\terr error\n\t)\n\tfor root.NumChildren() == 1 {\n\t\troot, err = root.Child(0)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn root, nil\n}",
"func (r *RPCClient) Root() (t *RPCClientRoot) {\n\treturn &RPCClientRoot{r}\n}",
"func (el *Tree) Root() *Tree {\n\tif el.Parent() == nil {\n\t\treturn el\n\t}\n\t\n\treturn el.Parent().Root()\n}",
"func (v *VaultFS) Root() (fs.Node, error) {\n\tlogrus.Debug(\"returning root\")\n\treturn NewRoot(v.root, v.Logical()), nil\n}",
"func GetRoot(dbPath string) (data db.Data, code int, err error) {\n\tif _, ok := allDB[dbPath]; !ok {\n\t\treturn data, http.StatusBadRequest, errors.New(\"There's no any db with such path (\" + dbPath + \")\")\n\t}\n\n\tdata, err = allDB[dbPath].GetRoot()\n\tif err != nil {\n\t\treturn data, http.StatusInternalServerError, err\n\t}\n\n\treturn data, http.StatusOK, nil\n}",
"func (fs *Ossfs) Root() *ossInode {\n\treturn fs.root\n}",
"func (bn *ObjList) GetRoot() (address *obj_container, obj *Object) {\n\n\tbn.lock.MinLock()\n\tif bn.root == nil {\n\t\t// log.Println(\"ObjBank>> GetRoot was called while ObjList was empty\")\n\t\tbn.lock.MinUnlock()\n\t\treturn nil, nil\n\t}\n\taddress, obj = bn.root, bn.root.data\n\tbn.lock.MinUnlock()\n\treturn\n}",
"func (k *KeyTransactions) Root() []byte {\n\treturn k.root\n}",
"func (d *Document) Root() Node {\n\treturn Node{0, d.rev, d}\n}",
"func (m *Drive) GetRoot()(DriveItemable) {\n return m.root\n}",
"func (p *Parser) Root() *FileNode {\n\treturn p.nod\n}",
"func (f *FS) Root() (fs.Node, error) {\n\treturn &Node{fs: f}, nil\n}",
"func (r *router) Root() *OpenAPI {\n\treturn r.root\n}",
"func (f *Fs) Root() string {\n\treturn f.root\n}",
"func (f *Fs) Root() string {\n\treturn f.root\n}",
"func (f *Fs) Root() string {\n\treturn f.root\n}",
"func (f *Fs) Root() string {\n\treturn f.root\n}",
"func (f *Fs) Root() string {\n\treturn f.root\n}",
"func (f *Fs) Root() string {\n\treturn f.root\n}",
"func (f *Fs) Root() string {\n\treturn f.root\n}",
"func (l *loaderImpl) Root() string {\n\treturn l.root\n}",
"func (_ZKOnacci *ZKOnacciCaller) Root(opts *bind.CallOpts) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _ZKOnacci.contract.Call(opts, &out, \"root\")\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn out0, err\n\n}",
"func (g *Graph) Root() *Node {\n\treturn g.Nodes[0]\n}",
"func (nf *NavigationFactory) Root() string {\n\treturn nf.rootPath\n}",
"func getRoot(b Builder) Builder {\n\tfor {\n\t\tp := b.GetParent()\n\t\tif p == nil {\n\t\t\treturn b\n\t\t}\n\t\tb = p\n\t}\n}",
"func (T *SparseMerkleTree) GetLatestRoot() (string) {\n\trootDigest, _ := T.getLatestNode(\"\")\n\treturn base64.StdEncoding.EncodeToString(rootDigest)\n}",
"func (node *Node) Root() *Node {\n\tfor node.Parent != nil {\n\t\tnode = node.Parent\n\t}\n\treturn node\n}",
"func (cmd *Command) Root() *Command {\n\tif cmd.parent == nil {\n\t\treturn cmd\n\t}\n\n\treturn cmd.parent.Root()\n}",
"func GetRoot(store store.Store, id string) Root { return GetFromCache(store, id).(Root) }",
"func GetRootCommand() *cobra.Command { return rootCmd }",
"func (t *Tree) GetRootData() int {\n\treturn t.Root.Data\n}",
"func (x *Indexer) Root() string {\n\treturn x.config.IndexRoot\n}",
"func (lt *LinkedTreeNode) GetRootNode() (*LinkedTreeNode, error) {\n\treturn lt.getRootNode()\n}",
"func (obj *language) Root() string {\n\treturn obj.root\n}",
"func (adder *Adder) curRootNode() (ipld.Node, error) {\n\tmr, err := adder.mfsRoot()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\troot, err := mr.GetDirectory().GetNode()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// if one root file, use that hash as root.\n\tif len(root.Links()) == 1 {\n\t\tnd, err := root.Links()[0].GetNode(adder.ctx, adder.dagService)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\troot = nd\n\t}\n\n\treturn root, err\n}",
"func (l *fileLoader) Root() string {\n\treturn l.root\n}",
"func (f *Forest) Root() *RuleNode {\n\tif f == nil || len(f.symbolNodes) == 0 {\n\t\treturn nil\n\t}\n\treturn &RuleNode{\n\t\tsymbol: f.root,\n\t}\n}",
"func (p *Project) Root() string {\n\treturn p.root\n}",
"func root(n uint) uint {\n\tw := nodeWidth(n)\n\treturn uint((1 << log2(w)) - 1)\n}",
"func (cache *ContentCache) Root() string {\n\treturn cache.root\n}",
"func Root(name, path string) *TRoot {\n\tvar tmpl = &Template{template.New(name), name}\n\tvar t = &TRoot{tmpl, path}\n\n\treturn t\n}",
"func (a *API) getRoot(w http.ResponseWriter, r *http.Request) {\n\tout := map[string]string{\n\t\t\"apiName\": \"rutte-api\",\n\t\t\"apiDescription\": \"API's for voting platform\",\n\t\t\"apiVersion\": \"v0.0\",\n\t\t\"appVersion\": version.String(),\n\t}\n\trender.JSON(w, r, out)\n}",
"func GetRoot(t pb.Trace) *pb.Span {\n\t// That should be caught beforehand\n\tif len(t) == 0 {\n\t\treturn nil\n\t}\n\t// General case: go over all spans and check for one which matching parent\n\tparentIDToChild := map[uint64]*pb.Span{}\n\n\tfor i := range t {\n\t\t// Common case optimization: check for span with ParentID == 0, starting from the end,\n\t\t// since some clients report the root last\n\t\tj := len(t) - 1 - i\n\t\tif t[j].ParentID == 0 {\n\t\t\treturn t[j]\n\t\t}\n\t\tparentIDToChild[t[j].ParentID] = t[j]\n\t}\n\n\tfor i := range t {\n\t\tdelete(parentIDToChild, t[i].SpanID)\n\t}\n\n\t// Here, if the trace is valid, we should have len(parentIDToChild) == 1\n\tif len(parentIDToChild) != 1 {\n\t\tlog.Debugf(\"Didn't reliably find the root span for traceID:%v\", t[0].TraceID)\n\t}\n\n\t// Have a safe behavior if that's not the case\n\t// Pick the first span without its parent\n\tfor parentID := range parentIDToChild {\n\t\treturn parentIDToChild[parentID]\n\t}\n\n\t// Gracefully fail with the last span of the trace\n\treturn t[len(t)-1]\n}",
"func (a *AttesterSlashing) HashTreeRoot() ([32]byte, error) {\n\treturn ssz.HashWithDefaultHasher(a)\n}",
"func (kh kelvinHandler) GetBase() string {\n\treturn scaleToText(kh.Base)\n}",
"func (tree *DNFTree) CreateRoot(phi br.ClauseSet, isFinal bool) int {\n\treturn tree.CreateNodeEntry(phi, 0, isFinal)\n}",
"func (v *GenerateServiceRequest) GetThriftRoot() (o string) {\n\tif v != nil {\n\t\to = v.ThriftRoot\n\t}\n\treturn\n}",
"func baseForRoot(root interface{}, cache ResolutionCache) string {\n\tif root == nil {\n\t\treturn \"\"\n\t}\n\n\t// cache the root document to resolve $ref's\n\tnormalizedBase := normalizeBase(rootBase)\n\tcache.Set(normalizedBase, root)\n\n\treturn normalizedBase\n}",
"func (s *SVFS) Root() (fs.Node, error) {\n\t// Mount a specific container\n\tif TargetContainer != \"\" {\n\t\tbaseContainer, _, err := SwiftConnection.Container(TargetContainer)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// Find segment container too\n\t\tsegmentContainerName := TargetContainer + SegmentContainerSuffix\n\t\tsegmentContainer, _, err := SwiftConnection.Container(segmentContainerName)\n\n\t\t// Create it if missing\n\t\tif err == swift.ContainerNotFound {\n\t\t\tvar container *swift.Container\n\t\t\tcontainer, err = createContainer(segmentContainerName)\n\t\t\tsegmentContainer = *container\n\t\t}\n\t\tif err != nil && err != swift.ContainerNotFound {\n\t\t\treturn nil, err\n\t\t}\n\n\t\treturn &Container{\n\t\t\tDirectory: &Directory{\n\t\t\t\tapex: true,\n\t\t\t\tc: &baseContainer,\n\t\t\t\tcs: &segmentContainer,\n\t\t\t},\n\t\t}, nil\n\t}\n\n\t// Mount all containers within an account\n\treturn &Root{\n\t\tDirectory: &Directory{\n\t\t\tapex: true,\n\t\t},\n\t}, nil\n}",
"func (fr *fakeRequest) Root() cid.Cid {\n\treturn fr.root\n}",
"func (s *StateDB) RootHash() *corecrypto.HashType {\n\treturn s.trie.RootHash()\n}",
"func (obj *file) Root() string {\n\treturn obj.root\n}",
"func (tree *LinkedTree) GetRootNode() (*LinkedTreeNode, error) {\n\ttree.mux.Lock()\n\tdefer tree.mux.Unlock()\n\tif tree.root != nil {\n\t\treturn tree.root, nil\n\t}\n\treturn nil, fmt.Errorf(\"Can't get a node from a tree without a root node\")\n}",
"func (s *Server) GetStateRoot() stateroot.Service {\n\treturn s.stateRoot\n}",
"func (t *BalanceTable) Root() (cid.Cid, error) {\n\treturn (*Map)(t).Root()\n}",
"func (b *BTree) GetRootNodeRef() *storageref.StorageRef {\n\tb.mtx.Lock()\n\tdefer b.mtx.Unlock()\n\n\treturn b.rootNodRef\n}",
"func (f *FileSystem) Root() (fs.Node, error) {\n\tf.logger.Debugf(\"Root() request\\n\")\n\n\troot, err := f.get(nil, 0)\n\tif err != nil {\n\t\tf.logger.Printf(\"Root failed: %v\\n\", err)\n\t\treturn nil, fuse.EIO\n\t}\n\n\taccount, err := f.putio.Account.Info(nil)\n\tif err != nil {\n\t\tf.logger.Debugf(\"Fetching account info failed: %v\\n\", err)\n\t\treturn nil, fuse.EIO\n\t}\n\tf.account = account\n\n\treturn &Dir{\n\t\tfs: f,\n\t\tFile: &root,\n\t}, nil\n}",
"func (s *Session) Root() Rootable {\n\n\treturn s.root\n}",
"func (db *DB) GetStateRoot() (core.UserState, error) {\n\tvar state core.UserState\n\terr := db.Instance.Scopes(QueryByType(core.TYPE_ROOT), QueryByDepth(0)).Find(&state).Error\n\tif err != nil {\n\t\treturn state, core.ErrRecordNotFound(fmt.Sprintf(\"unable to find record. err:%v\", err))\n\t}\n\treturn state, nil\n}",
"func (f *FileList) Root() string {\n\treturn f.root\n}",
"func (q *pathCompression) Root(r int) int {\n\tfor {\n\t\tif r == q.IDs[r] {\n\t\t\tbreak\n\t\t}\n\t\tq.IDs[r] = q.IDs[q.IDs[r]]\n\t\tr = q.IDs[r]\n\t}\n\treturn r\n}",
"func (f *Filler) Root() *Root {\n\treturn f.r\n}",
"func GetRootLanguageTag(t language.Tag) language.Tag {\n\tfor {\n\t\tparent := t.Parent()\n\t\tif parent == language.Und {\n\t\t\treturn t\n\t\t}\n\t\tt = parent\n\t}\n}",
"func (sc *TraceScope) Root() *TraceScope {\n\treturn sc.top\n}",
"func (c Cache) Root() Path {\n\treturn c.Join(\"root\")\n}",
"func (un Decoder) Root() dom.Node {\n\tfor n := un.Node; n != nil; n = n.Parent() {\n\t\tif n.Parent() == nil {\n\t\t\treturn n\n\t\t}\n\t}\n\treturn nil\n}"
] | [
"0.72971463",
"0.6967713",
"0.6905524",
"0.68161714",
"0.6813087",
"0.6761039",
"0.6685708",
"0.6630886",
"0.66104454",
"0.6586958",
"0.6486263",
"0.6472821",
"0.64700806",
"0.6454522",
"0.64251405",
"0.6360517",
"0.6333982",
"0.6299451",
"0.6248249",
"0.62472093",
"0.6242822",
"0.62346697",
"0.6232892",
"0.62168634",
"0.6212113",
"0.6207682",
"0.62062746",
"0.6195026",
"0.6172461",
"0.61615133",
"0.61168766",
"0.6110793",
"0.6061909",
"0.6050642",
"0.60393095",
"0.6035057",
"0.6034974",
"0.6031947",
"0.6014249",
"0.6000308",
"0.599112",
"0.5989138",
"0.598202",
"0.59637",
"0.59278274",
"0.59157616",
"0.5911425",
"0.5896549",
"0.5896549",
"0.5896549",
"0.5896549",
"0.5896549",
"0.5896549",
"0.5896549",
"0.58923125",
"0.58611125",
"0.5859474",
"0.5849432",
"0.583758",
"0.5836774",
"0.5828033",
"0.5814039",
"0.5810767",
"0.58036244",
"0.5800528",
"0.57905126",
"0.57740045",
"0.57715636",
"0.5763361",
"0.5734202",
"0.57224345",
"0.5719554",
"0.57182366",
"0.5705077",
"0.5704783",
"0.5700256",
"0.56759644",
"0.5657907",
"0.56546247",
"0.56453687",
"0.5637345",
"0.5633382",
"0.5618613",
"0.5611757",
"0.5606504",
"0.55952126",
"0.5568897",
"0.5556989",
"0.5552695",
"0.5543489",
"0.55280066",
"0.55269605",
"0.55251473",
"0.55214894",
"0.5518078",
"0.55075955",
"0.55004364",
"0.55002445",
"0.54959315",
"0.54817635"
] | 0.7211411 | 1 |
SetRoot Set root base for the tree. | func (mgr *LocalHashMapDBMgr) SetRoot(base *Base) error {
mgr.base.Degree = base.Degree
mgr.base.RootKey = base.RootKey
return nil
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func SetRoot(path string) {\n\trootPath = path\n}",
"func (db *Database) SetRoot(newRoot *doltdb.RootValue) {\n\t// TODO: races\n\tdb.root = newRoot\n}",
"func (m *SiteCollection) SetRoot(value Rootable)() {\n m.root = value\n}",
"func (f *FileList) SetRoot(path string) {\n\tf.root = path\n}",
"func (api *Client) SetRoot(RootURL string) *Client {\n\tapi.mu.Lock()\n\tdefer api.mu.Unlock()\n\tapi.rootURL = RootURL\n\treturn api\n}",
"func (cfg *Config) SetRoot(root string) *Config {\n\tcfg.RootDir = root\n\treturn cfg\n}",
"func (c *Container) SetRoot(root *Root) (ok bool) {\n\tif root == nil {\n\t\tpanic(ErrMissingRoot)\n\t}\n\tif c.root == nil {\n\t\tc.root, ok = root, true\n\t\treturn\n\t}\n\tif c.root.Time < root.Time {\n\t\tc.root, ok = root, true\n\t}\n\treturn\n}",
"func (m *Drive) SetRoot(value DriveItemable)() {\n m.root = value\n}",
"func (a *Application) SetRoot(root Primitive, fullscreen bool) *Application {\n\ta.Lock()\n\ta.root = root\n\ta.rootFullscreen = fullscreen\n\tif a.screen != nil {\n\t\ta.screen.Clear()\n\t}\n\ta.Unlock()\n\n\ta.SetFocus(root)\n\n\treturn a\n}",
"func (f *Fs) setRoot(root string) {\n\tf.root = strings.Trim(root, \"/\")\n\tf.slashRoot = \"/\" + f.root\n\tf.slashRootSlash = f.slashRoot\n\tif f.root != \"\" {\n\t\tf.slashRootSlash += \"/\"\n\t}\n}",
"func (c *Compiler) SetAstRoot(root Node) {\n\tc.ast = root\n}",
"func (s *EnablePolicyTypeOutput) SetRoot(v *Root) *EnablePolicyTypeOutput {\n\ts.Root = v\n\treturn s\n}",
"func (s *DisablePolicyTypeOutput) SetRoot(v *Root) *DisablePolicyTypeOutput {\n\ts.Root = v\n\treturn s\n}",
"func (ft *functionTree) setRoot(f *function) error {\n\tif ft.root != nil {\n\t\treturn errors.New(\"setRoot: root already exists\")\n\t}\n\tft.root = f\n\treturn nil\n}",
"func SetRootNode(node []byte, isRoot bool) {\n\tvar IsRootNodeField *uint8 = (*uint8)(unsafe.Pointer(&node[IsRootNodeOffset]))\n\tif isRoot {\n\t\t*IsRootNodeField = 1\n\t} else {\n\t\t*IsRootNodeField = 0\n\t}\n}",
"func SetRootDirectory(rootDirectory string) {\n\troot = rootDirectory\n\tif root[len(root)-1] != '/' {\n\t\troot += \"/\"\n\t}\n\tlog.Println(\"Set root directory to : '\" + root + \"'\")\n}",
"func (bb *BlockBuilder) SetStateRoot(root cid.Cid) {\n\tbb.block.ParentStateRoot = root\n}",
"func (_m *MockDataCoord) SetRootCoord(rootCoord types.RootCoord) {\n\t_m.Called(rootCoord)\n}",
"func (m *RestaurantMutation) SetRootID(id int) {\n\tm.root = &id\n}",
"func (bb *BlockBuilder) SetStateRoot(root cid.Cid) {\n\tbb.block.StateRoot = root\n}",
"func (b *Builder) SetStateRoot(h hash.Hash32B) *Builder {\n\tb.blk.Header.stateRoot = h\n\treturn b\n}",
"func (t *Tree) UpdateRoot(p *Pos, model ModelInterface) {\n\tif t.p == nil || t.p.Hash() != p.Hash() {\n\t\tt.p = p\n\t\tt.root = t.NewTreeNode(nil, 0, false, 1, true)\n\t\tt.root.rootify(p, model)\n\t}\n}",
"func SetRootPrefix(prefix string) {\n\trootPrefix = prefix\n}",
"func (node *DataNode) SetRootCoord(rc types.RootCoord) error {\n\tswitch {\n\tcase rc == nil, node.rootCoord != nil:\n\t\treturn errors.New(\"nil parameter or repeatedly set\")\n\tdefault:\n\t\tnode.rootCoord = rc\n\t\treturn nil\n\t}\n}",
"func (s *State) SetRootKey(rootKey session.RootKeyable) {\n\ts.rootKey = rootKey.(*root.Key)\n}",
"func (d *VirtualDom) SetRootNode(node Node) {\n\tswitch node.(type) {\n\tcase string:\n\t\td.RootNode = node\n\tcase *ElementNode:\n\t\td.RootNode = node\n\tcase Component:\n\t\td.RootNode = node\n\tdefault:\n\t\tpanic(\"invalid dom node\")\n\t}\n}",
"func (_e *MockDataCoord_Expecter) SetRootCoord(rootCoord interface{}) *MockDataCoord_SetRootCoord_Call {\n\treturn &MockDataCoord_SetRootCoord_Call{Call: _e.mock.On(\"SetRootCoord\", rootCoord)}\n}",
"func (adder *Adder) SetMfsRoot(r *mfs.Root) {\n\tadder.mroot = r\n}",
"func (fs *fsMutable) initRoot() (err error) {\n\t_, found := fs.lookupTree.Get(formKey(fuseops.RootInodeID))\n\tif found {\n\t\treturn\n\t}\n\terr = fs.createNode(\n\t\tformLookupKey(fuseops.RootInodeID, rootPath),\n\t\tfuseops.RootInodeID,\n\t\trootPath,\n\t\tnil,\n\t\tfuseutil.DT_Directory,\n\t\ttrue)\n\treturn\n}",
"func (m *RestaurantMutation) ResetRoot() {\n\tm.root = nil\n\tm.clearedroot = false\n}",
"func (_e *MockQueryCoord_Expecter) SetRootCoord(rootCoord interface{}) *MockQueryCoord_SetRootCoord_Call {\n\treturn &MockQueryCoord_SetRootCoord_Call{Call: _e.mock.On(\"SetRootCoord\", rootCoord)}\n}",
"func SetRootHandler(path string) {\n\troothandlerpath = path\n}",
"func (c *Container) SetEncodedRoot(data []byte) (ok bool, err error) {\n\tvar root *Root\n\tif root, err = decodeRoot(data); err != nil {\n\t\treturn\n\t}\n\tok = c.SetRoot(root)\n\treturn\n}",
"func (self *StateObject) updateRoot(db trie.Database) {\n\tself.updateTrie(db)\n\tself.data.Root = self.trie.Hash()\n}",
"func (obj *StateObject) updateRoot(db Database) {\n\tobj.updateTrie(db)\n\tobj.data.StorageRoot = obj.trie.Hash()\n}",
"func ExtendRoot(overrides RootIface) *Root {\n\tjsiiID, err := jsii.GlobalRuntime.Client().Create(\n\t\t\"jsii$cdk$0.0.0.Root\",\n\t\t[]interface{}{},\n\t\tnil,\n\t)\n\tif err != nil {\n\t\tpanic(\"how are error handled?\" + err.Error())\n\t}\n\treturn &Root{\n\t\tbase: jsii.Base{ID: jsiiID},\n\t\tConstruct: InternalNewConstructAsBaseClass(jsiiID),\n\t}\n}",
"func (tp *Template) Root(name string) *Template {\n\ttp.root = name\n\treturn tp\n}",
"func (delegateObject *delegateObject) updateRoot(db Database) {\n\tdelegateObject.updateTrie(db)\n\tdelegateObject.data.Root = delegateObject.trie.Hash()\n}",
"func (mt *MerkleTree) GetRoot() {\n\tvar concat string\n\t// if no current transactions, set root to ''\n\tif len(mt.TransactionIDs) == 0 {\n\t\troot := \"\"\n\t\tmt.Root = &root\n\t\treturn\n\t}\n\n\tif len(mt.TransactionIDs) == 1 {\n\t\troot := mt.TransactionIDs[0]\n\t\tmt.Root = &root\n\t} else {\n\t\tconcat = mt.hashPair(mt.TransactionIDs[0], mt.TransactionIDs[1])\n\t\tmt.TransactionIDs = append([]string{concat}, mt.TransactionIDs[2:]...)\n\t\tmt.layers = append(mt.layers, mt.TransactionIDs)\n\t\tmt.GetRoot()\n\t}\n}",
"func (in *ActionExportCreateInput) SetRootSquash(value bool) *ActionExportCreateInput {\n\tin.RootSquash = value\n\n\tif in._selectedParameters == nil {\n\t\tin._selectedParameters = make(map[string]interface{})\n\t}\n\n\tin._selectedParameters[\"RootSquash\"] = nil\n\treturn in\n}",
"func (g *Graph) UpdateRoot(root Transaction) {\n\tptr := &root\n\n\tg.Lock()\n\n\tg.depthIndex[root.Depth] = append(g.depthIndex[root.Depth], ptr)\n\tg.eligibleIndex.ReplaceOrInsert((*sortByDepthTX)(ptr))\n\n\tg.transactions[root.ID] = ptr\n\n\tif g.indexer != nil {\n\t\tg.indexer.Index(hex.EncodeToString(root.ID[:]))\n\t}\n\n\tg.height = root.Depth + 1\n\n\tg.Unlock()\n\n\tg.UpdateRootDepth(root.Depth)\n}",
"func (n *TreeNode) rootify(p *Pos, model ModelInterface) {\n\tn.step = 0\n\tn.side = 1\n\tn.first = true\n\tn.parent = nil\n\tn.children = n.children[:0]\n\tn.Expand(p, model)\n}",
"func (d *Deployer) setRootID(f *v1.FilterSpec) error {\n\tif f.RootID != \"\" {\n\t\treturn nil\n\t}\n\trootId, err := d.getRootId(f.Image)\n\tif err != nil {\n\t\treturn err\n\t}\n\tf.RootID = rootId\n\treturn nil\n}",
"func Root(name, path string) *TRoot {\n\tvar tmpl = &Template{template.New(name), name}\n\tvar t = &TRoot{tmpl, path}\n\n\treturn t\n}",
"func (t *Tree) Root() *TreeNode {\n\treturn t.root\n}",
"func (tree *DNFTree) CreateRoot(phi br.ClauseSet, isFinal bool) int {\n\treturn tree.CreateNodeEntry(phi, 0, isFinal)\n}",
"func (bst *Bst) Root() *Node {\n\treturn bst.root\n}",
"func NewRoot() *Root {\n\treturn ExtendRoot(nil)\n}",
"func (g *Generator) SetRootfsType(rootfsType string) {\n\tg.image.RootFS.Type = rootfsType\n}",
"func (log Logger) Root(root Data) Logger {\n\tnewRoot := Data{}\n\tfor k, v := range log.root {\n\t\tnewRoot[k] = v\n\t}\n\tfor k, v := range root {\n\t\tnewRoot[k] = v\n\t}\n\tlog.root = newRoot\n\treturn log\n}",
"func (s *EnablePolicyTypeInput) SetRootId(v string) *EnablePolicyTypeInput {\n\ts.RootId = &v\n\treturn s\n}",
"func NewRoot() *Root {\n\tr := new(Root)\n\tr.objects.init(8)\n\tr.idCache = make(map[int]libFldDoc)\n\tr.missing = make(map[int]libFldDoc)\n\treturn r\n}",
"func (a *AttesterSlashing) HashTreeRoot() ([32]byte, error) {\n\treturn ssz.HashWithDefaultHasher(a)\n}",
"func (r *Root) Root() (fs.Node, error) {\n\treturn newDir(nil, r.registry), nil\n}",
"func Root(appName, version string) (opt *RootCmdOpt) {\n\troot := &RootCommand{AppName: appName, Version: version, Command: Command{BaseOpt: BaseOpt{Name: appName}}}\n\t// rootCommand = root\n\topt = RootFrom(root)\n\treturn\n}",
"func Root(appName, version string) (opt *RootCmdOpt) {\n\troot := &RootCommand{AppName: appName, Version: version, Command: Command{BaseOpt: BaseOpt{Name: appName}}}\n\t// rootCommand = root\n\topt = RootFrom(root)\n\treturn\n}",
"func (hi *HandshakeInfo) SetRootCertProvider(root certprovider.Provider) {\n\thi.mu.Lock()\n\thi.rootProvider = root\n\thi.mu.Unlock()\n}",
"func (s *DisablePolicyTypeInput) SetRootId(v string) *DisablePolicyTypeInput {\n\ts.RootId = &v\n\treturn s\n}",
"func (repo *Repo) LinkRoot() {\n\trepo.Root = repo\n\tLinkTo(repo.Externals, repo)\n}",
"func (r *Root) Root() (fs.Node, error) {\n\tdebug.Log(\"Root()\")\n\treturn r, nil\n}",
"func (a *ActStatus) SetTrieRoot(stateRoot arry.Hash) error {\n\treturn a.db.SetRoot(stateRoot)\n}",
"func (s *DjangoEngine) RootDir(root string) *DjangoEngine {\n\tif s.fs != nil && root != \"\" && root != \"/\" && root != \".\" && root != s.rootDir {\n\t\tsub, err := fs.Sub(s.fs, s.rootDir)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\ts.fs = sub // here so the \"middleware\" can work.\n\t}\n\n\ts.rootDir = filepath.ToSlash(root)\n\treturn s\n}",
"func (t *tree) Root() *node {\n\treturn t.root\n}",
"func (stateObj *stateObject) updateRoot(db StateDatabase) {\n\t// If nothing changed, don't bother with hashing anything\n\tif stateObj.updateTrie(db) == nil {\n\t\treturn\n\t}\n\t// Track the amount of time wasted on hashing the storage trie\n\tif metrics.EnabledExpensive {\n\t\tdefer func(start time.Time) { stateObj.db.StorageHashes += time.Since(start) }(time.Now())\n\t}\n\tstateObj.data.Root = stateObj.trie.Hash()\n}",
"func InternalNewRootAsBaseClass(jsiiID string) *Root {\n\treturn &Root{\n\t\tbase: jsii.Base{ID: jsiiID},\n\t\tConstruct: InternalNewConstructAsBaseClass(jsiiID),\n\t}\n}",
"func (b *Bst) Reset() {\n b.root = nil\n}",
"func (fs *FS) Root() (fspkg.Node, error) {\n\tte, ok := fs.r.Lookup(\"\")\n\tif !ok {\n\t\treturn nil, errors.New(\"failed to find root in stargz\")\n\t}\n\treturn &node{fs, te}, nil\n}",
"func (w *RootWalker) Root() *Root {\n\treturn w.r\n}",
"func (self *Map) SetRootTagName(root string) {\n\tself.rootTagName = root\n}",
"func (fs HgmFs) Root() (fs.Node, error) {\n\treturn &HgmDir{hgmFs: fs, localDir: \"/\"}, nil\n}",
"func (dfs *DaosFileSystem) Root() *DaosNode {\n\treturn dfs.root\n}",
"func (n *NamespacedMerkleTree) Root() []byte {\n\tif n.rawRoot == nil {\n\t\tn.rawRoot = n.computeRoot(0, len(n.leaves))\n\t}\n\treturn n.rawRoot\n}",
"func NewSetRootResourceReference(receiver, runtime string) New {\n\treturn func(f *jen.File, o types.Object) {\n\t\tf.Commentf(\"SetResourceReference of this %s.\", o.Name())\n\t\tf.Func().Params(jen.Id(receiver).Op(\"*\").Id(o.Name())).Id(\"SetResourceReference\").Params(jen.Id(\"r\").Qual(runtime, \"TypedReference\")).Block(\n\t\t\tjen.Id(receiver).Dot(\"ResourceReference\").Op(\"=\").Id(\"r\"),\n\t\t)\n\t}\n}",
"func (f *Pub) UseRoot(rx Publisher) {\n\tf.roots.Add(rx)\n}",
"func (t *Tree) InitTree(val int) {\n\tt.Root = initNode(val)\n}",
"func ReleaseRoot(oldUID int) error {\n\treturn syscall.Setuid(oldUID)\n}",
"func (c *Container) Root() *Root {\n\treturn c.root\n}",
"func NewRoot(repo restic.Repository, cfg Config) *Root {\n\tdebug.Log(\"NewRoot(), config %v\", cfg)\n\n\troot := &Root{\n\t\trepo: repo,\n\t\tcfg: cfg,\n\t\tblobCache: bloblru.New(blobCacheSize),\n\t}\n\n\tif !cfg.OwnerIsRoot {\n\t\troot.uid = uint32(os.Getuid())\n\t\troot.gid = uint32(os.Getgid())\n\t}\n\n\t// set defaults, if PathTemplates is not set\n\tif len(cfg.PathTemplates) == 0 {\n\t\tcfg.PathTemplates = []string{\n\t\t\t\"ids/%i\",\n\t\t\t\"snapshots/%T\",\n\t\t\t\"hosts/%h/%T\",\n\t\t\t\"tags/%t/%T\",\n\t\t}\n\t}\n\n\troot.SnapshotsDir = NewSnapshotsDir(root, rootInode, rootInode, NewSnapshotsDirStructure(root, cfg.PathTemplates, cfg.TimeTemplate), \"\")\n\n\treturn root\n}",
"func (_Gatekeeper *GatekeeperSession) VoteRoot(_root [32]byte) (*types.Transaction, error) {\n\treturn _Gatekeeper.Contract.VoteRoot(&_Gatekeeper.TransactOpts, _root)\n}",
"func (n *NodeBuilder) Root() *TagNode {\n\treturn n.root\n}",
"func (_Gatekeeper *GatekeeperTransactor) VoteRoot(opts *bind.TransactOpts, _root [32]byte) (*types.Transaction, error) {\n\treturn _Gatekeeper.contract.Transact(opts, \"VoteRoot\", _root)\n}",
"func (db *Database) Root() *doltdb.RootValue {\n\treturn db.root\n}",
"func (v *EventButton) XRoot() float64 {\n\tc := v.native().x_root\n\treturn float64(c)\n}",
"func (o *JsonEnvironment) SetRootHashOffset(v string) {\n\to.RootHashOffset = &v\n}",
"func (t *Tree) Root() *node {\n\treturn t.root\n}",
"func (s *QuickUnionSet) Root(element int) int {\n\tparent := element\n\tfor s.ids[parent] != parent {\n\t\tparent = s.ids[parent]\n\t}\n\treturn parent\n}",
"func (n *RforkNode) SetTree(t *Tree) {\n\tn.tree = t\n}",
"func (node *Node) Root() *Node {\n\tfor node.Parent != nil {\n\t\tnode = node.Parent\n\t}\n\treturn node\n}",
"func (m *DeviceManagementConfigurationSettingDefinition) SetRootDefinitionId(value *string)() {\n err := m.GetBackingStore().Set(\"rootDefinitionId\", value)\n if err != nil {\n panic(err)\n }\n}",
"func (o *IdentityAccount) SetRootUserId(v string) {\n\to.RootUserId = &v\n}",
"func (app *fileBuilder) WithRoot(root string) FileBuilder {\n\tapp.root = root\n\treturn app\n}",
"func newRoot(view *View, leafAllocation int64) *root {\n\tif leafAllocation < 10 {\n\t\tleafAllocation = 10\n\t}\n\tleafNum := 3 - ((leafAllocation - 1) % 3) + leafAllocation\n\tnodeNum := (leafNum - 1) / 3\n\tr := new(root)\n\tr.leaves = make([]leaf, leafNum, leafNum)\n\tfor i := 0; i < len(r.leaves)-2; i++ {\n\t\tr.leaves[i].nextFree = &r.leaves[i+1]\n\t}\n\tr.nodes = make([]node, nodeNum, nodeNum)\n\tfor i := 0; i < len(r.nodes)-2; i++ {\n\t\tr.nodes[i].nextFree = &r.nodes[i+1]\n\t}\n\tr.freeNode = &r.nodes[0]\n\tr.freeLeaf = &r.leaves[0]\n\trootNode := r.newNode(view)\n\tr.rootNode = rootNode\n\treturn r\n}",
"func (q *pathCompression) Root(r int) int {\n\tfor {\n\t\tif r == q.IDs[r] {\n\t\t\tbreak\n\t\t}\n\t\tq.IDs[r] = q.IDs[q.IDs[r]]\n\t\tr = q.IDs[r]\n\t}\n\treturn r\n}",
"func (adder *Adder) PinRoot(root ipld.Node) error {\n\tif !adder.Pin {\n\t\treturn nil\n\t}\n\n\trnk := root.Cid()\n\n\terr := adder.dagService.Add(adder.ctx, root)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif adder.tempRoot.Defined() {\n\t\terr := adder.pinning.Unpin(adder.ctx, adder.tempRoot, true)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tadder.tempRoot = rnk\n\t}\n\n\tdur, err := pin.ExpiresAtWithUnitAndCount(pin.DefaultDurationUnit, adder.PinDuration)\n\tif err != nil {\n\t\treturn err\n\t}\n\tadder.pinning.PinWithMode(rnk, dur, pin.Recursive)\n\treturn adder.pinning.Flush(adder.ctx)\n}",
"func (m *RestaurantMutation) ClearRoot() {\n\tm.clearedroot = true\n}",
"func (s *Client) ensureRoot() error {\n\texists, _, err := s.conn.Exists(s.zkRoot)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif !exists {\n\t\t_, err := s.conn.Create(s.zkRoot, []byte(\"\"), 0, zk.WorldACL(zk.PermAll))\n\t\tif err != nil && err != zk.ErrNodeExists {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}",
"func SetLevel(level Level) {\n\troot.SetLevel(level)\n}",
"func (m *MockSpaceStorage) TreeRoot(arg0 string) (*treechangeproto.RawTreeChangeWithId, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"TreeRoot\", arg0)\n\tret0, _ := ret[0].(*treechangeproto.RawTreeChangeWithId)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}",
"func (_Gatekeeper *GatekeeperTransactorSession) VoteRoot(_root [32]byte) (*types.Transaction, error) {\n\treturn _Gatekeeper.Contract.VoteRoot(&_Gatekeeper.TransactOpts, _root)\n}",
"func (a *AttesterSlashing) HashTreeRootWith(hh *ssz.Hasher) (err error) {\n\tindx := hh.Index()\n\n\t// Field (0) 'Attestation1'\n\tif err = a.Attestation1.HashTreeRootWith(hh); err != nil {\n\t\treturn\n\t}\n\n\t// Field (1) 'Attestation2'\n\tif err = a.Attestation2.HashTreeRootWith(hh); err != nil {\n\t\treturn\n\t}\n\n\thh.Merkleize(indx)\n\treturn\n}"
] | [
"0.76017094",
"0.7533801",
"0.73356307",
"0.71561253",
"0.7128585",
"0.7020409",
"0.7009551",
"0.6978859",
"0.6884092",
"0.6771216",
"0.66614586",
"0.6644592",
"0.65779394",
"0.6544975",
"0.6466843",
"0.6459183",
"0.6454095",
"0.6399124",
"0.6379322",
"0.62789136",
"0.62344044",
"0.62098116",
"0.6179826",
"0.61702716",
"0.61642176",
"0.61068714",
"0.60867053",
"0.6044791",
"0.6017419",
"0.601505",
"0.5976435",
"0.59312385",
"0.59114313",
"0.59097636",
"0.5883659",
"0.5863507",
"0.58382684",
"0.5808614",
"0.572677",
"0.5717967",
"0.57143724",
"0.57007223",
"0.5699518",
"0.5683765",
"0.56753606",
"0.56674063",
"0.56625694",
"0.5662377",
"0.56353813",
"0.563426",
"0.5608046",
"0.55950683",
"0.5565471",
"0.55454105",
"0.5545407",
"0.5545407",
"0.55409074",
"0.551329",
"0.55032295",
"0.55022615",
"0.5496664",
"0.5479684",
"0.54778785",
"0.545668",
"0.5456517",
"0.54454195",
"0.5426433",
"0.5424829",
"0.53988206",
"0.5396799",
"0.5396505",
"0.53884834",
"0.53841215",
"0.53717184",
"0.53650147",
"0.53618866",
"0.53570884",
"0.53463453",
"0.5346134",
"0.53454375",
"0.5336198",
"0.53304285",
"0.53240037",
"0.53227025",
"0.5315973",
"0.5308605",
"0.5303157",
"0.5291636",
"0.52824855",
"0.52822524",
"0.5279002",
"0.52755904",
"0.5266642",
"0.52619666",
"0.52526915",
"0.5251236",
"0.524953",
"0.52447796",
"0.5211782",
"0.5210216"
] | 0.7836139 | 0 |
Store store a key in the DB | func (mgr *LocalHashMapDBMgr) Store(k common.Key, e interface{}) error {
tn := e.(*treeNode)
mgr.memMap[k] = tn.deepCopy()
glog.V(1).Infof("storing %v in db (val: %v)", k, mgr.memMap[k])
return nil
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (ks *VRF) Store(key *vrfkey.PrivateKey, phrase string, scryptParams utils.ScryptParams) error {\n\tks.lock.Lock()\n\tdefer ks.lock.Unlock()\n\tencrypted, err := key.Encrypt(phrase, scryptParams)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to encrypt key\")\n\t}\n\tif err := ks.orm.FirstOrCreateEncryptedSecretVRFKey(encrypted); err != nil {\n\t\treturn errors.Wrap(err, \"failed to save encrypted key to db\")\n\t}\n\tks.keys[key.PublicKey] = *key\n\treturn nil\n}",
"func store(ctx gliderssh.Context, key string, value interface{}) {\n\tctx.SetValue(key, value)\n}",
"func StoreByKey(key string) (*Store, error) {\n\tdb := GetDB()\n\tvar store Store\n\terr := db.Where(\"key = ?\", key).First(&store).Error\n\treturn &store, err\n}",
"func (db *LDB) Put(key []byte, value []byte) error {\n return db.db.Put(key, value, nil)\n}",
"func (c *CoordinatorHelper) StoreKey(\n\tctx context.Context,\n\tdbTx storage.DatabaseTransaction,\n\taddress string,\n\tkeyPair *keys.KeyPair,\n) error {\n\t// We optimisically add the interesting address although the dbTx could be reverted.\n\tc.balanceStorageHelper.AddInterestingAddress(address)\n\n\t_, _ = c.counterStorage.UpdateTransactional(ctx, dbTx, storage.AddressesCreatedCounter, big.NewInt(1))\n\treturn c.keyStorage.StoreTransactional(ctx, address, keyPair, dbTx)\n}",
"func (txn *levelDBTxn) Save(key, value string) error {\n\ttxn.mu.Lock()\n\tdefer txn.mu.Unlock()\n\n\ttxn.batch.Put([]byte(key), []byte(value))\n\treturn nil\n}",
"func Store(ctx context.Context, db *sql.DB, data []byte, replication, expiration time.Time) (int64, error) {\n\tkey := crypto.GetKey(data)\n\tquery := \"INSERT INTO keys(key, data, replication, expiration) VALUES (?, ?, ?, ?)\"\n\tctx, cancel := context.WithDeadline(ctx, time.Now().Add(3*time.Second))\n\tdefer cancel()\n\n\tstmt, err := db.PrepareContext(ctx, query)\n\tif err != nil {\n\t\treturn 0, errors.Errorf(\"could not prepare statement: %w\", err).WithField(\"query\", query)\n\t}\n\n\tres, err := stmt.ExecContext(ctx, string(key), data, replication, expiration)\n\tif err != nil {\n\t\treturn 0, errors.Errorf(\"could not insert keys: %w\", err)\n\t}\n\n\t// If the database is being written to ensure to check for Close\n\t// errors that may be returned from the driver. The query may\n\t// encounter an auto-commit error and be forced to rollback changes.\n\tif err := stmt.Close(); err != nil {\n\t\treturn 0, errors.Errorf(\"could not close statement: %w\", err)\n\t}\n\n\trows, err := res.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Errorf(\"could not get the number of affected rows: %w\", err)\n\t}\n\n\treturn rows, nil\n}",
"func (k *Keyring) Set(key *api.Key) error {\n\tif err := k.initDB(); err != nil {\n\t\treturn err\n\t}\n\treturn Transact(k.db, func(tx *sqlx.Tx) error {\n\t\tlogger.Debugf(\"Saving key %s\", key.ID)\n\t\tif err := updateKeyTx(tx, key); err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t})\n}",
"func (s *LevelDBStore) Set(key string, value []byte) {\n\t_ = s.db.Put([]byte(key), value, nil)\n}",
"func (self *LevelDBStore) Put(key []byte, value []byte) error {\n\treturn self.db.Put(key, value, nil)\n}",
"func (c *DetailsDB) Set(key, value []byte) error {\n\terr := c.db.Update(func(tx *bolt.Tx) error {\n\t\tbucket, err := tx.CreateBucketIfNotExists(c.bucket)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\terr = bucket.Put(key, value)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t})\n\n\treturn err\n}",
"func StoreMe(key interface{}, val interface{}) error {\n\treturn nil\n}",
"func StoreKey(key, value string, d time.Duration) error {\n\tconn := aredis.Connection()\n\tdefer func() {\n\t\t_ = conn.Close()\n\t}()\n\n\t_, err := redis.String(conn.Do(\"PSETEX\", prefix+key, d.Nanoseconds()/1000, value))\n\treturn errors.Wrap(err, \"set failed\")\n}",
"func (d *database) Set(bucket, key string, value interface{}) (err error) {\n\tvar b []byte\n\tb, err = json.Marshal(value)\n\tif err != nil {\n\t\treturn err\n\t}\n\ttx, err := d.db.Begin()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"Set\")\n\t}\n\tstmt, err := tx.Prepare(\"insert or replace into keystore(bucket_key,value) values (?, ?)\")\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"Set\")\n\t}\n\tdefer stmt.Close()\n\n\t_, err = stmt.Exec(bucket+\"/\"+key, string(b))\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"Set\")\n\t}\n\n\terr = tx.Commit()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"Set\")\n\t}\n\n\treturn\n}",
"func (s *DHT) Store(ctx context.Context, data []byte) (string, error) {\n\tkey := s.hashKey(data)\n\n\t// replicate time for the key\n\treplication := time.Now().Add(defaultReplicateTime)\n\n\t// store the key to local storage\n\tif err := s.store.Store(ctx, key, data, replication); err != nil {\n\t\treturn \"\", fmt.Errorf(\"store data to local storage: %v\", err)\n\t}\n\n\t// iterative store the data\n\tif _, err := s.iterate(ctx, IterateStore, key, data); err != nil {\n\t\treturn \"\", fmt.Errorf(\"iterative store data: %v\", err)\n\t}\n\n\treturn base58.Encode(key), nil\n}",
"func (storage *Storage) Put(key []byte, data []byte) error {\n\tstorage.Db[string(key)] = string(data)\n\treturn nil\n}",
"func (h *RedisHelper) Store(key string, data []byte) (err error) {\n\tif 1 == 2 {\n\t\tfmt.Printf(\"key: %s\\n\", key)\n\t}\n\t_, err = h.Conn.Do(\"SET\", []byte(key), data)\n\treturn\n}",
"func (h *Handle) Store(key, value []byte) error {\n\trv := C.unqlite_kv_store(h.db, unsafe.Pointer(&key[0]), C.int(len(key)), unsafe.Pointer(&value[0]), C.unqlite_int64(len(value)))\n\tif rv == C.UNQLITE_OK {\n\t\treturn nil\n\t}\n\treturn Errno(rv)\n}",
"func (d *Datastore) Store(key []byte, data interface{}) error {\n\td.mutex.Lock()\n\tdefer d.mutex.Unlock()\n\n\td.cleanup()\n\tsKey := convertKey(key)\n\td.items[sKey] = item{data, time.Now()}\n\treturn nil\n}",
"func (db *DB) Put(key, value []byte) (err error) {\n\treturn db.LevigoDB.Put(db.wo, key, value)\n}",
"func (s *StoreDb) Set(key, v string) error {\n\treturn s.db.Update(func(txn *badger.Txn) error {\n\t\treturn txn.Set([]byte(key), []byte(v))\n\t})\n}",
"func (cs *CStore) StoreKey(key string) {\n\tcs.Store(key, \"\")\n}",
"func (db *memorydb) Put(key, value []byte) error {\n\n\tif db.enableBatch {\n\t\tdb.batch.Put(key, value)\n\t} else {\n\t\tdb.writeLock <- struct{}{}\n\t\tdefer func() {\n\t\t\t<-db.writeLock\n\t\t}()\n\n\t\tdb.sm.Lock()\n\t\tdefer db.sm.Unlock()\n\n\t\tdb.db[string(key)] = value\n\t}\n\n\treturn nil\n}",
"func kvSave(w http.ResponseWriter, r *http.Request) {\n\tdefer r.Body.Close()\n\tvars := mux.Vars(r)\n\tkey := vars[\"key\"]\n\tbody, _ := ioutil.ReadAll(r.Body)\n\tkvStorage[key] = string(body)\n\tw.Write(body)\n}",
"func (db *DB) set(ctx context.Context, key, value []byte) error {\n\treturn db.Update(ctx, func(tx *TransactionManager) error {\n\t\treturn tx.set(ctx, key, value)\n\t})\n}",
"func (db *DB) Set(key, value []byte) error {\n\treturn db.Update(func(tx *TransactionManager) error {\n\t\treturn tx.Set(key, value)\n\t})\n}",
"func Put(key []byte, value []byte) error {\n\treturn db.Put(key, value, nil)\n}",
"func KeyAdd(organizationId uint, clusterId uint) (string, error) {\n\tlog.Info(\"Generate and store SSH key \")\n\n\tsshKey, err := KeyGenerator()\n\tif err != nil {\n\t\tlog.Errorf(\"KeyGenerator failed reason: %s\", err.Error())\n\t\treturn \"\", err\n\t}\n\n\tdb := model.GetDB()\n\tcluster := model.ClusterModel{ID: clusterId}\n\tif err = db.First(&cluster).Error; err != nil {\n\t\tlog.Errorf(\"Cluster with id=% not found: %s\", cluster.ID, err.Error())\n\t\treturn \"\", err\n\t}\n\tsecretId, err := KeyStore(sshKey, organizationId, cluster.Name)\n\tif err != nil {\n\t\tlog.Errorf(\"KeyStore failed reason: %s\", err.Error())\n\t\treturn \"\", err\n\t}\n\treturn secretId, nil\n}",
"func Save(keyS string, valueS string) error {\n\n\tdb := Connect()\n\n\t//defer db.Close()\n\n\tkey := []byte(keyS)\n\tvalue := []byte(valueS)\n\n\terr := db.Update(func(tx *bolt.Tx) error {\n\n\t\tbucket, err := tx.CreateBucketIfNotExists(Database)\n\n\t\tif err != nil {\n\n\t\t\treturn err\n\t\t}\n\n\t\terr = bucket.Put(key, value)\n\n\t\tif err != nil {\n\n\t\t\treturn err\n\n\t\t} else {\n\n\t\t\t//fmt.Println(\"save sucess\")\n\t\t\treturn nil\n\t\t}\n\t})\n\n\tif err != nil {\n\n\t\tfmt.Println(\"erro try save \", err)\n\t\tos.Exit(1)\n\t}\n\n\treturn nil\n}",
"func (kvs *keyValueServer) putIntoDB(request []string) {\n\tkey := request[1]\n\tvalue := []byte(request[2])\n\tput(key, value)\n}",
"func (db *MemoryStorage) Put(key []byte, value []byte) error {\n\tdb.data.Store(common.BytesToHex(key), value)\n\treturn nil\n}",
"func (kvstore *KVStore) put(key string, value []byte) {\n\t/* kvstore is a pointer, can it be used in this way? yes*/\n\tkvstore.kvstore[key] = value\n}",
"func APIKeySave(key, loggedInUser string, dateCreated time.Time) error {\n\t// Make sure the API key isn't already in the database\n\tdbQuery := `\n\t\tSELECT count(key)\n\t\tFROM api_keys\n\t\tWHERE key = $1`\n\tvar keyCount int\n\terr := pdb.QueryRow(dbQuery, key).Scan(&keyCount)\n\tif err != nil {\n\t\tlog.Printf(\"Checking if an API key exists failed: %v\\n\", err)\n\t\treturn err\n\t}\n\tif keyCount != 0 {\n\t\t// API key is already in our system\n\t\tlog.Printf(\"Duplicate API key (%s) generated for user '%s'\\n\", key, loggedInUser)\n\t\treturn fmt.Errorf(\"API generator created duplicate key. Try again, just in case...\")\n\t}\n\n\t// Add the new API key to the database\n\tdbQuery = `\n\t\tINSERT INTO api_keys (user_id, key, date_created)\n\t\tSELECT (SELECT user_id FROM users WHERE lower(user_name) = lower($1)), $2, $3`\n\tcommandTag, err := pdb.Exec(dbQuery, loggedInUser, key, dateCreated)\n\tif err != nil {\n\t\tlog.Printf(\"Adding API key to database failed: %v\\n\", err)\n\t\treturn err\n\t}\n\tif numRows := commandTag.RowsAffected(); numRows != 1 {\n\t\tlog.Printf(\"Wrong number of rows (%d) affected when adding API key: %v, username: %v\\n\", numRows, key,\n\t\t\tloggedInUser)\n\t}\n\treturn nil\n}",
"func (r *redisPersist) Store(key string, p mqtt.Packet) error {\n\tif p == nil || r.conn == nil {\n\t\treturn nil\n\t}\n\n\tif ok, err := r.conn.HSet(r.mainKey, key, p.Bytes()).Result(); !ok {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func (b *BadgerDBStore) Put(key, value []byte) error {\n\treturn b.db.Update(func(txn *badger.Txn) error {\n\t\terr := txn.Set(key, value)\n\t\treturn err\n\t})\n}",
"func Save(conn redis.Conn, key, setKey string, value []byte) error {\n\t_, err := conn.Do(\"SET\", key, value)\n\tif len(setKey) > 0 && !strings.HasPrefix(key, \"player\") {\n\t\tSadd(conn, setKey, key)\n\t}\n\treturn err\n}",
"func (c *Container) Save(i interface{}) cipher.SHA256 {\n\treturn c.db.AddAutoKey(encoder.Serialize(i))\n}",
"func put(w http.ResponseWriter, r *http.Request) {\n\n value := r.FormValue(\"value\")\n key := r.FormValue(\"key\")\n \n fmt.Print(\"%s,%s\",key, value)\n db, err := leveldb.OpenFile(\"db\", nil)\n \n if err != nil {\n fmt.Println(err)\n }\n err = db.Put([]byte(key),[]byte(value), nil) \n \n fmt.Println(err)\n defer db.Close()\n}",
"func (b *DynamoDbBackend) Store(key string, value interface{}) error {\n\tdata, err := b.encrypt(value)\n\tif err != nil {\n\t\treturn err\n\t}\n\tlog.Debugf(\"DynamoDB Encrypted: %s\", data)\n\n\ti := dynamodb.PutItemInput{\n\t\tTableName: aws.String(b.table),\n\t\tItem: map[string]*dynamodb.AttributeValue{\n\t\t\tb.pk: {S: aws.String(key)},\n\t\t\t\"value\": {S: aws.String(data)},\n\t\t\t\"encrypted\": {BOOL: aws.Bool(true)},\n\t\t},\n\t}\n\n\tlog.Debugf(\"writing key %s in DynamoDB table %s\", key, b.table)\n\tif _, err := b.c.PutItem(&i); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func (db *TriasDB) Set(key []byte, value []byte) {\n\tdb.mtx.Lock()\n\tdefer db.mtx.Unlock()\n\n\tdb.SetNoLock(key, value)\n}",
"func (db *DBDriver) Create(ctx context.Context, member, key string, value []byte) ([]byte, error) {\n\tbuf, err := randomBuf()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tctx, tx, err := dbtx.BeginTx(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer db.reapTx(tx)\n\n\tkv := models.Kvstore{\n\t\tMember: member,\n\t\tKey: key,\n\t\tValue: value,\n\t\tNonce: buf,\n\t}\n\n\tif err := kv.Insert(ctx, tx, boil.Infer()); err != nil {\n\t\treturn nil, errors.Wrap(ErrAlreadySet, err.Error())\n\t}\n\n\treturn buf, tx.Commit()\n}",
"func (pk *PublicKey) SaveToDB() error {\n\treturn model.SetPublicKey(string(pk.ID), pk)\n}",
"func (s Server) Store(id, payload []byte) (aesKey []byte, err error) {\n\n\tvar encryptedData []byte\n\n\tencryptedData, aesKey, err = s.Encrypt(payload)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\trecord := &dataService.Record{\n\t\tID: id,\n\t\tData: encryptedData}\n\n\treturn aesKey, s.DataService.StoreData(record)\n\n}",
"func (store *Storage) Save(key []byte, value []byte) error {\n\terr := store.db.Put(store.writeOptions, key, value)\n\tif err != nil {\n\t\tfmt.Println(\"Write data to RocksDB failed!\")\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func (s Store) Set(ctx context.Context, key int64, value string) error {\n\tconn := s.Pool.Get()\n\tdefer conn.Close()\n\n\t_, err := conn.Do(\"SET\", key, value)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func Put(key string, value string){\n \n h := sha256.New()\n h.Write([]byte(value))\n sha := base64.URLEncoding.EncodeToString(h.Sum(nil))\n \n //fmt.Println(sha)\n var n Data \n \n n.val = value //storing key value in keyValue hash map\n n.hash = sha // storing key hash in keyHash hash map \n \n keyValue[key] = n\n}",
"func (kv KeyValueStore) Set(key, value string) error {\n\treturn set(kv.DB, key, value)\n}",
"func (s *Store) Put(key string, v interface{}) {\n\ts.store.Put(key, v)\n}",
"func (ks *KVSStore) StoreKey(k bccsp.Key) error {\n\tentry := &entry{}\n\tvar id string\n\n\tswitch key := k.(type) {\n\tcase *handlers.NymSecretKey:\n\t\tentry.NymSecretKey = &NymSecretKey{\n\t\t\tSki: key.Ski,\n\t\t\tSk: key.Sk.Bytes(),\n\t\t\tPk: ks.Translator.G1ToProto(key.Pk),\n\t\t\tExportable: key.Exportable,\n\t\t}\n\n\t\tpk, err := k.PublicKey()\n\t\tif err != nil {\n\t\t\treturn errors.Errorf(\"could not get public version for key [%s]\", k.SKI())\n\t\t}\n\n\t\tid = hex.EncodeToString(pk.SKI())\n\tcase *handlers.UserSecretKey:\n\t\tentry.UserSecretKey = &UserSecretKey{\n\t\t\tSk: key.Sk.Bytes(),\n\t\t\tExportable: key.Exportable,\n\t\t}\n\t\tid = hex.EncodeToString(k.SKI())\n\tdefault:\n\t\treturn errors.Errorf(\"unknown type [%T] for the supplied key\", key)\n\t}\n\n\treturn ks.KVS.Put(id, entry)\n}",
"func Set(key string, val interface{}) error {\n\treturn DB.Set(key, val)\n}",
"func (kv *LevelDBKV) Save(key, value string) error {\n\treturn errors.WithStack(kv.Put([]byte(key), []byte(value), nil))\n}",
"func (kv *KVStore) Put(key, value string) error {\n\tpayload := kvPayload{\n\t\tKey: key,\n\t\tValue: value,\n\t\tOp: OpPut,\n\t}\n\n\t_, err := kv.db.Add(&payload)\n\treturn err\n}",
"func (d *Datastore) Put(key string, value *models.User) error {\n\t_, err := d.db.RunInTransaction(context.Background(),\n\t\tfunc(tx *datastore.Transaction) error {\n\t\t\tif _, err := tx.Put(d.NewKey(key), value); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\treturn nil\n\t\t})\n\n\t// maybe do something here\n\n\treturn err\n}",
"func Key(id string) datastore.Key {\n\treturn datastore.NewKey(kind, id)\n}",
"func (r *RedisAdapter) Store(ctx context.Context, key string, data []byte, expiration time.Duration) error {\n\t// TODO: Handle message trace from ctx.\n\tif len(key) == 0 {\n\t\treturn errors.New(\"invalid key\")\n\t}\n\tclient := r.c.WithContext(ctx)\n\tvalue := base64.StdEncoding.EncodeToString(data)\n\terr := client.Set(key, value, expiration).Err()\n\treturn errors.Wrap(err, \"error storing key value pair in Redis\")\n}",
"func (this *Database) Put(key string, value []byte) error {\n\tclient, err := redis.Dial(\"tcp\", this.address)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer client.Close()\n\treply := client.Cmd(\"SET\", key, value)\n\tif reply.Err != nil {\n\t\treturn reply.Err\n\t}\n\treturn nil\n}",
"func (b *LDBBatch) Put(key []byte, value []byte) error {\n b.batch.Put(key, value)\n return nil\n}",
"func (p *Promoter) Store(ctx context.Context) (*datastore.Key, error) {\n\tvar k *datastore.Key\n\n\t// See if a key exists, or if a new one is required\n\tif p.DatastoreKey.Incomplete() {\n\t\tk = datastore.NewIncompleteKey(ctx, \"Promoter\", nil)\n\t} else {\n\t\tk = &p.DatastoreKey\n\t}\n\n\t// Stash the entry in the datastore\n\tkey, err := datastore.Put(ctx, k, p)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn key, nil\n}",
"func (d Database) Insert(key string, value string) error {\n\tif d.connection == nil {\n\t\treturn errors.New(\"connection not initialized\")\n\t}\n\t_, err := d.connection.Set(d.ctx, key, value, 0).Result()\n\treturn err\n}",
"func (db *Database) Set(key string, data []byte) error {\n\tif db == nil || db.conn == nil {\n\t\treturn hord.ErrNoDial\n\t}\n\n\tif err := hord.ValidKey(key); err != nil {\n\t\treturn err\n\t}\n\n\tif err := hord.ValidData(data); err != nil {\n\t\treturn err\n\t}\n\n\terr := db.conn.Query(`UPDATE hord SET data = ? WHERE key = ?`, data, key).Exec()\n\treturn err\n}",
"func (c *HostKeyCollector) StoreKey() ssh.HostKeyCallback {\n\treturn func(hostname string, remote net.Addr, key ssh.PublicKey) error {\n\t\tc.knownKeys = append(\n\t\t\tc.knownKeys,\n\t\t\tfmt.Sprintf(\"%s %s %s\\n\", knownhosts.Normalize(hostname), key.Type(), base64.StdEncoding.EncodeToString(key.Marshal()))...,\n\t\t)\n\t\treturn nil\n\t}\n}",
"func Insert(key []byte, value []byte) error {\n\tif db == nil {\n\t\treturn errors.New(\"database not initialized\")\n\t}\n\n\tif len(key) == 0 {\n\t\treturn errors.New(\"empty key provided\")\n\t}\n\n\treturn db.Put(key, value, nil)\n}",
"func (m *MySQL) Set(key string, th *Blob) error {\n\tstart := time.Now()\n\tgo func() {\n\t\tklog.Infof(\"set(%q) took %s\", key, time.Since(start))\n\t}()\n\n\tsetMem(m.memcache, key, th)\n\n\tgo func() {\n\t\tb := new(bytes.Buffer)\n\t\tge := gob.NewEncoder(b)\n\n\t\tif err := ge.Encode(th); err != nil {\n\t\t\tklog.Errorf(\"encode: %w\", err)\n\t\t}\n\n\t\t_, err := m.db.Exec(`\n\t\t\tINSERT INTO persist2 (k, v, saved) VALUES (?, ?, ?)\n\t\t\tON DUPLICATE KEY UPDATE k=VALUES(k), v=VALUES(v)`, key, b.Bytes(), time.Now())\n\n\t\tif err != nil {\n\t\t\tklog.Errorf(\"insert failed: %v\", err)\n\t\t}\n\t}()\n\treturn nil\n}",
"func KeyStore(key *Key, organizationID uint, clusterName string) (secretID string, err error) {\n\tlog.Info(\"Store SSH Key to Bank Vaults\")\n\tvar createSecretRequest secret.CreateSecretRequest\n\tcreateSecretRequest.Type = secretTypes.SSHSecretType\n\tcreateSecretRequest.Name = clusterName\n\n\tcreateSecretRequest.Values = map[string]string{\n\t\tsecretTypes.User: key.User,\n\t\tsecretTypes.Identifier: key.Identifier,\n\t\tsecretTypes.PublicKeyData: key.PublicKeyData,\n\t\tsecretTypes.PublicKeyFingerprint: key.PublicKeyFingerprint,\n\t\tsecretTypes.PrivateKeyData: key.PrivateKeyData,\n\t}\n\n\tsecretID, err = secret.Store.Store(organizationID, &createSecretRequest)\n\n\tif err != nil {\n\t\tlog.Errorf(\"Error during store: %s\", err.Error())\n\t\treturn \"\", err\n\t}\n\n\tlog.Info(\"SSH Key stored.\")\n\treturn\n}",
"func (r *RedisPersist) Store(key string, p lib.Packet) error {\n\tif r == nil || r.conn == nil {\n\t\treturn nil\n\t}\n\n\tif p.WriteTo(r.buf) != nil {\n\t\tif ok, err := r.conn.HSet(r.mainKey, key, r.buf.String()).Result(); !ok {\n\t\t\treturn err\n\t\t}\n\t}\n\tr.buf.Reset()\n\n\treturn nil\n}",
"func (d *Database) Set(key string, value string) error {\n\td.dbMutex.Lock()\n\tdefer d.dbMutex.Unlock()\n\n\tdata := []byte(value)\n\td.Dlog.Append(key, data)\n\td.Data[key] = data\n\n\treturn nil\n}",
"func (k *FileKeystore) Store(key string, value []byte) error {\n\tk.Lock()\n\tdefer k.Unlock()\n\n\tk.secrets[key] = serializableSecureString{Value: value}\n\tk.dirty = true\n\treturn nil\n}",
"func (ds *DS) datastoreKey(id int64) *datastore.Key {\n\n\tc := ds.ctx\n\treturn datastore.NewKey(c, \"guest\", \"\", id, nil)\n\n}",
"func SetKey(key string, data interface{}) {\n\tconnect := Connect()\n\tif data == nil {\n\t\tlog.Println(\"should not be nil\")\n\t}\n\t_, err := connect.Do(\"SET\", key, data)\n\tif err != nil {\n\t\tlog.Fatal(err, \"not ok\")\n\t}\n\tdefer connect.Close()\n}",
"func (s Storage) Save(bucket, key string, data Storable) error {\n\tif !s.Opened {\n\t\treturn fmt.Errorf(\"db must be opened before saving\")\n\t}\n\terr := s.DB.Update(func(tx *bolt.Tx) error {\n\t\tmBucket, err := tx.CreateBucketIfNotExists([]byte(bucket))\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Error creating bucket : %s\", err)\n\t\t}\n\t\tenc, err := data.Encode()\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Could not encode : %s\", err)\n\t\t}\n\t\terr = mBucket.Put([]byte(key), enc)\n\t\treturn err\n\t})\n\treturn err\n}",
"func (s *Storage) Put(key, val []byte) error {\n\topts := gorocksdb.NewDefaultWriteOptions()\n\n\tdefer opts.Destroy()\n\n\treturn s.db.Put(opts, key, val)\n}",
"func (manager *KeysManager) Insert(key jose.JSONWebKey) {\n\tmanager.KeyMap[key.KeyID] = &key\n\tmanager.KeyList = append(manager.KeyList, &key)\n}",
"func Put(key string, value string) {\n\tdb.Update(func(tx *bolt.Tx) error {\n\t\tb := tx.Bucket([]byte(DefaultBucket))\n\t\terr := b.Put([]byte(key), []byte(value))\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\treturn nil\n\t})\n}",
"func (km *keyManager) save(callbacks ...func(pg.Queryer) error) error {\n\tekb, err := km.keyRing.Encrypt(km.password, km.scryptParams)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"unable to encrypt keyRing\")\n\t}\n\treturn km.orm.saveEncryptedKeyRing(&ekb, callbacks...)\n}",
"func (l *LocalStore) Set(ctx context.Context, key, data string) error {\n\tl.lock.Lock()\n\tl.store[key] = data\n\tl.lock.Unlock()\n\n\treturn nil\n}",
"func AddKey(key * Key) {\n\tKeys = append(Keys, *key)\n\tSaveDatabase(Keys, \"keys\")\n}",
"func AddKey(s Server, password string, template *Key) (*Key, error) {\n\t// fill meta data about key\n\tnewkey := &Key{\n\t\tCreated: time.Now(),\n\t\tKDF: \"scrypt\",\n\t\tN: scryptN,\n\t\tR: scryptR,\n\t\tP: scryptP,\n\t}\n\n\thn, err := os.Hostname()\n\tif err == nil {\n\t\tnewkey.Hostname = hn\n\t}\n\n\tusr, err := user.Current()\n\tif err == nil {\n\t\tnewkey.Username = usr.Username\n\t}\n\n\t// generate random salt\n\tnewkey.Salt = make([]byte, scryptSaltsize)\n\tn, err := rand.Read(newkey.Salt)\n\tif n != scryptSaltsize || err != nil {\n\t\tpanic(\"unable to read enough random bytes for salt\")\n\t}\n\n\t// call scrypt() to derive user key\n\tnewkey.user, err = newkey.scrypt(password)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif template == nil {\n\t\t// generate new random master keys\n\t\tnewkey.master, err = newkey.newKeys()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t} else {\n\t\t// copy master keys from old key\n\t\tnewkey.master = template.master\n\t}\n\n\t// encrypt master keys (as json) with user key\n\tbuf, err := json.Marshal(newkey.master)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tnewkey.Data = GetChunkBuf(\"key\")\n\tn, err = newkey.EncryptUser(newkey.Data, buf)\n\tnewkey.Data = newkey.Data[:n]\n\n\t// dump as json\n\tbuf, err = json.Marshal(newkey)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// store in repository and return\n\tid, err := s.Create(backend.Key, buf)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tnewkey.id = id\n\n\tFreeChunkBuf(\"key\", newkey.Data)\n\n\treturn newkey, nil\n}",
"func homefaceKey(c appengine.Context, id string) *datastore.Key {\n return datastore.NewKey(c, \"Homeface\", id, 0, nil)\n}",
"func (r *RedisCli) Insert(jk string) error {\n\tk := r.Hash(jk)\n\terr := r.Client.Set(k, jk, time.Hour*24).Err()\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Printf(\"Insertion into redis, KEY: %s\", k)\n\treturn nil\n}",
"func (m *DHTModule) Store(key string, val string) {\n\n\tif m.IsAttached() {\n\t\tif err := m.Client.DHT().Store(key, val); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\treturn\n\t}\n\n\tctx, cn := context.WithTimeout(context.Background(), 60*time.Second)\n\tdefer cn()\n\tif err := m.dht.Store(ctx, dht2.MakeKey(key), []byte(val)); err != nil {\n\t\tpanic(errors.ReqErr(500, StatusCodeServerErr, \"key\", err.Error()))\n\t}\n}",
"func (c *creds) Store(key string, v interface{}) error {\n\tb, err := json.Marshal(v)\n\tif err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\n\terr = keyring.Set(c.service, key, string(b))\n\treturn errors.WithStack(err)\n}",
"func insertNewKeyValue(w http.ResponseWriter, req *http.Request) {\n\tkey := req.URL.Query().Get(\":key\")\n\tval, err := ioutil.ReadAll(req.Body)\n\treq.Body.Close()\n\tif err != nil {\n\t\tlog.Print(\"error: ioutil.ReadAll: \", err)\n\t\thttp.Error(w, \"Internal Server Error\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\terr = runInTransaction(func(tx *sql.Tx) error {\n\t\tif isKeyInDB(tx, key) {\n\t\t\treturn ErrKeyAlreadyExists\n\t\t}\n\t\t_, err = getStmt(tx, \"insertNewKeyValue\").Exec(key, val)\n\t\treturn err\n\t})\n\tif err == ErrKeyAlreadyExists {\n\t\thttp.Error(w, fmt.Sprintf(\"key %q already exists\", key), 400)\n\t} else if err != nil {\n\t\tlog.Print(\"error: insertNewKeyValue: \", err)\n\t\thttp.Error(w, \"Internal Server Error\", http.StatusInternalServerError)\n\t} else {\n\t\thttp.Error(w, \"\", http.StatusCreated)\n\t}\n}",
"func (session KeyValueSession) Set(key DatabaseKey, value interface{}) bool {\n\tbuffer := convertData(value)\n\tsession.store.tree.Set(key, buffer)\n\n\treturn true\n}",
"func (db *DB) Put(key []byte, value []byte) error {\n\tif len(key) > MaxKeyLength {\n\t\treturn errKeyTooLarge\n\t}\n\tif len(value) > MaxValueLength {\n\t\treturn errValueTooLarge\n\t}\n\th := db.hash(key)\n\tdb.metrics.Puts.Add(1)\n\tdb.mu.Lock()\n\tdefer db.mu.Unlock()\n\n\tsegID, offset, err := db.datalog.put(key, value)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tsl := slot{\n\t\thash: h,\n\t\tsegmentID: segID,\n\t\tkeySize: uint16(len(key)),\n\t\tvalueSize: uint32(len(value)),\n\t\toffset: offset,\n\t}\n\n\tif err := db.put(sl, key); err != nil {\n\t\treturn err\n\t}\n\n\tif db.syncWrites {\n\t\treturn db.sync()\n\t}\n\treturn nil\n}",
"func (local *Node) Store(key string, value []byte) (err error) {\n\tdone, err := local.Publish(key)\n\tif err != nil {\n\t\treturn err\n\t}\n\tlocal.blobstore.Put(key, value, done)\n\treturn nil\n}",
"func (b *BadgerDB) Set(key Key, value []byte) error {\n\tfullKey := append(key.Scope().Bytes(), key.ID()...)\n\n\terr := b.backend.Update(func(txn *badger.Txn) error {\n\t\treturn txn.Set(fullKey, value)\n\t})\n\n\treturn err\n}",
"func (s *MemStore) Store(mimetype string, k string, v string) error {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\tif curVal := s.items[k]; curVal != \"\" {\n\t\treturn KeyConflict\n\t}\n\ts.items[k] = v\n\ts.mimetype[k] = mimetype\n\treturn nil\n}",
"func (m *SearchBucket) SetKey(value *string)() {\n m.key = value\n}",
"func (dbm *DBManager) Save(bucket, key string, data interface{}) error {\n\tvar err error\n\n\tif err = dbm.openDB(); err != nil {\n\t\treturn err\n\t}\n\tdefer dbm.closeDB()\n\n\tif data == nil {\n\t\treturn errors.New(\"data is nil\")\n\t}\n\n\tsave := func(tx *boltsecTx) error {\n\t\tvar err error\n\t\tbkt := tx.Bucket([]byte(bucket))\n\n\t\tvalue, err := json.Marshal(data)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif dbm.cryptor != nil {\n\t\t\t//encrypt the content before store in the db\n\t\t\tenc, err := dbm.cryptor.encrypt(value)\n\t\t\tif err != nil {\n\t\t\t\treturn errors.New(\"Decrypt error from db\")\n\t\t\t}\n\n\t\t\tif err = bkt.Put([]byte(key), enc); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else {\n\t\t\tif err = bkt.Put([]byte(key), value); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t}\n\n\treturn dbm.db.update(save)\n}",
"func SaveKeyValue(key, value string, duration time.Duration) {\n\n\trdb := redis.NewClient(&redis.Options{\n\t\tAddr: \"localhost:15000\",\n\t\tPassword: \"\", // no password set\n\t\tDB: 0, // use default DB\n\t})\n\n\ttestping, err := rdb.Ping().Result()\n\tif err != nil {\n\t\tpanic(err)\n\t} else {\n\t\tfmt.Printf(\"Redis Server Connected with %s & Error: %e \\n\", testping, err)\n\t}\n\n\terr = rdb.Set(key, value, duration*(time.Second)).Err()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}",
"func (s *Store) Save(bucket, key, val []byte) (err error) {\n\treturn s.db.Update(func(tx *bolt.Tx) error {\n\t\tb := tx.Bucket(bucket)\n\t\treturn b.Put(key, val)\n\t})\n}",
"func (h *DBHandle) Put(key []byte, value []byte, sync bool) error {\n\treturn h.db.Put(constructLevelKey(h.dbName, key), value, sync)\n}",
"func (h *DBHandle) Put(key []byte, value []byte, sync bool) error {\n\treturn h.db.Put(constructLevelKey(h.dbName, key), value, sync)\n}",
"func storeStringIndex (txn *badger.Txn, key string, value []byte, prefix byte) error {\r\n\r\n\tindex := append ([]byte{prefix}, []byte(key)...)\r\n\treturn txn.Set(index, value)\r\n}",
"func (s *Store) Put(key string, value interface{},\n\toptions *store.WriteOptions) error {\n\tval, err := s.encode(value, s.cipherSuites, s.key[:])\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn s.Store.Put(key, val, options)\n}",
"func (d *Dam) Store(key Marshallable, value interface{}) error {\n\tk, err := hash(key)\n\tif err != nil {\n\t\treturn err\n\t}\n\te := &element{\n\t\tvalue: value,\n\t\tready: make(chan struct{}),\n\t}\n\tclose(e.ready)\n\td.freeze.Lock()\n\td.mutex.Lock()\n\td.storage[k] = e\n\td.mutex.Unlock()\n\td.freeze.Unlock()\n\treturn nil\n}",
"func (s *PostgresStore) Set(key, value interface{}) error {\n\ts.lock.Lock()\n\tdefer s.lock.Unlock()\n\n\ts.data[key] = value\n\treturn nil\n}",
"func (ds *MySQLDatastore) Put(ctx context.Context, key, value []byte) error {\n\t_, err := ds.db.Exec(`\n\t INSERT INTO extras (\n\t\t\tid,\n\t\t\tvalue\n\t\t)\n\t\tVALUES (?, ?)\n\t\tON DUPLICATE KEY UPDATE\n\t\t\tvalue = ?\n\t\t`, string(key), string(value), string(value))\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func (c *Context) Store(key string, value interface{}) {\n\tc.Data[key] = value\n}",
"func (d *Datastore) Put(key datastore.Key, value []byte) error {\n\treturn d.db.Update(func(tx *bbolt.Tx) error {\n\t\treturn tx.Bucket(d.bucket).Put(key.Bytes(), value)\n\t})\n}"
] | [
"0.67714393",
"0.6709693",
"0.6652686",
"0.66463214",
"0.64961",
"0.64282036",
"0.64141",
"0.64117914",
"0.63930196",
"0.636005",
"0.6357298",
"0.6352423",
"0.6337577",
"0.63271534",
"0.63177425",
"0.63129336",
"0.6247293",
"0.6224383",
"0.62135607",
"0.6200179",
"0.619796",
"0.61893153",
"0.618016",
"0.61586094",
"0.61545455",
"0.61414266",
"0.6138885",
"0.61345",
"0.61338866",
"0.61235285",
"0.6116937",
"0.60798055",
"0.6073628",
"0.60725206",
"0.6072005",
"0.60657036",
"0.60619885",
"0.6046233",
"0.60244685",
"0.6011003",
"0.5998494",
"0.5995136",
"0.5994162",
"0.5988837",
"0.5985522",
"0.59832716",
"0.59649456",
"0.5959122",
"0.5958694",
"0.5949882",
"0.5944378",
"0.59256876",
"0.5922952",
"0.59207124",
"0.5916985",
"0.59117025",
"0.59088457",
"0.5908741",
"0.59060925",
"0.5906075",
"0.5905768",
"0.590335",
"0.5895982",
"0.5892828",
"0.5889939",
"0.58892465",
"0.5887958",
"0.58873606",
"0.5884795",
"0.58824843",
"0.58803123",
"0.5871693",
"0.58713865",
"0.5868795",
"0.586741",
"0.58587617",
"0.58577824",
"0.5837763",
"0.5835574",
"0.58331025",
"0.5828146",
"0.58225983",
"0.58207285",
"0.5813644",
"0.5810976",
"0.5783884",
"0.57822365",
"0.5780342",
"0.5779811",
"0.5779524",
"0.57785684",
"0.5768248",
"0.5768248",
"0.57647794",
"0.5759067",
"0.5750889",
"0.5748344",
"0.5745096",
"0.5744142",
"0.5742206"
] | 0.5798247 | 85 |
Delete deletes a key from the db | func (mgr *LocalHashMapDBMgr) Delete(k common.Key) error {
glog.V(1).Infof("deleting %v from db", k)
delete(mgr.memMap, k)
return nil
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (db *FlatDatabase) Delete(key []byte) error { panic(\"not supported\") }",
"func (db *DB) Delete(key []byte) (err error) {\n\treturn db.LevigoDB.Delete(db.wo, key)\n}",
"func Del(key string) error {\n\treturn db.Update(func(txn *badger.Txn) error {\n\t\ttxn.Delete([]byte(key))\n\t\treturn nil\n\t})\n}",
"func (sq *SQ3Driver) Delete(key string) error {\n\t_, err := sq.DB.Exec(fmt.Sprintf(\"DELETE FROM %v WHERE key=$1\", dbTable), key)\n\treturn err\n}",
"func Delete(key string) error {\n\treturn DB.Delete(key)\n}",
"func Delete(key string) error {\n\tconn := db.Pool.Get()\n\tdefer conn.Close()\n\n\treturn db.Delete(conn, key)\n}",
"func (db *Database) Delete(key string) error {\n\tif db == nil || db.conn == nil {\n\t\treturn hord.ErrNoDial\n\t}\n\n\tif err := hord.ValidKey(key); err != nil {\n\t\treturn err\n\t}\n\n\terr := db.conn.Query(`DELETE FROM hord WHERE key = ?;`, key).Exec()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func Delete(key string) {\n\tdb.Update(func(tx *bolt.Tx) error {\n\t\tb := tx.Bucket([]byte(DefaultBucket))\n\t\terr := b.Delete([]byte(key))\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\treturn nil\n\t})\n}",
"func (tcdb *Teocdb) Delete(key string) (err error) {\n\t// Does not return err of tcdb.session.Query function\n\tif err = tcdb.session.Query(`DELETE data FROM map WHERE key = ?`,\n\t\tkey).Exec(); err != nil {\n\t}\n\treturn\n}",
"func (db *Database) Delete(key []byte) error {\n\treturn updateError(db.DB.Delete(key, nil))\n}",
"func delete(w http.ResponseWriter, r *http.Request) {\n \n key := r.FormValue(\"key\")\n \n fmt.Print(key)\n db, err := leveldb.OpenFile(\"db\", nil)\n \n if err != nil {\n fmt.Println(err)\n }\n err = db.Delete([]byte(key),nil)\n fmt.Println(err)\n\n defer db.Close()\n\n}",
"func (db *DB) Delete(wo *WriteOptions, key []byte) error {\n\tif db.closed {\n\t\tpanic(ErrDBClosed)\n\t}\n\n\tvar errStr *C.char\n\tvar k *C.char\n\tif len(key) != 0 {\n\t\tk = (*C.char)(unsafe.Pointer(&key[0]))\n\t}\n\n\tC.leveldb_delete(\n\t\tdb.Ldb, wo.Opt, k, C.size_t(len(key)), &errStr)\n\n\tif errStr != nil {\n\t\tgs := C.GoString(errStr)\n\t\tC.leveldb_free(unsafe.Pointer(errStr))\n\t\treturn DatabaseError(gs)\n\t}\n\treturn nil\n}",
"func (p *MemDB) Delete(key []byte) {\n\tp.Put(key, nil)\n}",
"func (db *BoltDB) Delete(key []byte) error {\n\terr := db.dbOpen()\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer db.dbClose()\n\n\treturn db.boltdb.Update(func(tx *bolt.Tx) error {\n\t\tbucket := tx.Bucket([]byte(db.bucketname))\n\t\tif bucket == nil {\n\t\t\treturn errors.NotFound{Message: string(key[:]) + \" does not exist\"}\n\t\t}\n\n\t\tv := bucket.Get(key)\n\t\tif len(v) == 0 {\n\t\t\treturn errors.NotFound{Message: string(key[:]) + \" does not exist\"}\n\t\t}\n\n\t\treturn bucket.Delete(key)\n\t})\n}",
"func (kv *KV) Delete(key []byte) error {\n\t_, err := kv.db.Exec(\n\t\tfmt.Sprintf(\"DELETE FROM %s WHERE id=?\", string(kv.table)),\n\t\tkv.id(key),\n\t)\n\treturn err\n}",
"func (db *TriasDB) Delete(key []byte) {\n\tdb.mtx.Lock()\n\tdefer db.mtx.Unlock()\n\n\tdb.DeleteNoLock(key)\n}",
"func (h *Handle) Delete(key []byte) error {\n\trv := C.unqlite_kv_delete(h.db, unsafe.Pointer(&key[0]), C.int(len(key)))\n\tif rv != C.UNQLITE_OK {\n\t\treturn Errno(rv)\n\t}\n\treturn nil\n}",
"func (h *DBHandle) Delete(key []byte, sync bool) error {\n\treturn h.db.Delete(constructLevelKey(h.dbName, key), sync)\n}",
"func (h *DBHandle) Delete(key []byte, sync bool) error {\n\treturn h.db.Delete(constructLevelKey(h.dbName, key), sync)\n}",
"func (db *DB) Delete(key interface{}, value interface{}) error {\n\treturn db.bolt.Update(func(tx *bolt.Tx) error {\n\t\treturn db.DeleteTx(tx, key, value)\n\t})\n}",
"func (r LevelDBRepository) Delete(key string) error {\n\treturn r.DB.Delete([]byte(key), nil)\n}",
"func (db *memorydb) Del(key []byte) error {\n\n\tif db.enableBatch {\n\t\tdb.batch.Del(key)\n\t} else {\n\t\tdb.writeLock <- struct{}{}\n\t\tdefer func() {\n\t\t\t<-db.writeLock\n\t\t}()\n\n\t\tdb.sm.Lock()\n\t\tdefer db.sm.Unlock()\n\n\t\tdelete(db.db, string(key))\n\t}\n\n\treturn nil\n}",
"func (b *BadgerDBStore) Delete(key []byte) error {\n\treturn b.db.Update(func(txn *badger.Txn) error {\n\t\treturn txn.Delete(key)\n\t})\n}",
"func (bs *badgerStore) Delete(key []byte) (err error) {\n\treturn bs.db.Update(func(txn *badger.Txn) error {\n\t\treturn txn.Delete(key)\n\t})\n}",
"func (db *DB) Delete(key string, value string) error {\n\treturn db.Collection[db.tablename].Remove(bson.D{{Name: key, Value: key}})\n}",
"func (s *Storage) Del(key []byte) error {\n\topts := gorocksdb.NewDefaultWriteOptions()\n\n\tdefer opts.Destroy()\n\n\treturn s.db.Delete(opts, key)\n}",
"func (db *DB) Delete(key []byte) error {\n\th := db.hash(key)\n\tdb.metrics.Dels.Add(1)\n\tdb.mu.Lock()\n\tdefer db.mu.Unlock()\n\tif err := db.del(h, key, true); err != nil {\n\t\treturn err\n\t}\n\tif db.syncWrites {\n\t\treturn db.sync()\n\t}\n\treturn nil\n}",
"func (o *APIKey) Delete(exec boil.Executor) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no APIKey provided for delete\")\n\t}\n\n\tif err := o.doBeforeDeleteHooks(exec); err != nil {\n\t\treturn err\n\t}\n\n\targs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), apiKeyPrimaryKeyMapping)\n\tsql := \"DELETE FROM \\\"api_keys\\\" WHERE \\\"id\\\"=$1\"\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\t_, err := exec.Exec(sql, args...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to delete from api_keys\")\n\t}\n\n\tif err := o.doAfterDeleteHooks(exec); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func (m *redisDB) Delete(key string) (err error) {\n\terr = m.client.Del(m.ctx, key).Err()\n\n\treturn\n}",
"func (t *Testzzz) Delete(ctx context.Context, key ...interface{}) error {\n\tvar err error\n\tvar dbConn *sql.DB\n\n\t// if deleted, bail\n\tif t._deleted {\n\t\treturn nil\n\t}\n\n\ttx, err := components.M.GetConnFromCtx(ctx)\n\tif err != nil {\n\t\tdbConn, err = components.M.GetMasterConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\ttableName, err := GetTestzzzTableName(key...)\n\tif err != nil {\n\t\treturn err\n\t}\n\t//1\n\n\t// sql query with composite primary key\n\tsqlstr := `UPDATE ` + tableName + ` SET is_del = 1 WHERE id = ?`\n\n\t// run query\n\tutils.GetTraceLog(ctx).Debug(\"DB\", zap.String(\"SQL\", fmt.Sprint(sqlstr, t.ID)))\n\tif tx != nil {\n\t\t_, err = tx.Exec(sqlstr, t.ID)\n\t} else {\n\t\t_, err = dbConn.Exec(sqlstr, t.ID)\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set deleted\n\tt._deleted = true\n\n\treturn nil\n}",
"func Delete(conn redis.Conn, key string) error {\n\t_, err := conn.Do(\"DEL\", key)\n\treturn err\n}",
"func (kv *KVStore) Delete(key string) error {\n\tpayload := kvPayload{\n\t\tKey: key,\n\t\tOp: OpDel,\n\t}\n\n\t_, err := kv.db.Add(&payload)\n\treturn err\n}",
"func (b *BadgerDB) Delete(key Key) error {\n\tfullKey := append(key.Scope().Bytes(), key.ID()...)\n\terr := b.backend.Update(func(txn *badger.Txn) error {\n\t\treturn txn.Delete(fullKey)\n\t})\n\n\treturn err\n}",
"func (r *rds) Del(ctx context.Context, key ...string) error {\n\t_, err := r.db.Del(ctx, key...).Result()\n\tif err != nil {\n\t\tzapLogger.Prepare(logger).\n\t\t\tDevelopment().\n\t\t\tLevel(zap.ErrorLevel).\n\t\t\tCommit(err.Error())\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (self *LevelDBStore) Delete(key []byte) error {\n\treturn self.db.Delete(key, nil)\n}",
"func (c *Conn) Delete(key []byte) error {\n\tresponse := c.client.Cmd(cmdDelete, key)\n\tif !isOK(response) {\n\t\treturn errx.Errorf(\"delete command failed\")\n\t}\n\treturn nil\n}",
"func (storage *Storage) Delete(key []byte) error {\n\tdelete(storage.Db, string(key))\n\treturn nil\n}",
"func (db *Database) Delete(key []byte) error {\n\tdb.lock.RLock()\n\tdefer db.lock.RUnlock()\n\n\tswitch {\n\tcase db.db == nil:\n\t\treturn database.ErrClosed\n\tcase db.corrupted():\n\t\treturn database.ErrAvoidCorruption\n\t}\n\n\terr := db.db.Delete(db.writeOptions, key)\n\tif err != nil {\n\t\tatomic.StoreUint64(&db.errored, 1)\n\t}\n\treturn err\n}",
"func Delete(ctx context.Context, key string) error {\n\terr := Client().Delete(ctx, key)\n\tTrace(\"Delete\", err, logrus.Fields{fieldKey: key})\n\treturn err\n}",
"func TestDelete(t *testing.T) {\n\tdb, err := Open(db_filename, \"c\")\n\tdefer db.Close()\n\tdefer os.Remove(db_filename)\n\n\tdb.Insert(\"foo\", \"bar\")\n\terr = db.Delete(\"foo\")\n\texists := db.Exists(\"foo\")\n\tif err != nil || exists {\n\t\tt.Error(\"Delete()ed key not removed\")\n\t}\n}",
"func (db DatabaseRedis) Delete(key string) error {\n\treturn db.Client.Del(key).Err()\n}",
"func (tx *Tx) Delete(key string) error {\n\te := newRecord([]byte(key), nil, StringRecord, StringRem)\n\ttx.addRecord(e)\n\n\treturn nil\n}",
"func Delete(key string){\n n := keyValue[key]\n n.val = \"\"\n n.hash = \"\"\n keyValue[key] = n\n}",
"func (r *Redis) Delete(k *protocol.Kite) error {\n\trKey := r.redisKey(k)\n\n\tr.log.Debug(\"DEL %s\", rKey)\n\n\tconn := r.db.Get()\n\tdefer conn.Close()\n\n\t_, err := conn.Do(\"DEL\", rKey)\n\n\treturn err\n}",
"func Remove(ctx context.Context, db *sql.DB, key []byte) error {\n\tctx, cancel := context.WithDeadline(ctx, time.Now().Add(3*time.Second))\n\tdefer cancel()\n\tquery := \"DELETE FROM keys WHERE key=?\"\n\t_, err := db.ExecContext(ctx, query, string(key))\n\tif err != nil {\n\t\treturn errors.Errorf(\"could not delete key=%q: %w\", string(key), err).WithField(\"query\", query)\n\t}\n\n\treturn nil\n}",
"func Delete(key string) error {\n\tc, err := Connect()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefer c.Close()\n\n\tif _, err := c.Do(\"DEL\", key); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func (kv *KV) Delete(key []byte) error {\n\treturn kv.db.Update(func(tx *buntdb.Tx) error {\n\t\t_, err := tx.Delete(gkv.Btos(key))\n\t\treturn err\n\t})\n}",
"func (s *PostgresStore) Delete(key interface{}) error {\n\ts.lock.Lock()\n\tdefer s.lock.Unlock()\n\n\tdelete(s.data, key)\n\treturn nil\n}",
"func (h *RedisHelper) Delete(key string) (err error) {\n\t_, err = h.Conn.Do(\"DEL\", []byte(key))\n\treturn\n}",
"func (dbInst *DB) Delete(cfHandle *gorocksdb.ColumnFamilyHandle, key []byte) error {\n\terr := dbInst.rocksDB.DeleteCF(dbInst.writeOpts, cfHandle, key)\n\tif err != nil {\n\t\tfmt.Println(\"Error while trying to delete key:\", key)\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (node *Node) delete(key int) error {\n\tnode.dataStoreLock.Lock()\n\tdefer node.dataStoreLock.Unlock()\n\t_, keyExists := node.hashTable[key]\n\tif keyExists {\n\t\tdelete(node.hashTable, key)\n\t\treturn nil\n\t} else {\n\t\treturn errors.New(\"Key with identifier\" + strconv.Itoa(key) + \"does not exist in table\")\n\t}\n}",
"func (r *RedisPersist) Delete(key string) error {\n\tif r == nil || r.conn == nil {\n\t\treturn nil\n\t}\n\t_, err := r.conn.HDel(r.mainKey, key).Result()\n\treturn err\n}",
"func (bd BoltDB) Del(bucket, key string) error {\n\treturn bd.db.Update(func(tx *bolt.Tx) error {\n\t\tb := tx.Bucket([]byte(bucket))\n\t\treturn b.Delete([]byte(key))\n\t})\n}",
"func (r *redisPersist) Delete(key string) error {\n\tif r == nil || r.conn == nil {\n\t\treturn nil\n\t}\n\t_, err := r.conn.HDel(r.mainKey, key).Result()\n\treturn err\n}",
"func (dbm *DBManager) Delete(bucket, key string) error {\n\tvar err error\n\n\tif err = dbm.openDB(); err != nil {\n\t\treturn err\n\t}\n\tdefer dbm.closeDB()\n\n\tif key == \"\" {\n\t\treturn errors.New(\"cannot delete, key is nil\")\n\t}\n\n\tdelete := func(tx *boltsecTx) error {\n\t\tbkt := tx.Bucket([]byte(bucket))\n\t\tif err := bkt.Delete([]byte(key)); err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t}\n\n\treturn dbm.db.update(delete)\n}",
"func (m *MonkeyWrench) Delete(table string, key spanner.Key) error {\n\treturn m.DeleteMulti(table, []spanner.Key{key})\n}",
"func (db *MemoryCache) Delete(key []byte) error {\n\tdb.lock.Lock()\n\tdefer db.lock.Unlock()\n\n\tif db.db == nil {\n\t\treturn NewMemCacheError(MemCacheClosedError, nil)\n\t}\n\tkeyStr := base58.Base58Check{}.Encode(key, 0x0)\n\tdelete(db.db, keyStr)\n\treturn nil\n}",
"func (fb *FlatBatch) Delete(key []byte) error { panic(\"not supported\") }",
"func (s *RedisStore) Delete(key interface{}) error {\n\t_, err := s.client.Del(key.(string)).Result()\n\treturn err\n}",
"func (d *Datastore) Delete(key datastore.Key) error {\n\treturn d.db.Update(func(tx *bbolt.Tx) error {\n\t\treturn tx.Bucket(d.bucket).Delete(key.Bytes())\n\t})\n}",
"func (db *MemoryStorage) Del(key []byte) error {\n\tdb.data.Delete(common.BytesToHex(key))\n\treturn nil\n}",
"func (s *Store) Delete(k string) error {\n\tif err := util.CheckKey(k); err != nil {\n\t\treturn err\n\t}\n\n\titem := Item{\n\t\tKey: k,\n\t\tTable: s.Sql.table,\n\t\tSplit: s.Sql.split,\n\t}\n\t_, err := s.Sql.engine.Delete(&item)\n\treturn err\n}",
"func (m *TokenManager) Delete(key string) error {\n\tstmt := Tokens.Delete().Where(Tokens.C[\"key\"].Equals(key))\n\trowsAffected, err := m.conn.MustExecute(stmt).RowsAffected()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"auth: error during rows affected: %s\", err)\n\t}\n\tif rowsAffected == 0 {\n\t\treturn fmt.Errorf(\"auth: token key %s was not deleted\", key)\n\t}\n\treturn nil\n}",
"func (s *Badger) Delete(k []byte) error {\n\terr := s.db.Update(func(txn *badger.Txn) error {\n\t\treturn txn.Delete(k)\n\t})\n\n\treturn err\n}",
"func (s *sqlStore) Delete(key string, opts ...store.DeleteOption) error {\n\tvar options store.DeleteOptions\n\tfor _, o := range opts {\n\t\to(&options)\n\t}\n\n\tdb, queries, err := s.db(options.Database, options.Table)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t_, err = db.Exec(s.options.Context, queries.Delete, key)\n\treturn err\n}",
"func (rc *Store) Delete(key string) error {\n\tif rc.conn == nil {\n\t\tif err := rc.connectInit(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn rc.conn.Delete(key)\n}",
"func (kv *keyValue) Delete(key string) error {\n\tkv.mu.Lock()\n\tdefer kv.mu.Unlock()\n\terr := kv.db.Remove(&bson.M{mgoKey: key})\n\tif err == mgo.ErrNotFound {\n\t\treturn nil\n\t}\n\treturn err\n}",
"func (mdl *Model) Delete(key interface{}) error {\n\tmdl.mux.Lock()\n\tdefer mdl.mux.Unlock()\n\tif std.ModelTypeList == mdl.GetType() {\n\t\tk := key.(int)\n\t\tif k > len(mdl.data) {\n\t\t\treturn errors.New(InvalidIndex, \"index '%d' out of range\", k)\n\t\t}\n\t\tmdl.data = append(mdl.data[:key.(int)-1], mdl.data[key.(int):]...)\n\t\treturn nil\n\t}\n\n\tk := key.(string)\n\tif idx, ok := mdl.hashIdx[k]; ok {\n\t\tmdl.data = append(mdl.data[:idx-1], mdl.data[idx:]...)\n\t\tdelete(mdl.hashIdx, k)\n\t\tdelete(mdl.idxHash, idx)\n\t\treturn nil\n\t}\n\treturn errors.New(InvalidIndex, \"index '%s' out of range\", k)\n}",
"func Delete(ctx *grumble.Context) error {\n\tclient, execCtx, cancel := newClientAndCtx(ctx, 5*time.Second)\n\tdefer cancel()\n\tval, err := client.Delete(execCtx, &ldProto.Key{Key: ctx.Args.String(\"key\")})\n\tif err != nil || val.Key == \"\" {\n\t\treturn err\n\t}\n\treturn exec(ctx, handleKeyValueReturned(val))\n}",
"func (ust *UsersShopTrace) Delete(ctx context.Context, key ...interface{}) error {\n\tvar err error\n\tvar dbConn *sql.DB\n\n\t// if deleted, bail\n\tif ust._deleted {\n\t\treturn nil\n\t}\n\n\ttx, err := components.M.GetConnFromCtx(ctx)\n\tif err != nil {\n\t\tdbConn, err = components.M.GetMasterConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\ttableName, err := GetUsersShopTraceTableName(key...)\n\tif err != nil {\n\t\treturn err\n\t}\n\t//1\n\n\t// sql query with composite primary key\n\tsqlstr := `UPDATE ` + tableName + ` SET is_del = 1 WHERE id = ?`\n\n\t// run query\n\tutils.GetTraceLog(ctx).Debug(\"DB\", zap.String(\"SQL\", fmt.Sprint(sqlstr, ust.ID)))\n\tif tx != nil {\n\t\t_, err = tx.Exec(sqlstr, ust.ID)\n\t} else {\n\t\t_, err = dbConn.Exec(sqlstr, ust.ID)\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set deleted\n\tust._deleted = true\n\n\treturn nil\n}",
"func (b *badgerDB) Delete(namespace string, key []byte) error {\n\tb.mutex.Lock()\n\tdefer b.mutex.Unlock()\n\n\tvar err error\n\tfor c := uint8(0); c < b.config.NumRetries; c++ {\n\t\terr = b.db.Update(func(txn *badger.Txn) error {\n\t\t\tk := append([]byte(namespace), key...)\n\t\t\treturn txn.Delete(k)\n\t\t})\n\t\tif err == nil {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn err\n}",
"func (DummyStore) Delete(key string) error {\n\treturn nil\n}",
"func (c *Cassandra) Delete(ctx context.Context, req *state.DeleteRequest) error {\n\treturn c.session.Query(fmt.Sprintf(\"DELETE FROM %s WHERE key = ?\", c.table), req.Key).WithContext(ctx).Exec()\n}",
"func (s *MongodbStore) Delete(key interface{}) error {\n\ts.lock.Lock()\n\tdefer s.lock.Unlock()\n\n\tdelete(s.data, key)\n\treturn nil\n}",
"func (hd *HelpDoc) Delete(ctx context.Context, key ...interface{}) error {\n\tvar err error\n\tvar dbConn *sql.DB\n\n\t// if deleted, bail\n\tif hd._deleted {\n\t\treturn nil\n\t}\n\n\ttx, err := components.M.GetConnFromCtx(ctx)\n\tif err != nil {\n\t\tdbConn, err = components.M.GetMasterConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\ttableName, err := GetHelpDocTableName(key...)\n\tif err != nil {\n\t\treturn err\n\t}\n\t//1\n\n\t// sql query with composite primary key\n\tsqlstr := `UPDATE ` + tableName + ` SET is_del = 1 WHERE id = ?`\n\n\t// run query\n\tutils.GetTraceLog(ctx).Debug(\"DB\", zap.String(\"SQL\", fmt.Sprint(sqlstr, hd.ID)))\n\tif tx != nil {\n\t\t_, err = tx.Exec(sqlstr, hd.ID)\n\t} else {\n\t\t_, err = dbConn.Exec(sqlstr, hd.ID)\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set deleted\n\thd._deleted = true\n\n\treturn nil\n}",
"func (s *Store) Delete(key string) {\n\ts.data.Del(key)\n}",
"func (m *MapDB) Delete(key string) (err error) {\n\tvar ok bool\n\tm.mux.Lock()\n\tdefer m.mux.Unlock()\n\n\tif m.closed {\n\t\terr = errors.ErrIsClosed\n\t\treturn\n\t}\n\n\tif _, ok = m.m[key]; !ok {\n\t\treturn ErrKeyDoesNotExist\n\t}\n\n\tif err = m.mrT.Txn(func(txn *mrT.Txn) (err error) {\n\t\ttxn.Delete([]byte(key))\n\t\treturn\n\t}); err != nil {\n\t\treturn\n\t}\n\n\tdelete(m.m, key)\n\treturn\n}",
"func DEL(key string) (err error) {\n\tconn := pool.Get()\n\tdefer conn.Close()\n\n\t_, err = conn.Do(\"DEL\", key)\n\treturn\n}",
"func (rs *Store) Delete(ctx context.Context, key interface{}) error {\n\trs.lock.Lock()\n\tdefer rs.lock.Unlock()\n\tdelete(rs.values, key)\n\treturn nil\n}",
"func DelKeyonBolt(dbFileName string, bucketName string, key string) error {\n\tdb, err := bolt.Open(dbFileName, 0666, nil)\n\tdefer db.Close()\n\n\tif err != nil {\n\t\tlog.ErrLog(err)\n\t}\n\n\tif err = db.Update(func(tx *bolt.Tx) error {\n\t\tb := tx.Bucket([]byte(bucketName))\n\t\terr := b.Delete([]byte(key))\n\t\treturn err\n\t}); err != nil {\n\t\tlog.ErrLog(err)\n\t}\n\n\treturn nil\n}",
"func (s *Syslog) Delete(ctx context.Context, key ...interface{}) error {\n\tvar err error\n\tvar dbConn *sql.DB\n\n\t// if deleted, bail\n\tif s._deleted {\n\t\treturn nil\n\t}\n\n\ttx, err := components.M.GetConnFromCtx(ctx)\n\tif err != nil {\n\t\tdbConn, err = components.M.GetMasterConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\ttableName, err := GetSyslogTableName(key...)\n\tif err != nil {\n\t\treturn err\n\t}\n\t//1\n\n\t// sql query with composite primary key\n\tsqlstr := `UPDATE ` + tableName + ` SET is_del = 1 WHERE id = ?`\n\n\t// run query\n\tutils.GetTraceLog(ctx).Debug(\"DB\", zap.String(\"SQL\", fmt.Sprint(sqlstr, s.ID)))\n\tif tx != nil {\n\t\t_, err = tx.Exec(sqlstr, s.ID)\n\t} else {\n\t\t_, err = dbConn.Exec(sqlstr, s.ID)\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set deleted\n\ts._deleted = true\n\n\treturn nil\n}",
"func (wu *WxUser) Delete(ctx context.Context, key ...interface{}) error {\n\tvar err error\n\tvar dbConn *sql.DB\n\n\t// if deleted, bail\n\tif wu._deleted {\n\t\treturn nil\n\t}\n\n\ttx, err := components.M.GetConnFromCtx(ctx)\n\tif err != nil {\n\t\tdbConn, err = components.M.GetMasterConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\ttableName, err := GetWxUserTableName(key...)\n\tif err != nil {\n\t\treturn err\n\t}\n\t//1\n\n\t// sql query with composite primary key\n\tsqlstr := `UPDATE ` + tableName + ` SET is_del = 1 WHERE id = ?`\n\n\t// run query\n\tutils.GetTraceLog(ctx).Debug(\"DB\", zap.String(\"SQL\", fmt.Sprint(sqlstr, wu.ID)))\n\tif tx != nil {\n\t\t_, err = tx.Exec(sqlstr, wu.ID)\n\t} else {\n\t\t_, err = dbConn.Exec(sqlstr, wu.ID)\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set deleted\n\twu._deleted = true\n\n\treturn nil\n}",
"func (m *SessionManager) Delete(key string) error {\n\tstmt := Sessions.Delete().Where(Sessions.C(\"key\").Equals(key))\n\treturn m.conn.Query(stmt)\n}",
"func (c Cache[T]) Delete(key string) error {\n\tdb, err := openDB(c.storage.storagePath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer db.Close()\n\n\treturn db.Update(func(tx *bolt.Tx) error {\n\t\tb := tx.Bucket([]byte(c.namespace))\n\t\tif b == nil {\n\t\t\treturn nil\n\t\t}\n\n\t\treturn b.Delete([]byte(key))\n\t})\n}",
"func (o *Kvstore) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {\n\tif o == nil {\n\t\treturn 0, errors.New(\"models: no Kvstore provided for delete\")\n\t}\n\n\tif err := o.doBeforeDeleteHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\n\targs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), kvstorePrimaryKeyMapping)\n\tsql := \"DELETE FROM \\\"kvstore\\\" WHERE \\\"member\\\"=$1 AND \\\"key\\\"=$2\"\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to delete from kvstore\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by delete for kvstore\")\n\t}\n\n\tif err := o.doAfterDeleteHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn rowsAff, nil\n}",
"func (b *Backend) Delete(ctx context.Context, key []byte) error {\n\tif len(key) == 0 {\n\t\treturn trace.BadParameter(\"missing parameter key\")\n\t}\n\n\tdocRef := b.svc.Collection(b.CollectionName).Doc(b.keyToDocumentID(key))\n\tif _, err := docRef.Delete(ctx, firestore.Exists); err != nil {\n\t\tif status.Code(err) == codes.NotFound {\n\t\t\treturn trace.NotFound(\"key %s does not exist\", string(key))\n\t\t}\n\n\t\treturn ConvertGRPCError(err)\n\t}\n\n\treturn nil\n}",
"func (db *DBDriver) Delete(ctx context.Context, member, key string, nonce []byte) error {\n\tctx, tx, err := dbtx.BeginTx(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer db.reapTx(tx)\n\n\tvar kv models.Kvstore\n\n\tif nonce != nil {\n\t\tkv = models.Kvstore{\n\t\t\tMember: member,\n\t\t\tKey: key,\n\t\t\tNonce: nonce,\n\t\t}\n\t} else {\n\t\tkv = models.Kvstore{\n\t\t\tMember: member,\n\t\t\tKey: key,\n\t\t}\n\t}\n\n\tres, err := kv.Delete(ctx, tx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif res == 0 {\n\t\tif nonce != nil {\n\t\t\t// return not equal here because we're not really sure if we're unset or not.\n\t\t\t// Instead of checking twice, this is safe and moderately sane.\n\t\t\treturn ErrNotEqual\n\t\t}\n\t\treturn ErrUnsetValue\n\t}\n\n\treturn tx.Commit()\n}",
"func (c Redis) Delete(key string) error {\n\treturn c.conn.Del(key).Err()\n}",
"func (sm safeMap) Delete(key string) {\n\tsm <- commandData{action: REMOVE, key: key}\n}",
"func (og *OrderGood) Delete(ctx context.Context, key ...interface{}) error {\n\tvar err error\n\tvar dbConn *sql.DB\n\n\t// if deleted, bail\n\tif og._deleted {\n\t\treturn nil\n\t}\n\n\ttx, err := components.M.GetConnFromCtx(ctx)\n\tif err != nil {\n\t\tdbConn, err = components.M.GetMasterConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\ttableName, err := GetOrderGoodTableName(key...)\n\tif err != nil {\n\t\treturn err\n\t}\n\t//1\n\n\t// sql query with composite primary key\n\tsqlstr := `UPDATE ` + tableName + ` SET is_del = 1 WHERE order_gid = ?`\n\n\t// run query\n\tutils.GetTraceLog(ctx).Debug(\"DB\", zap.String(\"SQL\", fmt.Sprint(sqlstr, og.OrderGid)))\n\tif tx != nil {\n\t\t_, err = tx.Exec(sqlstr, og.OrderGid)\n\t} else {\n\t\t_, err = dbConn.Exec(sqlstr, og.OrderGid)\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set deleted\n\tog._deleted = true\n\n\treturn nil\n}",
"func (g *GCache) Del(key string) error {\n\tg.db.Remove(key)\n\treturn nil\n}",
"func (ss *redisStore) Delete(key string) error {\n\ti, err := ss.rdb.Del(ctx, key).Result()\n\tif err != nil {\n\t\treturn err\n\t}\n\tif i == 0 {\n\t\treturn storage.ErrNoRecord\n\t}\n\treturn nil\n}",
"func (st *SessionStoreMySQL) Delete(key interface{}) error {\n\tst.lock.Lock()\n\tdefer st.lock.Unlock()\n\tdelete(st.values, key)\n\treturn nil\n}",
"func Delete(collection string, key string) error {\n\treturn getStorage().Delete(collection, key)\n}",
"func (u *UdMap) Del(key string) { delete(u.Data, key) }",
"func (s *RethinkDB) Delete(ctx context.Context, req *state.DeleteRequest) error {\n\tif req == nil || req.Key == \"\" {\n\t\treturn errors.New(\"invalid request, missing key\")\n\t}\n\n\treturn s.BulkDelete(ctx, []state.DeleteRequest{*req}, state.BulkStoreOpts{})\n}",
"func (txn *Txn) Delete(key []byte) error {\n\te := &Entry{\n\t\tKey: key,\n\t\tmeta: bitDelete,\n\t}\n\treturn txn.modify(e)\n}",
"func DeleteKey(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r)\n\tkey := vars[\"key\"]\n\tctx := r.Context()\n\tret, err := redis.Del(ctx, key)\n\tif err != nil {\n\t\thttp.Error(w, \"Redis fetch failed: \"+err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tfmt.Fprintf(w, strconv.Itoa(ret))\n}",
"func (store *RedisStore) Delete(key string) error {\n\terr := store.Client.Del(key).Err()\n\treturn err\n}",
"func (ms Memorystore) Delete(key string) error {\n\treturn ms.DeleteMulti([]string{key})\n}",
"func (s Storage) Delete(bucket, key string) error {\n\tif !s.Opened {\n\t\treturn fmt.Errorf(\"db must be opened before using it\")\n\t}\n\terr := s.DB.Update(func(tx *bolt.Tx) error {\n\t\tmBucket := tx.Bucket([]byte(bucket))\n\n\t\tif mBucket != nil {\n\t\t\terr := mBucket.Delete([]byte(key))\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t})\n\treturn err\n}"
] | [
"0.8436193",
"0.78931457",
"0.78192097",
"0.7810238",
"0.7771407",
"0.771081",
"0.76976925",
"0.7601846",
"0.7575751",
"0.75548416",
"0.74997205",
"0.74844617",
"0.7399691",
"0.73848826",
"0.7374052",
"0.7370087",
"0.7347871",
"0.7308298",
"0.7308298",
"0.7302377",
"0.73018545",
"0.73002535",
"0.72921765",
"0.72689426",
"0.72478426",
"0.7233453",
"0.7229996",
"0.72021925",
"0.72015554",
"0.71679145",
"0.7163043",
"0.71556",
"0.71522677",
"0.71436936",
"0.7134049",
"0.71127045",
"0.71124893",
"0.7100364",
"0.70965713",
"0.7038276",
"0.70360076",
"0.70331824",
"0.69942075",
"0.6988218",
"0.69770885",
"0.69748324",
"0.69722056",
"0.69476247",
"0.6946144",
"0.69361097",
"0.6927656",
"0.6927438",
"0.69235116",
"0.6916575",
"0.69110084",
"0.6890483",
"0.68740886",
"0.6873084",
"0.6870859",
"0.6859817",
"0.68579835",
"0.68372124",
"0.68242913",
"0.6822058",
"0.6816483",
"0.68085366",
"0.6795261",
"0.67932534",
"0.67923856",
"0.6761369",
"0.6754101",
"0.67519885",
"0.674987",
"0.6720056",
"0.67192525",
"0.67147034",
"0.6695757",
"0.66816896",
"0.6660991",
"0.66490316",
"0.66357535",
"0.66335684",
"0.66297746",
"0.66240036",
"0.6621546",
"0.6619692",
"0.6616829",
"0.66154486",
"0.66147417",
"0.6612181",
"0.6607904",
"0.6595109",
"0.6592462",
"0.6575816",
"0.65714365",
"0.656007",
"0.6559827",
"0.6558893",
"0.65579885",
"0.6553425",
"0.6548211"
] | 0.0 | -1 |
AtomicUpdate Updates the DB atomically with the provided ops. | func (mgr *LocalHashMapDBMgr) AtomicUpdate(ops []common.DBOp) error {
for i := 0; i < len(ops); i++ {
switch {
case ops[i].Op == common.DBOpStore:
mgr.Store(ops[i].K, ops[i].E)
case ops[i].Op == common.DBOpDelete:
mgr.Delete(ops[i].K)
case ops[i].Op == common.DBOpSetRoot:
mgr.SetRoot(ops[i].E.(*Base))
}
}
return nil
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func update(ctx context.Context, db transactor, fn func(*bolt.Tx) error) error {\n\ttx, ok := ctx.Value(transactionKey{}).(*bolt.Tx)\n\tif !ok {\n\t\treturn db.Update(fn)\n\t} else if !tx.Writable() {\n\t\treturn errors.Wrap(bolt.ErrTxNotWritable, \"unable to use transaction from context\")\n\t}\n\treturn fn(tx)\n}",
"func (db *DB) Update(ctx context.Context, fn func(*TransactionManager) error) error {\n\ttries := db.txretiries\n\tvar tx *TransactionManager\n\tvar err error\n\tfor {\n\t\ttx = db.BeginTransaction(true)\n\t\terr = fn(tx)\n\t\tif err != nil {\n\t\t\tbreak\n\t\t}\n\t\terr = tx.Commit()\n\t\tif err == nil {\n\t\t\tbreak\n\t\t}\n\t\tif err != badger.ErrConflict {\n\t\t\tbreak\n\t\t}\n\t\tif tries < 1 {\n\t\t\tif db.txretiries > 0 {\n\t\t\t\terr = ErrConflictRetriesOver\n\t\t\t} else {\n\t\t\t\terr = ErrConflict\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\t\ttries--\n\t\ttx.Discard()\n\t}\n\ttx.Discard()\n\n\tif err != nil {\n\t\tinslogger.FromContext(ctx).Errorln(\"DB Update error:\", err)\n\t}\n\treturn err\n}",
"func TestTaskDBConcurrentUpdate(t sktest.TestingT, db TaskDB) {\n\tctx := context.Background()\n\n\t// Insert a task.\n\tt1 := types.MakeTestTask(time.Now(), []string{\"a\", \"b\", \"c\", \"d\"})\n\trequire.NoError(t, db.PutTask(ctx, t1))\n\n\t// Retrieve a copy of the task.\n\tt1Cached, err := db.GetTaskById(ctx, t1.Id)\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, t1, t1Cached)\n\n\t// Update the original task.\n\tt1.Commits = []string{\"a\", \"b\"}\n\trequire.NoError(t, db.PutTask(ctx, t1))\n\n\t// Update the cached copy; should get concurrent update error.\n\tt1Cached.Status = types.TASK_STATUS_RUNNING\n\terr = db.PutTask(ctx, t1Cached)\n\trequire.True(t, IsConcurrentUpdate(err))\n\n\t{\n\t\t// DB should still have the old value of t1.\n\t\tt1Again, err := db.GetTaskById(ctx, t1.Id)\n\t\trequire.NoError(t, err)\n\t\tAssertDeepEqual(t, t1, t1Again)\n\t}\n\n\t// Insert a second task.\n\tt2 := types.MakeTestTask(time.Now(), []string{\"e\", \"f\"})\n\trequire.NoError(t, db.PutTask(ctx, t2))\n\n\t// Update t2 at the same time as t1Cached; should still get an error.\n\tt2Before := t2.Copy()\n\tt2.Status = types.TASK_STATUS_MISHAP\n\terr = db.PutTasks(ctx, []*types.Task{t2, t1Cached})\n\trequire.True(t, IsConcurrentUpdate(err))\n\n\t{\n\t\t// DB should still have the old value of t1 and t2.\n\t\tt1Again, err := db.GetTaskById(ctx, t1.Id)\n\t\trequire.NoError(t, err)\n\t\tAssertDeepEqual(t, t1, t1Again)\n\n\t\tt2Again, err := db.GetTaskById(ctx, t2.Id)\n\t\trequire.NoError(t, err)\n\t\tAssertDeepEqual(t, t2Before, t2Again)\n\t}\n}",
"func (tbl AssociationTable) Update(req require.Requirement, vv ...*Association) (int64, error) {\n\tif req == require.All {\n\t\treq = require.Exactly(len(vv))\n\t}\n\n\tvar count int64\n\td := tbl.Dialect()\n\tq := d.Quoter()\n\n\tfor _, v := range vv {\n\t\tvar iv interface{} = v\n\t\tif hook, ok := iv.(sqlapi.CanPreUpdate); ok {\n\t\t\terr := hook.PreUpdate()\n\t\t\tif err != nil {\n\t\t\t\treturn count, tbl.Logger().LogError(err)\n\t\t\t}\n\t\t}\n\n\t\tb := dialect.Adapt(&bytes.Buffer{})\n\t\tb.WriteString(\"UPDATE \")\n\t\ttbl.quotedNameW(b)\n\t\tb.WriteString(\" SET \")\n\n\t\targs, err := constructAssociationTableUpdate(tbl, b, v)\n\t\tif err != nil {\n\t\t\treturn count, err\n\t\t}\n\t\targs = append(args, v.Id)\n\n\t\tb.WriteString(\" WHERE \")\n\t\tq.QuoteW(b, tbl.pk)\n\t\tb.WriteString(\"=?\")\n\n\t\tquery := b.String()\n\t\tn, err := tbl.Exec(nil, query, args...)\n\t\tif err != nil {\n\t\t\treturn count, err\n\t\t}\n\t\tcount += n\n\t}\n\n\treturn count, tbl.Logger().LogIfError(require.ErrorIfExecNotSatisfiedBy(req, count))\n}",
"func update(ctx context.Context, tx *sqlx.Tx, todo *Todo) error {\n\t_, err := tx.NamedExecContext(ctx, updateTodo, todo)\n\treturn err\n}",
"func (bc *Blockchain) dbUpdate(ps ...bucket.TxHandler) error {\n\treturn bc.db.Update(func(tx *bolt.Tx) error {\n\t\trollbackFuncs := []bucket.Rollback{}\n\t\tfor _, p := range ps {\n\t\t\trb, err := p(tx)\n\t\t\tif err != nil {\n\t\t\t\t// rollback previous updates if any\n\t\t\t\tfor _, r := range rollbackFuncs {\n\t\t\t\t\tr()\n\t\t\t\t}\n\t\t\t\treturn err\n\t\t\t}\n\t\t\trollbackFuncs = append(rollbackFuncs, rb)\n\t\t}\n\n\t\treturn nil\n\t})\n}",
"func (db *boltsecDB) update(fn func(*boltsecTx) error) error {\n\twrapper := func(tx *bolt.Tx) error {\n\t\treturn fn(&boltsecTx{tx})\n\t}\n\treturn db.DB.Update(wrapper)\n}",
"func (t *Transaction) Update(list ...interface{}) (int64, error) {\n\treturn update(t.dbmap, t, list...)\n}",
"func (o *Kvstore) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tkvstoreUpdateCacheMut.RLock()\n\tcache, cached := kvstoreUpdateCache[key]\n\tkvstoreUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tkvstoreAllColumns,\n\t\t\tkvstorePrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update kvstore, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"kvstore\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, kvstorePrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(kvstoreType, kvstoreMapping, append(wl, kvstorePrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update kvstore row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for kvstore\")\n\t}\n\n\tif !cached {\n\t\tkvstoreUpdateCacheMut.Lock()\n\t\tkvstoreUpdateCache[key] = cache\n\t\tkvstoreUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}",
"func (e *SqlExecutor) Update(list ...interface{}) (int64, error) {\n\thook := e.db.ExecutorHook()\n\tvar rv int64\n\tfor _, item := range list {\n\t\tvar qArg queryArgs\n\t\tbuilder, err := buildUpdate(e.dbp, item.(Model))\n\t\tif err != nil {\n\t\t\treturn rv, err\n\t\t}\n\t\tqArg.query, qArg.args, err = builder.ToSql()\n\t\tif err != nil {\n\t\t\treturn rv, err\n\t\t}\n\t\thook.BeforeUpdate(e.ctx, qArg.query, qArg.args...)\n\t\tv, err := e.SqlExecutor.Update(item)\n\t\tif err != nil {\n\t\t\treturn rv, err\n\t\t}\n\t\trv += v\n\t\thook.AfterUpdate(e.ctx, qArg.query, qArg.args...)\n\t}\n\treturn rv, nil\n}",
"func (blt Bolt) Update(execute dbtx.Execute) error {\n\treturn blt.db.Update(func(tx *b.Tx) error {\n\t\treturn execute(tx.Bucket(blt.Bucket))\n\t})\n}",
"func (o *Store) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\to.UpdatedAt = currTime\n\t}\n\n\tvar err error\n\tkey := makeCacheKey(columns, nil)\n\tstoreUpdateCacheMut.RLock()\n\tcache, cached := storeUpdateCache[key]\n\tstoreUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tstoreAllColumns,\n\t\t\tstorePrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update stores, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"stores\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, storePrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(storeType, storeMapping, append(wl, storePrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update stores row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for stores\")\n\t}\n\n\tif !cached {\n\t\tstoreUpdateCacheMut.Lock()\n\t\tstoreUpdateCache[key] = cache\n\t\tstoreUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, nil\n}",
"func (db *DB) update(fn func(*Tx) error) error {\n\treturn db.DB.Update(func(tx *bolt.Tx) error {\n\t\treturn fn(&Tx{tx})\n\t})\n}",
"func (uqs ControlUpdateQS) Exec(db models.DBInterface) (int64, error) {\n\tif len(uqs.updates) == 0 {\n\t\treturn 0, nil\n\t}\n\n\tc := &models.PositionalCounter{}\n\n\tvar params []interface{}\n\n\tvar sets []string\n\tfor _, set := range uqs.updates {\n\t\ts, p := set.GetConditionFragment(c)\n\n\t\tsets = append(sets, s)\n\t\tparams = append(params, p...)\n\t}\n\n\tws, wp := ControlQS{condFragments: uqs.condFragments}.whereClause(c)\n\n\tst := `UPDATE \"heatcontrol_control\" SET ` + strings.Join(sets, \", \") + ws\n\n\tparams = append(params, wp...)\n\n\tresult, err := db.Exec(st, params...)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn result.RowsAffected()\n}",
"func (upd *Update) Execute(vcursor VCursor, bindVars map[string]*querypb.BindVariable, wantfields bool) (*sqltypes.Result, error) {\n\tif upd.QueryTimeout != 0 {\n\t\tcancel := vcursor.SetContextTimeout(time.Duration(upd.QueryTimeout) * time.Millisecond)\n\t\tdefer cancel()\n\t}\n\n\tswitch upd.Opcode {\n\tcase Unsharded:\n\t\treturn upd.execUpdateUnsharded(vcursor, bindVars)\n\tcase Equal:\n\t\treturn upd.execUpdateEqual(vcursor, bindVars)\n\tcase In:\n\t\treturn upd.execUpdateIn(vcursor, bindVars)\n\tcase Scatter:\n\t\treturn upd.execUpdateByDestination(vcursor, bindVars, key.DestinationAllShards{})\n\tcase ByDestination:\n\t\treturn upd.execUpdateByDestination(vcursor, bindVars, upd.TargetDestination)\n\tdefault:\n\t\t// Unreachable.\n\t\treturn nil, fmt.Errorf(\"unsupported opcode: %v\", upd)\n\t}\n}",
"func (o *Doc) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tqueries.SetScanner(&o.UpdatedAt, currTime)\n\t}\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tdocUpdateCacheMut.RLock()\n\tcache, cached := docUpdateCache[key]\n\tdocUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tdocAllColumns,\n\t\t\tdocPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update doc, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `doc` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, docPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(docType, docMapping, append(wl, docPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update doc row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for doc\")\n\t}\n\n\tif !cached {\n\t\tdocUpdateCacheMut.Lock()\n\t\tdocUpdateCache[key] = cache\n\t\tdocUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}",
"func (s BoltStore) BatchUpdate(ids []interface{}, data []interface{}, store string, opts ObjectStoreOptions) (err error) {\n\treturn ErrNotImplemented\n}",
"func (o *Task) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\to.UpdatedAt = currTime\n\t}\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\ttaskUpdateCacheMut.RLock()\n\tcache, cached := taskUpdateCache[key]\n\ttaskUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\ttaskAllColumns,\n\t\t\ttaskPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update tasks, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"tasks\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, taskPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(taskType, taskMapping, append(wl, taskPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update tasks row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for tasks\")\n\t}\n\n\tif !cached {\n\t\ttaskUpdateCacheMut.Lock()\n\t\ttaskUpdateCache[key] = cache\n\t\ttaskUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}",
"func (o *Transaction) Update(exec boil.Executor, whitelist ...string) error {\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\to.UpdatedAt.Time = currTime\n\to.UpdatedAt.Valid = true\n\n\tvar err error\n\tkey := makeCacheKey(whitelist, nil)\n\ttransactionUpdateCacheMut.RLock()\n\tcache, cached := transactionUpdateCache[key]\n\ttransactionUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := strmangle.UpdateColumnSet(\n\t\t\ttransactionColumns,\n\t\t\ttransactionPrimaryKeyColumns,\n\t\t\twhitelist,\n\t\t)\n\n\t\tif len(whitelist) == 0 {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn errors.New(\"models: unable to update transactions, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `transactions` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, transactionPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(transactionType, transactionMapping, append(wl, transactionPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\t_, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to update transactions row\")\n\t}\n\n\tif !cached {\n\t\ttransactionUpdateCacheMut.Lock()\n\t\ttransactionUpdateCache[key] = cache\n\t\ttransactionUpdateCacheMut.Unlock()\n\t}\n\n\treturn nil\n}",
"func UpdateWithTx(ctx context.Context, dr *firestore.DocumentRef, data []firestore.Update, preconds ...firestore.Precondition) error {\n\ttx, ok := GetTx(ctx)\n\tif ok {\n\t\treturn tx.Update(dr, data, preconds...)\n\t} else {\n\t\t_, err := dr.Update(ctx, data, preconds...)\n\t\treturn err\n\t}\n}",
"func (d *DbBackendCouch) Update(params dragonfruit.QueryParams, operation int) (interface{},\n\terror) {\n\n\tpathmap, doc, id, v, err := d.getPathSpecificStuff(params)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tnewdoc, partial, err := findSubDoc(pathmap[1:],\n\t\tparams,\n\t\treflect.ValueOf(doc.Value),\n\t\treflect.ValueOf(v),\n\t\toperation)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdatabase := getDatabaseName(params)\n\t_, out, err := d.save(database, id, newdoc.Interface())\n\tif err != nil {\n\t\treturn out, err\n\t}\n\tout, err = sanitizeDoc(partial.Interface())\n\n\treturn out, err\n\n}",
"func (u *updater) Update(ctx context.Context, at int64, payload []byte) error {\n\terr := u.Put(ctx, &index{u.next}, at, payload)\n\tif err != nil {\n\t\treturn err\n\t}\n\tu.next++\n\treturn nil\n}",
"func (u *updater) Update(ctx context.Context, at int64, payload []byte) error {\n\terr := u.Put(ctx, &index{u.next}, at, payload)\n\tif err != nil {\n\t\treturn err\n\t}\n\tu.next++\n\treturn nil\n}",
"func (db *DB) Update(fn func(*TransactionManager) error) error {\n\ttries := db.txretiries\n\tvar tx *TransactionManager\n\tvar err error\n\tfor {\n\t\ttx = db.BeginTransaction(true)\n\t\terr = fn(tx)\n\t\tif err != nil {\n\t\t\tbreak\n\t\t}\n\t\terr = tx.Commit()\n\t\tif err == nil {\n\t\t\tbreak\n\t\t}\n\t\tif err != badger.ErrConflict {\n\t\t\tbreak\n\t\t}\n\t\tif tries < 1 {\n\t\t\tif db.txretiries > 0 {\n\t\t\t\terr = ErrConflictRetriesOver\n\t\t\t} else {\n\t\t\t\tlog.Info(\"local storage transaction conflict\")\n\t\t\t\terr = ErrConflict\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\t\ttries--\n\t\ttx.Discard()\n\t}\n\ttx.Discard()\n\treturn err\n}",
"func (o *Transaction) Update(exec boil.Executor, whitelist ...string) error {\n\tvar err error\n\tkey := makeCacheKey(whitelist, nil)\n\ttransactionUpdateCacheMut.RLock()\n\tcache, cached := transactionUpdateCache[key]\n\ttransactionUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := strmangle.UpdateColumnSet(\n\t\t\ttransactionColumns,\n\t\t\ttransactionPrimaryKeyColumns,\n\t\t\twhitelist,\n\t\t)\n\n\t\tif len(wl) == 0 {\n\t\t\treturn errors.New(\"model: unable to update transaction, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `transaction` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, transactionPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(transactionType, transactionMapping, append(wl, transactionPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\t_, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"model: unable to update transaction row\")\n\t}\n\n\tif !cached {\n\t\ttransactionUpdateCacheMut.Lock()\n\t\ttransactionUpdateCache[key] = cache\n\t\ttransactionUpdateCacheMut.Unlock()\n\t}\n\n\treturn nil\n}",
"func (tbl DbCompoundTable) UpdateFields(req require.Requirement, wh where.Expression, fields ...sql.NamedArg) (int64, error) {\n\treturn support.UpdateFields(tbl, req, wh, fields...)\n}",
"func (d *dbBase) UpdateBatch(ctx context.Context, q dbQuerier, qs *querySet, mi *modelInfo, cond *Condition, params Params, tz *time.Location) (int64, error) {\n\tcolumns := make([]string, 0, len(params))\n\tvalues := make([]interface{}, 0, len(params))\n\tfor col, val := range params {\n\t\tif fi, ok := mi.fields.GetByAny(col); !ok || !fi.dbcol {\n\t\t\tpanic(fmt.Errorf(\"wrong field/column name `%s`\", col))\n\t\t} else {\n\t\t\tcolumns = append(columns, fi.column)\n\t\t\tvalues = append(values, val)\n\t\t}\n\t}\n\n\tif len(columns) == 0 {\n\t\tpanic(fmt.Errorf(\"update params cannot empty\"))\n\t}\n\n\ttables := newDbTables(mi, d.ins)\n\tvar specifyIndexes string\n\tif qs != nil {\n\t\ttables.parseRelated(qs.related, qs.relDepth)\n\t\tspecifyIndexes = tables.getIndexSql(mi.table, qs.useIndex, qs.indexes)\n\t}\n\n\twhere, args := tables.getCondSQL(cond, false, tz)\n\n\tvalues = append(values, args...)\n\n\tjoin := tables.getJoinSQL()\n\n\tvar query, T string\n\n\tQ := d.ins.TableQuote()\n\n\tif d.ins.SupportUpdateJoin() {\n\t\tT = \"T0.\"\n\t}\n\n\tcols := make([]string, 0, len(columns))\n\n\tfor i, v := range columns {\n\t\tcol := fmt.Sprintf(\"%s%s%s%s\", T, Q, v, Q)\n\t\tif c, ok := values[i].(colValue); ok {\n\t\t\tswitch c.opt {\n\t\t\tcase ColAdd:\n\t\t\t\tcols = append(cols, col+\" = \"+col+\" + ?\")\n\t\t\tcase ColMinus:\n\t\t\t\tcols = append(cols, col+\" = \"+col+\" - ?\")\n\t\t\tcase ColMultiply:\n\t\t\t\tcols = append(cols, col+\" = \"+col+\" * ?\")\n\t\t\tcase ColExcept:\n\t\t\t\tcols = append(cols, col+\" = \"+col+\" / ?\")\n\t\t\tcase ColBitAnd:\n\t\t\t\tcols = append(cols, col+\" = \"+col+\" & ?\")\n\t\t\tcase ColBitRShift:\n\t\t\t\tcols = append(cols, col+\" = \"+col+\" >> ?\")\n\t\t\tcase ColBitLShift:\n\t\t\t\tcols = append(cols, col+\" = \"+col+\" << ?\")\n\t\t\tcase ColBitXOR:\n\t\t\t\tcols = append(cols, col+\" = \"+col+\" ^ ?\")\n\t\t\tcase ColBitOr:\n\t\t\t\tcols = append(cols, col+\" = \"+col+\" | ?\")\n\t\t\t}\n\t\t\tvalues[i] = c.value\n\t\t} else {\n\t\t\tcols = append(cols, col+\" = ?\")\n\t\t}\n\t}\n\n\tsets := strings.Join(cols, \", \") + \" \"\n\n\tif d.ins.SupportUpdateJoin() {\n\t\tquery = fmt.Sprintf(\"UPDATE %s%s%s T0 %s%sSET %s%s\", Q, mi.table, Q, specifyIndexes, join, sets, where)\n\t} else {\n\t\tsupQuery := fmt.Sprintf(\"SELECT T0.%s%s%s FROM %s%s%s T0 %s%s%s\",\n\t\t\tQ, mi.fields.pk.column, Q,\n\t\t\tQ, mi.table, Q,\n\t\t\tspecifyIndexes, join, where)\n\t\tquery = fmt.Sprintf(\"UPDATE %s%s%s SET %sWHERE %s%s%s IN ( %s )\", Q, mi.table, Q, sets, Q, mi.fields.pk.column, Q, supQuery)\n\t}\n\n\td.ins.ReplaceMarks(&query)\n\tres, err := q.ExecContext(ctx, query, values...)\n\tif err == nil {\n\t\treturn res.RowsAffected()\n\t}\n\treturn 0, err\n}",
"func updateBatch(db *IndexerDb, updateQuery string, data [][]interface{}) error {\n\tdb.accountingLock.Lock()\n\tdefer db.accountingLock.Unlock()\n\n\ttx, err := db.db.Begin()\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer tx.Rollback() // ignored if .Commit() first\n\n\tupdate, err := tx.Prepare(updateQuery)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error preparing update query: %v\", err)\n\t}\n\tdefer update.Close()\n\n\tfor _, txpr := range data {\n\t\t_, err = update.Exec(txpr...)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"problem updating row (%v): %v\", txpr, err)\n\t\t}\n\t}\n\n\treturn tx.Commit()\n}",
"func (o WithdrawalCryptoSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"sqlite3: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), withdrawalCryptoPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"withdrawal_crypto\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, withdrawalCryptoPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"sqlite3: unable to update all in withdrawalCrypto slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"sqlite3: unable to retrieve rows affected all in update all withdrawalCrypto\")\n\t}\n\treturn rowsAff, nil\n}",
"func (db *DB) Update(fn func(*Tx) error) error {\n\tt, err := db.Begin(true)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Make sure the transaction rolls back in the event of a panic.\n\tdefer func() {\n\t\tif t.db != nil {\n\t\t\tt.rollback()\n\t\t}\n\t}()\n\n\t// Mark as a managed tx so that the inner function cannot manually commit.\n\tt.managed = true\n\n\t// If an error is returned from the function then rollback and return error.\n\terr = fn(t)\n\tt.managed = false\n\tif err != nil {\n\t\t_ = t.Rollback()\n\t\treturn err\n\t}\n\n\treturn t.Commit()\n}",
"func TestApplyUpdates(t *testing.T) {\n\tif testing.Short() {\n\t\tt.SkipNow()\n\t}\n\tt.Parallel()\n\n\tt.Run(\"TestApplyUpdates\", func(t *testing.T) {\n\t\tsiadir, err := newTestDir(t.Name())\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\ttestApply(t, siadir, ApplyUpdates)\n\t})\n\tt.Run(\"TestSiaDirApplyUpdates\", func(t *testing.T) {\n\t\tsiadir, err := newTestDir(t.Name())\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\ttestApply(t, siadir, siadir.applyUpdates)\n\t})\n\tt.Run(\"TestCreateAndApplyTransaction\", func(t *testing.T) {\n\t\tsiadir, err := newTestDir(t.Name())\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\ttestApply(t, siadir, siadir.createAndApplyTransaction)\n\t})\n}",
"func (o *Utxo) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tutxoUpdateCacheMut.RLock()\n\tcache, cached := utxoUpdateCache[key]\n\tutxoUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tutxoAllColumns,\n\t\t\tutxoPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update utxo, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"utxo\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, utxoPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(utxoType, utxoMapping, append(wl, utxoPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update utxo row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for utxo\")\n\t}\n\n\tif !cached {\n\t\tutxoUpdateCacheMut.Lock()\n\t\tutxoUpdateCache[key] = cache\n\t\tutxoUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}",
"func (p *AutoCommitter) Update(pair []interface{}) (e error) {\n\tif p.started {\n\t\tp.docsUpdate <- pair\n\t} else {\n\t\te = errors.New(fmt.Sprintf(\"AutoCommitter-%s(%s)_is_closed\", p.name, p.coll))\n\t}\n\treturn\n}",
"func (c *Client) Update(filename string, value Update, values ...Update) error {\n\targs := make([]interface{}, len(values)+2)\n\targs[0] = filename\n\targs[1] = value\n\tfor i, v := range values {\n\t\targs[i+2] = v\n\t}\n\t_, err := c.ExecCmd(NewCmd(\"update\").WithArgs(args...))\n\treturn err\n}",
"func (o *Item) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tkey := makeCacheKey(columns, nil)\n\titemUpdateCacheMut.RLock()\n\tcache, cached := itemUpdateCache[key]\n\titemUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\titemAllColumns,\n\t\t\titemPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update items, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"items\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", 0, itemPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(itemType, itemMapping, append(wl, itemPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update items row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for items\")\n\t}\n\n\tif !cached {\n\t\titemUpdateCacheMut.Lock()\n\t\titemUpdateCache[key] = cache\n\t\titemUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, nil\n}",
"func (ust *UsersShopTrace) Update(ctx context.Context, key ...interface{}) error {\n\tvar err error\n\tvar dbConn *sql.DB\n\n\t// if deleted, bail\n\tif ust._deleted {\n\t\treturn errors.New(\"update failed: marked for deletion\")\n\t}\n\n\ttx, err := components.M.GetConnFromCtx(ctx)\n\tif err != nil {\n\t\tdbConn, err = components.M.GetMasterConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\ttableName, err := GetUsersShopTraceTableName(key...)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// sql query\n\tsqlstr := `UPDATE ` + tableName + ` SET ` +\n\t\t`openid = ?, unionid = ?, appid = ?, uid = ?, fid = ?, sid = ?, updated = ?` +\n\t\t` WHERE id = ?`\n\n\t// run query\n\tutils.GetTraceLog(ctx).Debug(\"DB\", zap.String(\"SQL\", fmt.Sprint(sqlstr, ust.Openid, ust.Unionid, ust.Appid, ust.UID, ust.Fid, ust.Sid, ust.Updated, ust.ID)))\n\tif tx != nil {\n\t\t_, err = tx.Exec(sqlstr, ust.Openid, ust.Unionid, ust.Appid, ust.UID, ust.Fid, ust.Sid, ust.Updated, ust.ID)\n\t} else {\n\t\t_, err = dbConn.Exec(sqlstr, ust.Openid, ust.Unionid, ust.Appid, ust.UID, ust.Fid, ust.Sid, ust.Updated, ust.ID)\n\t}\n\treturn err\n}",
"func (u *__Notify_Updater) Update(db XODB) (int, error) {\n\tvar err error\n\n\tvar updateArgs []interface{}\n\tvar sqlUpdateArr []string\n\t/*for up, newVal := range u.updates {\n\t sqlUpdateArr = append(sqlUpdateArr, up)\n\t updateArgs = append(updateArgs, newVal)\n\t}*/\n\tfor _, up := range u.updates {\n\t\tsqlUpdateArr = append(sqlUpdateArr, up.col)\n\t\tupdateArgs = append(updateArgs, up.val)\n\t}\n\tsqlUpdate := strings.Join(sqlUpdateArr, \",\")\n\n\tsqlWherrs, whereArgs := whereClusesToSql(u.wheres, u.whereSep)\n\n\tvar allArgs []interface{}\n\tallArgs = append(allArgs, updateArgs...)\n\tallArgs = append(allArgs, whereArgs...)\n\n\tsqlstr := `UPDATE sun.notify SET ` + sqlUpdate\n\n\tif len(strings.Trim(sqlWherrs, \" \")) > 0 { //2 for safty\n\t\tsqlstr += \" WHERE \" + sqlWherrs\n\t}\n\n\tif LogTableSqlReq.Notify {\n\t\tXOLog(sqlstr, allArgs)\n\t}\n\tres, err := db.Exec(sqlstr, allArgs...)\n\tif err != nil {\n\t\tif LogTableSqlReq.Notify {\n\t\t\tXOLogErr(err)\n\t\t}\n\t\treturn 0, err\n\t}\n\n\tnum, err := res.RowsAffected()\n\tif err != nil {\n\t\tif LogTableSqlReq.Notify {\n\t\t\tXOLogErr(err)\n\t\t}\n\t\treturn 0, err\n\t}\n\n\treturn int(num), nil\n}",
"func TestAtomic(t *testing.T) {\n\tvar ops uint64 = 0\n\n\tfor i := 0; i < 8; i++ {\n\t\tgo func() {\n\t\t\tfor {\n\t\t\t\tatomic.AddUint64(&ops, 1)\n\t\t\t\truntime.Gosched()\n\t\t\t}\n\t\t}()\n\t}\n\ttime.Sleep(1 * time.Second)\n\topsFinal := atomic.LoadUint64(&ops)\n\tt.Log(\"ops:\", opsFinal)\n}",
"func (o KvstoreSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), kvstorePrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"kvstore\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, kvstorePrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in kvstore slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all kvstore\")\n\t}\n\treturn rowsAff, nil\n}",
"func (o DocSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), docPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `doc` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, docPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in doc slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all doc\")\n\t}\n\treturn rowsAff, nil\n}",
"func (ix *IndexedBucket) Update(inputRecs []Record, writeFn WriteFn) error {\n\treturn ix.writeRecs(inputRecs, writeFn, nil)\n}",
"func (so *SQLOrderItemStore) Update(oi *model.OrderItem) (*model.OrderItem, error) {\n\toi.UpdatedAt = time.Now().UnixNano()\n\t_, err := so.SQLStore.Tx.Update(oi)\n\treturn oi, err\n}",
"func (r *DarwinTimetable) Update(f func(*bolt.Tx) error) error {\n\treturn r.db.Update(f)\n}",
"func (mm *Model) Update(selector, update interface{}, keys ...string) error {\n\treturn mm.execute(func(c CachedCollection) error {\n\t\treturn c.Update(selector, update, keys...)\n\t})\n}",
"func (store TodoStore) Update(_ sqlx.Ext, update gtimer.Todo) (gtimer.Todo, error) {\n\ttodo, err := store.Get(update.ID)\n\tif err != nil {\n\t\treturn gtimer.Todo{}, err\n\t}\n\tif update.Status != \"completed\" && update.Status != \"active\" {\n\t\treturn gtimer.Todo{}, fmt.Errorf(\"invalid status: %s\", update.Status)\n\t}\n\ttodo.Title = update.Title\n\ttodo.Status = update.Status\n\ttodo.Updated = time.Now()\n\tstore[todo.ID] = todo\n\treturn todo, nil\n}",
"func Update(ctx context.Context, tx pgx.Tx, sb sq.UpdateBuilder) (int64, error) {\n\tq, vs, err := sb.ToSql()\n\tif err != nil {\n\t\treturn -1, err\n\t}\n\ttag, err := tx.Exec(ctx, q, vs...)\n\tif err != nil {\n\t\treturn -1, err\n\t}\n\treturn tag.RowsAffected(), nil\n}",
"func (o *Stock) Update(exec boil.Executor, whitelist ...string) error {\n\tvar err error\n\tkey := makeCacheKey(whitelist, nil)\n\tstockUpdateCacheMut.RLock()\n\tcache, cached := stockUpdateCache[key]\n\tstockUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := strmangle.UpdateColumnSet(\n\t\t\tstockColumns,\n\t\t\tstockPrimaryKeyColumns,\n\t\t\twhitelist,\n\t\t)\n\n\t\tif len(whitelist) == 0 {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn errors.New(\"models: unable to update stock, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `stock` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, stockPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(stockType, stockMapping, append(wl, stockPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\t_, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to update stock row\")\n\t}\n\n\tif !cached {\n\t\tstockUpdateCacheMut.Lock()\n\t\tstockUpdateCache[key] = cache\n\t\tstockUpdateCacheMut.Unlock()\n\t}\n\n\treturn nil\n}",
"func (t *Table) ExecuteUpdate(u *UpdateCommand) {\n\tu.real.execute(t, u.allocated[:u.allocSplit])\n\tu.virt.execute(t, u.allocated[u.allocSplit:])\n\tu.clear()\n}",
"func (o *WithdrawalCrypto) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\twithdrawalCryptoUpdateCacheMut.RLock()\n\tcache, cached := withdrawalCryptoUpdateCache[key]\n\twithdrawalCryptoUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\twithdrawalCryptoAllColumns,\n\t\t\twithdrawalCryptoPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"sqlite3: unable to update withdrawal_crypto, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"withdrawal_crypto\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", 0, withdrawalCryptoPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(withdrawalCryptoType, withdrawalCryptoMapping, append(wl, withdrawalCryptoPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"sqlite3: unable to update withdrawal_crypto row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"sqlite3: failed to get rows affected by update for withdrawal_crypto\")\n\t}\n\n\tif !cached {\n\t\twithdrawalCryptoUpdateCacheMut.Lock()\n\t\twithdrawalCryptoUpdateCache[key] = cache\n\t\twithdrawalCryptoUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}",
"func (d *Database) Update(fn func(db *gorm.DB) error) error {\n\ttx := d.db.Begin()\n\tif err := fn(tx); err != nil {\n\t\ttx.Rollback()\n\t\treturn err\n\t}\n\treturn tx.Commit().Error\n}",
"func (o TaskSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), taskPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"tasks\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, taskPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in task slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all task\")\n\t}\n\treturn rowsAff, nil\n}",
"func (c *Collection) Update(ctx context.Context, doc Document, mods Mods) error {\n\t_, err := c.Actions().Update(doc, mods).Do(ctx)\n\treturn err\n}",
"func (md *ImpMySQLDB) Update(_ context.Context, schema, table string, keys map[string]interface{}, values map[string]interface{}) error {\n\targs := make([]interface{}, 0, len(keys)+len(values))\n\tkvs := genSetFields(values, &args)\n\twhere := genWhere(keys, &args)\n\tstmt := fmt.Sprintf(\"UPDATE `%s`.`%s` SET %s WHERE %s;\", schema, table, kvs, where)\n\t_, err := md.db.Exec(stmt, args...)\n\n\tif md.verbose {\n\t\tstmt = md.genPlainSQL(stmt, args)\n\t\tfmt.Println(stmt)\n\t}\n\n\treturn errors.Trace(err)\n}",
"func (o StoreSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), storePrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"stores\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, storePrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in store slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all store\")\n\t}\n\treturn rowsAff, nil\n}",
"func (o UtxoSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), utxoPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"utxo\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, utxoPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in utxo slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all utxo\")\n\t}\n\treturn rowsAff, nil\n}",
"func (idx *Autoincrement) Update(id, oldV, newV string) error {\n\tif err := idx.Remove(id, oldV); err != nil {\n\t\treturn err\n\t}\n\n\tif _, err := idx.Add(id, newV); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func (c *MySQLClient) Update(p *purchase.Purchase) error {\n\tif p.ID == 0 {\n\t\treturn fmt.Errorf(\"purchase must have a preexisting ID\")\n\t}\n\n\tbuyBytes, err := json.Marshal(p.BuyOrder)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to marshal buy order: %v\", err)\n\t}\n\n\tsellBytes, err := json.Marshal(p.SellOrder)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to marshal sell order: %v\", err)\n\t}\n\n\tquery := `UPDATE trader_one\n SET\n buy_order = ?,\n sell_order = ?,\n updated_at = NOW()\n WHERE\n id = ?`\n\tctx, cancelFunc := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancelFunc()\n\tstmt, err := c.db.PrepareContext(ctx, query)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to prepare SQL statement: %v\", err)\n\t}\n\tdefer stmt.Close()\n\n\t_, err = stmt.ExecContext(ctx, jsonString(buyBytes), jsonString(sellBytes), p.ID)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to update row: %v\", err)\n\t}\n\treturn nil\n}",
"func Update(s Session, dbname string, collection string, selector map[string]interface{}, updator map[string]interface{}) error {\n\treturn s.DB(dbname).C(collection).Update(selector, updator)\n}",
"func (t *Testzzz) Update(ctx context.Context, key ...interface{}) error {\n\tvar err error\n\tvar dbConn *sql.DB\n\n\t// if deleted, bail\n\tif t._deleted {\n\t\treturn errors.New(\"update failed: marked for deletion\")\n\t}\n\n\ttx, err := components.M.GetConnFromCtx(ctx)\n\tif err != nil {\n\t\tdbConn, err = components.M.GetMasterConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\ttableName, err := GetTestzzzTableName(key...)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// sql query\n\tsqlstr := `UPDATE ` + tableName + ` SET ` +\n\t\t`a = ?, b = ?, c = ?` +\n\t\t` WHERE id = ?`\n\n\t// run query\n\tutils.GetTraceLog(ctx).Debug(\"DB\", zap.String(\"SQL\", fmt.Sprint(sqlstr, t.A, t.B, t.C, t.ID)))\n\tif tx != nil {\n\t\t_, err = tx.Exec(sqlstr, t.A, t.B, t.C, t.ID)\n\t} else {\n\t\t_, err = dbConn.Exec(sqlstr, t.A, t.B, t.C, t.ID)\n\t}\n\treturn err\n}",
"func testUpdateOperationDelete(t *testing.T) {\n\tt.Parallel()\n\n\th := UpdateOperationHandler(&matcher.Mock{\n\t\tDeleteUpdateOperations_: func(context.Context, ...uuid.UUID) (int64, error) { return 0, nil },\n\t})\n\tsrv := httptest.NewServer(h)\n\tdefer srv.Close()\n\tc := srv.Client()\n\n\tid := uuid.New().String()\n\tu := srv.URL + \"/\" + id\n\treq, err := http.NewRequest(http.MethodDelete, u, nil)\n\tif err != nil {\n\t\tt.Fatalf(\"failed to create request: %v\", err)\n\t}\n\tresp, err := c.Do(req)\n\tif err != nil {\n\t\tt.Fatalf(\"failed to do request: %v\", err)\n\t}\n\n\tif resp.StatusCode != http.StatusOK {\n\t\tt.Fatalf(\"got: %v, want: %v\", resp.StatusCode, http.StatusOK)\n\t}\n}",
"func (o *Ticket) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tkey := makeCacheKey(columns, nil)\n\tticketUpdateCacheMut.RLock()\n\tcache, cached := ticketUpdateCache[key]\n\tticketUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tticketAllColumns,\n\t\t\tticketPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update tickets, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"tickets\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, ticketPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(ticketType, ticketMapping, append(wl, ticketPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update tickets row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for tickets\")\n\t}\n\n\tif !cached {\n\t\tticketUpdateCacheMut.Lock()\n\t\tticketUpdateCache[key] = cache\n\t\tticketUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, nil\n}",
"func Update(db gorp.SqlExecutor, i interface{}) error {\n\treturn Mapper.Update(db, i)\n}",
"func (o MempoolBinSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), mempoolBinPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"mempool_bin\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, mempoolBinPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in mempoolBin slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all mempoolBin\")\n\t}\n\treturn rowsAff, nil\n}",
"func (uqs InstantprofileUpdateQS) Exec(db models.DBInterface) (int64, error) {\n\tif len(uqs.updates) == 0 {\n\t\treturn 0, nil\n\t}\n\n\tc := &models.PositionalCounter{}\n\n\tvar params []interface{}\n\n\tvar sets []string\n\tfor _, set := range uqs.updates {\n\t\ts, p := set.GetConditionFragment(c)\n\n\t\tsets = append(sets, s)\n\t\tparams = append(params, p...)\n\t}\n\n\tws, wp := InstantprofileQS{condFragments: uqs.condFragments}.whereClause(c)\n\n\tst := `UPDATE \"heatcontrol_instantprofile\" SET ` + strings.Join(sets, \", \") + ws\n\n\tparams = append(params, wp...)\n\n\tresult, err := db.Exec(st, params...)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn result.RowsAffected()\n}",
"func IncidentUpdates(incidentId int, resolvedBy string, description string, isResolved string) {\n\tsess := SetupDB()\n\n\t//select component id on which incident happen ....===========================\n\tid, err1 := sess.Select(\"component_id\").\n\t\tFrom(\"incidents\").\n\t\tWhere(\"id = ?\", incidentId).\n\t\tReturnInt64()\n\tCheckErr(err1)\n\t//============================================================================\n\n\t//insert record into incident_updates table ...================================\n\tvar incident IncidentUpdate\n\tincident.ComponentId = id\n\tincident.IncidentId = incidentId\n\tincident.Updated_by = resolvedBy\n\tincident.Description = description\n\n\t_, err := sess.InsertInto(\"incident_update\").\n\t\tColumns(\"incident_id\", \"component_id\", \"description\", \"updated_by\").\n\t\tRecord(incident).\n\t\tExec()\n\tCheckErr(err)\n\t//============================================================================\n\n\tif isResolved == \"true\" {\n\t\t//update status of incident to resolved ...===================================\n\t\t_, err3 := sess.Update(\"incidents\").\n\t\t\tSet(\"status\", \"resolved\").\n\t\t\tSet(\"resolved_at\", \"NOW()\").\n\t\t\tWhere(\"id = ?\", incident.IncidentId).\n\t\t\tExec()\n\t\tCheckErr(err3)\n\t\t//============================================================================\n\t}\n}",
"func Update(tableName string, property []string, values []interface{}, whereProperty string, whereOpt string, whereValue interface{}) bool {\n\tif db == nil {\n\t\tlogger.Println(tag_DBUtil, \"Update, db is nil\")\n\t\treturn false\n\t}\n\tlogger.Println(tag_DBUtil, property, values, whereProperty, whereValue)\n\tif len(property) < 1 || len(property) != len(values) {\n\t\tlogger.Println(tag_DBUtil, \"Update, property length is not equal values\")\n\t\treturn false\n\t}\n\tvar mysql string = \"update \" + tableName + \" set\"\n\tfor i := 0; i < len(property); i++ {\n\t\tif i == 0 {\n\t\t\tmysql += \" \" + property[i] + \" = ?\"\n\t\t} else {\n\t\t\tmysql += \" ,\" + property[i] + \" = ?\"\n\t\t}\n\t}\n\t//where\n\tif whereProperty != \"\" && whereOpt != \"\" {\n\t\tmysql += \" where \" + whereProperty + \" \" + whereOpt + \" ?\"\n\t}\n\tlogger.Println(tag_DBUtil, mysql)\n\tstmt, err := db.Prepare(mysql)\n\tif err != nil {\n\t\tlogger.Println(tag_DBUtil, err)\n\t\treturn false\n\t}\n\tif len(whereProperty) < 1 {\n\t\t_, err = stmt.Exec(values...)\n\t} else {\n\t\tnewValue := append(values, whereValue)\n\t\t_, err = stmt.Exec(newValue...)\n\t}\n\tif err != nil {\n\t\tlogger.Println(tag_DBUtil, err)\n\t\treturn false\n\t}\n\treturn true\n}",
"func (o VoteSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), votePrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"vote\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, votePrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in vote slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all vote\")\n\t}\n\treturn rowsAff, nil\n}",
"func (o *Storestate) Update(exec boil.Executor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tstorestateUpdateCacheMut.RLock()\n\tcache, cached := storestateUpdateCache[key]\n\tstorestateUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tstorestateColumns,\n\t\t\tstorestatePrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"stellarcore: unable to update storestate, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"storestate\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, storestatePrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(storestateType, storestateMapping, append(wl, storestatePrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tvar result sql.Result\n\tresult, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"stellarcore: unable to update storestate row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"stellarcore: failed to get rows affected by update for storestate\")\n\t}\n\n\tif !cached {\n\t\tstorestateUpdateCacheMut.Lock()\n\t\tstorestateUpdateCache[key] = cache\n\t\tstorestateUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(exec)\n}",
"func ExampleBuilder_WithUpdate() {\n\tcfg, err := config.LoadDefaultConfig(context.TODO())\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t\treturn\n\t}\n\n\tclient := dynamodb.NewFromConfig(cfg)\n\n\t// Create an update to set two fields in the table.\n\tupdate := expression.Set(\n\t\texpression.Name(\"Year\"),\n\t\texpression.Value(2015),\n\t).Set(\n\t\texpression.Name(\"AlbumTitle\"),\n\t\texpression.Value(\"Louder Than Ever\"),\n\t)\n\n\t// Create the DynamoDB expression from the Update.\n\texpr, err := expression.NewBuilder().\n\t\tWithUpdate(update).\n\t\tBuild()\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\t// Use the built expression to populate the DynamoDB UpdateItem API\n\t// input parameters.\n\tinput := &dynamodb.UpdateItemInput{\n\t\tExpressionAttributeNames: expr.Names(),\n\t\tExpressionAttributeValues: expr.Values(),\n\t\tKey: map[string]types.AttributeValue{\n\t\t\t\"Artist\": &types.AttributeValueMemberS{Value: \"Acme Band\"},\n\t\t\t\"SongTitle\": &types.AttributeValueMemberS{Value: \"Happy Day\"},\n\t\t},\n\t\tReturnValues: \"ALL_NEW\",\n\t\tTableName: aws.String(\"Music\"),\n\t\tUpdateExpression: expr.Update(),\n\t}\n\n\tresult, err := client.UpdateItem(context.TODO(), input)\n\tif err != nil {\n\t\tif apiErr := new(types.ProvisionedThroughputExceededException); errors.As(err, &apiErr) {\n\t\t\tfmt.Println(\"throughput exceeded\")\n\t\t} else if apiErr := new(types.ResourceNotFoundException); errors.As(err, &apiErr) {\n\t\t\tfmt.Println(\"resource not found\")\n\t\t} else if apiErr := new(types.InternalServerError); errors.As(err, &apiErr) {\n\t\t\tfmt.Println(\"internal server error\")\n\t\t} else {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\treturn\n\t}\n\n\tfmt.Println(result)\n}",
"func (s *dbstore) Put(keyVal ...*api.KeyValue) error {\n\tvar docs []*couchdb.CouchDoc\n\tfor _, kv := range keyVal {\n\t\tdataDoc, err := s.createCouchDoc(string(encodeKey(kv.Key, time.Time{})), kv.Value)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif dataDoc != nil {\n\t\t\tdocs = append(docs, dataDoc)\n\t\t}\n\t}\n\n\tif len(docs) == 0 {\n\t\tlogger.Debugf(\"[%s] Nothing to do\", s.dbName)\n\t\treturn nil\n\t}\n\n\t_, err := s.db.BatchUpdateDocuments(docs)\n\tif nil != err {\n\t\treturn errors.WithMessage(err, fmt.Sprintf(\"BatchUpdateDocuments failed for [%d] documents\", len(docs)))\n\t}\n\n\treturn nil\n}",
"func TestJobDBConcurrentUpdate(t sktest.TestingT, db JobDB) {\n\tctx := context.Background()\n\t// Insert a job.\n\tj1 := types.MakeTestJob(time.Now())\n\trequire.NoError(t, db.PutJob(ctx, j1))\n\n\t// Retrieve a copy of the job.\n\tj1Cached, err := db.GetJobById(ctx, j1.Id)\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, j1, j1Cached)\n\n\t// Update the original job.\n\tj1.Repo = \"another-repo\"\n\trequire.NoError(t, db.PutJob(ctx, j1))\n\n\t// Update the cached copy; should get concurrent update error.\n\tj1Cached.Status = types.JOB_STATUS_IN_PROGRESS\n\terr = db.PutJob(ctx, j1Cached)\n\trequire.True(t, IsConcurrentUpdate(err))\n\n\t{\n\t\t// DB should still have the old value of j1.\n\t\tj1Again, err := db.GetJobById(ctx, j1.Id)\n\t\trequire.NoError(t, err)\n\t\tAssertDeepEqual(t, j1, j1Again)\n\t}\n\n\t// Insert a second job.\n\tj2 := types.MakeTestJob(time.Now())\n\trequire.NoError(t, db.PutJob(ctx, j2))\n\n\t// Update j2 at the same time as j1Cached; should still get an error.\n\tj2Before := j2.Copy()\n\tj2.Status = types.JOB_STATUS_MISHAP\n\terr = db.PutJobs(ctx, []*types.Job{j2, j1Cached})\n\trequire.True(t, IsConcurrentUpdate(err))\n\n\t{\n\t\t// DB should still have the old value of j1 and j2.\n\t\tj1Again, err := db.GetJobById(ctx, j1.Id)\n\t\trequire.NoError(t, err)\n\t\tAssertDeepEqual(t, j1, j1Again)\n\n\t\tj2Again, err := db.GetJobById(ctx, j2.Id)\n\t\trequire.NoError(t, err)\n\t\tAssertDeepEqual(t, j2Before, j2Again)\n\t}\n}",
"func (o *Currency) Update(exec boil.Executor, columns boil.Columns) (int64, error) {\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\to.UpdatedAt = currTime\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tcurrencyUpdateCacheMut.RLock()\n\tcache, cached := currencyUpdateCache[key]\n\tcurrencyUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tcurrencyColumns,\n\t\t\tcurrencyPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update currency, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"currency\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, currencyPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(currencyType, currencyMapping, append(wl, currencyPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tvar result sql.Result\n\tresult, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update currency row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for currency\")\n\t}\n\n\tif !cached {\n\t\tcurrencyUpdateCacheMut.Lock()\n\t\tcurrencyUpdateCache[key] = cache\n\t\tcurrencyUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(exec)\n}",
"func (q withdrawalCryptoQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"sqlite3: unable to update all for withdrawal_crypto\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"sqlite3: unable to retrieve rows affected for withdrawal_crypto\")\n\t}\n\n\treturn rowsAff, nil\n}",
"func (q kvstoreQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for kvstore\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for kvstore\")\n\t}\n\n\treturn rowsAff, nil\n}",
"func (od *OrderDetail) Update(ctx context.Context, db DB) error {\n\tswitch {\n\tcase !od._exists: // doesn't exist\n\t\treturn logerror(&ErrUpdateFailed{ErrDoesNotExist})\n\tcase od._deleted: // deleted\n\t\treturn logerror(&ErrUpdateFailed{ErrMarkedForDeletion})\n\t}\n\t// update with primary key\n\tconst sqlstr = `UPDATE northwind.order_details SET ` +\n\t\t`unit_price = ?, quantity = ?, discount = ? ` +\n\t\t`WHERE order_id = ? AND product_id = ?`\n\t// run\n\tlogf(sqlstr, od.UnitPrice, od.Quantity, od.Discount, od.OrderID, od.ProductID)\n\tif _, err := db.ExecContext(ctx, sqlstr, od.UnitPrice, od.Quantity, od.Discount, od.OrderID, od.ProductID); err != nil {\n\t\treturn logerror(err)\n\t}\n\treturn nil\n}",
"func (tc *TaskCount) Update(c context.Context, queue string, exec, tot int) error {\n\t// Queue names are globally unique, so we can use them as IDs.\n\ttc.ID = queue\n\ttc.Computed = clock.Now(c).UTC()\n\ttc.Queue = queue\n\ttc.Executing = exec\n\ttc.Total = tot\n\tif err := datastore.Put(c, tc); err != nil {\n\t\treturn errors.Annotate(err, \"failed to store count\").Err()\n\t}\n\treturn nil\n}",
"func (q storeQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for stores\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for stores\")\n\t}\n\n\treturn rowsAff, nil\n}",
"func (t *DbService) Update(request *UpdateRequest) (*UpdateResponse, error) {\n\trsp := &UpdateResponse{}\n\treturn rsp, t.client.Call(\"db\", \"Update\", request, rsp)\n}",
"func (o *Auth) Update(exec boil.Executor, whitelist ...string) error {\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\to.UpdatedAt.Time = currTime\n\to.UpdatedAt.Valid = true\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn err\n\t}\n\tkey := makeCacheKey(whitelist, nil)\n\tauthUpdateCacheMut.RLock()\n\tcache, cached := authUpdateCache[key]\n\tauthUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := strmangle.UpdateColumnSet(\n\t\t\tauthColumns,\n\t\t\tauthPrimaryKeyColumns,\n\t\t\twhitelist,\n\t\t)\n\n\t\tif len(whitelist) == 0 {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn errors.New(\"models: unable to update auths, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"auths\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, authPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(authType, authMapping, append(wl, authPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\t_, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to update auths row\")\n\t}\n\n\tif !cached {\n\t\tauthUpdateCacheMut.Lock()\n\t\tauthUpdateCache[key] = cache\n\t\tauthUpdateCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpdateHooks(exec)\n}",
"func (s structModel)Update(table string)(query string, values[]interface{}, err error){\n\tif s.err != nil{\n\t\treturn \"\", nil, s.err\n\t}\n\tvar arrQuery []string\n\tquery = \"UPDATE \" + table + \" SET\"\n\tlistValues := make([]interface{}, 0)\n\tfor i, _ := range s.value{\n\t\tarrQuery = append(arrQuery, \" \" + s.key[i] + \"= $\" + strconv.Itoa(i+1))\n\t\tlistValues = append(listValues, s.value[i])\n\t}\n\tquery = query + strings.Join(arrQuery, \",\")\n\treturn query, listValues, nil\n}",
"func ExampleUpdate() {\n\tupd := q.Update(q.T(\"user\")).Set(q.C(\"name\"), \"hackme\").Where(q.Eq(q.C(\"id\"), 1))\n\tfmt.Println(upd)\n\t// Even in this case, the original name is used as a table and a column name\n\t// because Insert, Delete and Update aren't supporting \"AS\" syntax.\n\tu := q.T(\"user\", \"u\")\n\tfmt.Println(q.Update(u).Set(u.C(\"name\"), \"hackme\").Where(q.Eq(u.C(\"id\"), 1)))\n\t// When overwriting in the same name, the last one is effective.\n\tfmt.Println(q.Update(u).Set(u.C(\"name\"), \"hackyou\").Set(u.C(\"name\"), \"hackme\").Where(q.Eq(u.C(\"id\"), 1)))\n\t// Output:\n\t// UPDATE \"user\" SET \"name\" = ? WHERE \"id\" = ? [hackme 1]\n\t// UPDATE \"user\" SET \"name\" = ? WHERE \"id\" = ? [hackme 1]\n\t// UPDATE \"user\" SET \"name\" = ? WHERE \"id\" = ? [hackme 1]\n}",
"func (o AutomodRuleDatumSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), automodRuleDatumPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"automod_rule_data\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, automodRuleDatumPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in automodRuleDatum slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all automodRuleDatum\")\n\t}\n\treturn rowsAff, nil\n}",
"func (m *Account) Updates(values interface{}) error {\n\treturn UnscopedDb().Model(m).UpdateColumns(values).Error\n}",
"func (o *FilesStorage) Update(exec boil.Executor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tfilesStorageUpdateCacheMut.RLock()\n\tcache, cached := filesStorageUpdateCache[key]\n\tfilesStorageUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tfilesStorageColumns,\n\t\t\tfilesStoragePrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update files_storages, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `files_storages` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, filesStoragePrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(filesStorageType, filesStorageMapping, append(wl, filesStoragePrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tvar result sql.Result\n\tresult, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update files_storages row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for files_storages\")\n\t}\n\n\tif !cached {\n\t\tfilesStorageUpdateCacheMut.Lock()\n\t\tfilesStorageUpdateCache[key] = cache\n\t\tfilesStorageUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(exec)\n}",
"func TestUpdate(t *testing.T) {\n\tlocalStore := NewEventLocalStore()\n\n\teventTest1 := &entities.Event{ID: \"id1\", Title: \"Title1\"}\n\terr := localStore.Create(eventTest1)\n\tassert.NoError(t, err)\n\n\teventTest2 := &entities.Event{ID: \"id1\", Title: \"Title2\"}\n\terr = localStore.Update(eventTest2)\n\tassert.NoError(t, err)\n\n\tassert.Equal(t, localStore.events[\"id1\"].Title, \"Title2\")\n}",
"func (o CMFBalanceChargeAdminSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfBalanceChargeAdminPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_balance_charge_admin` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfBalanceChargeAdminPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfBalanceChargeAdmin slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfBalanceChargeAdmin\")\n\t}\n\treturn rowsAff, nil\n}",
"func (o *Store) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no stores provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t\to.UpdatedAt = currTime\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(storeColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tstoreUpsertCacheMut.RLock()\n\tcache, cached := storeUpsertCache[key]\n\tstoreUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tstoreAllColumns,\n\t\t\tstoreColumnsWithDefault,\n\t\t\tstoreColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tstoreAllColumns,\n\t\t\tstorePrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert stores, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(storePrimaryKeyColumns))\n\t\t\tcopy(conflict, storePrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"stores\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(storeType, storeMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(storeType, storeMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert stores\")\n\t}\n\n\tif !cached {\n\t\tstoreUpsertCacheMut.Lock()\n\t\tstoreUpsertCache[key] = cache\n\t\tstoreUpsertCacheMut.Unlock()\n\t}\n\n\treturn nil\n}",
"func (o *Vote) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tkey := makeCacheKey(columns, nil)\n\tvoteUpdateCacheMut.RLock()\n\tcache, cached := voteUpdateCache[key]\n\tvoteUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tvoteAllColumns,\n\t\t\tvotePrimaryKeyColumns,\n\t\t)\n\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update vote, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"vote\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, votePrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(voteType, voteMapping, append(wl, votePrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update vote row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for vote\")\n\t}\n\n\tif !cached {\n\t\tvoteUpdateCacheMut.Lock()\n\t\tvoteUpdateCache[key] = cache\n\t\tvoteUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, nil\n}",
"func (o *Vote) Update(exec boil.Executor, whitelist ...string) error {\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\to.UpdatedAt = currTime\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn err\n\t}\n\tkey := makeCacheKey(whitelist, nil)\n\tvoteUpdateCacheMut.RLock()\n\tcache, cached := voteUpdateCache[key]\n\tvoteUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := strmangle.UpdateColumnSet(\n\t\t\tvoteColumns,\n\t\t\tvotePrimaryKeyColumns,\n\t\t\twhitelist,\n\t\t)\n\n\t\tif len(whitelist) == 0 {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn errors.New(\"models: unable to update vote, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `vote` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, votePrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(voteType, voteMapping, append(wl, votePrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\t_, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to update vote row\")\n\t}\n\n\tif !cached {\n\t\tvoteUpdateCacheMut.Lock()\n\t\tvoteUpdateCache[key] = cache\n\t\tvoteUpdateCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpdateHooks(exec)\n}",
"func (b *Backend) Update() (c context.Context, err error) {\n\tvar m Mutation\n\tfor {\n\t\terr = b.cursor.Next(c, &m)\n\t\tif err == scroll.Done {\n\t\t\tbreak\n\t\t} else if err != nil {\n\t\t\treturn\n\t\t}\n\t\tm.Update(b)\n\t}\n\treturn\n}",
"func (o *Repository) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\trepositoryUpdateCacheMut.RLock()\n\tcache, cached := repositoryUpdateCache[key]\n\trepositoryUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\trepositoryAllColumns,\n\t\t\trepositoryPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update repositories, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `repositories` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, repositoryPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(repositoryType, repositoryMapping, append(wl, repositoryPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update repositories row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for repositories\")\n\t}\n\n\tif !cached {\n\t\trepositoryUpdateCacheMut.Lock()\n\t\trepositoryUpdateCache[key] = cache\n\t\trepositoryUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}",
"func TestUpdate(T *testing.T) {\n\tvar db etcdDB\n\tts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tfmt.Fprintln(w, `{\"action\":\"set\",\"node\":{\"key\":\"/testDir\",\"value\":\"Hello\",\"modifiedIndex\":4,\"createdIndex\":4}}`)\n\t}))\n\n\tdefer ts.Close()\n\n\tconfig := ts.URL\n\tdb.Cfg = cli.Config{\n\t\tEndpoints: []string{config},\n\t\tTransport: cli.DefaultTransport,\n\t\t// set timeout per request to fail fast when the target endpoint is unavailable\n\t\tHeaderTimeoutPerRequest: time.Second,\n\t}\n\n\tdb.C, _ = cli.New(db.Cfg)\n\n\tdb.Kapi = cli.NewKeysAPI(db.C)\n\tdb.Ctx = context.Background()\n\n\terr := db.Update(\"/testDir\", \"Hello\", true)\n\n\tif err != nil {\n\t\tT.Fail()\n\t}\n\n}",
"func (t *TableStruct) Update(id int, doc map[string]string) (err error) {\n\tif t == nil {\n\t\terr = fmt.Errorf(\"%s\", \"Requested table does not exist\")\n\t\treturn\n\t}\n\n\tif id > t.IndexNum {\n\t\terr = fmt.Errorf(\"%s\", \"id is missing\")\n\t\treturn\n\t}\n\n\tfor name, val := range doc {\n\t\tif _, ok := t.Fields[name]; !ok {\n\t\t\t//if field not exist - just skip it\n\t\t\tcontinue\n\t\t}\n\t\tfield := t.Fields[name]\n\n\t\tif len(val) > field.Size {\n\t\t\tval = val[0:field.Size]\n\t\t}\n\n\t\tvar b []byte\n\t\tmisslen := field.Size - len(val)\n\t\tb = append([]byte(val), make([]byte, misslen)...)\n\t\t_, err = field.File.WriteAt(b, int64(id*field.Size))\n\t}\n\n\treturn\n}",
"func (o *AutomodRuleDatum) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tkey := makeCacheKey(columns, nil)\n\tautomodRuleDatumUpdateCacheMut.RLock()\n\tcache, cached := automodRuleDatumUpdateCache[key]\n\tautomodRuleDatumUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tautomodRuleDatumAllColumns,\n\t\t\tautomodRuleDatumPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update automod_rule_data, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"automod_rule_data\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, automodRuleDatumPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(automodRuleDatumType, automodRuleDatumMapping, append(wl, automodRuleDatumPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update automod_rule_data row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for automod_rule_data\")\n\t}\n\n\tif !cached {\n\t\tautomodRuleDatumUpdateCacheMut.Lock()\n\t\tautomodRuleDatumUpdateCache[key] = cache\n\t\tautomodRuleDatumUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, nil\n}",
"func (u *__Room_Updater) Update(db XODB) (int, error) {\n\tvar err error\n\n\tvar updateArgs []interface{}\n\tvar sqlUpdateArr []string\n\tfor up, newVal := range u.updates {\n\t\tsqlUpdateArr = append(sqlUpdateArr, up)\n\t\tupdateArgs = append(updateArgs, newVal)\n\t}\n\tsqlUpdate := strings.Join(sqlUpdateArr, \",\")\n\n\tsqlWherrs, whereArgs := whereClusesToSql(u.wheres, u.whereSep)\n\n\tvar allArgs []interface{}\n\tallArgs = append(allArgs, updateArgs...)\n\tallArgs = append(allArgs, whereArgs...)\n\n\tsqlstr := `UPDATE ms.room SET ` + sqlUpdate\n\n\tif len(strings.Trim(sqlWherrs, \" \")) > 0 { //2 for safty\n\t\tsqlstr += \" WHERE \" + sqlWherrs\n\t}\n\n\tXOLog(sqlstr, allArgs)\n\tres, err := db.Exec(sqlstr, allArgs...)\n\tif err != nil {\n\t\tXOLogErr(err)\n\t\treturn 0, err\n\t}\n\n\tnum, err := res.RowsAffected()\n\tif err != nil {\n\t\tXOLogErr(err)\n\t\treturn 0, err\n\t}\n\n\treturn int(num), nil\n}",
"func (o *SysDB) Update(db *gorm.DB, fields ...SysDBDBSchemaField) error {\n\tdbNameToFieldName := map[string]interface{}{\n\t\t\"id\": o.ID,\n\t\t\"show_name\": o.ShowName,\n\t\t\"host\": o.Host,\n\t\t\"port\": o.Port,\n\t\t\"user\": o.User,\n\t\t\"password\": o.Password,\n\t\t\"db_name\": o.DBName,\n\t\t\"created_at\": o.CreatedAt,\n\t\t\"updated_at\": o.UpdatedAt,\n\t}\n\tu := map[string]interface{}{}\n\tfor _, f := range fields {\n\t\tfs := f.String()\n\t\tu[fs] = dbNameToFieldName[fs]\n\t}\n\tif err := db.Model(o).Updates(u).Error; err != nil {\n\t\tif err == gorm.ErrRecordNotFound {\n\t\t\treturn err\n\t\t}\n\n\t\treturn fmt.Errorf(\"can't update SysDB %v fields %v: %s\",\n\t\t\to, fields, err)\n\t}\n\n\treturn nil\n}",
"func (b *Builder) Update(updates ...Eq) *Builder {\r\n\tb.updates = updates\r\n\tb.optype = updateType\r\n\treturn b\r\n}",
"func (m *SQLIgnoreStore) Update(id int, rule *IgnoreRule) error {\n\tstmt := `UPDATE ignorerule SET updated_by=?, expires=?, query=?, note=? WHERE id=?`\n\n\tres, err := m.vdb.DB.Exec(stmt, rule.UpdatedBy, rule.Expires.Unix(), rule.Query, rule.Note, rule.ID)\n\tif err != nil {\n\t\treturn err\n\t}\n\tn, err := res.RowsAffected()\n\tif err == nil && n == 0 {\n\t\treturn fmt.Errorf(\"Did not find an IgnoreRule with id: %d\", id)\n\t}\n\tm.inc()\n\treturn nil\n}",
"func (o ItemSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), itemPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"items\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, itemPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in item slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all item\")\n\t}\n\treturn rowsAff, nil\n}",
"func (o HoldenAtSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), holdenAtPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"HoldenAt\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, holdenAtPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in holdenAt slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all holdenAt\")\n\t}\n\treturn rowsAff, nil\n}"
] | [
"0.53321916",
"0.5310427",
"0.5304057",
"0.5290424",
"0.52536964",
"0.5223758",
"0.5218239",
"0.5211529",
"0.5206224",
"0.5174038",
"0.51708364",
"0.5149535",
"0.51188",
"0.5112181",
"0.5062066",
"0.49757835",
"0.4972469",
"0.49406815",
"0.49391848",
"0.493276",
"0.49311072",
"0.49192944",
"0.49192944",
"0.48968837",
"0.48887056",
"0.4886131",
"0.48677903",
"0.48657212",
"0.4865557",
"0.48646715",
"0.48562282",
"0.48367456",
"0.48120964",
"0.48048437",
"0.4794715",
"0.47716355",
"0.4760329",
"0.47545445",
"0.4753615",
"0.47389293",
"0.47369036",
"0.47350627",
"0.47178683",
"0.47169754",
"0.4710537",
"0.4709137",
"0.47032768",
"0.47013855",
"0.47013566",
"0.46999383",
"0.46979427",
"0.46965903",
"0.46931666",
"0.46900767",
"0.46777496",
"0.46757483",
"0.46755084",
"0.46742752",
"0.46730766",
"0.46725395",
"0.46628505",
"0.46595618",
"0.4650766",
"0.46486142",
"0.46389863",
"0.46261913",
"0.4622737",
"0.46182472",
"0.461343",
"0.46124095",
"0.45892054",
"0.45856413",
"0.4585266",
"0.45837367",
"0.45714536",
"0.45655543",
"0.45600554",
"0.4556554",
"0.45444942",
"0.45441714",
"0.45431787",
"0.45402473",
"0.45382312",
"0.45372784",
"0.4532298",
"0.45311594",
"0.45283407",
"0.45282745",
"0.45280692",
"0.45265913",
"0.45241445",
"0.4519387",
"0.4516854",
"0.45165655",
"0.45151302",
"0.45138702",
"0.45137188",
"0.4509855",
"0.45097363",
"0.45087925"
] | 0.74732876 | 0 |
Load Load a key from the DB | func (mgr *LocalHashMapDBMgr) Load(k common.Key) (interface{}, error) {
node, ok := mgr.memMap[k]
glog.V(2).Infof("loading %v from db", k)
if !ok {
return nil, common.ErrNotFound
}
glog.V(2).Infof("successfully loaded %v (%v) from db", k, node)
return node.(*treeNode).deepCopy(), nil
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (a apiKey) Load(id interface{}, col string) (key apiKey) {\n\t// Open database connection\n\tdb, err := dbConnect()\n\tif err != nil {\n\t\tlog.Println(err.Error())\n\t\treturn\n\t}\n\n\t// Load apiKey\n\tif key, err = db.LoadAPIKey(id, col); err != nil {\n\t\tlog.Println(err.Error())\n\t}\n\n\treturn\n}",
"func (txn *levelDBTxn) Load(key string) (string, error) {\n\treturn txn.kv.Load(key)\n}",
"func (d *Dam) Load(key Marshallable) (interface{}, error) {\n\tk, err := hash(key)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\td.mutex.RLock()\n\te, ok := d.storage[k]\n\td.mutex.RUnlock()\n\tif !ok {\n\t\treturn nil, ErrNotFound\n\t}\n\t<-e.ready\n\treturn e.value, nil\n}",
"func (kv *LevelDBKV) Load(key string) (string, error) {\n\tv, err := kv.Get([]byte(key), nil)\n\tif err != nil {\n\t\tif err == leveldb.ErrNotFound {\n\t\t\treturn \"\", nil\n\t\t}\n\t\treturn \"\", errors.WithStack(err)\n\t}\n\treturn string(v), err\n}",
"func (r *RedisPersist) Load(key string) (lib.Packet, bool) {\n\tif r == nil || r.conn == nil {\n\t\treturn nil, false\n\t}\n\n\tif rs, err := r.conn.HGet(r.mainKey, key).Result(); err != nil {\n\t\tif pkt, err := lib.DecodeOnePacket(strings.NewReader(rs)); err != nil {\n\t\t\t// delete wrong packet\n\t\t\tr.Delete(key)\n\t\t} else {\n\t\t\treturn pkt, true\n\t\t}\n\t}\n\n\treturn nil, false\n}",
"func (r *redisPersist) Load(key string) (mqtt.Packet, bool) {\n\tif r == nil || r.conn == nil {\n\t\treturn nil, false\n\t}\n\n\tif rs, err := r.conn.HGet(r.mainKey, key).Result(); err != nil {\n\t\tif pkt, err := mqtt.Decode(mqtt.V311, strings.NewReader(rs)); err != nil {\n\t\t\t// delete wrong packet\n\t\t\tr.Delete(key)\n\t\t} else {\n\t\t\treturn pkt, true\n\t\t}\n\t}\n\n\treturn nil, false\n}",
"func (db *FlatDatabase) Get(key []byte) ([]byte, error) { panic(\"not supported\") }",
"func (l *Locker) Load(key Flags) (interface{}, bool) {\n\treturn l.data.Load(key)\n}",
"func (db RDB) Load(o DBObject, keys map[string]interface{}) error {\n\twhere := make([]string, 0, len(keys))\n\tfor k, v := range keys {\n\t\twhere = append(where, fmt.Sprintf(\"%s=%v\", k, v))\n\t}\n\tconst text = \"select %s from %s where %s\"\n\tquery := fmt.Sprintf(text, o.SelectFields(), o.TableName(), strings.Join(where, \" and \"))\n\treturn db.get(o.Receivers(), query)\n}",
"func (cs *CStore) Load(key string) (string, bool) {\n\tcs.mu.RLock()\n\tval, ok := cs.store[key]\n\tcs.mu.RUnlock()\n\n\treturn val, ok\n}",
"func (db *LDB) Get(key []byte) ([]byte, error) {\n dat, err := db.db.Get(key, nil)\n if err != nil {\n return nil, err\n }\n return dat, nil\n}",
"func (d *DynamoDBMetastore) Load(ctx context.Context, keyID string, created int64) (*appencryption.EnvelopeKeyRecord, error) {\n\tdefer loadDynamoDBTimer.UpdateSince(time.Now())\n\n\tproj := expression.NamesList(expression.Name(keyRecord))\n\texpr, err := expression.NewBuilder().WithProjection(proj).Build()\n\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"dynamodb expression error\")\n\t}\n\n\tres, err := d.svc.GetItemWithContext(ctx, &dynamodb.GetItemInput{\n\t\tExpressionAttributeNames: expr.Names(),\n\t\tKey: map[string]*dynamodb.AttributeValue{\n\t\t\tpartitionKey: {S: &keyID},\n\t\t\tsortKey: {N: aws.String(strconv.FormatInt(created, 10))},\n\t\t},\n\t\tProjectionExpression: expr.Projection(),\n\t\tTableName: aws.String(d.tableName),\n\t\tConsistentRead: aws.Bool(true), // always use strong consistency\n\t})\n\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"metastore error\")\n\t}\n\n\tif res.Item == nil {\n\t\treturn nil, nil\n\t}\n\n\treturn parseResult(res.Item[keyRecord])\n}",
"func (d *DB) LoadApp(ctx context.Context, key string) (*App, error) {\n\tlog := logger.FromContext(ctx)\n\n\tif d.verbose {\n\t\tlog.Log(\n\t\t\t\"msg\", \"loading app\",\n\t\t)\n\t}\n\n\tparts := strings.Split(key, \"-\")\n\tif len(parts) != 2 {\n\t\treturn nil, errors.New(\"invalid key\")\n\t}\n\n\tsql := `SELECT uid, app_name, scope, rate FROM applications WHERE uid = $1 AND key_hash = crypt($2, key_hash)`\n\n\tvar app App\n\n\terr := d.DB.Get(&app, sql, parts[0], parts[1])\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to get app from DB\")\n\t}\n\n\treturn &app, nil\n}",
"func (kp *KeyPool) loadKey(loadKey *signkeys.PublicKey) (*[signkeys.KeyIDSize]byte, error) {\n\tif kp.Generator.Usage != \"\" && loadKey.Usage != kp.Generator.Usage {\n\t\t// Don't load if usage is a mismatch\n\t\treturn nil, ErrBadUsage\n\t}\n\tif loadKey.Expire < times.Now() {\n\t\t// Don't load expired keys\n\t\treturn nil, ErrExpired\n\t}\n\tif !kp.HasVerifyKey(&loadKey.Signer, true) {\n\t\t// Don't load keys without matching signature\n\t\treturn nil, ErrBadSigner\n\t}\n\tif !loadKey.Verify(&loadKey.Signer) {\n\t\t// Don't load keys without matching signature\n\t\treturn nil, ErrBadSigner\n\t}\n\tif _, exists := kp.keys[loadKey.KeyID]; exists {\n\t\treturn &loadKey.KeyID, ErrExists\n\t}\n\tkp.keys[loadKey.KeyID] = loadKey\n\treturn &loadKey.KeyID, nil\n}",
"func (s *RedisStore) load(ctx context.Context, session *sessions.Session) (bool, error) {\n\tdata, err := s.Cmd.Get(ctx, s.keyPrefix+session.ID).Bytes()\n\tif err != nil {\n\t\treturn false, err\n\t}\n\tif data == nil {\n\t\treturn false, nil // no data was associated with this key\n\t}\n\treturn true, s.serializer.Deserialize(data, session)\n}",
"func (s *store) Load(key string) (rec hash.Record, ok bool) {\n\ts.worldMu.Lock()\n\tdefer s.worldMu.Unlock()\n\n\trec, ok = s.hashMap.Load(key)\n\tif !ok {\n\t\treturn rec, ok\n\t}\n\n\t// Remove an expired key to guarantee consistency of the storage.\n\tif rec.IsExpired() {\n\t\tlog.DebugLogf(\"store/LOAD\", \"key %s is expired, deleting\", key)\n\t\ts.hashMap.Delete(key)\n\t\treturn hash.Record{}, false\n\t}\n\n\treturn rec, ok\n}",
"func (s *lockState) load(key string) string {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\treturn s.locks[key]\n}",
"func (db *LocalDb) load() error {\n\n\tdb.mutex.Lock()\n\tdefer db.mutex.Unlock()\n\n\t// read file\n\tdata, err := ioutil.ReadFile(constLocalDbFn)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t\treturn err\n\t}\n\n\t// decode json\n\tdb.users = make(map[string]*UserInfo)\n\tif err := json.Unmarshal(data, &db.users); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func (gorilla Gorilla) Load(req *http.Request, key string, result interface{}) error {\n\tvalue := gorilla.Get(req, key)\n\tif value != \"\" {\n\t\treturn json.Unmarshal([]byte(value), result)\n\t}\n\treturn nil\n}",
"func (s *Store) Load(key string, dst interface{}) (bool, error) {\n\tval, ok, err := s.Get(key)\n\tif err != nil {\n\t\treturn false, fmt.Errorf(\"Failed to get %s: %v\", key, err)\n\t}\n\tif !ok {\n\t\treturn false, nil\n\t}\n\treturn true, mapstructure.Decode(val, dst)\n}",
"func LoadKey(filename string) {\n\tif _, err := os.Stat(filename); os.IsNotExist(err) {\n\t\tkey := RandomString(128)\n\t\tf, err := os.OpenFile(filename, os.O_RDWR|os.O_CREATE, 0600)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tfmt.Fprintln(f, key)\n\t\tf.Close()\n\t\tSetKey(key)\n\t\treturn\n\t}\n\tkey, err := ioutil.ReadFile(filename)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tSetKey(strings.TrimSpace(string(key)))\n}",
"func (r *StringJsonCodecRedisController) Load(key string) error {\n\t// redis conn\n\tconn := r.pool.Get()\n\tdefer conn.Close()\n\n\t// load data from redis string\n\tdata, err := github_com_gomodule_redigo_redis.Bytes(conn.Do(\"GET\", key))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// unmarshal data to StringStorageType\n\treturn github_com_json_iterator_go.Unmarshal(data, r.m)\n}",
"func (d *Database) Load(db DB, table string, dst interface{}, pk interface{}) error {\n\treturn d.LoadContext(context.Background(), db, table, dst, pk)\n}",
"func (s *syncMapInt64) load(key int) int64 {\n\ts.Lock()\n\tdefer s.Unlock()\n\treturn s.m[key]\n}",
"func (m *Messages) Load(key string) (string, bool) {\n\tvalue, ok := (*m)[key]\n\tif ok == false {\n\t\tvalue = key\n\t}\n\treturn value, ok\n}",
"func (r *repo) LoadKey(iri pub.IRI) (crypto.PrivateKey, error) {\n\tm, err := r.LoadMetadata(iri)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tb, _ := pem.Decode(m.PrivateKey)\n\tif b == nil {\n\t\treturn nil, errors.Errorf(\"failed decoding pem\")\n\t}\n\tprvKey, err := x509.ParsePKCS8PrivateKey(b.Bytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn prvKey, nil\n}",
"func Load(key string, loader ValuesLoader) (Values, error) {\n\treturn nil, nil\n}",
"func (m *Cmap) Load(key interface{}) (value interface{}, ok bool) {\n\thash := ehash(key)\n\t_, b := m.getInodeAndBucket(hash)\n\treturn b.tryLoad(key)\n}",
"func LoadKey(dir string) (wgtypes.Key, error) {\n\tpath := filepath.Join(dir, \"key.priv\")\n\tb, err := os.ReadFile(path)\n\tif err != nil {\n\t\treturn wgtypes.Key{}, err\n\t}\n\treturn wgtypes.ParseKey(string(b))\n}",
"func (a *Account) LoadKey(c client.Client) error {\n\t// create a new keys API\n\tkapi := client.NewKeysAPI(c)\n\t// get the key\n\tctx, cancelFunc := context.WithTimeout(context.Background(), 10*time.Second)\n\tresp, err := kapi.Get(ctx, fmt.Sprintf(cryptoKey, a.email), nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\tcancelFunc()\n\t// decode the key into a keyBlock\n\tkeyBlock, _ := pem.Decode([]byte(resp.Node.Value))\n\t// cast the key to the correct format and store it in a.key\n\tswitch keyBlock.Type {\n\tcase \"RSA PRIVATE KEY\":\n\t\ta.key, err = x509.ParsePKCS1PrivateKey(keyBlock.Bytes)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\tcase \"EC PRIVATE KEY\":\n\t\ta.key, err = x509.ParseECPrivateKey(keyBlock.Bytes)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\tdefault:\n\t\treturn ErrUnknowKeyType\n\t}\n}",
"func Load(db DB, table string, dst interface{}, pk interface{}) error {\n\treturn LoadContext(context.Background(), db, table, dst, pk)\n}",
"func (c *keyCache) load(id KeyMeta, loader keyLoader) (*internal.CryptoKey, error) {\n\tc.rw.Lock()\n\tdefer c.rw.Unlock()\n\n\tkey := cacheKey(id.ID, id.Created)\n\n\te, ok := c.keys[key]\n\tif !ok || isReloadRequired(e, c.policy.RevokeCheckInterval) {\n\t\tk, err := loader.Load()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif ok && e.key.Created() == k.Created() {\n\t\t\t// existing key in cache. update revoked status and last loaded time and close key\n\t\t\t// we just loaded since we don't need it\n\t\t\te.key.SetRevoked(k.Revoked())\n\t\t\te.loadedAt = time.Now()\n\n\t\t\tk.Close()\n\t\t} else {\n\t\t\t// first time loading this key into cache or we have an ID-only key with mismatched\n\t\t\t// create timestamps\n\t\t\te = newCacheEntry(k)\n\t\t\tc.keys[key] = e\n\t\t}\n\n\t\tlatestKey := cacheKey(id.ID, 0)\n\t\tif key == latestKey {\n\t\t\t// we've loaded a key using ID-only, ensure we've got a cache entry with a fully\n\t\t\t// qualified cache key\n\t\t\tc.keys[cacheKey(id.ID, k.Created())] = e\n\t\t} else if latest, ok := c.keys[latestKey]; !ok || latest.key.Created() < k.Created() {\n\t\t\t// we've loaded a key using a fully qualified cache key and the ID-only entry is\n\t\t\t// either missing or stale\n\t\t\tc.keys[latestKey] = e\n\t\t}\n\t}\n\n\treturn e.key, nil\n}",
"func Load(key string, data []byte) Entity {\n\tvar (\n\t\tbuffer bytes.Buffer\n\t\tentity Entity\n\t)\n\n\tbuffer.Write(data)\n\tdecoder := gob.NewDecoder(&buffer)\n\tentityType := strings.Split(key, \".\")[0]\n\n\tswitch entityType {\n\tcase \"player\":\n\t\tentity = new(Player)\n\tcase \"planet\":\n\t\tentity = new(Planet)\n\tcase \"mission\":\n\t\tentity = new(Mission)\n\tcase \"sun\":\n\t\tentity = new(Sun)\n\tcase \"ss\":\n\t\tentity = new(SolarSlot)\n\tcase \"spy_report\":\n\t\tentity = new(SpyReport)\n\tdefault:\n\t\treturn nil\n\t}\n\tdecoder.Decode(entity)\n\treturn entity\n}",
"func (dst *Bravo) Load(key uint64) bool {\n\t_, mask := dst.search(key)\n\treturn mask == 0\n}",
"func (store *SessionCookieStore) Load(key string) (sess Session, err error) {\n\tdecoded, err := store.decode(key)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tunsigned, err := store.verify(decoded)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdecrypted, err := store.decrypt(unsigned)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif err := codec.NewDecoderBytes(decrypted, codecHandler).Decode(&sess); err != nil {\n\t\treturn nil, err\n\t}\n\treturn sess, nil\n}",
"func (d *keyData) load(tpm *tpm2.TPMContext, session tpm2.SessionContext) (tpm2.ResourceContext, error) {\n\tsrkContext, err := tpm.CreateResourceContextFromTPM(tcg.SRKHandle)\n\tif err != nil {\n\t\treturn nil, xerrors.Errorf(\"cannot create context for SRK: %w\", err)\n\t}\n\n\tkeyContext, err := tpm.Load(srkContext, d.keyPrivate, d.keyPublic, session)\n\tif err != nil {\n\t\tinvalidObject := false\n\t\tswitch {\n\t\tcase tpm2.IsTPMParameterError(err, tpm2.AnyErrorCode, tpm2.CommandLoad, tpm2.AnyParameterIndex):\n\t\t\tinvalidObject = true\n\t\tcase tpm2.IsTPMError(err, tpm2.ErrorSensitive, tpm2.CommandLoad):\n\t\t\tinvalidObject = true\n\t\t}\n\t\tif invalidObject {\n\t\t\treturn nil, keyFileError{errors.New(\"cannot load sealed key object in to TPM: bad sealed key object or TPM owner changed\")}\n\t\t}\n\t\treturn nil, xerrors.Errorf(\"cannot load sealed key object in to TPM: %w\", err)\n\t}\n\n\treturn keyContext, nil\n}",
"func (m *privateSetMap) Load(key Value) (value struct{}, ok bool) {\n\tbucket := m.backingVector.Get(m.pos(key))\n\tif bucket != nil {\n\t\tfor _, item := range bucket {\n\t\t\tif item.Key == key {\n\t\t\t\treturn item.Value, true\n\t\t\t}\n\t\t}\n\t}\n\n\tvar zeroValue struct{}\n\treturn zeroValue, false\n}",
"func (kp *KeyPool) LoadKey(loadKey *signkeys.PublicKey) (*[signkeys.KeyIDSize]byte, error) {\n\tkp.mapMutex.Lock()\n\tdefer kp.mapMutex.Unlock()\n\treturn kp.loadKey(loadKey)\n}",
"func (cs ConsulStorage) Load(ctx context.Context, key string) ([]byte, error) {\n\tcs.logger.Debugf(\"loading data from Consul for %s\", key)\n\n\tkv, _, err := cs.ConsulClient.KV().Get(cs.prefixKey(key), ConsulQueryDefaults(ctx))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif kv == nil {\n\t\treturn nil, fs.ErrNotExist\n\t}\n\n\tcontents, err := cs.DecryptStorageData(kv.Value)\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"unable to decrypt data for %s\", cs.prefixKey(key))\n\t}\n\n\treturn contents.Value, nil\n}",
"func (slm *svLockMap) load(key, value string) {\n\tslm.LockMap[key] = value\n}",
"func (m *Map) Load(key Hasher) (value interface{}, ok bool) {\n\tsplit := m.Split(key)\n\tsplit.RLock()\n\tvalue, ok = split.Map[key]\n\tsplit.RUnlock()\n\treturn\n}",
"func (db *DB) Get(key []byte) (value []byte, err error) {\n\treturn db.LevigoDB.Get(db.ro, key)\n}",
"func Load(id int) (*Subject, error) {\n\titem, err := dbMap.Get(Subject{}, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif item == nil {\n\t\treturn nil, fmt.Errorf(\"no subject with id %v\", id)\n\t}\n\treturn item.(*Subject), nil\n}",
"func (m *Uint64) Load(key interface{}) (val uint64) {\n\treturn m.Value(key).Load()\n}",
"func Load(db DB, table string, dst interface{}, pk int64) error {\n\treturn Default.Load(db, table, dst, pk)\n}",
"func (m *Map) Load(key Value) (value Value, ok bool) {\n\tbucket := m.backingVector.Get(m.pos(key))\n\tif bucket != nil {\n\t\tfor _, item := range bucket {\n\t\t\tif item.Key == key {\n\t\t\t\treturn item.Value, true\n\t\t\t}\n\t\t}\n\t}\n\n\tvar zeroValue Value\n\treturn zeroValue, false\n}",
"func (d *Database) Load(db DB, table string, dst interface{}, pk int64) error {\n\tcolumns, err := d.ColumnsQuoted(dst, true)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// make sure we have a primary key field\n\tpkName, _, err := d.PrimaryKey(dst)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif pkName == \"\" {\n\t\treturn fmt.Errorf(\"meddler.Load: no primary key field found\")\n\t}\n\n\t// run the query\n\tq := fmt.Sprintf(\"SELECT %s FROM %s WHERE %s = %s\", columns, d.quoted(table), d.quoted(pkName), d.Placeholder)\n\n\trows, err := db.Query(q, pk)\n\tif err != nil {\n\t\treturn &dbErr{msg: \"meddler.Load: DB error in Query\", err: err}\n\t}\n\n\t// scan the row\n\treturn d.ScanRow(rows, dst)\n}",
"func (s *store) Get(key string) (*api.Value, error) {\n\tlogger.Debugf(\"load key [%s] from db\", key)\n\tvalue, err := s.db.Get(encodeKey(key, time.Time{}))\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"failed to load key [%s] from db\", key)\n\t}\n\tif value != nil {\n\t\tval, err := decodeVal(value)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrapf(err, \"failed to decode value [%s] for key [%s]\", value, key)\n\t\t}\n\t\treturn val, nil\n\t}\n\treturn nil, nil\n}",
"func Load(dst interface{}, val, key string) error {\n\treturn DefaultReader.Load(dst, val, key)\n}",
"func (c *Collection) LoadEncryptedKey(keyID string) (string, bool) {\n\tp, ok := loadProvisioner(c.byKey, keyID)\n\tif !ok {\n\t\treturn \"\", false\n\t}\n\t_, key, ok := p.GetEncryptedKey()\n\treturn key, ok\n}",
"func (kp *KeyPool) Load() error {\n\tif kp.LoadKeysCallback != nil {\n\t\treturn kp.LoadKeysCallback(kp)\n\t}\n\treturn nil\n}",
"func (h *DBHandle) Get(key []byte) ([]byte, error) {\n\treturn h.db.Get(constructLevelKey(h.dbName, key))\n}",
"func (h *DBHandle) Get(key []byte) ([]byte, error) {\n\treturn h.db.Get(constructLevelKey(h.dbName, key))\n}",
"func (rm *ResultMap) Load(key string) (*Result, error) {\n\tv, ok := rm.sm.Load(key)\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"fail to get the value of key %q from results\", key)\n\t}\n\n\tt, ok := v.(*Result)\n\tif !ok {\n\t\treturn nil, errors.New(\"stored type in ResultMap is invalid\")\n\t}\n\n\treturn t, nil\n}",
"func (c *Claim) Load(ctx *ServerContext) Error {\n\tvar err Error\n\tif c.ID != \"\" {\n\t\tbindVars := BindVars{\n\t\t\t\"id\": c.ID,\n\t\t}\n\t\tquery := fmt.Sprintf(`FOR obj IN %s \n FILTER obj.id == @id\n %s\n SORT obj.start DESC\n LIMIT 1 \n RETURN obj`,\n\t\t\tc.CollectionName(),\n\t\t\tc.DateFilter(bindVars))\n\t\terr = FindArangoObject(ctx, query, bindVars, c)\n\t} else if c.ArangoKey() != \"\" {\n\t\terr = LoadArangoObject(ctx, c, c.ArangoKey())\n\t} else {\n\t\terr = NewBusinessError(\"There is no key or id for this Claim\")\n\t}\n\n\treturn err\n}",
"func (rm *FilteredResultMap) Load(key string) (*FilteredResult, error) {\n\tv, ok := rm.sm.Load(key)\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"fail to get the value of key %q from results\", key)\n\t}\n\n\tt, ok := v.(*FilteredResult)\n\tif !ok {\n\t\treturn nil, errors.New(\"stored type in FilteredResultMap is invalid\")\n\t}\n\n\treturn t, nil\n}",
"func (b *Bolt) Load(id string, data interface{}) error {\n\terr := b.client.View(func(tx *bolt.Tx) error {\n\t\tbkt := tx.Bucket([]byte(b.bucket))\n\t\tv := bkt.Get([]byte(id))\n\t\tif v == nil {\n\t\t\treturn storage.ErrNotFound\n\t\t}\n\n\t\terr := json.Unmarshal(v, data)\n\t\treturn err\n\t})\n\n\treturn err\n}",
"func Load(i Item, conn *redis.Client) (Item, bool, error) {\n\treply, err := conn.Cmd(\"HMGET\", i.getValueKey(), i.listKeys()).List()\n\tif err != nil {\n\t\tOnPrimaryFailure()\n\t\treturn nil, false, err\n\t}\n\n\t//copy result to object\n\ti, err = i.fromList(reply)\n\tif err != nil {\n\t\t//error indicates the list was invalid, and since we know the command didn't fail, the item must not exist\n\t\treturn nil, false, nil\n\t}\n\n\t//no error\n\treturn i, true, nil\n}",
"func (db *MemoryStorage) Get(key []byte) ([]byte, error) {\n\tif entry, ok := db.data.Load(common.BytesToHex(key)); ok {\n\t\treturn entry.([]byte), nil\n\t}\n\treturn nil, ErrKeyNotFound\n}",
"func (store *SessionPostgresStore) load(session *sessions.Session) (bool, error) {\n\tvar count int\n\tvar sessionData orm.Session\n\tif err := orm.Engine.\n\t\tWhere(\"token = ?\", store.keyPrefix+session.ID).\n\t\tWhere(\"expiry > ?\", time.Now()).\n\t\tFind(&sessionData).Count(&count).Error; err != nil {\n\t\treturn false, err\n\t}\n\tif count == 0 {\n\t\treturn false, nil // no data was associated with this key\n\t}\n\treturn true, store.serializer.Deserialize(sessionData.Data, session)\n}",
"func Get(key string) (Entity, error) {\n\tconn := db.Pool.Get()\n\tdefer conn.Close()\n\n\trecord, err := db.Get(conn, key)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn Load(key, record), nil\n}",
"func LoadKey(path string) *msgs.PublicKey {\n\tdata := LoadBytes(path)\n\tvar k msgs.PublicKey\n\terr := proto.Unmarshal(data, &k)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn &k\n}",
"func PKLoadUser(db *pg.DB, pk int64) (*models.User, error) {\n\t// Select user by Primary Key\n\tuser := &models.User{ID: pk}\n\terr := db.Select(user)\n\n\tif err != nil {\n\t\treturn &models.User{UserName: \"New\"}, err\n\t}\n\n\tfmt.Println(\"User loaded From DB\")\n\treturn user, nil\n}",
"func (db RDB) LoadSelf(o DBObject) error {\n\tif id, ok := o.Primary(); ok {\n\t\treturn db.LoadBy(o, o.KeyFields()[0], id)\n\t}\n\tif len(o.KeyFields()) == 0 {\n\t\treturn ErrNoKeyField\n\t}\n\tkeys := o.KeyFields()\n\tif len(keys) == 1 {\n\t\treturn db.LoadBy(o, keys[0], o.KeyValues()[0])\n\t}\n\tif len(keys) == 0 {\n\t\treturn ErrKeyMissing\n\t}\n\tvalues := o.KeyValues()\n\tm := make(map[string]interface{}, len(keys))\n\tfor i, key := range keys {\n\t\tm[key] = values[i]\n\t}\n\n\treturn db.Load(o, m)\n}",
"func (s *DatastoreStore) load(r *http.Request,\n\tsession *sessions.Session) error {\n\n\tk := datastore.NameKey(s.kind, session.ID, nil)\n\tentity := Session{}\n\tif err := ds.Get(context.Background(), k, &entity); err != nil {\n\t\tif err == datastore.ErrNoSuchEntity {\n\t\t\treturn ErrNotFound\n\t\t}\n\t\treturn fmt.Errorf(\"Could not get session %s: %v\", session.ID, err)\n\t}\n\tif err := deserialize(entity.Value, &session.Values); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (db *memorydb) Get(key []byte) ([]byte, error) {\n\tdb.sm.RLock()\n\tdefer db.sm.RUnlock()\n\n\tif value, ok := db.db[string(key)]; ok {\n\t\treturn value, nil\n\t}\n\n\treturn nil, nil\n}",
"func (a *Account) Load(c client.Client) error {\n\t// load the registration\n\tif err := a.LoadRegistration(c); err != nil {\n\t\treturn err\n\t}\n\t// load the key\n\tif err := a.LoadKey(c); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (db *DB) Get(key []byte) ([]byte, error) {\n\ttx := db.BeginTransaction(false)\n\tdefer tx.Discard()\n\treturn tx.Get(key)\n}",
"func (s *Int64Map) Load(key int64) (value interface{}, ok bool) {\n\tx := s.header\n\tfor i := maxLevel - 1; i >= 0; i-- {\n\t\tnex := x.loadNext(i)\n\t\tfor nex != nil && nex.lessthan(key) {\n\t\t\tx = nex\n\t\t\tnex = x.loadNext(i)\n\t\t}\n\n\t\t// Check if the key already in the skip list.\n\t\tif nex != nil && nex.equal(key) {\n\t\t\tif nex.flags.MGet(fullyLinked|marked, fullyLinked) {\n\t\t\t\treturn nex.loadVal(), true\n\t\t\t}\n\t\t\treturn nil, false\n\t\t}\n\t}\n\treturn nil, false\n}",
"func (db *Database) Get(key string) ([]byte, error) {\n\tvar data []byte\n\n\tif db == nil || db.conn == nil {\n\t\treturn data, hord.ErrNoDial\n\t}\n\n\tif err := hord.ValidKey(key); err != nil {\n\t\treturn data, err\n\t}\n\n\terr := db.conn.Query(`SELECT data FROM hord WHERE key = ?;`, key).Scan(&data)\n\tif err != nil && err != gocql.ErrNotFound {\n\t\treturn data, err\n\t}\n\tif err == gocql.ErrNotFound {\n\t\treturn data, hord.ErrNil\n\t}\n\n\treturn data, nil\n}",
"func Get(key []byte) ([]byte, error) {\n\treturn db.Get(key, nil)\n}",
"func (a *Account) Load() error {\n\tvalue, err := a.get()\n\tif err != nil {\n\t\treturn err\n\t}\n\tunmarshal(value, &a)\n\treturn nil\n}",
"func (s *LevelDBStore) Get(key string) []byte {\n\tv, err := s.db.Get([]byte(key), nil)\n\tif err != nil {\n\t\treturn []byte{}\n\t}\n\treturn v\n}",
"func (d *Database) Load() error {\n\tb, err := ioutil.ReadFile(d.FilePath)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := json.Unmarshal(b, &d.State); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func (j *JobWorker) load(key string) (*Job, bool) {\n\tj.RLock()\n\tvalue, ok := j.jobs[key]\n\tj.RUnlock()\n\treturn value, ok\n}",
"func (r *HashJsonCodecRedisController) Load(key string) error {\n\t// redis conn\n\tconn := r.pool.Get()\n\tdefer conn.Close()\n\n\t// load data from redis hash\n\tdata, err := github_com_gomodule_redigo_redis.ByteSlices(conn.Do(\"HGETALL\", key))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// parse redis hash field name and value\n\tstructure := make(map[string]interface{})\n\tfor i := 0; i < len(data); i += 2 {\n\t\tswitch string(data[i]) {\n\t\tcase \"HashJsonCodec\":\n\t\t\t// unmarshal HashJsonCodec\n\t\t\tif r.m.HashJsonCodec == nil {\n\t\t\t\tr.m.HashJsonCodec = new(HashJsonCodec)\n\t\t\t}\n\t\t\tif err := github_com_json_iterator_go.Unmarshal(data[i+1], r.m.HashJsonCodec); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\tdefault:\n\t\t\tstructure[string(data[i])] = string(data[i+1])\n\t\t}\n\t}\n\n\t// use mapstructure weak decode structure to HashJsonCodec\n\treturn github_com_mitchellh_mapstructure.WeakDecode(structure, r.m)\n}",
"func (c *Context) LoadUser(key string) error {\n\tif c.User != nil {\n\t\treturn nil\n\t}\n\n\tvar user interface{}\n\tvar err error\n\n\tif index := strings.IndexByte(key, ';'); index > 0 {\n\t\tuser, err = c.OAuth2Storer.GetOAuth(key[:index], key[index+1:])\n\t} else {\n\t\tuser, err = c.Storer.Get(key)\n\t}\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tc.User = Unbind(user)\n\treturn nil\n}",
"func (user *GenaroUser) LoadAsyKey(ecdsapath, eciespath string) (err error) {\n\tuser.Spri, err = crypto.LoadEcdsaKeyFromFile(ecdsapath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tuser.Epri, err = crypto.LoadEciesKeyFromFile(eciespath)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (rk *caIdemixRevocationKey) Load() error {\n\tpubKeyBytes, err := ioutil.ReadFile(rk.pubKeyFile)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"Failed to read revocation public key from %s\", rk.pubKeyFile)\n\t}\n\tif len(pubKeyBytes) == 0 {\n\t\treturn errors.New(\"Revocation public key file is empty\")\n\t}\n\tprivKey, err := ioutil.ReadFile(rk.privateKeyFile)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"Failed to read revocation private key from %s\", rk.privateKeyFile)\n\t}\n\tif len(privKey) == 0 {\n\t\treturn errors.New(\"Revocation private key file is empty\")\n\t}\n\tpk, pubKey, err := DecodeKeys(privKey, pubKeyBytes)\n\tif err != nil {\n\t\treturn errors.WithMessage(err, \"Failed to decode revocation key\")\n\t}\n\tpk.PublicKey = *pubKey\n\trk.key = pk\n\treturn nil\n}",
"func readKey(db *bolt.DB, name string) ([]byte, error) {\n\tkey := make([]byte, 32)\n\terr := db.Update(func(tx *bolt.Tx) error {\n\t\tb, err := tx.CreateBucketIfNotExists([]byte(\"settings\"))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// return key if exists\n\t\tk := b.Get([]byte(name))\n\t\tif k != nil {\n\t\t\tcopy(key, k)\n\t\t\treturn nil\n\t\t}\n\t\t// if key not found, generate one\n\t\t_, err = rand.Read(key)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn b.Put([]byte(name), key)\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn key, nil\n}",
"func (m *jobMap) Load(key string) (*Job, bool) {\n\ti, ok := m.data.Load(key)\n\tif !ok {\n\t\treturn nil, false\n\t}\n\ts, ok := i.(*Job)\n\treturn s, ok\n}",
"func get(w http.ResponseWriter, r *http.Request) {\n \n key := r.FormValue(\"key\")\n \n fmt.Print(key)\n db, err := leveldb.OpenFile(\"db\", nil)\n \n if err != nil {\n fmt.Println(err)\n }\n data ,err2 := db.Get([]byte(key),nil)\n fmt.Println(data)\n if err2 != nil {\n fmt.Println(err2)\n }\n w.Write(data)\n defer db.Close()\n \n}",
"func (u *Session) Load() error {\n\treturn DB.LoadSession(u)\n}",
"func (r *Registry) Load(relPath string) (value interface{}, ok bool) {\n\t_, filename, _, _ := runtime.Caller(1)\n\tdir, _ := filepath.Split(filename)\n\treturn r.db.Load(filepath.Join(dir, relPath))\n}",
"func LoadKey(keyData string) (loadedPrivateKeys int, err string) {\n\tl, e := chevronlib.LoadKey(keyData)\n\tloadedPrivateKeys = l\n\tif e != nil {\n\t\terr = e.Error()\n\t}\n\treturn\n}",
"func (acc *Account) LoadPrivKey(prikey string) ([]byte, error) {\n\tprivHex, err := hex.DecodeString(prikey)\n\tvar priv secp256k1.PrivKeySecp256k1\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tcopy(priv[:], privHex)\n\ttmp := [32]byte(priv)\n\treturn tmp[:], nil\n}",
"func (db RDB) LoadBy(o DBObject, key string, value interface{}) error {\n\tvar text, query string\n\tswitch value := value.(type) {\n\tcase string:\n\t\ttext = \"select %s from %s where %s='%s'\"\n\t\tquery = fmt.Sprintf(text, o.SelectFields(), o.TableName(), key, value)\n\tcase int, int64, uint, uint64:\n\t\ttext = \"select %s from %s where %s=%d\"\n\t\tquery = fmt.Sprintf(text, o.SelectFields(), o.TableName(), key, value)\n\tdefault:\n\t\ttext = \"select %s from %s where %s=%v\"\n\t\tquery = fmt.Sprintf(text, o.SelectFields(), o.TableName(), key, value)\n\t}\n\treturn db.get(o.Receivers(), query)\n}",
"func (f fileRecord) Load(id interface{}, col string) fileRecord {\n\t// Open database connection\n\tdb, err := dbConnect()\n\tif err != nil {\n\t\tlog.Println(err.Error())\n\t\treturn f\n\t}\n\n\t// Load fileRecord by column\n\tif f, err = db.LoadFileRecord(id, col); err != nil {\n\t\tlog.Println(err.Error())\n\t\treturn fileRecord{}\n\t}\n\n\tif err := db.Close(); err != nil {\n\t\tlog.Println(err.Error())\n\t}\n\n\treturn f\n}",
"func (d *DbBackendCouch) load(database string, documentID string, doc interface{}) error {\n\terr := d.ensureConnection()\n\tif err != nil {\n\t\treturn err\n\t}\n\tdb, err := d.client.EnsureDB(database)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// mutate the doc\n\terr = db.Get(documentID, doc, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn err\n}",
"func (m *docsMap) Load(key string) (*Doc, bool) {\n\ti, ok := m.Data().Load(key)\n\tif !ok {\n\t\treturn nil, false\n\t}\n\ts, ok := i.(*Doc)\n\treturn s, ok\n}",
"func (wa *WzAES) LoadKey(pkiDir string) error {\n\tbuff, err := ioutil.ReadFile(path.Join(pkiDir, AES_TOKEN))\n\tif err != nil {\n\t\treturn err\n\t}\n\tif len(buff) != 0x20 {\n\t\treturn fmt.Errorf(\"AES key length is not as expected: %d\", len(buff))\n\t}\n\twa.key = &[32]byte{}\n\tfor idx, elm := range buff {\n\t\twa.key[idx] = elm\n\t}\n\n\treturn nil\n}",
"func Load() {\n\tpostgres.Load()\n}",
"func Load(s *aklib.DBConfig, pwd []byte, priv string) (*Wallet, error) {\n\tvar wallet = Wallet{\n\t\tAddressChange: make(map[string]struct{}),\n\t\tAddressPublic: make(map[string]struct{}),\n\t}\n\n\terr := s.DB.View(func(txn *badger.Txn) error {\n\t\terr := db.Get(txn, []byte(priv), &wallet, db.HeaderWallet)\n\t\treturn err\n\t})\n\treturn &wallet, err\n}",
"func (f *FilePersist) Load(ctx context.Context) (map[string]string, error) {\n\tfr, err := os.Open(f.filename)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to open file: %w\", err)\n\t}\n\tdefer fr.Close()\n\n\tdb := make(map[string]string)\n\tif err := json.NewDecoder(fr).Decode(&db); err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to decode file: %w\", err)\n\t}\n\treturn db, nil\n}",
"func (db *TriasDB) Get(key []byte) []byte {\n\tdb.mtx.Lock()\n\tdefer db.mtx.Unlock()\n\n\t// TODO: unimplement\n\n\tvalue, err := file.Get(key)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn value\n}",
"func (m *infoMap) Load(key string) (Info, bool) {\n\ti, ok := m.data.Load(key)\n\tif !ok {\n\t\treturn Info{}, false\n\t}\n\ts, ok := i.(Info)\n\treturn s, ok\n}",
"func (s *Database) Load() error {\n\tif _, err := os.Stat(s.dbFile); os.IsNotExist(err) {\n\t\treturn nil\n\t}\n\n\ts.groups = map[string]Group{}\n\tjsonString, err := ioutil.ReadFile(s.dbFile)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to load database file '%s': %e\", s.dbFile, err)\n\t}\n\n\tif len(jsonString) == 0 {\n\t\treturn nil\n\t}\n\n\tif err := json.Unmarshal(jsonString, &s.groups); err != nil {\n\t\treturn fmt.Errorf(\"failed to deserialize database '%s' to json: %e\", s.dbFile, err)\n\t}\n\n\treturn nil\n}",
"func (db RDB) LoadByID(o DBObject, id int64) error {\n\tconst text = \"select %s from %s where %s=%d\"\n\tif id, ok := o.Primary(); ok {\n\t\treturn db.LoadBy(o, o.KeyFields()[0], id)\n\t}\n\treturn fmt.Errorf(\"does not have an int primary id\")\n\n}",
"func (s *Store) Get(key string) []byte {\n\n\tval, _ := s.objects.Load(key)\n\n\treturn val.([]byte)\n}",
"func (mdb MongoDBConnection) Load(Name string) (result structs.Character, err error) {\n\tmdb.session = mdb.GetSession()\n\tdefer mdb.session.Close()\n\tc := mdb.session.DB(\"webadventure\").C(\"characters\")\n\terr = c.Find(bson.M{\"name\": Name}).One(&result)\n\treturn result, err\n}"
] | [
"0.78715444",
"0.74463856",
"0.7208976",
"0.7104961",
"0.6805485",
"0.673032",
"0.6668427",
"0.6607099",
"0.65913016",
"0.65868545",
"0.65848124",
"0.6546285",
"0.65399575",
"0.6534491",
"0.6447067",
"0.6364313",
"0.6310213",
"0.62940556",
"0.6269194",
"0.6264871",
"0.6264147",
"0.6263292",
"0.62561566",
"0.623841",
"0.6238123",
"0.6234498",
"0.62170863",
"0.6215296",
"0.61887866",
"0.6184287",
"0.6181155",
"0.6174447",
"0.6172765",
"0.61570686",
"0.61388093",
"0.6126759",
"0.6114504",
"0.6108234",
"0.6103272",
"0.6099742",
"0.60901886",
"0.6077204",
"0.6075624",
"0.6066313",
"0.60518646",
"0.60378885",
"0.6037758",
"0.60172576",
"0.6015208",
"0.60122377",
"0.6012064",
"0.6007922",
"0.6007922",
"0.5999673",
"0.5998514",
"0.5997933",
"0.59952575",
"0.5987867",
"0.59847325",
"0.597175",
"0.5960959",
"0.59456813",
"0.5942626",
"0.5940519",
"0.59401536",
"0.5924534",
"0.5918579",
"0.59133554",
"0.5909613",
"0.5890394",
"0.588712",
"0.5866864",
"0.5865906",
"0.58553576",
"0.5852255",
"0.58487",
"0.58368397",
"0.58322436",
"0.5829738",
"0.5826385",
"0.5824739",
"0.5817804",
"0.5816565",
"0.5816266",
"0.58157545",
"0.58075345",
"0.5797573",
"0.5794861",
"0.57902855",
"0.57880723",
"0.5781181",
"0.57648563",
"0.575188",
"0.5743202",
"0.5735771",
"0.57254326",
"0.57223433",
"0.57032466",
"0.5700733",
"0.56917053"
] | 0.6601643 | 8 |
LogAllKeys Prints the content of memory manager | func (mgr *LocalHashMapDBMgr) LogAllKeys() {
for k, e := range mgr.memMap {
glog.Infof("%v: %v", k, e)
}
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (c *LRUCache) PrintAll() {\n\tc.mutex.RLock()\n\tdefer c.mutex.RUnlock()\n\tfor e := c.cacheList.Front(); e != nil; e = e.Next() {\n\t\tfmt.Printf(\"[%v, %v] \", e.Value.(*entry).key, e.Value.(*entry).value)\n\t}\n\tfmt.Println()\n\treturn\n}",
"func PrintAll(c config.Config) error {\n\tlog.Info(\"Opening session for all entries in vault\")\n\tcleartextValues, err := decryptVault(c)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tmenuItems := []string{}\n\tfor key := range cleartextValues {\n\t\tmenuItems = append(menuItems, key)\n\t}\n\n\tprintWithMenu(menuItems, cleartextValues, c.DecryptSessionTimeout)\n\treturn nil\n}",
"func (fr *FileRing) PrintStore() {\n\tfr.lock.RLock()\n\tdefer fr.lock.RUnlock()\n\tfmt.Println(\"Files being stored at this machine:\")\n\tfor _, m := range fr.hashMap {\n\t\tj := 0\n\t\tfor _, f := range m.SortedFiles() {\n\t\t\tif f.Tombstone {\n\t\t\t\tfmt.Printf(\"%v TOMBSTONE\\n\", f.File)\n\t\t\t} else {\n\t\t\t\tj += 1\n\t\t\t\tif f.AutoTombstone {\n\t\t\t\t\tfmt.Printf(\"%v (Version %v)\\t%v\\tDELETED\\n\", f.File, j, f.Sequence)\n\t\t\t\t} else {\n\t\t\t\t\tfmt.Printf(\"%v (Version %v)\\t%v\\n\", f.File, j, f.Sequence)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}",
"func DisplayAll() {\n\n\tif len(dataStorageMap) == 0 {\n\t\tfmt.Println(\"Data Storage Empty!!! No data Found !!!\")\n\t} else {\n\t\tfor key, val := range dataStorageMap {\n\t\t\tfmt.Println(key, \"-->\", val)\n\t\t}\n\t}\n}",
"func Print(key Type) {\n\tmutex.RLock()\n\tdefer mutex.RUnlock()\n\n\tklog.Infof(\"%s: %s\", key, GlobalStats[key])\n}",
"func Print(key Type) {\n\tmutex.RLock()\n\tdefer mutex.RUnlock()\n\n\tklog.Infof(\"%s: %s\", key, globalStats[key])\n}",
"func Log(m map[string]interface{}) {\n\tfmt.Println(\"[debug] →\")\n\tfor k, v := range m {\n\t\tfmt.Printf(\"\\t%v: %+v\\n\", k, v)\n\t}\n\tfmt.Println(\"[debug] □\")\n}",
"func Dump(nodes *map[string]storage.Node) {\n\tt := time.Now()\n\tf, _ := os.Create(strings.Join([]string{\"persister/data/dump_\", getTimeStr(t), \".log\"}, \"\"))\n\tdefer f.Close()\n\tfor k, v := range *nodes {\n\t\tf.WriteString(\"key: \" + k + \" \" + v.String() + \"\\n\")\n\t}\n\n}",
"func (d *data) printAll(logFileName string) {\n\n\tfileLocation := \"/\" + d.logFileLocation + \"/\" + logFileName\n\tfile, err := os.Open(fileLocation)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed opening file: %s\", err)\n\t}\n\n\tdefer file.Close()\n\n\tscanner := bufio.NewScanner(file)\n\tscanner.Split(bufio.ScanLines)\n\n\tfor scanner.Scan() {\n\t\tfmt.Println(scanner.Text())\n\t}\n}",
"func (m *DBMem) PrintDB() {\n m.RLock()\n defer m.RUnlock()\n\n for i := 0; i < len(m.data); i++ {\n fmt.Println(\"ID:\", i, m.data[i])\n }\n}",
"func ListAllKeys() error {\n\n\tdb := Connect()\n\n\tif ExistDb(PathDb) {\n\n\t\tfmt.Println(\"Exist\", db)\n\n\t} else {\n\n\t\tfmt.Println(\"Not exist!\")\n\t\tos.Exit(1)\n\t}\n\n\tdb.View(func(tx *bolt.Tx) error {\n\n\t\t// Assume bucket exists and has keys\n\t\tb := tx.Bucket([]byte(Database))\n\n\t\tc := b.Cursor()\n\n\t\tfor k, v := c.First(); k != nil; k, v = c.Next() {\n\n\t\t\tfmt.Printf(\"key=%s, value=%s\\n\", k, v)\n\t\t}\n\n\t\treturn nil\n\t})\n\n\treturn nil\n\n}",
"func (u *LRU) show() {\n\tfmt.Println(\"MAP:\")\n\tfor k, v := range u.entries {\n\t\tvv := v.Value.(kvpair)\n\t\tfmt.Printf(\"K:%20s K2: %20s V:%20s\\n\", k, vv.Key, vv.Value)\n\t}\n\tfmt.Println(\"LIST:\")\n\tfor elem := u.last.Front(); elem != nil; elem = elem.Next() {\n\t\tvv := elem.Value.(kvpair)\n\t\tfmt.Printf(\"kvpair:%+v\\n\", vv)\n\t}\n}",
"func (l *Log) DumpLog() {\n\tfor _, v := range l.Entries {\n\t\tfmt.Println(v)\n\t}\n}",
"func printScanKey(ch <-chan []string) {\n\tfor {\n\t\tfor _, v := range <-ch {\n\t\t\tfmt.Println(v)\n\t\t}\n\t}\n}",
"func (o *IgmpFlowTbl) dumpAll() {\n\n\tvar it core.DListIterHead\n\tcnt := 0\n\tfor it.Init(&o.head); it.IsCont(); it.Next() {\n\t\te := covertToIgmpEntry(it.Val())\n\t\tfmt.Printf(\" %v:%v \\n\", cnt, e.Ipv4)\n\t\tcnt++\n\t}\n}",
"func PrintMap(m map[string]string) {\n\tfor k := range m {\n\t log.Println(\"\\t\",k,\"=\",m[k])\n\t}\n}",
"func printKV(ag *alertGroup, m *sync.Mutex) {\n\tm.Lock()\n\tfor _, alert := range ag.Alerts {\n\n\t\tfmt.Printf(\"\\\"status: %s\\\", \", alert.Status)\n\n\t\tfor k, v := range alert.Labels {\n\t\t\tfmt.Printf(\"\\\"%s: %s\\\", \", k, v)\n\t\t}\n\t\tfor k, v := range alert.Annotations {\n\t\t\tfmt.Printf(\"\\\"%s: %s\\\", \", k, v)\n\t\t}\n\t\tfmt.Printf(\"\\\"startsAt: %s\\\", \\\"endsAt: %s\\\"\\n\", alert.StartsAt.Truncate(time.Millisecond), alert.EndsAt.Truncate(time.Millisecond))\n\t}\n\tm.Unlock()\n}",
"func (h *MaxKeyHeap) Print() {\n\tfor i := 0; i < h.Len(); i++ {\n\t\tfmt.Printf(\"%v \", (*h)[i])\n\t}\n\tprintln(\"\")\n}",
"func (store KeyValue) Dump() {\n\t// TODO: Dump out debugging information here\n\ttexts := store.database.Stats()\n\tfor key, value := range texts {\n\t\tlog.Debug(\"Stat\", key, value)\n\t}\n\n\titer := store.database.Iterator(nil, nil)\n\tfor ; iter.Valid(); iter.Next() {\n\t\thash := iter.Key()\n\t\tnode := iter.Value()\n\t\tlog.Debug(\"Row\", hash, node)\n\t}\n}",
"func (s *LoginServer) printServerKeys(w http.ResponseWriter) {\n\n\tfmt.Fprintf(w, \"List of server keys\\n\")\n\tfmt.Fprintf(w, \"-------------------\\n\")\n\tfmt.Fprintf(w, \"Index\\tValue\\n\")\n\tfor _, key := range s.Keys.ServiceKeys() {\n\t\tfmt.Fprintf(w, \"%v\\t%v\\n\", key.Index, hex.EncodeToString(key.Value[:]))\n\t}\n}",
"func (am *AccountManager) DumpKeys() ([]string, error) {\n\tvar keys []string\n\tfor _, a := range am.AllAccounts() {\n\t\tswitch walletKeys, err := a.DumpPrivKeys(); err {\n\t\tcase wallet.ErrWalletLocked:\n\t\t\treturn nil, err\n\n\t\tcase nil:\n\t\t\tkeys = append(keys, walletKeys...)\n\n\t\tdefault: // any other non-nil error\n\t\t\treturn nil, err\n\t\t}\n\n\t}\n\treturn keys, nil\n}",
"func (session KeyValueSession) Dump() {\n\ttexts := session.store.database.Stats()\n\tfor key, value := range texts {\n\t\tlog.Debug(\"Stat\", key, value)\n\t}\n\n\titer := session.store.database.Iterator(nil, nil)\n\tfor ; iter.Valid(); iter.Next() {\n\t\thash := iter.Key()\n\t\tnode := iter.Value()\n\t\tlog.Debug(\"Row\", hash, node)\n\t}\n}",
"func (eh *Hashset) PrintAll() {\n\tfor _, page := range eh.pages {\n\t\tfor _, v := range page.items {\n\t\t\tfmt.Println(v)\n\t\t}\n\t}\n}",
"func (kvStore *KVStore) DumpStore() []KVPair {\n els := make([]KVPair, len(kvStore.mapping))\n\n i := 0\n for k, v := range kvStore.mapping {\n els[i] = KVPair{k, *v}\n i++\n }\n\n return els\n}",
"func ( handle * MemcacheClient) GetAllKey( ) ([]string ,error){\n\treturn nil,nil\n}",
"func GetLogKeys() map[string]bool {\n\tconsoleLogKeys := ConsoleLogKey().EnabledLogKeys()\n\tlogKeys := make(map[string]bool, len(consoleLogKeys))\n\tfor _, v := range consoleLogKeys {\n\t\tlogKeys[v] = true\n\t}\n\treturn logKeys\n}",
"func KeyLog(rw io.ReadWriter) (err error) {\r\n\t// Query key mapped to integer `0x00` to `0xFF` if it's pressed.\r\n\tfor i := 0; i < 0xFF; i++ {\r\n\t\tasynch, _, _ := syscall.Syscall(procGetAsyncKeyState.Addr(), 1, uintptr(i), 0, 0)\r\n\r\n\t\t// If the least significant bit is set ignore it.\r\n\t\t//\r\n\t\t// As it's written in the documentation:\r\n\t\t// `if the least significant bit is set, the key was pressed after the previous call to GetAsyncKeyState.`\r\n\t\t// Which we don't care about :)\r\n\t\tif asynch&0x1 == 0 {\r\n\t\t\tcontinue\r\n\t\t}\r\n\r\n\t\t// Write i to rw.\r\n\t\terr = writeKey(i, rw)\r\n\r\n\t\tif err != nil {\r\n\t\t\treturn err\r\n\t\t}\r\n\t}\r\n\r\n\treturn nil\r\n}",
"func printCmd(cmd data.ShareCommand) {\n\tfor _, v := range cmd.Data() {\n\t\tlog.Println(v)\n\t}\n}",
"func (consensus *Consensus) DebugPrintPublicKeys() {\n\tfor _, k := range consensus.PublicKeys {\n\t\tstr := fmt.Sprintf(\"%s\", k)\n\t\tutils.GetLogInstance().Debug(\"pk:\", \"string\", str)\n\t}\n\n\tutils.GetLogInstance().Debug(\"PublicKeys:\", \"#\", len(consensus.PublicKeys))\n}",
"func (dbm *MemDB) Print() error {\n\tdbm.mtx.RLock()\n\tdefer dbm.mtx.RUnlock()\n\n\tdbm.btree.Ascend(func(i btree.Item) bool {\n\t\titem := i.(*item)\n\t\tfmt.Printf(\"[%X]:\\t[%X]\\n\", item.key, item.value)\n\t\treturn true\n\t})\n\treturn nil\n}",
"func (m *MemoryStorage) GetAll() (map[string]Entry, error) {\n\treturn m.entries, nil\n}",
"func (lmem *lockedMemRepo) List() ([]string, error) {\n\tif err := lmem.checkToken(); err != nil {\n\t\treturn nil, err\n\t}\n\tlmem.RLock()\n\tdefer lmem.RUnlock()\n\n\tres := make([]string, 0, len(lmem.mem.keystore))\n\tfor k := range lmem.mem.keystore {\n\t\tres = append(res, k)\n\t}\n\treturn res, nil\n}",
"func (s *DNSControllerScope) AllKeys() []string {\n\ts.mutex.Lock()\n\tdefer s.mutex.Unlock()\n\n\tvar keys []string\n\tfor k := range s.Records {\n\t\tkeys = append(keys, k)\n\t}\n\treturn keys\n}",
"func main1() {\n\twg := sync.WaitGroup{}\n\tfor i := 0; i < 20; i++ {\n\t\twg.Add(1)\n\t\tgo func() {\n\t\t\tkey := strconv.Itoa(i)\n\t\t\tset(key, i)\n\t\t\tfmt.Printf(\"key:%v,value:%v \\n\", key, i)\n\t\t\twg.Done()\n\t\t}()\n\t}\n\twg.Wait()\n\tfmt.Println(len(m))\n}",
"func (l *MemoryLogger) Debug(msg string, keyvals ...interface{}) {\n\tl.println(\"DEBUG\", msg, keyvals)\n}",
"func (list *APTAuditList) printAll() {\n\tfor _, result := range list.results {\n\t\tfmt.Print(result)\n\t}\n}",
"func printHostStorageMapVerbose(hsm control.HostStorageMap, out io.Writer, opts ...PrintConfigOption) error {\n\tfor _, key := range hsm.Keys() {\n\t\thss := hsm[key]\n\t\thosts := getPrintHosts(hss.HostSet.RangedString(), opts...)\n\t\tlineBreak := strings.Repeat(\"-\", len(hosts))\n\t\tfmt.Fprintf(out, \"%s\\n%s\\n%s\\n\", lineBreak, hosts, lineBreak)\n\t\tfmt.Fprintf(out, \"HugePage Size: %d KB\\n\", hss.HostStorage.MemInfo.HugepageSizeKiB)\n\t\tif len(hss.HostStorage.ScmNamespaces) == 0 {\n\t\t\tif err := PrintScmModules(hss.HostStorage.ScmModules, out, opts...); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else {\n\t\t\tif err := PrintScmNamespaces(hss.HostStorage.ScmNamespaces, out, opts...); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\tfmt.Fprintln(out)\n\t\tif err := PrintNvmeControllers(hss.HostStorage.NvmeDevices, out, opts...); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Fprintln(out)\n\t}\n\n\treturn nil\n}",
"func (kb *Keybase) List() ([]keys.Info, error) {\n\tkb.mx.Lock()\n\tdefer kb.mx.Unlock()\n\treturn kb.kb.List()\n}",
"func PrintMetaMap(metaMap map[string]*FileMetaData) {\n\n\tfmt.Println(\"--------BEGIN PRINT MAP--------\")\n\n\tfor _, filemeta := range metaMap {\n\t\tfmt.Println(\"\\t\", filemeta.Filename, filemeta.Version, filemeta.BlockHashList)\n\t}\n\n\tfmt.Println(\"---------END PRINT MAP--------\")\n\n}",
"func (i *Instance) AllKeys() []string {\n\trows, err := i.db.Query(`SELECT key FROM config`)\n\tif err != nil {\n\t\tmultilog.Error(\"config:AllKeys query failed: %s\", errs.JoinMessage(err))\n\t\treturn nil\n\t}\n\tvar keys []string\n\tdefer rows.Close()\n\tfor rows.Next() {\n\t\tvar key string\n\t\trows.Scan(&key)\n\t\tkeys = append(keys, key)\n\t}\n\treturn keys\n}",
"func showlog(w http.ResponseWriter, _req *http.Request) {\n\tstoreLock.RLock()\n\tvar slogs = make([]*proto.SlowlogEntries, len(storeMap))\n\tidx := 0\n\tfor _, s := range storeMap {\n\t\tslogs[idx] = s.Reply()\n\t\tidx++\n\t}\n\tstoreLock.RUnlock()\n\n\tencoder := json.NewEncoder(w)\n\terr := encoder.Encode(slogs)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"%s\", err), http.StatusInternalServerError)\n\t}\n}",
"func PrintMetaMap(metaMap map[string]FileMetaData) {\n\n\tfmt.Println(\"--------BEGIN PRINT MAP--------\")\n\n\tfor _, filemeta := range metaMap {\n\t\tfmt.Println(\"\\t\", filemeta.Filename, filemeta.Version, filemeta.BlockHashList)\n//\t\tfmt.Println(\"\\t\", filemeta.Filename, filemeta.Version)\n\t}\n\n\tfmt.Println(\"---------END PRINT MAP--------\")\n\n}",
"func (l *MemoryLogger) Info(msg string, keyvals ...interface{}) {\n\tl.println(\"INFO \", msg, keyvals)\n}",
"func keys(c *cli.Context) {\n\tif c.NArg() < 1 {\n\t\tfmt.Fprintln(c.App.ErrWriter, \"keys requires at least 1 argument\")\n\t\tcli.ShowCommandHelp(c, \"keys\")\n\t\treturn\n\t}\n\tfor _, filepath := range c.Args() {\n\t\tdecoder := decoder.NewDecoder()\n\t\tgo dump.Decode(c, decoder, filepath)\n\t\tfor e := range decoder.Entries {\n\t\t\tfmt.Fprintf(c.App.Writer, \"%v\\n\", e.Key)\n\t\t}\n\t}\n}",
"func PrintCommands() {\n logger.Log(fmt.Sprintln(\"** Daemonized Commands **\"))\n for cmd, desc := range DaemonizedCommands() {\n logger.Log(fmt.Sprintf(\"%15s: %s\\n\", cmd, desc.description))\n }\n\n logger.Log(fmt.Sprintln(\"** Information Commands **\"))\n for cmd, desc := range InfoCommands() {\n logger.Log(fmt.Sprintf(\"%15s: %s\\n\", cmd, desc.description))\n }\n\n logger.Log(fmt.Sprintln(\"** Interactive Commands **\"))\n for cmd, desc := range InteractiveCommands() {\n logger.Log(fmt.Sprintf(\"%15s: %s\\n\", cmd, desc.description))\n }\n}",
"func prettyPrintKeys(keyStores []trustmanager.KeyStore, writer io.Writer) {\n\tvar info []keyInfo\n\n\tfor _, store := range keyStores {\n\t\tfor keyID, keyIDInfo := range store.ListKeys() {\n\t\t\tinfo = append(info, keyInfo{\n\t\t\t\trole: keyIDInfo.Role,\n\t\t\t\tlocation: store.Name(),\n\t\t\t\tgun: keyIDInfo.Gun,\n\t\t\t\tkeyID: keyID,\n\t\t\t})\n\t\t}\n\t}\n\n\tif len(info) == 0 {\n\t\twriter.Write([]byte(\"No signing keys found.\\n\"))\n\t\treturn\n\t}\n\n\tsort.Stable(keyInfoSorter(info))\n\n\ttw := initTabWriter([]string{\"ROLE\", \"GUN\", \"KEY ID\", \"LOCATION\"}, writer)\n\n\tfor _, oneKeyInfo := range info {\n\t\tfmt.Fprintf(\n\t\t\ttw,\n\t\t\tfourItemRow,\n\t\t\toneKeyInfo.role,\n\t\t\ttruncateWithEllipsis(oneKeyInfo.gun.String(), maxGUNWidth, true),\n\t\t\toneKeyInfo.keyID,\n\t\t\ttruncateWithEllipsis(oneKeyInfo.location, maxLocWidth, true),\n\t\t)\n\t}\n\ttw.Flush()\n}",
"func (m *MemoryStorer) All() ([]string, error) {\n\tm.mut.RLock()\n\tdefer m.mut.RUnlock()\n\n\tsessions := make([]string, len(m.sessions))\n\n\ti := 0\n\tfor id := range m.sessions {\n\t\tsessions[i] = id\n\t\ti++\n\t}\n\n\treturn sessions, nil\n}",
"func (proxy *StandAloneProxyConfig) Log() {\n\tfmt.Println(\"---------------------\")\n\tfmt.Println(\"managedArrays\")\n\tfor key, val := range proxy.managedArrays {\n\t\tfmt.Printf(\"%s ::: %+v\\n\", key, val)\n\t}\n\tfmt.Println(\"---------------------\")\n\tfmt.Println(\"---------------------\")\n\tfmt.Println(\"managementServers\")\n\tfor key, val := range proxy.managementServers {\n\t\tfmt.Printf(\"%v ::: %+v\\n\", key, val)\n\t}\n\tfmt.Println(\"---------------------\")\n\tfmt.Println(\"---------------------\")\n\tfmt.Println(\"proxyCredentials\")\n\tfor key, val := range proxy.proxyCredentials {\n\t\tfmt.Printf(\"%s ::: %+v\\n\", key, val)\n\t}\n\tfmt.Println(\"---------------------\")\n}",
"func (m *Map) Print() {\n\tfmt.Println(\"Map size:\", m.area)\n\n\tfor y := uint8(0); y < m.area.height; y++ {\n\t\tfmt.Printf(\"%4d |\", y)\n\n\t\tfor x := uint8(0); x < m.area.width; x++ {\n\t\t\tif p := atomic.LoadPointer(m.fields[y][x]); fieldIsEmpty(p) {\n\t\t\t\tfmt.Print(\" .\")\n\t\t\t} else {\n\t\t\t\tfmt.Print(\" x\")\n\t\t\t}\n\t\t}\n\n\t\tfmt.Println()\n\t}\n}",
"func (r *reflectorStore) ListKeys() []string {\n\tpanic(\"not implemented\")\n}",
"func (ks *KeyStore) PrintTokenSlots() {\n\tp, err := initializeLib()\n\tif err != nil {\n\t\tlogrus.Errorf(err.Error())\n\t\treturn\n\t}\n\tslots, err := p.GetSlotList(true)\n\tif err != nil {\n\t\tdefer common.FinalizeAndDestroy(p)\n\t\tlogrus.Errorf(\"loaded library %s, but failed to list HSM slots %s\", pkcs11Lib, err)\n\t\treturn\n\t}\n\tdefer common.FinalizeAndDestroy(p)\n\tprettyPrintTokens(slots, os.Stdout, p)\n}",
"func show(keyStr string) {\n\tkeys, err := keybinding.ParseAll(keyStr)\n\tif err != nil {\n\t\tfmt.Println(\"Error parsing\", keyStr, \":\", err)\n\t} else {\n\t\tfmt.Println(\"Key: \", keyStr, \"=\", keys)\n\t}\n}",
"func PrintAllPods() {\n\tcfg := &etcd.ClientConfig{\n\t\tConfig: &clientv3.Config{\n\t\t\tEndpoints: []string{\"127.0.0.1:32379\"},\n\t\t},\n\t\tOpTimeout: 1 * time.Second,\n\t}\n\t// Create connection to etcd.\n\tdb, err := etcd.NewEtcdConnectionWithBytes(*cfg, logrus.DefaultLogger())\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\tos.Exit(1)\n\t}\n\titr, err := db.ListValues(\"/vnf-agent/contiv-ksr/allocatedIDs/\")\n\tif err != nil {\n\t\tfmt.Printf(\"Error getting values\")\n\t\treturn\n\t}\n\tfor {\n\t\tkv, stop := itr.GetNext()\n\t\tif stop {\n\t\t\tfmt.Println()\n\t\t\tbreak\n\t\t}\n\t\tbuf := kv.GetValue()\n\t\tnodeInfo := &nodeinfomodel.NodeInfo{}\n\t\terr = json.Unmarshal(buf, nodeInfo)\n\t\tfmt.Println(\"\\n\" + nodeInfo.Name + \":\")\n\t\tfmt.Println(\"--------------\")\n\n\t\tPrintPodsPerNode(nodeInfo.ManagementIpAddress)\n\t}\n\tdb.Close()\n}",
"func EnumerateDebugRenderableKeys() []string {\n\tkeys := []string{}\n\tdebugMap.Range(func(k, v interface{}) bool {\n\t\tkey, ok := k.(string)\n\t\tif ok {\n\t\t\tkeys = append(keys, key)\n\t\t}\n\t\treturn true\n\t})\n\treturn keys\n}",
"func (d *DiskStorage) GetAll() (map[string]Entry, error) {\n\treturn d.memStorage.GetAll()\n}",
"func List(memoryStorage *PassgenStorage) {\n\tif len(memoryStorage.Storage) == 0 {\n\t\thelpers.NegativePrintf(\"\\nThere is no item in your storage\\n\\n\")\n\t\treturn\n\t}\n\n\ttable := tablewriter.NewWriter(os.Stdout)\n\ttable.SetHeader([]string{\"Application Name\", \"User Name\", \"Password\"})\n\n\tfor _, v := range memoryStorage.Storage {\n\t\ttable.Append([]string{v.AppName, v.UserName, v.Password})\n\t}\n\ttable.Render()\n}",
"func (cli *Cli) GetFreezedLogsAll(from, to int) map[string]*dai.Mapping {\n\tmappings := make(map[string]*dai.Mapping)\n\tfor {\n\t\tend := from + 100\n\t\tif end >= to {\n\t\t\tend = to\n\t\t}\n\t\tm := cli.GetFreezedLogsRetry(from, end, 5)\n\t\tlog.Printf(\"get freezed logs from %d to %d: %d logs\\n\", from, end, len(m))\n\t\tfor k, v := range m {\n\t\t\tmappings[k] = v\n\t\t}\n\t\tfrom = end + 1\n\t\tif from > to {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn mappings\n}",
"func (m *Manager) clear() error {\n\tfor _, ch := range m.meter {\n\t\tif ch == '\\n' {\n\t\t\t_, err := os.Stdout.WriteString(\"\\x1b[1A\\x1b[2K\")\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}",
"func (s *sequencer) dump() {\n\tif logrus.GetLevel() != logrus.TraceLevel {\n\t\treturn\n\t}\n\n\ts.lock.RLock()\n\tdefer s.lock.RUnlock()\n\n\tfor height := range s.blockPool {\n\t\tblk, err := s.get(height)\n\t\tif err == nil {\n\t\t\tlog.WithField(\"hash\", hex.EncodeToString(blk.Header.Hash)).\n\t\t\t\tWithField(\"height\", blk.Header.Height).\n\t\t\t\tTrace(\"sequencer item\")\n\t\t}\n\t}\n}",
"func (c *AdapterMemory) Keys(ctx context.Context) ([]interface{}, error) {\n\treturn c.data.Keys()\n}",
"func (cache Cache) ShowContent() {\n\tlock.Lock()\n\tdefer lock.Unlock()\n\tfor e := cache.linkedList.Front(); e != nil; e = e.Next() {\n\t\tk := e.Value.(CacheStruct).key\n\t\tv := cache.cacheMap[k].Value.(CacheStruct).value\n\t\tfmt.Println(\" k and v\", k, v)\n\t}\n}",
"func (ks *MemoryStore) RemoveAll() (err error) {\n\n\tfor key := range ks.Keys {\n\t\tdelete(ks.Keys, key)\n\t}\n\n\treturn\n}",
"func printAllocations(c *cli.Context) error {\n\tvar rooms, err = controllers.GetRoomAllocations(con)\n\n\texitOnError(err)\n\n\tfor _, room := range rooms {\n\t\tprintRoomDetails(room)\n\t}\n\n\treturn nil\n}",
"func (cfg *Config) Print() {\n fmt.Println(\"assigned key:\", cfg.ConsumerKey)\n}",
"func (p *PubKey) PrintList() int {\n\treturn p.OutputList(os.Stdout)\n}",
"func (p *Logger) flushLogs() {\n\tvar err error\n\tdefer func() {\n\t\tp.loopWait <- struct{}{}\n\t}()\n\tre := regexp.MustCompile(keyPattern)\n\tfor {\n\t\tselect {\n\t\tcase <-p.loopFactor:\n\t\t\treturn\n\t\tdefault:\n\t\t\those := make(chan interface{}, p.maxWorkers)\n\t\t\tvar wg sync.WaitGroup\n\n\t\t\t// workers\n\t\t\tfor i := 0; i < p.maxWorkers; i++ {\n\t\t\t\tp.env.ScheduleDaemon(func() {\n\t\t\t\t\tfor keyI := range hose {\n\t\t\t\t\t\tkey, _ := keyI.(string)\n\t\t\t\t\t\tmatch := re.FindStringSubmatch(key)\n\t\t\t\t\t\tif len(match) > 2 {\n\t\t\t\t\t\t\tif err = p.sendLogs(match[2]); err == nil {\n\t\t\t\t\t\t\t\tp.cmap.Delete(key)\n\t\t\t\t\t\t\t\twg.Done()\n\t\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\ttsN, _ := strconv.ParseInt(match[1], 10, 64)\n\t\t\t\t\t\t\tts := time.Unix(0, tsN)\n\n\t\t\t\t\t\t\tif time.Since(ts) > p.retentionPeriod {\n\t\t\t\t\t\t\t\tp.cmap.Delete(key)\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\twg.Done()\n\t\t\t\t\t}\n\t\t\t\t})\n\t\t\t}\n\n\t\t\tp.cmap.Range(func(k, v interface{}) bool {\n\t\t\t\twg.Add(1)\n\t\t\t\those <- k\n\t\t\t\treturn true\n\t\t\t})\n\t\t\twg.Wait()\n\t\t\tclose(hose)\n\t\t\ttime.Sleep(500 * time.Millisecond)\n\t\t}\n\t}\n}",
"func (cf *Info) GetAllKeys() []string {\n\tkeys := []string{}\n\tfor key := range cf.objInfo {\n\t\tkeys = append(keys, key)\n\t}\n\n\tsort.Strings(keys)\n\treturn keys\n}",
"func printKeysAndValues(l *log.Logger, keysAndValues ...interface{}) {\n\tfor i := 0; i < len(keysAndValues)/2; i += 2 {\n\t\tl.Printf(\"\\t%s = %s\\n\", keysAndValues[i], keysAndValues[i+1])\n\t}\n}",
"func (l Log) ShowLog () {\n fmt.Printf(\"\\n\\n----------------------------------------------------------------------------\\n\")\n fmt.Printf(\" ACTIVITY LOG \\n\")\n for i := range l.Action {\n fmt.Printf(\"%s\\n\", l.Action[i])\n }\n fmt.Printf(\"____________________________________________________________________________\\n\")\n}",
"func PrintBucketMap() string {\n\tbucketMap.RLock()\n\tdefer bucketMap.RUnlock()\n\n\tres := fmt.Sprintf(\"len:%d\\n\", len(bucketMap.m))\n\tres += \"keys:\\n\"\n\n\tfor k := range bucketMap.m {\n\t\tres += fmt.Sprintf(\"%s\\n\", k)\n\t}\n\n\treturn res\n}",
"func (s *SSHOrch) ShowClientMap() {\n\tfor k, v := range s.lookup {\n\t\tfmt.Printf(\"%s:%s\\n\", k, v.LocalAddr())\n\t}\n}",
"func List() []string {\n\tvar keys []string\n\tfor k := range loggers {\n\t\tkeys = append(keys, k)\n\t}\n\treturn keys\n}",
"func (keyMask LogKey) EnabledLogKeys() []string {\n\tvar logKeys = make([]string, 0, len(logKeyNames))\n\tfor i := 0; i < len(logKeyNames); i++ {\n\t\tlogKey := LogKey(1) << uint32(i)\n\t\tif keyMask.enabled(logKey, false) {\n\t\t\tlogKeys = append(logKeys, LogKeyName(logKey))\n\t\t}\n\t}\n\treturn logKeys\n}",
"func dumpMaps() {\n\t// TODO: make this function part of the exporter\n\tfor name, cmap := range builtinMetricMaps {\n\t\tquery, ok := queryOverrides[name]\n\t\tif !ok {\n\t\t\tfmt.Println(name)\n\t\t} else {\n\t\t\tfor _, queryOverride := range query {\n\t\t\t\tfmt.Println(name, queryOverride.versionRange, queryOverride.query)\n\t\t\t}\n\t\t}\n\n\t\tfor column, details := range cmap.columnMappings {\n\t\t\tfmt.Printf(\" %-40s %v\\n\", column, details)\n\t\t}\n\t\tfmt.Println()\n\t}\n}",
"func (s *collectorCache) ListKeys() []string {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\tkeys := make([]string, 0, len(s.collectorMap))\n\tfor k := range s.collectorMap {\n\t\tkeys = append(keys, k)\n\t}\n\treturn keys\n}",
"func (hm *HashMap) GetAllKeyAsString() []string {\n\thm.lock.RLock()\n\tdefer hm.lock.RUnlock()\n\n\tmapList := make([]string, 0)\n\tfor val := range hm.data {\n\t\tmapList = append(mapList, val.String())\n\t}\n\treturn mapList\n}",
"func dumpInfo(c chan os.Signal) {\n\tfor {\n\t\t<-c\n\t\tlog.Println(\"Signal caught - dumping runtime stats\")\n\t\tvar m runtime.MemStats\n\t\truntime.ReadMemStats(&m)\n\t\ts, _ := json.Marshal(m)\n\t\tlog.Println(\"MemStats JSON follows\")\n\t\tlog.Printf(\"%s\\n\", s)\n\t\tvar garC debug.GCStats\n\t\tdebug.ReadGCStats(&garC)\n\t\tlog.Printf(\"\\nLastGC:\\t%s\", garC.LastGC) // time of last collection\n\t\tlog.Printf(\"\\nNumGC:\\t%d\", garC.NumGC) // number of garbage collections\n\t\tlog.Printf(\"\\nPauseTotal:\\t%s\", garC.PauseTotal) // total pause for all collections\n\t\tlog.Printf(\"\\nPause:\\t%s\", garC.Pause) // pause history, most recent first\n\t\tlog.Println(\"debug.Stack: \" + string(debug.Stack()))\n\t\tlog.Println(\"runtime.NumGoroutine: \" + string(runtime.NumGoroutine()))\n\t}\n}",
"func (p *PersistableEvent) ToStringKeys() []string {\n\treturn []string{\"type\", \"nodeID\", \"fullNodeID\", \"amount\", \"time\", \"manaType\", \"transactionID\", \"inputID\"}\n}",
"func (ms *MemStore) GetAll(data map[string]io.ReaderFrom) error {\n\tvar err error\n\tms.mu.RLock()\n\tfor k, d := range data {\n\t\tbuf, ok := ms.data[k]\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\t\tif _, err = d.ReadFrom(&buf); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tms.mu.RUnlock()\n\treturn err\n}",
"func (km *Keystore) List() ([]string, error) {\n\tentries, err := km.ds.Query(query.Query{})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tkeys, err := entries.Rest()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar ids []string\n\tfor _, v := range keys {\n\t\tids = append(ids, strings.Split(v.Key, \"/\")[1])\n\t}\n\treturn ids, nil\n}",
"func (s Set) print() {\n\tvar (\n\t\tkeys []string\n\t)\n\n\tfor k := range s {\n\t\tkeys = append(keys, k)\n\t}\n\n\tsort.Strings(keys)\n\n\tfor _, k := range keys {\n\t\tfmt.Printf(\"%s -> \", k)\n\t\tfor _, v := range s[k] {\n\t\t\tfmt.Printf(\"%v \", v.name)\n\t\t}\n\t\tfmt.Printf(\"\\n\")\n\t}\n}",
"func (m _Map_String_String_Zapper) MarshalLogObject(enc zapcore.ObjectEncoder) (err error) {\n\tfor k, v := range m {\n\t\tenc.AddString((string)(k), v)\n\t}\n\treturn err\n}",
"func (d *DB) PrintAll() map[string]interface{} {\n\treturn gin.H{\n\t\t\"number\": 10,\n\t}\n}",
"func (manager *KeysManager) Clear() {\n\tmanager.KeyList = make([]*jose.JSONWebKey, 0)\n\tmanager.KeyMap = make(map[string]*jose.JSONWebKey)\n}",
"func (q *SubmitQueue) Dump() {\n\tfor i, item := range q.items {\n\t\tfmt.Printf(\"[%2d] %s:%d %s\\n\", i, item.Repo, item.PRNumber, item.Sha1)\n\t}\n}",
"func (m *logMeasurement) keys() []string {\n\ta := make([]string, 0, len(m.tagSet))\n\tfor k := range m.tagSet {\n\t\ta = append(a, k)\n\t}\n\tsort.Strings(a)\n\treturn a\n}",
"func (kdc krbAuth) DumpStrings() {\n\tfor _, val := range kdc.KrbRequests {\n\t\tdata, _ := val.String()\n\t\tfmt.Println(data)\n\t}\n}",
"func (m _Map_String_Binary_Zapper) MarshalLogObject(enc zapcore.ObjectEncoder) (err error) {\n\tfor k, v := range m {\n\t\tenc.AddString((string)(k), base64.StdEncoding.EncodeToString(v))\n\t}\n\treturn err\n}",
"func (ds *DataStore) GetAllKeys() []uint64 {\n\tds.dataStoreLock.RLock()\n\tdefer ds.dataStoreLock.RUnlock()\n\tkeys := make([]uint64, 0)\n\tfor k := range ds.kvSet {\n\t\tkeys = append(keys, k)\n\t}\n\treturn keys\n}",
"func printAllCommands(cmds []Commander) {\n\tconst format = \"%v\\t%v\\t%v\\t%v\\n\"\n\ttw := new(tabwriter.Writer).Init(os.Stdout, 0, 8, 2, ' ', 0)\n\tfmt.Fprintf(tw, format, \"Path\", \"Alias\", \"Command\", \"Args\")\n\tfmt.Fprintf(tw, format, \"-----\", \"-----\", \"-------\", \"----\")\n\tfor _, t := range cmds {\n\t\tfmt.Fprintf(tw, format, t.Path, t.Alias, t.Command, strings.Join(t.Args, \" \"))\n\t}\n\ttw.Flush()\n}",
"func (t *TLSCheckWriter) PrintAll(authDebug []byte) error {\n\tw, fullAuth, err := t.setupTLSCheckPrint(authDebug)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor _, entry := range fullAuth {\n\t\ttlsCheckPrintln(w, entry)\n\t}\n\treturn w.Flush()\n}",
"func GetAll() interface{} { return viper.AllKeys() }",
"func printState() {\n\tfmt.Println(\"\\nKVNODE STATE:\")\n\tfmt.Println(\"-keyValueStore:\")\n\tfor k := range keyValueStore {\n\t\tmutex.Lock()\n\t\tval := keyValueStore[k]\n\t\tmutex.Unlock()\n\t\tfmt.Println(\" Key:\", k, \"Value:\", val)\n\t}\n\tfmt.Println(\"-transactions:\")\n\tfor txId := range transactions {\n\t\tmutex.Lock()\n\t\ttx := transactions[txId]\n\t\tmutex.Unlock()\n\t\tfmt.Println(\" --Transaction ID:\", tx.ID, \"IsAborted:\", tx.IsAborted, \"IsCommitted:\", tx.IsCommitted, \"CommitId:\", tx.CommitID)\n\t\tfmt.Printf(\" Hash:%x\\n\", tx.CommitHash)\n\t\tfmt.Printf(\" AllHashes:%x\\n\", tx.AllHashes)\n\t\tfmt.Println(\" PutSet:\")\n\t\tfor k := range tx.PutSet {\n\t\t\tfmt.Println(\" Key:\", k, \"Value:\", tx.PutSet[k])\n\t\t}\n\t}\n\tfmt.Println(\"-blockChain:\")\n\tprintBlockChain()\n\tfmt.Println(\"blockChain size:\", len(blockChain))\n\tfmt.Println(\"Total number of transactions is:\", len(transactions), \"\\n\")\n\tfmt.Println(\"Nodes List and Status:\", nodeIpAndStatuses)\n}",
"func (c *Cache) GetAllItems() string {\n\tvar str string\n\tvar keys []string\n\n\tc.lock.RLock()\n\tdefer c.lock.RUnlock()\n\n\tfor k := range c.items {\n\t\tkeys = append(keys, k)\n\t}\n\n\tsort.Strings(keys)\n\tstr += \"\\n\"\n\n\tfor _, k := range keys {\n\t\tstr += k + \" \" + fmt.Sprintf(\"%v\", c.items[k].data) + \"\\n\"\n\t}\n\n\treturn str\n}",
"func (c *Cache) GetAllKeys() []string {\n\tkeys := make([]string, 0, len(c.cacheByKey))\n\n\tfor k := range c.cacheByKey {\n\t\tkeys = append(keys, k)\n\t}\n\n\treturn keys\n}",
"func GetAll(key string, conn *radix.Pool) string {\n\tif conn == nil {\n\t\tconn = GetPool()\n\t}\n\tvar value string\n\terr := conn.Do(radix.Cmd(value, \"hgetall\", key))\n\tif err != nil {\n\t\tprintln(err)\n\t\tpanic(err)\n\t}\n\treturn value\n}",
"func AllLog(ctx *context.Context) *string {\n\tlogString := (*ctx).Value(KeyAllLog)\n\tif logString != nil {\n\t\tv := logString.(string)\n\t\treturn &v\n\t}\n\treturn nil\n}",
"func (tcdb *Teocdb) List(key string) (keyList cdb.KeyList, err error) {\n\tvar keyOut string\n\titer := tcdb.session.Query(`\n\t\tSELECT key FROM map WHERE key >= ? and key < ?\n\t\tALLOW FILTERING`,\n\t\tkey, key+\"a\").Iter()\n\tfor iter.Scan(&keyOut) {\n\t\tkeyList.Append(keyOut)\n\t}\n\treturn\n}",
"func (cs *Caches) ShowAll(db Store) error {\n\ttx := db.Find(cs)\n\treturn tx.Error\n}",
"func (m *WatcherMetrics) NKeys() int { return 4 }"
] | [
"0.6180221",
"0.60460794",
"0.59220344",
"0.5912047",
"0.57457006",
"0.574495",
"0.5689104",
"0.5659756",
"0.5619629",
"0.5607107",
"0.5596631",
"0.55544996",
"0.55493444",
"0.55463827",
"0.55437076",
"0.5507923",
"0.54788",
"0.5450998",
"0.5426974",
"0.54160655",
"0.54081875",
"0.53727245",
"0.5368841",
"0.5360314",
"0.53251034",
"0.53132284",
"0.53121185",
"0.5288654",
"0.5267655",
"0.5266145",
"0.5254447",
"0.5227436",
"0.52050394",
"0.51971495",
"0.51836073",
"0.516645",
"0.5162509",
"0.5160141",
"0.515802",
"0.515218",
"0.51331",
"0.5123023",
"0.51105744",
"0.51045966",
"0.5097505",
"0.50879794",
"0.50288785",
"0.50153154",
"0.50142664",
"0.49897972",
"0.49886778",
"0.49828732",
"0.4974849",
"0.49677914",
"0.49665982",
"0.4964966",
"0.49549755",
"0.49514443",
"0.4934833",
"0.49256277",
"0.49251628",
"0.49187338",
"0.49163696",
"0.4916285",
"0.49071988",
"0.49015212",
"0.48973867",
"0.4896572",
"0.48952723",
"0.48936558",
"0.48927054",
"0.48898694",
"0.48827794",
"0.4869007",
"0.48656186",
"0.48646286",
"0.4853024",
"0.4840376",
"0.4840283",
"0.48398113",
"0.4826027",
"0.4816849",
"0.4811739",
"0.48046857",
"0.4797962",
"0.4788317",
"0.47882167",
"0.47874737",
"0.47805527",
"0.47758952",
"0.47696176",
"0.47559398",
"0.47501096",
"0.47498658",
"0.47422868",
"0.47418433",
"0.47370908",
"0.4733618",
"0.47280207",
"0.4720245"
] | 0.78134096 | 0 |
Policy Get the policy name for this manager. | func (mgr *LocalHashMapDBMgr) Policy() string {
return mgr.policy
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (nri *NodeReportItem) GetPolicy() string {\n\n\tif nri.Policy == \"\" {\n\t\treturn \"no policy\"\n\t}\n\n\treturn nri.Policy\n}",
"func (o ResiliencyPolicyOutput) PolicyName() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *ResiliencyPolicy) pulumi.StringOutput { return v.PolicyName }).(pulumi.StringOutput)\n}",
"func (o GroupPolicyOutput) PolicyName() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *GroupPolicy) pulumi.StringOutput { return v.PolicyName }).(pulumi.StringOutput)\n}",
"func (o ConfigurationBackupOutput) PolicyName() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ConfigurationBackup) *string { return v.PolicyName }).(pulumi.StringPtrOutput)\n}",
"func (p *Policy) Name() string {\n\treturn p.InternalName\n}",
"func (o MrScalarTaskScalingUpPolicyOutput) PolicyName() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MrScalarTaskScalingUpPolicy) string { return v.PolicyName }).(pulumi.StringOutput)\n}",
"func (o ElastigroupScalingTargetPolicyOutput) PolicyName() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ElastigroupScalingTargetPolicy) string { return v.PolicyName }).(pulumi.StringOutput)\n}",
"func (o *ExportPolicyCreateRequest) PolicyName() ExportPolicyNameType {\n\tvar r ExportPolicyNameType\n\tif o.PolicyNamePtr == nil {\n\t\treturn r\n\t}\n\tr = *o.PolicyNamePtr\n\treturn r\n}",
"func (km KeyValueMap) Policy() string {\n\treturn km[kmPolicy]\n}",
"func (o MrScalarCoreScalingUpPolicyOutput) PolicyName() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MrScalarCoreScalingUpPolicy) string { return v.PolicyName }).(pulumi.StringOutput)\n}",
"func (o *VolumeExportAttributesType) Policy() string {\n\tvar r string\n\tif o.PolicyPtr == nil {\n\t\treturn r\n\t}\n\tr = *o.PolicyPtr\n\treturn r\n}",
"func (o *SnapmirrorCreateRequest) Policy() string {\n\tvar r string\n\tif o.PolicyPtr == nil {\n\t\treturn r\n\t}\n\tr = *o.PolicyPtr\n\treturn r\n}",
"func (o MrScalarTaskScalingDownPolicyOutput) PolicyName() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MrScalarTaskScalingDownPolicy) string { return v.PolicyName }).(pulumi.StringOutput)\n}",
"func (o ValidatingAdmissionPolicyBindingSpecPatchOutput) PolicyName() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ValidatingAdmissionPolicyBindingSpecPatch) *string { return v.PolicyName }).(pulumi.StringPtrOutput)\n}",
"func (o RolePolicyAttachmentOutput) PolicyName() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *RolePolicyAttachment) pulumi.StringOutput { return v.PolicyName }).(pulumi.StringOutput)\n}",
"func (o ValidatingAdmissionPolicyBindingSpecOutput) PolicyName() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ValidatingAdmissionPolicyBindingSpec) *string { return v.PolicyName }).(pulumi.StringPtrOutput)\n}",
"func (o *ExportPolicyCreateRequest) PolicyName() ExportPolicyNameType {\n\tr := *o.PolicyNamePtr\n\treturn r\n}",
"func (o MrScalarCoreScalingDownPolicyOutput) PolicyName() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MrScalarCoreScalingDownPolicy) string { return v.PolicyName }).(pulumi.StringOutput)\n}",
"func (o ConfigurationBackupPtrOutput) PolicyName() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ConfigurationBackup) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.PolicyName\n\t}).(pulumi.StringPtrOutput)\n}",
"func (r *Bucket) Policy() pulumi.StringOutput {\n\treturn (pulumi.StringOutput)(r.s.State[\"policy\"])\n}",
"func (o *ExportPolicyDestroyRequest) PolicyName() ExportPolicyNameType {\n\tvar r ExportPolicyNameType\n\tif o.PolicyNamePtr == nil {\n\t\treturn r\n\t}\n\tr = *o.PolicyNamePtr\n\treturn r\n}",
"func (o ElastigroupScalingDownPolicyOutput) PolicyName() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ElastigroupScalingDownPolicy) string { return v.PolicyName }).(pulumi.StringOutput)\n}",
"func (o ElastigroupScalingUpPolicyOutput) PolicyName() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ElastigroupScalingUpPolicy) string { return v.PolicyName }).(pulumi.StringOutput)\n}",
"func (r *Policy) Name() pulumi.StringOutput {\n\treturn (pulumi.StringOutput)(r.s.State[\"name\"])\n}",
"func (o ValidatingAdmissionPolicyBindingSpecPatchPtrOutput) PolicyName() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ValidatingAdmissionPolicyBindingSpecPatch) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.PolicyName\n\t}).(pulumi.StringPtrOutput)\n}",
"func (policy *PolicySvc) Name() string {\n\treturn \"policy\"\n}",
"func (o ValidatingAdmissionPolicyBindingSpecPtrOutput) PolicyName() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ValidatingAdmissionPolicyBindingSpec) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.PolicyName\n\t}).(pulumi.StringPtrOutput)\n}",
"func (o AuthorizationPolicyOutput) Name() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *AuthorizationPolicy) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput)\n}",
"func (o TopicPolicyOutput) Policy() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *TopicPolicy) pulumi.StringOutput { return v.Policy }).(pulumi.StringOutput)\n}",
"func (o *GuardianPolicyDataData) GetPolicyName() string {\n\tif o == nil || o.PolicyName == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.PolicyName\n}",
"func (o *SSHAuthorizationPolicy) GetName() string {\n\n\treturn o.Name\n}",
"func (o ReplicaExternalKeyOutput) Policy() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *ReplicaExternalKey) pulumi.StringOutput { return v.Policy }).(pulumi.StringOutput)\n}",
"func (pg *PolicyGroup) GetPolicy() *api.Policy {\n\treturn pg.Policy\n}",
"func (o UserPolicyOutput) Policy() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *UserPolicy) pulumi.StringOutput { return v.Policy }).(pulumi.StringOutput)\n}",
"func (c *IAM) getPolicyName() (string, error) {\n\tclusterName, err := c.cfg.AccessPoint.GetClusterName()\n\tif err != nil {\n\t\treturn \"\", trace.Wrap(err)\n\t}\n\n\tprefix := clusterName.GetClusterName()\n\n\t// If the length of the policy name is over the limit, trim the cluster\n\t// name from right and keep the policyNameSuffix intact.\n\tmaxPrefixLength := maxPolicyNameLength - len(policyNameSuffix)\n\tif len(prefix) > maxPrefixLength {\n\t\tprefix = prefix[:maxPrefixLength]\n\t}\n\n\treturn prefix + policyNameSuffix, nil\n}",
"func (o AclTokenPolicyAttachmentOutput) Policy() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *AclTokenPolicyAttachment) pulumi.StringOutput { return v.Policy }).(pulumi.StringOutput)\n}",
"func (o ServerPolicyOutput) Name() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *ServerPolicy) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput)\n}",
"func (o CloudConfigurationRuleOutput) Policy() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *CloudConfigurationRule) pulumi.StringOutput { return v.Policy }).(pulumi.StringOutput)\n}",
"func (o UserPolicyOutput) Name() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *UserPolicy) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput)\n}",
"func (o BucketOutput) Policy() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *Bucket) pulumi.StringPtrOutput { return v.Policy }).(pulumi.StringPtrOutput)\n}",
"func (e *Enforcer) GetPolicy() [][]string {\n\treturn e.GetNamedPolicy(\"p\")\n}",
"func (m *PolicyRule) GetName()(*string) {\n val, err := m.GetBackingStore().Get(\"name\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*string)\n }\n return nil\n}",
"func (c *controller) GetPolicy(ctx context.Context, id int64) (*policyModels.Schema, error) {\n\treturn c.pManager.Get(ctx, id)\n}",
"func (o *SparseSSHAuthorizationPolicy) GetName() (out string) {\n\n\tif o.Name == nil {\n\t\treturn\n\t}\n\n\treturn *o.Name\n}",
"func (p scheduleWithOverProvisioningAwareness) name() policyName {\n\treturn overProvisioningPolicy\n}",
"func (o AlertPolicyOutput) Name() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *AlertPolicy) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput)\n}",
"func (e *Enforcer) GetNamedPolicy(ptype string) [][]string {\n\treturn e.model.GetPolicy(\"p\", ptype)\n}",
"func (r *Policy) PolicyType() pulumi.StringOutput {\n\treturn (pulumi.StringOutput)(r.s.State[\"policyType\"])\n}",
"func (c *controller) GetPolicyByName(ctx context.Context, projectID int64, name string) (*policyModels.Schema, error) {\n\treturn c.pManager.GetByName(ctx, projectID, name)\n}",
"func (f *Factory) GetPolicyStatement() (string, error) {\n\t// Perform checks\n\tif len(f.accountID) == 0 {\n\t\treturn \"\", errors.New(AccountIDMissingErr)\n\t}\n\n\tif len(f.region) == 0 {\n\t\treturn \"\", errors.New(RegionMissingErr)\n\t}\n\n\tif len(f.partition) == 0 {\n\t\treturn \"\", errors.New(PartitionMissingErr)\n\t}\n\n\t// Replace AWS placeholders\n\tt := fmt.Sprintf(iamTemplate, strings.Join(f.policies, \",\"))\n\tt = strings.ReplaceAll(t, \"${AWS::Partition}\", f.partition)\n\tt = strings.ReplaceAll(t, \"${AWS::Region}\", f.region)\n\tt = strings.ReplaceAll(t, \"${AWS::AccountId}\", f.accountID)\n\n\t// Return the policy document\n\treturn t, nil\n}",
"func (o ArgoCDSpecRbacPtrOutput) Policy() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ArgoCDSpecRbac) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Policy\n\t}).(pulumi.StringPtrOutput)\n}",
"func (o ArgoCDSpecRbacOutput) Policy() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ArgoCDSpecRbac) *string { return v.Policy }).(pulumi.StringPtrOutput)\n}",
"func (m *DeviceManagementConfigurationPolicy) GetName()(*string) {\n val, err := m.GetBackingStore().Get(\"name\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*string)\n }\n return nil\n}",
"func (o NetworkSimPolicyOutput) Name() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *NetworkSimPolicy) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput)\n}",
"func (e *SyncedEnforcer) GetPolicy() [][]string {\n\te.m.RLock()\n\tdefer e.m.RUnlock()\n\treturn e.Enforcer.GetPolicy()\n}",
"func (p *BaseProcessor) GetPolicyType() Type {\n\treturn \"\"\n}",
"func (ps *PolicyStore) GetPolicy(name string) (*Policy, error) {\n\tdefer metrics.MeasureSince([]string{\"policy\", \"get_policy\"}, time.Now())\n\tif ps.lru != nil {\n\t\t// Check for cached policy\n\t\tif raw, ok := ps.lru.Get(name); ok {\n\t\t\treturn raw.(*Policy), nil\n\t\t}\n\t}\n\n\t// Special case the root policy\n\tif name == \"root\" {\n\t\tp := &Policy{Name: \"root\"}\n\t\tif ps.lru != nil {\n\t\t\tps.lru.Add(p.Name, p)\n\t\t}\n\t\treturn p, nil\n\t}\n\n\t// Load the policy in\n\tout, err := ps.view.Get(name)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to read policy: %v\", err)\n\t}\n\tif out == nil {\n\t\treturn nil, nil\n\t}\n\n\t// In Vault 0.1.X we stored the raw policy, but in\n\t// Vault 0.2 we switch to the PolicyEntry\n\tpolicyEntry := new(PolicyEntry)\n\tvar policy *Policy\n\tif err := out.DecodeJSON(policyEntry); err == nil {\n\t\t// Parse normally\n\t\tp, err := Parse(policyEntry.Raw)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"failed to parse policy: %v\", err)\n\t\t}\n\t\tp.Name = name\n\t\tpolicy = p\n\n\t} else {\n\t\t// On error, attempt to use V1 parsing\n\t\tp, err := Parse(string(out.Value))\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"failed to parse policy: %v\", err)\n\t\t}\n\t\tp.Name = name\n\n\t\t// V1 used implicit glob, we need to do a fix-up\n\t\tfor _, pp := range p.Paths {\n\t\t\tpp.Glob = true\n\t\t}\n\t\tpolicy = p\n\t}\n\n\tif ps.lru != nil {\n\t\t// Update the LRU cache\n\t\tps.lru.Add(name, policy)\n\t}\n\n\treturn policy, nil\n}",
"func (c *Client) GetPolicy(name string) (*Policy, error) {\n\tif name == \"\" {\n\t\treturn nil, errored.Errorf(\"Policy invalid: empty string for name\")\n\t}\n\n\tresp, err := c.etcdClient.Get(context.Background(), c.policy(name), nil)\n\tif err != nil {\n\t\treturn nil, errors.EtcdToErrored(err)\n\t}\n\n\ttc := NewPolicy()\n\tif err := json.Unmarshal([]byte(resp.Node.Value), tc); err != nil {\n\t\treturn nil, err\n\t}\n\n\ttc.Name = name\n\n\terr = tc.Validate()\n\treturn tc, err\n}",
"func (o AutoSnapshotPolicyOutput) Name() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *AutoSnapshotPolicy) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput)\n}",
"func (e *Enforcer) GetNamedPolicy(ctx context.Context, ptype string) ([][]string, error) {\n\tres, err := e.client.remoteClient.GetNamedPolicy(ctx, &pb.PolicyRequest{\n\t\tEnforcerHandler: e.handler,\n\t\tPType: ptype,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn replyTo2DSlice(res), nil\n}",
"func (o OrganizationSecurityPolicyOutput) PolicyId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *OrganizationSecurityPolicy) pulumi.StringOutput { return v.PolicyId }).(pulumi.StringOutput)\n}",
"func getStrategyPolicy(policy ParentAbstractionServiceRetryPolicy) string {\n\tswitch policy {\n\tcase ParentAbstractionServiceRetryPolicyConsistentHash:\n\t\treturn `consistent_hash`\n\tcase ParentAbstractionServiceRetryPolicyRoundRobinIP:\n\t\treturn `rr_ip`\n\tcase ParentAbstractionServiceRetryPolicyRoundRobinStrict:\n\t\treturn `rr_strict`\n\tcase ParentAbstractionServiceRetryPolicyFirst:\n\t\treturn `first_live`\n\tcase ParentAbstractionServiceRetryPolicyLatched:\n\t\treturn `latched`\n\tdefault:\n\t\treturn getStrategyPolicy(DefaultParentAbstractionServiceRetryPolicy)\n\t}\n}",
"func (o SharedAccessPolicyOutput) Name() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *SharedAccessPolicy) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput)\n}",
"func (o *ServerConfigImportAllOf) GetPolicyPrefix() string {\n\tif o == nil || o.PolicyPrefix == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.PolicyPrefix\n}",
"func (p scheduleOnHost) name() policyName {\n\treturn scheduleOnHostAnnotationPolicy\n}",
"func (e *CachedEnforcer) GetNamedPolicy(ptype string) [][]string {\n\treturn e.api.GetNamedPolicy(ptype)\n}",
"func (o *GuardianPolicyDataData) GetPolicyId() string {\n\tif o == nil || o.PolicyId == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.PolicyId\n}",
"func (s *Storage) GetPolicy(ctx context.Context, ID int64) (*Policy, error) {\n\tvar policy Policy\n\terr := s.db.QueryRowContext(ctx, `SELECT query_policy($1);`, ID).Scan(&policy)\n\tif err != nil {\n\t\treturn nil, s.database.ProcessError(err)\n\t}\n\n\treturn &policy, nil\n}",
"func (ResourceType) Policy() ent.Policy {\n\t// TODO setup RBAC policies for entities (RBAC based on user's role) such as:\n\t// return authz.NewPolicy(\n\t// \tauthz.WithMutationRules(\n\t// \t\tauthz.ResourceTypeWritePolicyRule(),\n\t// \t),\n\t// )\n\treturn nil\n}",
"func (p antiAffinityLabel) name() policyName {\n\treturn antiAffinityLabelPolicy\n}",
"func (o ListenerPtrOutput) AlpnPolicy() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *Listener) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.AlpnPolicy\n\t}).(pulumi.StringPtrOutput)\n}",
"func (o ListenerOutput) AlpnPolicy() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v Listener) *string { return v.AlpnPolicy }).(pulumi.StringPtrOutput)\n}",
"func (o FioSpecVolumeVolumeSourceVsphereVolumePtrOutput) StoragePolicyName() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *FioSpecVolumeVolumeSourceVsphereVolume) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.StoragePolicyName\n\t}).(pulumi.StringPtrOutput)\n}",
"func getPolicyName(downstream, upstream service.K8sServiceAccount) string {\n\treturn fmt.Sprintf(\"%s to %s\", downstream, upstream)\n}",
"func (o GetSecurityPoliciesPolicyOutput) SecurityPolicyName() pulumi.StringOutput {\n\treturn o.ApplyT(func(v GetSecurityPoliciesPolicy) string { return v.SecurityPolicyName }).(pulumi.StringOutput)\n}",
"func (o ResourcePolicyExemptionOutput) Name() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *ResourcePolicyExemption) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput)\n}",
"func GetPolicyType(policy string) PolicyType {\n\ttypes := map[string]PolicyType{\n\t\t\"ANONYMOUS_POLICY\": AnonymousPolicy,\n\t\t\"READER_POLICY\": ReaderPolicy,\n\t\t\"WRITER_POLICY\": WriterPolicy,\n\t\t\"MANAGER_POLICY\": ManagerPolicy,\n\t\t\"OWNER_POLICY\": OwnerPolicy,\n\t\t\"ADMIN_POLICY\": AdminPolicy,\n\t}\n\n\treturn types[policy]\n}",
"func (o ControlPolicyAttachmentOutput) PolicyId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *ControlPolicyAttachment) pulumi.StringOutput { return v.PolicyId }).(pulumi.StringOutput)\n}",
"func (o IopingSpecVolumeVolumeSourceVsphereVolumePtrOutput) StoragePolicyName() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *IopingSpecVolumeVolumeSourceVsphereVolume) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.StoragePolicyName\n\t}).(pulumi.StringPtrOutput)\n}",
"func (e *Enforcer) GetPolicy(ctx context.Context) ([][]string, error) {\n\tres, err := e.client.remoteClient.GetPolicy(ctx, &pb.EmptyRequest{Handler: e.handler})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn replyTo2DSlice(res), nil\n}",
"func ModuleName(p *policyv1.Policy) string {\n\tswitch pt := p.PolicyType.(type) {\n\tcase *policyv1.Policy_ResourcePolicy:\n\t\treturn ResourcePolicyModuleName(pt.ResourcePolicy.Resource, pt.ResourcePolicy.Version)\n\tcase *policyv1.Policy_PrincipalPolicy:\n\t\treturn PrincipalPolicyModuleName(pt.PrincipalPolicy.Principal, pt.PrincipalPolicy.Version)\n\tcase *policyv1.Policy_DerivedRoles:\n\t\treturn DerivedRolesModuleName(pt.DerivedRoles.Name)\n\tdefault:\n\t\tpanic(fmt.Errorf(\"unknown policy type %T\", pt))\n\t}\n}",
"func (s *DefaultStore) GetPolicy(policyGen object.Generation) (*lang.Policy, object.Generation, error) {\n\t// todo should we use RWMutex for get/update policy?\n\tpolicyData, err := s.GetPolicyData(policyGen)\n\tif err != nil {\n\t\treturn nil, 0, err\n\t}\n\treturn s.getPolicyFromData(policyData)\n}",
"func (e *CachedEnforcer) GetPolicy() [][]string {\n\treturn e.api.GetPolicy()\n}",
"func (Recommendations) Policy() ent.Policy {\n\t/*return authz.NewPolicy(\n\t\tauthz.WithMutationRules(\n\t\t\tauthz.AssuranceTemplatesWritePolicyRule(),\n\t\t),\n\t)*/\n\treturn authz.NewPolicy(\n\t\tauthz.WithMutationRules(\n\t\t\tprivacy.AlwaysAllowRule(),\n\t\t),\n\t)\n}",
"func (p *policy) String() string {\n\treturn fmt.Sprintf(policyDocument,\n\t\tp.Expiration,\n\t\tp.Bucket,\n\t\tp.Key,\n\t\tp.O.MaxFileSize,\n\t\tp.Credential,\n\t\tp.Date,\n\t)\n}",
"func (c *Client) Policy() (*www.PolicyReply, error) {\n\tresponseBody, err := c.makeRequest(http.MethodGet,\n\t\twww.PoliteiaWWWAPIRoute, www.RoutePolicy, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar pr www.PolicyReply\n\terr = json.Unmarshal(responseBody, &pr)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unmarshal PolicyReply: %v\", err)\n\t}\n\n\tif c.cfg.Verbose {\n\t\terr := prettyPrintJSON(pr)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn &pr, nil\n}",
"func (t *TestSpec) NetworkPolicyName() string {\n\treturn fmt.Sprintf(\"%s_policy.json\", t.Prefix)\n}",
"func (sys *IAMSys) PolicyDBGet(name string) (string, error) {\n\tif name == \"\" {\n\t\treturn \"\", errInvalidArgument\n\t}\n\n\tobjectAPI := newObjectLayerFn()\n\tif objectAPI == nil {\n\t\treturn \"\", errServerNotInitialized\n\t}\n\n\tsys.RLock()\n\tdefer sys.RUnlock()\n\n\tif _, ok := sys.iamUsersMap[name]; !ok {\n\t\treturn \"\", errNoSuchUser\n\t}\n\n\tpolicy := sys.iamUserPolicyMap[name]\n\t// returned policy could be empty\n\treturn policy.Policy, nil\n}",
"func PolicyFile() string {\n\tvar filename = path.Join(WorkDir(), \"policy.json\")\n\tpolicyPath := os.Getenv(\"OXBM_POLICY_PATH\")\n\tif len(policyPath) > 0 {\n\t\tfilename = path.Join(policyPath, \"policy.json\")\n\t}\n\treturn filename\n}",
"func (o *WafPolicyGroup) GetName() string {\n\tif o == nil || o.Name == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Name\n}",
"func (r *Policy) ID() pulumi.IDOutput {\n\treturn r.s.ID()\n}",
"func ResourcePolicyModuleName(resource, version string) string {\n\treturn fmt.Sprintf(\"%s.%s.v%s\", ResourcePoliciesPrefix, Sanitize(resource), Sanitize(version))\n}",
"func (p preferScheduleOnHost) name() policyName {\n\treturn preferScheduleOnHostAnnotationPolicy\n}",
"func GetPolicy(ctx *pulumi.Context,\n\tname string, id pulumi.IDInput, state *PolicyState, opts ...pulumi.ResourceOption) (*Policy, error) {\n\tvar resource Policy\n\terr := ctx.ReadResource(\"aws-native:iot:Policy\", name, id, state, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}",
"func (o *VolumeExportAttributesType) SetPolicy(newValue string) *VolumeExportAttributesType {\n\to.PolicyPtr = &newValue\n\treturn o\n}",
"func (s *RemovePermissionOutput) SetPolicy(v string) *RemovePermissionOutput {\n\ts.Policy = &v\n\treturn s\n}",
"func (b *Bundle) PolicyManager() policies.Manager {\n\treturn b.policyManager\n}",
"func GetPolicyRepoPath() string {\n\treturn Global.Policy.RepoPath\n}",
"func (s *PutPermissionOutput) SetPolicy(v string) *PutPermissionOutput {\n\ts.Policy = &v\n\treturn s\n}",
"func (o RolePolicyAttachmentOutput) PolicyType() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *RolePolicyAttachment) pulumi.StringOutput { return v.PolicyType }).(pulumi.StringOutput)\n}"
] | [
"0.71685594",
"0.71337616",
"0.7127744",
"0.7121374",
"0.71197397",
"0.70768213",
"0.7059616",
"0.702617",
"0.70009947",
"0.7000554",
"0.6979791",
"0.69775784",
"0.6968762",
"0.6955846",
"0.69364214",
"0.692103",
"0.69128716",
"0.69038796",
"0.68980217",
"0.682995",
"0.68024987",
"0.67950577",
"0.6791111",
"0.6771931",
"0.67275816",
"0.67075604",
"0.6688253",
"0.66538507",
"0.66464716",
"0.6628853",
"0.66073734",
"0.6604281",
"0.65815574",
"0.6565979",
"0.65296483",
"0.6517541",
"0.646706",
"0.6408185",
"0.6398974",
"0.63649076",
"0.63603646",
"0.6350223",
"0.63459367",
"0.63396835",
"0.633226",
"0.6330829",
"0.63231725",
"0.6268286",
"0.6245451",
"0.62443984",
"0.622477",
"0.62045026",
"0.61795986",
"0.6174511",
"0.61435264",
"0.61297464",
"0.611335",
"0.605974",
"0.6056052",
"0.6052512",
"0.60469764",
"0.6041556",
"0.6035039",
"0.6015701",
"0.5995268",
"0.59857404",
"0.5935773",
"0.5932242",
"0.59120935",
"0.5908589",
"0.58826554",
"0.5841144",
"0.5836523",
"0.5829322",
"0.5828917",
"0.5823592",
"0.581661",
"0.581626",
"0.58160657",
"0.5803721",
"0.5770824",
"0.57659245",
"0.5758214",
"0.5749862",
"0.57303226",
"0.5721584",
"0.57208264",
"0.5707373",
"0.5706228",
"0.5693805",
"0.5660466",
"0.56497514",
"0.5647006",
"0.56436694",
"0.56307364",
"0.56277645",
"0.5607473",
"0.5598355",
"0.55913985",
"0.5588144"
] | 0.7230925 | 0 |
EchoLogger logrus logger in echo log interface | func EchoLogger(logger *logrus.Logger) *EchoLogrus {
return &EchoLogrus{
Logger: logger,
}
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func Logrus() echo.MiddlewareFunc {\n\treturn LogrusDefaultConfig(DefaultLoggerConfig)\n}",
"func Logger(c context.Context) loggers.Advanced",
"func (logger *Logger) echo(w io.Writer, l level.Level, f string, a ...any) {\n\t// Lock the log object for change.\n\tlogger.mu.RLock()\n\tdefer logger.mu.RUnlock()\n\n\t// Get the stack frame.\n\tsf := getStackFrame(logger.skipStackFrames)\n\n\t// If an additional value is set for the output (writer),\n\t// use it with the default settings.\n\toutputs := logger.outputs\n\tif w != nil {\n\t\toutput := Default\n\t\toutput.Writer = w\n\t\toutput.isSystem = true\n\t\toutputs[\"*\"] = &output // this name can be used for system names\n\t}\n\n\t// Output message.\n\tfor _, o := range logger.outputs {\n\t\tvar msg string\n\t\thas, err := o.Levels.Contains(l)\n\t\tif !has || err != nil || !o.Enabled.IsTrue() {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Hide or show the prefix.\n\t\tprefix := logger.prefix\n\t\tif !o.WithPrefix.IsTrue() {\n\t\t\tprefix = \"\"\n\t\t}\n\n\t\t// Text or JSON representation of the message.\n\t\tif o.TextStyle.IsTrue() {\n\t\t\tmsg = textMessage(prefix, l, time.Now(), o, sf, f, a...)\n\t\t} else {\n\t\t\tmsg = objectMessage(prefix, l, time.Now(), o, sf, f, a...)\n\t\t}\n\n\t\t// Print message.\n\t\tfmt.Fprint(o.Writer, msg)\n\t}\n}",
"func setupLogger() {\n\tsl := logrus.New()\n\tsrvLog = sl.WithField(\"context\", \"server\")\n}",
"func NewFromLogrus(logger logrus.FieldLogger) Logger {\n\treturn logrusLogger{logger: logger}\n}",
"func InitLogger(conf config.LogConfig, e *echo.Echo) {\n\tlog.SetOutput(os.Stdout)\n\tlog.SetLevel(log.DebugLevel)\n\n\te.Use(middleware.LoggerWithConfig(middleware.LoggerConfig{\n\t\tFormat: \"[${time_rfc3339_nano}] (${id}) ${method} '${uri}' [${status}] Host: ${host}, IP: ${remote_ip}, error: '${error}', (latency: ${latency_human}) \\n\",\n\t}))\n}",
"func (logger Logger) EchoLogger() echo.MiddlewareFunc {\n\treturn logger.loggerWithConfig(LoggerConfig{\n\t\tSkipper: middleware.DefaultSkipper,\n\t})\n}",
"func LogLogger( l *log.Logger ) mux.MiddlewareFunc {\n return FormatLogger( l.Printf )\n}",
"func Logger(bus ChannelBus) {\n\tfor msg := range bus.Log {\n\t\tif msg != \"\" {\n\t\t\tlog.Output(3, msg)\n\t\t}\n\t}\n}",
"func (c *T) Log(args ...interface{})",
"func configureLogrus(verbosity int) {\n\tlogrus.SetFormatter(&logrus.TextFormatter{DisableTimestamp: true})\n\tlogrus.SetOutput(os.Stderr)\n\n\tif verbosity >= thirdPartyVerboseLevel {\n\t\tlogrus.SetLevel(logrus.DebugLevel)\n\t}\n}",
"func ConsoleLogger() mux.MiddlewareFunc {\n return FormatLogger( log.Printf )\n}",
"func LogFromEchoContext(c echo.Context) *zerolog.Logger {\n\treturn LogFromContext(c.Request().Context())\n}",
"func (b *ASCIIOverTCP) Logger(l Logger) {\n\tb.Handler.Logger = l\n}",
"func makeRemoteLogger(stream pb.OutgoingMessageStream, localLog logrus.FieldLogger) logrus.FieldLogger {\n\tlog := logrus.New()\n\tlog.Level = logrus.DebugLevel\n\tlog.Out = ioutil.Discard\n\n\tlog.Hooks.Add(&remoteHook{stream, localLog})\n\n\treturn log\n}",
"func Log(fmt string, args ...interface{}) {}",
"func (logger *logger) newLogrus() {\n\tlogger.logrus = &logrus.Logger{\n\t\tHooks: make(logrus.LevelHooks),\n\t}\n\n\tlogLevel, err := logrus.ParseLevel(logger.cfg.LogLevel)\n\tif err != nil {\n\t\tlogLevel = defaultLogLevel\n\t}\n\tlogger.logrus.Level = logLevel\n\n\tswitch logger.cfg.LogFormat {\n\tcase jsonLogFormat:\n\t\tlogger.logrus.SetFormatter(&logrus.JSONFormatter{})\n\tdefault:\n\t\tlogger.logrus.SetFormatter(&logrus.TextFormatter{})\n\t}\n\n\tif logger.cfg.LogFilePath == \"\" {\n\t\tlogger.logrus.Out = os.Stdout\n\t\tlogger.logrus.Errorf(\"[%s]:: empty log file. Set 'Stdout' as default \\n\", PackageName)\n\t\tlogger.logrus.Infof(\"[%s]:: initialized logx successfully \\n\", PackageName)\n\t\treturn\n\t}\n\n\tlogfile, err := os.OpenFile(logger.cfg.LogFilePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0755)\n\tif err != nil {\n\t\tlogger.logrus.Errorln(\"[%s]:: failed to set log file. Error : '%v'. Set 'Stdout' as default\", PackageName, err)\n\t\treturn\n\t}\n\n\tlogger.logfile = logfile\n\tlogger.logrus.Out = logger.logfile\n\n\tlogger.logrus.Infof(\"[%s]:: initialized logx successfully\", PackageName)\n}",
"func examLogger() {\n\tvar (\n\t\tbuf bytes.Buffer\n\t\tlogger0 = log.New(&buf, \"logger: \", log.Ltime)\n\t\tlogger1 = log.New(&buf, \"logger: \", log.Lmicroseconds)\n\t\tlogger2 = log.New(&buf, \"logger: \", log.Llongfile)\n\t\tlogger3 = log.New(&buf, \"logger: \", log.Lshortfile)\n\t\tlogger4 = log.New(&buf, \"logger: \", log.LUTC)\n\t\tlogger5 = log.New(&buf, \"logger: \", log.LstdFlags)\n\t\tlogger6 = log.New(&buf, \"logger: \", log.LstdFlags|log.Lshortfile)\n\t)\n\n\tlogger0.Print(\"------------------\")\n\tlogger1.Print(\"------------------\")\n\tlogger2.Print(\"------------------\")\n\tlogger3.Print(\"------------------\")\n\tlogger4.Print(\"------------------\")\n\tlogger5.Print(\"------------------\")\n\tlogger6.Print(\"------------------\")\n\t// logger0.Fatal(\"------------------Fatal------------------\")\n\tfmt.Print(&buf)\n}",
"func LogrusLogger(log *logrus.Logger) Logger {\n\t// TODO control verbosity\n\treturn &lruLogger{jl: logrus.NewEntry(log)}\n}",
"func Logger(l log.FieldLogger) optSetter {\n\treturn func(f *Forwarder) error {\n\t\tif logger, ok := l.(OxyLogger); ok {\n\t\t\tf.log = logger\n\t\t\treturn nil\n\t\t}\n\n\t\tif logger, ok := l.(*log.Logger); ok {\n\t\t\tf.log = &internalLogger{Logger: logger}\n\t\t\treturn nil\n\t\t}\n\n\t\treturn errors.New(\"the type of the logger must be OxyLogger or logrus.Logger\")\n\t}\n}",
"func WithStream() LogReadOption { return LogReadOption{withStream: true} }",
"func LeveledLogger(l level.Level, format string, v ...interface{}) {\n\tif l >= SystemLogLevel {\n\t\tif l == level.Error || l == level.Fatal {\n\t\t\tfmt.Fprintf(os.Stderr, format+\"\\n\", v...)\n\t\t} else {\n\t\t\tfmt.Printf(format+\"\\n\", v...)\n\t\t}\n\t}\n}",
"func New(l logrus.FieldLogger) ctxlog.Logger {\n\tif l == nil {\n\t\tl = logrus.New()\n\t}\n\n\treturn logrusAdapter{l}\n}",
"func LogMsgs() {\n\tlogger := logrus.WithField(\"name\", lcf.CallerName(1))\n\tlogger.Debug(\"Sample debug 1.\")\n\tlogger.WithFields(logrus.Fields{\"a\": \"b\", \"c\": 10}).Debug(\"Sample debug 2.\")\n\tlogger.Info(\"Sample info 1.\")\n\tlogger.WithFields(logrus.Fields{\"a\": \"b\", \"c\": 10}).Info(\"Sample info 2.\")\n\tlogger.Warn(\"Sample warn 1.\")\n\tlogger.WithFields(logrus.Fields{\"a\": \"b\", \"c\": 10}).Warn(\"Sample warn 2.\")\n\tlogger.Error(\"Sample error 1.\")\n\tlogger.WithFields(logrus.Fields{\"a\": \"b\", \"c\": 10}).Error(\"Sample error 2.\")\n}",
"func (dhtClient *DHTClient) setupLogger() {\n\tfields := map[string]string{\n\t\t\"package\": \"DHTClient\",\n\t\t\"relayid\": dhtClient.relayPeer.Pretty(),\n\t}\n\tif dhtClient.routedHost != nil {\n\t\tfields[\"peerid\"] = dhtClient.routedHost.ID().Pretty()\n\t}\n\tdhtClient.logger = utils.NewDefaultLoggerWithFields(fields)\n}",
"func (s *SilentLogger) Log(v ...interface{}) {}",
"func Logger(app *fiber.App) {\n\n\tapp.Use(logger.New(logger.Config{\n\t\tFormat: \"${time} ${status} - ${method} ${path}\\n\",\n\t\tTimeFormat: \"02-Jan-2006 15:04:05\",\n\t\tTimeZone: \"America/Cancun\",\n\t}))\n}",
"func Inject(l *logrus.Entry) {\n\tgrpclog.SetLogger(New(l))\n}",
"func SetLogger(logger *xlog.Logger) {\n\tlogger = logger.Named(ecode.ModClientGrpc).WithOptions(zap.AddCallerSkip(defaultCallerSkip))\n\tgrpclog.SetLoggerV2(&loggerWrapper{logger: logger, sugar: logger.Sugar()})\n}",
"func initLogger() {\n\tlogdir := viper.GetString(\"log.log_dir\")\n\tstdout := viper.GetBool(\"log_stdout\")\n\n\tvar writer io.Writer\n\n\tif logdir != \"\" {\n\t\tfolderPath, err := filepath.Abs(logdir)\n\t\tpanicIfError(err, fmt.Sprintf(\"Error on parsing log path: %s\", logdir))\n\n\t\tabspath, err := filepath.Abs(path.Join(logdir, \"run.log\"))\n\t\tpanicIfError(err, fmt.Sprintf(\"Error on parsing log file path: %s\", logdir))\n\n\t\terr = os.MkdirAll(folderPath, os.ModePerm)\n\t\tpanicIfError(err, fmt.Sprintf(\"Error on creating log dir: %s\", folderPath))\n\n\t\tif stdout {\n\t\t\tfmt.Println(\"Will be logged to stdout and \", abspath)\n\t\t\tfileWriter := mylog.RotateLog(abspath)\n\t\t\twriter = io.MultiWriter(os.Stdout, fileWriter)\n\t\t} else {\n\t\t\tfmt.Println(\"Will be logged to \", abspath)\n\t\t\twriter = mylog.RotateLog(abspath)\n\t\t}\n\t} else {\n\t\t// stdout only\n\t\tfmt.Println(\"Will be logged to stdout\")\n\t\twriter = os.Stdout\n\t}\n\tlogrus.SetOutput(writer)\n\n\t// Only log the warning severity or above.\n\tswitch viper.GetString(\"log.level\") {\n\tcase \"panic\":\n\t\tlogrus.SetLevel(logrus.PanicLevel)\n\tcase \"fatal\":\n\t\tlogrus.SetLevel(logrus.FatalLevel)\n\tcase \"error\":\n\t\tlogrus.SetLevel(logrus.ErrorLevel)\n\tcase \"warn\":\n\t\tlogrus.SetLevel(logrus.WarnLevel)\n\tcase \"info\":\n\t\tlogrus.SetLevel(logrus.InfoLevel)\n\tcase \"debug\":\n\t\tlogrus.SetLevel(logrus.DebugLevel)\n\tcase \"trace\":\n\t\tlogrus.SetLevel(logrus.TraceLevel)\n\tdefault:\n\t\tfmt.Println(\"Unknown level\", viper.GetString(\"log.level\"), \"Set to INFO\")\n\t\tlogrus.SetLevel(logrus.InfoLevel)\n\t}\n\n\tFormatter := new(logrus.TextFormatter)\n\tFormatter.ForceColors = false\n\tFormatter.DisableColors = true\n\tFormatter.TimestampFormat = \"06-01-02 15:04:05.000000\"\n\tFormatter.FullTimestamp = true\n\tlogrus.SetFormatter(Formatter)\n\n\t// redirect standard log to logrus\n\t//log.SetOutput(logrus.StandardLogger().Writer())\n\t//log.Println(\"Standard logger. 
Am I here?\")\n\tlineNum := viper.GetBool(\"log_line_number\")\n\tif lineNum {\n\t\t//filenameHook := filename.NewHook()\n\t\t//filenameHook.Field = \"line\"\n\t\t//logrus.AddHook(filenameHook)\n\t\tlogrus.SetReportCaller(true)\n\t}\n\tbyLevel := viper.GetBool(\"multifile_by_level\")\n\tif byLevel && logdir != \"\" {\n\t\tpanicLog, _ := filepath.Abs(path.Join(logdir, \"panic.log\"))\n\t\tfatalLog, _ := filepath.Abs(path.Join(logdir, \"fatal.log\"))\n\t\twarnLog, _ := filepath.Abs(path.Join(logdir, \"warn.log\"))\n\t\terrorLog, _ := filepath.Abs(path.Join(logdir, \"error.log\"))\n\t\tinfoLog, _ := filepath.Abs(path.Join(logdir, \"info.log\"))\n\t\tdebugLog, _ := filepath.Abs(path.Join(logdir, \"debug.log\"))\n\t\ttraceLog, _ := filepath.Abs(path.Join(logdir, \"trace.log\"))\n\t\twriterMap := lfshook.WriterMap{\n\t\t\tlogrus.PanicLevel: mylog.RotateLog(panicLog),\n\t\t\tlogrus.FatalLevel: mylog.RotateLog(fatalLog),\n\t\t\tlogrus.WarnLevel: mylog.RotateLog(warnLog),\n\t\t\tlogrus.ErrorLevel: mylog.RotateLog(errorLog),\n\t\t\tlogrus.InfoLevel: mylog.RotateLog(infoLog),\n\t\t\tlogrus.DebugLevel: mylog.RotateLog(debugLog),\n\t\t\tlogrus.TraceLevel: mylog.RotateLog(traceLog),\n\t\t}\n\t\tlogrus.AddHook(lfshook.NewHook(\n\t\t\twriterMap,\n\t\t\tFormatter,\n\t\t))\n\t}\n\tlogger := logrus.StandardLogger()\n\tlogrus.Debug(\"Logger initialized.\")\n\tbyModule := viper.GetBool(\"multifile_by_module\")\n\tif !byModule {\n\t\tlogdir = \"\"\n\t}\n\n\tdownloader.InitLoggers(logger, logdir)\n\tfetcher.InitLoggers(logger, logdir)\n\tp2p.InitLoggers(logger, logdir)\n\tog.InitLoggers(logger, logdir)\n\tsyncer.InitLoggers(logger, logdir)\n\tannsensus.InitLoggers(logger, logdir)\n\n}",
"func (d *dispatcher) log(fmt string, v ...interface{}) {\n\tif d.logger != nil {\n\t\td.logger.Printf(fmt, v...)\n\t}\n}",
"func logger(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tformat := \"[%s] User agent => %s Remote addr => %s\"\n\t\tlog.Printf(format, r.Method, r.UserAgent(), r.RemoteAddr)\n\t\tnext.ServeHTTP(w, r)\n\t})\n}",
"func initLogging() {\n\tbackend := btclog.NewBackend(os.Stdout)\n\tpeer.UseLogger(backend.Logger(\"PEER\"))\n}",
"func Logger(h http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tlog.Printf(\"[%s] [%s] %s\", r.RemoteAddr, r.Method, r.URL.String())\n\t\th.ServeHTTP(w, r)\n\t})\n}",
"func EnableLogging(w io.Writer) {}",
"func New() *logrus.Logger {\n\treturn logrus.New()\n}",
"func Init(service, logLevel string) {\n\tcustomFormatter := &logrus.TextFormatter{}\n\tcustomFormatter.TimestampFormat = timeFormat\n\tcustomFormatter.FullTimestamp = true\n\tlogrus.SetFormatter(customFormatter)\n\tlogrus.SetOutput(os.Stdout)\n\tswitch logLevel {\n\tcase \"error\":\n\t\tlogrus.SetLevel(logrus.ErrorLevel)\n\tcase \"debug\":\n\t\tlogrus.SetLevel(logrus.DebugLevel)\n\tdefault:\n\t\tlogrus.SetLevel(logrus.InfoLevel)\n\t}\n\tlogger = logrus.WithFields(logrus.Fields{\n\t\t\"service\": service,\n\t})\n\tlogger.Debug(\"init_logger\")\n}",
"func New(l *logrus.Logger) loggers.Logger {\n\treturn &Logrus{l}\n}",
"func (s *Status) WrapLogrus(logger *logrus.Logger) {\n\tlogger.SetOutput(s.WrapWriter(logger.Out))\n}",
"func Logger(appName string) echo.MiddlewareFunc {\n\treturn LoggerWithConfig(DefaultLoggerConfig(appName))\n}",
"func (context *Context) Elog(format string, args ...interface{}) {\n\tcontext.Logger.Elog(format, args...)\n}",
"func Log(level logrus.Level, args ...interface{}) {\n\tif mLogger.StdLogger != nil {\n\t\tmLogger.StdLogger.Log(level, args...)\n\t}\n\n\tif mLogger.FileLogger != nil {\n\t\tmLogger.FileLogger.Log(level, args...)\n\t}\n}",
"func logWrap(c client.Client) client.Client {\n\treturn &logWrapper{c}\n}",
"func logWrap(c client.Client) client.Client {\n\treturn &logWrapper{c}\n}",
"func (l eventlog) Log(context interface{}, name string, message string, data ...interface{}) {}",
"func Logger(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tlog.Printf(\"%s \\\"%s %s %s\\\" %d\\n\", r.RemoteAddr, r.Method, r.RequestURI, r.Proto, r.ContentLength)\n\n\t\tnext.ServeHTTP(w, r)\n\t})\n}",
"func (c *B) Log(args ...interface{})",
"func Logger(route routing.Route) Adapter {\n\treturn func(h http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tstart := time.Now()\n\n\t\t\t// server http\n\t\t\th.ServeHTTP(w, r)\n\n\t\t\t// by doing this the log happen after all http request\n\t\t\tlog.Printf(\n\t\t\t\t\"%s\\t%s\\t%s\\t%s\",\n\t\t\t\troute.Method,\n\t\t\t\troute.Pattern,\n\t\t\t\troute.Name,\n\t\t\t\ttime.Since(start),\n\t\t\t)\n\t\t})\n\t}\n}",
"func fmtLogger(msg string, args ...interface{}) {\n\tfmt.Printf(msg, args...)\n\tfmt.Println()\n}",
"func Logger(serviceName string) log.Logger {\n\tlgr := log.NewLogfmtLogger(os.Stderr)\n\tlgr = log.WithPrefix(lgr, \"ts\", log.DefaultTimestampUTC)\n\tlgr = log.WithPrefix(lgr, \"service\", serviceName)\n\treturn lgr\n}",
"func (l *Logger) log(level int64, v string) { l.doMsg(level, v) }",
"func setLogger(logClient logger.Interface) {\n\tlog = logClient\n}",
"func InitLogger(config *LoggerConfig) *Logger {\n\tlog := logrus.New()\n\tlog.SetFormatter(config.Format.Get())\n\tlog.SetLevel(config.Level.Get())\n\tfields := logrus.Fields{\n\t\t\"module\": config.Name,\n\t}\n\tif config.AdditionalFields != nil {\n\t\tfor key, value := range *config.AdditionalFields {\n\t\t\tfields[key] = value\n\t\t}\n\t}\n\tlogger := &Logger{\n\t\tconfig: config,\n\t\tinstanceRaw: log,\n\t\tinstance: log.WithFields(fields),\n\t}\n\treturn logger\n}",
"func getLogger() *logrus.Logger {\n\tlog := logrus.New()\n\t//log.Formatter = new(TextFormatter)\n\tif debug {\n\t\tlog.Level = logrus.DebugLevel\n\t}\n\n\treturn log\n}",
"func logger(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp interface{}, err error) {\n\tlog.Printf(\"---> Unary interceptor: %v\\n\", info.FullMethod)\n\treturn handler(ctx, req)\n}",
"func initLogger() *logrus.Logger {\n\tlogger := logrus.New()\n\n\tlogger.SetFormatter(&logrus.TextFormatter{\n\t\tFullTimestamp: true,\n\t\tDisableLevelTruncation: true,\n\t})\n\n\treturn logger\n}",
"func (ses *Ses) log(enabled bool, v ...interface{}) {\n\tLog := _drv.Cfg().Log\n\tif !Log.IsEnabled(enabled) {\n\t\treturn\n\t}\n\tif len(v) == 0 {\n\t\tLog.Logger.Infof(\"%v %v\", ses.sysName(), callInfo(2))\n\t} else {\n\t\tLog.Logger.Infof(\"%v %v %v\", ses.sysName(), callInfo(2), fmt.Sprint(v...))\n\t}\n}",
"func logging (prefix string, logMsg string) {\n\n\tf, err := os.OpenFile(Conf.Files.LogFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tdefer f.Close()\n\n\tlogger := log.New(f, prefix, log.LstdFlags)\n\tlogger.Println(logMsg)\n}",
"func (keeper Keeper) Logger(ctx sdk.Context) log.Logger {\n\treturn ctx.Logger().With(\"module\", fmt.Sprintf(\"x/%s\", types.ModuleName))\n}",
"func Logger(config map[string]interface{}) (log15.Logger, error) {\n\n\tconfigMap, err := NewLoggerConfig(config)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t// das gehört zusammen!!\n\tc := LoggerConfig{}\n\terr = hooks.Decode(configMap, &c)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn c.NewLogger()\n}",
"func Logger(inner http.Handler, name string) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tstart := time.Now()\n\n\t\tinner.ServeHTTP(w, r)\n\n\t\tlog.Printf(\n\t\t\t\"%s\\t%s\\t%s\\t%s\",\n\t\t\tr.Method,\n\t\t\tr.RequestURI,\n\t\t\tname,\n\t\t\ttime.Since(start),\n\t\t)\n\t})\n}",
"func LogMsgs() {\n\tlogrus.Debug(\"Sample debug 1.\")\n\tlogrus.WithFields(logrus.Fields{\"name\": CallerName(1), \"a\": \"b\", \"c\": 10}).Debug(\"Sample debug 2.\")\n\tlogrus.Info(\"Sample info 1.\")\n\tlogrus.WithFields(logrus.Fields{\"name\": CallerName(1), \"a\": \"b\", \"c\": 10}).Info(\"Sample info 2.\")\n\tlogrus.Warn(\"Sample warn 1.\")\n\tlogrus.WithFields(logrus.Fields{\"name\": CallerName(1), \"a\": \"b\", \"c\": 10}).Warn(\"Sample warn 2.\")\n\tlogrus.Error(\"Sample error 1.\")\n\tlogrus.WithFields(logrus.Fields{\"name\": CallerName(1), \"a\": \"b\", \"c\": 10}).Error(\"Sample error 2.\")\n}",
"func Logger() *logrus.Logger {\n\tif logger == nil {\n\n\t\tlog := logrus.New()\n\n\t\tlogLevel, err := logrus.ParseLevel(GetEnvVariable(\"LOG_LEVEL\", \"info\"))\n\t\tif err == nil {\n\t\t\tlog.SetLevel(logLevel)\n\t\t}\n\n\t\thostname, err := os.Hostname()\n\n\t\tif err != nil {\n\t\t\thostname = \"unnamed_app\"\n\t\t}\n\n\t\thook, err := lSyslog.NewSyslogHook(\"udp\", GetEnvVariable(\"LOG_HOST\", \"logs7.papertrailapp.com:51074\"), syslog.LOG_INFO, GetEnvVariable(\"HOSTNAME\", hostname))\n\n\t\tif err == nil && GetEnvVariable(\"ENABLE_PAPERTRAIL\", \"false\") == \"true\" {\n\t\t\tlog.Hooks.Add(hook)\n\t\t}\n\n\t\tlogger = log\n\n\t}\n\n\treturn logger\n}",
"func Wrap(dest logwrap.Logger) logwrap.Impl {\n\treturn func(ctx context.Context, message logwrap.Message) {\n\t\tdest.Log(ctx, message.Message, logwrap.Data(message.Data), logwrap.Level(message.Level), logwrap.Source(message.Source))\n\t}\n}",
"func WithLogger(l logger) func(rpc *EthRPC) {\n\treturn func(rpc *EthRPC) {\n\t\trpc.log = l\n\t}\n}",
"func httpLogger(r *http.Request, created time.Time, status, bytes int) {\n\t//fmt.Println(httpxtra.ApacheCommonLog(r, created, status, bytes))\n\n\tlog.Printf(\"%s %d %s %q (%s) :: %d bytes in %s%s\",\n\t\tlogProto(r),\n\t\tstatus,\n\t\tr.Method,\n\t\tr.URL.Path,\n\t\tremoteIP(r),\n\t\tbytes,\n\t\ttime.Since(created),\n\t\tlogMsg(r),\n\t)\n}",
"func setupLogger(l log.Logger, cfg *operator.Config) log.Logger {\n\tnewLogger, err := cortex_log.NewPrometheusLogger(cfg.LogLevel, cfg.LogFormat)\n\tif err != nil {\n\t\tlevel.Error(l).Log(\"msg\", \"failed to create logger\", \"err\", err)\n\t\tos.Exit(1)\n\t}\n\tl = newLogger\n\n\tadapterLogger := logutil.Wrap(l)\n\n\t// NOTE: we don't set up a caller field here, unlike the normal agent.\n\t// There's too many multiple nestings of the logger that prevent getting the\n\t// caller from working properly.\n\n\t// Set up the global logger and the controller-local logger.\n\tcontroller.SetLogger(adapterLogger)\n\tcfg.Controller.Logger = adapterLogger\n\treturn l\n}",
"func (env *Env) log(enabled bool, v ...interface{}) {\n\tLog := _drv.Cfg().Log\n\tif !Log.IsEnabled(enabled) {\n\t\treturn\n\t}\n\tif len(v) == 0 {\n\t\tLog.Logger.Infof(\"%v %v\", env.sysName(), callInfo(1))\n\t} else {\n\t\tLog.Logger.Infof(\"%v %v %v\", env.sysName(), callInfo(1), fmt.Sprint(v...))\n\t}\n}",
"func (serve *Server) configureLogger() error {\n\tlevel, err := logrus.ParseLevel(serve.config.LogLevel)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tserve.logger.SetLevel(level)\n\treturn nil\n}",
"func Wrap(l *gol.Logger) logging.Logger {\n\tl.ExtraCalldepth += 1 // one layer of wrapping in loggerImpl struct above\n\treturn &loggerImpl{l}\n}",
"func echoServer() error {\n\tlistener, err := quic.ListenAddr(addr, generateTLSConfig(), nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\tsess, err := listener.Accept()\n\tif err != nil {\n\t\treturn err\n\t}\n\tstream, err := sess.AcceptStream()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\t// Echo through the loggingWriter\n\t_, err = io.Copy(loggingWriter{stream}, stream)\n\treturn err\n}",
"func (l *Lark) Logger(logOpt map[string]interface{}) *LarkLogger {\n\treturn &LarkLogger{l: l, opt: logOpt}\n}",
"func New() *Logger {\n\n\tbaseLogrus := logrus.New()\n\n\tvar logger = &Logger{baseLogrus}\n\n\tf, err := os.OpenFile(\"dummy-api.log\", os.O_CREATE|os.O_WRONLY, 0666)\n\tif err != nil {\n\t\tlog.Fatalf(\"unable to interact with log file: %s\", err)\n\t}\n\n\tlogger.SetFormatter(&logrus.JSONFormatter{\n\t\tTimestampFormat: \"02-01-2006 15:04:05\", // DD-MM-YYYY HH:MM:SS\n\n\t})\n\n\toutputs := io.MultiWriter(os.Stderr, f) // Write to both standard error and the log file.\n\tlogger.Out = outputs\n\n\treturn logger\n\n}",
"func (c *T) Logf(format string, args ...interface{})",
"func SetLogger(logger logrus.FieldLogger) {\n\tlog = logger\n}",
"func SetLogger(logger logrus.FieldLogger) {\n\tlog = logger\n}",
"func httpLogger(handler http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tlog.Printf(\"%s %s %s\", r.RemoteAddr, r.Method, r.URL)\n\t\thandler.ServeHTTP(w, r)\n\t})\n}",
"func TestExtendErrorLogger(t *testing.T) {\n\tDefaultCreateErrorLoggerFunc = NewMockLogger\n\tdefer func() {\n\t\tDefaultCreateErrorLoggerFunc = CreateDefaultErrorLogger\n\t}()\n\tlogName := \"/tmp/mosn/test_mock_log.log\"\n\tos.Remove(logName)\n\t// reset for test\n\terrorLoggerManagerInstance.managers = make(map[string]log.ErrorLogger)\n\tlog.ClearAll()\n\tif err := InitDefaultLogger(logName, INFO); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tDefaultLogger.Infof(\"test_%d\", 123) // [mocked] [INFO] [] test_123\n\tProxy.Infof(context.Background(), \"test_%d\", 123) // [mocked] [INFO] [] [connId,traceId] test_123\n\ttime.Sleep(time.Second)\n\tlines, err := readLines(logName)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif len(lines) != 2 {\n\t\tt.Fatalf(\"logger write lines not expected, writes: %d, expected: %d\", len(lines), 2)\n\t}\n\tfor _, l := range lines {\n\t\tqs := strings.SplitN(l, \" \", 4)\n\t\tif !(len(qs) == 4 &&\n\t\t\tqs[0] == \"[mocked]\" &&\n\t\t\tqs[1] == \"[INFO]\" &&\n\t\t\tqs[2] == \"[]\" &&\n\t\t\tstrings.Contains(qs[3], \"test_123\")) {\n\t\t\tt.Fatalf(\"log output is unexpected: %s\", l)\n\t\t}\n\t}\n\tToggleLogger(logName, true)\n\tDefaultLogger.Infof(\"test_%d\", 123)\n\tProxy.Infof(context.Background(), \"test_%d\", 123)\n\tif lines, err := readLines(logName); err != nil || len(lines) != 2 {\n\t\tt.Fatal(\"disable proxy logger failed\")\n\t}\n}",
"func (logProxy *loggerProxy)Info(msgfmt string, args ...interface{}) {\n var ch loggerProxyChannel\n ch.fnPtr = logProxy.logObj.Info\n ch.msg = logProxy.appendlog(msgfmt, args...)\n logProxy.logChannel <- ch\n}",
"func Log(e e.Event) {\n\tlogger.mu.Lock()\n\tdefer logger.mu.Unlock()\n\tif logger.enabled {\n\t\tif logger.Encoder == nil {\n\t\t\tlogger.init()\n\t\t}\n\t\tlogger.Encoder.Encode(e)\n\t}\n}",
"func Logger(system string) *ZapEventLogger {\n\tif len(system) == 0 {\n\t\tsetuplog := Logger(\"setup-logger\")\n\t\tsetuplog.Error(\"Missing name parameter\")\n\t\tsystem = \"undefined\"\n\t}\n\tlogger := log2.Logger(system)\n\treturn &ZapEventLogger{system: system, SugaredLogger: logger.SugaredLogger}\n}",
"func WithFields(fields ...Field) LeveledLogger {\n\treturn Logger.WithFields(fields...)\n}",
"func (t t) Log(args ...interface{}) {\n\tfmt.Println(args...)\n}",
"func init() {\n\tlogger = &log.Logger{\n\t\tOut: os.Stdout,\n\t\tLevel: log.DebugLevel,\n\t\tFormatter: &logStashFormatter{log.TextFormatter{\n\t\t\tTimestampFormat: \"2006-01-02 15:04:05\",\n\t\t\tFullTimestamp: true},\n\t\t},\n\t}\n}",
"func (logProxy *loggerProxy)Trace(msgfmt string, args ...interface{}) {\n var ch loggerProxyChannel\n ch.fnPtr = logProxy.logObj.Trace\n ch.msg = logProxy.appendlog(msgfmt, args...)\n logProxy.logChannel <- ch\n}",
"func (k Keeper) Logger(ctx sdk.Context) log.Logger {\n\treturn ctx.Logger().With(\"module\", \"ibc/dns/server\")\n}",
"func Logger(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tstart := time.Now()\n\t\tww := middleware.NewWrapResponseWriter(w, r.ProtoMajor)\n\t\thost, port, _ := net.SplitHostPort(r.RemoteAddr)\n\t\tdefer func() {\n\t\t\tvar event *zerolog.Event\n\t\t\tif ww.Status() < 500 {\n\t\t\t\tevent = log.Info()\n\t\t\t} else {\n\t\t\t\tevent = log.Error()\n\t\t\t}\n\t\t\tevent.\n\t\t\t\tFields(map[string]interface{}{\n\t\t\t\t\t\"host\": host,\n\t\t\t\t\t\"port\": port,\n\t\t\t\t\t\"method\": r.Method,\n\t\t\t\t\t\"status\": ww.Status(),\n\t\t\t\t\t\"took\": float64(time.Since(start)) / 1e6,\n\t\t\t\t\t\"bytes_in\": r.Header.Get(\"Content-Length\"),\n\t\t\t\t\t\"bytes_out\": ww.BytesWritten(),\n\t\t\t\t}).\n\t\t\t\tTimestamp().\n\t\t\t\tMsg(r.URL.Path)\n\t\t}()\n\t\tnext.ServeHTTP(ww, r)\n\t})\n}",
"func Logger(inner http.HandlerFunc, name string) http.HandlerFunc {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tstart := time.Now()\n\n\t\tinner.ServeHTTP(w, r)\n\n\t\tlog.Printf(\n\t\t\t\"%s\\t%s\\t%s\\t%s\",\n\t\t\tr.Method,\n\t\t\tr.RequestURI,\n\t\t\tname,\n\t\t\ttime.Since(start),\n\t\t)\n\t})\n}",
"func (e *Huobi) Log(msgs ...interface{}) {\n\te.logger.Log(constant.INFO, \"\", 0.0, 0.0, msgs...)\n}",
"func (l *LogrusLogging) Logger(level string) Logger {\n\tlogger := struct{ LogrusLogger }{}\n\n\tswitch level {\n\tcase DebugLogLevel:\n\t\tlogger.printf = l.Log.Debugf\n\tcase InfoLogLevel:\n\t\tlogger.printf = l.Log.Infof\n\tcase WarningLogLevel:\n\t\tlogger.printf = l.Log.Warningf\n\tcase ErrorLogLevel:\n\t\tlogger.printf = l.Log.Errorf\n\tcase FatalLogLevel:\n\t\tlogger.printf = l.Log.Fatalf\n\t}\n\n\treturn logger\n}",
"func setupLogging(level logging.Level) *logging.Logger {\n\tlogger := logging.MustGetLogger(\"libsteg\")\n\tformat := logging.MustStringFormatter(\n\t\t`%{color}%{time:15:04:05.000} %{shortfunc} ▶ %{level:.4s} %{id:03x}%{color:reset} %{message}`,\n\t)\n\n\tbackend1 := logging.NewLogBackend(os.Stdout, \"\", 0)\n\tbackend1Formatter := logging.NewBackendFormatter(backend1, format)\n\tbackend1Leveled := logging.AddModuleLevel(backend1Formatter)\n\tbackend1Leveled.SetLevel(level, \"\")\n\tlogging.SetBackend(backend1Leveled)\n\n\treturn logger\n}",
"func rpcLogger(flag bool) Logger {\n\treturn makeLogger(flag, Fields{\"layer\": \"rpc\"})\n}",
"func TestLog(t *testing.T) {\n\tt.Skip()\n\n\tfmt.Printf(\"TestLog(): start\\n\")\n\n\tlogrus.Printf(\"bucket_test.go: TestLog(): printing from logrus before format\")\n\n\tlog := logrus.New()\n\tlog.Formatter = &logrus.JSONFormatter{}\n\tlog.Printf(\"bucket_test.go: TestLog(): printing JSONFormatter\")\n\n\tlog.Formatter = &logrus.TextFormatter{ForceColors: true}\n\tlog.Infof(\"bucket_test.go: TestLog(): printing TextFormatter\")\n\t// will not be displayed\n\tlog.Debugf(\"bucket_test.go: TestLog(): no debug level\")\n\t// will be displayed\n\tlog.SetLevel(logrus.DebugLevel)\n\tlog.Debugf(\"bucket_test.go: TestLog(): with debug level\")\n\t\t\n\tlmont := logmont.New()\n\tlmont.Printf(\"bucket_test.go: TestLog(): lmont -> default printing\")\n\tlmont.Formatter = &logrus.TextFormatter{ForceColors: true}\n\tlmont.Printf(\"bucket_test.go: TestLog(): lmont -> force colors\")\n\tlmont.Printf(\"bucket_test.go: TestLog(): lmont -> PRINT s=%s\", \"print_string\")\n\tlmont.Infof(\"bucket_test.go: TestLog(): lmont -> INFO s=%s\", \"info_string\")\n\tlmont.Warnf(\"bucket_test.go: TestLog(): lmont -> WARN s=%s\", \"warn_string\")\n\tlmont.Errorf(\"bucket_test.go: TestLog(): lmont -> ERROR s=%s\", \"error_string\")\n\n\tlmont.SetLevel(logrus.DebugLevel)\n\tlmont.Debugf(\"bucket_test.go: TestLog(): lmont -> DEBUG s=%s\", \"debug_string\")\n\n\tlmont.SetTrace(true)\n\tlmont.Tracef(\"bucket_test.go: TestLog(): lmont -> TRACE a1=%s, a2=%s\\n\", \"args1\", \"args2\")\n\tlmont.Tracef(\"bucket_test.go: TestLog(): lmont -> TRACE no argsstring\")\n\n\t// should not display\n\tlmont.SetTrace(false)\n\tlmont.Tracef(\"bucket_test.go: TestLog(): lmont -> TRACE s=%s\\n\", \"don't snow\")\n\n\tbktLog := bucket.New()\n\tlmont.Infof(\"bucket_test.go: TestLog(): bktLog = %+v\", bktLog)\n\n\t// Default settings for the default logmont\n\tbktLog.Logit()\n\n\t// Package level to keep it simple for now\n\tbucket.SetLogger(lmont)\n\n\t// Info and Debug, not Trace\n\tbktLog.Logit()\n\n\t// Info, Debug, Trace\n\tlmont.SetTrace(true)\n\tbktLog.Logit()\n\t\n\tfmt.Printf(\"TestLog(): finish\\n\")\n}",
"func (c *Channel) Log(l string) {\n\tlog.Printf(\"Channel <%s>: %s\", c.GetName(), l)\n}",
"func (l *Log) init() {\n\t// Init logger as below\n\tl.logger = logrus.New()\n\n\t// Output to stdout instead of the default stderr\n\t// Can be any io.Writer, see below for File example\n\tl.logger.SetOutput(ioutil.Discard)\n\n\t// Log as JSON instead of the default ASCII formatter.\n\t// l.logger.SetFormatter(&log.JSONFormatter{})\n\tl.logger.SetFormatter(&logrus.TextFormatter{\n\t\tForceColors: true,\n\t})\n\n\tl.logger.SetLevel(logrus.TraceLevel)\n}",
"func (i *Interactor) Log(msg string, args ...interface{}) {\n\ti.Logger.Log(msg, args...)\n}",
"func (k Keeper) Logger(ctx sdk.Context) log.Logger {\n\treturn ctx.Logger().With(\"module\", \"x/\"+types.ModuleName)\n}",
"func serviceLogger(msg serviceMsg, args ...any) {\n\tswitch msg {\n\tcase serviceError:\n\t\tl.Printf(\"error: loc=\\\"%s\\\" err=\\\"%v\\\"\", args[0], args[1])\n\tcase serviceCallbacks:\n\t\tif _, ok := args[0].(callback.Connecter); ok {\n\t\t\tl.Println(\"callback: client connection callback registered\")\n\t\t}\n\tcase serviceSetCommands:\n\t\tfor _, arg := range args {\n\t\t\tif cmd, ok := arg.(commander.Command); ok {\n\t\t\t\tl.Printf(\"adding command: %v\", cmd.Name)\n\t\t\t}\n\t\t}\n\tcase serviceListener:\n\t\tif t, ok := args[0].(listener.Listener); ok {\n\t\t\tl.Printf(\"listener: type=\\\"%s\\\"\", t.Type())\n\t\t}\n\tcase serviceRunner:\n\t\tif _, ok := args[0].(runner.Listener); ok {\n\t\t\tl.Println(\"runner: registered Listener\")\n\t\t}\n\t\tif _, ok := args[0].(runner.Starter); ok {\n\t\t\tl.Println(\"runner: registered Starter\")\n\t\t}\n\tcase serviceStore:\n\t\tif t, ok := args[0].(store.Filer); ok {\n\t\t\tl.Printf(\"store: type=\\\"%s\\\"\", t.Type())\n\t\t}\n\tcase serviceStarted:\n\t\tl.Println(\"started\")\n\t}\n}",
"func TestBasicLogger(t *testing.T) {\n\tt.Run(\"New\", func(t *testing.T) {\n\t\tvar buf bytes.Buffer\n\t\tlogger := NewBasicLogger(&buf, \"\", 0)\n\t\tassert.NotNil(t, logger)\n\t\tassert.IsType(t, &BasicLogger{}, logger)\n\t\tassert.NotNil(t, logger.stdLogger)\n\t\tassert.Equal(t, INFO, logger.level)\n\n\t\tt.Run(\"SetLevel\", func(t *testing.T) {\n\t\t\tlogger.SetLevel(ERROR)\n\t\t\tassert.Equal(t, ERROR, logger.level)\n\t\t})\n\n\t\targs := []interface{}{\"First\", \"Second\"}\n\t\tformat := \"%s-%s\"\n\t\tmsg := prepareMessage(logSequenceID, DEBUG, nil, args...)\n\t\tmsgFmt := prepareMessage(logSequenceID, DEBUG, &format, args...)\n\n\t\tt.Run(\"ExtendedLeveledLogger\", func(t *testing.T) {\n\t\t\tassert.Implements(t, (*LeveledLogger)(nil), logger)\n\n\t\t\tlogger.SetLevel(DEBUG)\n\n\t\t\tbuf.Reset()\n\t\t\tlogger.Debug(args...)\n\t\t\tmsg.id = logSequenceID\n\t\t\tassert.Equal(t, fmtMsg(msg), buf.String())\n\n\t\t\tbuf.Reset()\n\t\t\tlogger.Debugf(format, args...)\n\t\t\tmsgFmt.id = logSequenceID\n\t\t\tassert.Equal(t, fmtMsg(msgFmt), buf.String())\n\t\t})\n\n\t\tt.Run(\"LevelAbove\", func(t *testing.T) {\n\t\t\tlogger.SetLevel(INFO)\n\t\t\tbuf.Reset()\n\t\t\tlogger.Debug(args...)\n\n\t\t\tassert.Empty(t, buf.String())\n\t\t})\n\n\t\tt.Run(\"Info\", func(t *testing.T) {\n\t\t\tlogger.SetLevel(INFO)\n\t\t\tbuf.Reset()\n\t\t\tlogger.Info(args...)\n\t\t\tmsg.level = INFO\n\t\t\tmsg.id = logSequenceID\n\t\t\tassert.Equal(t, fmtMsg(msg), buf.String())\n\n\t\t\tbuf.Reset()\n\t\t\tlogger.Infof(format, args...)\n\t\t\tmsgFmt.level = INFO\n\t\t\tmsgFmt.id = logSequenceID\n\t\t\tassert.Equal(t, fmtMsg(msgFmt), buf.String())\n\t\t})\n\n\t\tt.Run(\"Warning\", func(t *testing.T) {\n\t\t\tlogger.SetLevel(WARNING)\n\t\t\tbuf.Reset()\n\t\t\tlogger.Warning(args...)\n\t\t\tmsg.level = WARNING\n\t\t\tmsg.id = logSequenceID\n\t\t\tassert.Equal(t, fmtMsg(msg), buf.String())\n\n\t\t\tbuf.Reset()\n\t\t\tlogger.Warningf(format, args...)\n\t\t\tmsgFmt.level = WARNING\n\t\t\tmsgFmt.id = logSequenceID\n\t\t\tassert.Equal(t, fmtMsg(msgFmt), buf.String())\n\t\t})\n\n\t\tt.Run(\"Error\", func(t *testing.T) {\n\t\t\tlogger.SetLevel(ERROR)\n\t\t\tbuf.Reset()\n\t\t\tlogger.Error(args...)\n\t\t\tmsg.level = ERROR\n\t\t\tmsg.id = logSequenceID\n\t\t\tassert.Equal(t, fmtMsg(msg), buf.String())\n\n\t\t\tbuf.Reset()\n\t\t\tlogger.Errorf(format, args...)\n\t\t\tmsgFmt.level = ERROR\n\t\t\tmsgFmt.id = logSequenceID\n\t\t\tassert.Equal(t, fmtMsg(msgFmt), buf.String())\n\t\t})\n\n\t\tt.Run(\"Panic\", func(t *testing.T) {\n\t\t\tlogger.SetLevel(CRITICAL)\n\t\t\tbuf.Reset()\n\n\t\t\tassert.Panics(t, func() { logger.Panic(args...) })\n\t\t\tmsg.level = CRITICAL\n\t\t\tmsg.id = logSequenceID\n\t\t\tassert.Equal(t, fmtMsg(msg), buf.String())\n\n\t\t\tbuf.Reset()\n\t\t\tmsgFmt.level = CRITICAL\n\t\t\tassert.Panics(t, func() { logger.Panicf(format, args...) })\n\t\t\tmsgFmt.id = logSequenceID\n\t\t\tassert.Equal(t, fmtMsg(msgFmt), buf.String())\n\t\t})\n\t})\n}",
"func exampleLogger(l *gentest.Logger) genny.Logger {\n\tl.CloseFn = func() error {\n\t\ts := l.Stream.String()\n\t\tc := build.Default\n\t\tfor _, src := range c.SrcDirs() {\n\t\t\ts = strings.Replace(s, src, \"/go/src\", -1)\n\t\t}\n\t\ts = strings.Replace(s, \"\\\\\", \"/\", -1)\n\n\t\tfor i, line := range strings.Split(s, \"\\n\") {\n\t\t\tif strings.Contains(line, \"Step:\") {\n\t\t\t\ts = strings.Replace(s, line, fmt.Sprintf(\"[DEBU] Step: %d\", i+1), 1)\n\t\t\t}\n\t\t}\n\t\tfmt.Print(s)\n\t\treturn nil\n\t}\n\treturn l\n}"
] | [
"0.66066176",
"0.61660343",
"0.60266924",
"0.5913209",
"0.5817586",
"0.5776641",
"0.5773255",
"0.5722527",
"0.562674",
"0.55921596",
"0.5576498",
"0.55759066",
"0.5570972",
"0.5550121",
"0.5525077",
"0.5479035",
"0.5456552",
"0.54277515",
"0.54236746",
"0.5412267",
"0.5407686",
"0.54057914",
"0.54032695",
"0.540221",
"0.53896016",
"0.5375159",
"0.53736234",
"0.5363654",
"0.53582335",
"0.5358208",
"0.5350595",
"0.53470093",
"0.5339453",
"0.5334685",
"0.5321548",
"0.5313807",
"0.5313171",
"0.5300562",
"0.5297279",
"0.5295803",
"0.5287903",
"0.52846074",
"0.52779394",
"0.52779394",
"0.5267472",
"0.5265989",
"0.52603894",
"0.52589923",
"0.5256899",
"0.52356315",
"0.5230441",
"0.52229244",
"0.52144796",
"0.52130854",
"0.52130795",
"0.5209361",
"0.5202461",
"0.5201511",
"0.5199281",
"0.5198581",
"0.51984936",
"0.51925015",
"0.51914126",
"0.51898736",
"0.51898503",
"0.51877886",
"0.51855606",
"0.5183519",
"0.5182767",
"0.51814365",
"0.51753795",
"0.51752406",
"0.5171428",
"0.516331",
"0.5162806",
"0.5162806",
"0.5159714",
"0.5158816",
"0.5152444",
"0.51503927",
"0.51455784",
"0.5144506",
"0.51440805",
"0.5143504",
"0.51376235",
"0.5133799",
"0.51334506",
"0.5128409",
"0.512055",
"0.51185393",
"0.5117564",
"0.51143116",
"0.51132363",
"0.51106834",
"0.51101536",
"0.5108974",
"0.51085573",
"0.51084554",
"0.5098741",
"0.5090843"
] | 0.6948606 | 0 |
SetHeader to set header (NOT SUPPORTED) | func (l *EchoLogrus) SetHeader(string) {} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (s *Socket) SetHeader(key, value string) {\n\ts.Lock()\n\ts.Conn.httpHeader().Set(key, value)\n\ts.Unlock()\n}",
"func (rm *REKTManager) SetHeader(key, value string) {\n\trm.headers.Set(key, value)\n}",
"func (req *Request) SetHeader(name, val string) {\n\treq.w.Header().Set(name, val)\n}",
"func SetHeader(w http.ResponseWriter, key, value string) {\n\tw.Header().Set(key, value)\n}",
"func (h *HTTP) SetHeader(key, value string) {\n\th.headers[key] = value\n}",
"func (api *Client) SetHeader(key, value string) *Client {\n\tapi.mu.Lock()\n\tdefer api.mu.Unlock()\n\tapi.headers[key] = value\n\treturn api\n}",
"func (r *Router) SetHeader(w http.ResponseWriter, k, v string) {\n\tw.Header().Set(k, v)\n}",
"func (c *Action) SetHeader(key string, value string) {\n\tc.Header().Set(key, value)\n}",
"func (r *Request) SetHeader(k, v string) *Request {\n\tpanic(\"TODO\")\n\treturn r\n}",
"func (this *Context) SetHeader(hdr string, val string, unique bool) {\n\tif unique {\n\t\tthis.Header().Set(hdr, val)\n\t} else {\n\t\tthis.Header().Add(hdr, val)\n\t}\n}",
"func (c *Client) SetHeader(key, value string) {\n\tc.Headers[key] = value\n}",
"func (ctx *Context) SetHeader(hdr string, val string, unique bool) {\n\tif unique {\n\t\tctx.Header().Set(hdr, val)\n\t} else {\n\t\tctx.Header().Add(hdr, val)\n\t}\n}",
"func (i *ICoreWebView2HttpRequestHeaders) SetHeader(name, value string) error {\n\t_name, err := windows.UTF16PtrFromString(name)\n\tif err != nil {\n\t\treturn nil\n\t}\n\n\t_value, err := windows.UTF16PtrFromString(value)\n\tif err != nil {\n\t\treturn nil\n\t}\n\n\tres, _, err := i.vtbl.SetHeader.Call(\n\t\tuintptr(unsafe.Pointer(i)),\n\t\tuintptr(unsafe.Pointer(_name)),\n\t\tuintptr(unsafe.Pointer(_value)),\n\t)\n\tif err != windows.ERROR_SUCCESS {\n\t\treturn err\n\t}\n\tif windows.Handle(res) != windows.S_OK {\n\t\treturn syscall.Errno(res)\n\t}\n\treturn nil\n}",
"func (c *Context) SetHeader(key, value string) {\n\tc.W.Header().Set(key, value)\n}",
"func (req *Request) SetHeader(key, value string) {\n\treq.header.Set(key, value)\n}",
"func (r *Request) SetHeader(key, val string) {\n\tr.Headers[key] = val\n}",
"func (c *CommandDescriptor) SetHeader(key string, value interface{}) {\n\tc.headers[key] = value\n}",
"func (service *Manager) SetHeader(header interface{}) {\n\tservice.client.AddHeader(header)\n}",
"func (c *Context) SetHeader(key string, value string) {\n\tc.Writer.Header().Set(key, value)\n}",
"func (m *MailYak) SetHeader(name, value string) {\n\tm.headers[m.trimRegex.ReplaceAllString(name, \"\")] = []string{mime.QEncoding.Encode(\"UTF-8\", m.trimRegex.ReplaceAllString(value, \"\"))}\n}",
"func (zr *ZRequest) SetHeader(key, value string) *ZRequest {\n\tif zr.ended {\n\t\treturn zr\n\t}\n\tzr.headers.Set(key, value)\n\treturn zr\n}",
"func (f *fakeDiskUpdateWatchServer) SetHeader(metadata.MD) error { return nil }",
"func (req *Request) SetHeader(key, value string) {\n\treq.Req.Header.Set(key, value)\n}",
"func SetHeader(out io.Writer, key, value string) {\n\tif resp, ok := out.(http.ResponseWriter); ok {\n\t\tresp.Header().Set(key, value)\n\t}\n}",
"func (c *baseClient) SetHeader(key, value string) *baseClient {\n\tc.header.Add(key, value)\n\treturn c\n}",
"func (pw *pooledWriter) SetHeader(h writer.Header) {\n\tpw.Name = h.Name\n\tpw.Extra = h.Extra\n\tpw.Comment = h.Comment\n\tpw.ModTime = h.ModTime\n\tpw.OS = h.OS\n}",
"func (req *PatchJSONRequest) SetHeader(k, v string) {\n\treq.req.Header.Set(k, v)\n}",
"func (request *Request) SetHeader(key string, value string) *Request {\n\trequest.Headers.Set(key, value)\n\treturn request\n}",
"func (F *Frisby) SetHeader(key, value string) *Frisby {\n\tif F.Req.Headers == nil {\n\t\tF.Req.Headers = make(map[string]string)\n\t}\n\tF.Req.Headers[key] = value\n\treturn F\n}",
"func (h Header) Set(key, value string) {\n\ttextproto.MIMEHeader(h).Set(key, value)\n}",
"func (c *httpClient) SetHeader(header http.Header) HttpClient {\n\tclient := *c\n\tclient.headers = header\n\treturn &client\n}",
"func (self *AbtabURL) SetHeader(header []string) {\n\tself.Header = header\n\tself.HeaderMap = make(map[string]int)\n\tfor idx, fname := range header {\n\t\t//fmt.Printf(\"SetHeader: %s=%d\\n\", fname, idx)\n\t\tself.HeaderMap[fname] = idx\n\t}\n}",
"func (h headers) Set(value string) error {\n\tparts := strings.SplitN(value, \":\", 2)\n\tif len(parts) != 2 {\n\t\treturn fmt.Errorf(\"header '%s' has a wrong format\", value)\n\t}\n\tkey, val := strings.TrimSpace(parts[0]), strings.TrimSpace(parts[1])\n\tif key == \"\" || val == \"\" {\n\t\treturn fmt.Errorf(\"header '%s' has a wrong format\", value)\n\t}\n\t// Add key/value directly to the http.Header (map[string][]string).\n\t// http.Header.Add() canonicalizes keys but vegeta is used\n\t// to test systems that require case-sensitive headers.\n\th.Header[key] = append(h.Header[key], val)\n\treturn nil\n}",
"func (ctx *HijackRequest) SetHeader(pairs ...string) *HijackRequest {\n\tctx.req.Header(pairs...)\n\treturn ctx\n}",
"func (p *Part) SetHeader(name string, value string) {\n\theaders := C.g_mime_object_get_header_list(p.asGMimeObject())\n\tcName := C.CString(name)\n\tdefer C.free(unsafe.Pointer(cName))\n\tcValue := C.CString(value)\n\tdefer C.free(unsafe.Pointer(cValue))\n\tcCharset := C.CString(\"UTF-8\")\n\tdefer C.free(unsafe.Pointer(cCharset))\n\n\tC.g_mime_header_list_set(headers, cName, cValue, cCharset)\n}",
"func (ctx *HijackResponse) SetHeader(pairs ...string) {\n\tfor i := 0; i < len(pairs); i += 2 {\n\t\tctx.payload.ResponseHeaders = append(ctx.payload.ResponseHeaders, &proto.FetchHeaderEntry{\n\t\t\tName: pairs[i],\n\t\t\tValue: pairs[i+1],\n\t\t})\n\t}\n}",
"func (r *Response) SetHeader(key, value string) {\n\tif r.Header == nil {\n\t\tr.Header = make(http.Header)\n\t}\n\tr.Header.Set(key, value)\n}",
"func (r *Request) SetHeader(key string, values ...string) *Request {\n\tif r.headers == nil {\n\t\tr.headers = http.Header{}\n\t}\n\n\tr.headers.Del(key)\n\n\tfor _, value := range values {\n\t\tr.headers.Add(key, value)\n\t}\n\n\treturn r\n}",
"func (r *Request) SetHeader(key, value string) *Request {\n\tr.header.Set(key, value)\n\treturn r\n}",
"func (h *headerFlags) Set(value string) error {\n\th.flat = append(h.flat, value)\n\theaderParts := strings.Split(value, \":\")\n\tif len(headerParts) != 2 {\n\t\t_ = fmt.Errorf(\"invalid header format specified: `%s`, \"+\n\t\t\t\"must be in format `name:value`\", value)\n\t\tos.Exit(1)\n\t}\n\tif h.header == nil {\n\t\th.header = http.Header{}\n\t}\n\th.header.Add(headerParts[0], headerParts[1])\n\treturn nil\n}",
"func (h *ResponseHeader) Set(key, val string) {\n\th.ResponseHeader.Set(key, val)\n}",
"func (service *MediaServiceInterface) SetHeader(header interface{}) {\n\tservice.client.AddHeader(header)\n}",
"func (this *SIPMessage) SetHeader(h header.Header) (IllegalArgumentException error) {\n\tif h == nil {\n\t\treturn errors.New(\"IllegalArgumentException: nil header!\")\n\t}\n\n\tif hl, ok := h.(header.SIPHeaderLister); ok {\n\t\t// Ignore empty lists.\n\t\tif hl.Len() == 0 {\n\t\t\treturn nil\n\t\t}\n\t}\n\tthis.RemoveHeader(h.GetHeaderName())\n\treturn this.AttachHeader3(h, true, false)\n}",
"func (req *Request) SetRequestHeader(k, v string) {\n\treq.r.Header.Set(k, v)\n}",
"func SetHeader(ctx context.Context, key HeaderContextKey, value string) context.Context {\n\treturn context.WithValue(ctx, key, value)\n}",
"func (c *Context) setHead(key, value string) {\n\tc.f.Response.Header.Set(key, value)\n}",
"func (_e *MockDataReceiverService_PutMetricServer_Expecter) SetHeader(_a0 interface{}) *MockDataReceiverService_PutMetricServer_SetHeader_Call {\n\treturn &MockDataReceiverService_PutMetricServer_SetHeader_Call{Call: _e.mock.On(\"SetHeader\", _a0)}\n}",
"func (self *Response) SetHeader(header http.Header) {\n\tself.header = header\n}",
"func (h *RequestHeader) Set(key, val string) {\n\th.RequestHeader.Set(key, val)\n}",
"func (ac challenge) SetHeaders(w http.ResponseWriter) {\n\n}",
"func (ctx *Context) SetHeader(key, value string) {\n\tctx.Response.Header.Set(key, value)\n}",
"func SetHeader(req *http.Request) {\n\treq.Header.Set(\"Accept\", \"Application/json\")\n\treq.Header.Set(\"Content-Type\", \"Application/json\")\n\treq.Header.Set(\"Charset\", \"utf-8\")\n}",
"func (rc *RelayController) SetNewHeader(header *types.Header) {\n\tif header.Relayer == nil {\n\t\treturn\n\t}\n\n\tif header.Relayer.Address == rc.currentNodeAddress {\n\t\trc.logger.Debug(\"RELAY SetNewHeader\", \"rc.startRelay()\")\n\t\trc.startRelay()\n\t} else {\n\t\trc.logger.Debug(\"RELAY SetNewHeader\", \"rc.stopRelay()\",\n\t\t\tfmt.Sprintf(\"expcted: %s obtain: %s\", header.Relayer.Address, rc.currentNodeAddress))\n\t\trc.stopRelay()\n\t}\n}",
"func (h HttpHeader) Set(key, value string) {\n\tif _, ok := h[key]; ok {\n\t\th[key] = nil\n\t}\n\th[key] = make([]string, 0)\n\th[key] = append(h[key], value)\n}",
"func SetHeaderKey(req *http.Request, key string, value string) {\n\treq.Header.Set(key, value)\n}",
"func (h *RequestHeader) Set(key, value string) {\n\tinitHeaderKV(&h.bufKV, key, value, h.disableNormalizing)\n\th.SetCanonical(h.bufKV.key, h.bufKV.value)\n}",
"func (_e *MockDataRegistryService_CreateOrUpdateMetricsServer_Expecter) SetHeader(_a0 interface{}) *MockDataRegistryService_CreateOrUpdateMetricsServer_SetHeader_Call {\n\treturn &MockDataRegistryService_CreateOrUpdateMetricsServer_SetHeader_Call{Call: _e.mock.On(\"SetHeader\", _a0)}\n}",
"func (r *Request) SetHeader(header, value string) *Request {\n\tr.Header.Set(header, value)\n\treturn r\n}",
"func (c *Ctx) Set(key string, val string) {\n\tc.Response.Header.Set(key, val)\n}",
"func (client *Client) setHeaders(req *http.Request, httpVerb, path, contentType, content string) error {\n\tif client.creds == nil {\n\t\treturn ErrNoCredentials\n\t}\n\tnow := time.Now().UTC().Format(\"2006-01-02T15:04:05.000000Z07:00\")\n\n\tif len(contentType) > 0 {\n\t\treq.Header.Set(\"Content-Type\", contentType)\n\t}\n\n\treq.Header.Set(\"Accept\", \"application/json\")\n\treq.Header.Set(\"Dragonchain\", client.creds.GetDragonchainID())\n\treq.Header.Set(\"Timestamp\", fmt.Sprintf(\"%s\", now))\n\treq.Header.Set(\"Authorization\", client.creds.GetAuthorization(httpVerb, path, now, contentType, content))\n\treturn nil\n}",
"func (h CommonHeader) Set(key string, value string) {\n\th[key] = value\n}",
"func (m *MockWerftService_ListenServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func (h *ResponseHeader) Set(key, value string) {\n\tinitHeaderKV(&h.bufKV, key, value, h.disableNormalizing)\n\th.SetCanonical(h.bufKV.key, h.bufKV.value)\n}",
"func (c *Context) Header(key, value string) {\n\tif len(value) == 0 {\n\t\tc.Writer.Header().Del(key)\n\t} else {\n\t\tc.Writer.Header().Set(key, value)\n\t}\n}",
"func (h *ApnsHeader) set(reqHeader http.Header) {\n\t// headers are optional\n\tif h == nil {\n\t\treturn\n\t}\n\n\tif h.ApnsId != \"\" {\n\t\treqHeader.Set(\"apns-id\", h.ApnsId)\n\t} // when omitted, Apple will generate a UUID for you\n\n\tif h.ApnsCollapseId != \"\" {\n\t\treqHeader.Set(\"apns-collapse-id\", h.ApnsCollapseId)\n\t}\n\n\tif h.ApnsPriority != \"\" {\n\t\treqHeader.Set(\"apns-priority\", h.ApnsPriority)\n\t} // when omitted, the default priority is 10\n\n\tif h.ApnsTopic != \"\" {\n\t\treqHeader.Set(\"apns-topic\", h.ApnsTopic)\n\t}\n\tif !h.ApnsExpiration.IsZero() {\n\t\treqHeader.Set(\"apns-expiration\", fmt.Sprintf(\"%v\", h.ApnsExpiration.Unix()))\n\t}\n\n}",
"func setHeader(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Server\", \"WebServer\")\n\tw.Header().Set(\"Content-Type\", \"text/html\")\n\tw.Header().Set(\"Cache-Control\", \"no-cache, private, max-age=0\")\n\tw.Header().Set(\"Expires\", time.Unix(0, 0).Format(http.TimeFormat))\n\tw.Header().Set(\"Pragma\", \"no-cache\")\n\tw.Header().Set(\"X-Accel-Expires\", \"0\")\n}",
"func (m *MockAgentSecure_TaggerStreamEntitiesServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func (m *MockAgentSecure_WorkloadmetaStreamEntitiesServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func SetMsgHeader(header uint64, msgData []byte) error {\n if len(msgData) < HEADER_LEN_B {\n return ErrBufferTooSmall\n }\n\n msgData[0] = byte(header >> 56)\n msgData[1] = byte(header >> 48)\n msgData[2] = byte(header >> 40)\n msgData[3] = byte(header >> 32)\n msgData[4] = byte(header >> 24)\n msgData[5] = byte(header >> 16)\n msgData[6] = byte(header >> 8)\n msgData[7] = byte(header)\n\n return nil\n}",
"func (m *MockWerftService_SubscribeServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func (m *MockMachine_ExecuteServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func SetAuthHeader(token string) string {\n\treturn fmt.Sprintf(\"Bearer %s\", token)\n}",
"func (m *MockBasic_PrimeServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func (h *header) Set(value string) error {\n\ti := strings.IndexRune(value, ':')\n\tif i < 0 {\n\t\treturn errorString(\"Header field format must be `name: value'\")\n\t}\n\thf := hfield{value[0:i], value[i+1:]}\n\t*h = append(*h, hf)\n\treturn nil\n}",
"func (m *MockMessageBus_SubscribeServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func (h *Header) Set(key, value string) {\n\tif i, ok := h.index(key); ok {\n\t\th.slice[i+1] = value\n\t} else {\n\t\th.slice = append(h.slice, key, value)\n\t}\n}",
"func (h Headers) Set(key, value string) {\n\tkey = strings.ToLower(key)\n\th[key] = value\n}",
"func (m *MockConfigAdminService_UploadRegisterModelServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func SetHeader() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tc.Header(\"Access-Control-Allow-Origin\", \"*\")\n\t\tc.Header(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE, UPDATE\")\n\t\tc.Header(\"Access-Control-Allow-Headers\", \"Origin, X-Requested-With, Content-Type, Accept, Authorization\")\n\t\tc.Header(\"Access-Control-Expose-Headers\", \"Content-Length, Access-Control-Allow-Origin, Access-Control-Allow-Headers, Cache-Control, Content-Language, Content-Type\")\n\t\tc.Header(\"Access-Control-Allow-Credentials\", \"true\")\n\n\t\tif c.Request.Method == \"OPTIONS\" {\n\t\t\tc.AbortWithStatus(204)\n\t\t\treturn\n\t\t}\n\n\t\tc.Next()\n\t}\n}",
"func (c *Context) Header(key, value string) {\n\tif len(value) == 0 {\n\t\tc.Response.Header.Del(key)\n\t} else {\n\t\tc.Response.Header.Set(key, value)\n\t}\n}",
"func (l *Lambda) ResponseHeaderSet(header, value string) {\n\tl.w.Header().Set(header, value)\n}",
"func (p *Context) Header(k, v string) {\n\tp.Writer.Header().Set(k, v)\n}",
"func (c *Client) SetHeaders(headers http.Header) {\n\tc.modifyLock.Lock()\n\tdefer c.modifyLock.Unlock()\n\tc.headers = headers\n}",
"func ResponseSetHeader(key, value string) ResponseModifier {\n\treturn func(resp *http.Response, err error) (*http.Response, error) {\n\t\tif resp != nil {\n\t\t\tif resp.Header == nil {\n\t\t\t\tresp.Header = make(http.Header)\n\t\t\t}\n\t\t\tresp.Header.Set(key, value)\n\t\t}\n\t\treturn resp, err\n\t}\n}",
"func (m *MockTopo_WatchServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func (m *mockHTTPWriter) WriteHeader(scode int) {\n\tm.WriteHeader(1)\n}",
"func (req *Request) SetHeaderCookie(key, value string) {\n\treq.Req.Header.Set(key, value)\n}",
"func (requestHeader *RequestHeader) SetVersion(version uint16) {\n\t// if requestHeader.APIVersion == -1 {\n\t// \treturn\n\t// }\n\trequestHeader.APIVersion = version\n}",
"func (m *MockWerftService_StartLocalJobServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func (xs *Sheet) SetHeader(header string, margin float64) int {\n\ttmp, _, _ := xs.xb.lib.NewProc(\"xlSheetSetHeaderW\").\n\t\tCall(xs.self, S(header), F(margin))\n\treturn int(tmp)\n}",
"func (m *MockProvisioner_ProvisionResourceServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func (filterdev *NetworkTap) SetHeaderComplete(f int) error {\n\t_, _, err := syscall.Syscall(syscall.SYS_IOCTL, uintptr(filterdev.device.Fd()), syscall.BIOCSHDRCMPLT, uintptr(unsafe.Pointer(&f)))\n\tif err != 0 {\n\t\treturn syscall.Errno(err)\n\t}\n\treturn nil\n}",
"func (c *Context) Header(key, value string) {\n\tif len(value) == 0 {\n\t\tc.Response.Header().Del(key)\n\t} else {\n\t\tc.Response.Header().Set(key, value)\n\t}\n}",
"func (t *Target) AddHeader(key, value string) {\n t.header.Add(key, value)\n}",
"func (m *MockPdfService_InternalRasterizePdfServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func (m *MockConfigAdminService_ListSnapshotsServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func (req *Request) SetResponseHeader(k, v string) {\n\treq.res.Header().Set(k, v)\n}",
"func setHeaders(request *http.Request, headers Vals) {\n\tfor _, v := range headers {\n\t\trequest.Header.Set(v.Name, v.Value)\n\t}\n}",
"func (m *MockAergoRPCService_ListBlockStreamServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func (m *MockPdfService_RasterizePdfServer) SetHeader(arg0 metadata.MD) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHeader\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}"
] | [
"0.8070126",
"0.7863313",
"0.78547883",
"0.7850843",
"0.7806121",
"0.7799137",
"0.7771557",
"0.77660334",
"0.7763081",
"0.77529764",
"0.7746852",
"0.7710076",
"0.76950485",
"0.7674024",
"0.7665853",
"0.7630679",
"0.76287884",
"0.75963986",
"0.75706214",
"0.7531666",
"0.7529309",
"0.7486373",
"0.7444187",
"0.74303573",
"0.7419241",
"0.7382404",
"0.7324501",
"0.7299423",
"0.726452",
"0.7241963",
"0.71823",
"0.7161666",
"0.7159289",
"0.71453655",
"0.7130562",
"0.7080792",
"0.70418185",
"0.7018735",
"0.70115507",
"0.6993368",
"0.6984197",
"0.69671535",
"0.69659156",
"0.6958884",
"0.69519997",
"0.694",
"0.6915952",
"0.6904668",
"0.6897196",
"0.68971115",
"0.6890349",
"0.68321323",
"0.68292177",
"0.6824746",
"0.6824145",
"0.6780497",
"0.6749665",
"0.6746709",
"0.66865224",
"0.6660264",
"0.6651109",
"0.6636311",
"0.66294825",
"0.661627",
"0.6604738",
"0.659837",
"0.6565052",
"0.65493804",
"0.65378636",
"0.65223813",
"0.6509688",
"0.65054744",
"0.6504087",
"0.65036196",
"0.6500422",
"0.6500043",
"0.64942235",
"0.6493328",
"0.648808",
"0.6480659",
"0.6451055",
"0.6449517",
"0.64338887",
"0.6427749",
"0.6422714",
"0.6411387",
"0.6407322",
"0.6405319",
"0.64023274",
"0.64007497",
"0.638059",
"0.63762474",
"0.636864",
"0.6366837",
"0.6357793",
"0.63509315",
"0.6349258",
"0.6346229",
"0.63296896",
"0.6322976"
] | 0.7985397 | 1 |
SetPrefix to set prefix | func (l *EchoLogrus) SetPrefix(prefix string) {
l.prefix = prefix
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func SetPrefix(p string) {\n\tprefix = p\n}",
"func SetPrefix(pre string) {\n\tmu.Lock()\n\tdefer mu.Unlock()\n\tprefix = pre\n}",
"func SetPrefix(s string) {\n\tprefix = s\n}",
"func SetPrefix(s string) {\n\tprefix = s\n}",
"func SetPrefix(p string) {\n\tprefix = strings.ToUpper(p)\n}",
"func SetPrefix(prefix string) { std.SetPrefix(prefix) }",
"func (g *Gopher) SetPrefix(s string) {\n\tg.mu.Lock()\n\tdefer g.mu.Unlock()\n\tg.prefix = s\n}",
"func (l *DockerLib) SetPrefix(p string) {\n\tl.prefix = p\n}",
"func (w *PrefixedWriter) SetPrefix(p string) {\n\tw.prefix = []byte(p)\n}",
"func SetPrefix(prefix string) {\n\tstd.SetPrefix(prefix)\n}",
"func SetPrefix(prefix string) {\n\tstd.SetPrefix(prefix)\n}",
"func (guild *Guild) SetPrefix(pre string) error {\n\tguild.Settings.BotPrefix = pre\n\tguildSettings.add(guild).save(config.GuildFile)\n\treturn nil\n}",
"func (mlog *MultiLogger) SetPrefix(prefix string) {\n\tmlog.prefix = []byte(prefix)\n}",
"func (b *bar) SetPrefix(prefix string) {\n\tb.mutex.Lock()\n\tdefer b.mutex.Unlock()\n\n\tb.prefix = prefix\n}",
"func (logger *Logger) SetPrefix(prefix string) string {\n\tlogger.mu.Lock()\n\tdefer logger.mu.Unlock()\n\tlogger.prefix = prefix\n\treturn logger.prefix\n}",
"func (_m *RediStore) SetKeyPrefix(p string) {\n\t_m.Called(p)\n}",
"func (c *Config) SetPrefix(prefix string) {\n\tif strings.TrimSpace(prefix) == \"\" || strings.TrimSpace(prefix) == c.Prefix {\n\t\treturn\n\t}\n\n\tc.Prefix = prefix\n}",
"func (l *Logger) SetPrefix(prefix string) {\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\tl.prefix = prefix\n}",
"func (l *Logger) SetPrefix(prefix string) {\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\tl.prefix = prefix\n}",
"func (f *msgFilter) SetPrefix(p string) {\n\tf.prefix = p\n}",
"func (r *RedisSession) SetPrefix(name string) {\n\tr.prefix = name + \":\"\n}",
"func (r *RedisSession) SetPrefix(name string) {\n\tr.prefix = name + \":\"\n}",
"func (in *ActionIpAddressIndexInput) SetPrefix(value int64) *ActionIpAddressIndexInput {\n\tin.Prefix = value\n\n\tif in._selectedParameters == nil {\n\t\tin._selectedParameters = make(map[string]interface{})\n\t}\n\n\tin._selectedParameters[\"Prefix\"] = nil\n\treturn in\n}",
"func SetPrefix(prefix string) Option {\n\treturn func(g *Generator) error {\n\t\tg.Prefix = prefix\n\t\treturn nil\n\t}\n}",
"func ConfigureSetPrefix(newPrefix string) {\n\tprefixFunction = func() string {\n\t\treturn newPrefix\n\t}\n}",
"func (a *AbbrFieldNamer) SetPrefix(s string) {\n\ta.Prefix = s\n}",
"func (o *SignalPersonName) SetPrefix(v string) {\n\to.Prefix.Set(&v)\n}",
"func (s *SlackNotify) SetPrefix(v string) {\n\ts.prefix = v\n}",
"func (puo *PrenameUpdateOne) SetPrefix(s string) *PrenameUpdateOne {\n\tpuo.mutation.SetPrefix(s)\n\treturn puo\n}",
"func (s *Scout) SetPrefix(prefix string) {\n\ts.Prefix = prefix\n}",
"func (l *Logger) SetPrefix(prefix string) {\n\tl.lg.SetPrefix(prefix)\n}",
"func (puo *PrefixUpdateOne) SetPrefix(s string) *PrefixUpdateOne {\n\tpuo.mutation.SetPrefix(s)\n\treturn puo\n}",
"func (s *progressBar) SetPrefix(format string, args ...interface{}) {\n\ts.prefix = fmt.Sprintf(format, args...)\n}",
"func (client *RestClient) SetPrefix(newPrefix string) {\n\tclient.prefix = newPrefix\n}",
"func SetPrefix(prefix string) string {\n\tdefer logger.SetPrefix(prefix)\n\told := tags[0]\n\ttags[0] = prefix\n\treturn old\n}",
"func (f *fakeProgressbar) SetPrefix(format string, args ...interface{}) {\n\tf.prefix = fmt.Sprintf(format, args...)\n}",
"func (n *nodeHeader) setPrefix(p []byte) {\n\tpLen, pBytes := n.prefixFields()\n\n\t// Write to the byte array and set the length field to the num bytes copied\n\t*pLen = uint16(copy(pBytes, p))\n}",
"func (pu *PrenameUpdate) SetPrefix(s string) *PrenameUpdate {\n\tpu.mutation.SetPrefix(s)\n\treturn pu\n}",
"func (c *Client) Prefix(s string) {\n\tc.prefix = s\n}",
"func (pu *PrefixUpdate) SetPrefix(s string) *PrefixUpdate {\n\tpu.mutation.SetPrefix(s)\n\treturn pu\n}",
"func (l *LvlStruct) SetPrefix(prefix string) {\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\tl.log.SetPrefix(prefix)\n}",
"func (km *KeyValueMap) SetPrefix(prefix string) {\n\t(*km)[kmPrefix] = prefix\n}",
"func ConfigureSetPrefixFunction(pf func() string) {\n\tprefixFunction = pf\n}",
"func (dblog *DataBaseLogger) SetPrefix(prefix string) {\n\tdblog.prefix = prefix\n}",
"func LogSetPrefix(prefix string) {\n\tmuLogt.Lock()\n\tdefer muLogt.Unlock()\n\tlogt.SetPrefix(prefix)\n}",
"func SetPrefix(prefix string) {\n\tgolog.SetPrefix(prefix)\n}",
"func (flogger *FileLogger) SetPrefix(s string) {\n\tflogger.fileloggerprefix = s\n}",
"func (n *Notification) SetTablePrefix(tablePrefix string) {\n\tn.ecosystem = converter.StrToInt64(tablePrefix)\n}",
"func (m *Member) SetTablePrefix(prefix string) {\r\n\tm.ecosystem = converter.StrToInt64(prefix)\r\n}",
"func (b *Binary) SetTablePrefix(prefix string) {\n\tb.ecosystem = converter.StrToInt64(prefix)\n}",
"func TestSetPrefix(t *testing.T) {\n\tprefix := Prefix()\n\tdefer SetPrefix(prefix)\n\n\ttests := []struct {\n\t\tname string\n\t\tin string\n\t}{\n\t\t{\n\t\t\tname: \"Set: `hello`\",\n\t\t\tin: \"hello\",\n\t\t},\n\t\t{\n\t\t\tname: \"Set: `hello-world`\",\n\t\t\tin: \"hello-world\",\n\t\t},\n\t}\n\n\t// Don't use parallel tests here.\n\tfor _, tt := range tests {\n\t\tSetPrefix(tt.in)\n\t\tif s := Prefix(); s != tt.in {\n\t\t\tt.Errorf(\"%s: failed, got %s, want %s\",\n\t\t\t\ttt.name, s, tt.in)\n\t\t}\n\t}\n}",
"func (s *KinesisVideoStreamConfig) SetPrefix(v string) *KinesisVideoStreamConfig {\n\ts.Prefix = &v\n\treturn s\n}",
"func (fps *FetcherProcessStream) SetPrefix(prefix string) *FetcherProcessStream {\n\tfps.prefix = prefix + \" \"\n\treturn fps\n}",
"func (rw *RemoteTimeWriter) SetPrefix(enable bool) {\n\trw.hasPrefix = enable\n}",
"func (s *S3Bucket) SetPrefix(v string) *S3Bucket {\n\ts.Prefix = &v\n\treturn s\n}",
"func (o *setNamePrefixOptions) RunSetNamePrefix(fSys filesys.FileSystem) error {\n\tmf, err := kustfile.NewKustomizationFile(fSys)\n\tif err != nil {\n\t\treturn err\n\t}\n\tm, err := mf.Read()\n\tif err != nil {\n\t\treturn err\n\t}\n\tm.NamePrefix = o.prefix\n\treturn mf.Write(m)\n}",
"func Prefix(s string) Option {\n\treturn optionFunc(func(l *lineWriter) {\n\t\tl.prefixes = append(l.prefixes, func() string { return s })\n\t})\n}",
"func ConfigureResetPrefix() {\n\tprefixFunction = defaultPrefixFunction\n}",
"func (r *Rest) SetPrefix(prefix string) *Rest {\n\tr.Prefix = prefix\n\treturn r\n}",
"func (e *Encoder) SetNamespacePrefix(namespacePrefix string) {\n\te.namespacePrefix = namespacePrefix\n}",
"func (si *SyncIndexJob) Prefix(p string) {\n\tsi.DestinationPrefix(p)\n}",
"func (r *Router) SetPathPrefix(pfx string) {\n\tr.pathPrefix = pfx\n}",
"func Prefix(value string) Option {\n\treturn addParam(\"prefix\", value)\n}",
"func Prefix(p string) Option {\n\treturn func(s *Source) { s.prefix = p }\n}",
"func WithPrefix(value string) Option {\n\treturn func(opts *options) error {\n\t\topts.prefix = value\n\t\t// No error\n\t\treturn nil\n\t}\n}",
"func Prefix(l string) Option {\n\treturn func(c *Config) Option {\n\t\tprevious := c.Pfx\n\t\tc.Pfx = l\n\t\treturn Prefix(previous)\n\t}\n}",
"func Prefix(p string) Option {\n\treturn func(s *storage) {\n\t\ts.prefix = p\n\t}\n}",
"func Prefix(prefix string) Option {\n\treturn func(s *Store) {\n\t\ts.prefix = prefix\n\t}\n}",
"func WithPrefix(p string) OptionFunc {\n\treturn func(b *Bot) {\n\t\tb.conf.Prefix = p\n\t}\n}",
"func (o *SignalPersonName) UnsetPrefix() {\n\to.Prefix.Unset()\n}",
"func (store *RedisStore) SetKeyPrefix(p string) {\r\n\tstore.keyPrefix = p\r\n}",
"func CmdPrefix(ctx *system.Context) {\n\tgconfig := ctx.Get(\"gconfig\").(*models.Guild)\n\tSetString(ctx, gconfig, \"Prefix\", \"\", &gconfig.Prefix)\n}",
"func SetRootPrefix(prefix string) {\n\trootPrefix = prefix\n}",
"func (b UpdateBuilder) Prefix(sql string, args ...interface{}) UpdateCondition {\n\treturn builder.Append(b, \"Prefixes\", Expr(sql, args...)).(UpdateBuilder)\n}",
"func Prefix(prefix string) Option {\n\treturn func(o *options) {\n\t\to.prefix = prefix\n\t}\n}",
"func (c *Config) Prefix(prefix string) *Config {\n\tc.GetContext().Prefix = prefix\n\treturn c\n}",
"func (r *Routing) Prefix(prefix string, f func()) {\n\n\tdefer func() {\n\t\tr.routerWithPrefix = nil\n\t\tif len(r.prefixes) > 0 {\n\t\t\tr.prefixes = r.prefixes[:len(r.prefixes)-1]\n\t\t}\n\t}()\n\n\tif len(prefix) == 0 {\n\t\tpanic(\"Prefix(): the prefix can't be empty\")\n\t}\n\n\tr.prefixes = append(r.prefixes, prefix)\n\n\tvar mergePrefix = strings.Join(r.prefixes, \"/\")\n\n\tr.routerWithPrefix = r.Router.PathPrefix(fmt.Sprintf(\"/%s\", mergePrefix)).Subrouter().StrictSlash(true)\n\tf()\n\n}",
"func (c *FileSystemCache) Prefix(p ...string) Cache {\n\tc.prefix = p\n\treturn c\n}",
"func (s *RedisStore) SetKeyPrefix(p string) {\n\ts.keyPrefix = p\n}",
"func (r *Router) UsePrefix(prefix string) {\n\tr.prefix = prefix\n}",
"func (a *BaseAdapter) SetPathPrefix(prefix string) {\n\tif prefix == \"\" {\n\t\ta.pathPrefix = nil\n\t}\n\n\tp := fmt.Sprintf(\"%s%s\", prefix, string(os.PathSeparator))\n\ta.pathPrefix = &p\n}",
"func (c *HelpCountryCode) SetPrefixes(value []string) {\n\tc.Flags.Set(0)\n\tc.Prefixes = value\n}",
"func ResetPrefix(in chan int,\n\tout chan int,\n\treset chan int,\n\tinitial int) {\n\tvar v, r int = 0, 0\n\tout <- initial\n\tfor {\n\t\tselect {\n\t\tcase r = <-reset:\n\t\t\t<-in\n\t\t\tout <- r\n\t\tcase v = <-in:\n\t\t\tout <- v\n\t\t}\n\t}\n}",
"func WithPrefix(prefix string) Option {\n\treturn func(o *options) {\n\t\to.prefix = prefix\n\t}\n}",
"func (r *Request) Prefix(segments ...string) *Request {\n\tif r.err != nil {\n\t\treturn r\n\t}\n\n\tr.pathPrefix = path.Join(r.pathPrefix, path.Join(segments...))\n\n\treturn r\n}",
"func (ub *UpdateBuilder) Prefix(\n\tsql string,\n\targs ...interface{},\n) *UpdateBuilder {\n\tub.sql = ub.sql.Prefix(sql, args...)\n\treturn ub\n}",
"func WithPrefix(preffix string) opt {\n\tif !strings.HasSuffix(preffix, \"_\") {\n\t\tpreffix = preffix + \"_\"\n\t}\n\treturn func(key string) string {\n\t\treturn preffix + key\n\t}\n}",
"func InitWithPrefix(conf interface{}, prefix string) error {\n\treturn InitWithOptions(conf, Options{Prefix: prefix})\n}",
"func (tbl RecordTable) WithPrefix(pfx string) RecordTable {\n\ttbl.name.Prefix = pfx\n\treturn tbl\n}",
"func (n *Name) Prefix() string {\n\t// TODO handle gender\n\treturn n.pick(namePrefix + \"/prefix\")\n}",
"func WithPrefix(prefix string) OptFunc {\n\treturn func(l *Logger) {\n\t\tl.SetPrefix(prefix)\n\t}\n}",
"func WithPrefix(metricPrefix string) Option {\n\treturn func(ro *registerOptions) {\n\t\tro.metricPrefix = metricPrefix\n\t}\n}",
"func SetEnvironmentPrefix(envPrefix string) {\n\t_envPrefix = envPrefix\n}",
"func (_m *Plugin) InitPrefix(prefix config.Prefix) {\n\t_m.Called(prefix)\n}",
"func (p *Periph) StorePREFIX(n int, prefix uint32) {\n\tp.prefix[n].Store(prefix)\n}",
"func SwapPrefix(group, prefix, swap string) string {\n\tprefix = strings.ToLower(prefix)\n\tgroup = strings.ToLower(group)\n\tif strings.HasPrefix(group, prefix) {\n\t\ts := swap + strings.TrimPrefix(group, prefix)\n\t\treturn Format(s)\n\t}\n\treturn \"\"\n}",
"func (*WriteHandler) Prefix() string {\n\treturn prefixWrite\n}",
"func (o BucketReplicationConfigRuleFilterAndOutput) Prefix() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BucketReplicationConfigRuleFilterAnd) *string { return v.Prefix }).(pulumi.StringPtrOutput)\n}",
"func (s *IPSet) AddPrefix(p IPPrefix) { s.AddRange(p.Range()) }",
"func (s *IPSet) AddPrefix(p IPPrefix) { s.AddRange(p.Range()) }"
] | [
"0.87040925",
"0.86976516",
"0.8549345",
"0.8549345",
"0.8411652",
"0.8407237",
"0.8238906",
"0.8226233",
"0.8070257",
"0.8029888",
"0.8029888",
"0.7990705",
"0.78467417",
"0.7828523",
"0.7764234",
"0.7757115",
"0.7752778",
"0.77484006",
"0.77484006",
"0.7694885",
"0.7677132",
"0.7677132",
"0.7666065",
"0.7630391",
"0.76222384",
"0.76035",
"0.7600513",
"0.75939035",
"0.7578157",
"0.75475615",
"0.7526436",
"0.75164926",
"0.75119865",
"0.75097346",
"0.74857587",
"0.7471405",
"0.74125963",
"0.7405889",
"0.7377772",
"0.7299566",
"0.7284426",
"0.72769445",
"0.7266473",
"0.72397214",
"0.7225768",
"0.7217477",
"0.71704584",
"0.7134349",
"0.7125711",
"0.71071965",
"0.7072684",
"0.7052207",
"0.7049672",
"0.7035073",
"0.70126104",
"0.69083744",
"0.69003594",
"0.68851864",
"0.6871179",
"0.685546",
"0.68464404",
"0.6845697",
"0.67949873",
"0.67711824",
"0.67603064",
"0.6748755",
"0.67412764",
"0.6739231",
"0.6726792",
"0.6719072",
"0.66772455",
"0.66622335",
"0.66609085",
"0.66473943",
"0.66241544",
"0.6599982",
"0.6550509",
"0.6529965",
"0.6504455",
"0.64928067",
"0.6491554",
"0.64788336",
"0.64783925",
"0.64727885",
"0.644288",
"0.64287573",
"0.64119995",
"0.6387492",
"0.635566",
"0.6349087",
"0.6337364",
"0.6317385",
"0.6308216",
"0.6300212",
"0.6262184",
"0.62249404",
"0.62224287",
"0.6208421",
"0.6195518",
"0.6195518"
] | 0.7284788 | 40 |
Prefix of echo logrus | func (l *EchoLogrus) Prefix() string {
return l.prefix
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (logger *Logger) echo(w io.Writer, l level.Level, f string, a ...any) {\n\t// Lock the log object for change.\n\tlogger.mu.RLock()\n\tdefer logger.mu.RUnlock()\n\n\t// Get the stack frame.\n\tsf := getStackFrame(logger.skipStackFrames)\n\n\t// If an additional value is set for the output (writer),\n\t// use it with the default settings.\n\toutputs := logger.outputs\n\tif w != nil {\n\t\toutput := Default\n\t\toutput.Writer = w\n\t\toutput.isSystem = true\n\t\toutputs[\"*\"] = &output // this name can be used for system names\n\t}\n\n\t// Output message.\n\tfor _, o := range logger.outputs {\n\t\tvar msg string\n\t\thas, err := o.Levels.Contains(l)\n\t\tif !has || err != nil || !o.Enabled.IsTrue() {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Hide or show the prefix.\n\t\tprefix := logger.prefix\n\t\tif !o.WithPrefix.IsTrue() {\n\t\t\tprefix = \"\"\n\t\t}\n\n\t\t// Text or JSON representation of the message.\n\t\tif o.TextStyle.IsTrue() {\n\t\t\tmsg = textMessage(prefix, l, time.Now(), o, sf, f, a...)\n\t\t} else {\n\t\t\tmsg = objectMessage(prefix, l, time.Now(), o, sf, f, a...)\n\t\t}\n\n\t\t// Print message.\n\t\tfmt.Fprint(o.Writer, msg)\n\t}\n}",
"func loggerPrefix(code int, s string) string {\n\tt := time.Now().Format(\"15:04:05 PM\")\n\tif isatty.IsTerminal(os.Stdout.Fd()) {\n\t\treturn fmt.Sprintf(\"\\033[38;5;%dm%s %s | \\033[0m\", code, t, s)\n\t}\n\treturn fmt.Sprintf(\"%s %s | \", t, s)\n}",
"func SetEchoPrefix(prefix string) {\n\techoPrefix = prefix\n}",
"func WithPrefix(l *logger, msg string, info string) {\n\tl.SetPrefix(time.Now().Format(\"2006-01-02 15:04:05 \"))\n\tl.Print(info + \" \" + msg)\n}",
"func (c *BaseClient) Echo(s string) string {\n\tc.Logf(\"Echo: '%v'\", s)\n\treturn s\n}",
"func TestEcho(t *testing.T) {\n\t// Create a new logger.\n\tlogger := New(\"TEST-PREFIX:\")\n\n\t// Classical test.\n\tr, w, _ := os.Pipe()\n\terr := logger.SetOutputs(Output{\n\t\tName: \"test\",\n\t\tWriter: w,\n\t\tLevels: level.Default,\n\t})\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tlogger.echo(nil, level.Debug, \"test %s\", \"message\")\n\toutC := make(chan string)\n\tgo ioCopy(r, outC)\n\tw.Close()\n\tout := <-outC\n\n\tif !strings.Contains(out, \"test message\") {\n\t\tt.Errorf(\"echo did not write the correct TEXT message: %s\", out)\n\t}\n\n\t// As JSON.\n\tr, w, _ = os.Pipe()\n\tlogger.SetOutputs(Output{\n\t\tName: \"test\",\n\t\tWriter: w,\n\t\tLevels: level.Default,\n\t\tWithPrefix: trit.False,\n\t})\n\n\tlogger.echo(nil, level.Debug, \"test %s\", \"message\")\n\toutC = make(chan string)\n\tgo ioCopy(r, outC)\n\tw.Close()\n\tout = <-outC\n\n\tif strings.Contains(out, \"TEST-PREFIX\") {\n\t\tt.Errorf(\"the prefix should not appear in this test: %s\", out)\n\t}\n\n\t// As JSON.\n\tr, w, _ = os.Pipe()\n\tlogger.SetOutputs(Output{\n\t\tName: \"test\",\n\t\tWriter: w,\n\t\tLevels: level.Default,\n\t\tTextStyle: trit.False,\n\t})\n\n\tlogger.echo(nil, level.Debug, \"test %s\", \"message\")\n\toutC = make(chan string)\n\tgo ioCopy(r, outC)\n\tw.Close()\n\tout = <-outC\n\n\tif !strings.Contains(out, \"\\\"level\\\":\\\"DEBUG\\\"\") {\n\t\tt.Errorf(\"echo did not write the correct JSON message: %s\", out)\n\t}\n\n\t// Disabled.\n\tr, w, _ = os.Pipe()\n\tlogger.SetOutputs(Output{\n\t\tName: \"test\",\n\t\tWriter: w,\n\t\tEnabled: trit.False,\n\t})\n\n\tlogger.echo(nil, level.Debug, \"test %s\", \"message\")\n\toutC = make(chan string)\n\tgo ioCopy(r, outC)\n\tw.Close()\n\tout = <-outC\n\n\tif len(out) != 0 {\n\t\tt.Errorf(\"should not write anything: %s\", out)\n\t}\n}",
"func msgPrefix() string {\n\treturn fmt.Sprintf(\"glog: %s: \", time.Now().Format(\"15:04:05.00000\"))\n}",
"func (l *EchoLogrus) SetPrefix(prefix string) {\n\tl.prefix = prefix\n}",
"func SetPrefix(prefix string) {\n\tgolog.SetPrefix(prefix)\n}",
"func logging (prefix string, logMsg string) {\n\n\tf, err := os.OpenFile(Conf.Files.LogFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tdefer f.Close()\n\n\tlogger := log.New(f, prefix, log.LstdFlags)\n\tlogger.Println(logMsg)\n}",
"func logFrom(method, msg string) {\n\tlog.Printf(\"Sender - %s - %s\", msg, method)\n}",
"func (l *Logger) Prefix() string {\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\treturn l.prefix\n}",
"func (l *Logger) Prefix() string {\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\treturn l.prefix\n}",
"func LogPrefix() string {\n\tmuLogt.Lock()\n\tdefer muLogt.Unlock()\n\treturn logt.Prefix()\n}",
"func Logrus() echo.MiddlewareFunc {\n\treturn LogrusDefaultConfig(DefaultLoggerConfig)\n}",
"func (l *Log) Prefix() (res string) {\n\tres = string(l.prefix)\n\treturn\n}",
"func (ctx *PlikContext) UpdateLoggerPrefix(prefix string) {\n\tstr := \"\"\n\tif ip, ok := ctx.Get(\"RemoteIp\"); ok {\n\t\tstr += fmt.Sprintf(\"[%s]\", ip)\n\t}\n\tif uploadID, ok := ctx.Get(\"UploadId\"); ok {\n\t\tstr += fmt.Sprintf(\"[%s]\", uploadID)\n\t}\n\tif fileName, ok := ctx.Get(\"FileName\"); ok {\n\t\tstr += fmt.Sprintf(\"[%s]\", fileName)\n\t}\n\tctx.SetPrefix(str + prefix)\n}",
"func (logger *Logger) Prefix() string {\n\tlogger.mu.RLock()\n\tdefer logger.mu.RUnlock()\n\treturn logger.prefix\n}",
"func (l *Logger) lprintln(lv Level, v ...interface{}) { _ = l.Output(lv, 4, fmt.Sprintln(v...)) }",
"func Prefix(target Logger, f string, v ...interface{}) Logger {\n\tp := fmt.Sprintf(f, v...)\n\n\treturn prefixer{\n\t\ttarget,\n\t\tp,\n\t\tstrings.ReplaceAll(p, \"%\", \"%%\"),\n\t}\n}",
"func SuperLog(text string) {\n\tlog.Printf(\"This is super log: %s\", text)\n}",
"func (handler *ConsoleLogHandler) Name() string {\r\n return \"console\"\r\n}",
"func WithPrefix(prefix string) OptFunc {\n\treturn func(l *Logger) {\n\t\tl.SetPrefix(prefix)\n\t}\n}",
"func LogSetPrefix(prefix string) {\n\tmuLogt.Lock()\n\tdefer muLogt.Unlock()\n\tlogt.SetPrefix(prefix)\n}",
"func T(message ...interface{}) {\n\tif LOG_TRACE <= LOG_LEVEL {\n\t\tfmt.Println(message...)\n\t}\n}",
"func (logger *NLog) Raw(format string, v ...interface{}) {\n\tlogger.rwm.RLock()\n\tdefer logger.rwm.RUnlock()\n\n\tlogger.raw.Output(3, fmt.Sprintln(fmt.Sprintf(format, v...)))\n}",
"func LogMsgs() {\n\tlogrus.Debug(\"Sample debug 1.\")\n\tlogrus.WithFields(logrus.Fields{\"name\": CallerName(1), \"a\": \"b\", \"c\": 10}).Debug(\"Sample debug 2.\")\n\tlogrus.Info(\"Sample info 1.\")\n\tlogrus.WithFields(logrus.Fields{\"name\": CallerName(1), \"a\": \"b\", \"c\": 10}).Info(\"Sample info 2.\")\n\tlogrus.Warn(\"Sample warn 1.\")\n\tlogrus.WithFields(logrus.Fields{\"name\": CallerName(1), \"a\": \"b\", \"c\": 10}).Warn(\"Sample warn 2.\")\n\tlogrus.Error(\"Sample error 1.\")\n\tlogrus.WithFields(logrus.Fields{\"name\": CallerName(1), \"a\": \"b\", \"c\": 10}).Error(\"Sample error 2.\")\n}",
"func generateCloudWatchLogStreamPrefix(context context.T, commandID string) (string, error) {\n\n\tinstanceID, err := context.Identity().ShortInstanceID()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn fmt.Sprintf(\"%s/%s\", commandID, instanceID), nil\n}",
"func (logProxy *loggerProxy)Trace(msgfmt string, args ...interface{}) {\n var ch loggerProxyChannel\n ch.fnPtr = logProxy.logObj.Trace\n ch.msg = logProxy.appendlog(msgfmt, args...)\n logProxy.logChannel <- ch\n}",
"func QuietLog(string, ...interface{}) {\n}",
"func Log(fmt string, args ...interface{}) {}",
"func logSetup() {\n\n\tlog.Printf(\"Server will run on: %s\\n\", getListenAddress())\n}",
"func LogMsgs() {\n\tlogger := logrus.WithField(\"name\", lcf.CallerName(1))\n\tlogger.Debug(\"Sample debug 1.\")\n\tlogger.WithFields(logrus.Fields{\"a\": \"b\", \"c\": 10}).Debug(\"Sample debug 2.\")\n\tlogger.Info(\"Sample info 1.\")\n\tlogger.WithFields(logrus.Fields{\"a\": \"b\", \"c\": 10}).Info(\"Sample info 2.\")\n\tlogger.Warn(\"Sample warn 1.\")\n\tlogger.WithFields(logrus.Fields{\"a\": \"b\", \"c\": 10}).Warn(\"Sample warn 2.\")\n\tlogger.Error(\"Sample error 1.\")\n\tlogger.WithFields(logrus.Fields{\"a\": \"b\", \"c\": 10}).Error(\"Sample error 2.\")\n}",
"func ConsoleLogger() mux.MiddlewareFunc {\n return FormatLogger( log.Printf )\n}",
"func Startf(format string, args ...interface{}) { logRaw(LevelStart, 2, format, args...) }",
"func (pl ProdLogger) Println(args ...interface{}) {\n\n}",
"func (s *session) log(info ...interface{}) {\n\tpreamble := fmt.Sprintf(\"IMAP (%s) \", s.id)\n\tmessage := []interface{}{preamble}\n\tmessage = append(message, info...)\n\tlog.Print(message...)\n}",
"func (l *Logger) lprint(lv Level, v ...interface{}) { _ = l.Output(lv, 4, fmt.Sprint(v...)) }",
"func (l *LvlStruct) Prefix() string {\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\treturn l.log.Prefix()\n}",
"func (c Clients) Echo(ctx context.Context, msg string) (string, error) {\n\treq := newRequest(\"*2\\r\\n$4\\r\\nECHO\\r\\n$\")\n\treq.addString(msg)\n\treturn c.c.cmdString(ctx, req)\n}",
"func (l *Logger) Print(v ...interface{}) { l.Output(2, fmt.Sprint(v...)) }",
"func echo(client *rpc2.Client, args []interface{}, reply *[]interface{}) error {\n\t*reply = args\n\tconnectionsMutex.RLock()\n\tdefer connectionsMutex.RUnlock()\n\tif _, ok := connections[client]; ok {\n\t\tconnections[client].handlersMutex.Lock()\n\t\tdefer connections[client].handlersMutex.Unlock()\n\t\tfor _, handler := range connections[client].handlers {\n\t\t\thandler.Echo(nil)\n\t\t}\n\t}\n\treturn nil\n}",
"func (c *T) Log(args ...interface{})",
"func (*WriteHandler) Prefix() string {\n\treturn prefixWrite\n}",
"func logSetup() {\n\tlog.Printf(\"Server will run on: %s\\n\", getListenAddress())\n}",
"func EthloggerPrint(content string) {\n\tlog.Printf(\"[Eth handler] %s\\n\", content)\n}",
"func (n *nopLogger) Printf(format string, v ...interface{}) {}",
"func Raw(format string, args ...interface{}) {\n\tlock.Lock()\n\tdefer lock.Unlock()\n\n\tfor _, l := range loggers {\n\t\tcurrMessage := fmt.Sprintf(format, args...)\n\t\tl.emit(currMessage)\n\t}\n}",
"func (l *logger) String() string {\n\tif l == nil {\n\t\treturn \"(nil)\"\n\t}\n\treturn l.prefix\n}",
"func (o BucketLoggingResponseOutput) LogObjectPrefix() pulumi.StringOutput {\n\treturn o.ApplyT(func(v BucketLoggingResponse) string { return v.LogObjectPrefix }).(pulumi.StringOutput)\n}",
"func (lg *Logger) Debug(args ...interface{}) {\n if lg.level <= DEBUG {\n lg.logger.SetPrefix(LEVELS[DEBUG])\n lg.logger.Println(args...)\n }\n}",
"func (s *server) Echo(ctx context.Context, in *pb.StringMessage) (*pb.StringMessage, error) {\n\tlog.Printf(\"Echo: Received '%v'\", in.Value)\n\treturn &pb.StringMessage{Value: in.Value}, nil\n}",
"func (lg *Logger) Info(args ...interface{}) {\n if lg.level <= INFO {\n lg.logger.SetPrefix(LEVELS[INFO])\n lg.logger.Println(args...)\n }\n}",
"func ulog(format string, a ...interface{}) {\n\tp := fmt.Sprintf(format, a...)\n\tlog.Print(p)\n\tif Uhura.DebugToScreen {\n\t\tfmt.Print(p)\n\t}\n}",
"func Log(v ...interface{}) {\n\tlog.Output(2, prefix+fmt.Sprint(v...))\n}",
"func (EchoHandler) Name() string {\n\treturn NameEcho\n}",
"func doRawLog(msg string) {\n\tif !logger.Initialized {\n\t\tInit() // Stdout by default\n\t}\n\tif _, err := logger.Writer.Write([]byte(msg)); err != nil {\n\t\tpanic(\"Cannot write to log output\")\n\t}\n}",
"func (logProxy *loggerProxy)Info(msgfmt string, args ...interface{}) {\n var ch loggerProxyChannel\n ch.fnPtr = logProxy.logObj.Info\n ch.msg = logProxy.appendlog(msgfmt, args...)\n logProxy.logChannel <- ch\n}",
"func logging() {\n\tfmt.Println(\"Selesai memanggil function\")\n\tfmt.Println(\"\")\n}",
"func loggerApp(w http.ResponseWriter, r *http.Request) {\n\tfmt.Fprintf(w, \"%v\\n\", Trans())\n}",
"func (self *GameHeart) Logs(msg *HeartMessageType) {\n\n}",
"func SetPrefix(prefix string) string {\n\tdefer logger.SetPrefix(prefix)\n\told := tags[0]\n\ttags[0] = prefix\n\treturn old\n}",
"func (l *Logger) log(level int64, v string) { l.doMsg(level, v) }",
"func (l *Logger) Println(v ...interface{}) { l.Output(2, fmt.Sprintln(v...)) }",
"func eprint(err error) {\n\tfmt.Println(DHT_PREFIX, err.Error())\n}",
"func log(args ...Any) {\n\tfmt.Println(args...)\n}",
"func (o BucketV2LoggingOutput) TargetPrefix() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BucketV2Logging) *string { return v.TargetPrefix }).(pulumi.StringPtrOutput)\n}",
"func echoServer() error {\n\tlistener, err := quic.ListenAddr(addr, generateTLSConfig(), nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\tsess, err := listener.Accept()\n\tif err != nil {\n\t\treturn err\n\t}\n\tstream, err := sess.AcceptStream()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\t// Echo through the loggingWriter\n\t_, err = io.Copy(loggingWriter{stream}, stream)\n\treturn err\n}",
"func logRaw(logLevel Level, stackDepth int, format string, args ...interface{}) {\n\n\tLevelNames := []string{\n\t\t\"Inval\",\n\t\t\" All\",\n\t\t\"Debug\",\n\t\t\" Info\",\n\t\t\"Start\",\n\t\t\" Exit\",\n\t\t\" Warn\",\n\t\t\"Error\",\n\t\t\" None\",\n\t}\n\n\tif logLevel < LogLevel() {\n\t\treturn\n\t}\n\tif logLevel < LevelDebug || logLevel > LevelError {\n\t\tlogLevel = LevelError\n\t}\n\n\tvar dirname string\n\t_, filename, linenumber, _ := runtime.Caller(stackDepth)\n\tdirname, filename = path.Split(filename)\n\tdirname = path.Base(dirname)\n\ti := len(filename)\n\tif i > 26 {\n\t\ti = 26\n\t}\n\tfilename = dirname + \"/\" + filename[:i]\n\n\titemTime := time.Now()\n\tif itemTime.After(logRotationTime) {\n\t\trotateLogFile()\n\t}\n\n\tvar message = fmt.Sprintf(format, args...)\n\tmessage = strings.Replace(message, \"\\n\", \"|\", -1)\n\tmessage = strings.Replace(message, \"\\r\", \"|\", -1)\n\tmessage = fmt.Sprintf(\n\t\t\"%s %26s:%-4d %s: %s\\n\",\n\t\titemTime.Format(time.RFC3339),\n\t\tfilename,\n\t\tlinenumber,\n\t\tLevelNames[logLevel],\n\t\tmessage,\n\t)\n\tfmt.Fprint(logWriter, message)\n\tif TeeStderr() {\n\t\tfmt.Fprint(os.Stderr, message)\n\t}\n}",
"func (rw *RemoteTimeWriter) EnablePrefix(hasAppNamePrefix bool) {\n\trw.logInfo.BHasAppNamePrefix = hasAppNamePrefix\n}",
"func (logger *DiscardLogger) PrintCommand(sessionID string, command string, params string) {}",
"func (o BucketLoggingOutput) TargetPrefix() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BucketLogging) *string { return v.TargetPrefix }).(pulumi.StringPtrOutput)\n}",
"func init() {\n\tlog.SetFlags(log.LstdFlags | log.Lmicroseconds | log.Lshortfile)\n\tlog.SetOutput(os.Stdout)\n}",
"func LogSetup(server config.Server) {\n\tforwardHost := utils.IfEmpty(server.Upstream.Host, \"*\")\n\tforwardProto := server.Upstream.Scheme\n\n\tlbEndpointList := fmt.Sprintf(\"%v\", server.Upstream.Endpoints)\n\tif len(server.Upstream.Endpoints) == 0 {\n\t\tlbEndpointList = \"VOID\"\n\t}\n\n\tlogrus.Infof(\"Server will run on :%s and :%s and redirects to url: %s://%s -> %s\\n\", server.Port.HTTP, server.Port.HTTPS, forwardProto, forwardHost, lbEndpointList)\n}",
"func (o BucketLoggingOutput) LogObjectPrefix() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BucketLogging) *string { return v.LogObjectPrefix }).(pulumi.StringPtrOutput)\n}",
"func (o BucketLoggingOutput) LogObjectPrefix() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BucketLogging) *string { return v.LogObjectPrefix }).(pulumi.StringPtrOutput)\n}",
"func CmdLogMixin(cmd *cobra.Command) *cobra.Command {\n\tdev := cmd.PersistentFlags().Bool(\"development\", terminal.IsTerminal(int(os.Stdout.Fd())), \"format output for console\")\n\tv := cmd.PersistentFlags().Int8P(\"verbose\", \"v\", 0, \"verbosity level\")\n\n\tsetupLogger := func() {\n\t\tZapLogger = corezap.NewRaw(func(options *corezap.Options) {\n\t\t\tlevel := zap.NewAtomicLevelAt(zapcore.Level(-*v))\n\t\t\toptions.Level = &level\n\t\t\toptions.Development = *dev\n\t\t})\n\t\tctrl.SetLogger(zapr.NewLogger(ZapLogger))\n\t}\n\n\tif cmd.PersistentPreRunE != nil {\n\t\tparent := cmd.PersistentPreRunE\n\t\tcmd.PersistentPreRunE = func(c *cobra.Command, args []string) error {\n\t\t\tsetupLogger()\n\t\t\treturn parent(c, args)\n\t\t}\n\t\treturn cmd\n\t}\n\n\tparent := cmd.PersistentPreRun\n\tcmd.PersistentPreRun = func(c *cobra.Command, args []string) {\n\t\tsetupLogger()\n\t\tif parent != nil {\n\t\t\tparent(c, args)\n\t\t}\n\t}\n\treturn cmd\n}",
"func fmtLogger(msg string, args ...interface{}) {\n\tfmt.Printf(msg, args...)\n\tfmt.Println()\n}",
"func (l prefixer) Log(f string, v ...interface{}) {\n\tLog(\n\t\tl.Target,\n\t\tl.FormatSpecifierPrefix+f,\n\t\tv...,\n\t)\n}",
"func (l *LevelLog) Fatalln(v ...interface{}) {\n\tl.logger.SetPrefix(\"FATAL: \")\n\tl.logger.Println(v...)\n}",
"func (l *Logger) Print(v ...interface{}) { l.lprint(INFO, v...) }",
"func (*Logger) DiscordGoLogf(_, _ int, _ string, _ ...interface{}) {\n\t// Nop\n}",
"func echo(x *string) {\n\tprintln(x)\n}",
"func (l *Logger) DebugPrefix() string {\n\treturn l.debugPrefix\n}",
"func (l *LvlStruct) SetPrefix(prefix string) {\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\tl.log.SetPrefix(prefix)\n}",
"func logger(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp interface{}, err error) {\n\tlog.Printf(\"---> Unary interceptor: %v\\n\", info.FullMethod)\n\treturn handler(ctx, req)\n}",
"func LogP(logLvl int, prefix string, v ...interface{}) {\n\tif ErrLogLevel >= logLvl && loggerErr != nil {\n\t\tloggerErr.Output(3, prefix+LogPrefix[logLvl]+fmt.Sprint(v...)+postFix)\n\t} else if LogLevel >= logLvl && logger != nil {\n\t\tlogger.Output(3, prefix+LogPrefix[logLvl]+fmt.Sprint(v...)+postFix)\n\t}\n}",
"func (s *Server) echo(writer http.ResponseWriter, request *http.Request) {\r\n\twriter.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\r\n\twriter.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Range, Content-Disposition, Content-Type, ETag\")\r\n\r\n\t// 30% chance of failure\r\n\tif rand.Intn(100) < 30 {\r\n\t\ts.logger.Error(\"Unlucky Request\")\r\n\t\ts.client.Count(\"Request_failed.counter\", 1, []string{\"env:production\", \"partition:1\", \"partition:2\"}, 1)\r\n\t\twriter.WriteHeader(500)\r\n\t\twriter.Write([]byte(\"a chaos monkey broke your server\"))\r\n\t\treturn\r\n\t}\r\n\r\n\t// Happy path\r\n\ts.client.Count(\"Request_success.counter\", 1, []string{\"env:production\", \"partition:1\", \"partition:2\"}, 1)\r\n\twriter.WriteHeader(200)\r\n\trequest.Write(writer)\r\n}",
"func (c *Client) debug(str string, args ...interface{}) {\n\tif c.level >= 2 {\n\t\tc.log.Printf(str, args...)\n\t}\n}",
"func print(args ...interface{}) {\n\tok := logLevel <= 1\n\n\tif ok {\n\t\tprintf(\"%s\", fmt.Sprint(args...))\n\t}\n}",
"func Print(v ...interface{}) {\n\tlog.Output(2, prefix+fmt.Sprint(v...))\n}",
"func LogNack(\n\tlog logging.Logger,\n\tenv *envelopespec.Envelope,\n\tcause error,\n\tdelay time.Duration,\n) {\n\tlogging.LogString(\n\t\tlog,\n\t\tString(\n\t\t\t[]IconWithLabel{\n\t\t\t\tMessageIDIcon.WithID(env.GetMessageId()),\n\t\t\t\tCausationIDIcon.WithID(env.GetCausationId()),\n\t\t\t\tCorrelationIDIcon.WithID(env.GetCorrelationId()),\n\t\t\t},\n\t\t\t[]Icon{\n\t\t\t\tConsumeErrorIcon,\n\t\t\t\tErrorIcon,\n\t\t\t},\n\t\t\tenv.GetPortableName(),\n\t\t\tcause.Error(),\n\t\t\tfmt.Sprintf(\"next retry in %s\", delay),\n\t\t),\n\t)\n}",
"func logPrintf(r *http.Request, fmtstr string, varargs ...interface{}) {\n\tpayload := fmt.Sprintf(fmtstr, varargs...)\n\tprefix := fmt.Sprintf(\"ip:%s\", r.Header.Get(\"x-appengine-user-ip\"))\n\tlog.Printf(\"%s %s\", prefix, payload)\n}",
"func echo (){\n\tfmt.Println(strings.Join(os.Args [1:], \" \"))\n}",
"func responseLogger(handler http.HandlerFunc) http.HandlerFunc {\n\treturn func (w http.ResponseWriter, r *http.Request){\n\t\tlog.Printf(\"\\n%s %s%s %s\",r.Method, r.Host, r.RequestURI, r.Proto )\n\t\thandler(w,r)\n\t}\n}",
"func addPrefixes(format string, ctx context.Context, logLevel LogLevel, logKey LogKey) string {\n\ttimestampPrefix := time.Now().Format(ISO8601Format) + \" \"\n\n\tvar logLevelPrefix string\n\tif logLevel > LevelNone {\n\t\tlogLevelPrefix = \"[\" + logLevel.StringShort() + \"] \"\n\t}\n\n\tvar logKeyPrefix string\n\tif logKey > KeyNone && logKey != KeyAll {\n\t\tlogKeyName := logKey.String()\n\t\t// Append \"+\" to logKeys at debug level (for backwards compatibility)\n\t\tif logLevel >= LevelDebug {\n\t\t\tlogKeyName += \"+\"\n\t\t}\n\t\tlogKeyPrefix = logKeyName + \": \"\n\t}\n\n\tif ctx != nil {\n\t\tif logCtx, ok := ctx.Value(LogContextKey{}).(LogContext); ok {\n\t\t\tformat = logCtx.addContext(format)\n\t\t}\n\t}\n\n\treturn timestampPrefix + logLevelPrefix + logKeyPrefix + format\n}",
"func setupLogger() {\n\tsl := logrus.New()\n\tsrvLog = sl.WithField(\"context\", \"server\")\n}",
"func (lg *Logger) Notice(args ...interface{}) {\n if lg.level <= NOTICE {\n lg.logger.SetPrefix(LEVELS[NOTICE])\n lg.logger.Println(args...)\n }\n}",
"func (t t) Log(args ...interface{}) {\n\tfmt.Println(args...)\n}",
"func logf(level string, format string, args ...interface{}) {\n\tfmt.Fprintf(ginkgo.GinkgoWriter, nowStamp()+\": \"+level+\": \"+format+\"\\n\", args...)\n}"
] | [
"0.65048844",
"0.6139849",
"0.59801805",
"0.596994",
"0.568581",
"0.5660072",
"0.5650996",
"0.5524819",
"0.5521323",
"0.5493837",
"0.5487513",
"0.54677296",
"0.54677296",
"0.545661",
"0.54110783",
"0.5346564",
"0.5343675",
"0.5329851",
"0.5328043",
"0.5319607",
"0.5315472",
"0.5306001",
"0.5288565",
"0.5286874",
"0.5268245",
"0.5266619",
"0.52665323",
"0.5264774",
"0.5258224",
"0.5256523",
"0.5241243",
"0.5224689",
"0.52139753",
"0.5213685",
"0.5205491",
"0.5202469",
"0.52006906",
"0.51847863",
"0.518112",
"0.51759887",
"0.5163298",
"0.51614255",
"0.5149628",
"0.51466393",
"0.51417613",
"0.51398313",
"0.5137523",
"0.51088256",
"0.50976306",
"0.509656",
"0.50847405",
"0.5079068",
"0.50706536",
"0.50696385",
"0.50558597",
"0.50447965",
"0.50422883",
"0.50422364",
"0.50357914",
"0.5035142",
"0.50314796",
"0.5031021",
"0.5007618",
"0.50061667",
"0.5004028",
"0.49928507",
"0.49905485",
"0.49835837",
"0.49719438",
"0.49693492",
"0.49557886",
"0.49472314",
"0.49411726",
"0.4938813",
"0.49321",
"0.49321",
"0.49274182",
"0.4926271",
"0.4925302",
"0.49112916",
"0.49093226",
"0.49087209",
"0.4907978",
"0.49073365",
"0.49058104",
"0.49051884",
"0.49022403",
"0.49021193",
"0.49020043",
"0.49008054",
"0.48972744",
"0.48956528",
"0.4895607",
"0.48909372",
"0.48899457",
"0.48890606",
"0.48890185",
"0.48884282",
"0.48880604",
"0.48814842"
] | 0.6448333 | 1 |
SetLevel set level to logger from given log.Lvl | func (l *EchoLogrus) SetLevel(lvl log.Lvl) {
switch lvl {
case log.DEBUG:
l.Logger.SetLevel(logrus.DebugLevel)
case log.WARN:
l.Logger.SetLevel(logrus.WarnLevel)
case log.ERROR:
l.Logger.SetLevel(logrus.ErrorLevel)
case log.INFO:
l.Logger.SetLevel(logrus.InfoLevel)
default:
logrus.Warnf("Unknown level: %v", lvl)
l.Logger.SetLevel(logrus.WarnLevel)
}
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func SetLevel(l string) {\n\tswitch l {\n\tcase \"verbose\":\n\t\tlvl = ver\n\tcase \"debug\":\n\t\tlvl = deb\n\tcase \"info\":\n\t\tlvl = inf\n\tcase \"warning\":\n\t\tlvl = war\n\tcase \"error\":\n\t\tlvl = err\n\tcase \"fatal\":\n\t\tlvl = fat\n\tcase \"off\":\n\t\tlvl = off\n\tdefault:\n\t\tFatalf(\"Invalid logging level '%s'!\", l)\n\t}\n}",
"func SetLogLevel(l int) {\n level = l\n}",
"func SetLevel(l LogLevel) {\n\tlogger.level = l\n}",
"func SetLevel(lvl string) {\n\tlevel, err := logrus.ParseLevel(lvl)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tlog.Level = level\n}",
"func SetLevel(v string) {\n\tlog.SetLevel(v)\n}",
"func SetLevel(l zapcore.Level) {\n\tLogger.atom.SetLevel(l)\n}",
"func SetLevel(lvl Level) {\n\tdefaultLogger.SetLevel(lvl)\n}",
"func SetLogLevel(l string) {\n\tif l == \"\" {\n\t\tl = \"info\"\n\t}\n\t// fmt.Fprintln(os.Stderr, \"setting log level\", l)\n\tlvl := logLevels.Info\n\tfor i := range LevelSpecs {\n\t\tif LevelSpecs[i].Name[:1] == l[:1] {\n\t\t\tlvl = LevelSpecs[i].ID\n\t\t}\n\t}\n\tcurrentLevel.Store(lvl)\n}",
"func SetLevel(lvl string) {\n\tatom.SetLevel(getLoggerLevel(lvl))\n}",
"func setLogLevel(l *logger.Logger, level int) {\n\tswitch level {\n\tcase Error:\n\t\tl.SetLevel(logger.ErrorLevel)\n\tcase Warn:\n\t\tl.SetLevel(logger.WarnLevel)\n\tcase Info:\n\t\tl.SetLevel(logger.InfoLevel)\n\tdefault:\n\t\tl.SetLevel(logger.DebugLevel)\n\t}\n}",
"func SetLogLevel(level int) {\n\tif level > len(logrus.AllLevels)-1 {\n\t\tlevel = len(logrus.AllLevels) - 1\n\t} else if level < 0 {\n\t\tlevel = 0\n\t}\n\tlogrus.SetLevel(logrus.AllLevels[level])\n}",
"func (l *Logger) SetLevel(lev Level) {\n\tif lev != l.lev {\n\t\tl.mu.Lock()\n\t\tl.lev = lev\n\t\tl.mu.Unlock()\n\t}\n}",
"func SetLevel(l int) {\n\tlevel = l\n}",
"func SetLevel(l int) {\n\tlevel = l\n}",
"func (lw *LogWriter) SetLevel(l Level) {\n\tlw.lvl = l\n}",
"func SetLogLevel(lvl Level) {\n\tLog.Level = lvl\n}",
"func (r *Factory) SetLevel(name string, l Level) {\n\tr.Lock()\n\tdefer r.Unlock()\n\tr.setLevel(name, l)\n\tr.refreshLoggers()\n}",
"func SetLevel(l int) {\n\tlock.Lock()\n\tdefer lock.Unlock()\n\n\tlevel = l\n}",
"func (l *XORMLogBridge) SetLevel(lvl core.LogLevel) {\n}",
"func SetLogLevel(lvl logrus.Level) {\n\tLog.Level = lvl\n}",
"func SetLogLevel(l string) {\n\tlogger.Level = parseLogLevel(l)\n}",
"func SetLevel(lev Level) {\n\tstdLogger.SetLevel(lev)\n}",
"func SetLogLevel(level Level) {\n\tmutex.Lock()\n\tdefer mutex.Unlock()\n\tlogLevel = level\n}",
"func SetLogLevel(l int) {\n\tif l < LevelError {\n\t\tlevel = LevelError\n\t\tlogTag(\"WRN\", Mixer, \"Log Level '%d' too low, forcing to %s (%d)\", l, levelMap[level], level)\n\t} else if l > LevelVerbose {\n\t\tlevel = LevelVerbose\n\t\tlogTag(\"WRN\", Mixer, \"Log Level '%d' too high, forcing to %s (%d)\", l, levelMap[level], level)\n\t} else {\n\t\tlevel = l\n\t\tDebug(Mixer, \"Log Level set to %s (%d)\", levelMap[level], l)\n\t}\n}",
"func SetLogLevel(newLevel int32) error {\n\tif newLevel < 0 || newLevel > 4 {\n\t\treturn errWrongLevel\n\t}\n\tatomic.StoreInt32(&logLevel, newLevel)\n\treturn nil\n}",
"func SetLevel(level loggers.Level) {\n\tGetLogger().SetLevel(level)\n}",
"func SetLevel(l Level) {\n\tlevel = l\n}",
"func SetLevel(l Level) {\n\tlevel = l\n}",
"func SetLevel(lvs ...Level) {\n\tl.SetLevel(lvs...)\n}",
"func SetLoglevel(level int) {\n\tloglevel = level\n}",
"func (l *Logger) SetLevel(lvl Level) {\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\tl.Level = lvl\n}",
"func SetLevel(lvl *Level) {\n\tif err := wrappedLogger.level.UnmarshalText([]byte(lvl.String())); err != nil {\n\t\twrappedLogger.zap.Warn(fmt.Sprintf(\"error setting lot level: %s\", err))\n\t}\n}",
"func SetLevel(level Level) {\n\tlog.setLevel(level)\n}",
"func (hnd *Handlers) SetLogLevel() http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tvalue := bone.GetValue(r, \"level\")\n\t\tif value == \"\" {\n\t\t\thnd.writeErrorResponse(w, \"must supply a level between 0 and 5\")\n\t\t\treturn\n\t\t}\n\n\t\tlevel, err := strconv.Atoi(value)\n\t\tif err != nil {\n\t\t\tlog.Warn(\"attempt to set log level to invalid value: %s, ignored...\", level)\n\t\t\thnd.writeErrorResponse(w, err.Error())\n\t\t\treturn\n\t\t}\n\n\t\tif level < 0 {\n\t\t\tlevel = 0\n\t\t}\n\n\t\tif level > 5 {\n\t\t\tlevel = 5\n\t\t}\n\n\t\tlog.SetLevel(level)\n\n\t\tfmt.Fprintf(w, \"{\\\"%s\\\":\\\"%d\\\"}\\n\\r\", \"loglevel\", log.GetLevel())\n\t}\n}",
"func (bot *Settings) LogLvl(Lvl log.Lvl) {\n\tlogHandler := log.LvlFilterHandler(Lvl, log.StdoutHandler)\n\tbot.irc.Logger.SetHandler(logHandler)\n}",
"func SetLevel(level Level) {\n\tloggingLevel = level\n}",
"func SetLogLevel(level log.LogLevel) {\n\tlogger.Level = level\n}",
"func WithLevel(l Level, v ...interface{}) {\n\tif l > DefaultLevel {\n\t\treturn\n\t}\n\tlog(v...)\n}",
"func (l *Logger) SetLevel(v string) {\n\tswitch v {\n\tcase \"debug\", \"DEBUG\":\n\t\tl.Level = logrus.DebugLevel\n\tcase \"info\", \"INFO\":\n\t\tl.Level = logrus.InfoLevel\n\tcase \"warning\", \"WARNING\":\n\t\tl.Level = logrus.WarnLevel\n\tcase \"error\", \"ERROR\":\n\t\tl.Level = logrus.ErrorLevel\n\tcase \"fatal\", \"FATAL\":\n\t\tl.Level = logrus.FatalLevel\n\tcase \"panic\", \"PANIC\":\n\t\tl.Level = logrus.PanicLevel\n\t}\n}",
"func setLogLevel(level string) {\n\tif level == \"\" {\n\t\treturn\n\t}\n\tswitch level {\n\tcase \"DEBUG\":\n\t\tlogLevel = DEBUG\n\t\tlogrus.SetLevel(logrus.DebugLevel)\n\tcase \"INFO\":\n\t\tlogLevel = INFO\n\t\tlogrus.SetLevel(logrus.InfoLevel)\n\tcase \"WARN\":\n\t\tlogLevel = WARN\n\t\tlogrus.SetLevel(logrus.WarnLevel)\n\tcase \"ERROR\":\n\t\tlogLevel = ERROR\n\t\tlogrus.SetLevel(logrus.ErrorLevel)\n\tcase \"FATAL\":\n\t\tlogLevel = FATAL\n\t\tlogrus.SetLevel(logrus.FatalLevel)\n\tcase \"PANIC\":\n\t\tlogLevel = PANIC\n\t\tlogrus.SetLevel(logrus.PanicLevel)\n\t}\n}",
"func (f *FileHandler) SetLevel(l LogLevel) {\n\tf.min = l\n\tf.max = l\n}",
"func setLogLevel(subsystemID string, logLevel string) {\n\t// Create the backend seelog logger if needed.\n\tif backendLog == seelog.Disabled {\n\t\tbackendLog = newSeelogLogger()\n\t}\n\n\t// Ignore invalid subsystems.\n\tlogger, ok := subsystemLoggers[subsystemID]\n\tif !ok {\n\t\treturn\n\t}\n\n\t// Default to info if the log level is invalid.\n\tlevel, ok := btclog.LogLevelFromString(logLevel)\n\tif !ok {\n\t\tlevel = btclog.InfoLvl\n\t}\n\n\t// Create new logger for the subsystem if needed.\n\tif logger == btclog.Disabled {\n\t\tlogger = btclog.NewSubsystemLogger(backendLog, subsystemID+\": \")\n\t\tuseLogger(subsystemID, logger)\n\t}\n\tlogger.SetLevel(level)\n}",
"func SetLogLevel(logLevel string) {\n\tif logLevel != \"\" {\n\t\tlvl, err := logrus.ParseLevel(logLevel)\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"Unable to parse logging level: %s\", logLevel)\n\t\t\tlogrus.Error(\"Acceptable log levels are: debug, info, warning, panic, and fatal.\")\n\t\t}\n\t\tlogrus.SetLevel(lvl)\n\t} else {\n\t\tlogrus.SetLevel(logrus.InfoLevel)\n\t}\n}",
"func (l *Logger) SetLevel(level Level) {\n\tsetLevel(l.logger, level)\n}",
"func SetLevel(l int) {\n\tatomic.StoreInt32(&level, int32(l))\n}",
"func SetLogLevel(logLevel logrus.Level) {\n\tDefaultLogger.SetLevel(logLevel)\n}",
"func SetLogLevel(logLevel int) {\n\tlogging.SetLevel(logging.Level(logLevel), mainLoggerName)\n}",
"func SetLogLevel(l string) error {\n\tswitch strings.ToLower(l) {\n\tcase \"debug\":\n\t\tLog.Level = logrus.DebugLevel\n\tcase \"info\":\n\t\tLog.Level = logrus.InfoLevel\n\tcase \"warn\":\n\t\tLog.Level = logrus.WarnLevel\n\tcase \"error\":\n\t\tLog.Level = logrus.ErrorLevel\n\tcase \"fatal\":\n\t\tLog.Level = logrus.FatalLevel\n\tcase \"panic\":\n\t\tLog.Level = logrus.PanicLevel\n\t}\n\treturn nil\n}",
"func (l *Logger) SetLevel(level int) {\n\tif level < Error || level > Debug {\n\t\tpanic(level)\n\t}\n\tl.level = level\n}",
"func SetLevel(level int) {\n\tlogrus.SetLevel(logrus.Level(level))\n}",
"func (l *Logger) SetLevel (n int) {\n\tl.level = n\n}",
"func SetLogLevel(level colog.Level) {\n\tlogWriter.SetMinLevel(level)\n}",
"func SetLogLevel(v int) {\n\tC.ploop_set_log_level(C.int(v))\n}",
"func SetLevel(lv Level) {\n\tcurrentLevel = lv\n\tcfg.Level.SetLevel(lv)\n}",
"func (l *ModuleLeveled) SetLevel(level logging.Level, module string) {\n\tl.levels[module] = level\n}",
"func setLogLevel(subsystemID string, logLevel string) {\n\t// Ignore invalid subsystems.\n\tlogger, ok := subsystemLoggers[subsystemID]\n\tif !ok {\n\t\treturn\n\t}\n\n\t// Defaults to info if the log level is invalid.\n\tlevel, _ := btclog.LevelFromString(logLevel)\n\tlogger.SetLevel(level)\n}",
"func SetLogLevel(level logrus.Level) {\n\tlogger.SetLevel(level)\n}",
"func setLogLevel(subsystemID string, logLevel string) {\n\t// Ignore invalid subsystems.\n\tlogger, ok := subsystemLoggers[subsystemID]\n\tif !ok {\n\t\treturn\n\t}\n\n\t// Defaults to info if the log level is invalid.\n\tlevel, _ := common.LevelFromString(logLevel)\n\tlogger.SetLevel(level)\n}",
"func (logger *Logger) SetLevel(level Level) {\n\tif level.isValid() {\n\t\tlogger.level = level\n\t}\n}",
"func (log *logging) setLevel(level Level) {\n\tlog.level = level\n}",
"func SetLoglevel(logFile *LogFileName, level int) {\n\t(*logFile).logLevel = level\n}",
"func SetLogLevel(loggerName string, level Level) {\n\tlm().setLogLevel(loggerName, level)\n}",
"func SetLevel(l Level) {\n\tDefaultLevel = l\n}",
"func setLevel(logrusLevel logrus.Level) int32 {\n\treturn int32(logrusLevel)\n}",
"func SetLevel(ctx context.Context, logLevelStr string) {\n\tif setter, ok := ctx.Value(setLogLevelContextKey{}).(func(string)); ok {\n\t\tsetter(logLevelStr)\n\t}\n}",
"func (l *Logger) SetLevel(level Level) {\n\tl.mu.Lock()\n\tl.level = level\n\tl.mu.Unlock()\n}",
"func (logger *Logger) SetLevel(level logging.LogLevel) {\n\tlogger.access.Lock()\n\tdefer logger.access.Unlock()\n\tlogger.level = level\n}",
"func (l *Logger) SetLevel(level int) {\n\tl.level = level\n}",
"func SetLevel(level Level) {\n\tlconf.FilterLevel = level\n}",
"func SetLogLevel(logLevel string) {\n\tlevel, err := log.ParseLevel(logLevel)\n\terrors.CheckError(err)\n\tlog.SetLevel(level)\n}",
"func (d *LevelWrapper) SetLevel(level Level) {\n\td.LogLevel = level\n}",
"func (m *MYLOG) SetDefaultLvl(lvl shim.LoggingLevel) {\r\n\tm.logger.SetLevel(lvl)\r\n}",
"func (l *Logger) SetLogLevel(level Level) {\n\tl.level.Set(level)\n}",
"func SetLevel(level Level) {\n\tfilterLevel = level\n}",
"func SetLevel(s, level string) (*Levels, error) {\n\tfound, logger := validSubLogger(s)\n\tif !found {\n\t\treturn nil, fmt.Errorf(\"logger %v not found\", s)\n\t}\n\tlogger.Levels = splitLevel(level)\n\n\treturn &logger.Levels, nil\n}",
"func LogLevel(level string) {\n\tlogLevel = level\n\tLogger = logger.New(logLevel)\n}",
"func (a *Admin) SetLoggerLevel(_ *http.Request, args *SetLoggerLevelArgs, _ *api.EmptyReply) error {\n\ta.Log.Debug(\"API called\",\n\t\tzap.String(\"service\", \"admin\"),\n\t\tzap.String(\"method\", \"setLoggerLevel\"),\n\t\tlogging.UserString(\"loggerName\", args.LoggerName),\n\t\tzap.Stringer(\"logLevel\", args.LogLevel),\n\t\tzap.Stringer(\"displayLevel\", args.DisplayLevel),\n\t)\n\n\tif args.LogLevel == nil && args.DisplayLevel == nil {\n\t\treturn errNoLogLevel\n\t}\n\n\tvar loggerNames []string\n\tif len(args.LoggerName) > 0 {\n\t\tloggerNames = []string{args.LoggerName}\n\t} else {\n\t\t// Empty name means all loggers\n\t\tloggerNames = a.LogFactory.GetLoggerNames()\n\t}\n\n\tfor _, name := range loggerNames {\n\t\tif args.LogLevel != nil {\n\t\t\tif err := a.LogFactory.SetLogLevel(name, *args.LogLevel); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\tif args.DisplayLevel != nil {\n\t\t\tif err := a.LogFactory.SetDisplayLevel(name, *args.DisplayLevel); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}",
"func (lr *logRegistry) SetLevel(logger, level string) error {\n\tlr.rwmutex.RLock()\n\tdefer lr.rwmutex.RUnlock()\n\tlg, ok := lr.mapping[logger]\n\tif !ok {\n\t\treturn fmt.Errorf(\"Logger %s not found\", logger)\n\t}\n\tlvl, err := logrus.ParseLevel(level)\n\tif err == nil {\n\t\tswitch lvl {\n\t\tcase logrus.DebugLevel:\n\t\t\tlg.SetLevel(logging.DebugLevel)\n\t\tcase logrus.InfoLevel:\n\t\t\tlg.SetLevel(logging.InfoLevel)\n\t\tcase logrus.WarnLevel:\n\t\t\tlg.SetLevel(logging.WarnLevel)\n\t\tcase logrus.ErrorLevel:\n\t\t\tlg.SetLevel(logging.ErrorLevel)\n\t\tcase logrus.PanicLevel:\n\t\t\tlg.SetLevel(logging.PanicLevel)\n\t\tcase logrus.FatalLevel:\n\t\t\tlg.SetLevel(logging.FatalLevel)\n\t\t}\n\n\t}\n\treturn nil\n}",
"func SetLogLevel(lvl string) error {\n\ttlvl := parseLogLevel(lvl)\n\tdefaultLgr.option.Level = tlvl\n\tdefaultLgr.leveldBackend.SetLevel(tlvl.loggingLevel(), \"\")\n\treturn nil\n}",
"func SetLoglevel(loglevel string) {\n\tif loglevel == \"panic\" {\n\t\tlog.SetLevel(log.PanicLevel)\n\t\tlog.SetReportCaller(false)\n\t} else if loglevel == \"fatal\" {\n\t\tlog.SetLevel(log.FatalLevel)\n\t\tlog.SetReportCaller(false)\n\t} else if loglevel == \"warn\" {\n\t\tlog.SetLevel(log.WarnLevel)\n\t\tlog.SetReportCaller(false)\n\t} else if loglevel == \"info\" {\n\t\tlog.SetLevel(log.InfoLevel)\n\t\tlog.SetReportCaller(false)\n\t} else if loglevel == \"debug\" {\n\t\tlog.SetLevel(log.DebugLevel)\n\t\tlog.SetReportCaller(true)\n\t} else if loglevel == \"trace\" {\n\t\tlog.SetLevel(log.TraceLevel)\n\t\tlog.SetReportCaller(true)\n\t}\n}",
"func SetLoggerLevel(logger *logging.Logger, level string) error {\n\tif err := validateLogLevel(level); err != nil {\n\t\treturn err\n\t}\n\n\tlogLevel, err := logging.LogLevel(level)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tbackend1 := logging.NewLogBackend(os.Stderr, \"\", 0)\n\n\tformat := logging.MustStringFormatter(\n\t\t`%{color}%{time:15:04:05.000} %{level:.4s} ▶ [%{shortfunc}]: %{color:reset} %{message}`,\n\t)\n\n\t// For messages written to backend1 we want to add some additional\n\t// information to the output, including the used log level and the name of\n\t// the function.\n\tbackend1Formatter := logging.NewBackendFormatter(backend1, format)\n\n\t// Only errors and more severe messages should be sent to backend1\n\tbackend1Leveled := logging.AddModuleLevel(backend1Formatter)\n\tbackend1Leveled.SetLevel(logLevel, logger.Module)\n\n\t// Set the backends to be used.\n\tlogger.SetBackend(backend1Leveled)\n\n\treturn nil\n}",
"func (logger *Logger) SetLevel(level string) {\n\tlogger.level = LevelInt(level)\n}",
"func (b *Logger) SetLevel(level Level) *Logger {\n\tatomic.StoreUint32((*uint32)(&b.level), uint32(level))\n\treturn b\n}",
"func SetLogLevel(level string) error {\n\tlogLevel, err := parseStringLogLevel(level)\n\tif err != nil {\n\t\treturn err\n\t}\n\tzerolog.SetGlobalLevel(logLevel)\n\n\treturn nil\n}",
"func (logger *Logger) SetLogLevel(level string) {\n\tswitch level {\n\tcase \"fatal\":\n\t\tlogger.LogLevel = FatalLevel\n\tcase \"error\":\n\t\tlogger.LogLevel = ErrorLevel\n\tcase \"warn\":\n\t\tlogger.LogLevel = WarnLevel\n\tcase \"info\":\n\t\tlogger.LogLevel = InfoLevel\n\tcase \"debug\":\n\t\tlogger.LogLevel = DebugLevel\n\tdefault:\n\t\tlogger.LogLevel = TraceLevel\n\t}\n}",
"func Level(level string) Option {\n\treturn func(logger *logrus.Logger) {\n\t\tlevel, err := logrus.ParseLevel(level)\n\t\t// No need to handle the error here, just don't update the log level\n\t\tif err == nil {\n\t\t\tlogger.SetLevel(level)\n\t\t}\n\t}\n}",
"func (lc mockNotifyLogger) SetLogLevel(loglevel string) error {\n\treturn nil\n}",
"func setLoggingLevel(logLevel string) error {\n\n\tswitch logLevel {\n\tcase LogLevelDisabled:\n\t\tzerolog.SetGlobalLevel(loggingLevels[LogLevelDisabled])\n\tcase LogLevelPanic:\n\t\tzerolog.SetGlobalLevel(loggingLevels[LogLevelPanic])\n\tcase LogLevelFatal:\n\t\tzerolog.SetGlobalLevel(loggingLevels[LogLevelFatal])\n\tcase LogLevelError:\n\t\tzerolog.SetGlobalLevel(loggingLevels[LogLevelError])\n\tcase LogLevelWarn:\n\t\tzerolog.SetGlobalLevel(loggingLevels[LogLevelWarn])\n\tcase LogLevelInfo:\n\t\tzerolog.SetGlobalLevel(loggingLevels[LogLevelInfo])\n\tcase LogLevelDebug:\n\t\tzerolog.SetGlobalLevel(loggingLevels[LogLevelDebug])\n\tcase LogLevelTrace:\n\t\tzerolog.SetGlobalLevel(loggingLevels[LogLevelTrace])\n\tdefault:\n\t\treturn fmt.Errorf(\"invalid option provided: %v\", logLevel)\n\t}\n\n\t// signal that a case was triggered as expected\n\treturn nil\n\n}",
"func (stimLogger *FullStimLogger) SetLevel(level Level) {\n\tstimLogger.currentLevel = level\n\thl := level\n\tfor kv := range stimLogger.logfiles.Iter() {\n\t\tlgr := kv.Value.(*logFile)\n\t\tif lgr.logLevel > hl {\n\t\t\thl = lgr.logLevel\n\t\t}\n\t}\n\tstimLogger.highestLevel = hl\n}",
"func SetLogLevel(subsystemID string, logLevel string) {\n\t// Ignore invalid subsystems.\n\tlogger, ok := subsystemLoggers[subsystemID]\n\tif !ok {\n\t\treturn\n\t}\n\n\t// Defaults to info if the log level is invalid.\n\tlevel, _ := LevelFromString(logLevel)\n\tlogger.SetLevel(level)\n}",
"func (m Meta) SetLevel(lvl Level) {\n\tm.lvl.Store(int32(lvl))\n}",
"func SetLevelByName(level string) error {\n\tl, err := logrus.ParseLevel(level)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Invalid level name %v: %w\", level, err)\n\t}\n\tLogger.SetLevel(l)\n\treturn nil\n}",
"func SetLevel(level string) error {\n\tlevel = strings.ToLower(level)\n\tlvl, err := zerolog.ParseLevel(level)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tzerolog.SetGlobalLevel(lvl)\n\treturn nil\n}",
"func SetLogLevel(level string) (ok bool) {\n\tswitch strings.ToUpper(level) {\n\tcase \"ERROR\":\n\t\tMaxLogLevel = ERROR\n\tcase \"WARN\":\n\t\tMaxLogLevel = WARN\n\tcase \"INFO\":\n\t\tMaxLogLevel = INFO\n\tcase \"TRACE\":\n\t\tMaxLogLevel = TRACE\n\tdefault:\n\t\tLogError(\"Unknown log level requested: %v\", level)\n\t\treturn false\n\t}\n\treturn true\n}",
"func SetLogLevel(level string) {\n\tswitch level {\n\tcase \"debug\":\n\t\tlog.SetLevel(log.DebugLevel)\n\tcase \"info\":\n\t\tlog.SetLevel(log.InfoLevel)\n\tcase \"warning\":\n\t\tlog.SetLevel(log.WarnLevel)\n\tcase \"error\":\n\t\tlog.SetLevel(log.ErrorLevel)\n\tcase \"fatal\":\n\t\tlog.SetLevel(log.FatalLevel)\n\tcase \"panic\":\n\t\tlog.SetLevel(log.PanicLevel)\n\tdefault:\n\t\tlog.SetLevel(log.InfoLevel)\n\t\tmsg := \"Unknown level, \" + level\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"InputPlugin\": \"systemctl\",\n\t\t}).Error(msg)\n\t}\n}",
"func WithLevel(lvl string) Option {\n\treturn func(l *Logger) {\n\t\tl.level = zapLevel(lvl)\n\t}\n}",
"func (l *Logger) SetLevel(levelName string) *Logger {\n\tl.mu.Lock()\n\tl.Level = getLevelByName(levelName)\n\tl.mu.Unlock()\n\treturn l\n}",
"func SetLogLevel(level zapcore.Level) {\n\tif level == TraceLevel {\n\t\tIsTraceLevel = true\n\t\tlevel = zapcore.DebugLevel\n\t}\n\tatom.SetLevel(level)\n}",
"func SetLevel(level logrus.Level) {\n\tlogrus.SetLevel(level)\n}",
"func (l *TestLog) SetLevel(level int) {\n\tl.verbosity = level\n}"
] | [
"0.77812415",
"0.77143854",
"0.7690312",
"0.7635716",
"0.74855775",
"0.7402874",
"0.7372661",
"0.73702145",
"0.7263379",
"0.7263271",
"0.7238328",
"0.7234703",
"0.72197574",
"0.72197574",
"0.72189397",
"0.72102547",
"0.72085947",
"0.7204849",
"0.7196298",
"0.7192631",
"0.7182598",
"0.71739995",
"0.71672547",
"0.71636695",
"0.71483856",
"0.712785",
"0.7096081",
"0.7096081",
"0.7053296",
"0.70381826",
"0.7028818",
"0.69828635",
"0.698009",
"0.69790715",
"0.6966797",
"0.69538206",
"0.69351083",
"0.69299024",
"0.6919109",
"0.69014585",
"0.6894854",
"0.68775713",
"0.68767726",
"0.6874495",
"0.6867863",
"0.68492484",
"0.6841415",
"0.68413705",
"0.68289006",
"0.6818679",
"0.6810057",
"0.67882377",
"0.67733973",
"0.6772108",
"0.676041",
"0.6747264",
"0.67416394",
"0.67273355",
"0.66861993",
"0.66755944",
"0.6671932",
"0.6671178",
"0.66669154",
"0.66667616",
"0.66665566",
"0.6665377",
"0.6664546",
"0.66639084",
"0.6652139",
"0.665166",
"0.6647625",
"0.6624097",
"0.6622627",
"0.65998375",
"0.65930116",
"0.6591218",
"0.6570473",
"0.6558669",
"0.65580606",
"0.65575606",
"0.6552908",
"0.65479755",
"0.6537287",
"0.65357083",
"0.653282",
"0.65195453",
"0.6516956",
"0.65039146",
"0.64945567",
"0.6493537",
"0.6493359",
"0.649303",
"0.64876676",
"0.64846957",
"0.64799464",
"0.6453152",
"0.6435636",
"0.6427711",
"0.6414807",
"0.6414443"
] | 0.72558755 | 10 |
Level returns logger level | func (l *EchoLogrus) Level() log.Lvl {
switch l.Logger.Level {
case logrus.DebugLevel:
return log.DEBUG
case logrus.WarnLevel:
return log.WARN
case logrus.ErrorLevel:
return log.ERROR
case logrus.InfoLevel:
return log.INFO
}
return log.WARN
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func Level() int {\n\treturn level\n}",
"func LogLevel() int {\n return level\n}",
"func (l *Logrus) Level() interface{} {\n\treturn l.Logger.Level\n}",
"func (f *LogFile) Level() int { return 0 }",
"func Level(level string) int {\n\tl, ok := levels[level]\n\tif !ok {\n\t\tpanic(\"Invalid log level \" + level)\n\t}\n\treturn l\n}",
"func (handler *ConsoleLogHandler) Level() LogLevel {\r\n return AllLogLevels\r\n}",
"func (l *Logger) Level() LogLevel {\n\tl.mutex.Lock()\n\tdefer l.mutex.Unlock()\n\treturn l.level\n}",
"func (l *Logger) LogLevel() int {\n\treturn l.level\n}",
"func (ll *LogLevelLogger) Level() LogLevel {\n\treturn ll.level\n}",
"func GetLevel(logger *logrus.Logger) logrus.Level {\n\treturn logrus.Level(atomic.LoadUint32((*uint32)(&logger.Level)))\n}",
"func GetLevel() loggers.Level {\n\treturn GetLogger().GetLevel()\n}",
"func getLoggerLevel(cfg *Config) logrus.Level {\n\tll, err := logrus.ParseLevel(cfg.Level)\n\tif err != nil {\n\t\tll = logrus.WarnLevel\n\t}\n\n\treturn ll\n}",
"func (l DefaultSDKLogger) LogLevel() int {\n\treturn l.currentLoggingLevel\n}",
"func LogLevel() Level {\n\tmutex.RLock()\n\tdefer mutex.RUnlock()\n\treturn logLevel\n}",
"func (logger *Logger) GetLevel() logging.LogLevel {\n\treturn logger.level\n}",
"func LogLevel() string {\n\treturn cnfg.GetString(\"log.level\")\n}",
"func (l *Logger) LoggingLevel() int {\n\treturn l.loggingLevel\n}",
"func GetLevel() Level {\n\tl, _ := NamedLoggers.Load(DEFAULT)\n\treturn l.GetLevel()\n}",
"func GetLevel() (level Level) {\n\treturn lconf.FilterLevel\n}",
"func (d *LevelWrapper) Level() Level {\n\treturn d.LogLevel\n}",
"func Level() int {\n\treturn int(atomic.LoadInt32(&level))\n}",
"func (l *Logger) GetLevel() Level {\n\treturn l.Level\n}",
"func (l *BasicLogger) GetLevel() Level {\n\treturn Level(l.Logger.GetLevel())\n}",
"func Loglevel() int {\n\treturn loglevel\n}",
"func (l *Logger) LogLevel() Level {\n\treturn l.level.Get()\n}",
"func (l *logHandler) LogLevel() int {\n\treturn l.logLevel\n}",
"func (manager Manager) GetLoggingLevel() string {\n\treturn manager.viperConfig.GetString(logLevel)\n}",
"func (l *logWrapper) LogLevel() int {\n\treturn l.logLevel\n}",
"func (l *Config) GetLevel() string {\n\tif l.Level == \"\" {\n\t\treturn defaultLevel\n\t}\n\n\treturn l.Level\n}",
"func LogLevel() LogLevelType {\n\treturn logLevel\n}",
"func GetLevel() (level Level) {\n\treturn filterLevel\n}",
"func level(l log15.Lvl) string {\n\tswitch l {\n\tcase log15.LvlInfo:\n\t\treturn \"INFO\"\n\tcase log15.LvlDebug:\n\t\treturn \"DEBUG\"\n\tcase log15.LvlWarn:\n\t\treturn \"WARN\"\n\tcase log15.LvlCrit:\n\t\treturn \"FATAL\"\n\tcase log15.LvlError:\n\t\treturn \"ERROR\"\n\t}\n\treturn l.String()\n}",
"func (l *Logger) LogLevel() LogLevel {\n\tmu.RLock()\n\tdefer mu.RUnlock()\n\treturn l.logLevel\n}",
"func GetLevel() Level {\n\treturn std.Level()\n}",
"func (l *ModuleLeveled) GetLevel(module string) logging.Level {\n\tlevel, exists := l.levels[module]\n\tif !exists {\n\t\tlevel, exists = l.levels[\"\"]\n\t\t// no configuration exists, default to debug\n\t\tif !exists {\n\t\t\tlevel = logging.DEBUG\n\t\t}\n\t}\n\treturn level\n}",
"func (w *LevelWriter) Level() string {\n\tl := w.level.Load().(string)\n\tswitch l[0] {\n\tcase 'T':\n\t\treturn \"TRACE\"\n\tcase 'D':\n\t\treturn \"DEBUG\"\n\tcase 'I':\n\t\treturn \"INFO\"\n\tcase 'W':\n\t\treturn \"WARN\"\n\tcase 'E':\n\t\treturn \"ERROR\"\n\tcase 'F':\n\t\treturn \"FATAL\"\n\tdefault:\n\t\treturn \"???\" + l + \"???\"\n\t}\n}",
"func GetLevel() logrus.Level {\n\treturn logrus.GetLevel()\n}",
"func LogLevel() int32 {\n\treturn atomic.LoadInt32(&logLevel)\n}",
"func (f *IndexFile) Level() int { return f.level }",
"func getLogLevel() log.Level {\n\tlvl, err := log.ParseLevel(Config().GetString(\"log-level\"))\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"passed\": lvl,\n\t\t\t\"default\": \"fatal\",\n\t\t}).Warn(\"Log level is not valid, fallback to default level\")\n\t\treturn log.FatalLevel\n\t}\n\treturn lvl\n}",
"func GetLevel() Level {\n\treturn DefaultLevel\n}",
"func getLogLevel(settings Settings) logrus.Level {\n\treturn convertLogLevel(settings.Level)\n}",
"func LogLevel() logrus.Level {\n\tlvl, err := logrus.ParseLevel(General.LogLevel)\n\tif err != nil {\n\t\tlogrus.WithError(err).Fatal(\"config: Parse log level\")\n\t}\n\treturn lvl\n}",
"func Level(s string) (*Levels, error) {\n\tfound, logger := validSubLogger(s)\n\tif !found {\n\t\treturn nil, fmt.Errorf(\"logger %v not found\", s)\n\t}\n\n\treturn &logger.Levels, nil\n}",
"func (lr *logRegistry) GetLevel(logger string) (string, error) {\n\tlr.rwmutex.RLock()\n\tdefer lr.rwmutex.RUnlock()\n\tlg, ok := lr.mapping[logger]\n\tif !ok {\n\t\treturn \"\", fmt.Errorf(\"Logger %s not found\", logger)\n\t}\n\treturn lg.GetLevel().String(), nil\n}",
"func (sl *List) Level() int { return sl.level }",
"func (o *CreateEventPayloadActions) GetLevel() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.Level\n}",
"func (e Entry) Level() (level Level) {\n\treturn e.level\n}",
"func (g *group) GetLevel() Level {\n\treturn g.level\n}",
"func (ll *LogLevel) Get() logrus.Level {\n\tswitch *ll {\n\tcase \"trace\":\n\t\treturn logrus.TraceLevel\n\tcase \"debug\":\n\t\treturn logrus.DebugLevel\n\tcase \"info\":\n\t\treturn logrus.InfoLevel\n\tcase \"warn\":\n\t\treturn logrus.WarnLevel\n\tcase \"error\":\n\t\treturn logrus.ErrorLevel\n\tcase \"fatal\":\n\t\treturn logrus.FatalLevel\n\tcase \"panic\":\n\t\treturn logrus.PanicLevel\n\tdefault:\n\t\treturn logrus.TraceLevel\n\t}\n}",
"func (lc *LogConfig) LogrusLevel() *log.Level {\n\tif lc.Level != \"\" {\n\t\tlevel, err := log.ParseLevel(lc.Level)\n\t\tif err != nil {\n\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\"level\": lc.Level,\n\t\t\t}).Error(\"Invalid log level\")\n\t\t\treturn nil\n\t\t}\n\t\treturn &level\n\t}\n\treturn nil\n}",
"func LogLevel(i int) logger.LogLevel {\n\tswitch i {\n\tcase 0:\n\t\treturn logger.FatalLevel\n\tcase 1:\n\t\treturn logger.PanicLevel\n\tcase 2:\n\t\treturn logger.ErrorLevel\n\tcase 3:\n\t\treturn logger.WarnLevel\n\tcase 4:\n\t\treturn logger.NotifyLevel\n\tcase 5:\n\t\treturn logger.InfoLevel\n\tcase 6:\n\t\treturn logger.DebugLevel\n\tdefault:\n\t\treturn logger.InfoLevel\n\t}\n}",
"func Atol(level string) logging.Level {\n\tvar logLevel logging.Level\n\tswitch strings.ToLower(level) {\n\tcase \"error\":\n\t\tlogLevel = logging.Error\n\tcase \"info\":\n\t\tlogLevel = logging.Info\n\tcase \"warn\":\n\t\tlogLevel = logging.Warn\n\tcase \"debug\":\n\t\tlogLevel = logging.Debug\n\tcase \"trace\":\n\t\tlogLevel = logging.Trace\n\tdefault:\n\t\tlogLevel = logging.Info\n\t}\n\n\treturn logLevel\n}",
"func getLevel() (log.Level, error) {\n\tlvl := conf.GetEnvOrDefault(\"LOG_LEVEL\", defaultLogLevel.String())\n\n\tlogLevel, err := log.ParseLevel(lvl)\n\tif err != nil {\n\t\treturn defaultLogLevel, err\n\t}\n\n\treturn logLevel, nil\n}",
"func (v Verbosity) GetLevel() log.Level {\n\tswitch v {\n\tcase FATAL:\n\t\treturn log.FatalLevel\n\tcase ERROR:\n\t\treturn log.ErrorLevel\n\tcase WARNING:\n\t\treturn log.WarnLevel\n\tcase INFO:\n\t\treturn log.InfoLevel\n\tcase DEBUG:\n\t\treturn log.DebugLevel\n\tcase TRACE:\n\t\treturn log.TraceLevel\n\tdefault:\n\t\t// this is never reached, but in case, return INFO as the default\n\t\treturn log.InfoLevel\n\t}\n}",
"func (l *Logger) MinLevel() Level {\n\treturn l.min\n}",
"func (_Votes *VotesCaller) LogLevel(opts *bind.CallOpts) (uint8, error) {\n\tvar (\n\t\tret0 = new(uint8)\n\t)\n\tout := ret0\n\terr := _Votes.contract.Call(opts, out, \"logLevel\")\n\treturn *ret0, err\n}",
"func (c LoggerConfig) GetLevel() Level {\n\tlevel := c.Level\n\tif level != nil {\n\t\treturn levelStringToLevel(*level)\n\t}\n\treturn ErrorLevel\n}",
"func (l *XORMLogBridge) Level() core.LogLevel {\n\tswitch log.GetLevel() {\n\tcase \"debug\":\n\t\treturn core.LOG_DEBUG\n\tcase \"info\":\n\t\treturn core.LOG_INFO\n\tcase \"warn\":\n\t\treturn core.LOG_WARNING\n\tdefault:\n\t\treturn core.LOG_ERR\n\t}\n\treturn core.LOG_OFF\n}",
"func GetLevel(levelName string) (level, bool) {\n\tlevelName = strings.ToUpper(levelName)\n\n\tswitch (levelName) {\n\tcase \"NOLOG\":\n\t\treturn NOLOG, true\n\n\tcase \"ERROR\":\n\t\treturn ERROR, true\n\n\tcase \"WARN\":\n\t\treturn WARN, true\n\n\tcase \"VERBOSE\":\n\t\treturn VERBOSE, true\n\n\tcase \"INFO\":\n\t\treturn INFO, true\n\n\tcase \"DEBUG\":\n\t\treturn DEBUG, true\n\t}\n\n\treturn NOLOG, false\n}",
"func (a *Admin) GetLoggerLevel(_ *http.Request, args *GetLoggerLevelArgs, reply *GetLoggerLevelReply) error {\n\ta.Log.Debug(\"API called\",\n\t\tzap.String(\"service\", \"admin\"),\n\t\tzap.String(\"method\", \"getLoggerLevels\"),\n\t\tlogging.UserString(\"loggerName\", args.LoggerName),\n\t)\n\treply.LoggerLevels = make(map[string]LogAndDisplayLevels)\n\tvar loggerNames []string\n\t// Empty name means all loggers\n\tif len(args.LoggerName) > 0 {\n\t\tloggerNames = []string{args.LoggerName}\n\t} else {\n\t\tloggerNames = a.LogFactory.GetLoggerNames()\n\t}\n\n\tfor _, name := range loggerNames {\n\t\tlogLevel, err := a.LogFactory.GetLogLevel(name)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdisplayLevel, err := a.LogFactory.GetDisplayLevel(name)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treply.LoggerLevels[name] = LogAndDisplayLevels{\n\t\t\tLogLevel: logLevel,\n\t\t\tDisplayLevel: displayLevel,\n\t\t}\n\t}\n\treturn nil\n}",
"func (n *Node) Level() int {\n\treturn n.Depth() + 1\n}",
"func (hnd *Handlers) GetLogLevel() http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tfmt.Fprintf(w, \"{\\\"%s\\\":\\\"%d\\\"}\\n\\r\", \"loglevel\", log.GetLevel())\n\t}\n}",
"func (this *resultStruct) Level() int32 {\n\tvalue := this.level\n\treturn value\n}",
"func GetLoglevel(logFile *LogFileName) (level int) {\n\treturn (*logFile).logLevel\n}",
"func (n *TreeNode) Level() (byte, error) {\n\tif n == nil {\n\t\treturn 0, errors.New(errors.KsiInvalidArgumentError)\n\t}\n\treturn n.level, nil\n}",
"func (m Meta) Level() Level {\n\treturn Level(m.lvl.Load())\n}",
"func (s *baseNode) Level() int {\n\treturn s.level\n}",
"func (l *ModuleLevels) GetLevel(module string) api.Level {\n\tlevel, exists := l.levels[module]\n\tif !exists {\n\t\tlevel, exists = l.levels[\"\"]\n\t\t// no configuration exists, default to info\n\t\tif !exists {\n\t\t\tlevel = api.INFO\n\t\t}\n\t}\n\treturn level\n}",
"func GetLevel(module string) Level {\n\treturn defaultBackend.GetLevel(module)\n}",
"func (l *Logger) GetMinLevel() Level {\n\treturn l.minLevel\n}",
"func GetLevel() Level {\n\treturn currentLevel\n}",
"func (lvl leveled) GetLevels() []LogLevel {\n\treturn lvl.levels\n}",
"func (l *Logger) formatLevel(lv string) string {\n\n\tspaces := \"\"\n\tif len(lv) == 4 {\n\t\tspaces = \" \"\n\t}\n\treturn lv + spaces\n}",
"func (t *Tree) Level() int {\n\treturn t.Depth() + 1\n}",
"func (self *StandardLogger) GetEffectiveLevel() LogLevelType {\n\tself.lock.RLock()\n\tdefer self.lock.RUnlock()\n\tvar logger Logger = self\n\tfor logger != nil {\n\t\tlevel := logger.GetLevel()\n\t\tif level != LevelNotset {\n\t\t\treturn level\n\t\t}\n\t\tlogger = logger.GetParent()\n\t}\n\treturn LevelNotset\n}",
"func (h *Handler) GetLevel() log.Level {\n\treturn log.InfoLevel\n}",
"func (handler *TimedFileHandler) GetLevel() Level {\n\treturn handler.Level\n}",
"func GetLevel(string string) (Level, error) {\n\tswitch string {\n\tcase \"t\":\n\t\tfallthrough\n\tcase \"T\":\n\t\tfallthrough\n\tcase \"trace\":\n\t\tfallthrough\n\tcase \"TRACE\":\n\t\treturn LevelTrace, nil\n\tcase \"d\":\n\t\tfallthrough\n\tcase \"D\":\n\t\tfallthrough\n\tcase \"debug\":\n\t\tfallthrough\n\tcase \"DEBUG\":\n\t\treturn LevelDebug, nil\n\tcase \"i\":\n\t\tfallthrough\n\tcase \"I\":\n\t\tfallthrough\n\tcase \"info\":\n\t\tfallthrough\n\tcase \"INFO\":\n\t\treturn LevelInfo, nil\n\tcase \"w\":\n\t\tfallthrough\n\tcase \"W\":\n\t\tfallthrough\n\tcase \"warn\":\n\t\tfallthrough\n\tcase \"WARN\":\n\t\treturn LevelWarn, nil\n\tcase \"e\":\n\t\tfallthrough\n\tcase \"E\":\n\t\tfallthrough\n\tcase \"error\":\n\t\tfallthrough\n\tcase \"ERROR\":\n\t\treturn LevelError, nil\n\tcase \"o\":\n\t\tfallthrough\n\tcase \"O\":\n\t\tfallthrough\n\tcase \"out\":\n\t\tfallthrough\n\tcase \"OUT\":\n\t\treturn LevelOut, nil\n\tdefault:\n\t\treturn -1, ErrLevelUnknown(string)\n\t}\n}",
"func (e *Env) LogLevel(key string, optionSetters ...OptionSetter) (level logging.Level, err error) {\n\ts, err := e.Get(key, optionSetters...)\n\tif err != nil {\n\t\treturn level, err\n\t}\n\tswitch strings.ToLower(s) {\n\tcase \"debug\":\n\t\treturn logging.LevelDebug, nil\n\tcase \"info\":\n\t\treturn logging.LevelInfo, nil\n\tcase \"warning\":\n\t\treturn logging.LevelWarn, nil\n\tcase \"error\":\n\t\treturn logging.LevelError, nil\n\tdefault:\n\t\treturn level, fmt.Errorf(\"%w: %s: can be one of: debug, info, warning, error\",\n\t\t\tErrUnknownLogLevel, s)\n\t}\n}",
"func (h *Handler) GetLevel() logging.Level {\n\treturn h.Level\n}",
"func (l *Logger) Level(level zerolog.Level) zerolog.Logger {\n\treturn l.logger.Level(level)\n}",
"func (m *Machine) LogLevel() string {\n\treturn m.Cfg.LogLevel\n}",
"func (f File) Level() int {\n\tfp := f.FullPath()\n\tif fp == \"\" || fp == \"/\" {\n\t\treturn 0\n\t}\n\treturn strings.Count(fp, \"/\")\n}",
"func (o MethodSettingsSettingsOutput) LoggingLevel() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v MethodSettingsSettings) *string { return v.LoggingLevel }).(pulumi.StringPtrOutput)\n}",
"func (f *Formatter) _level(r *Record) string {\n\ts := LevelName[r.lv]\n\tif f.colored {\n\t\ts = f.paint(r.lv, s)\n\t}\n\treturn s\n}",
"func (_Votes *VotesCallerSession) LogLevel() (uint8, error) {\n\treturn _Votes.Contract.LogLevel(&_Votes.CallOpts)\n}",
"func (c OutputConfig) GetLevel() Level {\n\tlevel := c.Level\n\tif level != nil {\n\t\treturn levelStringToLevel(*level)\n\t}\n\treturn DebugLevel\n}",
"func V(level int) Logger {\n\tv := Verbosity\n\tif pv, ok := packageVerbosity(1); ok {\n\t\tv = pv\n\t}\n\n\tif level <= v {\n\t\treturn Info\n\t}\n\treturn nilLogger\n}",
"func (m *CompetenceMutation) Level() (r int, exists bool) {\n\tv := m.level\n\tif v == nil {\n\t\treturn\n\t}\n\treturn *v, true\n}",
"func (hook fieldLogHook) Levels() []logrus.Level {\n\tif hook.levels != nil {\n\t\treturn hook.levels\n\t}\n\n\treturn logrus.AllLevels\n}",
"func (o MethodSettingsSettingsPtrOutput) LoggingLevel() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *MethodSettingsSettings) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.LoggingLevel\n\t}).(pulumi.StringPtrOutput)\n}",
"func Name2Level(ln string) (Level, error) {\n\tswitch strings.ToLower(ln) {\n\tcase \"panic\":\n\t\treturn PanicLevel, nil\n\tcase \"fatal\":\n\t\treturn FatalLevel, nil\n\tcase \"err\", \"error\":\n\t\treturn ErrorLevel, nil\n\tcase \"warn\", \"warning\":\n\t\treturn WarnLevel, nil\n\tcase \"notice\":\n\t\treturn NoticeLevel, nil\n\tcase \"info\", \"\": // make the zero value useful\n\t\treturn InfoLevel, nil\n\tcase \"debug\":\n\t\treturn DebugLevel, nil\n\tcase \"trace\":\n\t\treturn TraceLevel, nil\n\t}\n\n\tvar l Level\n\treturn l, fmt.Errorf(\"invalid log Level: %q\", ln)\n}",
"func (self Channel) GetLevel() (c int, e error) {\n\tc = int(C.BASS_ChannelGetLevel(self.cint()))\n\tif c == -1 {\n\t\treturn 0, errMsg()\n\t}\n\treturn c, nil\n}",
"func GetMLogLevel(module string) string {\n\tmloggers.RLock()\n\tdefer mloggers.RUnlock()\n\twl, ok := mloggers.loggers[module]\n\tif !ok {\n\t\treturn \"\"\n\t}\n\tswitch wl.option.Level {\n\tcase CRITICAL:\n\t\treturn \"critical\"\n\tcase ERROR:\n\t\treturn \"error\"\n\tcase WARNING:\n\t\treturn \"warning\"\n\tcase NOTICE:\n\t\treturn \"notice\"\n\tcase INFO:\n\t\treturn \"info\"\n\tcase DEBUG:\n\t\treturn \"debug\"\n\tdefault:\n\t\treturn \"\"\n\t}\n}",
"func (l *Logger) extractLevel(line string) (level, msg string) {\n\tfor _, lv := range levels {\n\t\tif strings.HasPrefix(line, lv) {\n\t\t\treturn lv, strings.TrimSpace(line[len(lv):])\n\t\t}\n\t\tif strings.HasPrefix(line, \"[\"+lv+\"]\") {\n\t\t\treturn lv, strings.TrimSpace(line[len(\"[\"+lv+\"]\"):])\n\t\t}\n\t}\n\treturn \"INFO\", line\n}",
"func (l *Logger) GetVerbose() event.Level { return l.level }",
"func (o MitigationActionEnableIoTLoggingParamsOutput) LogLevel() MitigationActionEnableIoTLoggingParamsLogLevelOutput {\n\treturn o.ApplyT(func(v MitigationActionEnableIoTLoggingParams) MitigationActionEnableIoTLoggingParamsLogLevel {\n\t\treturn v.LogLevel\n\t}).(MitigationActionEnableIoTLoggingParamsLogLevelOutput)\n}",
"func (q elasticClient) Levels() []logrus.Level {\n\treturn []logrus.Level{\n\t\tlogrus.InfoLevel,\n\t}\n}",
"func (s *FieldStatsService) Level(level string) *FieldStatsService {\n\ts.level = level\n\treturn s\n}"
] | [
"0.754305",
"0.75143677",
"0.75124973",
"0.738756",
"0.73849875",
"0.7359766",
"0.7305995",
"0.7292284",
"0.72628397",
"0.72301555",
"0.7184917",
"0.7155091",
"0.7095232",
"0.7077203",
"0.70604163",
"0.7057018",
"0.70108193",
"0.7008901",
"0.69797534",
"0.6977283",
"0.69235563",
"0.6922842",
"0.6907462",
"0.6898637",
"0.6889818",
"0.6852117",
"0.68001455",
"0.6798665",
"0.67862236",
"0.67742586",
"0.6774019",
"0.6762232",
"0.67424613",
"0.67244434",
"0.6651039",
"0.66432",
"0.664038",
"0.66255057",
"0.6624915",
"0.6610475",
"0.65954244",
"0.6594486",
"0.6592655",
"0.6584795",
"0.6538191",
"0.65296346",
"0.6522314",
"0.6506433",
"0.65052706",
"0.64861876",
"0.6485968",
"0.64560187",
"0.6436518",
"0.6435111",
"0.64153045",
"0.6414634",
"0.64050597",
"0.6393272",
"0.6378203",
"0.63747895",
"0.6370878",
"0.636357",
"0.6347107",
"0.6345158",
"0.62907535",
"0.62818485",
"0.62752557",
"0.62704945",
"0.62603074",
"0.6251011",
"0.62262434",
"0.6220378",
"0.6206615",
"0.6204176",
"0.62022626",
"0.61909264",
"0.6190203",
"0.61838627",
"0.6174689",
"0.61601466",
"0.61413395",
"0.6122453",
"0.61102575",
"0.6097579",
"0.6092987",
"0.6077108",
"0.6075448",
"0.60734415",
"0.60650367",
"0.6053112",
"0.6052605",
"0.6042419",
"0.6017583",
"0.5986409",
"0.5982507",
"0.5955896",
"0.5947577",
"0.594557",
"0.5941821",
"0.5919893"
] | 0.71246123 | 12 |
Output logger output func | func (l *EchoLogrus) Output() io.Writer {
return l.Out
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func StdoutLogger(format string, args ...interface{}) {\n\tfmt.Print(fmt.Sprintf(format, args...) + \"\\n\")\n}",
"func (l *logHandler) Output(calldepth int, s string) error {\n\treturn l.w.Output(calldepth+1, s)\n}",
"func (l *StdLogger) output(level LogLevel, stackAdjust int, format string, args ...interface{}) {\n\tif level < Level {\n\t\treturn\n\t} \n\tll := l.dlog\n\tswitch level {\n\t\tcase Error: fallthrough\n\t\tcase Warning: ll = l.elog\n\t}\n\tif int(level) < len(headers) && int(level) >= 0 {\n\t\tformat = headers[int(level)] + format\n\t}\n\tll.Output(stackAdjust + 2, fmt.Sprintf(format, args...))\n}",
"func (l *Logger) Output(ctx context.Context, calldepth int, msg string) {\n\t// This is the bottom of the logger; everything calls this to do writes.\n\t// Handling nil here means everything should be able to be called on a nil\n\t// *Logger and not explode.\n\tif l == nil || l.emitter == nil {\n\t\treturn\n\t}\n\tif l.now == nil {\n\t\tl.now = time.Now\n\t}\n\te := Entry{\n\t\tTime: l.now(),\n\t\tTags: fromContext(ctx),\n\t\tMsg: msg,\n\t}\n\n\tif l.caller {\n\t\tvar ok bool\n\t\t_, e.File, e.Line, ok = runtime.Caller(calldepth)\n\t\tif !ok {\n\t\t\te.File = \"???\"\n\t\t\te.Line = 0\n\t\t}\n\t}\n\n\tl.emitter.Emit(ctx, &e)\n}",
"func (log *logger) output(pool *bufferPool, level logLevel, format string, a ...interface{}) {\n\tif level < log.minLevel {\n\t\treturn\n\t}\n\tvar msg string\n\tif len(a) == 0 {\n\t\tmsg = format\n\t} else {\n\t\tmsg = fmt.Sprintf(format, a...)\n\t}\n\tvar buf = log.formatter.Format(pool, level, msg)\n\tfor _, dev := range log.devices {\n\t\tdev.Write(buf.Bytes())\n\t}\n\tpool.put(buf)\n}",
"func (l *logWrapper) Output(calldepth int, s string) error {\n\treturn log.Output(calldepth+1, s)\n}",
"func (l *Logger) Output(callDepth int, level int, format string, v ...interface{}) {\n\tif l.level > level {\n\t\treturn\n\t}\n\n\tbuf := l.popBuf()\n\n\tif l.flag&Ltime > 0 {\n\t\tnow := time.Now().Format(TimeFormat)\n\t\tbuf = append(buf, now...)\n\t\tbuf = append(buf, \" - \"...)\n\t}\n\n\tif l.flag&Llevel > 0 {\n\t\tbuf = append(buf, LevelName[level]...)\n\t\tbuf = append(buf, \" - \"...)\n\t}\n\n\tif l.flag&Lfile > 0 {\n\t\t_, file, line, ok := runtime.Caller(callDepth)\n\t\tif !ok {\n\t\t\tfile = \"???\"\n\t\t\tline = 0\n\t\t} else {\n\t\t\tfor i := len(file) - 1; i > 0; i-- {\n\t\t\t\tif file[i] == '/' {\n\t\t\t\t\tfile = file[i+1:]\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tbuf = append(buf, file...)\n\t\tbuf = append(buf, \":[\"...)\n\n\t\tbuf = strconv.AppendInt(buf, int64(line), 10)\n\t\tbuf = append(buf, \"] - \"...)\n\t}\n\n\ts := fmt.Sprintf(format, v...)\n\n\tbuf = append(buf, s...)\n\n\tif s[len(s)-1] != '\\n' {\n\t\tbuf = append(buf, '\\n')\n\t}\n\n\tl.msg <- buf\n}",
"func (l *logger) FuncOut() {\n\tif !l.isDebug {\n\t\treturn\n\t}\n\tl.printLog(\"[DEBUG]\", \"%s\", \"OUT\")\n}",
"func (l *loggingT) output(s severity.Severity, logger *logWriter, buf *buffer.Buffer, depth int, file string, line int, alsoToStderr bool) {\n\tvar isLocked = true\n\tl.mu.Lock()\n\tdefer func() {\n\t\tif isLocked {\n\t\t\t// Unlock before returning in case that it wasn't done already.\n\t\t\tl.mu.Unlock()\n\t\t}\n\t}()\n\n\tif l.traceLocation.isSet() {\n\t\tif l.traceLocation.match(file, line) {\n\t\t\tbuf.Write(dbg.Stacks(false))\n\t\t}\n\t}\n\tdata := buf.Bytes()\n\tif logger != nil {\n\t\tif logger.writeKlogBuffer != nil {\n\t\t\tlogger.writeKlogBuffer(data)\n\t\t} else {\n\t\t\t// TODO: set 'severity' and caller information as structured log info\n\t\t\t// keysAndValues := []interface{}{\"severity\", severityName[s], \"file\", file, \"line\", line}\n\t\t\tif s == severity.ErrorLog {\n\t\t\t\tlogger.WithCallDepth(depth+3).Error(nil, string(data))\n\t\t\t} else {\n\t\t\t\tlogger.WithCallDepth(depth + 3).Info(string(data))\n\t\t\t}\n\t\t}\n\t} else if l.toStderr {\n\t\tos.Stderr.Write(data)\n\t} else {\n\t\tif alsoToStderr || l.alsoToStderr || s >= l.stderrThreshold.get() {\n\t\t\tos.Stderr.Write(data)\n\t\t}\n\n\t\tif logging.logFile != \"\" {\n\t\t\t// Since we are using a single log file, all of the items in l.file array\n\t\t\t// will point to the same file, so just use one of them to write data.\n\t\t\tif l.file[severity.InfoLog] == nil {\n\t\t\t\tif err := l.createFiles(severity.InfoLog); err != nil {\n\t\t\t\t\tos.Stderr.Write(data) // Make sure the message appears somewhere.\n\t\t\t\t\tl.exit(err)\n\t\t\t\t}\n\t\t\t}\n\t\t\tl.file[severity.InfoLog].Write(data)\n\t\t} else {\n\t\t\tif l.file[s] == nil {\n\t\t\t\tif err := l.createFiles(s); err != nil {\n\t\t\t\t\tos.Stderr.Write(data) // Make sure the message appears somewhere.\n\t\t\t\t\tl.exit(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif l.oneOutput {\n\t\t\t\tl.file[s].Write(data)\n\t\t\t} else {\n\t\t\t\tswitch s {\n\t\t\t\tcase severity.FatalLog:\n\t\t\t\t\tl.file[severity.FatalLog].Write(data)\n\t\t\t\t\tfallthrough\n\t\t\t\tcase severity.ErrorLog:\n\t\t\t\t\tl.file[severity.ErrorLog].Write(data)\n\t\t\t\t\tfallthrough\n\t\t\t\tcase severity.WarningLog:\n\t\t\t\t\tl.file[severity.WarningLog].Write(data)\n\t\t\t\t\tfallthrough\n\t\t\t\tcase severity.InfoLog:\n\t\t\t\t\tl.file[severity.InfoLog].Write(data)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tif s == severity.FatalLog {\n\t\t// If we got here via Exit rather than Fatal, print no stacks.\n\t\tif atomic.LoadUint32(&fatalNoStacks) > 0 {\n\t\t\tl.mu.Unlock()\n\t\t\tisLocked = false\n\t\t\ttimeoutFlush(ExitFlushTimeout)\n\t\t\tOsExit(1)\n\t\t}\n\t\t// Dump all goroutine stacks before exiting.\n\t\t// First, make sure we see the trace for the current goroutine on standard error.\n\t\t// If -logtostderr has been specified, the loop below will do that anyway\n\t\t// as the first stack in the full dump.\n\t\tif !l.toStderr {\n\t\t\tos.Stderr.Write(dbg.Stacks(false))\n\t\t}\n\n\t\t// Write the stack trace for all goroutines to the files.\n\t\ttrace := dbg.Stacks(true)\n\t\tlogExitFunc = func(error) {} // If we get a write error, we'll still exit below.\n\t\tfor log := severity.FatalLog; log >= severity.InfoLog; log-- {\n\t\t\tif f := l.file[log]; f != nil { // Can be nil if -logtostderr is set.\n\t\t\t\tf.Write(trace)\n\t\t\t}\n\t\t}\n\t\tl.mu.Unlock()\n\t\tisLocked = false\n\t\ttimeoutFlush(ExitFlushTimeout)\n\t\tOsExit(255) // C++ uses -1, which is silly because it's anded with 255 anyway.\n\t}\n\tbuffer.PutBuffer(buf)\n\n\tif stats := severityStats[s]; stats != nil 
{\n\t\tatomic.AddInt64(&stats.lines, 1)\n\t\tatomic.AddInt64(&stats.bytes, int64(len(data)))\n\t}\n}",
"func (l *Logger) Output(level int, calldepth int, format string, v ...interface{}) error {\n\tif level < l.level {\n\t\treturn nil\n\t}\n\n\ts := fmt.Sprintf(format+\"\\n\", v...)\n\n\tlevelName := LlevelName[level]\n\n\tnow := time.Now() // get this early.\n\tvar funcName string\n\tvar pc uintptr\n\tvar file string\n\tvar line int\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\tif l.flag&(Lshortfile|Llongfile) != 0 {\n\t\t// Release lock while getting caller info - it's expensive.\n\t\tl.mu.Unlock()\n\t\tvar ok bool\n\t\tpc, file, line, ok = runtime.Caller(calldepth)\n\t\tfuncName = runtime.FuncForPC(pc).Name()\n\t\tif !ok {\n\t\t\tfile = \"???\"\n\t\t\tline = 0\n\t\t}\n\t\tl.mu.Lock()\n\t}\n\n\tl.FormatHeader(now, funcName, file, line, levelName)\n\n\tl.buf = append(l.buf, \" | \"...)\n\tl.buf = append(l.buf, s...)\n\tif len(s) == 0 || s[len(s)-1] != '\\n' {\n\t\tl.buf = append(l.buf, '\\n')\n\t}\n\t_, err := l.out.Write(l.buf)\n\treturn err\n}",
"func (l *Logger) Output(calldepth int, s string) error {\n\t// Get time early if we need it.\n\tvar now time.Time\n\tif l.flag&(Ldate|Ltime|Lmicroseconds) != 0 {\n\t\tnow = time.Now()\n\t}\n\tvar file string\n\tvar line int\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\tif l.flag&(Lshortfile|Llongfile) != 0 {\n\t\t// Release lock while getting caller info - it's expensive.\n\t\tl.mu.Unlock()\n\t\tvar ok bool\n\t\t_, file, line, ok = runtime.Caller(calldepth)\n\t\tif !ok {\n\t\t\tfile = \"???\"\n\t\t\tline = 0\n\t\t}\n\t\tl.mu.Lock()\n\t}\n\tl.buf = l.buf[:0]\n\tl.formatHeader(&l.buf, now, file, line)\n\tl.buf = append(l.buf, s...)\n\tif len(s) == 0 || s[len(s)-1] != '\\n' {\n\t\tl.buf = append(l.buf, '\\n')\n\t}\n\tl.openFile()\n\t_, err := l.out.Write(l.buf)\n\tl.closeFile()\n\treturn err\n}",
"func (n *NullLogger) Out(_ []byte) {\n}",
"func (l *Logger) Output(lv Level, calldepth int, s string) error {\n\tif lv >= l.min {\n\t\tif (l.flag & Llevel) != 0 {\n\t\t\treturn l.lg.Output(calldepth, fmt.Sprintf(\"[%v] %s\", lv, s))\n\t\t}\n\t\treturn l.lg.Output(calldepth, s)\n\t}\n\treturn nil\n}",
"func Info(v ...interface{}) {\n std.logger.Output(2, fmt.Sprintln(v...))\n}",
"func (l *BufLogger) Output(calldepth int, level, s string) error {\n\tnow := time.Now()\n\tvar file string\n\tvar line int\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\tif l.def.Flags&(Lshortfile|Llongfile) != 0 {\n\t\t// Release lock while getting caller info - it's expensive.\n\t\tl.mu.Unlock()\n\t\tvar ok bool\n\t\t_, file, line, ok = runtime.Caller(calldepth)\n\t\tif !ok {\n\t\t\tfile = \"???\"\n\t\t\tline = 0\n\t\t}\n\t\tl.mu.Lock()\n\t}\n\t// colorful print begin\n\tif l.def.ColorfulPrint && l.def.OutputType != LogToFile {\n\t\tswitch level {\n\t\tcase LogLevelInfo:\n\t\t\tl.buf = append(l.buf, \"\\x1b[34m\"...)\n\t\tcase LogLevelWarn:\n\t\t\tl.buf = append(l.buf, \"\\x1b[33m\"...)\n\t\tcase LogLevelError:\n\t\t\tl.buf = append(l.buf, \"\\x1b[31m\"...)\n\t\tcase LogLevelFatal:\n\t\t\tl.buf = append(l.buf, \"\\x1b[35m\"...)\n\t\t}\n\n\t}\n\t// log prefix\n\tformatLogPrefix(&l.buf, now, level, file, line)\n\t// log content\n\tl.buf = append(l.buf, s...)\n\tif len(s) == 0 || s[len(s)-1] != '\\n' {\n\t\tl.buf = append(l.buf, '\\n')\n\t}\n\t// colorful print end\n\tif l.def.ColorfulPrint && l.def.OutputType != LogToFile {\n\t\tl.buf = append(l.buf, \"\\x1b[0m\"...)\n\t}\n\treturn nil\n}",
"func (l *NoopNSQLogger) Output(calldepth int, s string) error {\n\tlog.Info(s)\n\treturn nil\n}",
"func StandardOutLogger(out SuperMarketLog) {\n\toutputFormat := \"%s\\t%s\\t%s\\t%s\"\n\tlog.Printf(\n\t\toutputFormat,\n\t\tout.Method,\n\t\tout.RequestURI,\n\t\tout.Name,\n\t\tout.Time,\n\t)\n}",
"func writeLog() {\n}",
"func (l *NSQLogger) Output(calldepth int, s string) error {\n\treturn l.fn(s)\n}",
"func (e *executor) output(opts opts, output []string) {\n\tif opts.verbose {\n\t\tfor _, line := range output {\n\t\t\te.logLine(line)\n\t\t}\n\t}\n}",
"func FuncOut() {\n\tsimlog.FuncOut()\n}",
"func output(w io.Writer, l Level, v ...interface{}) error {\n\tif l < level {\n\t\treturn nil\n\t}\n\t_, err := fmt.Fprint(w, colorize(l, timestamp(levelPrefix[l]+fmt.Sprintln(v...))))\n\treturn err\n}",
"func logStdOut(s string) {\n\tlog.Printf(\"%s\\n\", string(s))\n}",
"func (l *Logger) Output(calldepth int, s string) error {\n\tnow := time.Now() // get this early.\n\tvar file string\n\tvar line int\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\tif l.flag&(Lshortfile|Llongfile) != 0 {\n\t\t/*Release lock while getting caller info - it's expensive.*/\n\t\tl.mu.Unlock()\n\t\tvar ok bool\n\t\t_, file, line, ok = runtime.Caller(calldepth)\n\t\tif !ok {\n\t\t\tfile = \"???\"\n\t\t\tline = 0\n\t\t}\n\t\tl.mu.Lock()\n\t}\n\tl.buf = l.buf[:0]\n\tl.formatHeader(&l.buf, now, file, line)\n\tl.buf = append(l.buf, s...)\n\tif len(s) == 0 || s[len(s)-1] != '\\n' {\n\t\tl.buf = append(l.buf, '\\n')\n\t}\n\tn, err := l.out.Write(l.buf)\n\tl.writtenSize += uint64(n)\n\tif l.writtenSize >= l.splitFileSize {\n\t\tif l.filename != \"\" {\n\t\t\tl.rotate()\n\t\t}\n\t\tl.writtenSize = 0\n\t}\n\treturn err\n}",
"func (d *DummyLogger) Info(format string) {}",
"func (fl *FakeLogger) Output(value bool) {\n\tfl.output = value\n}",
"func Output(lv Level, calldepth int, s string) error {\n\treturn std.Output(lv, calldepth, s)\n}",
"func outputf(w io.Writer, l Level, format string, v ...interface{}) error {\n\tif l < level {\n\t\treturn nil\n\t}\n\t_, err := fmt.Fprintf(w, colorize(l, timestamp(levelPrefix[l]+fmt.Sprintln(format))), v...)\n\treturn err\n}",
"func PrintOut(level string, value string) {\n\tgetLevel := func() string {\n\t\tswitch level {\n\t\tcase \"trace\":\n\t\t\treturn \"[TRACE]\"\n\t\tcase \"debug\":\n\t\t\treturn \"[DEBUG]\"\n\t\tcase \"error\":\n\t\t\treturn \"[ERROR]\"\n\t\tcase \"info\":\n\t\t\treturn \"[INFO] \"\n\t\tdefault:\n\t\t\treturn \"\"\n\t\t}\n\t}\n\n\tif logOn && strings.Contains(logLevels, level) {\n\t\tlogFile.WriteString(\n\t\t\tstrings.ToUpper(getLevel()) + \" \" +\n\t\t\t\ttime.Now().Format(\"Jan 02 15:04:05.000\") + \" \" +\n\t\t\t\tvalue + \"\\n\")\n\t}\n}",
"func (l *NoopNSQLogger) Output(calldepth int, s string) error {\n\treturn nil\n}",
"func (l *Logger) lprintln(lv Level, v ...interface{}) { _ = l.Output(lv, 4, fmt.Sprintln(v...)) }",
"func (l *Logger) Output(in *bufio.Scanner) error {\n\tfor in.Scan() {\n\t\t_ = l.Write(l.Prefix)\n\t\t_ = l.Write(in.Bytes())\n\t\t_ = l.Write(ln)\n\t}\n\n\tif err := in.Err(); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func (l *Logger) Println(v ...interface{}) { l.lprintln(INFO, v...) }",
"func Info(msg string) {\n log.Info(msg)\n}",
"func ConsoleOutput(message string, logger *log.Logger) {\r\n\tlogger.Println(message)\r\n}",
"func (lc StdoutLogger) Log(text string) {\n\tlc(text)\n}",
"func (logger GoLogger) Output() io.Writer {\n\treturn logger.output\n}",
"func (l *Logger) Println(v ...interface{}) { l.Output(2, fmt.Sprintln(v...)) }",
"func out(format string, v...interface{}) {\n\tfmt.Printf(format, v...)\n}",
"func (f *FakeOutput) Logger() *zap.SugaredLogger { return f.SugaredLogger }",
"func (l Logger) Log(args ...interface{}) {\n\tlog.SetOutput(os.Stdout)\n\tlog.Println(args...)\n}",
"func (l *Logger) Printf(format string, v ...interface{}) { l.lprintf(INFO, format, v...) }",
"func (sl *StdOutLogger) LogInfo(m ...interface{}) {\n\tsl.stdlog(fmt.Sprintf(\"%v\", fmt.Sprint(m...)))\n}",
"func (l Logger) Info(text ...string) {\n\tfmt.Fprintln(l.out, info(strings.Join(text, \" \")))\n}",
"func (l *Logger) Print(v ...interface{}) { l.lprint(INFO, v...) }",
"func SetOutput(level LogLevel, w io.Writer) func(*Logger) {\n\treturn func(l *Logger) {\n\t\tswitch level {\n\t\tcase Info:\n\t\t\tl.logInfo.SetOutput(w)\n\t\tcase Notice:\n\t\t\tl.logNotice.SetOutput(w)\n\t\tcase Warning:\n\t\t\tl.logWarning.SetOutput(w)\n\t\tcase Debug:\n\t\t\tl.logDebug.SetOutput(w)\n\t\tcase Trace:\n\t\t\tl.logTrace.SetOutput(w)\n\t\tcase Error:\n\t\t\tl.logError.SetOutput(w)\n\t\tcase Critical:\n\t\t\tl.logCritical.SetOutput(w)\n\t\tcase All:\n\t\t\tl.logInfo.SetOutput(w)\n\t\t\tl.logNotice.SetOutput(w)\n\t\t\tl.logWarning.SetOutput(w)\n\t\t\tl.logDebug.SetOutput(w)\n\t\t\tl.logTrace.SetOutput(w)\n\t\t\tl.logError.SetOutput(w)\n\t\t\tl.logCritical.SetOutput(w)\n\t\t}\n\t}\n}",
"func Info(format string, a ...interface{}) {\n\tif currentLogger == nil {\n\t\treturn\n\t}\n\tcurrentLogger.output(currentPool, _InfoLevel, format, a...)\n}",
"func writeLog(msg ...interface{}) {\n\tlogLocker.Lock()\n\tdefer logLocker.Unlock()\n\tif *confVerbose {\n\t\tcolor.Green(fmt.Sprint(time.Now().Format(\"02_01_06-15.04.05\"), \"[WRITE] ->\", msg))\n\t}\n}",
"func (l *Logger) Print(v ...interface{}) { l.Output(2, fmt.Sprint(v...)) }",
"func SetLogger(l *log.Logger) {\n StdOutLogger = l\n}",
"func Stdout(format string, a ...interface{}) {\n\tfmt.Printf(GetOutputPrefix(GetFuncName(1))+format+\"\\n\", a...)\n}",
"func (d *DummyLogger) Infof(format string, args ...interface{}) {}",
"func fmtLogger(msg string, args ...interface{}) {\n\tfmt.Printf(msg, args...)\n\tfmt.Println()\n}",
"func (logger *Logger) echo(w io.Writer, l level.Level, f string, a ...any) {\n\t// Lock the log object for change.\n\tlogger.mu.RLock()\n\tdefer logger.mu.RUnlock()\n\n\t// Get the stack frame.\n\tsf := getStackFrame(logger.skipStackFrames)\n\n\t// If an additional value is set for the output (writer),\n\t// use it with the default settings.\n\toutputs := logger.outputs\n\tif w != nil {\n\t\toutput := Default\n\t\toutput.Writer = w\n\t\toutput.isSystem = true\n\t\toutputs[\"*\"] = &output // this name can be used for system names\n\t}\n\n\t// Output message.\n\tfor _, o := range logger.outputs {\n\t\tvar msg string\n\t\thas, err := o.Levels.Contains(l)\n\t\tif !has || err != nil || !o.Enabled.IsTrue() {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Hide or show the prefix.\n\t\tprefix := logger.prefix\n\t\tif !o.WithPrefix.IsTrue() {\n\t\t\tprefix = \"\"\n\t\t}\n\n\t\t// Text or JSON representation of the message.\n\t\tif o.TextStyle.IsTrue() {\n\t\t\tmsg = textMessage(prefix, l, time.Now(), o, sf, f, a...)\n\t\t} else {\n\t\t\tmsg = objectMessage(prefix, l, time.Now(), o, sf, f, a...)\n\t\t}\n\n\t\t// Print message.\n\t\tfmt.Fprint(o.Writer, msg)\n\t}\n}",
"func (l *Logger) Output(w io.Writer) zerolog.Logger {\n\treturn l.logger.Output(w)\n}",
"func (l *Logger) Info(v ...interface{}) {\n\tif l.loglevel <= sInfo {\n\t\tl.output(sInfo, 0, fmt.Sprint(v...))\n\t} else {\n\t\treturn\n\t}\n}",
"func (logger *TestLogger) LogOutput() string {\n\treturn logger.logBuffer.String()\n}",
"func responseLogger(handler http.HandlerFunc) http.HandlerFunc {\n\treturn func (w http.ResponseWriter, r *http.Request){\n\t\tlog.Printf(\"\\n%s %s%s %s\",r.Method, r.Host, r.RequestURI, r.Proto )\n\t\thandler(w,r)\n\t}\n}",
"func (l *stubLogger) Infof(format string, args ...interface{}) {}",
"func writeLog(i ...interface{}) {\n\tif Debug {\n\t\tlog.Println(\"checkClient:\", fmt.Sprint(i...))\n\t}\n}",
"func logf(format string, args ...interface{}) {\n\tglobalLoggerLock.Lock()\n\tdefer globalLoggerLock.Unlock()\n\tif globalLogger != nil {\n\t\tglobalLogger.Output(2, fmt.Sprintf(format, args...))\n\t}\n}",
"func (l Logger) Out() io.Writer {\n\treturn l.out\n}",
"func Output(r cruntime.Manager, bs bootstrapper.Bootstrapper, cfg config.ClusterConfig, runner command.Runner, lines int, logOutput *os.File) error {\n\tcmds := logCommands(r, bs, cfg, lines, false)\n\tcmds[\"kernel\"] = \"uptime && uname -a && grep PRETTY /etc/os-release\"\n\n\tnames := []string{}\n\tfor k := range cmds {\n\t\tnames = append(names, k)\n\t}\n\n\tout.SetOutFile(logOutput)\n\tdefer out.SetOutFile(os.Stdout)\n\tout.SetErrFile(logOutput)\n\tdefer out.SetErrFile(os.Stderr)\n\n\tsort.Strings(names)\n\tfailed := []string{}\n\tfor i, name := range names {\n\t\tif i > 0 {\n\t\t\tout.Styled(style.Empty, \"\")\n\t\t}\n\t\tout.Styled(style.Empty, \"==> {{.name}} <==\", out.V{\"name\": name})\n\t\tvar b bytes.Buffer\n\t\tc := exec.Command(\"/bin/bash\", \"-c\", cmds[name])\n\t\tc.Stdout = &b\n\t\tc.Stderr = &b\n\t\tif rr, err := runner.RunCmd(c); err != nil {\n\t\t\tklog.Errorf(\"command %s failed with error: %v output: %q\", rr.Command(), err, rr.Output())\n\t\t\tfailed = append(failed, name)\n\t\t\tcontinue\n\t\t}\n\t\tl := \"\"\n\t\tscanner := bufio.NewScanner(&b)\n\t\tfor scanner.Scan() {\n\t\t\tl += scanner.Text() + \"\\n\"\n\t\t}\n\t\tif err := scanner.Err(); err != nil {\n\t\t\tklog.Errorf(\"failed to read output: %v\", err)\n\t\t\tfailed = append(failed, name)\n\t\t}\n\t\tout.Styled(style.Empty, l)\n\t}\n\n\tif len(failed) > 0 {\n\t\treturn fmt.Errorf(\"unable to fetch logs for: %s\", strings.Join(failed, \", \"))\n\t}\n\treturn nil\n}",
"func (logger *ChannelLogger) setLogOutput(outString string) error {\n\tswitch outString {\n\tcase \"stdout\":\n\t\tlogger.SetOutput(os.Stdout)\n\tcase \"stderr\":\n\t\tlogger.SetOutput(os.Stderr)\n\tdefault:\n\t\tf, err := os.OpenFile(outString, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0644)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tlogger.SetOutput(f)\n\t}\n\treturn nil\n}",
"func Log(fmt string, args ...interface{}) {}",
"func Info(v ...interface{}) {\n\tstdLogger.Print(v...)\n}",
"func (l *Logger) lprint(lv Level, v ...interface{}) { _ = l.Output(lv, 4, fmt.Sprint(v...)) }",
"func lInfo(v ...interface{}) {\n\tinfoLogger.Println(v...)\n}",
"func (e *Engine) Outputf(format string, a ...interface{}) {\n\ts := fmt.Sprintf(format, a...)\n\tif e.LogProtocolTraffic {\n\t\te.Debugf(\"< %s\", s)\n\t}\n\te.out.Print(s)\n}",
"func (n *Node) Output() error {\n\treader, err := n.Logs()\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer reader.Close()\n\n\tio.Copy(os.Stdout, reader)\n\n\treturn nil\n}",
"func ConsoleLogger() mux.MiddlewareFunc {\n return FormatLogger( log.Printf )\n}",
"func (l *Lgr) Info(args ...interface{}) {\n l.Logger.Info(args...)\n}",
"func (pl ProdLogger) Println(args ...interface{}) {\n\n}",
"func Write(logtype string, message string) {\n\tsuccess := false\n\tfor _, v := range logsOutput.File {\n\t\tif v.Logtype == logtype {\n\t\t\tlog.SetOutput(v.Logoutput)\n\t\t\tlog.Println(message)\n\t\t\tsuccess = true\n\t\t}\n\t}\n\tlog.SetOutput(os.Stderr)\n\tif !success {\n\t\tlog.Println(\"Type\", logtype, \"was not assigned for log output\")\n\t}\n}",
"func Info(format string, a ...interface{}) {\n\tif Level >= 3 {\n\t\ta, w := extractLoggerArgs(format, a...)\n\t\ts := fmt.Sprintf(label(format, InfoLabel), a...)\n\n\t\tif Color {\n\t\t\tw = color.Output\n\t\t\ts = color.MagentaString(s)\n\t\t}\n\n\t\tfmt.Fprintf(w, s)\n\t}\n}",
"func LogGetOutput() io.Writer {\n\tmuLogt.Lock()\n\tdefer muLogt.Unlock()\n\treturn logOutputCur\n}",
"func SetOutput(w io.Writer, tag string) {\n\tbaseLogger = newJSONLogger(newWriterLogger(w, tag), syslog.LOG_DEBUG, -1)\n}",
"func logInfo(format string, v ...interface{}) {\n\ts := fmt.Sprintf(format, v...)\n\tlogger.Println(s)\n}",
"func Log(msg string, a ...interface{}) {\n formatted := fmt.Sprintf(msg, a...)\n logger.Println(fmt.Sprintf(\"\\033[34;1mINFO:\\033[0m %s\", formatted))\n}",
"func LogOut(out io.Writer) {\n\tlock.Lock()\n\telog = log.New(out, \"[parajson] \", 0)\n\tlock.Unlock()\n}",
"func (l *Logger) Infof(fmt string, args ...interface{}) {\n}",
"func initLogger() {\n\tlogdir := viper.GetString(\"log.log_dir\")\n\tstdout := viper.GetBool(\"log_stdout\")\n\n\tvar writer io.Writer\n\n\tif logdir != \"\" {\n\t\tfolderPath, err := filepath.Abs(logdir)\n\t\tpanicIfError(err, fmt.Sprintf(\"Error on parsing log path: %s\", logdir))\n\n\t\tabspath, err := filepath.Abs(path.Join(logdir, \"run.log\"))\n\t\tpanicIfError(err, fmt.Sprintf(\"Error on parsing log file path: %s\", logdir))\n\n\t\terr = os.MkdirAll(folderPath, os.ModePerm)\n\t\tpanicIfError(err, fmt.Sprintf(\"Error on creating log dir: %s\", folderPath))\n\n\t\tif stdout {\n\t\t\tfmt.Println(\"Will be logged to stdout and \", abspath)\n\t\t\tfileWriter := mylog.RotateLog(abspath)\n\t\t\twriter = io.MultiWriter(os.Stdout, fileWriter)\n\t\t} else {\n\t\t\tfmt.Println(\"Will be logged to \", abspath)\n\t\t\twriter = mylog.RotateLog(abspath)\n\t\t}\n\t} else {\n\t\t// stdout only\n\t\tfmt.Println(\"Will be logged to stdout\")\n\t\twriter = os.Stdout\n\t}\n\tlogrus.SetOutput(writer)\n\n\t// Only log the warning severity or above.\n\tswitch viper.GetString(\"log.level\") {\n\tcase \"panic\":\n\t\tlogrus.SetLevel(logrus.PanicLevel)\n\tcase \"fatal\":\n\t\tlogrus.SetLevel(logrus.FatalLevel)\n\tcase \"error\":\n\t\tlogrus.SetLevel(logrus.ErrorLevel)\n\tcase \"warn\":\n\t\tlogrus.SetLevel(logrus.WarnLevel)\n\tcase \"info\":\n\t\tlogrus.SetLevel(logrus.InfoLevel)\n\tcase \"debug\":\n\t\tlogrus.SetLevel(logrus.DebugLevel)\n\tcase \"trace\":\n\t\tlogrus.SetLevel(logrus.TraceLevel)\n\tdefault:\n\t\tfmt.Println(\"Unknown level\", viper.GetString(\"log.level\"), \"Set to INFO\")\n\t\tlogrus.SetLevel(logrus.InfoLevel)\n\t}\n\n\tFormatter := new(logrus.TextFormatter)\n\tFormatter.ForceColors = false\n\tFormatter.DisableColors = true\n\tFormatter.TimestampFormat = \"06-01-02 15:04:05.000000\"\n\tFormatter.FullTimestamp = true\n\tlogrus.SetFormatter(Formatter)\n\n\t// redirect standard log to logrus\n\t//log.SetOutput(logrus.StandardLogger().Writer())\n\t//log.Println(\"Standard logger. 
Am I here?\")\n\tlineNum := viper.GetBool(\"log_line_number\")\n\tif lineNum {\n\t\t//filenameHook := filename.NewHook()\n\t\t//filenameHook.Field = \"line\"\n\t\t//logrus.AddHook(filenameHook)\n\t\tlogrus.SetReportCaller(true)\n\t}\n\tbyLevel := viper.GetBool(\"multifile_by_level\")\n\tif byLevel && logdir != \"\" {\n\t\tpanicLog, _ := filepath.Abs(path.Join(logdir, \"panic.log\"))\n\t\tfatalLog, _ := filepath.Abs(path.Join(logdir, \"fatal.log\"))\n\t\twarnLog, _ := filepath.Abs(path.Join(logdir, \"warn.log\"))\n\t\terrorLog, _ := filepath.Abs(path.Join(logdir, \"error.log\"))\n\t\tinfoLog, _ := filepath.Abs(path.Join(logdir, \"info.log\"))\n\t\tdebugLog, _ := filepath.Abs(path.Join(logdir, \"debug.log\"))\n\t\ttraceLog, _ := filepath.Abs(path.Join(logdir, \"trace.log\"))\n\t\twriterMap := lfshook.WriterMap{\n\t\t\tlogrus.PanicLevel: mylog.RotateLog(panicLog),\n\t\t\tlogrus.FatalLevel: mylog.RotateLog(fatalLog),\n\t\t\tlogrus.WarnLevel: mylog.RotateLog(warnLog),\n\t\t\tlogrus.ErrorLevel: mylog.RotateLog(errorLog),\n\t\t\tlogrus.InfoLevel: mylog.RotateLog(infoLog),\n\t\t\tlogrus.DebugLevel: mylog.RotateLog(debugLog),\n\t\t\tlogrus.TraceLevel: mylog.RotateLog(traceLog),\n\t\t}\n\t\tlogrus.AddHook(lfshook.NewHook(\n\t\t\twriterMap,\n\t\t\tFormatter,\n\t\t))\n\t}\n\tlogger := logrus.StandardLogger()\n\tlogrus.Debug(\"Logger initialized.\")\n\tbyModule := viper.GetBool(\"multifile_by_module\")\n\tif !byModule {\n\t\tlogdir = \"\"\n\t}\n\n\tdownloader.InitLoggers(logger, logdir)\n\tfetcher.InitLoggers(logger, logdir)\n\tp2p.InitLoggers(logger, logdir)\n\tog.InitLoggers(logger, logdir)\n\tsyncer.InitLoggers(logger, logdir)\n\tannsensus.InitLoggers(logger, logdir)\n\n}",
"func (c *T) Log(args ...interface{})",
"func (l Logger) Info(msg ...interface{}) {\n\tif l.Level <= log.InfoLevel {\n\t\tout := fmt.Sprint(msg...)\n\t\tout = checkEnding(out)\n\t\tfmt.Fprint(l.InfoOut, out)\n\t}\n}",
"func logSetOutput(iowr io.Writer) {\n\tlogt.SetOutput(iowr)\n\tlogOutputCur = iowr\n}",
"func (p *CreateSparseTest) OutLog() *sp.OutPort {\n\treturn p.Out(\"log\")\n}",
"func Log(msg string, err error) {\n\n}",
"func (slog stdLogger) Info(s string) {\n\tif logger.Logger != nil {\n\t\tlogger.Write([]byte(\" INFO \" + s))\n\t} else {\n\t\tlog.Printf(\" INFO \" + s)\n\t}\n}",
"func (l *Logger) Printf(msg string, v ...interface{}) {\n\terr := l.Logger.Output(callDepth, fmt.Sprintf(msg, v...))\n\tif err != nil {\n\t\tfmt.Println(\"Logger Error:\", err) //nolint:forbidigo\n\t}\n}",
"func (nl *NullLogger) LogInfo(m ...interface{}) {\n}",
"func Info(args ...interface{}) {\n LoggerOf(default_id).Info(args...)\n}",
"func (c *Client) Log(lvl level, message string, extra map[string]string) error {\n\tfor _, o := range c.cfg.Outputs {\n\t\terr := o.output(newLogBody(lvl.toString(), message, extra))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}",
"func (l *MessageLogger) Info(msg string) { l.logger.Info(msg) }",
"func (l nullLogger) Info(msg string, ctx ...interface{}) {}",
"func LogLogger( l *log.Logger ) mux.MiddlewareFunc {\n return FormatLogger( l.Printf )\n}",
"func (pl ProdLogger) Printf(format string, args ...interface{}) {\n\n}",
"func Info(v ...interface{}) error {\n\treturn output(os.Stdout, LevelInfo, v...)\n}",
"func (logProxy *loggerProxy)Info(msgfmt string, args ...interface{}) {\n var ch loggerProxyChannel\n ch.fnPtr = logProxy.logObj.Info\n ch.msg = logProxy.appendlog(msgfmt, args...)\n logProxy.logChannel <- ch\n}",
"func Write(info []byte, In string, Out string, WhoAmI string) {\n\t//Build complete String\n\tFullString := WhoAmI + \", \" + In + \", \" + Out + \", \" + string(info) + \"\\n\"\n\tmu.Lock()\n\tdefer mu.Unlock()\n\tlogfile.Write([]byte(FullString))\n}",
"func Logf(format string, a ...interface{}) {\n\tfmt.Fprintf(GinkgoWriter, \"INFO: \"+format+\"\\n\", a...)\n}"
] | [
"0.66758984",
"0.6622248",
"0.65872693",
"0.65407556",
"0.64986706",
"0.6486056",
"0.6422647",
"0.64099437",
"0.6400195",
"0.63558316",
"0.628476",
"0.62818784",
"0.626416",
"0.6256454",
"0.6244308",
"0.6233273",
"0.6202806",
"0.6198649",
"0.6172572",
"0.61362547",
"0.6107438",
"0.609975",
"0.6093363",
"0.60829914",
"0.6021515",
"0.59759605",
"0.5959793",
"0.59507936",
"0.5943085",
"0.5880919",
"0.5860901",
"0.5854084",
"0.58317703",
"0.58282727",
"0.58249116",
"0.5819428",
"0.58190215",
"0.58088386",
"0.57901996",
"0.5761894",
"0.5748754",
"0.57481927",
"0.5743752",
"0.57430536",
"0.5739525",
"0.57373106",
"0.5728736",
"0.5725681",
"0.57225597",
"0.5721312",
"0.5714161",
"0.57076126",
"0.5707356",
"0.5706552",
"0.569705",
"0.5696628",
"0.56868404",
"0.5686395",
"0.56860244",
"0.5683083",
"0.5678451",
"0.5674438",
"0.56651706",
"0.5633111",
"0.5624032",
"0.56210697",
"0.561868",
"0.56133115",
"0.5610938",
"0.56027997",
"0.56026477",
"0.559928",
"0.55876845",
"0.5587173",
"0.5567606",
"0.5561155",
"0.5552273",
"0.5550223",
"0.55491996",
"0.5543556",
"0.554306",
"0.55418646",
"0.5530762",
"0.55269796",
"0.55236274",
"0.5522653",
"0.55220526",
"0.55202734",
"0.55199474",
"0.5506972",
"0.5506481",
"0.5503677",
"0.55035335",
"0.5503334",
"0.5495245",
"0.54943776",
"0.5492899",
"0.5491558",
"0.5488858",
"0.5481326"
] | 0.5589328 | 72 |
Printj print json log | func (l *EchoLogrus) Printj(j log.JSON) {
l.Logger.WithFields(logrus.Fields(j)).Print()
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func PPrintJSON(xx interface{}) {\n\tyy, _ := json.MarshalIndent(xx, \"\", \" \")\n\tlog.Println(string(yy))\n}",
"func JsonPrint(data interface{}) {\n\tvar p []byte\n\tp, err := json.Marshal(data)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\tfmt.Printf(\"%s \\n\", p)\n}",
"func jsonPrint(posts Posts) {\n\tpostJSON, err := json.MarshalIndent(posts, \"\", \" \")\n\tif err != nil {\n\t\tlogrus.Error(err)\n\t\tpanic(err)\n\t}\n\tfmt.Printf(\"JSON data: \\n %s\\n\", string(postJSON))\n}",
"func printJSON(v interface{}) {\n\tw := json.NewEncoder(os.Stdout)\n\tw.SetIndent(\"\", \"\\t\")\n\terr := w.Encode(v)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}",
"func (l *jsonLogger) Print(v ...interface{}) {\n\tl.jsonLogParser.print(v...)\n}",
"func PrintJson(v interface{}) {\n\tjsonText, _ := json.MarshalIndent(v, \"\", \"\\t\")\n\tfmt.Println(string(jsonText))\n}",
"func jsonPrint(w http.ResponseWriter, out []string) {\n\tjsondat := &myJSON{Array: out}\n\tencjson, _ := json.Marshal(jsondat)\n\tfmt.Fprintf(w, \"%q\", string(encjson))\n}",
"func printJSON(v interface{}) {\n\tprintf(\"%v\\n\", util.FormatJSON(v))\n}",
"func PrintJSON(j interface{}) error {\n\tout, err := json.MarshalIndent(j, \"\", \" \")\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Println(string(out))\n\treturn nil\n}",
"func Print() {\n\tlog.Println(jsonFromItems(items))\n}",
"func (jp *JsonPrinter) printJson(line string, w io.Writer) {\n\tindentCnt := getIndent(line)\n\tindent := toSpaces(indentCnt)\n\tline = strings.TrimLeft(line, \" \")\n\n\t// { OR }(,) OR ](,)\n\tif strings.HasPrefix(line, \"{\") || strings.HasPrefix(line, \"}\") || strings.HasPrefix(line, \"]\") {\n\t\tfmt.Fprintf(w, \"%s%s\\n\", indent, line)\n\t\treturn\n\t}\n\n\t// \"key\": { OR \"key\": [ OR \"key\": value(,) OR value(,)\n\tsplitted := strings.SplitN(line, \": \", 2)\n\tkey := splitted[0]\n\n\t// value(,)\n\tif len(splitted) == 1 {\n\t\tfmt.Fprintf(w, \"%s%s\\n\", indent, jp.colorValue(key))\n\t\treturn\n\t}\n\n\tvalue := splitted[1]\n\tfmt.Fprintf(w, \"%s%s: %s\\n\", indent, jp.colorKey(key), jp.colorValue(value))\n}",
"func printJson(ag *alertGroup, m *sync.Mutex) {\n\tm.Lock()\n\tfor _, alert := range ag.Alerts {\n\t\tout := map[string]string{\"status\": alert.Status}\n\n\t\tfor k, v := range alert.Labels {\n\t\t\tout[k] = v\n\t\t}\n\t\tfor k, v := range alert.Annotations {\n\t\t\tout[k] = v\n\t\t}\n\t\tout[\"startsAt\"] = alert.StartsAt.Truncate(time.Millisecond).String()\n\t\tout[\"endsAt\"] = alert.EndsAt.Truncate(time.Millisecond).String()\n\n\t\tjout, err := json.Marshal(out)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\tfmt.Printf(\"%s\\n\", jout)\n\t}\n\tm.Unlock()\n}",
"func logJSON(v interface{}) {\n\tb, err := json.MarshalIndent(v, \"\", \" \")\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tlog.Println(string(b))\n}",
"func print_json(chunks []Chunk) {\n\tfmt.Println(\"{\")\n\tfor i := range chunks {\n\t\tpayload := chunks[i].payload\n\t\ttag := chunks[i].tag\n\t\tif i > 0 {\n\t\t\tfmt.Println(\",\")\n\t\t}\n\t\tfmt.Printf(\" \\\"%s\\\": \\\"%s\\\"\", tag, payload)\n\t}\n\tfmt.Printf(\"\\n}\\n\")\n}",
"func (out *JsonOutput) Print() {\n\tout.ResponseWriter.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tout.ResponseWriter.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\tjson.NewEncoder(out.ResponseWriter).Encode(out.APIOutput)\n}",
"func printJSON(v interface{}) {\n\txb, err := json.MarshalIndent(v, \"\", \" \")\n\tif err != nil {\n\t\tfmt.Println(\"outputJSON() could not marshal the value -\", err)\n\t\treturn\n\t}\n\tfmt.Println(string(xb))\n}",
"func jsonPrintUser(w http.ResponseWriter, out []userInfo) {\n\tjsondat := &myJSONUser{Array: out}\n\tencjson, _ := json.Marshal(jsondat)\n\tfmt.Fprintf(w, \"%q\", string(encjson))\n}",
"func (printer JSON) Print(message interface{}) {\n\ts, _ := prettyjson.Marshal(message)\n\t_, _ = fmt.Fprintf(printer.writer, \"%v\\n\", string(s))\n}",
"func (cli *CLI) PrintJSON(v interface{}) int {\n\tvar res slackapi.Response\n\n\tout, err := json.MarshalIndent(v, \"\", \"\\x20\\x20\")\n\n\tif err != nil {\n\t\tfmt.Printf(\"{\\\"ok\\\":false, \\\"error\\\":\\\"json.encode; %s\\\"}\\n\", err.Error())\n\t\treturn 1\n\t}\n\n\tif err := json.NewDecoder(bytes.NewReader(out)).Decode(&res); err != nil {\n\t\tfmt.Printf(\"{\\\"ok\\\":false, \\\"error\\\":\\\"json.decode; %s\\\"}\\n\", err.Error())\n\t\treturn 1\n\t}\n\n\tif !res.Ok && res.Error != \"\" {\n\t\tif err := json.NewEncoder(os.Stdout).Encode(res); err != nil {\n\t\t\tlog.Fatalln(\"json.encode;\", err)\n\t\t}\n\n\t\treturn 1\n\t}\n\n\tfmt.Printf(\"%s\\n\", out)\n\treturn 0\n}",
"func jsonPrintDetails() {\n\n\tb, err := json.Marshal(alertDetails[name])\n\n\tif err != nil {\n\t\tlog.Printf(\"Unable to convert Detailed JSON Data, error: %v\\n\", err)\n\t\treturn\n\t}\n\n\tfmt.Println(string(b))\n}",
"func jsonPrint() {\n\n\tvar filteredData []alertDataJSON\n\tvar temp alertDataJSON\n\n\tfor _, each := range allAlertData {\n\t\tif filteredAlerts[each.Name] == 1 {\n\t\t\ttemp.Name = each.Name\n\t\t\ttemp.Service = each.Service\n\t\t\ttemp.Severity = each.Service\n\t\t\ttemp.Tag = each.Tag\n\t\t\ttemp.Starts = timeDiff(time.Now(), each.StartsAt, 0)\n\t\t\tif each.EndsAt == time.Unix(maxtstmp, 0).UTC() {\n\t\t\t\ttemp.Ends = \"Undefined\"\n\t\t\t\ttemp.Duration = \"Undefined\"\n\t\t\t} else {\n\t\t\t\ttemp.Ends = timeDiff(time.Now(), each.EndsAt, 0)\n\t\t\t\ttemp.Duration = timeDiff(each.StartsAt, each.EndsAt, 1)\n\t\t\t}\n\n\t\t\tfilteredData = append(filteredData, temp)\n\t\t}\n\t}\n\n\tb, err := json.Marshal(filteredData)\n\n\tif err != nil {\n\t\tlog.Printf(\"Unable to convert Filtered JSON Data, error: %v\\n\", err)\n\t\treturn\n\t}\n\n\tfmt.Println(string(b))\n}",
"func PrintToJSON(val interface{}) {\n\tb, err := json.MarshalIndent(val, \"\", \" \")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tfmt.Println(string(b))\n\n}",
"func (n *AgentNotify) DumpJSON() {\n\tfmt.Println(n.getJSON())\n}",
"func (l *jsonLogger) Printf(format string, v ...interface{}) {\n\tl.jsonLogParser.printf(format, v...)\n}",
"func PrintJSON(w http.ResponseWriter, output interface{}, httpStatus int) []byte {\n\toutBuf, err := json.Marshal(output)\n\tif err != nil {\n\t\tHTTPOut(w, `{\"Errors\":\"JSON marshal failure on output: `+err.Error()+`\"}`, http.StatusInternalServerError)\n\t} else {\n\t\tHTTPOut(w, string(outBuf), httpStatus)\n\t}\n\treturn outBuf\n}",
"func (handler *ConsoleLogHandler) Format() LogFormat {\r\n return JSONFormat\r\n}",
"func (aptRestorer *APTRestorer) logJson(restoreState *models.RestoreState, jsonString string) {\n\ttimestamp := time.Now().UTC().Format(time.RFC3339)\n\tstartMessage := fmt.Sprintf(\"-------- BEGIN %s | WorkItem: %d | Time: %s --------\",\n\t\trestoreState.WorkItem.ObjectIdentifier, restoreState.WorkItem.Id, timestamp)\n\tendMessage := fmt.Sprintf(\"-------- END %s | WorkItem: %d | Time: %s --------\",\n\t\trestoreState.WorkItem.ObjectIdentifier, restoreState.WorkItem.Id, timestamp)\n\taptRestorer.Context.JsonLog.Println(startMessage, \"\\n\",\n\t\tjsonString, \"\\n\",\n\t\tendMessage)\n}",
"func JSONLogger(r *http.Request, status int, len int64, d time.Duration) {\n\tos.Stderr.WriteString(JSONLogMessage(time.Now, r.Method, r.URL, status, len, d, nil))\n}",
"func printJSONResult(v interface{}) {\n\tdata, err := json.Marshal(v)\n\tif err != nil {\n\t\tfmt.Fprintf(os.Stderr, \"cannot marshal the result into a JSON: %v\\n\", err)\n\t\treturn\n\t}\n\tfmt.Printf(\"%s\\n\", data)\n}",
"func PrintAsJSON(v interface{}) {\n\tjson.NewEncoder(os.Stdout).Encode(v)\n}",
"func (l *jsonLogger) Println(v ...interface{}) {\n\tl.jsonLogParser.print(v...)\n}",
"func (ps pumaStatusFinalOutput) printAndBuildJSON() error {\n\tb, err := json.Marshal(ps)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Println(string(b))\n\n\treturn nil\n}",
"func JSONLogMessage(now func() time.Time, method string, u *url.URL, status int, length int64, d time.Duration, fields map[string]string) string {\n\tc := \"http_\" + strconv.Itoa(status/100) + \"xx\"\n\ts := `{` +\n\t\t`\"time\":\"` + now().UTC().Format(time.RFC3339) + `\",` +\n\t\t`\"src\":\"rl\",` +\n\t\t`\"status\":` + strconv.Itoa(status) + `,` +\n\t\t`\"` + c + `\":1,` +\n\t\t`\"len\":` + strconv.FormatInt(length, 10) + `,` +\n\t\t`\"ms\":` + strconv.FormatInt(d.Nanoseconds()/1000000, 10) + `,` +\n\t\t`\"method\":\"` + jsonEscape(method) + `\",` +\n\t\t`\"path\":\"` + jsonEscape(u.Path) + `\"`\n\tfor k, v := range fields {\n\t\ts += `,\"` + k + `\":\"` + v + `\"`\n\t}\n\treturn s + \"}\\n\"\n}",
"func (r *RepoContent) Print(o io.Writer) error {\n\tb, err := json.MarshalIndent(r, \"\", \" \")\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to marshal report: %v\", err)\n\t}\n\t_, err = o.Write(b)\n\treturn err\n}",
"func dumpJSON(o interface{}) error {\n\tjs, err := json.MarshalIndent(o, \"\", \" \")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Printf(\"```\\n%s\\n```\\n\\n\", js)\n\n\treturn nil\n}",
"func LogJSON(data interface{}) string {\n\tjsonData, err := json.Marshal(data)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\tvar prettyJSON bytes.Buffer\n\terr = json.Indent(&prettyJSON, jsonData, \"\", \" \")\n\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\treturn prettyJSON.String()\n}",
"func PrintJSON(anyJson interface{}) string {\n\tif anyJson == nil {\n\t\treturn \"null\"\n\t}\n\n\tbt, err := json.Marshal(anyJson)\n\tif err != nil {\n\t\treturn \"{ <invalid json> }\"\n\t}\n\n\t// Pretty-print\n\tvar out bytes.Buffer\n\terr = json.Indent(&out, bt, \"\", \" \")\n\tif err != nil {\n\t\treturn \"{ <indent error> }\"\n\t}\n\treturn out.String()\n}",
"func printResponse(resp interface{}, err error) {\n\tif err == nil {\n\t\tjtext, err := json.MarshalIndent(resp, \"\", \" \")\n\t\tif err == nil {\n\t\t\tfmt.Println(string(jtext))\n\t\t}\n\t}\n\tif err != nil {\n\t\tfmt.Printf(\"err: %s\\n\", err)\n\t}\n}",
"func (n *NetOp) LogJSON(b []byte) {\n\tif n.Logger != nil {\n\t\tn.Logger.LogJSON(b)\n\t}\n}",
"func ProjectJSONPrint(t dto.Project, w io.Writer) error {\n\treturn json.NewEncoder(w).Encode(t)\n}",
"func (context *Context) Print() error {\n\t// fmt.Println(\"==> context: \", context)\n\tb, err := json.Marshal(context)\n\tif err != nil {\n\t\treturn err\n\t}\n\tos.Stdout.Write(b)\n\treturn nil\n}",
"func (device *Device) Print() {\n\tjsonOutput, err := json.MarshalIndent(device, \"\", \" \")\n\tif err == nil {\n\t\tfmt.Println(string(jsonOutput))\n\t} else {\n\t\tfmt.Println(\"ERROR Marshaling to Json:\", err)\n\t}\n}",
"func JSONPrinter(writer io.Writer) Printer {\n\treturn &JSON{writer}\n}",
"func TestJsonFormat(t *testing.T) {\n\tout := strings.Builder{}\n\tSetOutput(&out)\n\tSetLevelStr(\"DEBUG\")\n\tSetLogFormat(JSON)\n\tt.Run(\"test setting of JSON log format\", func(t *testing.T) {\n\t\tprintAllLevels(\"test json format\")\n\t\tif !(strings.Count(out.String(), \"{\\\"level\\\":\\\"DEBUG\\\",\\\"time\\\":\\\"\") == 2 &&\n\t\t\tstrings.Count(out.String(), \"\\\",\\\"location\\\":\\\"logger_test.go:\") == 10 &&\n\t\t\tstrings.Count(out.String(), \"\\\"goroutine\\\":\") == 10 &&\n\t\t\tstrings.Count(out.String(), \",\\\"message\\\":\\\"--> test json format\\\"}\") == 10) {\n\t\t\tt.Errorf(\"Log should be in JSON format:\\n%v\", out.String())\n\t\t}\n\t})\n}",
"func JSONLog(w io.Writer) LogFunc {\n\treturn func(v interface{}) {\n\t\tdata, err := json.Marshal(v)\n\t\tif err != nil {\n\t\t\tdata, err = json.Marshal(struct {\n\t\t\t\tContext string `json:\"context\"`\n\t\t\t\tDebugData string `json:\"debugData\"`\n\t\t\t\tError string `json:\"error\"`\n\t\t\t}{\n\t\t\t\tContext: \"Error marshaling 'debugData' into JSON\",\n\t\t\t\tDebugData: spew.Sdump(v),\n\t\t\t\tError: err.Error(),\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\t// We really REALLY should never get here\n\t\t\t\tlog.Println(\"ERROR MARSHALLING THE MARSHALLING ERROR!:\", err)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif _, err := fmt.Fprintf(w, \"%s\\n\", data); err != nil {\n\t\t\tlog.Println(\"ERROR WRITING TO LOGGER:\", err)\n\t\t}\n\t}\n}",
"func LogJSON(level Level, module string, data interface{}) {\n\tb, err := json.MarshalIndent(data, \"\", \" \")\n\tif err == nil {\n\t\tlconf.Logger.Log(level, module, \"%s\", string(b))\n\t}\n}",
"func PrettyPrint(val interface{}) {\n\to, e := json.MarshalIndent(val, \"\", \" \")\n\tif e != nil {\n\t\tlog.Panic(e.Error())\n\t}\n\tfmt.Printf(string(o))\n\tfmt.Println()\n}",
"func Show(v interface{}) {\n\tdata, _ := json.MarshalIndent(v, \"\", \" \")\n\tfmt.Println(string(data))\n}",
"func prettyPrint(v interface{}) {\n\tencoder := json.NewEncoder(os.Stdout)\n\tencoder.SetIndent(\"\", \" \")\n\n\tif err := encoder.Encode(v); err != nil {\n\t\tlog.Warning(\"Unable to pretty-print tunnel information, will dump raw data instead...\")\n\t\tfmt.Printf(\"%+v\\n\", v)\n\t}\n}",
"func (c *Config) Print() error {\n\tenc := json.NewEncoder(os.Stdout)\n\tenc.SetIndent(\"\", \" \")\n\tif err := enc.Encode(c); err != nil {\n\t\treturn errors.New(fmt.Sprint(\"error reporting configuration:\", err))\n\t}\n\treturn nil\n}",
"func DumpJSON(v interface{}) (n int, err error) {\n\tdata, err := json.MarshalIndent(v, \"\", \" \")\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn fmt.Printf(\"%s\\n\\n\", data)\n}",
"func prettyPrint(data interface{}) {\n\tvar p []byte\n\t// var err := error\n\tp, err := json.MarshalIndent(data, \"\", \"\\t\")\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\tfmt.Printf(\"%s \\n\", p)\n}",
"func ClientJSONPrint(t dto.Client, w io.Writer) error {\n\treturn json.NewEncoder(w).Encode(t)\n}",
"func JSONPrintTblProduct(c *gin.Context) {\n\tDb, err := config.DbConnect()\n\tif err != nil {\n\t\tpanic(\"Not Connect database\")\n\t}\n\ttabelproduct := []entities.TabelProduct{}\n\tsqlProduct := `SELECT * FROM tabelproduct;`\n\tdataList := models.ListTblProduct(Db, sqlProduct)\n\ttabelproduct = dataList\n\tDb.Close()\n\tc.JSON(http.StatusOK, tabelproduct)\n}",
"func (t *Tree) PrintTree() {\n\tb, err := json.MarshalIndent(t, \"\", \" \")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tfmt.Println(string(b))\n}",
"func Dump(vals ...interface{}) {\n\tfor _, v := range vals {\n\t\tfmt.Println(JsonDump(v))\n\t}\n}",
"func PrintOutputJSON(output interface{}) error {\n\tb, err := json.Marshal(output)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Println(string(b))\n\treturn nil\n}",
"func PrintJSONReport(path, filename string, body interface{}) error {\n\tif body != \"\" {\n\t\tsummary, err := getSummary(body)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfor idx := range summary.Queries {\n\t\t\tsummary.Queries[idx].CISBenchmarkName = \"\"\n\t\t\tsummary.Queries[idx].CISBenchmarkVersion = \"\"\n\t\t\tsummary.Queries[idx].CISDescriptionID = \"\"\n\t\t\tsummary.Queries[idx].CISDescriptionText = \"\"\n\t\t\tsummary.Queries[idx].CISRationaleText = \"\"\n\t\t}\n\t\tsummary.Version = constants.Version\n\t\tbody = summary\n\t}\n\n\treturn ExportJSONReport(path, filename, body)\n}",
"func (l *EchoLogrus) Debugj(j log.JSON) {\n\tl.Logger.WithFields(logrus.Fields(j)).Debug()\n}",
"func PrintMessage(w http.ResponseWriter, ResponseArray interface{}, status int) {\n\tw.WriteHeader(status)\n\tjson.NewEncoder(w).Encode(ResponseArray)\n\treturn\n}",
"func JSON(data interface{}, args ...interface{}) string {\n\tw := Writer{\n\t\tOptions: ojg.DefaultOptions,\n\t\tWidth: 80,\n\t\tMaxDepth: 3,\n\t\tSEN: false,\n\t}\n\tw.config(args)\n\tb, _ := w.encode(data)\n\n\treturn string(b)\n}",
"func (p *Parser) Dump() error {\n return p.json.Dump()\n}",
"func (lrt *LogRoundTripper) formatJSON(raw []byte) string {\n\tvar data map[string]interface{}\n\n\terr := json.Unmarshal(raw, &data)\n\tif err != nil {\n\t\tklog.V(6).Infof(\"Unable to parse JSON: %s, data: %s\", err, string(raw))\n\t\treturn string(raw)\n\t}\n\n\t// Mask known password fields\n\tif v, ok := data[\"auth\"].(map[string]interface{}); ok {\n\t\tif v, ok := v[\"identity\"].(map[string]interface{}); ok {\n\t\t\tif v, ok := v[\"password\"].(map[string]interface{}); ok {\n\t\t\t\tif v, ok := v[\"user\"].(map[string]interface{}); ok {\n\t\t\t\t\tv[\"password\"] = \"***\"\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Ignore the catalog\n\tif v, ok := data[\"token\"].(map[string]interface{}); ok {\n\t\tif _, ok := v[\"catalog\"]; ok {\n\t\t\treturn \"\"\n\t\t}\n\t}\n\n\tpretty, err := json.MarshalIndent(data, \"\", \" \")\n\tif err != nil {\n\t\tklog.V(6).Infof(\"Unable to re-marshal JSON: %s\", err)\n\t\treturn string(raw)\n\t}\n\n\treturn string(pretty)\n}",
"func loggerJSON(l jsonLog) {\n\tl.Date = time.Now()\n\tif l.Level == 0 {\n\t\tl.Level = 6\n\t}\n\tif Config.MinLogLevel >= l.Level {\n\t\tif l.Version == \"\" {\n\t\t\tl.Version = \"1.1\"\n\t\t}\n\t\tif l.Host == \"\" {\n\t\t\tl.Host = \"Quotes\"\n\t\t}\n\t\tif l.ResponseCode == 0 {\n\t\t\tl.ResponseCode = 200\n\t\t}\n\t\t_ = os.MkdirAll(\"./logs/\", os.ModePerm)\n\t\tf, err := os.OpenFile(\"./logs/logs.json\", os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Error opening logs.json file: %v\", err)\n\t\t}\n\t\tdata, _ := json.Marshal(l)\n\t\tf.WriteString(string(data) + \"\\n\")\n\t\tf.Close()\n\t}\n}",
"func JsonFileLogger(out SuperMarketLog) error {\n\toutput, _ := json.Marshal(out) // Create he output to log\n\tstringOutput := string(output) + \"\\n\" // Append a newline to the output\n\t//If the file doesn't exist, create it or append to the file\n\tf, err := os.OpenFile(\"rest.log\", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tif _, err := f.Write([]byte(stringOutput)); err != nil { //Write out to the log\n\t\tlog.Fatal(err)\n\t}\n\tif err := f.Close(); err != nil { //Close the writer\n\t\tlog.Fatal(err)\n\t}\n\treturn err\n}",
"func JSON(response []byte) {\n\t// pretty-print the json\n\tutils.PrintJSON(response)\n}",
"func PrettyPrintJSON(jsonStr string) {\n\n\tfmt.Println(PrettyJSON(jsonStr))\n\n}",
"func printReqLog(ctx context.Context, req interface{}) {\n\tjsoon, _ := json.Marshal(ctx)\n\tlog.Println(string(jsoon))\n\n\tjsoon, _ = json.Marshal(req)\n\tlog.Println(string(jsoon))\n}",
"func ClientsJSONPrint(t []dto.Client, w io.Writer) error {\n\treturn json.NewEncoder(w).Encode(t)\n}",
"func PrintAndJsonOutput(result []*kubestr.TestOutput, output string, outfile string) bool {\n\tif output == \"json\" {\n\t\tjsonRes, _ := json.MarshalIndent(result, \"\", \" \")\n\t\tif len(outfile) > 0 {\n\t\t\terr := os.WriteFile(outfile, jsonRes, 0666)\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Error writing output:\", err.Error())\n\t\t\t\tos.Exit(2)\n\t\t\t}\n\t\t} else {\n\t\t\tfmt.Println(string(jsonRes))\n\t\t}\n\t\treturn true\n\t}\n\treturn false\n}",
"func (j *JSON) Format(rec *logr.LogRec, stacktrace bool, buf *bytes.Buffer) (*bytes.Buffer, error) {\n\tj.once.Do(j.applyDefaultKeyNames)\n\n\tif buf == nil {\n\t\tbuf = &bytes.Buffer{}\n\t}\n\tenc := gojay.BorrowEncoder(buf)\n\tdefer func() {\n\t\tenc.Release()\n\t}()\n\n\tsorter := j.ContextSorter\n\tif sorter == nil {\n\t\tsorter = j.defaultContextSorter\n\t}\n\n\tjlr := JSONLogRec{\n\t\tLogRec: rec,\n\t\tJSON: j,\n\t\tstacktrace: stacktrace,\n\t\tsorter: sorter,\n\t}\n\n\terr := enc.EncodeObject(jlr)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tbuf.WriteByte('\\n')\n\treturn buf, nil\n}",
"func PrettyPrint(v interface{}) {\n\tb, _ := json.MarshalIndent(v, \"\", \" \")\n\tprintln(string(b))\n}",
"func (l *FormattedJSONLogger) Log(keyvals ...interface{}) error {\n\tm := make(map[string]interface{}, (len(keyvals)+1)/2)\n\n\tfor i := 0; i < len(keyvals); i += 2 {\n\t\tvar v interface{} = \"(!MISSING)\"\n\t\tif i+1 < len(keyvals) {\n\t\t\tv = keyvals[i+1]\n\t\t}\n\t\tm[fmt.Sprintf(\"%s\", keyvals[i])] = v\n\t}\n\n\tb, err := json.Marshal(m)\n\tif err != nil {\n\t\t// TODO: Write this error to logs.\n\t\treturn err\n\t}\n\t_, err = fmt.Fprintln(l.w, string(b))\n\treturn err\n}",
"func (l *JSONLogger) Log(level string, format string, args ...interface{}) {\n\tpayload := payload{\n\t\ttime.Now(),\n\t\tlevel,\n\t\tl.service,\n\t\tl.name,\n\t\tfmt.Sprintf(format, args...),\n\t}\n\tenc, _ := json.Marshal(payload)\n\tfmt.Fprintf(l.writer, \"%s\\n\", enc)\n}",
"func (r Result) PrettyPrintJSON() string {\n\tpretty, err := json.MarshalIndent(r.Body, \"\", \" \")\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\treturn string(pretty)\n}",
"func (l logrec) String() string {\n\tout, _ := json.Marshal(&l)\n\treturn string(out)\n}",
"func PrintPerson(name, address string) error {\n\tp := map[string]string{\n\t\t\"name\": name,\n\t\t\"address\": address,\n\t}\n\tbarr, err := json.Marshal(p)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Println(string(barr))\n\treturn nil\n}",
"func ExamplePrint() {\n\tsetup()\n\n\tlog.Print(\"hello world\")\n\t// Output: {\"level\":\"debug\",\"time\":1199811905,\"message\":\"hello world\"}\n}",
"func (p *Pool) Print() []byte {\n\ttype ClientShow struct {\n\t\tInuse int `json:\"inuse\"`\n\t\tTimeUsed time.Time `json:\"time_used\"`\n\t\tTimeInit time.Time `json:\"time_init\"`\n\t\tActive bool `json:\"active\"`\n\t}\n\ttype PoolShow struct {\n\t\tPoolSize int `json:\"pool_size\"`\n\t\tIdleConnLen int `json:\"idleConn_len\"`\n\t\tClients []ClientShow `json:\"clients\"`\n\t\tQueueSize int `json:\"queue_size\"`\n\t}\n\n\trep := new(PoolShow)\n\trep.PoolSize = p.poolSize\n\trep.IdleConnLen = p.idleConnQueue.size()\n\trep.QueueSize = p.requestQueue.size()\n\tfor _, conn := range p.clients {\n\t\tnewClient := ClientShow{\n\t\t\tInuse: conn.inUse,\n\t\t\tTimeInit: conn.timeInitiated,\n\t\t\tTimeUsed: conn.timeUsed,\n\t\t\tActive: conn.active(),\n\t\t}\n\t\trep.Clients = append(rep.Clients, newClient)\n\t}\n\tresult, err := json.Marshal(rep)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\treturn result\n}",
"func Debug(config *CHYLE, logger *log.Logger) {\n\tif d, err := json.MarshalIndent(config, \"\", \" \"); err == nil {\n\t\tlogger.Println(string(d))\n\t}\n}",
"func EnableJSON() {\n\tlogrus.SetFormatter(&logrus.JSONFormatter{})\n}",
"func JsonOutput(data interface{}) {\n\tjson.NewEncoder(os.Stdout).Encode(data)\n}",
"func PrintClass(a interface{}) {\n\tj, _ := json.Marshal(a)\n\tfmt.Println(string(j))\n}",
"func ProjectsJSONPrint(t []dto.Project, w io.Writer) error {\n\treturn json.NewEncoder(w).Encode(t)\n}",
"func resultHandler(res []byte) {\n\tresult := prettyJSON(string(res))\n\tfmt.Printf(result)\n}",
"func (r *Response) pretty() string {\n\n\tvar body api.Response\n\t_ = json.Unmarshal(r.recorder.Body.Bytes(), &body)\n\n\tlr := LogResponse{Res{\n\t\tCode: r.recorder.Code,\n\t\tHeaders: r.recorder.Header(),\n\t\tBody: body,\n\t}}\n\tres, err := json.MarshalIndent(lr, \"\", \" \")\n\tgomega.Expect(err).To(gomega.BeNil())\n\treturn string(res)\n}",
"func JSON(w http.ResponseWriter, data interface{}) {\n\tresponse, err := json.Marshal(data)\n\tif err != nil {\n\t\tlog.Println(\"Failed to generate json \")\n\t}\n\tfmt.Fprint(w, string(response))\n}",
"func (ed EtcdDump) PrintDataAsJSON(filter []string) (*bytes.Buffer, error) {\n\tbuffer := new(bytes.Buffer)\n\tkeys := ed.getSortedKeys()\n\tvar wasError error\n\n\tvpps, isData := processFilter(keys, filter)\n\tif !isData {\n\t\tfmt.Fprintf(buffer, \"No data to display for VPPS: %s\\n\", vpps)\n\t\treturn buffer, wasError\n\t}\n\n\tfor _, key := range keys {\n\t\tif isNotInFilter(key, vpps) {\n\t\t\tcontinue\n\t\t}\n\n\t\tvd, _ := ed[key]\n\t\t// Obtain raw data\n\t\tifaceConfDataRoot, ifaceConfKeys := getInterfaceConfigData(vd.Interfaces)\n\t\tifaceStateDataRoot, ifaceStateKeys := getInterfaceStateData(vd.Interfaces)\n\t\tl2ConfigDataRoot, l2Keys := getL2ConfigData(vd.BridgeDomains)\n\t\tl2StateDataRoot, l2Keys := getL2StateData(vd.BridgeDomains)\n\t\tl2FibDataRoot, l2FibKeys := getL2FIBData(vd.FibTableEntries)\n\t\tl3FibDataRoot, l3FibKeys := getL3FIBData(vd.StaticRoutes)\n\n\t\t// Interface config data\n\t\tjsConfData, err := json.MarshalIndent(ifaceConfDataRoot, \"\", indent)\n\t\tif err != nil {\n\t\t\twasError = err\n\t\t}\n\t\t// Interface state data\n\t\tjsStateData, err := json.MarshalIndent(ifaceStateDataRoot, \"\", indent)\n\t\tif err != nil {\n\t\t\twasError = err\n\t\t}\n\t\t// L2 config data\n\t\tjsL2ConfigData, err := json.MarshalIndent(l2ConfigDataRoot, \"\", indent)\n\t\tif err != nil {\n\t\t\twasError = err\n\t\t}\n\t\t// L2 state data\n\t\tjsL2StateData, err := json.MarshalIndent(l2StateDataRoot, \"\", indent)\n\t\tif err != nil {\n\t\t\twasError = err\n\t\t}\n\t\t// L2 FIB data\n\t\tjsL2FIBData, err := json.MarshalIndent(l2FibDataRoot, \"\", indent)\n\t\tif err != nil {\n\t\t\twasError = err\n\t\t}\n\t\t// L3 FIB data\n\t\tjsL3FIBData, err := json.MarshalIndent(l3FibDataRoot, \"\", indent)\n\t\tif err != nil {\n\t\t\twasError = err\n\t\t}\n\n\t\t// Add data to buffer\n\t\tif string(jsConfData) != emptyJSON {\n\t\t\tprintLabel(buffer, key+\": - \"+IfConfig+\"\\n\", indent, ifaceConfKeys)\n\t\t\tfmt.Fprintf(buffer, \"%s\\n\", jsConfData)\n\t\t}\n\t\tif string(jsStateData) != emptyJSON {\n\t\t\tprintLabel(buffer, key+\": - \"+IfState+\"\\n\", indent, ifaceStateKeys)\n\t\t\tfmt.Fprintf(buffer, \"%s\\n\", jsStateData)\n\t\t}\n\t\tif string(jsL2ConfigData) != emptyJSON {\n\t\t\tprintLabel(buffer, key+\": - \"+BdConfig+\"\\n\", indent, l2Keys)\n\t\t\tfmt.Fprintf(buffer, \"%s\\n\", jsL2ConfigData)\n\t\t}\n\t\tif string(jsL2ConfigData) != emptyJSON {\n\t\t\tprintLabel(buffer, key+\": - \"+BdState+\"\\n\", indent, l2Keys)\n\t\t\tfmt.Fprintf(buffer, \"%s\\n\", jsL2StateData)\n\t\t}\n\t\tif string(jsL2FIBData) != emptyJSON {\n\t\t\tprintLabel(buffer, key+\": -\"+L2FibConfig+\"\\n\", indent, l2FibKeys)\n\t\t\tfmt.Fprintf(buffer, \"%s\\n\", jsL2FIBData)\n\t\t}\n\t\tif string(jsL3FIBData) != emptyJSON {\n\t\t\tprintLabel(buffer, key+\": - \"+L3FibConfig+\"\\n\", indent, l3FibKeys)\n\t\t\tfmt.Fprintf(buffer, \"%s\\n\", jsL3FIBData)\n\t\t}\n\n\t}\n\n\treturn buffer, wasError\n}",
"func (l *EchoLogrus) Infoj(j log.JSON) {\n\tl.Logger.WithFields(logrus.Fields(j)).Info()\n}",
"func init() {\n\tlog.SetFormatter(&log.JSONFormatter{})\n\tlog.SetOutput(os.Stdout)\n\tlog.SetLevel(log.InfoLevel)\n}",
"func (writer *Writer) String() string {\n\tr, e := json.MarshalIndent (writer, \"\", \"\\t\")\n\n\tif nil != e {\n\t\tlog.Print (e)\n\t}\n\n\treturn string (r)\n}",
"func (c Config) Print() {\n\tif c.JsonLogging {\n\t\tc.PrintJsonConfigArgs()\n\t} else {\n\t\tc.PrintHumanConfigArgs()\n\t}\n}",
"func printSecretJSON(secret *v1.Secret, targetData string, filterF filterFunc, flagMetadata bool) {\n\ttype metadata struct {\n\t\tName string\n\t\tType string\n\t\tLength int\n\t\tSize int\n\t}\n\ttype outjson struct {\n\t\tMetadata metadata `json:\"metadata\",omitempty`\n\t\tValues map[string]string `json:\"values\"`\n\t}\n\n\tsecs := outjson{Values: make(map[string]string)}\n\tfor _, sd := range filterF(secret.Data, targetData) {\n\t\tsecs.Values[sd.Key] = sd.Value\n\t}\n\n\tif flagMetadata {\n\t\tsecs.Metadata = metadata{Name: secret.Name, Type: string(secret.Type), Length: len(secret.Data), Size: secret.Size()}\n\t}\n\n\tsecretJSON, err := json.MarshalIndent(secs, \"\", \" \")\n\tif err != nil {\n\t\tprExit(err)\n\t}\n\tfmt.Fprintf(os.Stdout, \"%s\", secretJSON)\n}",
"func (l *NilLogger) Print(v ...interface{}) {\n\n}",
"func PrettyPrintJSON(metrics interface{}) string {\n\toutput := &bytes.Buffer{}\n\tif err := json.NewEncoder(output).Encode(metrics); err != nil {\n\t\tLogf(\"Error building encoder: %v\", err)\n\t\treturn \"\"\n\t}\n\tformatted := &bytes.Buffer{}\n\tif err := json.Indent(formatted, output.Bytes(), \"\", \" \"); err != nil {\n\t\tLogf(\"Error indenting: %v\", err)\n\t\treturn \"\"\n\t}\n\treturn formatted.String()\n}",
"func Flush() {\n\tif printer.Format == FormatJSON {\n\t\tvar b []byte\n\t\tif printer.Single && len(printer.Lines) == 1 {\n\t\t\tb, _ = json.MarshalIndent(printer.Lines[0], \"\", \" \")\n\t\t} else {\n\t\t\tb, _ = json.MarshalIndent(printer.Lines, \"\", \" \")\n\t\t}\n\n\t\tfmt.Fprintln(printer.writer, string(b))\n\t\tprinter.Lines = []interface{}{}\n\t}\n}",
"func (l *Logger) LogJSON(value interface{}) (err error) {\n\tvar msg []byte\n\tif msg, err = json.Marshal(value); err != nil {\n\t\treturn\n\t}\n\n\t// Convert message to bytes and pass to l.Log\n\treturn l.Log(msg)\n}",
"func PrettyPrint(x interface{}) {\n\tb, err := json.MarshalIndent(x, \"\", \" \")\n\tif err != nil {\n\t\tfmt.Println(\"error:\", err)\n\t}\n\tfmt.Println(string(b))\n}",
"func (ptr *KeyholeInfo) Print() string {\n\tif ptr == nil {\n\t\treturn \"\"\n\t}\n\tstrs := []string{fmt.Sprintf(`{ keyhole: { version: \"%v\", args: \"%v\" } }`, ptr.Version, ptr.Params)}\n\tstrs = append(strs, ptr.Logs...)\n\treturn strings.Join(strs, \"\\n\")\n}",
"func write(resp *Response, w http.ResponseWriter) {\n\tjs, _ := json.Marshal(resp)\n\tfmt.Fprint(w, string(js))\n}"
] | [
"0.7584081",
"0.7325744",
"0.72773486",
"0.72528744",
"0.7165884",
"0.7115936",
"0.70951813",
"0.7084531",
"0.70682424",
"0.7060325",
"0.70069987",
"0.6986642",
"0.69384706",
"0.69226503",
"0.6861717",
"0.6788508",
"0.67024064",
"0.6682685",
"0.6663007",
"0.6567341",
"0.6549577",
"0.6495432",
"0.6483976",
"0.6462898",
"0.6447256",
"0.643791",
"0.6392112",
"0.6382437",
"0.6365504",
"0.635616",
"0.63222283",
"0.6307835",
"0.62692016",
"0.6268653",
"0.6258925",
"0.6247758",
"0.61851716",
"0.6169646",
"0.6139288",
"0.6125422",
"0.6118951",
"0.6105973",
"0.61022866",
"0.6036686",
"0.60360354",
"0.60105777",
"0.5984554",
"0.59528226",
"0.59253585",
"0.5883983",
"0.581859",
"0.58154774",
"0.58122426",
"0.5798262",
"0.5758175",
"0.57558477",
"0.5747577",
"0.5741176",
"0.57358336",
"0.5729623",
"0.57167566",
"0.57151777",
"0.5690855",
"0.5674175",
"0.5668557",
"0.5663014",
"0.5657246",
"0.5654774",
"0.56511915",
"0.56339675",
"0.56291956",
"0.56144935",
"0.56139255",
"0.5613426",
"0.56129974",
"0.56126225",
"0.5611033",
"0.55995035",
"0.5592707",
"0.5571241",
"0.5566378",
"0.5560886",
"0.55534893",
"0.55533737",
"0.55492467",
"0.5536612",
"0.5529081",
"0.5525304",
"0.55251",
"0.55246055",
"0.5519069",
"0.55131423",
"0.54907465",
"0.5481007",
"0.5466373",
"0.5458346",
"0.5458335",
"0.54458946",
"0.5439614",
"0.5430058"
] | 0.6997781 | 11 |
Debugj debug json log | func (l *EchoLogrus) Debugj(j log.JSON) {
l.Logger.WithFields(logrus.Fields(j)).Debug()
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (l *JSONLogger) Debug(format string, args ...interface{}) {\n\tl.Log(\"debug\", format, args...)\n}",
"func (l *jsonLogger) Debug(message interface{}, params ...interface{}) {\n\tl.jsonLogParser.parse(context.Background(), l.jsonLogParser.log.Debug(), \"\", params...).Msgf(\"%s\", message)\n}",
"func Debug(config *CHYLE, logger *log.Logger) {\n\tif d, err := json.MarshalIndent(config, \"\", \" \"); err == nil {\n\t\tlogger.Println(string(d))\n\t}\n}",
"func JSONLogger(r *http.Request, status int, len int64, d time.Duration) {\n\tos.Stderr.WriteString(JSONLogMessage(time.Now, r.Method, r.URL, status, len, d, nil))\n}",
"func Debug(v ...interface{}) {\n\tjasonLog.output(2, levelDebug, \"\", v...)\n}",
"func logJSON(v interface{}) {\n\tb, err := json.MarshalIndent(v, \"\", \" \")\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tlog.Println(string(b))\n}",
"func JSONLog(w io.Writer) LogFunc {\n\treturn func(v interface{}) {\n\t\tdata, err := json.Marshal(v)\n\t\tif err != nil {\n\t\t\tdata, err = json.Marshal(struct {\n\t\t\t\tContext string `json:\"context\"`\n\t\t\t\tDebugData string `json:\"debugData\"`\n\t\t\t\tError string `json:\"error\"`\n\t\t\t}{\n\t\t\t\tContext: \"Error marshaling 'debugData' into JSON\",\n\t\t\t\tDebugData: spew.Sdump(v),\n\t\t\t\tError: err.Error(),\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\t// We really REALLY should never get here\n\t\t\t\tlog.Println(\"ERROR MARSHALLING THE MARSHALLING ERROR!:\", err)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif _, err := fmt.Fprintf(w, \"%s\\n\", data); err != nil {\n\t\t\tlog.Println(\"ERROR WRITING TO LOGGER:\", err)\n\t\t}\n\t}\n}",
"func (data *Invasion) debug(obj ...interface{}) {\n data.DebugLog = append(data.DebugLog, spew.Sdump(obj))\n}",
"func (aptRestorer *APTRestorer) logJson(restoreState *models.RestoreState, jsonString string) {\n\ttimestamp := time.Now().UTC().Format(time.RFC3339)\n\tstartMessage := fmt.Sprintf(\"-------- BEGIN %s | WorkItem: %d | Time: %s --------\",\n\t\trestoreState.WorkItem.ObjectIdentifier, restoreState.WorkItem.Id, timestamp)\n\tendMessage := fmt.Sprintf(\"-------- END %s | WorkItem: %d | Time: %s --------\",\n\t\trestoreState.WorkItem.ObjectIdentifier, restoreState.WorkItem.Id, timestamp)\n\taptRestorer.Context.JsonLog.Println(startMessage, \"\\n\",\n\t\tjsonString, \"\\n\",\n\t\tendMessage)\n}",
"func Debug(data []byte) {\n\tlog.Print(\"DEBUG: \", string(data))\n}",
"func (msg *Message) Debug() string {\n\tbytes, _ := json.Marshal(msg) // Ignoring the error because it literally can't happen.\n\treturn string(bytes)\n}",
"func (d *DummyLogger) Debug(format string) {}",
"func (w *Writer) Debug(m string) error {}",
"func (c *APIClient) Debug() {\n\tc.debug = true\n}",
"func Debug(content interface{}) {\n\tLog(\"DEBUG\", content)\n}",
"func Debugf(format string, v ...interface{}) {\n\tjasonLog.output(2, levelDebug, format, v...)\n}",
"func ExampleDebug() {\n\tsetup()\n\tlog.Debug().Msg(\"hello world\")\n\n\t// Output: {\"level\":\"debug\",\"time\":1199811905,\"message\":\"hello world\"}\n}",
"func (handler *ConsoleLogHandler) Format() LogFormat {\r\n return JSONFormat\r\n}",
"func (l *AppLogger) Debug(tag string, message ...interface{}) {\n\tl.logging.SetFormatter(&logrus.JSONFormatter{})\n\tk := getAppFields(l.reqId, tag, l.userId)\n\tl.logging.WithFields(k).Debug(message...)\n}",
"func JSONLogMessage(now func() time.Time, method string, u *url.URL, status int, length int64, d time.Duration, fields map[string]string) string {\n\tc := \"http_\" + strconv.Itoa(status/100) + \"xx\"\n\ts := `{` +\n\t\t`\"time\":\"` + now().UTC().Format(time.RFC3339) + `\",` +\n\t\t`\"src\":\"rl\",` +\n\t\t`\"status\":` + strconv.Itoa(status) + `,` +\n\t\t`\"` + c + `\":1,` +\n\t\t`\"len\":` + strconv.FormatInt(length, 10) + `,` +\n\t\t`\"ms\":` + strconv.FormatInt(d.Nanoseconds()/1000000, 10) + `,` +\n\t\t`\"method\":\"` + jsonEscape(method) + `\",` +\n\t\t`\"path\":\"` + jsonEscape(u.Path) + `\"`\n\tfor k, v := range fields {\n\t\ts += `,\"` + k + `\":\"` + v + `\"`\n\t}\n\treturn s + \"}\\n\"\n}",
"func JSONDebug(conn redis.Conn, subcommand, key, path string) (res interface{}, err error) {\n\tif subcommand != DebugMemorySubcommand && subcommand != DebugHelpSubcommand {\n\t\terr = fmt.Errorf(\"unknown subcommand - try `JSON.DEBUG HELP`\")\n\t\treturn\n\t}\n\tname, args, _ := CommandBuilder(\"JSON.DEBUG\", subcommand, key, path)\n\tres, err = conn.Do(name, args...)\n\tif err != nil {\n\t\treturn\n\t}\n\t// JSONDebugMemorySubcommand returns an integer representing memory usage\n\tif subcommand == DebugMemorySubcommand {\n\t\treturn res.(int64), err\n\t}\n\t// JSONDebugHelpSubcommand returns slice of string of Help as slice of uint8\n\thlp := make([]string, 0, 10)\n\tfor _, r := range res.([]interface{}) {\n\t\thlp = append(hlp, tostring(r))\n\t}\n\tres = strings.Join(hlp, \"\\n\")\n\treturn\n}",
"func Debug(args ...interface{}) {\n\txlog.SetFormatter(&logrus.JSONFormatter{})\n\txlog.SetLevel(logrus.DebugLevel)\n\n\txlog.Debug(args...)\n}",
"func TestDebug(t *testing.T) {\n\tvar data = []byte(`Log this!`)\n\tapolog.Debug(data)\n}",
"func (n *AgentNotify) DumpJSON() {\n\tfmt.Println(n.getJSON())\n}",
"func jsonPrint(posts Posts) {\n\tpostJSON, err := json.MarshalIndent(posts, \"\", \" \")\n\tif err != nil {\n\t\tlogrus.Error(err)\n\t\tpanic(err)\n\t}\n\tfmt.Printf(\"JSON data: \\n %s\\n\", string(postJSON))\n}",
"func LogJSON(data interface{}) string {\n\tjsonData, err := json.Marshal(data)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\tvar prettyJSON bytes.Buffer\n\terr = json.Indent(&prettyJSON, jsonData, \"\", \" \")\n\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\treturn prettyJSON.String()\n}",
"func (lc mockNotifyLogger) Debug(msg string, args ...interface{}) {\n}",
"func debug(m string, v interface{}) {\n\tif DebugMode {\n\t\tlog.Printf(m+\":%+v\", v)\n\t}\n}",
"func TestJsonFormat(t *testing.T) {\n\tout := strings.Builder{}\n\tSetOutput(&out)\n\tSetLevelStr(\"DEBUG\")\n\tSetLogFormat(JSON)\n\tt.Run(\"test setting of JSON log format\", func(t *testing.T) {\n\t\tprintAllLevels(\"test json format\")\n\t\tif !(strings.Count(out.String(), \"{\\\"level\\\":\\\"DEBUG\\\",\\\"time\\\":\\\"\") == 2 &&\n\t\t\tstrings.Count(out.String(), \"\\\",\\\"location\\\":\\\"logger_test.go:\") == 10 &&\n\t\t\tstrings.Count(out.String(), \"\\\"goroutine\\\":\") == 10 &&\n\t\t\tstrings.Count(out.String(), \",\\\"message\\\":\\\"--> test json format\\\"}\") == 10) {\n\t\t\tt.Errorf(\"Log should be in JSON format:\\n%v\", out.String())\n\t\t}\n\t})\n}",
"func Debug(msg string) {\n log.Debug(msg)\n}",
"func (n *NetOp) LogJSON(b []byte) {\n\tif n.Logger != nil {\n\t\tn.Logger.LogJSON(b)\n\t}\n}",
"func loggerJSON(l jsonLog) {\n\tl.Date = time.Now()\n\tif l.Level == 0 {\n\t\tl.Level = 6\n\t}\n\tif Config.MinLogLevel >= l.Level {\n\t\tif l.Version == \"\" {\n\t\t\tl.Version = \"1.1\"\n\t\t}\n\t\tif l.Host == \"\" {\n\t\t\tl.Host = \"Quotes\"\n\t\t}\n\t\tif l.ResponseCode == 0 {\n\t\t\tl.ResponseCode = 200\n\t\t}\n\t\t_ = os.MkdirAll(\"./logs/\", os.ModePerm)\n\t\tf, err := os.OpenFile(\"./logs/logs.json\", os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Error opening logs.json file: %v\", err)\n\t\t}\n\t\tdata, _ := json.Marshal(l)\n\t\tf.WriteString(string(data) + \"\\n\")\n\t\tf.Close()\n\t}\n}",
"func (l *jsonLogger) DebugContext(ctx context.Context, message interface{}, params ...interface{}) {\n\tl.jsonLogParser.parse(ctx, l.jsonLogParser.log.Debug(), \"\", params...).Msgf(\"%s\", message)\n}",
"func (l *ZapLogger) Debug(format string) {\n\tl.logger.Debug(format)\n}",
"func dumpJSON(o interface{}) error {\n\tjs, err := json.MarshalIndent(o, \"\", \" \")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Printf(\"```\\n%s\\n```\\n\\n\", js)\n\n\treturn nil\n}",
"func (l MockLogger) Debug(msg string, fields ...interface{}) {\n}",
"func (lfo *logFuncOutput) debugSprintAll() string {\n\n\tlfo.listMutex.Lock()\n\tdefer lfo.listMutex.Unlock()\n\n\tresult := \"\"\n\n\tfor _, entry := range lfo.jsonList {\n\t\tjsonVal, err := json.Marshal(entry)\n\t\tif err != nil {\n\t\t\tlfo.errorOccurred = err\n\t\t\treturn fmt.Sprint(err)\n\t\t}\n\t\tresult += string(jsonVal)\n\t}\n\n\treturn result\n}",
"func debug(format string, args ...interface{}) {\n\tif debugOn {\n\t\tlog.Printf(\"DEBUG: \"+format, args...)\n\t}\n}",
"func jsonPrintDetails() {\n\n\tb, err := json.Marshal(alertDetails[name])\n\n\tif err != nil {\n\t\tlog.Printf(\"Unable to convert Detailed JSON Data, error: %v\\n\", err)\n\t\treturn\n\t}\n\n\tfmt.Println(string(b))\n}",
"func printJson(ag *alertGroup, m *sync.Mutex) {\n\tm.Lock()\n\tfor _, alert := range ag.Alerts {\n\t\tout := map[string]string{\"status\": alert.Status}\n\n\t\tfor k, v := range alert.Labels {\n\t\t\tout[k] = v\n\t\t}\n\t\tfor k, v := range alert.Annotations {\n\t\t\tout[k] = v\n\t\t}\n\t\tout[\"startsAt\"] = alert.StartsAt.Truncate(time.Millisecond).String()\n\t\tout[\"endsAt\"] = alert.EndsAt.Truncate(time.Millisecond).String()\n\n\t\tjout, err := json.Marshal(out)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\tfmt.Printf(\"%s\\n\", jout)\n\t}\n\tm.Unlock()\n}",
"func LogJSON(level Level, module string, data interface{}) {\n\tb, err := json.MarshalIndent(data, \"\", \" \")\n\tif err == nil {\n\t\tlconf.Logger.Log(level, module, \"%s\", string(b))\n\t}\n}",
"func JSONLogMiddleware() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\t// Start timer\n\t\tstart := time.Now()\n\n\t\t// Process Request\n\t\tc.Next()\n\n\t\t// Stop timer\n\t\tduration := GetDurationInMillseconds(start)\n\n\t\tentry := log.WithFields(log.Fields{\n\t\t\t\"type\": \"router\",\n\t\t\t\"client_ip\": GetClientIP(c),\n\t\t\t\"duration\": duration,\n\t\t\t\"method\": c.Request.Method,\n\t\t\t\"path\": c.Request.RequestURI,\n\t\t\t\"status\": c.Writer.Status(),\n\t\t\t\"referrer\": c.Request.Referer(),\n\t\t})\n\n\t\tif c.Writer.Status() >= 500 {\n\t\t\tentry.Error(c.Errors.String())\n\t\t} else {\n\t\t\tentry.Info(\"\")\n\t\t}\n\t}\n}",
"func PPrintJSON(xx interface{}) {\n\tyy, _ := json.MarshalIndent(xx, \"\", \" \")\n\tlog.Println(string(yy))\n}",
"func debug(format string, v ...interface{}) {\n\tif *verbose {\n\t\tfmt.Printf(format+\"\\n\", v...)\n\t}\n}",
"func (zl *ZapLogger) Debug(ctx context.Context, msg string, data interface{}) {\n\tzl.logger.Debug(msg, fields(ctx, data, nil)...)\n}",
"func Debugf(format string, args ...interface{}) {\n\txlog.SetFormatter(&logrus.JSONFormatter{})\n\txlog.SetLevel(logrus.DebugLevel)\n\n\txlog.Debugf(format, args...)\n}",
"func (c *Client) debugf(format string, v ...any) {\n\tif !c.DebugLog {\n\t\treturn\n\t}\n\tc.Logger(format, v...)\n}",
"func (lh *logHandler) Debug(data ...interface{}) {\n\tif lh.debug == nil {\n\t\treturn\n\t}\n\tlh.debug.Println(data...)\n}",
"func (this *SrvLog) Debug(format string, v ...interface{}) {\n\tthis.LogTo(false, \"debug\", format, v...)\n}",
"func (ss *sceneStub) D() Logger { return ss.L(Debug) }",
"func (l *MessageLogger) Debug(msg string) { l.logger.Debug(msg) }",
"func (patchwork *Patchwork) Debug() {\n\tpatchwork.debug = true\n}",
"func (z *ZapLogWrapper) Debug(args ...interface{}) {\n\tz.l.Debug(args...)\n}",
"func (l *Logger) Debug(v ...interface{}) { l.lprint(DEBUG, v...) }",
"func Debug(l ...interface{}) {\n\tlog.WithFields(log.Fields{\n\t\t\"SERVICE\": \"WINGO\",\n\t}).Debugln(l...)\n}",
"func (z *Logger) Debug(args ...interface{}) {\n\tz.SugaredLogger.Debug(args...)\n}",
"func (n *Null) Debug(args ...interface{}) {\n}",
"func print_json(chunks []Chunk) {\n\tfmt.Println(\"{\")\n\tfor i := range chunks {\n\t\tpayload := chunks[i].payload\n\t\ttag := chunks[i].tag\n\t\tif i > 0 {\n\t\t\tfmt.Println(\",\")\n\t\t}\n\t\tfmt.Printf(\" \\\"%s\\\": \\\"%s\\\"\", tag, payload)\n\t}\n\tfmt.Printf(\"\\n}\\n\")\n}",
"func Debug(msg string, a ...interface{}) {\n if *verbose {\n formatted := fmt.Sprintf(msg, a...)\n logger.Println(fmt.Sprintf(\"\\033[35;1mDEBUG:\\033[0m %s\", formatted))\n }\n}",
"func (log *log) Debug(a ...interface{}) {\n\ta = append([]interface{}{log.attachPrefix(\"\")}, a...)\n\tlogger.DebugDepth(1, a...)\n}",
"func logDebug(r *http.Request, msg string) {\n\tif Debug {\n\t\tlogEvent(r, \"debug\", msg)\n\t}\n}",
"func (l *logHandler) Debug(args ...interface{}) {\n\tl.Log(LogDebug, 3, args...)\n}",
"func (zw *ZerologWriter) DebugLogging(enabled bool) { zw.w.DebugLogging(enabled) }",
"func (zw *ZerologWriter) DebugLogging(enabled bool) { zw.w.DebugLogging(enabled) }",
"func (r *Record) Debug(args ...interface{}) {\n\tr.Log(DebugLevel, args...)\n}",
"func (nl *NullLogger) LogDebug(m ...interface{}) {\n}",
"func debugRequest(obj fmt.Stringer) {\n\tif debug != \"enable\" {\n\t\treturn\n\t}\n\tvar name string\n\tif t := reflect.TypeOf(obj); t.Kind() == reflect.Ptr {\n\t\tname = \"*\" + t.Elem().Name()\n\t} else {\n\t\tname = t.Name()\n\t}\n\tfmt.Printf(\"[DEBUG] %s\\n\", name)\n\tfmt.Println(obj)\n}",
"func (p *Parser) Dump() error {\n return p.json.Dump()\n}",
"func (l BlackHole) Debug(_ string, _ ...Field) {}",
"func EnableJSON() {\n\tlogrus.SetFormatter(&logrus.JSONFormatter{})\n}",
"func Debug(i interface{}) {\n\tsimlog.Debug(i)\n}",
"func DebugLogging(c *Client) {\n\tc.Handlers.Send.Prepend(request.RequestLogger)\n\tc.Handlers.Send.Append(request.ResponseLogger)\n}",
"func (n *nopLogger) Debugf(format string, v ...interface{}) {}",
"func (l *logger) Debug(b []byte) ([]byte, error) {\n\tlog.Debugf(\"%s\", b)\n\treturn []byte(\"\"), nil\n}",
"func Test_JSONLogger(t *testing.T) {\n\tdefer b.Reset()\n\n\tlog.InitJSONLogger(&log.Config{\n\t\tOutput: b,\n\t})\n\n\tlog.WithError(\n\t\terrors.New(\"bepis\"),\n\t).WithFields(log.Fields{\n\t\t\"hello\": \"world\",\n\t\t\"sample\": 1,\n\t\t\"text\": nil,\n\t}).Error(\"banana\")\n\n\texpected := map[string]interface{}{\n\t\t\"message\": \"banana\",\n\t\t\"error\": \"bepis\",\n\t\t\"hello\": \"world\",\n\t\t\"sample\": float64(1),\n\t\t\"text\": nil,\n\t\t\"level\": \"ERROR\",\n\t\t\"time\": \"<placeholder>\",\n\t\t\"_function\": \"<placeholder>\",\n\t\t\"_file\": \"<placeholder>\",\n\t\t\"_line\": \"<placeholder>\",\n\t}\n\n\tvar data map[string]interface{}\n\tif err := json.Unmarshal([]byte(b.String()), &data); err != nil {\n\t\tt.Fatalf(\"error unmarshalling buffer: %v\", err)\n\t}\n\n\tif len(expected) != len(data) {\n\t\tt.Fatalf(\"expected length: %d. actual length: %d\", len(expected), len(data))\n\t}\n\n\tfor k, v := range expected {\n\t\tval, ok := data[k]\n\t\tif !ok {\n\t\t\tt.Errorf(\"expected '%s' to be in buffer\", k)\n\t\t}\n\n\t\t// ignore the runtime specific info and timestamp, cant really get that info afaik\n\t\t// and checking their presence is good enough\n\t\tif !strings.HasPrefix(k, \"_\") && !(k == \"time\") {\n\t\t\tif val != v {\n\t\t\t\tt.Errorf(\"expected value: %T '%v'. actual value %T '%v'\", v, v, val, val)\n\t\t\t}\n\t\t}\n\t}\n}",
"func Dd(variable interface{}, die bool) {\n\tres, _ := json.MarshalIndent(variable, \"\", \" \")\n\n\tfmt.Println(\"= = = = = = = = = = = =\")\n\tfmt.Println(string(res))\n\tfmt.Println(\"= = = = = = = = = = = =\")\n\tif die {\n\t\tos.Exit(1)\n\t}\n}",
"func (c *Client) debug(str string, args ...interface{}) {\n\tif c.level >= 2 {\n\t\tc.log.Printf(str, args...)\n\t}\n}",
"func (testLog TestLog) Debug(msg ...interface{}) {\n\ttestLog.T.Log(\"[Debug]\", msg)\n}",
"func (f *firstLogger) Debug(message ...interface{}) {\n\ttransactionLog.WithFields(logrus.Fields{\n\t\t\"file\": f.Filename,\n\t}).Debug(message)\n\n}",
"func (zw *zerologWrapper) Debug(ctx context.Context, format string, args ...interface{}) {\n\tnewEntry(zw, false, zw.cfg.staticFields).Debug(ctx, format, args...)\n}",
"func (l *LoggerService) Debug(message string, values ...interface{}) {\n\tlog.Printf(\"[DEBUG] \"+message+\"\\n\", values...)\n}",
"func (j *Junction) debug(values ...string) {\n\tif j.DebugMode {\n\t\tlog.Println(values)\n\t}\n}",
"func Debug(args ...interface{}) {\n\tif glog.V(debug) {\n\t\tglog.InfoDepth(1, \"DEBUG: \"+fmt.Sprint(args...)) // 1 == depth in the stack of the caller\n\t}\n}",
"func Debug(v ...interface{}) {\n\tif jl.level != INFO {\n\t\tvar s string\n\t\tjl.stdlog.SetPrefix(\"[DEBUG] \")\n\t\tif jl.flag == LstdFlags|Lshortfile {\n\t\t\ts = generateStdflagShortFile()\n\t\t}\n\n\t\ts = s + fmt.Sprintln(v...)\n\t\tjl.stdlog.Print(s)\n\t}\n}",
"func (StdLogger) Debugf(format string, v ...interface{}) {}",
"func (p *tubePool) logDebug(opt RequestOptions, logString string) {\n\tif opt.Logger != nil && opt.LogLevel.AtLeast(aws.LogDebug) {\n\t\topt.Logger.Log(logString)\n\t}\n}",
"func debugLog(v ...interface{}) {\n\tif *isDebug || *isVerbose {\n\t\tlog.Println(v...)\n\t}\n}",
"func (l *Impl) Debug(format string, args ...interface{}) {\n\tl.write(\"DEBUG\", format, args...)\n}",
"func (l *zapLog) Debug(args ...interface{}) {\n\tif l.logger.Core().Enabled(zapcore.DebugLevel) {\n\t\tl.logger.Debug(fmt.Sprint(args...))\n\t}\n}",
"func (c *Client) Debugf(format string, v ...interface{}) {\n\tlog.Printf(\"[DEBUG] %s\", fmt.Sprintf(format, v...))\n}",
"func debugLog(msg ...interface{}) {\n\tlogLocker.Lock()\n\tdefer logLocker.Unlock()\n\tif *confVerbose {\n\t\tcolor.White(fmt.Sprint(time.Now().Format(\"02_01_06-15.04.05\"), \"[DEBUG] ->\", msg))\n\t}\n}",
"func debug(format string, args ...interface{}) {\n\tif IsDebug {\n\t\tLog.Debug(fmt.Sprintf(format, args...))\n\t}\n}",
"func Debug(format string, v ...interface{}) {\n\tmsg := fmt.Sprintf(\"[D]\"+format, v...)\n\tlog.Println(msg)\n}",
"func debug(v ...interface{}) {\n\tfmt.Println(v...)\n}",
"func (p *Provider) Debug(debug bool) {}",
"func (p *Provider) Debug(debug bool) {}",
"func (p *Provider) Debug(debug bool) {}",
"func (p *Provider) Debug(debug bool) {}",
"func (c *Client) Debug(debug bool) {\n\tc.debug = debug\n}",
"func (l nullLogger) Debug(msg string, ctx ...interface{}) {}"
] | [
"0.692201",
"0.6908374",
"0.67573774",
"0.6677289",
"0.64629704",
"0.6456457",
"0.6283996",
"0.6276111",
"0.61813056",
"0.6149834",
"0.61419827",
"0.60906905",
"0.6035263",
"0.6023588",
"0.60095674",
"0.6004559",
"0.5970736",
"0.59674805",
"0.59623086",
"0.59593713",
"0.59452456",
"0.59421045",
"0.59283984",
"0.5923103",
"0.5908647",
"0.58666354",
"0.5856749",
"0.58241165",
"0.5815909",
"0.5797374",
"0.57955426",
"0.57555926",
"0.5746866",
"0.5746197",
"0.5742655",
"0.5730328",
"0.5716491",
"0.57149714",
"0.5706344",
"0.57047045",
"0.57022697",
"0.5696722",
"0.56836903",
"0.5679901",
"0.56602156",
"0.5659629",
"0.5646003",
"0.56291485",
"0.56271607",
"0.5626677",
"0.56256664",
"0.56222945",
"0.5611493",
"0.55921674",
"0.5584815",
"0.55840987",
"0.55828035",
"0.55823046",
"0.557732",
"0.557421",
"0.5573303",
"0.5568779",
"0.5562851",
"0.5562851",
"0.55626136",
"0.556022",
"0.5559297",
"0.55495626",
"0.55458164",
"0.5537552",
"0.55280095",
"0.552524",
"0.5522998",
"0.5522408",
"0.55215454",
"0.55215096",
"0.55196875",
"0.551304",
"0.5511697",
"0.5510396",
"0.5509341",
"0.550728",
"0.55033904",
"0.5502484",
"0.5499364",
"0.5493805",
"0.54901356",
"0.54897755",
"0.54857236",
"0.54793257",
"0.54663295",
"0.54615337",
"0.54570854",
"0.5456285",
"0.5449675",
"0.5449675",
"0.5449675",
"0.5449675",
"0.54491043",
"0.5444496"
] | 0.6852664 | 2 |
Infoj info json log | func (l *EchoLogrus) Infoj(j log.JSON) {
l.Logger.WithFields(logrus.Fields(j)).Info()
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (l *JSONLogger) Info(format string, args ...interface{}) {\n\tl.Log(\"info\", format, args...)\n}",
"func (l *jsonLogger) Info(message interface{}, params ...interface{}) {\n\tl.jsonLogParser.parse(context.Background(), l.jsonLogParser.log.Info(), \"\", params...).Msgf(\"%s\", message)\n}",
"func Info(v ...interface{}) {\n\tjasonLog.output(2, levelInfo, \"\", v...)\n}",
"func ExampleInfo() {\n\tsetup()\n\tlog.Info().Msg(\"hello world\")\n\n\t// Output: {\"level\":\"info\",\"time\":1199811905,\"message\":\"hello world\"}\n}",
"func infoHandler(w http.ResponseWriter, r *http.Request) {\n\tinfo := Metainfo{Uptime: timeSince(startTime), Info: \"Service for IGC tracks.\", Version: \"v1\"}\n\tjs, err := json.Marshal(info)\n\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.Write(js)\n\n}",
"func Info(data []byte) {\n\tlog.Print(\"INFO: \", string(data))\n}",
"func (d *DummyLogger) Info(format string) {}",
"func (j DSGitHub) Info() string {\n\treturn fmt.Sprintf(\"%+v\", j)\n}",
"func (l *AppLogger) Info(tag string, message ...interface{}) {\n\tl.logging.SetFormatter(&logrus.JSONFormatter{})\n\tk := getAppFields(l.reqId, tag, l.userId)\n\tl.logging.WithFields(k).Info(message...)\n}",
"func Infof(format string, v ...interface{}) {\n\tjasonLog.output(2, levelInfo, format, v...)\n}",
"func (l *Impl) Info(format string, args ...interface{}) {\n\tl.write(\"INFO\", format, args...)\n}",
"func Info(format string, v ...interface{}) {\n\tmsg := fmt.Sprintf(\"[I]\"+format, v...)\n\tlog.Println(msg)\n}",
"func Info(format string, v ...interface{}) {\n\tdoLog(\"INFO\", format, v...)\n}",
"func (logger *NLog) Info(format string, v ...interface{}) {\n\tlogger.rwm.RLock()\n\tdefer logger.rwm.RUnlock()\n\n\tif logger.info != nil {\n\t\tlogger.info.Output(3, fmt.Sprintln(fmt.Sprintf(format, v...)))\n\t}\n}",
"func Info(msg string) {\n log.Info(msg)\n}",
"func Info(content interface{}) {\n\tLog(\"INFO\", content)\n}",
"func (l *ZapLogger) Info(format string) {\n\tl.logger.Info(format)\n}",
"func (b *Bootstrapper) Info() ([]byte, error) {\n\tclasses := map[string][]byte{}\n\tfor class, ref := range b.classRefs {\n\t\tclasses[class] = ref[:]\n\t}\n\treturn json.MarshalIndent(map[string]interface{}{\n\t\t\"root_domain\": b.rootDomainRef[:],\n\t\t\"root_member\": b.rootMemberRef[:],\n\t\t\"classes\": classes,\n\t}, \"\", \" \")\n}",
"func Info(args ...interface{}) {\n\txlog.SetFormatter(&logrus.JSONFormatter{})\n\txlog.SetLevel(logrus.DebugLevel)\n\n\txlog.Info(args...)\n}",
"func Info(v ...interface{}) {\n\t_ = info.Output(2, fmt.Sprint(v...))\n}",
"func (logger *Logger) Info(args ...interface{}) {\n\tlogger.std.Log(append([]interface{}{\"Info\"}, args...)...)\n}",
"func (this *SrvLog) Info(format string, v ...interface{}) {\n\tthis.LogTo(false, \"info\", format, v...)\n}",
"func Info(format string, a ...interface{}) {\n\tif currentLogger == nil {\n\t\treturn\n\t}\n\tcurrentLogger.output(currentPool, _InfoLevel, format, a...)\n}",
"func Info(format string, args ...interface{}) {\n\tdo(INFO, format, args...)\n}",
"func (zw *zerologWrapper) Info(ctx context.Context, format string, args ...interface{}) {\n\tnewEntry(zw, false, zw.cfg.staticFields).Info(ctx, format, args...)\n}",
"func (l *Logger) Info(args ...interface{}) {\n\tkitlevel.Info(l.Log).Log(args...)\n}",
"func TestInfo(t *testing.T) {\n\tvar data = []byte(`Log this!`)\n\tapolog.Info(data)\n}",
"func Info(format string, v ...interface{}) {\n\tLog(1, INFO, format, v...)\n}",
"func (l *PlexLogger) Info(msg string, kvs ...interface{}) {\n\tl.send(PlexLogInfo, msg, kvs...)\n}",
"func (z *Logger) Info(args ...interface{}) {\n\tz.SugaredLogger.Info(args...)\n}",
"func (_m *MockLogger) Info(message string, data map[string]interface{}) {\n}",
"func (j DSRocketchat) Info() string {\n\treturn fmt.Sprintf(\"%+v\", j)\n}",
"func (z *ZapLogWrapper) Info(args ...interface{}) {\n\tz.l.Info(args...)\n}",
"func (l *GrpcLog) Info(args ...interface{}) {\n\t// l.SugaredLogger.Info(args...)\n}",
"func (logger *Logger) Info(format string, a ...interface{}) {\n\tlogger.log(Info, format, a...)\n}",
"func (lh *logHandler) Info(data ...interface{}) {\n\tif lh.info == nil {\n\t\treturn\n\t}\n\tlh.info.Println(data...)\n}",
"func Info(text string) {\n\tprintLog(\"info\", text)\n}",
"func (log *log) Info(a ...interface{}) {\n\ta = append([]interface{}{log.attachPrefix(\"\")}, a...)\n\tlogger.InfoDepth(1, a...)\n}",
"func (l *XORMLogBridge) Info(v ...interface{}) {\n\tlog.Info(v...)\n}",
"func Info(format string, v ...interface{}) {\n\tl.output(LInfo, format, v...)\n}",
"func Info(args ...interface{}) {\n\tlogWithFilename().Info(args...)\n}",
"func Info(format string, args ...interface{}) {\n\treport(color.GreenString, \"INFO\", format, args...)\n}",
"func Info(args ...interface{}) {\n\tinfoLog.Output(CallDepth, fmt.Sprint(args...))\n}",
"func lInfo(v ...interface{}) {\n\tinfoLogger.Println(v...)\n}",
"func infoLog(msg ...interface{}) {\n\tlogLocker.Lock()\n\tdefer logLocker.Unlock()\n\tif *confVerbose {\n\t\tcolor.Cyan(fmt.Sprint(time.Now().Format(\"02_01_06-15.04.05\"), \"[INFOR] ->\", msg))\n\t}\n}",
"func (gl GoaLogger) Info(msg string, data ...interface{}) {\n\tev := gl.logger.Info()\n\tgl.log(ev, msg, data...)\n}",
"func Info(msg ...interface{}) {\n\tCurrent.Info(msg...)\n}",
"func InfoHandler(c *gin.Context) {\n\tl.Info(\"ii\")\n\tc.JSON(200, gin.H{\n\t\t\"message\": \"ok\",\n\t})\n}",
"func Info(args ...interface{}) {\n\tlog.Info(args...)\n}",
"func Info(args ...interface{}) {\n\tlog.Info(args...)\n}",
"func init() {\n\tlog.SetFormatter(&log.JSONFormatter{})\n\tlog.SetOutput(os.Stdout)\n\tlog.SetLevel(log.InfoLevel)\n}",
"func (l *Logger) Info(err errors.Error) {\n\tl.logInfo.Printf(string(err.JSON()))\n}",
"func Info(v ...interface{}) {\n\tinfoLog(v...)\n}",
"func (j DSGit) Info() string {\n\treturn fmt.Sprintf(\"%+v\", j)\n}",
"func Info(v ...interface{}) {\n\tpost(fmt.Sprint(_INFO_HEADER, fmt.Sprint(v...)), bufMap[\"InfoLocal\"])\n}",
"func Info(log string, v ...interface{}) {\n\tsyslog.Printf(\"INFO \"+log, v...)\n}",
"func (l *Logger) Info(message string) {\n\tl.printLogMessage(keptnLogMessage{Timestamp: time.Now(), Message: message, LogLevel: \"INFO\"})\n}",
"func Info(cmdTag, format string, a ...interface{}) {\n\tfmt.Printf(format+\"\\n\", a...)\n\tif level < LevelInfo || !logging {\n\t\treturn\n\t}\n\tif _, ok := cmdMap[cmdTag]; !ok {\n\t\tcmdTag = Mixer\n\t}\n\tlogTag(\"INF\", cmdTag, format, a...)\n}",
"func Info(args ...interface{}) {\r\n\tLogger.Info(\"\", args)\r\n}",
"func Info(l interface{}) {\n\tlog.WithFields(log.Fields{\n\t\t\"SERVICE\": \"WINGO\",\n\t}).Infoln(l)\n}",
"func Info(w io.Writer, format string, args ...any) {\n\tformat = strings.TrimRight(format, \"\\r\\n\") + \"\\n\"\n\tfmt.Fprintf(w, \"\\n\"+Wrap(Bold(\"INFO: \")+format, DefaultTextWidth)+\"\\n\", args...)\n}",
"func (l *Logger) INFO(msg string) {\n\tdefer l.Zap.Sync()\n\tl.Zap.Info(msg)\n}",
"func (l *Lgr) Info(args ...interface{}) {\n l.Logger.Info(args...)\n}",
"func (l *comLogger) Info(msg string) {\n\tl.Log(Info, msg)\n}",
"func Info(format string, args ...interface{}) {\n\tlog.Infof(format, args...)\n}",
"func (l *Logger) Info(msg string, args ...interface{}) {\n\tl.z.Infow(msg, args...)\n}",
"func Info(args ...interface{}) {\n\tlogger.Info(args...)\n}",
"func Info(args ...interface{}) {\n\tlogger.Info(args...)\n}",
"func Info(args ...interface{}) {\n\tlogger.Info(args...)\n}",
"func Info(args ...interface{}) {\n\tlogger.Info(args...)\n}",
"func Info(args ...interface{}) {\n\tlogger.Info(args...)\n}",
"func (w *Writer) Info(m string) error {}",
"func Info(msg string, fields ...zapcore.Field) {\n\tinitLogger()\n\n\tlogger.Info(msg, fields...)\n}",
"func Info(ctx context.Context, args ...interface{}) {\n\tGetLogger().Log(ctx, loggers.InfoLevel, 1, args...)\n}",
"func Info(args ...interface{}) {\n\twrite(\"[I] \", \"\", args...)\n}",
"func (l *logger) Info(b []byte) ([]byte, error) {\n\tlog.Infof(\"%s\", b)\n\treturn []byte(\"\"), nil\n}",
"func (zl *ZapLogger) Info(ctx context.Context, msg string, data interface{}) {\n\tzl.logger.Info(msg, fields(ctx, data, nil)...)\n}",
"func (logger *Logger) Info(args ...interface{}) {\n\tlogger.logPrint(L_INFO, args...)\n}",
"func (cl cronLogger) Info(msg string, keysAndValues ...interface{}) {\n\tcl.logger.Info(msg, zap.String(\"JobName\", cl.jobName), zap.Any(\"params\", keysAndValues))\n}",
"func (l *MessageLogger) Info(msg string) { l.logger.Info(msg) }",
"func (l LogItems) Info() error {\n\th, _ := os.Hostname()\n\tm := wrapBuildGraylogMessage(l.ShortMsg, l.FullMsg, 6, l.ExtraFields, h)\n\n\tsendGraylogMessageV2(m)\n\n\treturn nil\n}",
"func (l *Logger) Info(format string, v ...interface{}) {\n\tif Verbosity > 1 {\n\t\treturn\n\t}\n\n\tif !l.IsEnabled() {\n\t\treturn\n\t}\n\n\tv, attrs := SplitAttrs(v...)\n\n\tl.Output(1, \"INFO\", fmt.Sprintf(format, v...), attrs)\n}",
"func InfoN(name, format string, v ...interface{}) {\n\tif logger, ok := mutil[name]; ok {\n\t\tlogger.Info(format, v...)\n\t}\n}",
"func Info(args ...interface{}) {\n\tlog.Println(args...)\n}",
"func (l *logHandler) Info(args ...interface{}) {\n\tl.Log(LogInfo, 3, args...)\n}",
"func Info(v ...interface{}) string {\n\treturn logr.Info(v...)\n}",
"func (c *TogglHttpClient) infof(format string, args ...interface{}) {\n\tif c.infoLog != nil {\n\t\tc.infoLog.Printf(format, args...)\n\t}\n}",
"func Info(ctx ...interface{}) {\n\tlogNormal(infoStatus, time.Now(), ctx...)\n}",
"func Info(msg string, fields ...zap.Field) {\n\tlog.Info(msg, fields...)\n}",
"func Info(args ...interface{}) {\n\tlogger.Sugar().Info(args...)\n}",
"func (l Mylog) Info(ctx context.Context, msg string, data ...interface{}) {\n\tl.ServiceLog.Info(msg, data)\n\t//if l.LogLevel >= Info {\n\t//\tl.Printf(l.infoStr+msg, append([]interface{}{utils.FileWithLineNum()}, data...)...)\n\t//}\n}",
"func Info(format string, a ...interface{}) {\n\tif Level >= 3 {\n\t\ta, w := extractLoggerArgs(format, a...)\n\t\ts := fmt.Sprintf(label(format, InfoLabel), a...)\n\n\t\tif Color {\n\t\t\tw = color.Output\n\t\t\ts = color.MagentaString(s)\n\t\t}\n\n\t\tfmt.Fprintf(w, s)\n\t}\n}",
"func (uci *UCIAdapter) Info(info Info) {\n\tnps := int64(float64(info.nodeCount) / info.t.Seconds())\n\tuci.Send(fmt.Sprintf(\"info score cp %d depth %d nodes %d nps %d time %d pv %s\\n\", info.score,\n\t\tinfo.depth, info.nodeCount, nps, int(info.t/time.Millisecond), info.stk[0].pv.ToUCI()))\n}",
"func Info(args ...interface{}) {\n\tLogger.Info(args...)\n}",
"func JSONLogger(r *http.Request, status int, len int64, d time.Duration) {\n\tos.Stderr.WriteString(JSONLogMessage(time.Now, r.Method, r.URL, status, len, d, nil))\n}",
"func (l *Logger) Info(values ...interface{}) {\n\tif l.loggingLevel > InfoLevel {\n\t\treturn\n\t}\n\tl.log(l.infoPrefix, fmt.Sprint(values...))\n}",
"func Info(args ...interface{}) {\n\tLog.Info(args...)\n}",
"func (l *Logger) Info(log ...interface{}) {\n\tl.instance.Info(log...)\n}",
"func Info(id int64, args ...interface{}) {\n\tif IsOn() {\n\t\tAddTraceEvent(id, 1, &TraceEventDesc{\n\t\t\tDesc: fmt.Sprint(args...),\n\t\t\tSeverity: CtINFO,\n\t\t})\n\t} else {\n\t\tgrpclog.InfoDepth(1, args...)\n\t}\n}",
"func (glogger *GLogger) Info(format string, a ...interface{}) {\n\tglogger.internalLog(LInfo, fmt.Sprintf(format, a...))\n}"
] | [
"0.7321206",
"0.7151799",
"0.7080367",
"0.6893367",
"0.68167025",
"0.6690214",
"0.6590823",
"0.6548917",
"0.6489237",
"0.64583206",
"0.64574397",
"0.6440896",
"0.6437558",
"0.6433981",
"0.6421429",
"0.64140487",
"0.6408576",
"0.6364043",
"0.63606143",
"0.63484424",
"0.6333254",
"0.6330368",
"0.63294595",
"0.6327324",
"0.63153803",
"0.63031965",
"0.6285633",
"0.6282434",
"0.6270961",
"0.62570244",
"0.62353396",
"0.6231602",
"0.6225744",
"0.6223457",
"0.6210471",
"0.61881614",
"0.61868167",
"0.61787844",
"0.6168686",
"0.6166174",
"0.616076",
"0.6155803",
"0.6151831",
"0.6148514",
"0.6147049",
"0.6142449",
"0.6140066",
"0.61277556",
"0.6125243",
"0.6125243",
"0.61207545",
"0.6120584",
"0.6119636",
"0.6118495",
"0.6114251",
"0.61056024",
"0.609977",
"0.60993165",
"0.60983706",
"0.60960567",
"0.6093078",
"0.60899043",
"0.6089246",
"0.6088157",
"0.608768",
"0.60823905",
"0.6079418",
"0.6079418",
"0.6079418",
"0.6079418",
"0.6079418",
"0.6074402",
"0.60641783",
"0.6059087",
"0.6057443",
"0.6052276",
"0.60512483",
"0.6050827",
"0.60498345",
"0.6046076",
"0.6041726",
"0.6037991",
"0.6035068",
"0.6030493",
"0.60294086",
"0.6025556",
"0.6020503",
"0.6019458",
"0.601587",
"0.60074973",
"0.60068226",
"0.5999548",
"0.5996943",
"0.59959996",
"0.5993554",
"0.5990221",
"0.59852165",
"0.5982088",
"0.5980201",
"0.59784526"
] | 0.71056604 | 2 |
Warnj warning json log | func (l *EchoLogrus) Warnj(j log.JSON) {
l.Logger.WithFields(logrus.Fields(j)).Warn()
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (l *JSONLogger) Warning(format string, args ...interface{}) {\n\tl.Log(\"warning\", format, args...)\n}",
"func (l *jsonLogger) Warn(message interface{}, params ...interface{}) {\n\tl.jsonLogParser.parse(context.Background(), l.jsonLogParser.log.Warn(), \"\", params...).Msgf(\"%s\", message)\n}",
"func (d *DummyLogger) Warning(format string) {}",
"func Warn(v ...interface{}) {\n\tjasonLog.output(2, levelWarn, \"\", v...)\n}",
"func (l *Logger) Warn(err errors.Error) {\n\tl.logWarn.Printf(string(err.JSON()))\n}",
"func ExampleWarn() {\n\tsetup()\n\tlog.Warn().Msg(\"hello world\")\n\n\t// Output: {\"level\":\"warn\",\"time\":1199811905,\"message\":\"hello world\"}\n}",
"func Warning(format string, v ...interface{}) {\n\tdoLog(\"WARNING\", format, v...)\n}",
"func (l *ZapLogger) Warning(format string) {\n\tl.logger.Warn(format)\n}",
"func Warnf(format string, v ...interface{}) {\n\tjasonLog.output(2, levelWarn, format, v...)\n}",
"func Warn(data []byte) {\n\tlog.Print(\"WARNING: \", string(data))\n}",
"func Warn(args ...interface{}) {\n\txlog.SetFormatter(&logrus.JSONFormatter{})\n\txlog.SetLevel(logrus.DebugLevel)\n\n\txlog.Warn(args...)\n}",
"func (l *Logger) Warn(v ...interface{}) { l.lprint(WARN, v...) }",
"func (logger *Logger) Warning(format string, a ...interface{}) {\n\tlogger.log(Warning, format, a...)\n}",
"func lWarn(v ...interface{}) {\n\t/* #nosec */\n\t_ = warnLogger.Output(2, fmt.Sprintln(v...)) //Following log package, ignoring error value\n}",
"func (l *GrpcLog) Warning(args ...interface{}) {\n\tl.SugaredLogger.Warn(args...)\n}",
"func WarnHandler(c *gin.Context) {\n\tl.Warn(\"ee\")\n\tc.JSON(200, gin.H{\n\t\t\"message\": \"ok\",\n\t})\n}",
"func (w *Writer) Warning(m string) error {}",
"func Warning(format string, a ...interface{}) {\n\tif Level >= 2 {\n\t\ta, w := extractLoggerArgs(format, a...)\n\t\ts := fmt.Sprintf(label(format, WarningLabel), a...)\n\n\t\tif Color {\n\t\t\tw = color.Output\n\t\t\ts = color.YellowString(s)\n\t\t}\n\n\t\tfmt.Fprintf(w, s)\n\t}\n}",
"func Warning(v ...interface{}) {\n\tlogger.Warning(v...)\n}",
"func Warn(v ...interface{}) { std.lprint(WARN, v...) }",
"func Warning(format string, a ...interface{}) {\n\tprefix := magenta(warn)\n\tlog.Println(prefix, fmt.Sprintf(format, a...))\n}",
"func Warn(format string, v ...interface{}) {\n\tif LogLevel() <= 2 {\n\t\tlog.Printf(\"WARN: \"+format, v...)\n\t}\n}",
"func Warn(format string, args ...interface{}) {\n\treport(color.YellowString, \"WARN\", format, args...)\n}",
"func Warning(args ...interface{}) {\n\tlogger.Warning(args...)\n}",
"func (l *AppLogger) Warn(tag string, message ...interface{}) {\n\tl.logging.SetFormatter(&logrus.JSONFormatter{})\n\tk := getAppFields(l.reqId, tag, l.userId)\n\tl.logging.WithFields(k).Warn(message...)\n}",
"func (l *MessageLogger) Warn(msg string) { l.logger.Warn(msg) }",
"func Warning(args ...interface{}) {\n\tcurrentLogger.WarningDepth(context.Background(), defaultDepth, args...)\n}",
"func (logger *Logger) Warning(args ...interface{}) {\n\tlogger.std.Log(append([]interface{}{\"Warning\"}, args...)...)\n}",
"func Warning(args ...interface{}) {\n\tLogger.Warning(args...)\n}",
"func (o Object) Warn(f string, a ...interface{}) {\n\tif o.WarningEnabled {\n\t\to.PrintMsg(\"WARNING\", 2, f, a...)\n\t}\n}",
"func Warning(cmdTag, format string, a ...interface{}) {\n\tfmt.Printf(\"Warning: \"+format+\"\\n\", a...)\n\tif level < LevelWarning || !logging {\n\t\treturn\n\t}\n\tif _, ok := cmdMap[cmdTag]; !ok {\n\t\tcmdTag = Mixer\n\t}\n\tlogTag(\"WRN\", cmdTag, format, a...)\n}",
"func Warning(a ...interface{}) {\n\tcolor.Set(color.FgYellow)\n\tdefer color.Unset()\n\twarningLogger.Println(a...)\n}",
"func (l *Logger) Warn(v ...interface{}) {\n\tif l.loglevel <= sWarning {\n\t\tl.output(sWarning, 0, fmt.Sprint(v...))\n\t} else {\n\t\treturn\n\t}\n}",
"func Warning(format string, args ...interface{}) {\n\tlog.Warningf(format, args...)\n}",
"func Warning(args ...interface{}) {\n LoggerOf(default_id).Warning(args...)\n}",
"func Warning(args ...interface{}) {\n\tWarn(args...)\n}",
"func (l typeLogger) Warn(format string, v ...interface{}) {\n\tif l.level <= logWarning {\n\t\tmessage := fmt.Sprintf(format, v...)\n\t\tl.logger.Printf(\"%s%s%s: %s (%s)\", colorWarning, tagWarning, colorClear, message, getCallerPosition())\n\t}\n}",
"func (lg *Logger) Warning(args ...interface{}) {\n if lg.level <= WARNING {\n lg.logger.SetPrefix(LEVELS[WARNING])\n lg.logger.Println(args...)\n }\n}",
"func (logger *Logger) Warning(args ...interface{}) {\n\tlogger.logPrint(L_WARNING, args...)\n}",
"func Warnf(format string, args ...interface{}) {\n\txlog.SetFormatter(&logrus.JSONFormatter{})\n\txlog.SetLevel(logrus.DebugLevel)\n\n\txlog.Warnf(format, args...)\n}",
"func (l *sdkLogger) Warn(v ...interface{}) {\n\tl.Warning(v)\n}",
"func Warning(v ...interface{}) {\n if level <= LevelWarning {\n StdOutLogger.Printf(\"[W] %v\\n\", v)\n }\n}",
"func Warning(msg string, args ...interface{}) {\n\tif level&WARN != 0 {\n\t\twriteMessage(\"WRN\", msg, args...)\n\t}\n}",
"func Warn(format string, v ...interface{}) {\n\tLeveledLogger(level.Warn, format, v...)\n}",
"func Warn(l interface{}) {\n\tlog.WithFields(log.Fields{\n\t\t\"SERVICE\": \"WINGO\",\n\t}).Warnln(l)\n}",
"func Warn(format string, v ...interface{}) {\n\tDefaultLogger.Warn(format, v...)\n}",
"func (l *Logger) Warn(v ...interface{}) {\n\tl.NewEntry().Print(WARN, v...)\n}",
"func Warning(w io.Writer, format string, args ...any) {\n\tformat = strings.TrimRight(format, \"\\r\\n\") + \"\\n\"\n\tfmt.Fprintf(w, \"\\n\"+Wrap(BoldYellow(\"WARNING: \")+format, DefaultTextWidth)+\"\\n\", args...)\n}",
"func (l *Logger) WARN(msg string) {\n\tdefer l.Zap.Sync()\n\tl.Zap.Warn(msg)\n}",
"func (sl *SysLogger) Warning(info, msg string) {\n\tlog.Println(\"[WARNING]\", sl.tag, info, msg)\n}",
"func Warn(format string, v ...interface{}) {\n\tLog(1, WARN, format, v...)\n}",
"func Warn(v ...interface{}) {\n\tstdLogger.Log(WarnLevel, fmt.Sprint(v...))\n}",
"func Warn(v ...interface{}) {\n\tif level <= LevelWarning {\n\t\tTorbitLogger.Printf(\"[W] %v\\n\", v)\n\t}\n}",
"func Warning(v ...interface{}) {\n\toutput(LevelWarning, v)\n}",
"func (logger *ColorLogger) Warn(format string, args ...interface{}) {\n\tlogger.log(LOG_LEVEL_WARN, \"yellow\", format, args...)\n}",
"func Warning(f interface{}, v ...interface{}) {\n\tlogs.Warning(f, v...)\n}",
"func (l *Logger) Warnf(format string, v ...interface{}) { l.lprintf(WARN, format, v...) }",
"func Warn(format string, a ...interface{}) {\n\tif currentLogger == nil {\n\t\treturn\n\t}\n\tcurrentLogger.output(currentPool, _WarnLevel, format, a...)\n}",
"func Warning(text string) {\n\tprintLog(\"warning\", text)\n}",
"func Warn(msg ...interface{}) {\n\tCurrent.Warn(msg...)\n}",
"func TestWarn(t *testing.T) {\n\tvar data = []byte(`Log this!`)\n\tapolog.Warn(data)\n}",
"func WarnN(name, format string, v ...interface{}) {\n\tif logger, ok := mutil[name]; ok {\n\t\tlogger.Warn(format, v...)\n\t}\n}",
"func (l *jsonLogger) WarnContext(ctx context.Context, message interface{}, params ...interface{}) {\n\tl.jsonLogParser.parse(ctx, l.jsonLogParser.log.Warn(), \"\", params...).Msgf(\"%s\", message)\n}",
"func (l *Lgr) Warn(args ...interface{}) {\n l.Logger.Warn(args...)\n}",
"func (d *DummyLogger) Warningf(format string, args ...interface{}) {}",
"func Warning(format string, args ...interface{}) {\n\tdo(WARNING, format, args...)\n}",
"func (c *context) Warning(format string, args ...interface{}) {\n\tc.logger.Warning(c.prefixFormat()+format, args...)\n}",
"func printWarn(msg string) {\n\tif Verbose {\n\t\tfmt.Println(fmt.Sprintf(\" WARN %s: %s\", getFormattedTime(), msg))\n\t}\n}",
"func (b *Logger) Warning(format string, args ...interface{}) error {\n\tif Level(atomic.LoadUint32((*uint32)(&b.level))) > Warning {\n\t\treturn nil\n\t}\n\tvar prefix []string\n\tif b.prefix != \"\" {\n\t\tprefix = []string{b.prefix}\n\t}\n\treturn b.parent.log(&event{format: format, args: args, prefix: prefix, tracer: b.tracer, level: Warning})\n}",
"func WarnLog(service, action, resource, id string, gotError error) {\n\tlog.Printf(\"[WARN] %s\", ProblemStandardMessage(service, action, resource, id, gotError))\n}",
"func Warn(log string, v ...interface{}) {\n\tsyslog.Printf(\"WARN \"+log, v...)\n}",
"func (v Verbosity) Warning(args ...interface{}) {\n\tif v {\n\t\twarningLog.Output(CallDepth+1, fmt.Sprint(args...))\n\t}\n}",
"func (tcr *TestCaseReporter) Warning(format string, v ...interface{}) {\n\ttcr.logPrintf(format, v...)\n}",
"func (l Log) Warning(format string, args ...interface{}) {\n\tl.Logger.Sugar().Warnf(format, args...)\n}",
"func Warning(args ...interface{}) {\r\n\tif *gloged {\r\n\t\tglog.Warning(args...)\r\n\t} else {\r\n\t\tlog.Println(args...)\r\n\t}\r\n}",
"func Warning(args ...interface{}) {\n\tlogWithFilename().Warning(args...)\n}",
"func (z *Logger) Warn(args ...interface{}) {\n\tz.SugaredLogger.Warn(args...)\n}",
"func Warn(args ...interface{}) {\r\n\tLogger.Warn(\"\", args)\r\n}",
"func Warning(ctx ...interface{}) {\n\tlogNormal(warningStatus, time.Now(), ctx...)\n}",
"func (lc mockNotifyLogger) Warn(msg string, args ...interface{}) {\n}",
"func (l *loggerWrapper) Warning(args ...interface{}) {\n\tl.logger.Warn(sprint(args...))\n}",
"func (aspect LogAspect) Warning(format string, items ...interface{}) {\n\taspect.Log(WARNING, format, items...)\n}",
"func logWarn(format string, v ...interface{}) {\n\ts := fmt.Sprintf(format, v...)\n\tmsgPrintln(s)\n\tlogger.Println(s)\n}",
"func Warning(args ...interface{}) {\n\tlog.Println(args...)\n}",
"func (s *Slack) Warning(msg string, v ...interface{}) error {\n\treturn s.Send(\"warning\", msg, v...)\n}",
"func Warn(msg ...interface{}) {\n\tlog(defaultLogger, WARNING, msg...)\n}",
"func printWarning(format string, a ...interface{}) {\n\tif fetchConfig().verbosity < Warning {\n\t\treturn\n\t}\n\tif fetchConfig().color {\n\t\tformat = color.FgYellow.Render(format)\n\t}\n\tfmt.Fprintf(os.Stderr, format+\"\\n\", a...)\n}",
"func (logger *Logger) Warning(msg string, extras ...map[string]string) error {\n\tif WarnLevel >= logger.LogLevel {\n\t\treturn logger.Log(msg, WarnLevel, extras...)\n\t}\n\treturn nil\n}",
"func (s SugaredLogger) Warn(message string, fields ...interface{}) {\n\ts.zapLogger.Warnw(message, fields...)\n}",
"func (l *Log) Warning(v ...interface{}) {\n\tl.append(lWarning, fmt.Sprint(v...))\n}",
"func Warn(format string, v ...interface{}) {\n\tmsg := fmt.Sprintf(\"[W]\"+format, v...)\n\tlog.Println(msg)\n}",
"func Warn(msg string, fields ...zap.Field) {\n\tlogger.Warn(msg, fields...)\n}",
"func Warn(msg string) {\n\tif lvl <= war {\n\t\tl.Print(\"[WARN ]: \" + msg)\n\t}\n}",
"func (s *Session) issueWarning(code ErrorCode) { s.SendMethod(\"warn\", code.ResponseError()) }",
"func (l *Logger) Warning(a ...interface{}) {\n\tif l.Level() >= Warning {\n\t\tl.logWarning.Print(a...)\n\t}\n}",
"func (l *Logger) Warning(values ...interface{}) {\n\tif l.loggingLevel > WarningLevel {\n\t\treturn\n\t}\n\tl.log(l.warningPrefix, fmt.Sprint(values...))\n}",
"func (logger *Logger) Warn(args ...interface{}) {\n\tlogger.std.Log(append([]interface{}{\"Warn\"}, args...)...)\n}",
"func (l *XORMLogBridge) Warn(v ...interface{}) {\n\tlog.Warning(v...)\n}",
"func (l *Log) Warning(v ...interface{}) {\n\tif l.Level >= logLevelWarning {\n\t\tl.LogWF.Output(2, fmt.Sprintln(\" Warning \", v))\n\t}\n}",
"func (v *MultiLogger) Warning(args ...interface{}) {\n\tv.Warn(args...)\n}"
] | [
"0.75583",
"0.69907176",
"0.68272567",
"0.6758172",
"0.6574501",
"0.65728384",
"0.6472663",
"0.6430965",
"0.6343016",
"0.6327992",
"0.6319481",
"0.6286859",
"0.62543046",
"0.6215221",
"0.6163449",
"0.6122632",
"0.61130935",
"0.6112975",
"0.6100657",
"0.61001635",
"0.6091717",
"0.6085956",
"0.60334283",
"0.6032433",
"0.59883934",
"0.5983114",
"0.5983028",
"0.59701574",
"0.5963569",
"0.5956295",
"0.59487116",
"0.5946793",
"0.5936715",
"0.59358066",
"0.5928412",
"0.5925354",
"0.5923957",
"0.59031636",
"0.5902889",
"0.5898043",
"0.58946514",
"0.5894179",
"0.5889904",
"0.58811",
"0.58783317",
"0.5873124",
"0.5871887",
"0.58695585",
"0.58688176",
"0.58598185",
"0.5858946",
"0.58487797",
"0.58429176",
"0.583503",
"0.58334625",
"0.5830703",
"0.5825129",
"0.5824737",
"0.58237827",
"0.5814136",
"0.58127886",
"0.5811747",
"0.58091426",
"0.5807227",
"0.58066934",
"0.58055174",
"0.5803312",
"0.5802733",
"0.58022034",
"0.5802093",
"0.57992893",
"0.57984996",
"0.5796529",
"0.57931036",
"0.5781752",
"0.57816535",
"0.57813346",
"0.5780131",
"0.5770399",
"0.57695913",
"0.57651633",
"0.57613117",
"0.57561684",
"0.5755771",
"0.57450545",
"0.57450277",
"0.57393193",
"0.57333434",
"0.5732875",
"0.57306325",
"0.57301474",
"0.57262176",
"0.57249826",
"0.57214457",
"0.57169074",
"0.5716899",
"0.57132834",
"0.57093847",
"0.57071126",
"0.5700118"
] | 0.69637924 | 2 |
Fatalj fatal json log | func (l *EchoLogrus) Fatalj(j log.JSON) {
l.Logger.WithFields(logrus.Fields(j)).Fatal()
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (l *jsonLogger) Fatal(message interface{}, params ...interface{}) {\n\tl.jsonLogParser.parse(context.Background(), l.jsonLogParser.log.Fatal(), \"\", params...).Msgf(\"%s\", message)\n}",
"func Fatal(v ...interface{}) {\n\tjasonLog.output(2, levelFatal, \"\", v...)\n}",
"func TestFatalJSONMsg(t *testing.T) {\n\tfatalErr := Msg{\n\t\tMessage: \"This is a multiline\\nfatal message\\n\",\n\t\tCode: 2121,\n\t\tLevel: \"FATAL\",\n\t}\n\tapiVer := \"0.1\"\n\toutput := FatalJSONMsg(apiVer, fatalErr)\n\tcheckResultContains(t, output, ` \"apiVersion\": \"0.1\"`)\n\tcheckResultContains(t, output, ` \"error\": {`)\n\tcheckResultContains(t, output, ` \"message\": \"This is a multiline\\u000afatal `)\n\tcheckResultContains(t, output, ` \"code\": 2121,`)\n\n\t// Now lets see if the JSON returned was good\n\tvar result interface{}\n\terr := json.Unmarshal([]byte(output), &result)\n\tif err != nil {\n\t\tt.Fatalf(\"Unable to unmarshal JSON generated by FatalJSONMsg(), error: %s\\n\", err)\n\t}\n}",
"func Fatal(args ...interface{}) {\n\txlog.SetFormatter(&logrus.JSONFormatter{})\n\txlog.SetLevel(logrus.DebugLevel)\n\n\txlog.Fatal(args...)\n}",
"func (l *Logger) Fatal(err errors.Error) {\n\tl.logFatal.Printf(string(err.JSON()))\n}",
"func (l *jsonLogger) Fatalln(message interface{}, params ...interface{}) {\n\tl.jsonLogParser.parse(context.Background(), l.jsonLogParser.log.Fatal(), \"\", params...).Msgf(\"%s\", message)\n}",
"func Fatalf(format string, v ...interface{}) {\n\tjasonLog.output(2, levelFatal, format, v...)\n}",
"func (l *AppLogger) Fatal(tag string, message ...interface{}) {\n\tl.logging.SetFormatter(&logrus.JSONFormatter{})\n\tk := getAppFields(l.reqId, tag, l.userId)\n\tl.logging.WithFields(k).Fatal(message...)\n}",
"func Fatal(data []byte) {\n\tlog.Fatal(\"FATAL: \", string(data))\n}",
"func fatalf(msg string, args ...interface{}) {\n\terr := cloudprovider.VolumeError{\n\t\tMessage: fmt.Sprintf(msg, args...),\n\t\tStatus: \"Failure\",\n\t}\n\tfmt.Printf(err.ToJson())\n\tos.Exit(1)\n}",
"func wsFatal(conn *websocket.Conn, serverMsg string, clientMsg string) {\n\tlog.Error(serverMsg)\n\tpayload := Message{\n\t\tFatal: clientMsg,\n\t}\n\tresJSON, err := json.Marshal(payload)\n\tif err != nil {\n\t\tmsg := fmt.Sprintf(\"Failed to marshal result : %v\", err)\n\t\tlog.Error(msg)\n\t\treturn\n\t}\n\terr = conn.WriteMessage(websocket.TextMessage, resJSON)\n\tif err != nil {\n\t\tlog.Error(\"Couldn't write to conn: %v\", err)\n\t}\n\treturn\n}",
"func Fatalf(format string, args ...interface{}) {\n\txlog.SetFormatter(&logrus.JSONFormatter{})\n\txlog.SetLevel(logrus.DebugLevel)\n\n\txlog.Fatalf(format, args...)\n}",
"func (zl *ZapLogger) Fatal(ctx context.Context, msg string, data interface{}, err error) {\n\tzl.logger.Fatal(msg, fields(ctx, data, err)...)\n}",
"func Fatal(format string, v ...interface{}) {\n\tif LogLevel() <= 4 {\n\t\tlog.Printf(\"FATAL: \"+format, v...)\n\t}\n}",
"func (l *Logger) Fatal(v ...interface{}) { l.lprint(FATAL, v...) }",
"func Fatal(msg string, fields ...zapcore.Field) {\n\tinitLogger()\n\n\tlogger.Fatal(msg, fields...)\n}",
"func (l *EchoLogrus) Panicj(j log.JSON) {\n\tl.Logger.WithFields(logrus.Fields(j)).Panic()\n}",
"func ExampleFatal() {\n\tsetup()\n\terr := errors.New(\"A repo man spends his life getting into tense situations\")\n\tservice := \"myservice\"\n\n\tlog.Fatal().\n\t\tErr(err).\n\t\tStr(\"service\", service).\n\t\tMsgf(\"Cannot start %s\", service)\n\n\t// Outputs: {\"level\":\"fatal\",\"time\":1199811905,\"error\":\"A repo man spends his life getting into tense situations\",\"service\":\"myservice\",\"message\":\"Cannot start myservice\"}\n}",
"func (log *log) Fatal(a ...interface{}) {\n\ta = append([]interface{}{log.attachPrefix(\"\")}, a...)\n\tlogger.FatalDepth(1, a...)\n}",
"func (z *Logger) Fatal(args ...interface{}) {\n\tz.SugaredLogger.Fatal(args...)\n}",
"func Fatal(v ...interface{}) {\n\tairshipLog.Fatal(v...)\n}",
"func Fatal(v ...interface{}) {\n\tcheckInit()\n\ts := fmt.Sprint(v...)\n\tstd.Report(s)\n\tlog.Fatal(s)\n}",
"func lFatal(v ...interface{}) {\n\t/* #nosec */\n\t_ = errLogger.Output(2, fmt.Sprintln(v...)) //Following log package, ignoring error value\n\tos.Exit(1)\n}",
"func (l *GrpcLog) Fatal(args ...interface{}) {\n\tl.SugaredLogger.Fatal(args...)\n}",
"func Fatal(format string, v ...interface{}) {\n\tlog.Fatalf(format + \"\\n\", v ...)\n}",
"func Fatal(msg string, fields ...zap.Field) {\n\tlogger.Fatal(msg, fields...)\n}",
"func (s SugaredLogger) Fatal(message string, fields ...interface{}) {\n\ts.zapLogger.Fatalw(message, fields...)\n}",
"func Fatal(msg string, fields ...zapcore.Field) {\n\tGetZapLogger().Fatal(msg, fields...)\n}",
"func Fatal(format string, v ...interface{}) {\n\tl.output(LFatal, format, v...)\n}",
"func fatal(err error) {\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n}",
"func (l typeLogger) Fatal(format string, v ...interface{}) {\n\tmessage := fmt.Sprintf(format, v...)\n\tl.logger.Printf(\" %s%s%s : %s (%s)\", colorFatal, tagFatal, colorClear, message, getCallerPosition())\n\tos.Exit(1)\n}",
"func (l *jsonLogger) FatalContext(ctx context.Context, message interface{}, params ...interface{}) {\n\tl.jsonLogParser.parse(ctx, l.jsonLogParser.log.Fatal(), \"\", params...).Msgf(\"%s\", message)\n}",
"func Fatal(v ...interface{}) {\n\t// Send to Output instead of Fatal to allow us to increase the output depth by 1 to make sure the correct file is displayed\n\tfatalLogger.Output(2, fmt.Sprint(v...))\n\tos.Exit(1)\n}",
"func Fatal(v ...interface{}) { std.lprint(FATAL, v...) }",
"func Fatal(format string, v ...interface{}) {\n\tLeveledLogger(level.Fatal, format, v...)\n}",
"func (l *zapLog) Fatal(args ...interface{}) {\n\tif l.logger.Core().Enabled(zapcore.FatalLevel) {\n\t\tl.logger.Fatal(fmt.Sprint(args...))\n\t}\n}",
"func Fatal(message string, fields ...zap.Field) {\n\tlogger.Fatal(message, fields...)\n}",
"func Fatal(args ...interface{}) {\n LoggerOf(default_id).Fatal(args...)\n}",
"func catchFatal() {\n\terr := recover()\n\tif err != nil {\n\t\tentry := err.(*logrus.Entry)\n\t\tlog.WithFields(logrus.Fields{\n\t\t\t\"err_level\": entry.Level,\n\t\t\t\"err_message\": entry.Message,\n\t\t}).Error(\"Server Panic\")\n\t}\n}",
"func (c *T) Fatal(args ...interface{})",
"func (lg *logger) Fatal(err error) {\n\n\tif sentry {\n\t\traven.CapturePanicAndWait(func() {\n\t\t\tpanic(err)\n\t\t}, lg.buffer.Tags)\n\t}\n\n\tfields := lg.checkBuffer()\n\tlg.glg.Fatalf(LN_FMT, err.Error(), fields)\n\n}",
"func (log Logger) Fatal(message string, fields ...Data) {\n\tlog.log(log.zl.Fatal(), message, fields...)\n}",
"func fatal_error(tls *libc.TLS, zMsg uintptr, va uintptr) { /* speedtest1.c:116:13: */\n\tvar ap va_list\n\t_ = ap\n\tap = va\n\tlibc.Xvfprintf(tls, libc.X__acrt_iob_func(tls, uint32(2)), zMsg, ap)\n\t_ = ap\n\tlibc.Xexit(tls, 1)\n}",
"func Fatal(l interface{}) {\n\tlog.WithFields(log.Fields{\n\t\t\"SERVICE\": \"WINGO\",\n\t}).Fatalln(l)\n}",
"func (c *jobMessage) watchFatal() {\n\n}",
"func Fatal(v ...interface{}) {\n\toutput(LevelFatal, v...)\n}",
"func (r *reporter) Fatal(args ...interface{}) {\n\tr.Fail()\n\tpanic(fmt.Sprint(args...))\n}",
"func Fatal(format string, v ...interface{}) {\n\tLog(1, FATAL, format, v...)\n\tClose()\n\tos.Exit(1)\n}",
"func Fatalln(v ...interface{}) {\n\tLogger.Fatal(v...)\n}",
"func Fatal(args ...interface{}) {\n\tlogger.Fatal(args...)\n}",
"func Fatal(args ...interface{}) {\n\tlogger.Fatal(args...)\n}",
"func Fatal(v ...interface{}) {\n Std.Output(LevelFatal, CallDepth, sout(v...))\n os.Exit(1)\n}",
"func Fatal(msg string, fields ...zap.Field) {\n\tlog.Error(msg, fields...)\n}",
"func (l *Logger) Fatalln(v ...interface{}) { l.lprintln(FATAL, v...) }",
"func Fatal(values map[string]interface{}) {\n\tvalues[LevelKey] = LevelFatal\n\tLog(values)\n}",
"func (stimLogger *FullStimLogger) Fatal(message ...interface{}) {\n\tif stimLogger.highestLevel >= FatalLevel {\n\t\tif stimLogger.setLogger == nil {\n\t\t\tstimLogger.writeLogs(stimLogger.formatString(FatalLevel, fatalMsg, message...))\n\t\t} else {\n\t\t\tstimLogger.setLogger.Fatal(message...)\n\t\t}\n\t\tos.Exit(5)\n\t}\n}",
"func (claims Claims) Fatal(v ...interface{}) {\n\tt := service.LogTime()\n\tmessage := fmt.Sprint(v...)\n\tservice.Log(2, false, message, claims.parse(), proto.Log_FATAL, t).Result()\n\tBuiltInExit(1)\n}",
"func Fatal(args ...interface{}) {\n\tlogging.print(severity.FatalLog, logging.logger, logging.filter, args...)\n}",
"func (log *TcLog) Fatal(format string, v ...interface{}) {\n\tif LevelFatal < log.level {\n\t\treturn\n\t}\n\tmsg := fmt.Sprintf(format, v...)\n\tlog.writeMsg(LevelFatal, msg)\n}",
"func Fatal(v ...interface{}) {\n\tlogger.Fatal(v...)\n}",
"func Fatal(v ...interface{}) {\n\tlogger.Fatal(v...)\n}",
"func (lh *logHandler) Fatal(data ...interface{}) {\n\tif lh.fatal == nil {\n\t\treturn\n\t}\n\tlh.fatal.Fatalln(data...)\n}",
"func (n *Null) Fatal(args ...interface{}) {\n}",
"func (glogger *GLogger) Fatal(format string, a ...interface{}) {\n\tglogger.internalLog(LFatal, fmt.Sprintf(format, a...))\n}",
"func (v Verbosity) Fatal(args ...interface{}) {\n\tif v {\n\t\tfatalLog.Output(CallDepth+1, fmt.Sprint(args...))\n\t}\n}",
"func fatal(message string, args ...interface{}) {\n\tsyntaxError()\n\tfmt.Fprintf(os.Stderr, message, args...)\n\tos.Exit(1)\n}",
"func Fatal(v ...interface{}) {\n\tstdLogger.Log(FatalLevel, fmt.Sprint(v...))\n\tos.Exit(1)\n}",
"func Fatal(format string, args ...interface{}) {\n\tlog.Fatalf(format, args...)\n}",
"func Fatal(i interface{}) {\n\tl.Fatal(i)\n}",
"func fatal(format string, a ...interface{}) {\n\tfmt.Fprintf(os.Stderr, \"\\033[31;1m\\nerror: %v\\033[0m\\n\\n\", fmt.Sprintf(format, a...))\n\tos.Exit(1)\n}",
"func Fatal(v ...interface{}) {\n\tstd.Output(std.callDepth, fmt.Sprint(v...), FatalLevel)\n\tos.Exit(1)\n}",
"func (l *Logger) FATAL(msg string) {\n\tdefer l.Zap.Sync()\n\tl.Zap.Fatal(msg)\n}",
"func Fatal(a ...interface{}) {\n\tcolor.Set(color.FgRed)\n\tdefer color.Unset()\n\tfatalLogger.Println(a...)\n\tos.Exit(1)\n}",
"func Fatal(log string, v ...interface{}) {\n\tsyslog.Fatalf(\"FATAL \"+log, v...)\n}",
"func (c *context) Fatal(format string, args ...interface{}) {\n\tc.logger.Fatal(c.prefixFormat()+format, args...)\n}",
"func Fatal(args ...interface{}) {\n\tlogWithFilename().Fatal(args...)\n}",
"func (l *GlogLogger) Fatal(ctx context.Context, format string, args ...interface{}) {\n\tmsg := fmt.Sprintf(format, args...)\n\t// #nosec G104\n\tglog.ErrorDepth(1, msg)\n\tos.Exit(1)\n}",
"func (lw *LogWriter) Fatal(actor, event string, attrs map[string]string) {\n\tif lw.lvl > LevelFatal {\n\t\treturn\n\t}\n\tlw.output(lw.we, LevelFatal, actor, event, attrs)\n\tos.Exit(1)\n}",
"func (z *ZapLogWrapper) Fatal(args ...interface{}) {\n\tz.l.Fatal(args...)\n}",
"func (lg *Logger) Fatal(args ...interface{}) {\n if lg.level <= FATAL {\n lg.logger.SetPrefix(LEVELS[FATAL])\n lg.logger.Fatalln(args...)\n }\n}",
"func Fatal(args ...interface{}) {\n\tlog.Fatal(args...)\n}",
"func Fatal(args ...interface{}) {\n\tlog.Fatal(args...)\n}",
"func (l *Logger) Fatal(v ...interface{}) {\n\tl.log.Output(l.calldepth, header(\"FTL\", fmt.Sprint(v...)))\n\tos.Exit(1)\n}",
"func Fatal(msg string, fields ...zapcore.Field) {\n\tdefaultLogger.Fatal(msg, fields...)\n}",
"func (s *Scope) Fatal(msg string, fields ...zapcore.Field) {\n\tif s.GetOutputLevel() >= FatalLevel {\n\t\ts.emit(zapcore.FatalLevel, s.GetStackTraceLevel() >= FatalLevel, msg, fields)\n\t}\n}",
"func (xlog *Logger) Fatal(v ...interface{}) {\n\tlog.Std.Output(xlog.ReqId, log.Lfatal, 2, fmt.Sprint(v...))\n\tos.Exit(1)\n}",
"func Fatal(ctx context.Context, msg string, fields ...zap.Field) {\n\tFromContext(ctx).WithOptions(zap.AddCallerSkip(1)).Fatal(msg, fields...)\n}",
"func Fatal(format string, v ...interface{}) {\n\tlog.Fatalf(format, v...)\n\tos.Exit(1)\n}",
"func Fatal(err error) {\n\t_, file, line, _ := runtime.Caller(1)\n\tfmt.Printf(fatalFmt, filepath.Base(file), line, err.Error())\n\togl.SetLevel(ogl.WARN)\n\tpanic(err)\n}",
"func Fatal(v ...interface{}) {\n\tlog.Fatal(v...)\n}",
"func Fatal(a interface{}) {\n\tFatalf(\"%s\\n\", a)\n}",
"func (l *LogrusLogger) Fatal(params ...interface{}) {\n\tf, msg := mustExtractLogrusParams(params)\n\tlogrus.WithFields(f).Fatal(msg)\n}",
"func Fatal(args ...interface{}) {\n\tfatalLog.Output(CallDepth, fmt.Sprint(args...))\n\t// Todo: check if we need to flush here.\n\tos.Exit(1)\n}",
"func Fatalf(data string, v ...interface{}) {\n\t// Send to Output instead of Fatal to allow us to increase the output depth by 1 to make sure the correct file is displayed\n\tfatalLogger.Output(2, fmt.Sprintf(data, v...))\n\tos.Exit(1)\n}",
"func Fatal(message interface{}) {\n\tglobalLogger.Fatal(message)\n}",
"func Fatal(msg string, fields ...zap.Field) {\n\tglobalLoggerWarp().Fatal(msg, fields...)\n}",
"func (logger *Logger) Fatalf(format string, args ...interface{}) {\n\tlogger.std.Logf(\"Fatal\"+format, args...)\n}",
"func Fatal(args ...interface{}) {\n\tglobal.Fatal(args...)\n}",
"func Fatal(a ...interface{}) {\n\tlogrus.Fatal(a...)\n}",
"func Fatal(msg ...interface{}) {\n\tsyslog.Fatalln(\"F:\", msg)\n}"
] | [
"0.7188736",
"0.6748402",
"0.6675887",
"0.6648496",
"0.6544118",
"0.6526498",
"0.63779336",
"0.6328552",
"0.6310755",
"0.6282695",
"0.59981763",
"0.59522444",
"0.5941097",
"0.5932604",
"0.59110826",
"0.5877525",
"0.5863888",
"0.5845753",
"0.5837118",
"0.5808149",
"0.5792411",
"0.5788344",
"0.5782515",
"0.5777389",
"0.57735866",
"0.57711893",
"0.5769443",
"0.5767326",
"0.57630223",
"0.5761902",
"0.5743946",
"0.57373995",
"0.57270324",
"0.5706256",
"0.57058996",
"0.56947464",
"0.5692556",
"0.5692543",
"0.5690176",
"0.568915",
"0.56877816",
"0.5684053",
"0.5682541",
"0.56793636",
"0.56759435",
"0.56691897",
"0.5658476",
"0.5655881",
"0.56510144",
"0.5650514",
"0.5650514",
"0.5648365",
"0.5645554",
"0.5645434",
"0.56406707",
"0.5638658",
"0.5635873",
"0.5632455",
"0.5625718",
"0.56255084",
"0.56255084",
"0.5625088",
"0.56212777",
"0.5613798",
"0.561131",
"0.56110996",
"0.5609536",
"0.5605696",
"0.5605162",
"0.5601334",
"0.56005985",
"0.5598132",
"0.55967456",
"0.5594861",
"0.5591731",
"0.55899256",
"0.55895585",
"0.55878764",
"0.5585818",
"0.5583846",
"0.55828124",
"0.55828124",
"0.55781007",
"0.5576939",
"0.55762875",
"0.55669874",
"0.5566211",
"0.55609107",
"0.55592465",
"0.55546904",
"0.5552852",
"0.55498296",
"0.5548554",
"0.55457634",
"0.55359364",
"0.5533088",
"0.5530824",
"0.5527202",
"0.55236816",
"0.5522787"
] | 0.66852415 | 2 |
Panicj panic json log | func (l *EchoLogrus) Panicj(j log.JSON) {
l.Logger.WithFields(logrus.Fields(j)).Panic()
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (l *AppLogger) Panic(tag string, message ...interface{}) {\n\tl.logging.SetFormatter(&logrus.JSONFormatter{})\n\tk := getAppFields(l.reqId, tag, l.userId)\n\tl.logging.WithFields(k).Panic(message...)\n}",
"func JSONLogger(r *http.Request, status int, len int64, d time.Duration) {\n\tos.Stderr.WriteString(JSONLogMessage(time.Now, r.Method, r.URL, status, len, d, nil))\n}",
"func logJSON(v interface{}) {\n\tb, err := json.MarshalIndent(v, \"\", \" \")\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tlog.Println(string(b))\n}",
"func (c *Controller) HandlePanic(v interface{}) {\n status := http.StatusInternalServerError\n switch e := v.(type) {\n case *perror.Error:\n status = e.Status()\n c.Json(EmptyObject, status, e.Message())\n default:\n c.Json(EmptyObject, status)\n }\n\n c.Context().Error(\"%s, trace[%s]\", util.ToString(v), util.PanicTrace(TraceMaxDepth, false))\n}",
"func Test_JSONLogger(t *testing.T) {\n\tdefer b.Reset()\n\n\tlog.InitJSONLogger(&log.Config{\n\t\tOutput: b,\n\t})\n\n\tlog.WithError(\n\t\terrors.New(\"bepis\"),\n\t).WithFields(log.Fields{\n\t\t\"hello\": \"world\",\n\t\t\"sample\": 1,\n\t\t\"text\": nil,\n\t}).Error(\"banana\")\n\n\texpected := map[string]interface{}{\n\t\t\"message\": \"banana\",\n\t\t\"error\": \"bepis\",\n\t\t\"hello\": \"world\",\n\t\t\"sample\": float64(1),\n\t\t\"text\": nil,\n\t\t\"level\": \"ERROR\",\n\t\t\"time\": \"<placeholder>\",\n\t\t\"_function\": \"<placeholder>\",\n\t\t\"_file\": \"<placeholder>\",\n\t\t\"_line\": \"<placeholder>\",\n\t}\n\n\tvar data map[string]interface{}\n\tif err := json.Unmarshal([]byte(b.String()), &data); err != nil {\n\t\tt.Fatalf(\"error unmarshalling buffer: %v\", err)\n\t}\n\n\tif len(expected) != len(data) {\n\t\tt.Fatalf(\"expected length: %d. actual length: %d\", len(expected), len(data))\n\t}\n\n\tfor k, v := range expected {\n\t\tval, ok := data[k]\n\t\tif !ok {\n\t\t\tt.Errorf(\"expected '%s' to be in buffer\", k)\n\t\t}\n\n\t\t// ignore the runtime specific info and timestamp, cant really get that info afaik\n\t\t// and checking their presence is good enough\n\t\tif !strings.HasPrefix(k, \"_\") && !(k == \"time\") {\n\t\t\tif val != v {\n\t\t\t\tt.Errorf(\"expected value: %T '%v'. actual value %T '%v'\", v, v, val, val)\n\t\t\t}\n\t\t}\n\t}\n}",
"func (l *jsonLogger) Fatal(message interface{}, params ...interface{}) {\n\tl.jsonLogParser.parse(context.Background(), l.jsonLogParser.log.Fatal(), \"\", params...).Msgf(\"%s\", message)\n}",
"func (aptRestorer *APTRestorer) logJson(restoreState *models.RestoreState, jsonString string) {\n\ttimestamp := time.Now().UTC().Format(time.RFC3339)\n\tstartMessage := fmt.Sprintf(\"-------- BEGIN %s | WorkItem: %d | Time: %s --------\",\n\t\trestoreState.WorkItem.ObjectIdentifier, restoreState.WorkItem.Id, timestamp)\n\tendMessage := fmt.Sprintf(\"-------- END %s | WorkItem: %d | Time: %s --------\",\n\t\trestoreState.WorkItem.ObjectIdentifier, restoreState.WorkItem.Id, timestamp)\n\taptRestorer.Context.JsonLog.Println(startMessage, \"\\n\",\n\t\tjsonString, \"\\n\",\n\t\tendMessage)\n}",
"func LogJSON(data interface{}) string {\n\tjsonData, err := json.Marshal(data)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\tvar prettyJSON bytes.Buffer\n\terr = json.Indent(&prettyJSON, jsonData, \"\", \" \")\n\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\treturn prettyJSON.String()\n}",
"func Fatal(args ...interface{}) {\n\txlog.SetFormatter(&logrus.JSONFormatter{})\n\txlog.SetLevel(logrus.DebugLevel)\n\n\txlog.Fatal(args...)\n}",
"func JSONLog(w io.Writer) LogFunc {\n\treturn func(v interface{}) {\n\t\tdata, err := json.Marshal(v)\n\t\tif err != nil {\n\t\t\tdata, err = json.Marshal(struct {\n\t\t\t\tContext string `json:\"context\"`\n\t\t\t\tDebugData string `json:\"debugData\"`\n\t\t\t\tError string `json:\"error\"`\n\t\t\t}{\n\t\t\t\tContext: \"Error marshaling 'debugData' into JSON\",\n\t\t\t\tDebugData: spew.Sdump(v),\n\t\t\t\tError: err.Error(),\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\t// We really REALLY should never get here\n\t\t\t\tlog.Println(\"ERROR MARSHALLING THE MARSHALLING ERROR!:\", err)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif _, err := fmt.Fprintf(w, \"%s\\n\", data); err != nil {\n\t\t\tlog.Println(\"ERROR WRITING TO LOGGER:\", err)\n\t\t}\n\t}\n}",
"func loggerJSON(l jsonLog) {\n\tl.Date = time.Now()\n\tif l.Level == 0 {\n\t\tl.Level = 6\n\t}\n\tif Config.MinLogLevel >= l.Level {\n\t\tif l.Version == \"\" {\n\t\t\tl.Version = \"1.1\"\n\t\t}\n\t\tif l.Host == \"\" {\n\t\t\tl.Host = \"Quotes\"\n\t\t}\n\t\tif l.ResponseCode == 0 {\n\t\t\tl.ResponseCode = 200\n\t\t}\n\t\t_ = os.MkdirAll(\"./logs/\", os.ModePerm)\n\t\tf, err := os.OpenFile(\"./logs/logs.json\", os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Error opening logs.json file: %v\", err)\n\t\t}\n\t\tdata, _ := json.Marshal(l)\n\t\tf.WriteString(string(data) + \"\\n\")\n\t\tf.Close()\n\t}\n}",
"func printJson(ag *alertGroup, m *sync.Mutex) {\n\tm.Lock()\n\tfor _, alert := range ag.Alerts {\n\t\tout := map[string]string{\"status\": alert.Status}\n\n\t\tfor k, v := range alert.Labels {\n\t\t\tout[k] = v\n\t\t}\n\t\tfor k, v := range alert.Annotations {\n\t\t\tout[k] = v\n\t\t}\n\t\tout[\"startsAt\"] = alert.StartsAt.Truncate(time.Millisecond).String()\n\t\tout[\"endsAt\"] = alert.EndsAt.Truncate(time.Millisecond).String()\n\n\t\tjout, err := json.Marshal(out)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\tfmt.Printf(\"%s\\n\", jout)\n\t}\n\tm.Unlock()\n}",
"func (lrt *LogRoundTripper) formatJSON(raw []byte) string {\n\tvar data map[string]interface{}\n\n\terr := json.Unmarshal(raw, &data)\n\tif err != nil {\n\t\tklog.V(6).Infof(\"Unable to parse JSON: %s, data: %s\", err, string(raw))\n\t\treturn string(raw)\n\t}\n\n\t// Mask known password fields\n\tif v, ok := data[\"auth\"].(map[string]interface{}); ok {\n\t\tif v, ok := v[\"identity\"].(map[string]interface{}); ok {\n\t\t\tif v, ok := v[\"password\"].(map[string]interface{}); ok {\n\t\t\t\tif v, ok := v[\"user\"].(map[string]interface{}); ok {\n\t\t\t\t\tv[\"password\"] = \"***\"\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Ignore the catalog\n\tif v, ok := data[\"token\"].(map[string]interface{}); ok {\n\t\tif _, ok := v[\"catalog\"]; ok {\n\t\t\treturn \"\"\n\t\t}\n\t}\n\n\tpretty, err := json.MarshalIndent(data, \"\", \" \")\n\tif err != nil {\n\t\tklog.V(6).Infof(\"Unable to re-marshal JSON: %s\", err)\n\t\treturn string(raw)\n\t}\n\n\treturn string(pretty)\n}",
"func (handler *ConsoleLogHandler) Format() LogFormat {\r\n return JSONFormat\r\n}",
"func JSONLogMessage(now func() time.Time, method string, u *url.URL, status int, length int64, d time.Duration, fields map[string]string) string {\n\tc := \"http_\" + strconv.Itoa(status/100) + \"xx\"\n\ts := `{` +\n\t\t`\"time\":\"` + now().UTC().Format(time.RFC3339) + `\",` +\n\t\t`\"src\":\"rl\",` +\n\t\t`\"status\":` + strconv.Itoa(status) + `,` +\n\t\t`\"` + c + `\":1,` +\n\t\t`\"len\":` + strconv.FormatInt(length, 10) + `,` +\n\t\t`\"ms\":` + strconv.FormatInt(d.Nanoseconds()/1000000, 10) + `,` +\n\t\t`\"method\":\"` + jsonEscape(method) + `\",` +\n\t\t`\"path\":\"` + jsonEscape(u.Path) + `\"`\n\tfor k, v := range fields {\n\t\ts += `,\"` + k + `\":\"` + v + `\"`\n\t}\n\treturn s + \"}\\n\"\n}",
"func jsonPrint(posts Posts) {\n\tpostJSON, err := json.MarshalIndent(posts, \"\", \" \")\n\tif err != nil {\n\t\tlogrus.Error(err)\n\t\tpanic(err)\n\t}\n\tfmt.Printf(\"JSON data: \\n %s\\n\", string(postJSON))\n}",
"func (n *AgentNotify) DumpJSON() {\n\tfmt.Println(n.getJSON())\n}",
"func TestFatalJSONMsg(t *testing.T) {\n\tfatalErr := Msg{\n\t\tMessage: \"This is a multiline\\nfatal message\\n\",\n\t\tCode: 2121,\n\t\tLevel: \"FATAL\",\n\t}\n\tapiVer := \"0.1\"\n\toutput := FatalJSONMsg(apiVer, fatalErr)\n\tcheckResultContains(t, output, ` \"apiVersion\": \"0.1\"`)\n\tcheckResultContains(t, output, ` \"error\": {`)\n\tcheckResultContains(t, output, ` \"message\": \"This is a multiline\\u000afatal `)\n\tcheckResultContains(t, output, ` \"code\": 2121,`)\n\n\t// Now lets see if the JSON returned was good\n\tvar result interface{}\n\terr := json.Unmarshal([]byte(output), &result)\n\tif err != nil {\n\t\tt.Fatalf(\"Unable to unmarshal JSON generated by FatalJSONMsg(), error: %s\\n\", err)\n\t}\n}",
"func (n *NetOp) LogJSON(b []byte) {\n\tif n.Logger != nil {\n\t\tn.Logger.LogJSON(b)\n\t}\n}",
"func (l *EchoLogrus) Fatalj(j log.JSON) {\n\tl.Logger.WithFields(logrus.Fields(j)).Fatal()\n}",
"func (l *jsonLogger) Fatalln(message interface{}, params ...interface{}) {\n\tl.jsonLogParser.parse(context.Background(), l.jsonLogParser.log.Fatal(), \"\", params...).Msgf(\"%s\", message)\n}",
"func PPrintJSON(xx interface{}) {\n\tyy, _ := json.MarshalIndent(xx, \"\", \" \")\n\tlog.Println(string(yy))\n}",
"func (l *LogEntry) Panic(v interface{}, stack []byte) {\n\te := l.NewLogEntry(l.req).(*LogEntry)\n\tfmt.Fprintf(e.buf, \"panic: %#v\", v)\n\tlog.Print(e.buf.String())\n\tlog.Print(string(stack))\n}",
"func logAsString(l map[string]interface{}) string {\n\tl[\"app\"] = AppName\n\tl[\"version\"] = Version\n\tb, err := json.Marshal(l)\n\tif err != nil {\n\t\tlog.Printf(\"unable to marshap map[string]interface{}. Wtf. %v \\n %#v\", err, l)\n\t}\n\treturn string(b)\n}",
"func logStackOnRecover(s runtime.NegotiatedSerializer, panicReason interface{}, w http.ResponseWriter) {\n\tvar buffer bytes.Buffer\n\tbuffer.WriteString(fmt.Sprintf(\"recover from panic situation: - %v\\r\\n\", panicReason))\n\tfor i := 2; ; i++ {\n\t\t_, file, line, ok := rt.Caller(i)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\tbuffer.WriteString(fmt.Sprintf(\" %s:%d\\r\\n\", file, line))\n\t}\n\tklog.Errorln(buffer.String())\n\n\theaders := http.Header{}\n\tif ct := w.Header().Get(\"Content-Type\"); len(ct) > 0 {\n\t\theaders.Set(\"Accept\", ct)\n\t}\n\tresponsewriters.ErrorNegotiated(apierrors.NewGenericServerResponse(http.StatusInternalServerError, \"\", schema.GroupResource{}, \"\", \"\", 0, false), s, schema.GroupVersion{}, w, &http.Request{Header: headers})\n}",
"func InitLog() {\n // TODO: implement json logger\n\n /*log.SetFormatter(&log.TextFormatter{\n DisableTimestamp: true,\n })\n if logJson {\n log.SetFormatter(&log.JSONFormatter{})\n }\n log.SetOutput(os.Stdout)\n\n level, err := log.ParseLevel(logLevel)\n if err != nil {\n log.Error(errors.Wrap(err, fmt.Sprintf(\"Invalid log level %s, defaulting to INFO\", logLevel)))\n level = log.InfoLevel\n }\n log.SetLevel(level)*/\n\n\n log = standard.New(logLevel)\n\n}",
"func (l *AppLogger) Fatal(tag string, message ...interface{}) {\n\tl.logging.SetFormatter(&logrus.JSONFormatter{})\n\tk := getAppFields(l.reqId, tag, l.userId)\n\tl.logging.WithFields(k).Fatal(message...)\n}",
"func (w *DefaultPreWorkflowHooksCommandRunner) logPanics(baseRepo models.Repo, pullNum int, logger logging.SimpleLogging) {\n\tif err := recover(); err != nil {\n\t\tstack := recovery.Stack(3)\n\t\tlogger.Err(\"PANIC: %s\\n%s\", err, stack)\n\t\tif commentErr := w.VCSClient.CreateComment(\n\t\t\tbaseRepo,\n\t\t\tpullNum,\n\t\t\tfmt.Sprintf(\"**Error: goroutine panic. This is a bug.**\\n```\\n%s\\n%s```\", err, stack),\n\t\t\t\"\",\n\t\t); commentErr != nil {\n\t\t\tlogger.Err(\"unable to comment: %s\", commentErr)\n\t\t}\n\t}\n}",
"func (l *FormattedJSONLogger) Log(keyvals ...interface{}) error {\n\tm := make(map[string]interface{}, (len(keyvals)+1)/2)\n\n\tfor i := 0; i < len(keyvals); i += 2 {\n\t\tvar v interface{} = \"(!MISSING)\"\n\t\tif i+1 < len(keyvals) {\n\t\t\tv = keyvals[i+1]\n\t\t}\n\t\tm[fmt.Sprintf(\"%s\", keyvals[i])] = v\n\t}\n\n\tb, err := json.Marshal(m)\n\tif err != nil {\n\t\t// TODO: Write this error to logs.\n\t\treturn err\n\t}\n\t_, err = fmt.Fprintln(l.w, string(b))\n\treturn err\n}",
"func JsonFileLogger(out SuperMarketLog) error {\n\toutput, _ := json.Marshal(out) // Create he output to log\n\tstringOutput := string(output) + \"\\n\" // Append a newline to the output\n\t//If the file doesn't exist, create it or append to the file\n\tf, err := os.OpenFile(\"rest.log\", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tif _, err := f.Write([]byte(stringOutput)); err != nil { //Write out to the log\n\t\tlog.Fatal(err)\n\t}\n\tif err := f.Close(); err != nil { //Close the writer\n\t\tlog.Fatal(err)\n\t}\n\treturn err\n}",
"func Panic2Response(c *gin.Context, err error, segmentName string) {\n\tif err != nil {\n\t\tc.JSON(http.StatusInternalServerError, gin.H{\"error\": err.Error()})\n\t\tlog.Panicf(\"Panic in '%s' : %v\", segmentName, err)\n\t}\n}",
"func LogJSON(level Level, module string, data interface{}) {\n\tb, err := json.MarshalIndent(data, \"\", \" \")\n\tif err == nil {\n\t\tlconf.Logger.Log(level, module, \"%s\", string(b))\n\t}\n}",
"func LogPanic(context string, module string, info string) {\n log.Panic().\n Str(\"Context\", context).\n Str(\"Module\", module).\n Msg(info)\n}",
"func Fatal(v ...interface{}) {\n\tjasonLog.output(2, levelFatal, \"\", v...)\n}",
"func (zl *ZapLogger) Panic(ctx context.Context, msg string, data interface{}, err error) {\n\tzl.logger.Panic(msg, fields(ctx, data, err)...)\n}",
"func panicRecover(input *models.RunningInput) {\n\tif err := recover(); err != nil {\n\t\ttrace := make([]byte, 2048)\n\t\truntime.Stack(trace, true)\n\t\tlog.Printf(\"E! FATAL: [%s] panicked: %s, Stack:\\n%s\",\n\t\t\tinput.LogName(), err, trace)\n\t\tlog.Println(\"E! PLEASE REPORT THIS PANIC ON GITHUB with \" +\n\t\t\t\"stack trace, configuration, and OS information: \" +\n\t\t\t\"https://github.com/influxdata/telegraf/issues/new/choose\")\n\t}\n}",
"func SerializePanic(in interface{}) []byte {\n\tbytes, err := Serialize(in)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn bytes\n}",
"func logPanic(r interface{}) {\n\tif r == http.ErrAbortHandler {\n\t\t// honor the http.ErrAbortHandler sentinel panic value:\n\t\t// ErrAbortHandler is a sentinel panic value to abort a handler.\n\t\t// While any panic from ServeHTTP aborts the response to the client,\n\t\t// panicking with ErrAbortHandler also suppresses logging of a stack trace to the server's error log.\n\t\treturn\n\t}\n\n\t// Same as stdlib http server code. Manually allocate stack trace buffer size\n\t// to prevent excessively large logs\n\tconst size = 64 << 10\n\tstacktrace := make([]byte, size)\n\tstacktrace = stacktrace[:runtime.Stack(stacktrace, false)]\n\tif _, ok := r.(string); ok {\n\t\tfmt.Fprintf(os.Stderr, \"[ERROR] Observed a panic: %s\\n%s\", r, stacktrace)\n\t} else {\n\t\tfmt.Fprintf(os.Stderr, \"[ERROR] Observed a panic: %#v (%v)\\n%s\", r, r, stacktrace)\n\t}\n}",
"func PanicHandler() func(http.ResponseWriter, *http.Request, interface{}) {\n\treturn func(w http.ResponseWriter, r *http.Request, rcv interface{}) {\n\t\tresponse := ResultInternalServerErr\n\t\tif env := viper.GetString(`env`); env == \"development\" {\n\t\t\tif rcv != nil {\n\t\t\t\tresponse.SetMessage(rcv)\n\t\t\t}\n\t\t}\n\n\t\tlog.Printf(\"%s %s\", r.Method, r.URL.Path)\n\t\tlog.Printf(\"Panic Error: %+v\", rcv)\n\n\t\tJSONResult(w, &response)\n\t}\n}",
"func (xlog *Logger) Panic(v ...interface{}) {\n\ts := fmt.Sprint(v...)\n\tlog.Std.Output(xlog.ReqId, log.Lpanic, 2, s)\n\tpanic(s)\n}",
"func KO(msg string) {\n\ttmp := ErrorWithInputs{\n\t\tError: msg,\n\t\tURL: url,\n\t\tToken: token,\n\t}\n\toutput, _ := json.MarshalIndent(tmp, \"\", \" \")\n\tpanic(string(output))\n}",
"func (l *Logger) PANIC(msg string) {\n\tdefer l.Zap.Sync()\n\tl.Zap.DPanic(msg)\n}",
"func TestJsonFormat(t *testing.T) {\n\tout := strings.Builder{}\n\tSetOutput(&out)\n\tSetLevelStr(\"DEBUG\")\n\tSetLogFormat(JSON)\n\tt.Run(\"test setting of JSON log format\", func(t *testing.T) {\n\t\tprintAllLevels(\"test json format\")\n\t\tif !(strings.Count(out.String(), \"{\\\"level\\\":\\\"DEBUG\\\",\\\"time\\\":\\\"\") == 2 &&\n\t\t\tstrings.Count(out.String(), \"\\\",\\\"location\\\":\\\"logger_test.go:\") == 10 &&\n\t\t\tstrings.Count(out.String(), \"\\\"goroutine\\\":\") == 10 &&\n\t\t\tstrings.Count(out.String(), \",\\\"message\\\":\\\"--> test json format\\\"}\") == 10) {\n\t\t\tt.Errorf(\"Log should be in JSON format:\\n%v\", out.String())\n\t\t}\n\t})\n}",
"func Panic(f interface{}, v ...interface{}) {\n\tvar format string\n\tswitch f.(type) {\n\tcase string:\n\t\tformat = f.(string)\n\t\tlog.Panicf(format, v...)\n\tdefault:\n\t\tformat = fmt.Sprint(f)\n\t\tif len(v) == 0 {\n\t\t\tlog.Panic(format)\n\t\t\treturn\n\t\t}\n\t\tformat += strings.Repeat(\" %v\", len(v))\n\t\tlog.Panicf(format, v...)\n\t}\n}",
"func (s *errDeal) Panic(a ...interface{}) {\n\ts.once.Do(s.initPath)\n\n\tnow := time.Now() //获取当前时间\n\tpid := os.Getpid() //获取进程ID\n\ttimeStr := now.Format(\"2006-01-02\") //设定时间格式\n\tfname := fmt.Sprintf(\"%s/panic_%s-%x.log\", s._path, timeStr, pid) //保存错误信息文件名:程序名-进程ID-当前时间(年月日时分秒)\n\tfmt.Println(\"panic to file \", fname)\n\n\tf, err := os.OpenFile(fname, os.O_CREATE|os.O_APPEND|os.O_RDWR, 0666)\n\tif err != nil {\n\t\treturn\n\t}\n\tdefer f.Close()\n\n\tf.WriteString(\"=========================\" + now.Format(\"2006-01-02 15:04:05 ========================= \\r\\n\"))\n\tf.WriteString(getStr(a...)) //输出堆栈信息\n\tf.WriteString(\"=========================end=========================\")\n}",
"func printReqLog(ctx context.Context, req interface{}) {\n\tjsoon, _ := json.Marshal(ctx)\n\tlog.Println(string(jsoon))\n\n\tjsoon, _ = json.Marshal(req)\n\tlog.Println(string(jsoon))\n}",
"func JSONLogMiddleware() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\t// Start timer\n\t\tstart := time.Now()\n\n\t\t// Process Request\n\t\tc.Next()\n\n\t\t// Stop timer\n\t\tduration := GetDurationInMillseconds(start)\n\n\t\tentry := log.WithFields(log.Fields{\n\t\t\t\"type\": \"router\",\n\t\t\t\"client_ip\": GetClientIP(c),\n\t\t\t\"duration\": duration,\n\t\t\t\"method\": c.Request.Method,\n\t\t\t\"path\": c.Request.RequestURI,\n\t\t\t\"status\": c.Writer.Status(),\n\t\t\t\"referrer\": c.Request.Referer(),\n\t\t})\n\n\t\tif c.Writer.Status() >= 500 {\n\t\t\tentry.Error(c.Errors.String())\n\t\t} else {\n\t\t\tentry.Info(\"\")\n\t\t}\n\t}\n}",
"func init() {\n\tlog.SetFormatter(&log.JSONFormatter{})\n\tlog.SetOutput(os.Stdout)\n\tlog.SetLevel(log.InfoLevel)\n}",
"func printDatus(d interface{}) {\n\tm, ok := d.(map[string]interface{})\n\tif !ok {\n\t\tinfoLogger.Println(\"Failed type assertion\", d)\n\t}\n\t//Go's map implementation returns keys in random order. So we are sorting before accessing\n\tkeys := make([]string, len(m))\n\ti := 0\n\tfor k := range m {\n\t\tkeys[i] = k\n\t\ti++\n\t}\n\tsort.Strings(keys)\n\tfor _, key := range keys {\n\t\tinfoLogger.Panicf(\"%s: %-20v\", key, valueFromTypeMap(m[key]))\n\t}\n\tinfoLogger.Println()\n\n}",
"func logNotification(env *task.Env, task *task.MessageTask, pdu *libcoap.Pdu) {\n log.Infof(\"Message Code: %v (%+v)\", pdu.Code, pdu.CoapCode())\n\n\tif pdu.Data == nil {\n\t\treturn\n }\n\n var err error\n var logStr string\n var req *libcoap.Pdu\n if task != nil {\n req = task.GetMessage()\n } else {\n req = nil\n }\n\n observe, err := pdu.GetOptionIntegerValue(libcoap.OptionObserve)\n if err != nil {\n log.WithError(err).Warn(\"Get observe option value failed.\")\n return\n }\n log.WithField(\"Observe Value:\", observe).Info(\"Notification Message\")\n\n\tmaxAgeRes := pdu.GetOptionStringValue(libcoap.OptionMaxage)\n\tif maxAgeRes != \"\" {\n\t\tlog.Infof(\"Max-Age Option: %v\", maxAgeRes)\n\t}\n\n log.Infof(\" Raw payload: %s\", pdu.Data)\n hex := hex.Dump(pdu.Data)\n\tlog.Infof(\" Raw payload hex: \\n%s\", hex)\n\n\t// Check if the response body data is a string message (not an object)\n\tif pdu.IsMessageResponse() {\n\t\tlog.Debugf(\"Server send notification with error message: %+v\", pdu.Data)\n\t\treturn\n\t}\n\n\tdec := codec.NewDecoder(bytes.NewReader(pdu.Data), dots_common.NewCborHandle())\n\n // Identify response is mitigation or session configuration by cbor data in heximal\n if strings.Contains(hex, string(libcoap.IETF_MITIGATION_SCOPE_HEX)) {\n var v messages.MitigationResponse\n err = dec.Decode(&v)\n logStr = v.String()\n env.UpdateCountMitigation(req, v, string(pdu.Token))\n log.Debugf(\"Request query with token as key in map: %+v\", env.GetAllRequestQuery())\n } else if strings.Contains(hex, string(libcoap.IETF_SESSION_CONFIGURATION_HEX)) {\n var v messages.ConfigurationResponse\n err = dec.Decode(&v)\n logStr = v.String()\n log.Debug(\"Receive session notification - Client update new values to system session configuration and restart ping task.\")\n\t\tRestartHeartBeatTask(pdu, env)\n\n\t\t// Not refresh session config in case session config task is nil (server send notification after reset by expired Max-age)\n\t\tsessionTask := env.SessionConfigTask()\n\t\tif sessionTask != nil {\n\t\t\tRefreshSessionConfig(pdu, env, sessionTask.MessageTask())\n\t\t}\n\t} else if strings.Contains(hex, string(libcoap.IETF_TELEMETRY_PRE_MITIGATION)) {\n var v messages.TelemetryPreMitigationResponse\n err = dec.Decode(&v)\n logStr = v.String()\n log.Debug(\"Receive telemetry pre-mitigation notification.\")\n }else {\n log.Warnf(\"Unknown notification is received.\")\n }\n\n if err != nil {\n log.WithError(err).Warn(\"CBOR Decode failed.\")\n return\n }\n log.Infof(\" CBOR decoded: %s\", logStr)\n}",
"func (l *logData) UnmarshalJSON(d []byte) error {\n\tdata := map[string]interface{}{}\n\n\terr := json.Unmarshal(d, &data)\n\tif nil != err {\n\t\treturn err\n\t}\n\n\tif _, ok := data[\"caller\"]; ok {\n\t\tl.Caller = data[\"caller\"].(string)\n\t}\n\tif _, ok := data[\"data\"]; ok {\n\t\tif nil == l.Data {\n\t\t\tl.Data = map[string]interface{}{}\n\t\t}\n\t\tfor k, v := range data[\"data\"].(map[string]interface{}) {\n\t\t\tif e, ok := v.(error); ok {\n\t\t\t\tl.Data[k] = e.(error)\n\t\t\t} else {\n\t\t\t\tl.Data[k] = v\n\t\t\t}\n\t\t}\n\t\t//l.Data = data[\"data\"].(map[string]interface{})\n\t}\n\tif _, ok := data[\"error\"]; ok && \"\" != data[\"error\"] {\n\t\tl.Err = fmt.Errorf(data[\"error\"].(string))\n\t}\n\tif _, ok := data[\"host\"]; ok {\n\t\tl.Hostname = data[\"host\"].(string)\n\t}\n\tif _, ok := data[\"level\"]; ok {\n\t\tl.Level = data[\"level\"].(string)\n\t}\n\tif _, ok := data[\"msg\"]; ok {\n\t\tl.Message = data[\"msg\"].(string)\n\t}\n\tif _, ok := data[\"time\"]; ok {\n\t\tl.Timestamp = data[\"time\"].(string)\n\t}\n\tif _, ok := data[\"trace\"]; ok {\n\t\tl.Trace = data[\"trace\"].([]string)\n\t}\n\n\treturn nil\n}",
"func jsonPrintDetails() {\n\n\tb, err := json.Marshal(alertDetails[name])\n\n\tif err != nil {\n\t\tlog.Printf(\"Unable to convert Detailed JSON Data, error: %v\\n\", err)\n\t\treturn\n\t}\n\n\tfmt.Println(string(b))\n}",
"func logEvent(lv int, m string, rc int, r string, d int64) {\n\tvar l jsonLog\n\tl.Level = lv\n\tl.Method = m\n\tl.ResponseCode = rc\n\tl.Response = r\n\tl.Duration = d\n\tloggerJSON(l)\n}",
"func (s *sender) logToJSON(record plog.LogRecord) (string, error) {\n\tdata := s.filter.filterOut(record.Attributes())\n\trecord.Body().CopyTo(data.orig.PutEmpty(logKey))\n\n\tnextLine, err := json.Marshal(data.orig.AsRaw())\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn bytes.NewBuffer(nextLine).String(), nil\n}",
"func logStackOnRecover(panicReason interface{}, httpWriter http.ResponseWriter) {\n\tvar buffer bytes.Buffer\n\tbuffer.WriteString(fmt.Sprintf(\"recover from panic situation: - %v\\r\\n\", panicReason))\n\tfor i := 2; ; i += 1 {\n\t\t_, file, line, ok := runtime.Caller(i)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\tbuffer.WriteString(fmt.Sprintf(\" %s:%d\\r\\n\", file, line))\n\t}\n\t//\tlog.Print(buffer.String())\n\tlogger := log.New(os.Stderr, \"[server2 logStackOnRecover] \", log.LstdFlags|log.Lshortfile)\n\tlogger.Print(buffer.String())\n\thttpWriter.WriteHeader(http.StatusInternalServerError)\n\thttpWriter.Write(buffer.Bytes())\n}",
"func (l *Logger) Fatal(err errors.Error) {\n\tl.logFatal.Printf(string(err.JSON()))\n}",
"func toJSON(a interface{}) ([]byte, error) {\n\tbs, err := json.Marshal(a)\n\n\tif err != nil {\n\t\treturn []byte{}, fmt.Errorf(\"there was an error: %v\", err)\n\t\t// return []byte{}, fmt.Println(\"there was an error: %v\", err)\n\t\t//does not work bc Println returns n int and err => we only need err\n\t\t//fatal does not work bec no error returned but we need to return here\n\n\t}\n\treturn bs, nil\n}",
"func logPanic(r interface{}) {\n\tcallers := \"\"\n\tfor i := 0; true; i++ {\n\t\t_, file, line, ok := runtime.Caller(i)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\tcallers = callers + fmt.Sprintf(\"%v:%v\\n\", file, line)\n\t}\n\tlogger.Get().Error(\"Recovered from panic: %#v (%v)\\n%v\", r, r, callers)\n}",
"func I(format string, args ...interface{}) { infodeps(sentry.CaptureMessage, 3, format, args...) }",
"func (l *Logger) LogJSON(value interface{}) (err error) {\n\tvar msg []byte\n\tif msg, err = json.Marshal(value); err != nil {\n\t\treturn\n\t}\n\n\t// Convert message to bytes and pass to l.Log\n\treturn l.Log(msg)\n}",
"func print_json(chunks []Chunk) {\n\tfmt.Println(\"{\")\n\tfor i := range chunks {\n\t\tpayload := chunks[i].payload\n\t\ttag := chunks[i].tag\n\t\tif i > 0 {\n\t\t\tfmt.Println(\",\")\n\t\t}\n\t\tfmt.Printf(\" \\\"%s\\\": \\\"%s\\\"\", tag, payload)\n\t}\n\tfmt.Printf(\"\\n}\\n\")\n}",
"func (l *DefaultLogger) LogPanic(ctx context.Context, value interface{}) {\n\tconst size = 64 << 10\n\tbuf := make([]byte, size)\n\tbuf = buf[:runtime.Stack(buf, false)]\n\tlog.Printf(\"graphql: panic occurred: %v\\n%s\\ncontext: %v\", value, buf, ctx)\n}",
"func ToLog(b *Block) {\n\tfor {\n\t\tselect {\n\t\tcase msg := <-b.InChan:\n\t\t\tout, err := json.Marshal(msg.Msg)\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(\"could not marshal json\")\n\t\t\t}\n\t\t\tlog.Println(string(out))\n\t\tcase <-b.QuitChan:\n\t\t\tquit(b)\n\t\t\treturn\n\t\t}\n\t}\n}",
"func jsonError(w http.ResponseWriter, serverMsg string, clientMsg string) {\n\tlog.Error(serverMsg)\n\tpayload := Message{\n\t\tError: clientMsg,\n\t}\n\tresJSON, err := json.Marshal(payload)\n\tif err != nil {\n\t\tmsg := fmt.Sprintf(\"Failed to marshal result : %v\", err)\n\t\thttpError(w, msg, msg, http.StatusInternalServerError)\n\t\treturn\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tfmt.Fprintf(w, \"%s\\n\", string(resJSON))\n\treturn\n}",
"func Panic(args ...interface{}) {\r\n\tLogger.Panic(\"\", args)\r\n}",
"func (c *context) Panic(format string, args ...interface{}) {\n\tc.logger.Panic(c.prefixFormat()+format, args...)\n}",
"func (l *Logger) Panic(log ...interface{}) {\n\tl.instance.Panic(log...)\n}",
"func jsonPrint() {\n\n\tvar filteredData []alertDataJSON\n\tvar temp alertDataJSON\n\n\tfor _, each := range allAlertData {\n\t\tif filteredAlerts[each.Name] == 1 {\n\t\t\ttemp.Name = each.Name\n\t\t\ttemp.Service = each.Service\n\t\t\ttemp.Severity = each.Service\n\t\t\ttemp.Tag = each.Tag\n\t\t\ttemp.Starts = timeDiff(time.Now(), each.StartsAt, 0)\n\t\t\tif each.EndsAt == time.Unix(maxtstmp, 0).UTC() {\n\t\t\t\ttemp.Ends = \"Undefined\"\n\t\t\t\ttemp.Duration = \"Undefined\"\n\t\t\t} else {\n\t\t\t\ttemp.Ends = timeDiff(time.Now(), each.EndsAt, 0)\n\t\t\t\ttemp.Duration = timeDiff(each.StartsAt, each.EndsAt, 1)\n\t\t\t}\n\n\t\t\tfilteredData = append(filteredData, temp)\n\t\t}\n\t}\n\n\tb, err := json.Marshal(filteredData)\n\n\tif err != nil {\n\t\tlog.Printf(\"Unable to convert Filtered JSON Data, error: %v\\n\", err)\n\t\treturn\n\t}\n\n\tfmt.Println(string(b))\n}",
"func (claims Claims) Panic(v ...interface{}) {\n\tt := service.LogTime()\n\tmessage := fmt.Sprint(v...)\n\tservice.Log(2, false, message, claims.parse(), proto.Log_FATAL, t).Result()\n\tBuiltInPanic(message)\n}",
"func (n *Node) Panic(args ...interface{}) {\n\tn.log.Panic(args...)\n}",
"func (logger *Logger) Panicf(format string, args ...interface{}) {\n\tlogger.std.Logf(\"Panic\"+format, args...)\n}",
"func Panic(v ...interface{}) {\n s := sout(v...)\n Std.Output(LevelPanic, CallDepth, s)\n panic(s)\n}",
"func logPanic(r interface{}) {\n\tcallers := getCallers(r)\n\tlog.Errorf(\"Observed a panic: %#v (%v)\\n%v\", r, r, callers)\n}",
"func (c *Controller) HandlePanic(v interface{}, debug bool) {\n\tstatus := http.StatusInternalServerError\n\n\tswitch e := v.(type) {\n\tcase *perror.Error:\n\t\tstatus = e.Status()\n\t\tdefer func() {\n\t\t\tif err := recover(); err != nil {\n\t\t\t\tc.Json(EmptyObject, status, e.Message())\n\t\t\t\tc.Context().Error(\"%s, trace[%s]\", util.ToString(err), util.PanicTrace(TraceMaxDepth, false, debug))\n\t\t\t}\n\t\t}()\n\n\t\tApp().Router().ErrorController(c.Context(), status).(iface.IErrorController).Error(status, e.Message())\n\tdefault:\n\t\tdefer func() {\n\t\t\tif err := recover(); err != nil {\n\t\t\t\tc.Json(EmptyObject, status)\n\t\t\t\tc.Context().Error(\"%s, trace[%s]\", util.ToString(err), util.PanicTrace(TraceMaxDepth, false, debug))\n\t\t\t}\n\t\t}()\n\n\t\tApp().Router().ErrorController(c.Context(), status).(iface.IErrorController).Error(status, \"\")\n\t}\n\n\tif status != http.StatusOK {\n\t\tc.Context().Error(\"%s, trace[%s]\", util.ToString(v), util.PanicTrace(TraceMaxDepth, false, debug))\n\t}\n}",
"func (claims Claims) Panicf(format string, v ...interface{}) {\n\tt := service.LogTime()\n\tmessage := fmt.Sprintf(format, v...)\n\tservice.Log(2, false, message, claims.parse(), proto.Log_FATAL, t).Result()\n\tBuiltInPanic(message)\n}",
"func Panicw(msg string, keysAndValues ...interface{}) {\n\tlog.Panicw(msg, keysAndValues...)\n}",
"func printJSON(v interface{}) {\n\tw := json.NewEncoder(os.Stdout)\n\tw.SetIndent(\"\", \"\\t\")\n\terr := w.Encode(v)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}",
"func (l legalHoldInfoMessage) JSON() string {\n\tmsgBytes, e := json.MarshalIndent(l, \"\", \" \")\n\tfatalIf(probe.NewError(e), \"Unable to marshal into JSON.\")\n\treturn string(msgBytes)\n}",
"func RecoverAndLog() {\n\tif r := recover(); r != nil {\n\t\tfmt.Println(\"Panic digested from \", r)\n\n\t\tlog.Printf(\"Internal error: %v\", r)\n\t\tbuf := make([]byte, 1<<16)\n\t\tstackSize := runtime.Stack(buf, true)\n\t\t//log.Printf(\"%s\\n\", string(buf[0:stackSize]))\n\n\t\tvar dir = platform.GetSurgeDir()\n\t\tvar logPathOS = dir + string(os.PathSeparator) + \"paniclog.txt\"\n\t\tf, _ := os.OpenFile(logPathOS, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)\n\t\tw := bufio.NewWriter(f)\n\t\tw.WriteString(string(buf[0:stackSize]))\n\t\tw.Flush()\n\n\t\tpushError(\"Panic\", \"Please check your log file and paniclog for more info\")\n\n\t\tpanic(\"Panic dumped but not digested, please check your log\")\n\t}\n}",
"func (s *Server) log(req *jsonrpc2.Request) (interface{}, error) {\n\tin := new(logReq)\n\tif err := json.Unmarshal([]byte(*req.Params), in); err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, s.peer.Log(noContext, in.ID, in.Line)\n}",
"func JsonPrint(data interface{}) {\n\tvar p []byte\n\tp, err := json.Marshal(data)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\tfmt.Printf(\"%s \\n\", p)\n}",
"func showlog(w http.ResponseWriter, _req *http.Request) {\n\tstoreLock.RLock()\n\tvar slogs = make([]*proto.SlowlogEntries, len(storeMap))\n\tidx := 0\n\tfor _, s := range storeMap {\n\t\tslogs[idx] = s.Reply()\n\t\tidx++\n\t}\n\tstoreLock.RUnlock()\n\n\tencoder := json.NewEncoder(w)\n\terr := encoder.Encode(slogs)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"%s\", err), http.StatusInternalServerError)\n\t}\n}",
"func (logger *Logger) Panic(args ...interface{}) {\n\tlogger.std.Log(append([]interface{}{\"Panic\"}, args)...)\n\tpanic(args[0])\n}",
"func AutoTestLogFormat() log15.Format {\n\n\tlogfmt := log15.LogfmtFormat()\n\n\treturn log15.FormatFunc(func(r *log15.Record) []byte {\n\n\t\tif r.Msg == \"PrettyJsonLogFormat\" && len(r.Ctx) == 4 {\n\n\t\t\tb, ok := r.Ctx[3].([]byte)\n\t\t\tif ok {\n\t\t\t\t//return raw json data directly\n\t\t\t\treturn b\n\t\t\t}\n\t\t}\n\n\t\treturn logfmt.Format(r)\n\t})\n\n}",
"func writePanic(out io.Writer, data []byte) {\n\t_, err := out.Write(data)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}",
"func (l *Logger) Panic(v ...interface{}) {\n\ts := fmt.Sprint(v...)\n\tl.Log(PanicLevel, s)\n\tpanic(s)\n}",
"func Fatalf(format string, args ...interface{}) {\n\txlog.SetFormatter(&logrus.JSONFormatter{})\n\txlog.SetLevel(logrus.DebugLevel)\n\n\txlog.Fatalf(format, args...)\n}",
"func materializeWithJSON(logFields []model.KeyValue) ([]byte, error) {\n\tfields := make(map[string]string, len(logFields))\n\tfor i := range logFields {\n\t\tfields[logFields[i].Key] = tagValueToString(&logFields[i])\n\t}\n\tif event, ok := fields[\"event\"]; ok && len(fields) == 1 {\n\t\treturn []byte(event), nil\n\t}\n\treturn json.Marshal(fields)\n}",
"func defaultLogger(req *http.Request, err error) {\n\tmessage := err.Error()\n\tperr, ok := err.(Panic)\n\tif ok {\n\t\tmessage += \"\\n\" + perr.String()\n\t}\n\tlog.Println(message)\n}",
"func handlePanic(resp http.ResponseWriter, status int) {\n\tif p := recover(); p != nil {\n\n\t\tmessageFmt := \"Unhandled panic: %s\"\n\t\tvar err error\n\n\t\tswitch p.(type) {\n\t\tcase nil:\n\t\t\t// normal case, just ignore.\n\t\tcase string:\n\t\t\tmessageFmt = p.(string)\n\t\t\terr = errors.New(messageFmt)\n\t\tcase error:\n\t\t\terr = p.(error)\n\t\tdefault:\n\t\t\terr = errors.New(fmt.Sprint(p))\n\t\t}\n\n\t\tif err != nil {\n\t\t\treportError(err, messageFmt, resp, status)\n\t\t}\n\t}\n}",
"func Panic(v ...interface{}) {\n\ts := fmt.Sprint(v...)\n\tstdLogger.Log(PanicLevel, s)\n\tpanic(s)\n}",
"func (ps pumaStatusFinalOutput) printAndBuildJSON() error {\n\tb, err := json.Marshal(ps)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Println(string(b))\n\n\treturn nil\n}",
"func (cc *LogController) Patch(c *gin.Context) {\n\trequest := &LogPatchRequest{}\n\tif err := c.ShouldBindJSON(request); err != nil {\n\t\tjsonAPIError(c, http.StatusUnprocessableEntity, err)\n\t\treturn\n\t}\n\n\tif request.Level == \"\" && request.SqlEnabled == nil {\n\t\tjsonAPIError(c, http.StatusBadRequest, fmt.Errorf(\"please set either logLevel or logSql as params in order to set the log level\"))\n\t\treturn\n\t}\n\n\tif request.Level != \"\" {\n\t\tvar ll zapcore.Level\n\t\terr := ll.UnmarshalText([]byte(request.Level))\n\t\tif err != nil {\n\t\t\tjsonAPIError(c, http.StatusBadRequest, err)\n\t\t\treturn\n\t\t}\n\t\tif err = cc.App.GetStore().Config.SetLogLevel(c.Request.Context(), ll.String()); err != nil {\n\t\t\tjsonAPIError(c, http.StatusInternalServerError, err)\n\t\t\treturn\n\t\t}\n\t}\n\n\tif request.SqlEnabled != nil {\n\t\tif err := cc.App.GetStore().Config.SetLogSQLStatements(c.Request.Context(), *request.SqlEnabled); err != nil {\n\t\t\tjsonAPIError(c, http.StatusInternalServerError, err)\n\t\t\treturn\n\t\t}\n\t\tcc.App.GetStore().SetLogging(*request.SqlEnabled)\n\t}\n\n\t// Set default logger with new configurations\n\tlogger.SetLogger(cc.App.GetStore().Config.CreateProductionLogger())\n\n\tresponse := &presenters.LogResource{\n\t\tJAID: presenters.JAID{\n\t\t\tID: \"log\",\n\t\t},\n\t\tLevel: cc.App.GetStore().Config.LogLevel().String(),\n\t\tSqlEnabled: cc.App.GetStore().Config.LogSQLStatements(),\n\t}\n\n\tjsonAPIResponse(c, response, \"log\")\n}",
"func dumpJSON(o interface{}) error {\n\tjs, err := json.MarshalIndent(o, \"\", \" \")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Printf(\"```\\n%s\\n```\\n\\n\", js)\n\n\treturn nil\n}",
"func catchPanic(err *error, functionName string) {\n\tif r := recover(); r != nil {\n\t\tfmt.Printf(\"%s : PANIC Defered : %v\\n\", functionName, r)\n\n\t\t// Capture the stack trace\n\t\tbuf := make([]byte, 10000)\n\t\truntime.Stack(buf, false)\n\n\t\tfmt.Printf(\"%s : Stack Trace : %s\", functionName, string(buf))\n\n\t\tif err != nil {\n\t\t\t*err = fmt.Errorf(\"%v\", r)\n\t\t}\n\t}\n}",
"func (l *Logger) Panic(msg string, args ...interface{}) {\n\tl.z.Panicw(msg, args...)\n}",
"func FormatLog(flat map[string]interface{}) string {\n\tvar str string\n\tsliceOfKeys := []string{\"timestamp\", \"level\", \"deploymentId\", WorkFlowID.String(), ExecutionID.String(), NodeID.String(), InstanceID.String(), InterfaceName.String(), OperationName.String(), TypeID.String(), \"content\"}\n\tfor _, k := range sliceOfKeys {\n\t\tif val, ok := flat[k].(string); ok {\n\t\t\tif k != \"content\" {\n\t\t\t\tstr += \"[\" + val + \"]\"\n\t\t\t} else {\n\t\t\t\tstr += val\n\t\t\t}\n\t\t} else {\n\t\t\tstr += \"[]\"\n\t\t}\n\n\t}\n\treturn str\n}",
"func Panic(args ...interface{}) {\n\tlog.Panic(args...)\n}",
"func Panic(args ...interface{}) {\n\tlog.Panic(args...)\n}",
"func (k Keisatsu) WatchPanic() {\n\tif msg := recover(); msg != nil {\n\t\tmessage := Message{\n\t\t\tAppName: k.AppName,\n\t\t\tLevel: PanicLevel,\n\t\t\tMessage: fmt.Sprintf(\"%v\", msg),\n\t\t\tStackTrace: string(debug.Stack()),\n\t\t}\n\t\tk.sendWebhook(message)\n\t}\n}"
] | [
"0.6103428",
"0.5893398",
"0.57904696",
"0.57574403",
"0.5723549",
"0.55608106",
"0.55556524",
"0.5539607",
"0.5539534",
"0.55262053",
"0.5516622",
"0.5509169",
"0.5448856",
"0.54444563",
"0.54426837",
"0.54083824",
"0.5396337",
"0.53323275",
"0.53176063",
"0.5271319",
"0.5258463",
"0.52556366",
"0.52530444",
"0.52474433",
"0.52280897",
"0.52215976",
"0.52111655",
"0.5169514",
"0.51676184",
"0.51586723",
"0.515682",
"0.51377815",
"0.5131197",
"0.5128679",
"0.51150674",
"0.5102041",
"0.5088814",
"0.5077596",
"0.5074259",
"0.50695205",
"0.50685817",
"0.5067822",
"0.5062822",
"0.5055475",
"0.5048943",
"0.50259525",
"0.50185376",
"0.5000492",
"0.49883166",
"0.49813858",
"0.49786565",
"0.49709627",
"0.49581325",
"0.49550948",
"0.4939464",
"0.4934613",
"0.49334598",
"0.49142167",
"0.49036437",
"0.49017674",
"0.48722115",
"0.4868614",
"0.4865508",
"0.4858098",
"0.48539618",
"0.48539346",
"0.4849319",
"0.4833127",
"0.4827856",
"0.4827054",
"0.4819067",
"0.48108512",
"0.48096406",
"0.48042348",
"0.47893384",
"0.47859305",
"0.4780448",
"0.47763285",
"0.4773463",
"0.4761898",
"0.4752471",
"0.47497064",
"0.4748614",
"0.47474122",
"0.47472426",
"0.47429353",
"0.47393417",
"0.4738309",
"0.47366095",
"0.47353062",
"0.47340783",
"0.47327203",
"0.4732474",
"0.47261682",
"0.4724772",
"0.47244912",
"0.47225264",
"0.47219405",
"0.47219405",
"0.47216907"
] | 0.6141126 | 0 |
GenerateKey generates a fresh keypair for this VRF | func GenerateKey() (vrfp.PrivateKey, vrfp.PublicKey) {
key, err := ecdsa.GenerateKey(curve, rand.Reader)
if err != nil {
return nil, nil
}
return &PrivateKey{PrivateKey: key}, &PublicKey{PublicKey: &key.PublicKey}
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func GenerateKey() (PrivateKey, error) {\n\treturn newSecp256k1PrvKey()\n}",
"func genKey() *Key {\n\tprivKey := crypto.GenPrivKeyEd25519()\n\treturn &Key{\n\t\tAddress: privKey.PubKey().Address(),\n\t\tPubKey: privKey.PubKey(),\n\t\tPrivKey: privKey,\n\t}\n}",
"func generateKey() {\n\tpassphrase := os.Getenv(passphraseEnvironmentVariable)\n\tif passphrase == \"\" {\n\t\tprintErrorAndExit(fmt.Errorf(\"skicka: SKICKA_PASSPHRASE \" +\n\t\t\t\"environment variable not set.\\n\"))\n\t}\n\n\t// Derive a 64-byte hash from the passphrase using PBKDF2 with 65536\n\t// rounds of SHA256.\n\tsalt := getRandomBytes(32)\n\thash := pbkdf2.Key([]byte(passphrase), salt, 65536, 64, sha256.New)\n\tif len(hash) != 64 {\n\t\tlog.Fatalf(\"incorrect key size returned by pbkdf2 %d\\n\", len(hash))\n\t}\n\n\t// We'll store the first 32 bytes of the hash to use to confirm the\n\t// correct passphrase is given on subsequent runs.\n\tpassHash := hash[:32]\n\t// And we'll use the remaining 32 bytes as a key to encrypt the actual\n\t// encryption key. (These bytes are *not* stored).\n\tkeyEncryptKey := hash[32:]\n\n\t// Generate a random encryption key and encrypt it using the key\n\t// derived from the passphrase.\n\tkey := getRandomBytes(32)\n\tiv := getRandomBytes(16)\n\tencryptedKey := encryptBytes(keyEncryptKey, iv, key)\n\n\tfmt.Printf(\"; Add the following lines to the [encryption] section\\n\")\n\tfmt.Printf(\"; of your ~/.skicka.config file.\\n\")\n\tfmt.Printf(\"\\tsalt=%s\\n\", hex.EncodeToString(salt))\n\tfmt.Printf(\"\\tpassphrase-hash=%s\\n\", hex.EncodeToString(passHash))\n\tfmt.Printf(\"\\tencrypted-key=%s\\n\", hex.EncodeToString(encryptedKey))\n\tfmt.Printf(\"\\tencrypted-key-iv=%s\\n\", hex.EncodeToString(iv))\n}",
"func (e Endpoints) GenerateKey(ctx context.Context, r GenerateKeyRequest) (interface{}, error) {\n\tresp, _ := e.GenerateKeyEndpoint(ctx, r)\n\tgkResp := resp.(GenerateKeyResponse)\n\treturn gkResp, nil\n}",
"func GenerateKey() ([]byte, error) {\n\tlogger.Green(\"ssh\", \"Generating new key\")\n\tvar pemBuffer bytes.Buffer\n\n\t// Generate RSA keypair\n\trsaKey, err := rsa.GenerateKey(rand.Reader, 2048)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Encode RSA private key to pem\n\tpem.Encode(&pemBuffer, &pem.Block{\n\t\tType: \"RSA PRIVATE KEY\",\n\t\tBytes: x509.MarshalPKCS1PrivateKey(rsaKey),\n\t})\n\n\terr = ioutil.WriteFile(path.Join(configuration.StateDir, privateKeyFilename), pemBuffer.Bytes(), 0600)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn pemBuffer.Bytes(), nil\n}",
"func (k *Keychain) GenerateKey() ([]byte, error) {\n\tkey := make([]byte, defaultKeySize)\n\t_, err := rand.Read(key)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tk.pushKey(key)\n\treturn key, nil\n}",
"func GenerateKey(c elliptic.Curve, rand io.Reader) (*PrivateKey, error)",
"func GenerateKey(password string, salt []byte, params *Params) *saltedKey {\n\tunsalted := blake2b.Sum512([]byte(password))\n\tsalted := saltedKey{pwd: append(salt, unsalted[:]...), sel: salt, p: params}\n\treturn &salted\n}",
"func (a *Account) GenerateKey() error {\n\t// create a new key\n\tprivateKey, err := ecdsa.GenerateKey(elliptic.P384(), rand.Reader)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// save it to the Account struct\n\ta.key = privateKey\n\t// return no error\n\treturn nil\n}",
"func (b *Base) GenerateKey(req *GenerateKeyReq) (*GenerateKeyResp, error) {\n\treturn nil, ErrFunctionNotSupported\n}",
"func generateKey() (crypto.PrivateKey, error) {\n\tseed := make([]byte, crypto.KeyGenSeedMinLenECDSASecp256k1)\n\tn, err := rand.Read(seed)\n\tif err != nil || n != crypto.KeyGenSeedMinLenECDSASecp256k1 {\n\t\treturn nil, err\n\t}\n\treturn utils.GenerateUnstakedNetworkingKey(seedFixture(n))\n}",
"func GenerateKey() (Key, bool) {\n\tvar key Key = make([]byte, KeySize)\n\n\t_, err := io.ReadFull(PRNG, key)\n\treturn key, err == nil\n}",
"func generateKey(hash, salt []byte) (key []byte) {\n key = pbkdf2.Key(hash, salt, 2, 32, sha256.New)\n\n return\n}",
"func genkey() {\n\t// Key generation takes a long time, so it's polite to check the user's\n\t// request makes sense first.\n\testablishDir(true)\n\tif _, err := os.Lstat(privateKeyPath); err == nil {\n\t\texitPrintf(\"Error: The private key file (%s) already exists.\\n\",\n\t\t\tprivateKeyPath)\n\t}\n\n\tfmt.Fprintf(os.Stderr, \"Generating a new private key (%s)...\", privateKeyPath)\n\tprivateKey.Generate(rand.Reader)\n\tfmt.Fprintf(os.Stderr, \"\\n\")\n\n\tsaveKey(privateKeyPath, &privateKey)\n}",
"func KeyPairGenerate(IKM []byte, S []byte, W []byte) int {\n\tr := NewBIGints(CURVE_Order)\n\tL := ceil(3*ceil(r.nbits(),8),2)\n\tLEN:=core.InttoBytes(L, 2)\n\tAIKM:=make([]byte,len(IKM)+1) \n\tfor i:=0;i<len(IKM);i++ {\n\t\tAIKM[i]=IKM[i]\n\t}\n\tAIKM[len(IKM)]=0\n\n\tG := ECP2_generator()\n\tif G.Is_infinity() {\n\t\treturn BLS_FAIL\n\t}\n\tSALT := []byte(\"BLS-SIG-KEYGEN-SALT-\")\n\tPRK := core.HKDF_Extract(core.MC_SHA2,HASH_TYPE,SALT,AIKM)\n\tOKM := core.HKDF_Expand(core.MC_SHA2,HASH_TYPE,L,PRK,LEN)\n\n\tdx:= DBIG_fromBytes(OKM[:])\n\ts:= dx.Mod(r)\n\ts.ToBytes(S)\n// SkToPk\n\tG = G2mul(G, s)\n\tG.ToBytes(W,true)\n\treturn BLS_OK\n}",
"func generateKey(length int) (key []byte, err error) {\n\tdefer func(start time.Time) {\n\t\tvalue.RecordDataKeyGeneration(start, err)\n\t}(time.Now())\n\tkey = make([]byte, length)\n\tif _, err = rand.Read(key); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn key, nil\n}",
"func GenerateKey(rand io.Reader) (*PrivateKey, error) {\n\n\tc := SM2P256()\n\n\tk, err := randFieldElement(c, rand)\n\tfmt.Println(k)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpriv := new(PrivateKey)\n\tpriv.PublicKey.Curve= c\n\tpriv.D = k\n\n\tpriv.PublicKey.X, priv.PublicKey.Y = c.ScalarBaseMult(k.Bytes())\n\treturn priv, nil\n}",
"func (c *Client) keyGen(session SSHSession) (*bytes.Buffer, error) {\n\tscriptBytes, err := internal.Asset(\"client/scripts/keygen.sh\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Create deploy key.\n\tresult, stderr, err := session.Run(string(scriptBytes))\n\n\tif err != nil {\n\t\tlog.Println(stderr.String())\n\t\treturn nil, err\n\t}\n\n\treturn result, nil\n}",
"func generateKey(keypath string, salt []byte, keyiter int) (key *[secret.SecretKeyLength]byte, isPass bool, err error) {\n\t// if a keypath is given try and use it\n\tif keypath != \"\" {\n\t\tkey, err := secret.ReadKeyFromDisk(keypath)\n\t\tif err != nil {\n\t\t\treturn nil, false, fmt.Errorf(\"unable to build secret service: %v\", err)\n\t\t}\n\t\treturn key, false, nil\n\t}\n\n\t// otherwise read in a passphrase from disk and use that, remember to reset your terminal afterwards\n\tvar passphrase string\n\tfmt.Printf(\"Passphrase: \")\n\tfmt.Scanln(&passphrase)\n\n\t// derive key and return it\n\tkeySlice := pbkdf2.Key([]byte(passphrase), salt, keyiter, 32, sha256.New)\n\tkeyBytes, err := secret.KeySliceToArray(keySlice)\n\tif err != nil {\n\t\treturn nil, true, err\n\t}\n\n\treturn keyBytes, true, nil\n}",
"func GenerateKey(password, identifier string, bits int) (result string, err string) {\n\tr, e := chevronlib.GenerateKey(password, identifier, bits)\n\tresult = r\n\tif e != nil {\n\t\terr = e.Error()\n\t}\n\n\treturn\n}",
"func GenerateKey(keyPath string, SecLv int) (heimdall.PriKey, heimdall.PubKey) {\n\tif _, err := os.Stat(keyPath); os.IsNotExist(err) {\n\t\tpri, pub := GenerateNewKey(keyPath, SecLv)\n\t\treturn pri, pub\n\t}\n\tpri, pub := LoadKeyPair(keyPath)\n\treturn pri, pub\n}",
"func GenerateKey(password, extradata []byte) (keyfile, mpubkey, mprivkey []byte, err error) {\n\tpubkey, privkey, err := box.GenerateKey(rand.Reader) // Keypair\n\tif err != nil {\n\t\treturn\n\t}\n\treturn SaveKey(pubkey[:], privkey[:], password, extradata)\n}",
"func GenerateKey(curve elliptic.Curve, rand io.Reader) ([]byte, *big.Int, *big.Int, error)",
"func GenerateNewKeypair() *Keypair {\n\n\tpk, _ := ecdsa.GenerateKey(elliptic.P224(), rand.Reader)\n\n\tb := bigJoin(KEY_SIZE, pk.PublicKey.X, pk.PublicKey.Y)\n\n\tpublic := base58.EncodeBig([]byte{}, b)\n\tprivate := base58.EncodeBig([]byte{}, pk.D)\n\n\tkp := Keypair{Public: public, Private: private}\n\n\treturn &kp\n}",
"func GenerateKey() (publicKey PublicKey, privateKey PrivateKey, err error) {\n\tpub, priv, genErr := ed25519.GenerateKey(nil)\n\tcopy(publicKey[:], pub)\n\tcopy(privateKey[:], priv)\n\terr = genErr\n\n\treturn\n}",
"func GenerateKeypair() (*Keypair, error) {\n\tvar publicKey [32]byte\n\tvar privateKey [32]byte\n\t_, err := rand.Read(privateKey[:])\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tcurve25519.ScalarBaseMult(&publicKey, &privateKey)\n\treturn &Keypair{publicKey, privateKey}, nil\n}",
"func KeyGen(params *Params, master *MasterKey, attrs wkdibe.AttributeList, userNum int, newUser int) (*PrivateKey, error) {\n\tif newUser <= 0 || userNum < 0 || userNum+newUser > *params.userSize {\n\t\tpanic(\"Parameters for KeyGen are out of bound\")\n\t}\n\tkey := &PrivateKey{}\n\tlEnd, rEnd := userNum+1, userNum+newUser\n\tvar err error\n\tnodeID := make([]int, *params.userHeight, *params.userHeight)\n\n\tkey.root, err = treeKeyGen(params, master, 1, *params.userSize, lEnd, rEnd, attrs, nodeID, 0)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tkey.lEnd, key.rEnd = new(int), new(int)\n\t*key.lEnd, *key.rEnd = lEnd, rEnd\n\treturn key, nil\n}",
"func KeyGenerator() (*Key, error) {\n\tlog.Info(\"Generate new ssh key\")\n\n\tkey := new(Key)\n\n\tprivateKey, err := rsa.GenerateKey(rand.Reader, 2048)\n\tif err != nil {\n\t\tlog.Errorf(\"PrivateKey generator failed reason: %s\", err.Error())\n\t\treturn key, err\n\t}\n\n\tprivateKeyPEM := &pem.Block{Type: \"RSA PRIVATE KEY\", Bytes: x509.MarshalPKCS1PrivateKey(privateKey)}\n\tkeyBuff := new(bytes.Buffer)\n\tif err := pem.Encode(keyBuff, privateKeyPEM); err != nil {\n\t\tlog.Errorf(\"PrivateKey generator failed reason: %s\", err.Error())\n\t\treturn key, err\n\t}\n\tkey.PrivateKeyData = keyBuff.String()\n\tlog.Debug(\"Private key generated.\")\n\n\tpub, err := ssh.NewPublicKey(&privateKey.PublicKey)\n\tif err != nil {\n\t\tlog.Errorf(\"PublicKey generator failed reason: %s\", err.Error())\n\t\treturn key, err\n\t}\n\tlog.Debug(\"Public key generated.\")\n\n\tkey.PublicKeyData = fmt.Sprintf(\"%s %s \\n\", strings.TrimSuffix(string(ssh.MarshalAuthorizedKey(pub)), \"\\n\"), \"[email protected]\")\n\n\tkey.PublicKeyFingerprint = ssh.FingerprintSHA256(pub)\n\tlog.Info(\"SSH key generated.\")\n\n\treturn key, nil\n}",
"func GenerateKey(net bitcoin.Network) (*wallet.Key, error) {\r\n\tkey, err := bitcoin.GenerateKey(net)\r\n\tif err != nil {\r\n\t\treturn nil, errors.Wrap(err, \"Failed to generate key\")\r\n\t}\r\n\r\n\tresult := wallet.Key{\r\n\t\tKey: key,\r\n\t}\r\n\r\n\tresult.Address, err = key.RawAddress()\r\n\tif err != nil {\r\n\t\treturn nil, errors.Wrap(err, \"Failed to create key address\")\r\n\t}\r\n\r\n\treturn &result, nil\r\n}",
"func KeyGen(r *big.Int, params *Params, master *MasterKey, attrs AttributeList) (*PrivateKey, error) {\n\tkey := &PrivateKey{}\n\tk := len(attrs)\n\tl := len(params.H)\n\n\t// Randomly choose r in Zp.\n\tif r == nil {\n\t\tvar err error\n\t\tr, err = RandomInZp(rand.Reader)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tproduct := new(bn256.G1).Set(params.G3)\n\tkey.B = make([]*bn256.G1, l-k)\n\tkey.FreeMap = make(map[AttributeIndex]int)\n\tj := 0\n\tfor i, h := range params.H {\n\t\tattrIndex := AttributeIndex(i)\n\t\tif attr, ok := attrs[attrIndex]; ok {\n\t\t\tif attr != nil {\n\t\t\t\thi := new(bn256.G1).ScalarMult(h, attr)\n\t\t\t\tproduct.Add(product, hi)\n\t\t\t}\n\t\t} else {\n\t\t\tkey.B[j] = new(bn256.G1).ScalarMult(h, r)\n\t\t\tkey.FreeMap[attrIndex] = j\n\t\t\tj++\n\t\t}\n\t}\n\tif params.HSig != nil {\n\t\tkey.BSig = new(bn256.G1).ScalarMult(params.HSig, r)\n\t}\n\tproduct.ScalarMult(product, r)\n\n\tkey.A0 = new(bn256.G1).Add((*bn256.G1)(master), product)\n\tkey.A1 = new(bn256.G2).ScalarMult(params.G, r)\n\n\treturn key, nil\n}",
"func GenerateKey(dir string) (wgtypes.Key, error) {\n\tpath := filepath.Join(dir, \"key.priv\")\n\tdata, err := os.ReadFile(path)\n\tif err == nil {\n\t\t//key already exists\n\t\treturn wgtypes.ParseKey(string(data))\n\t} else if !os.IsNotExist(err) {\n\t\t//another error than not exist\n\t\treturn wgtypes.Key{}, err\n\t}\n\n\tkey, err := wgtypes.GeneratePrivateKey()\n\tif err != nil {\n\t\treturn wgtypes.Key{}, err\n\t}\n\tif err := os.MkdirAll(dir, 0700); err != nil {\n\t\treturn wgtypes.Key{}, err\n\t}\n\n\tif err := os.WriteFile(path, []byte(key.String()), 0400); err != nil {\n\t\treturn wgtypes.Key{}, err\n\t}\n\treturn key, nil\n}",
"func (s *State) genkeys() {\n\th := hmac.New(sha256.New, s.dynamic[:])\n\th.Write(s.static[:])\n\tres := h.Sum(nil)\n\tcopy(s.privateKey[:], res)\n\tcurve25519.ScalarBaseMult(&s.PublicKey, &s.privateKey)\n}",
"func keygen() (string, string) {\n priv, _ := config.GenerateRandomBytes(32)\n addr := config.PrivateToAddress(priv)\n return \"0x\"+addr, fmt.Sprintf(\"%x\", priv)\n}",
"func (s *SkyСoinService) GenerateKeyPair() *KeysResponse {\n\tseed := getRand()\n\trand.Read(seed)\n\tpub, sec := cipher.GenerateDeterministicKeyPair(seed)\n\treturn &KeysResponse{\n\t\tPrivate: sec.Hex(),\n\t\tPublic: pub.Hex(),\n\t}\n}",
"func GenerateKey(resp *svcsdk.DescribeKeyOutput) *svcapitypes.Key {\n\tcr := &svcapitypes.Key{}\n\n\tif resp.KeyMetadata.AWSAccountId != nil {\n\t\tcr.Status.AtProvider.AWSAccountID = resp.KeyMetadata.AWSAccountId\n\t} else {\n\t\tcr.Status.AtProvider.AWSAccountID = nil\n\t}\n\tif resp.KeyMetadata.Arn != nil {\n\t\tcr.Status.AtProvider.ARN = resp.KeyMetadata.Arn\n\t} else {\n\t\tcr.Status.AtProvider.ARN = nil\n\t}\n\tif resp.KeyMetadata.CloudHsmClusterId != nil {\n\t\tcr.Status.AtProvider.CloudHsmClusterID = resp.KeyMetadata.CloudHsmClusterId\n\t} else {\n\t\tcr.Status.AtProvider.CloudHsmClusterID = nil\n\t}\n\tif resp.KeyMetadata.CreationDate != nil {\n\t\tcr.Status.AtProvider.CreationDate = &metav1.Time{*resp.KeyMetadata.CreationDate}\n\t} else {\n\t\tcr.Status.AtProvider.CreationDate = nil\n\t}\n\tif resp.KeyMetadata.CustomKeyStoreId != nil {\n\t\tcr.Spec.ForProvider.CustomKeyStoreID = resp.KeyMetadata.CustomKeyStoreId\n\t} else {\n\t\tcr.Spec.ForProvider.CustomKeyStoreID = nil\n\t}\n\tif resp.KeyMetadata.CustomerMasterKeySpec != nil {\n\t\tcr.Spec.ForProvider.CustomerMasterKeySpec = resp.KeyMetadata.CustomerMasterKeySpec\n\t} else {\n\t\tcr.Spec.ForProvider.CustomerMasterKeySpec = nil\n\t}\n\tif resp.KeyMetadata.DeletionDate != nil {\n\t\tcr.Status.AtProvider.DeletionDate = &metav1.Time{*resp.KeyMetadata.DeletionDate}\n\t} else {\n\t\tcr.Status.AtProvider.DeletionDate = nil\n\t}\n\tif resp.KeyMetadata.Description != nil {\n\t\tcr.Spec.ForProvider.Description = resp.KeyMetadata.Description\n\t} else {\n\t\tcr.Spec.ForProvider.Description = nil\n\t}\n\tif resp.KeyMetadata.Enabled != nil {\n\t\tcr.Status.AtProvider.Enabled = resp.KeyMetadata.Enabled\n\t} else {\n\t\tcr.Status.AtProvider.Enabled = nil\n\t}\n\tif resp.KeyMetadata.EncryptionAlgorithms != nil {\n\t\tf9 := []*string{}\n\t\tfor _, f9iter := range resp.KeyMetadata.EncryptionAlgorithms {\n\t\t\tvar f9elem string\n\t\t\tf9elem = *f9iter\n\t\t\tf9 = append(f9, &f9elem)\n\t\t}\n\t\tcr.Status.AtProvider.EncryptionAlgorithms = f9\n\t} else {\n\t\tcr.Status.AtProvider.EncryptionAlgorithms = nil\n\t}\n\tif resp.KeyMetadata.ExpirationModel != nil {\n\t\tcr.Status.AtProvider.ExpirationModel = resp.KeyMetadata.ExpirationModel\n\t} else {\n\t\tcr.Status.AtProvider.ExpirationModel = nil\n\t}\n\tif resp.KeyMetadata.KeyId != nil {\n\t\tcr.Status.AtProvider.KeyID = resp.KeyMetadata.KeyId\n\t} else {\n\t\tcr.Status.AtProvider.KeyID = nil\n\t}\n\tif resp.KeyMetadata.KeyManager != nil {\n\t\tcr.Status.AtProvider.KeyManager = resp.KeyMetadata.KeyManager\n\t} else {\n\t\tcr.Status.AtProvider.KeyManager = nil\n\t}\n\tif resp.KeyMetadata.KeySpec != nil {\n\t\tcr.Spec.ForProvider.KeySpec = resp.KeyMetadata.KeySpec\n\t} else {\n\t\tcr.Spec.ForProvider.KeySpec = nil\n\t}\n\tif resp.KeyMetadata.KeyState != nil {\n\t\tcr.Status.AtProvider.KeyState = resp.KeyMetadata.KeyState\n\t} else {\n\t\tcr.Status.AtProvider.KeyState = nil\n\t}\n\tif resp.KeyMetadata.KeyUsage != nil {\n\t\tcr.Spec.ForProvider.KeyUsage = resp.KeyMetadata.KeyUsage\n\t} else {\n\t\tcr.Spec.ForProvider.KeyUsage = nil\n\t}\n\tif resp.KeyMetadata.MultiRegion != nil {\n\t\tcr.Spec.ForProvider.MultiRegion = resp.KeyMetadata.MultiRegion\n\t} else {\n\t\tcr.Spec.ForProvider.MultiRegion = nil\n\t}\n\tif resp.KeyMetadata.MultiRegionConfiguration != nil {\n\t\tf17 := &svcapitypes.MultiRegionConfiguration{}\n\t\tif resp.KeyMetadata.MultiRegionConfiguration.MultiRegionKeyType != nil {\n\t\t\tf17.MultiRegionKeyType = resp.KeyMetadata.MultiRegionConfiguration.MultiRegionKeyType\n\t\t}\n\t\tif 
resp.KeyMetadata.MultiRegionConfiguration.PrimaryKey != nil {\n\t\t\tf17f1 := &svcapitypes.MultiRegionKey{}\n\t\t\tif resp.KeyMetadata.MultiRegionConfiguration.PrimaryKey.Arn != nil {\n\t\t\t\tf17f1.ARN = resp.KeyMetadata.MultiRegionConfiguration.PrimaryKey.Arn\n\t\t\t}\n\t\t\tif resp.KeyMetadata.MultiRegionConfiguration.PrimaryKey.Region != nil {\n\t\t\t\tf17f1.Region = resp.KeyMetadata.MultiRegionConfiguration.PrimaryKey.Region\n\t\t\t}\n\t\t\tf17.PrimaryKey = f17f1\n\t\t}\n\t\tif resp.KeyMetadata.MultiRegionConfiguration.ReplicaKeys != nil {\n\t\t\tf17f2 := []*svcapitypes.MultiRegionKey{}\n\t\t\tfor _, f17f2iter := range resp.KeyMetadata.MultiRegionConfiguration.ReplicaKeys {\n\t\t\t\tf17f2elem := &svcapitypes.MultiRegionKey{}\n\t\t\t\tif f17f2iter.Arn != nil {\n\t\t\t\t\tf17f2elem.ARN = f17f2iter.Arn\n\t\t\t\t}\n\t\t\t\tif f17f2iter.Region != nil {\n\t\t\t\t\tf17f2elem.Region = f17f2iter.Region\n\t\t\t\t}\n\t\t\t\tf17f2 = append(f17f2, f17f2elem)\n\t\t\t}\n\t\t\tf17.ReplicaKeys = f17f2\n\t\t}\n\t\tcr.Status.AtProvider.MultiRegionConfiguration = f17\n\t} else {\n\t\tcr.Status.AtProvider.MultiRegionConfiguration = nil\n\t}\n\tif resp.KeyMetadata.Origin != nil {\n\t\tcr.Spec.ForProvider.Origin = resp.KeyMetadata.Origin\n\t} else {\n\t\tcr.Spec.ForProvider.Origin = nil\n\t}\n\tif resp.KeyMetadata.PendingDeletionWindowInDays != nil {\n\t\tcr.Status.AtProvider.PendingDeletionWindowInDays = resp.KeyMetadata.PendingDeletionWindowInDays\n\t} else {\n\t\tcr.Status.AtProvider.PendingDeletionWindowInDays = nil\n\t}\n\tif resp.KeyMetadata.SigningAlgorithms != nil {\n\t\tf20 := []*string{}\n\t\tfor _, f20iter := range resp.KeyMetadata.SigningAlgorithms {\n\t\t\tvar f20elem string\n\t\t\tf20elem = *f20iter\n\t\t\tf20 = append(f20, &f20elem)\n\t\t}\n\t\tcr.Status.AtProvider.SigningAlgorithms = f20\n\t} else {\n\t\tcr.Status.AtProvider.SigningAlgorithms = nil\n\t}\n\tif resp.KeyMetadata.ValidTo != nil {\n\t\tcr.Status.AtProvider.ValidTo = &metav1.Time{*resp.KeyMetadata.ValidTo}\n\t} else {\n\t\tcr.Status.AtProvider.ValidTo = nil\n\t}\n\n\treturn cr\n}",
"func generateKeyPair() (publicKey, privateKey *[32]byte, err error) {\n\treturn box.GenerateKey(rand.Reader)\n}",
"func createKeypair() *keypair.Full {\n\tpair, err := keypair.Random()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tlog.Println(\"Seed:\", pair.Seed())\n\tlog.Println(\"Address:\", pair.Address())\n\n\treturn pair\n}",
"func GenerateKey() ([]byte, error) {\n\tkey := make([]byte, AES256KeySizeBytes)\n\t_, err := rand.Read(key)\n\tif err != nil {\n\t\treturn key, err\n\t}\n\treturn key, nil\n}",
"func (k *Key) generateKeyID() error {\n\t// Create partial key map used to create the keyid\n\t// Unfortunately, we can't use the Key object because this also carries\n\t// yet unwanted fields, such as KeyID and KeyVal.Private and therefore\n\t// produces a different hash. We generate the keyID exactly as we do in\n\t// the securesystemslib to keep interoperability between other in-toto\n\t// implementations.\n\tvar keyToBeHashed = map[string]interface{}{\n\t\t\"keytype\": k.KeyType,\n\t\t\"scheme\": k.Scheme,\n\t\t\"keyid_hash_algorithms\": k.KeyIDHashAlgorithms,\n\t\t\"keyval\": map[string]string{\n\t\t\t\"public\": k.KeyVal.Public,\n\t\t},\n\t}\n\tkeyCanonical, err := cjson.EncodeCanonical(keyToBeHashed)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// calculate sha256 and return string representation of keyID\n\tkeyHashed := sha256.Sum256(keyCanonical)\n\tk.KeyID = fmt.Sprintf(\"%x\", keyHashed)\n\terr = validateKey(*k)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (ks *VRF) CreateKey() (secp256k1.PublicKey, error) {\n\tif ks.password == \"\" {\n\t\treturn secp256k1.PublicKey{}, errors.New(\"vrf keystore is not unlocked\")\n\t}\n\tkey := vrfkey.CreateKey()\n\tif err := ks.Store(key, ks.password, ks.scryptParams); err != nil {\n\t\treturn secp256k1.PublicKey{}, err\n\t}\n\treturn key.PublicKey, nil\n}",
"func (wa *WzAES) GenerateKey(pkiDir string) error {\n\twa.key = &[32]byte{}\n\t_, err := io.ReadFull(rand.Reader, wa.key[:])\n\tif err != nil {\n\t\treturn err\n\t}\n\tbuff := make([]byte, 0)\n\tfor _, elm := range wa.key {\n\t\tbuff = append(buff, elm)\n\t}\n\tif err := ioutil.WriteFile(path.Join(pkiDir, AES_TOKEN), buff, 0600); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func GenerateKey(random io.Reader, bits int) (*rsa.PrivateKey, error)",
"func generateSigningKey(secretKey, regionName, serviceName string, t time.Time) ([]byte, error) {\n\tkey := []byte(PreSKString + secretKey)\n\tvar err error\n\tdateStamp := t.UTC().Format(BasicDateFormatShort)\n\tdata := []string{dateStamp, regionName, serviceName, TerminationString}\n\tfor _, d := range data {\n\t\tkey, err = hmacsha256(key, d)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn key, nil\n}",
"func GenerateKey(path string) (string, error) {\n\tdata := make([]byte, aes.BlockSize)\n\tn, err := rand.Read(data)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"failed to generate random bytes: %v\", err)\n\t}\n\tif n != aes.BlockSize {\n\t\treturn \"\", fmt.Errorf(\"the length of random bytes %d != %d\", n, aes.BlockSize)\n\t}\n\n\tif err = os.WriteFile(path, data, 0777); err != nil {\n\t\treturn \"\", fmt.Errorf(\"failed write secret key to file %s: %v\", path, err)\n\t}\n\n\treturn string(data), nil\n}",
"func (g *KeyGenerator) GenerateKey(r io.Reader, s int) (*rsa.PrivateKey, error) {\n\treturn rsa.GenerateKey(r, s)\n}",
"func GenerateKey(rand io.Reader) (*ecdsa.PrivateKey, error) {\n\tc := Secp256k1()\n\tk, err := RandFieldElement(rand)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpriv := new(ecdsa.PrivateKey)\n\tpriv.PublicKey.Curve = c\n\tpriv.D = k\n\tpriv.PublicKey.X, priv.PublicKey.Y = c.ScalarBaseMult(k.Bytes())\n\treturn priv, nil\n}",
"func GenerateNewKeyPair(bits int) (*rsa.PrivateKey, error) {\n\tprivKey, err := rsa.GenerateKey(rand.Reader, bits)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn privKey, err\n}",
"func (r *RatchetState) genkeys() {\n\th := hmac.New(crypto.SHA256.New, r.dynamic[:])\n\th.Write(r.static[:])\n\tres := h.Sum(nil)\n\tcopy(r.privateKey[:], res)\n\tcurve25519.ScalarBaseMult(&r.PublicKey, &r.privateKey)\n}",
"func GenerateKey(rand io.Reader) (PublicKey, PrivateKey, error) {\n\tpriv := make([]byte, PrivateKeySize)\n\tif _, err := io.ReadFull(rand, priv); err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tpub, err := curve25519.X25519(priv, curve25519.Basepoint)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\treturn pub, priv, err\n}",
"func GenerateKey() (*ecdsa.PrivateKey, error) {\n\treturn crypto.GenerateKey()\n}",
"func GenerateKey() (*ecdsa.PrivateKey, error) {\n\treturn crypto.GenerateKey()\n}",
"func GenerateKey() ([]byte) {\n\tkey := make([]byte, 24)\n\t_, err := rand.Read(key)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\treturn key\n}",
"func GenerateCreateKeyInput(cr *svcapitypes.Key) *svcsdk.CreateKeyInput {\n\tres := &svcsdk.CreateKeyInput{}\n\n\tif cr.Spec.ForProvider.BypassPolicyLockoutSafetyCheck != nil {\n\t\tres.SetBypassPolicyLockoutSafetyCheck(*cr.Spec.ForProvider.BypassPolicyLockoutSafetyCheck)\n\t}\n\tif cr.Spec.ForProvider.CustomKeyStoreID != nil {\n\t\tres.SetCustomKeyStoreId(*cr.Spec.ForProvider.CustomKeyStoreID)\n\t}\n\tif cr.Spec.ForProvider.CustomerMasterKeySpec != nil {\n\t\tres.SetCustomerMasterKeySpec(*cr.Spec.ForProvider.CustomerMasterKeySpec)\n\t}\n\tif cr.Spec.ForProvider.Description != nil {\n\t\tres.SetDescription(*cr.Spec.ForProvider.Description)\n\t}\n\tif cr.Spec.ForProvider.KeySpec != nil {\n\t\tres.SetKeySpec(*cr.Spec.ForProvider.KeySpec)\n\t}\n\tif cr.Spec.ForProvider.KeyUsage != nil {\n\t\tres.SetKeyUsage(*cr.Spec.ForProvider.KeyUsage)\n\t}\n\tif cr.Spec.ForProvider.MultiRegion != nil {\n\t\tres.SetMultiRegion(*cr.Spec.ForProvider.MultiRegion)\n\t}\n\tif cr.Spec.ForProvider.Origin != nil {\n\t\tres.SetOrigin(*cr.Spec.ForProvider.Origin)\n\t}\n\tif cr.Spec.ForProvider.Policy != nil {\n\t\tres.SetPolicy(*cr.Spec.ForProvider.Policy)\n\t}\n\tif cr.Spec.ForProvider.Tags != nil {\n\t\tf9 := []*svcsdk.Tag{}\n\t\tfor _, f9iter := range cr.Spec.ForProvider.Tags {\n\t\t\tf9elem := &svcsdk.Tag{}\n\t\t\tif f9iter.TagKey != nil {\n\t\t\t\tf9elem.SetTagKey(*f9iter.TagKey)\n\t\t\t}\n\t\t\tif f9iter.TagValue != nil {\n\t\t\t\tf9elem.SetTagValue(*f9iter.TagValue)\n\t\t\t}\n\t\t\tf9 = append(f9, f9elem)\n\t\t}\n\t\tres.SetTags(f9)\n\t}\n\n\treturn res\n}",
"func genKey() (peerid string, privatekey string, err error) {\n\t// generate private key\n\tpriv, _, err := crypto.GenerateKeyPairWithReader(crypto.Ed25519, -1, crand.Reader)\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\t// convert to bytes\n\tkBytes, err := crypto.MarshalPrivateKey(priv)\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\t// Obtain Peer ID from public key\n\tpid, err := libp2p_peer.IDFromPublicKey(priv.GetPublic())\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\treturn pid.String(), base64.StdEncoding.EncodeToString(kBytes), nil\n}",
"func GenerateKey() []byte {\n\treturn RandAsciiBytes(KeySize)\n}",
"func newKey(rand io.Reader) (*keystore.Key, error) {\n\tpriECDSA, err := ecdsa.GenerateKey(crypto.S256(), rand)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn newKeyFromECDSA(priECDSA), nil\n}",
"func GenerateRevocationKey(issuerDID did.DID, credentialID string) string {\n\tsha := sha256.Sum256([]byte(issuerDID.ToShortFormDid().String() + credentialID))\n\treturn base58.Encode(sha[:])\n}",
"func generateKey(curve elliptic.Curve) (private []byte, public []byte, err error) {\n\tvar x, y *big.Int\n\tprivate, x, y, err = elliptic.GenerateKey(curve, rand.Reader)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tpublic = elliptic.Marshal(curve, x, y)\n\treturn\n}",
"func (c *HashRing) generateKey(ip string, i int) string {\n\treturn ip + \"#\" + strconv.Itoa(i)\n}",
"func KeyGen(key string, keyPrefix string, programName string, withProgramName bool) string {\n\tfinalKey := keyGen(key, keyPrefix, programName, withProgramName)\n\treturn escapeKey(finalKey)\n}",
"func GenKey(ip, port string) (kyber.Scalar, kyber.Point) {\n\tpriKey := crypto.Ed25519Curve.Scalar().SetInt64(int64(GetUniqueIDFromIPPort(ip, port))) // TODO: figure out why using a random hash value doesn't work for private key (schnorr)\n\tpubKey := pki.GetPublicKeyFromScalar(priKey)\n\n\treturn priKey, pubKey\n}",
"func (buf *KeyInfoBuffer) GenerateKey(tableID, rowID int64) Key {\n\tif tableID == 0 {\n\t\treturn nil\n\t}\n\n\tdata := *buf\n\tif data == nil {\n\t\tlength := len(tablePrefix) + 8\n\t\tif rowID != 0 {\n\t\t\tlength = len(tablePrefix) + len(recordPrefix) + 8*2\n\t\t}\n\t\tdata = make([]byte, 0, length)\n\t} else {\n\t\tdata = data[:0]\n\t}\n\n\tdata = append(data, tablePrefix...)\n\tdata = encodeInt(data, tableID)\n\tif rowID != 0 {\n\t\tdata = append(data, recordPrefix...)\n\t\tdata = encodeInt(data, rowID)\n\t}\n\n\t*buf = data\n\n\treturn encodeBytes(data)\n}",
"func (kg *ecdsaKeyGenerator) GenerateKey() error {\n\tif kg.DoesKeyExist() {\n\t\treturn kg.loadKey()\n\t}\n\n\tprivateKey, err := ecdsa.GenerateKey(elliptic.P256(), reader)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error encountered when generating ECDSA public/private keypair: %+v\\n\", err)\n\t}\n\n\tkg.privateKey = privateKey\n\tkg.publicKey = &privateKey.PublicKey\n\treturn nil\n}",
"func (p *ProtocolTECDSA) GenKey() error {\n\tp.secret = Gen(\"x\", p.network, \"ElGamal\")\n\tp.key, _ = p.secret.Exp()\n\tp.key.RevealExp()\n\n\tp.genElGamalKey()\n\n\treturn nil\n}",
"func GenKey(defaultNodeHome string) *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"genkey\",\n\t\tShort: \"generate key from the key file\",\n\t\tArgs: cobra.NoArgs,\n\t\tPreRunE: preCheckCmd,\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tserverCtx := server.GetServerContextFromCmd(cmd)\n\t\t\tconfig := serverCtx.Config\n\n\t\t\tnodeKey, filePv, err := genutil.InitializeNodeValidatorFiles(config)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tvar privKey crypto.PrivKey\n\n\t\t\tkeyType := strings.TrimSpace(viper.GetString(FlagType))\n\t\t\tif keyType == \"node\" {\n\t\t\t\tprivKey = nodeKey.PrivKey\n\t\t\t} else {\n\t\t\t\tprivKey = filePv.Key.PrivKey\n\t\t\t}\n\n\t\t\tkey, err := genutil.Genkey(privKey)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\treturn tempfile.WriteFileAtomic(viper.GetString(FlagOutFile), key, 0600)\n\t\t},\n\t}\n\n\tcmd.Flags().String(FlagType, \"validator\", \"key type (node|validator)\")\n\tcmd.Flags().String(cli.HomeFlag, defaultNodeHome, \"node's home directory\")\n\tcmd.Flags().String(FlagOutFile, \"priv.pem\", \"private key file path\")\n\n\treturn cmd\n}",
"func GenerateKeyPair() ([]byte, []byte) {\n\tconst seckeyLen = 32\n\tvar seckey []byte\n\tvar pubkey []byte\n\nnew_seckey:\n\tseckey = RandByte(seckeyLen)\n\tif secp.SeckeyIsValid(seckey) != 1 {\n\t\tgoto new_seckey // regen\n\t}\n\n\tpubkey = pubkeyFromSeckey(seckey)\n\tif pubkey == nil {\n\t\tlog.Panic(\"IMPOSSIBLE: pubkey invalid from valid seckey\")\n\t\tgoto new_seckey\n\t}\n\tif ret := secp.PubkeyIsValid(pubkey); ret != 1 {\n\t\tlog.Panicf(\"ERROR: Pubkey invalid, ret=%d\", ret)\n\t\tgoto new_seckey\n\t}\n\n\treturn pubkey, seckey\n}",
"func GenerateKey() (*ecdsa.PrivateKey, error) {\n\tparams := elliptic.Sm2p256v1().Params()\n\tb := make([]byte, params.BitSize/8+8) // TODO: use params.N.BitLen()\n\t_, err := io.ReadFull(rand.Reader, b)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tk := new(big.Int).SetBytes(b)\n\tn := new(big.Int).Sub(params.N, one)\n\tk.Mod(k, n)\n\tk.Add(k, one)\n\treturn ToSM2(k.Bytes())\n}",
"func GenerateKey(bits int) (*rsa.PrivateKey, error) {\n\treturn rsa.GenerateKey(rand.Reader, bits)\n}",
"func TestGenerateNewSSHKey(t *testing.T) {\n\tfilename := \"/tmp/bot-sshca-integration-test-generate-key\"\n\tos.Remove(filename)\n\n\terr := GenerateNewSSHKey(filename, false, false)\n\trequire.NoError(t, err)\n\n\terr = GenerateNewSSHKey(filename, false, false)\n\trequire.Errorf(t, err, \"Refusing to overwrite existing key (try with FORCE_WRITE=true if you're sure): \"+filename)\n\n\terr = GenerateNewSSHKey(filename, true, false)\n\trequire.NoError(t, err)\n\n\tbytes, err := ioutil.ReadFile(filename)\n\trequire.NoError(t, err)\n\trequire.True(t, strings.Contains(string(bytes), \"PRIVATE\"))\n\n\tbytes, err = ioutil.ReadFile(shared.KeyPathToPubKey(filename))\n\trequire.NoError(t, err)\n\trequire.False(t, strings.Contains(string(bytes), \"PRIVATE\"))\n\trequire.True(t, strings.HasPrefix(string(bytes), \"ssh-ed25519\") || strings.HasPrefix(string(bytes), \"ecdsa-sha2-nistp256\"))\n}",
"func Keygen() (ed25519.Scalar, ed25519.Point) {\n\tsecret_key := ed25519.Random()\n\tpublic_key := H.Mul(secret_key)\n\treturn secret_key, public_key\n}",
"func Generate() (*SSHKey, error) {\n\tdata := &SSHKey{}\n\n\trsaKey, err := rsa.GenerateKey(rand.Reader, 2048)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar pemBuf bytes.Buffer\n\tpem.Encode(&pemBuf, &pem.Block{\n\t\tType: \"RSA PRIVATE KEY\",\n\t\tBytes: x509.MarshalPKCS1PrivateKey(rsaKey),\n\t})\n\trsaPubKey, err := ssh.NewPublicKey(&rsaKey.PublicKey)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdata.PublicKey = bytes.TrimSpace(ssh.MarshalAuthorizedKey(rsaPubKey))\n\tdata.PrivateKey = rsaKey\n\n\treturn data, nil\n}",
"func (b *Base) GenerateKeyPair(req *GenerateKeyPairReq) (*GenerateKeyPairResp, error) {\n\treturn nil, ErrFunctionNotSupported\n}",
"func GenerateKey(rand io.Reader) (*PublicKey, *PrivateKey, error) {\n\tvar seed [SeedSize]byte\n\tif rand == nil {\n\t\trand = cryptoRand.Reader\n\t}\n\t_, err := io.ReadFull(rand, seed[:])\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tpk, sk := NewKeyFromSeed(&seed)\n\treturn pk, sk, nil\n}",
"func keyGen(key string, keyPrefix string, programName string, withProgramName bool) string {\n\tif programName != \"\" && withProgramName {\n\t\tif keyPrefix == \"\" {\n\t\t\treturn fmt.Sprintf(\"%s.%s\", programName, key)\n\t\t}\n\n\t\treturn fmt.Sprintf(\"%s.%s_%s\", programName, keyPrefix, key)\n\t} else {\n\t\tif keyPrefix == \"\" {\n\t\t\treturn key\n\t\t}\n\n\t\treturn fmt.Sprintf(\"%s_%s\", keyPrefix, key)\n\t}\n}",
"func GenerateFundKey(addr string) string {\n\treturn fmt.Sprintf(\"fund_addr_%s\", addr)\n}",
"func GenerateKeys(c *cli.Context) error {\n\t// validate\n\tif err := validateKeys(c); err != nil {\n\t\treturn err\n\t}\n\tf, err := buildFiles(c)\n\tif err != nil {\n\t\treturn cli.NewExitError(fmt.Sprintf(\"error building files %q\", err), 2)\n\t}\n\tif err := encodeFiles(f); err != nil {\n\t\treturn cli.NewExitError(fmt.Sprintf(\"unable to write keys %q\", err), 2)\n\t}\n\treturn nil\n}",
"func genServiceKey() ([]byte, error) {\n\tpriv, _, err := crypto.GenerateEd25519Key(crand.Reader)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tseed, err := seedFromEd25519PrivateKey(priv)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn seed, err\n}",
"func makeKey(password, salt []byte) []byte {\n\treturn pbkdf2.Key(password, salt, Pbkdf2Iters, KeySize, sha256.New)\n}",
"func (r *repo) GenKey(iri pub.IRI) error {\n\tob, err := r.loadOneFromPath(iri)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif ob.GetType() != pub.PersonType {\n\t\treturn errors.Newf(\"trying to generate keys for invalid ActivityPub object type: %s\", ob.GetType())\n\t}\n\tm, err := r.LoadMetadata(iri)\n\tif err != nil && !errors.IsNotFound(err) {\n\t\treturn err\n\t}\n\tif m == nil {\n\t\tm = new(storage.Metadata)\n\t}\n\tif m.PrivateKey != nil {\n\t\treturn nil\n\t}\n\t// TODO(marius): this needs a way to choose between ED25519 and RSA keys\n\tpubB, prvB := generateECKeyPair()\n\tm.PrivateKey = pem.EncodeToMemory(&prvB)\n\n\tif err = r.SaveMetadata(*m, iri); err != nil {\n\t\treturn err\n\t}\n\tpub.OnActor(ob, func(act *pub.Actor) error {\n\t\tact.PublicKey = pub.PublicKey{\n\t\t\tID: pub.IRI(fmt.Sprintf(\"%s#main\", iri)),\n\t\t\tOwner: iri,\n\t\t\tPublicKeyPem: string(pem.EncodeToMemory(&pubB)),\n\t\t}\n\t\treturn nil\n\t})\n\treturn nil\n}",
"func GenerateAPIKey() string {\n\treturn uniuri.NewLenChars(16, []byte(\"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz\"))\n}",
"func MakeGenerateKeyEndpoint(svc Service) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\tvar (\n\t\t\treq GenerateKeyRequest\n\t\t\tresp GenerateKeyResponse\n\t\t)\n\t\treq = request.(GenerateKeyRequest)\n\t\tkey, err := svc.GenerateKey(ctx, req)\n\t\tif err != nil {\n\t\t\tresp.Key = key\n\t\t\tresp.Err = err.Error()\n\t\t\treturn resp, nil\n\t\t}\n\t\tresp.Key = key\n\t\treturn resp, nil\n\t}\n}",
"func NewKey() (key *Key, err error) {\n\tpriv, pub, err := native.GenerateKeyPair(\"\")\n\tif err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\n\treturn &Key{\n\t\tPriv: priv,\n\t\tPub: pub,\n\t}, nil\n}",
"func GenerateKeyPair(h func() hash.Hash, seed []byte) (*PublicKey, *PrivateKey, error) {\n\tif len(seed) != 0 && len(seed) != seedSize {\n\t\treturn nil, nil, errors.New(\"invalid size of seed\")\n\t}\n\n\tokm, err := generateOKM(seed, h)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tprivKeyFr, err := frFromOKM(okm)\n\tif err != nil {\n\t\treturn nil, nil, fmt.Errorf(\"convert OKM to FR: %w\", err)\n\t}\n\n\tprivKey := &PrivateKey{PrivKey: g2pubs.NewSecretKeyFromFR(privKeyFr)}\n\tpubKey := privKey.PublicKey()\n\n\treturn pubKey, privKey, nil\n}",
"func GenerateKey(n int) (string, error) {\n\tbuf := make([]byte, n)\n\t_, err := rand.Read(buf)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn base64.URLEncoding.EncodeToString(buf), nil\n\n}",
"func GenerateKey(rand io.Reader) (priv PrivateKey, err error) {\n\t/* See Certicom's SEC1 3.2.1, pg.23 */\n\t/* See NSA's Suite B Implementer’s Guide to FIPS 186-3 (ECDSA) A.1.1, pg.18 */\n\n\t/* Select private key d randomly from [1, n) */\n\n\t/* Read N bit length random bytes + 64 extra bits */\n\tb := make([]byte, secp256k1.N.BitLen()/8+8)\n\t_, err = io.ReadFull(rand, b)\n\tif err != nil {\n\t\treturn priv, fmt.Errorf(\"Reading random reader: %v\", err)\n\t}\n\n\td := new(big.Int).SetBytes(b)\n\n\t/* Mod n-1 to shift d into [0, n-1) range */\n\td.Mod(d, new(big.Int).Sub(secp256k1.N, big.NewInt(1)))\n\t/* Add one to shift d to [1, n) range */\n\td.Add(d, big.NewInt(1))\n\n\tpriv.D = d\n\n\t/* Derive public key from private key */\n\tpriv.derive()\n\n\treturn priv, nil\n}",
"func GenKeyPair() (string, string, error) {\n\tprivateKey, err := rsa.GenerateKey(rand.Reader, 2048)\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\tprivateKeyPEM := &pem.Block{Type: \"RSA PRIVATE KEY\", Bytes: x509.MarshalPKCS1PrivateKey(privateKey)}\n\tvar private bytes.Buffer\n\tif err := pem.Encode(&private, privateKeyPEM); err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\t// generate public key\n\tpub, err := ssh.NewPublicKey(&privateKey.PublicKey)\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\tpublic := ssh.MarshalAuthorizedKey(pub)\n\treturn string(public), private.String(), nil\n}",
"func GenKeyPair() (string, string, error) {\n\tprivateKey, err := rsa.GenerateKey(rand.Reader, 2048)\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\tprivateKeyPEM := &pem.Block{Type: \"RSA PRIVATE KEY\", Bytes: x509.MarshalPKCS1PrivateKey(privateKey)}\n\tvar private bytes.Buffer\n\tif err := pem.Encode(&private, privateKeyPEM); err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\t// generate public key\n\tpub, err := ssh.NewPublicKey(&privateKey.PublicKey)\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\tpublic := ssh.MarshalAuthorizedKey(pub)\n\treturn string(public), private.String(), nil\n}",
"func Generate(bits int) (Key, error) {\n\tprivkey, err := rsa.GenerateKey(rand.Reader, bits)\n\tif err != nil {\n\t\treturn Key{}, nil\n\t}\n\tkey := Key{\n\t\tPrivate: privkey,\n\t\tPublic: &privkey.PublicKey,\n\t}\n\treturn key, nil\n}",
"func generateKeyPairName() string {\n\tid := fmt.Sprintf(\"%x\", rand.Int())\n\treturn securityGroupNamePrefix + id\n}",
"func keygen(path string) error {\n\tif path == \"\" {\n\t\treturn errors.New(\"keygen missing argument for key path\")\n\t}\n\n\tkey, err := rsa.GenerateKey(rand.Reader, 2048)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"generating keys\")\n\t}\n\n\tfile, err := os.Create(path)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"creating private file\")\n\t}\n\tdefer file.Close()\n\n\tblock := pem.Block{\n\t\tType: \"RSA PRIVATE KEY\",\n\t\tBytes: x509.MarshalPKCS1PrivateKey(key),\n\t}\n\n\tif err := pem.Encode(file, &block); err != nil {\n\t\treturn errors.Wrap(err, \"encoding to private file\")\n\t}\n\n\tif err := file.Close(); err != nil {\n\t\treturn errors.Wrap(err, \"closing private file\")\n\t}\n\n\treturn nil\n}",
"func genPubkey() ([]byte, []byte) {\n\t_, pub := btcec.PrivKeyFromBytes(btcec.S256(), randomBytes(32))\n\tpubkey := pub.SerializeCompressed()\n\tpkHash := btcutil.Hash160(pubkey)\n\treturn pubkey, pkHash\n}",
"func GeneratePrivateKey() *PrivateKey {\n\tpriv := new(PrivateKey)\n\tseckey := NewSeckey()\n\tpriv.seckey = seckey\n\treturn priv\n}",
"func NewKey() *Key {\n\tkeypair, err := crypto.GenerateKeyPair()\n\tcommon.FatalIfErr(err, \"There was an error generating a key pair\")\n\treturn &Key{\n\t\tID: uuid.NewRandom(),\n\t\tKeyPair: &keypair,\n\t}\n}",
"func GenKey() ([]byte, error) {\n\tkey, err := exec.Command(\"wg\", \"genkey\").Output()\n\treturn bytes.Trim(key, \"\\n\"), err\n}",
"func (g Generator) Generate() (string, error) {\n\tnow := time.Now()\n\n\tt := jwt.Token{\n\t\tMethod: jwt.SigningMethodES256,\n\t\tHeader: map[string]interface{}{\n\t\t\t\"alg\": jwt.SigningMethodES256.Alg(),\n\t\t\t\"kid\": g.KeyId,\n\t\t},\n\t\tClaims: jwt.MapClaims{\n\t\t\t\"iss\": g.TeamId,\n\t\t\t\"iat\": now.Unix(),\n\t\t\t\"exp\": now.Add(time.Second * time.Duration(g.TTL)).Unix(),\n\t\t},\n\t\tSignature: string(g.Secret),\n\t}\n\n\tkey, err := ParsePKCS8PrivateKeyFromPEM(g.Secret)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn t.SignedString(key)\n}",
"func GenerateKeyPair() *rsa.PrivateKey {\n\n\tprivateKey, err := rsa.GenerateKey(rand.Reader, 2048)\n\n\tif err != nil {\n\t\tlog.Fatal(\"Error in generating key-value pair, error is\", err)\n\t}\n\treturn privateKey\n}",
"func genPrivateKeyFile(c *cli.Context) {\n\tconfig, err := readConfig(c)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tout, err := os.Create(config.KeyFile)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tdefer out.Close()\n\n\tprv, err := rsa.GenerateKey(rand.Reader, 2048)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\terr = pem.Encode(out, &pem.Block{\n\t\tType: \"RSA PRIVATE KEY\",\n\t\tBytes: x509.MarshalPKCS1PrivateKey(prv),\n\t})\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}",
"func (ctx Ctx) GenKey(at KeyPairID, flags KeyFlag) (res Key, err error) {\n\tif C.CryptGenKey(ctx.hProv, C.ALG_ID(at), C.DWORD(flags), &res.hKey) == 0 {\n\t\t// BUG: CryptGenKey raises error NTE_FAIL. Looking into it...\n\t\terr = getErr(\"Error creating key for container\")\n\t\treturn\n\t}\n\treturn\n}",
"func generateKeys() (pub, priv key, err error) {\n\treturn box.GenerateKey(rand.Reader)\n}",
"func genKey(code string, t time.Time) string {\n\treturn fmt.Sprintf(\"%s#%s\", code, t.Format(time.RFC3339))\n}"
] | [
"0.7184372",
"0.7109336",
"0.6994132",
"0.69569266",
"0.69184214",
"0.69149053",
"0.6908917",
"0.6815626",
"0.68146986",
"0.67768675",
"0.6756004",
"0.6755898",
"0.6723591",
"0.6719455",
"0.6715709",
"0.6704407",
"0.66962206",
"0.66807115",
"0.66571164",
"0.6654451",
"0.664013",
"0.66280144",
"0.66277975",
"0.6606326",
"0.6587511",
"0.6580703",
"0.65774786",
"0.6548608",
"0.65479714",
"0.6546035",
"0.6507046",
"0.6500095",
"0.64822",
"0.6449218",
"0.64384544",
"0.64381415",
"0.64154655",
"0.6405209",
"0.63990235",
"0.63979816",
"0.636163",
"0.6359514",
"0.63505083",
"0.63473564",
"0.6331614",
"0.63303965",
"0.63289964",
"0.63266015",
"0.6325282",
"0.6323022",
"0.6323022",
"0.6309256",
"0.62997025",
"0.6297527",
"0.62956655",
"0.62691426",
"0.6264859",
"0.62643516",
"0.6263618",
"0.62594724",
"0.6232324",
"0.62251973",
"0.6223727",
"0.6215423",
"0.6212649",
"0.62067425",
"0.6205229",
"0.62031156",
"0.6189813",
"0.6180871",
"0.6172851",
"0.6165955",
"0.61590683",
"0.61575055",
"0.6151988",
"0.61463904",
"0.6130833",
"0.6122761",
"0.6110896",
"0.6089899",
"0.608559",
"0.6083266",
"0.60831636",
"0.6080549",
"0.60769534",
"0.60760665",
"0.60760665",
"0.6075332",
"0.607442",
"0.60719055",
"0.6069695",
"0.6060586",
"0.6048659",
"0.6045701",
"0.604288",
"0.6040216",
"0.60381335",
"0.60314894",
"0.6028739",
"0.6024296"
] | 0.65792024 | 26 |
H1 hashes m to a curve point | func H1(m []byte) (x, y *big.Int) {
h := sha512.New()
var i uint32
byteLen := (curve.BitSize + 7) >> 3
for x == nil && i < 100 {
// TODO: Use a NIST specified DRBG.
h.Reset()
if err := binary.Write(h, binary.BigEndian, i); err != nil {
panic(err)
}
if _, err := h.Write(m); err != nil {
panic(err)
}
r := []byte{2} // Set point encoding to "compressed", y=0.
r = h.Sum(r)
x, y = Unmarshal(curve, r[:byteLen+1])
i++
}
return
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func hash1Prf(suite pairing.Suite, m string, k kyber.Scalar) kyber.Point {\n\thm := hash1(suite, m)\n\thmk := hm.Mul(k, hm)\n\treturn hmk\n}",
"func HashG1(msg, dst []byte) *G1 {\n\treturn mapToCurve(hashToBase(msg, dst))\n}",
"func (path *PATH) H(x float64) *PATH {\n\treturn path.AddPart(\"H\", x)\n}",
"func (path *PATH) Hh(x float64) *PATH {\n\treturn path.AddPart(\"h\", x)\n}",
"func (pk *PublicKey) ProofToHash(m, proof []byte) (index [32]byte, err error) {\n\tnilIndex := [32]byte{}\n\t// verifier checks that s == H2(m, [t]G + [s]([k]G), [t]H1(m) + [s]VRF_k(m))\n\tif got, want := len(proof), 64+65; got != want {\n\t\treturn nilIndex, ErrInvalidVRF\n\t}\n\n\t// Parse proof into s, t, and vrf.\n\ts := proof[0:32]\n\tt := proof[32:64]\n\tvrf := proof[64 : 64+65]\n\n\tuHx, uHy := elliptic.Unmarshal(curve, vrf)\n\tif uHx == nil {\n\t\treturn nilIndex, ErrInvalidVRF\n\t}\n\n\t// [t]G + [s]([k]G) = [t+ks]G\n\ttGx, tGy := curve.ScalarBaseMult(t)\n\tksGx, ksGy := curve.ScalarMult(pk.X, pk.Y, s)\n\ttksGx, tksGy := curve.Add(tGx, tGy, ksGx, ksGy)\n\n\t// H = H1(m)\n\t// [t]H + [s]VRF = [t+ks]H\n\tHx, Hy := H1(m)\n\ttHx, tHy := curve.ScalarMult(Hx, Hy, t)\n\tsHx, sHy := curve.ScalarMult(uHx, uHy, s)\n\ttksHx, tksHy := curve.Add(tHx, tHy, sHx, sHy)\n\n\t// H2(G, H, [k]G, VRF, [t]G + [s]([k]G), [t]H + [s]VRF)\n\t// = H2(G, H, [k]G, VRF, [t+ks]G, [t+ks]H)\n\t// = H2(G, H, [k]G, VRF, [r]G, [r]H)\n\tvar b bytes.Buffer\n\tif _, err := b.Write(elliptic.Marshal(curve, curve.Gx, curve.Gy)); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(elliptic.Marshal(curve, Hx, Hy)); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(elliptic.Marshal(curve, pk.X, pk.Y)); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(vrf); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(elliptic.Marshal(curve, tksGx, tksGy)); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(elliptic.Marshal(curve, tksHx, tksHy)); err != nil {\n\t\tpanic(err)\n\t}\n\th2 := H2(b.Bytes())\n\n\t// Left pad h2 with zeros if needed. This will ensure that h2 is padded\n\t// the same way s is.\n\tvar buf bytes.Buffer\n\tif _, err := buf.Write(make([]byte, 32-len(h2.Bytes()))); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := buf.Write(h2.Bytes()); err != nil {\n\t\tpanic(err)\n\t}\n\n\tif !hmac.Equal(s, buf.Bytes()) {\n\t\treturn nilIndex, ErrInvalidVRF\n\t}\n\treturn sha256.Sum256(vrf), nil\n}",
"func (p point) h(delta int) point {\n\treturn point{\n\t\tx: p.x + int64(delta),\n\t\ty: p.y,\n\t}\n}",
"func (pk *VrfablePublicKey) ProofToHash(m, proof []byte) (index [32]byte, err error) {\n\tnilIndex := [32]byte{}\n\t// verifier checks that s == H2(m, [t]G + [s]([k]G), [t]H1(m) + [s]VRF_k(m))\n\tif got, want := len(proof), 64+65; got != want {\n\t\treturn nilIndex, ErrInvalidVRF\n\t}\n\n\t// Parse proof into s, t, and vrf.\n\ts := proof[0:32]\n\tt := proof[32:64]\n\tvrf := proof[64 : 64+65]\n\n\t// uHx, uHy := elliptic.Unmarshal(curve, vrf)\n\tuHx, uHy := curve.Unmarshal(vrf) //////???\n\tif uHx == nil {\n\t\treturn nilIndex, ErrInvalidVRF\n\t}\n\n\t// [t]G + [s]([k]G) = [t+ks]G\n\ttGx, tGy := curve.ScalarBaseMult(t)\n\tksGx, ksGy := curve.ScalarMult(pk.X, pk.Y, s)\n\ttksGx, tksGy := curve.Add(tGx, tGy, ksGx, ksGy)\n\n\t// H = H1(m)\n\t// [t]H + [s]VRF = [t+ks]H\n\tHx, Hy := H1(m)\n\ttHx, tHy := curve.ScalarMult(Hx, Hy, t)\n\tsHx, sHy := curve.ScalarMult(uHx, uHy, s)\n\ttksHx, tksHy := curve.Add(tHx, tHy, sHx, sHy)\n\n\t// H2(G, H, [k]G, VRF, [t]G + [s]([k]G), [t]H + [s]VRF)\n\t// = H2(G, H, [k]G, VRF, [t+ks]G, [t+ks]H)\n\t// = H2(G, H, [k]G, VRF, [r]G, [r]H)\n\tvar b bytes.Buffer\n\tb.Write(curve.Marshal(params.Gx, params.Gy))\n\tb.Write(curve.Marshal(Hx, Hy))\n\tb.Write(curve.Marshal(pk.X, pk.Y))\n\tb.Write(vrf)\n\tb.Write(curve.Marshal(tksGx, tksGy))\n\tb.Write(curve.Marshal(tksHx, tksHy))\n\th2 := H2(b.Bytes())\n\n\t// Left pad h2 with zeros if needed. This will ensure that h2 is padded\n\t// the same way s is.\n\tvar buf bytes.Buffer\n\tbuf.Write(make([]byte, 32-len(h2.Bytes())))\n\tbuf.Write(h2.Bytes())\n\n\tif !hmac.Equal(s, buf.Bytes()) {\n\t\treturn nilIndex, ErrInvalidVRF\n\t}\n\treturn sha256.Sum256(vrf), nil\n}",
"func generateH() ristretto.Point {\n\tvar random ristretto.Scalar\n\tvar H ristretto.Point\n\trandom.Rand()\n\tH.ScalarMultBase(&random)\n\treturn H\n}",
"func (o Orbit) H() []float64 {\n\treturn Cross(o.RV())\n}",
"func bls_hash_to_point(M []byte) *ECP {\n\tDST := []byte(\"BLS_SIG_FP256BNG1_XMD:SHA-256_SVDW_RO_NUL_\")\n\tu := hash_to_field(core.MC_SHA2,HASH_TYPE,DST,M,2)\n\n\tP:=ECP_map2point(u[0])\n\tP1 := ECP_map2point(u[1]);\n\tP.Add(P1)\n\tP.Cfp()\n\tP.Affine()\n\treturn P\n}",
"func (g *G1) Hash(input, dst []byte) {\n\tconst L = 64\n\tpseudo := expander.NewExpanderMD(crypto.SHA256, dst).Expand(input, 2*L)\n\n\tvar u0, u1 ff.Fp\n\tu0.SetBytes(pseudo[0*L : 1*L])\n\tu1.SetBytes(pseudo[1*L : 2*L])\n\n\tvar q0, q1 isogG1Point\n\tq0.sswu(&u0)\n\tq1.sswu(&u1)\n\tvar p0, p1 G1\n\tp0.evalIsogG1(&q0)\n\tp1.evalIsogG1(&q1)\n\tg.Add(&p0, &p1)\n\tg.clearCofactor()\n}",
"func (c *core) HashToCurveTryAndIncrement(pk *point, alpha []byte) (*point, error) {\n\thasher := c.getHasher()\n\thash := make([]byte, 1+hasher.Size())\n\thash[0] = 2 // compress format\n\n\t// step 1: ctr = 0\n\tctr := 0\n\n\t// step 2: PK_string = point_to_string(Y)\n\tpkBytes := c.Marshal(pk)\n\n\t// step 3 ~ 6\n\tprefix := []byte{c.SuiteString, 0x01}\n\tsuffix := []byte{0}\n\tfor ; ctr < 256; ctr++ {\n\t\t// hash_string = Hash(suite_string || one_string || PK_string || alpha_string || ctr_string)\n\t\tsuffix[0] = byte(ctr)\n\t\thasher.Reset()\n\t\thasher.Write(prefix)\n\t\thasher.Write(pkBytes)\n\t\thasher.Write(alpha)\n\t\thasher.Write(suffix)\n\t\t// apppend right after compress format\n\t\thasher.Sum(hash[1:1])\n\n\t\t// H = arbitrary_string_to_point(hash_string)\n\t\tif H := c.Unmarshal(hash); H != nil {\n\t\t\tif c.Cofactor > 1 {\n\t\t\t\t// If H is not \"INVALID\" and cofactor > 1, set H = cofactor * H\n\t\t\t\tH = c.ScalarMult(H, []byte{c.Cofactor})\n\t\t\t}\n\t\t\treturn H, nil\n\t\t}\n\t}\n\treturn nil, errors.New(\"no valid point found\")\n}",
"func hashSchnorr(suite *bn256.Suite, message []byte, p kyber.Point) kyber.Scalar {\n pb, _ := p.MarshalBinary()\n c := suite.XOF(pb)\n c.Write(message)\n return suite.G1().Scalar().Pick(c)\n}",
"func getHash(p Point, precision Accuracy) (uint64, error) {\n\thash, err := hashstructure.Hash(p.truncate(precision), nil)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"unable to get hash point\")\n\t}\n\treturn hash, nil\n}",
"func GenerateH() *ristretto.Point {\n\trandom := new(ristretto.Scalar).Rand()\n\tH := new(ristretto.Point)\n\tH.ScalarMultBase(random)\n\treturn H\n}",
"func (p *Path) Ha(xs ...float64) *Path {\n\treturn p.addCmd(\"H\", hCmd{xs: xs})\n}",
"func hashToPoint(suite suites.Suite, msg []byte) kyber.Point {\n\thash := sha3.NewLegacyKeccak256()\n\tvar buf []byte\n\thash.Write(msg)\n\tbuf = hash.Sum(buf)\n\tx := suite.G1().Scalar().SetBytes(buf)\n\tpoint := suite.G1().Point().Mul(x, nil)\n\treturn point\n}",
"func DrawHLine(m draw.Image, x1, y, x2 int) {\n\tfor ; x1 <= x2; x1++ {\n\t\tm.Set(x1, y, color.RGBA{0, 0, 255, 255})\n\t}\n}",
"func H2(m []byte) *big.Int {\n\t// NIST SP 800-90A § A.5.1: Simple discard method.\n\tbyteLen := (curve.BitSize + 7) >> 3\n\th := sha512.New()\n\tfor i := uint32(0); ; i++ {\n\t\t// TODO: Use a NIST specified DRBG.\n\t\th.Reset()\n\t\tif err := binary.Write(h, binary.BigEndian, i); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tif _, err := h.Write(m); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tb := h.Sum(nil)\n\t\tk := new(big.Int).SetBytes(b[:byteLen])\n\t\tif k.Cmp(new(big.Int).Sub(curve.N, one)) == -1 {\n\t\t\treturn k.Add(k, one)\n\t\t}\n\t}\n}",
"func (params *SRPParams) calculateM1(username, salt, A, B, K []byte) []byte {\n\t/*\n\t\ti = H(g) xor H(N)\n\t\tM1 = H(i) + H(I) + H(salt) + H(A) + H(B) + H(K)\n\t\t+ ==> sha256_update\n\t*/\n\t// A,B 必须对齐,不然gg\n\tdigestn := params.Digest(padToN(params.G, params))\n\tdigestg := params.Digest(params.N.Bytes())\n\tdigesti := params.Digest(username)\n\thxor := make([]byte, len(digestn))\n\tfor i := range digestn {\n\t\thxor[i] = digestn[i] ^ digestg[i]\n\t}\n\th := params.Hash.New()\n\th.Write(hxor)\n\th.Write(digesti)\n\th.Write(salt)\n\th.Write(A)\n\th.Write(B)\n\th.Write(K)\n\tm1 := h.Sum(nil)\n\treturn m1\n}",
"func (party *EcdsaParty) Phase1(pub2 *PubKey) *PubKey {\n\tprv := party.prv\n\tpub := prv.PubKey()\n\tcurve := pub.Curve\n\n\tpx, py := curve.ScalarMult(pub2.X, pub2.Y, prv.D.Bytes())\n\treturn &PubKey{X: px, Y: py, Curve: curve}\n}",
"func h(x float64) float64 {\n\treturn math.Sin(x)\n}",
"func (h *Histogram) Delta1h() HistogramValue {\n\tb := make([]HistogramBucket, len(h.buckets))\n\tfor i, v := range h.buckets {\n\t\tb[i] = HistogramBucket{\n\t\t\tLowBound: v.lowBound,\n\t\t\tCount: v.count.Delta1h(),\n\t\t}\n\t}\n\n\tv := HistogramValue{\n\t\tCount: h.count.Delta1h(),\n\t\tSum: h.sum.Delta1h(),\n\t\tSumOfSquares: h.sumOfSquares.Delta1h(),\n\t\tMin: h.tracker.Min1h(),\n\t\tMax: h.tracker.Max1h(),\n\t\tBuckets: b,\n\t}\n\treturn v\n}",
"func encodeHash(x uint64, p, pPrime uint) (hashCode uint64) {\n\tif x&onesFromTo(64-pPrime, 63-p) == 0 {\n\t\tr := rho(extractShift(x, 0, 63-pPrime))\n\t\treturn concat([]concatInput{\n\t\t\t{x, 64 - pPrime, 63},\n\t\t\t{uint64(r), 0, 5},\n\t\t\t{1, 0, 0}, // this just adds a 1 bit at the end\n\t\t})\n\t} else {\n\t\treturn concat([]concatInput{\n\t\t\t{x, 64 - pPrime, 63},\n\t\t\t{0, 0, 0}, // this just adds a 0 bit at the end\n\t\t})\n\t}\n}",
"func hline (x1 int, x2 int, y int) {\n\tfor n := x1; n < x2; n++ {\n\t\t\timg.Set(n, y, col)\n\t}\n}",
"func (_Ethdkg *EthdkgCaller) HashToG1(opts *bind.CallOpts, message []byte) ([2]*big.Int, error) {\n\tvar (\n\t\tret0 = new([2]*big.Int)\n\t)\n\tout := ret0\n\terr := _Ethdkg.contract.Call(opts, out, \"HashToG1\", message)\n\treturn *ret0, err\n}",
"func h(a, b []int32) float64 {\n\tx1, y1, x2, y2 := a[0], a[1], b[0], b[1]\n\treturn math.Abs(float64(x1)-float64(x2)) + math.Abs(float64(y1)-float64(y2))\n}",
"func Secp256k1Hash(hash []byte) []byte {\n\thash = SumSHA256(hash)\n\t_, seckey := generateDeterministicKeyPair(hash) //seckey1 is usually sha256 of hash\n\tpubkey, _ := generateDeterministicKeyPair(SumSHA256(hash)) //SumSHA256(hash) equals seckey usually\n\tecdh := ECDH(pubkey, seckey) //raise pubkey to power of seckey in curve\n\treturn SumSHA256(append(hash, ecdh...)) //append signature to sha256(seed) and hash\n}",
"func (p *Path) Hr(xs ...float64) *Path {\n\treturn p.addCmd(\"h\", hCmd{xs: xs})\n}",
"func (k PrivateKey) Evaluate(m []byte) (index [32]byte, proof []byte) {\n\tnilIndex := [32]byte{}\n\t// Prover chooses r <-- [1,N-1]\n\tr, _, _, err := elliptic.GenerateKey(curve, rand.Reader)\n\tif err != nil {\n\t\treturn nilIndex, nil\n\t}\n\tri := new(big.Int).SetBytes(r)\n\n\t// H = H1(m)\n\tHx, Hy := H1(m)\n\n\t// VRF_k(m) = [k]H\n\tsHx, sHy := curve.ScalarMult(Hx, Hy, k.D.Bytes())\n\tvrf := elliptic.Marshal(curve, sHx, sHy) // 65 bytes.\n\n\t// G is the base point\n\t// s = H2(G, H, [k]G, VRF, [r]G, [r]H)\n\trGx, rGy := curve.ScalarBaseMult(r)\n\trHx, rHy := curve.ScalarMult(Hx, Hy, r)\n\tvar b bytes.Buffer\n\tif _, err := b.Write(elliptic.Marshal(curve, curve.Gx, curve.Gy)); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(elliptic.Marshal(curve, Hx, Hy)); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(elliptic.Marshal(curve, k.PublicKey.X, k.PublicKey.Y)); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(vrf); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(elliptic.Marshal(curve, rGx, rGy)); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(elliptic.Marshal(curve, rHx, rHy)); err != nil {\n\t\tpanic(err)\n\t}\n\ts := H2(b.Bytes())\n\n\t// t = r−s*k mod N\n\tt := new(big.Int).Sub(ri, new(big.Int).Mul(s, k.D))\n\tt.Mod(t, curve.N)\n\n\t// Index = H(vrf)\n\tindex = sha256.Sum256(vrf)\n\n\t// Write s, t, and vrf to a proof blob. Also write leading zeros before s and t\n\t// if needed.\n\tvar buf bytes.Buffer\n\tif _, err := buf.Write(make([]byte, 32-len(s.Bytes()))); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := buf.Write(s.Bytes()); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := buf.Write(make([]byte, 32-len(t.Bytes()))); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := buf.Write(t.Bytes()); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := buf.Write(vrf); err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn index, buf.Bytes()\n}",
"func (_Ethdkg *EthdkgCallerSession) HashToG1(message []byte) ([2]*big.Int, error) {\n\treturn _Ethdkg.Contract.HashToG1(&_Ethdkg.CallOpts, message)\n}",
"func siphash(k0, k1, m uint64) uint64 {\n\t// Initialization.\n\tv0 := k0 ^ 0x736f6d6570736575\n\tv1 := k1 ^ 0x646f72616e646f6d\n\tv2 := k0 ^ 0x6c7967656e657261\n\tv3 := k1 ^ 0x7465646279746573\n\tt := uint64(8) << 56\n\n\t// Compression.\n\tv3 ^= m\n\n\t// Round 1.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\t// Round 2.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\tv0 ^= m\n\n\t// Compress last block.\n\tv3 ^= t\n\n\t// Round 1.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\t// Round 2.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\tv0 ^= t\n\n\t// Finalization.\n\tv2 ^= 0xff\n\n\t// Round 1.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\t// Round 2.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\t// Round 3.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\t// Round 4.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\treturn v0 ^ v1 ^ v2 ^ v3\n}",
"func hDist(p1, p2 pos) float64 {\n\treturn 2 * rEarth *\n\t\tmath.Asin(math.Sqrt(haversine(p2.latRad-p1.latRad)+\n\t\t\tmath.Cos(p1.latRad)*math.Cos(p2.latRad)*haversine(p2.lonRad-p1.lonRad)))\n}",
"func MortonHash(lon, lat float64) uint64 {\n\treturn numeric.Interleave(scaleLon(lon), scaleLat(lat))\n}",
"func (_Ethdkg *EthdkgSession) HashToG1(message []byte) ([2]*big.Int, error) {\n\treturn _Ethdkg.Contract.HashToG1(&_Ethdkg.CallOpts, message)\n}",
"func (m *Matrix) H() int {\n\treturn int(m.focus.Bounds().Y)\n}",
"func ( self * Akima_curve )\tPoint ( x float64 )\t\tfloat64\t{\n\tvar (\n\t\tx_minus_x1 = x - self.X1\n\t\tx_minus_x1_pow2\t= x_minus_x1 * x_minus_x1\n\t)\n\treturn\tself.p0 + self.p3 * x_minus_x1 * x_minus_x1_pow2 +\n\t\t\tself.T1 * x_minus_x1 + self.p2 * x_minus_x1_pow2\n}",
"func Secp256k1Hash(seed []byte) []byte { //nolint:golint\n\thash := SumSHA256(seed)\n\t_, seckey := deterministicKeyPairIteratorStep(hash) // seckey1 is usually sha256 of hash\n\tpubkeySeed := SumSHA256(hash) // SumSHA256(hash) usually equals seckey\n\tpubkey, _ := deterministicKeyPairIteratorStep(pubkeySeed)\n\tecdh := ECDH(pubkey, seckey) // raise pubkey to power of seckey in curve\n\tout := SumSHA256(append(hash, ecdh...)) // append signature to sha256(seed) and hash\n\treturn out\n}",
"func H(data string) string {\n\tdigest := md5.New()\n\tdigest.Write([]byte(data))\n\treturn fmt.Sprintf(\"%x\", digest.Sum(nil))\n}",
"func H(data string) string {\n\tdigest := md5.New()\n\tdigest.Write([]byte(data))\n\treturn fmt.Sprintf(\"%x\", digest.Sum(nil))\n}",
"func (ph *PHash) computeHash(img [][]float32) hashtype.Binary {\n\t// TODO: Remove magic numbers\n\thash := make(hashtype.Binary, 8)\n\tvar c uint\n\tfor i := range img {\n\t\tfor j := range img[i] {\n\t\t\tif img[i][j] != 0 {\n\t\t\t\thash.Set(c)\n\t\t\t}\n\t\t\tc++\n\t\t}\n\t}\n\treturn hash\n}",
"func mathExpm1(ctx phpv.Context, args []*phpv.ZVal) (*phpv.ZVal, error) {\n\tvar x phpv.ZFloat\n\t_, err := core.Expand(ctx, args, &x)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn phpv.ZFloat(math.Expm1(float64(x))).ZVal(), nil\n}",
"func SignHashed(curve CurveSystem, sk Privkey, msg []byte) Signature {\n\treturn SignCustHash(curve, sk, msg, blake2b256)\n}",
"func (p *Processor) getHash(x *mat.Dense) int {\n\th := x.T().Mul(p.r.Value())\n\tconcat := mat.ConcatV(h, h.ProdScalar(-1.0))\n\treturn f64utils.ArgMax(concat.Data())\n}",
"func (m *Matrix) HAbs() int {\n\treturn int(m.y)\n}",
"func (c1 CIELab) DeltaH(c2 CIELab) float64 {\n\txDE := math.Sqrt(math.Pow(c2.A, 2)+math.Pow(c2.B, 2)) -\n\t\tmath.Sqrt(math.Pow(c1.A, 2)+math.Pow(c1.B, 2))\n\n\treturn math.Sqrt(math.Pow(c2.A-c1.A, 2) +\n\t\tmath.Pow(c2.B-c1.B, 2) - math.Pow(xDE, 2))\n}",
"func (h PublicKey) Hex() string { return strings.ToLower(hex.EncodeToString(h[:])) }",
"func HmacSha1(message []byte, secret string) []byte {\n\th := hmac.New(sha1.New, []byte(secret))\n\th.Write(message)\n\treturn h.Sum(nil)\n}",
"func Marshal(curve elliptic.Curve, x, y *big.Int) []byte",
"func Hashes(r Roller, p []byte) []uint64 {\n\tn := r.Len()\n\tif len(p) < n {\n\t\treturn nil\n\t}\n\th := make([]uint64, len(p)-n+1)\n\tfor i := 0; i < n-1; i++ {\n\t\tr.RollByte(p[i])\n\t}\n\tfor i := range h {\n\t\th[i] = r.RollByte(p[i+n-1])\n\t}\n\treturn h\n}",
"func (g SimplePoint) Geohash(precision int) (string, error) {\n\tp := g.CalculatedPoint()\n\treturn geohash.Encode(p.Y, p.X, precision)\n}",
"func Ripemd160H(d []byte) []byte {\n\th := ripemd160.New()\n\th.Write(d)\n\treturn h.Sum(nil)\n}",
"func Hann(input VectorComplex) VectorComplex {\n\tvh := input.Copy()\n\ttheta := 2.0 * math.Pi / float64(len(vh)-1)\n\tfor i := 0; i < len(vh); i++ {\n\t\tx := complex(theta*float64(i), 0)\n\t\tvh[i] *= (1.0 - cmplx.Cos(x)) / 2.0\n\t}\n\treturn vh\n}",
"func HashASM(k0, k1 uint64, p []byte) uint64",
"func Hash(k0, k1 uint64, p []byte) uint64 {\n\tvar d digest\n\td.size = Size\n\td.k0 = k0\n\td.k1 = k1\n\td.Reset()\n\td.Write(p)\n\treturn d.Sum64()\n}",
"func DhashHorizontal(img image.Image, hashLen int) ([]byte, error) {\n imgGray := imaging.Grayscale(img) // Grayscale image first\n return horizontalGradient(imgGray, hashLen) // horizontal diff gradient\n}",
"func (ts *TagSet) HashH() uint64 {\n\treturn ts.hashH\n}",
"func (h *Header) Hash() [32]byte {\n\tvar f []string\n\tif h.Description.Value != \"\" {\n\t\tf = append(f, h.Description.Value)\n\t}\n\tf = append(f, fmt.Sprint(h.Required.Value))\n\tf = append(f, fmt.Sprint(h.Deprecated.Value))\n\tf = append(f, fmt.Sprint(h.AllowEmptyValue.Value))\n\tif h.Style.Value != \"\" {\n\t\tf = append(f, h.Style.Value)\n\t}\n\tf = append(f, fmt.Sprint(h.Explode.Value))\n\tf = append(f, fmt.Sprint(h.AllowReserved.Value))\n\tif h.Schema.Value != nil {\n\t\tf = append(f, low.GenerateHashString(h.Schema.Value))\n\t}\n\tif h.Example.Value != nil {\n\t\tf = append(f, fmt.Sprint(h.Example.Value))\n\t}\n\tif len(h.Examples.Value) > 0 {\n\t\tfor k := range h.Examples.Value {\n\t\t\tf = append(f, fmt.Sprintf(\"%s-%x\", k.Value, h.Examples.Value[k].Value.Hash()))\n\t\t}\n\t}\n\tif len(h.Content.Value) > 0 {\n\t\tfor k := range h.Content.Value {\n\t\t\tf = append(f, fmt.Sprintf(\"%s-%x\", k.Value, h.Content.Value[k].Value.Hash()))\n\t\t}\n\t}\n\tkeys := make([]string, len(h.Extensions))\n\tz := 0\n\tfor k := range h.Extensions {\n\t\tkeys[z] = fmt.Sprintf(\"%s-%x\", k.Value, sha256.Sum256([]byte(fmt.Sprint(h.Extensions[k].Value))))\n\t\tz++\n\t}\n\tsort.Strings(keys)\n\tf = append(f, keys...)\n\treturn sha256.Sum256([]byte(strings.Join(f, \"|\")))\n}",
"func (ctx *Context) h(left, right, pubSeed []byte, addr address) []byte {\n\tret := make([]byte, ctx.p.N)\n\tctx.hInto(ctx.newScratchPad(), left, right,\n\t\tctx.precomputeHashes(pubSeed, nil), addr, ret)\n\treturn ret\n}",
"func Expm1(x float64) float64 {\n\n\treturn math.Exp(x) - 1\n}",
"func HLine(img *image.RGBA, x1, y, x2 int, col color.Color) {\n\tfor ; x1 <= x2; x1++ {\n\t\timg.Set(x1, y, col)\n\t}\n}",
"func ( self * Akima_curve )\tPrev_curve ( data_points * [][] float64 )\t\t( prev * Akima_curve )\t{\n\n\tvar points_len\t= uint ( len ( * data_points ) )\n//\tCare : Uint 0 -1 ~ undefined\n\tif\tself.Index_x1 == 0\t{\treturn\tnil\t}\n\n\tprev\t= new ( Akima_curve )\n\tprev.Index_x1\t= self.Index_x1 -1\n\n\tprev.X1\t= ( * data_points ) [ prev.Index_x1 ][ 0 ]\n\tprev.T1\t= slope_five_point ( data_points, points_len, prev.Index_x1 )\n\tprev.X2, prev.T2\t= self.X1 ,\tself.T1\n\n\tprev.set_coefficients ( ( * data_points ) [ prev.Index_x1 ][ 1 ] , ( * data_points ) [ self.Index_x1 ][ 1 ] ) ;\treturn\n}",
"func P384() Curve { return p384 }",
"func h(data string) string {\n\tdigest := md5.New()\n\tdigest.Write([]byte(data))\n\treturn fmt.Sprintf(\"%x\", digest.Sum(nil))\n}",
"func (b *Builder) AltH1(header string) *Builder {\n\theader = reCRLN.ReplaceAllString(header, \" \")\n\theader = strings.TrimSpace(header)\n\treturn b.writeln(header).writeln(strings.Repeat(\"=\", charLen(header))).nl()\n}",
"func Hash1(usr string) string {\n\tbytes := []byte(usr)\n\thash_bytes := hasher1.Sum(bytes)\n\treturn hex.EncodeToString(hash_bytes)\n}",
"func Hann(x float64, n int) float64 {\n\treturn 0.5 - 0.5*math.Cos(2*x*math.Pi/float64(n))\n}",
"func Hsl(h, s, l float64) Color {\r\n if s == 0 {\r\n return Color{l, l, l}\r\n }\r\n\r\n var r, g, b float64\r\n var t1 float64\r\n var t2 float64\r\n var tr float64\r\n var tg float64\r\n var tb float64\r\n\r\n if l < 0.5 {\r\n t1 = l * (1.0 + s)\r\n } else {\r\n t1 = l + s - l*s\r\n }\r\n\r\n t2 = 2*l - t1\r\n h = h / 360\r\n tr = h + 1.0/3.0\r\n tg = h\r\n tb = h - 1.0/3.0\r\n\r\n if tr < 0 {\r\n tr += 1\r\n }\r\n if tr > 1 {\r\n tr -= 1\r\n }\r\n if tg < 0 {\r\n tg += 1\r\n }\r\n if tg > 1 {\r\n tg -= 1\r\n }\r\n if tb < 0 {\r\n tb += 1\r\n }\r\n if tb > 1 {\r\n tb -= 1\r\n }\r\n\r\n // Red\r\n if 6*tr < 1 {\r\n r = t2 + (t1-t2)*6*tr\r\n } else if 2*tr < 1 {\r\n r = t1\r\n } else if 3*tr < 2 {\r\n r = t2 + (t1-t2)*(2.0/3.0-tr)*6\r\n } else {\r\n r = t2\r\n }\r\n\r\n // Green\r\n if 6*tg < 1 {\r\n g = t2 + (t1-t2)*6*tg\r\n } else if 2*tg < 1 {\r\n g = t1\r\n } else if 3*tg < 2 {\r\n g = t2 + (t1-t2)*(2.0/3.0-tg)*6\r\n } else {\r\n g = t2\r\n }\r\n\r\n // Blue\r\n if 6*tb < 1 {\r\n b = t2 + (t1-t2)*6*tb\r\n } else if 2*tb < 1 {\r\n b = t1\r\n } else if 3*tb < 2 {\r\n b = t2 + (t1-t2)*(2.0/3.0-tb)*6\r\n } else {\r\n b = t2\r\n }\r\n\r\n return Color{r, g, b}\r\n}",
"func hubbleTime(H0 float64) (timeGyr float64) {\n\thubbleTime := (1 / H0) // 1/(km/s/Mpc) = Mpc s / km\n\thubbleTime *= kmInAMpc // s\n\thubbleTime /= secInAGyr // Gyr\n\n\treturn hubbleTime\n}",
"func (o Orbit) HNorm() float64 {\n\treturn o.RNorm() * o.VNorm() * o.CosΦfpa()\n}",
"func GenerateOneTimePK(pkP publicKeyPair, hasher hash.Hash, Curve elliptic.Curve) (Px, Py, Rx, Ry *big.Int) {\n\n\tif Curve == nil {\n\t\tCurve = defaultCurve\n\t}\n\n\tif hasher == nil {\n\t\thasher = defaultHasher\n\t} else if hasher.Size() != 32 {\n\t\tpanic(\"only hashes with outputsize of 32 bytes allowed!\", )\n\t}\n\n\thasher.Reset()\n\tr := RandFieldElement(Curve)\n\t// X1,y1 = Hs(rA)G\n\tPx, Py = Curve.ScalarMult(pkP.Ax, pkP.Ay, r.Bytes())\n\tre := hasher.Sum(append(Px.Bytes()[:], Py.Bytes()[:]...))\n\tra := new(big.Int).SetBytes(re[:])\n\tra.Mod(ra, Curve.Params().N)\n\tPx, Py = Curve.ScalarBaseMult(ra.Bytes())\n\t//+BG\n\tPx, Py = Curve.Add(Px, Py, pkP.Bx, pkP.By)\n\tRx, Ry = Curve.ScalarBaseMult(r.Bytes())\n\treturn\n}",
"func hammingDistance(x int, y int) int {\n\n}",
"func (this *NurbsCurve) Point(u float64) vec3.T {\n\thomoPt := this.nonRationalPoint(u)\n\treturn homoPt.Dehomogenized()\n}",
"func (e *Edge) P1() int {\n\treturn e.p1\n}",
"func HmacSha1ToHex(message []byte, secret string) string {\n\th := hmac.New(sha1.New, []byte(secret))\n\th.Write(message)\n\treturn hex.EncodeToString(h.Sum(nil))\n}",
"func (b *Builder) H1(text string) *Builder {\n\treturn b.writeln(header(text, 1)).nl()\n}",
"func (t Tuple1) Hash() uint32 {\n\tif t.E1 == nil {\n\t\treturn 0\n\t}\n\treturn t.E1.Hash()\n}",
"func (ch *ColorMoment) Calculate(img image.Image) hashtype.Float64 {\n\tr := imgproc.Resize(ch.width, ch.height, img, ch.interp)\n\tb := imgproc.GaussianBlur(r, ch.kernel, ch.sigma)\n\tyrb, _ := imgproc.YCrCb(b)\n\thsv, _ := imgproc.HSV(b)\n\tyrbMom := imgproc.GetMoments(yrb)\n\t// Switch R and B channels\n\tyrbMom[0], yrbMom[2] = yrbMom[2], yrbMom[0]\n\thsvMom := imgproc.GetMoments(hsv)\n\t// Switch R and B channels\n\thsvMom[0], hsvMom[2] = hsvMom[2], hsvMom[0]\n\tyHuMom := imgproc.HuMoments(yrbMom)\n\thHuMom := imgproc.HuMoments(hsvMom)\n\thash := make(hashtype.Float64, len(hHuMom)+len(yHuMom))\n\tvar i int\n\tfor i = 0; i < len(hHuMom); i++ {\n\t\thash[i] = hHuMom[i]\n\t}\n\tfor ; i < len(hHuMom)+len(yHuMom); i++ {\n\t\thash[i] = yHuMom[i-len(hHuMom)]\n\t}\n\treturn hash\n}",
"func (in *Instance) hash(x, y, mu *big.Int, T uint64) *big.Int {\n\tb := sha512.New()\n\tb.Write(x.Bytes())\n\tb.Write(y.Bytes())\n\tb.Write(mu.Bytes())\n\tbits := make([]byte, 8)\n\tbinary.LittleEndian.PutUint64(bits, T)\n\tb.Write(bits)\n\tres := new(big.Int).SetBytes(b.Sum(nil))\n\tres.Mod(res, in.rsaModulus)\n\treturn res\n}",
"func ScalarMultH(scalar *Key) (result *Key) {\n\th := new(ExtendedGroupElement)\n\th.FromBytes(&H)\n\tresultPoint := new(ProjectiveGroupElement)\n\tGeScalarMult(resultPoint, scalar, h)\n\tresult = new(Key)\n\tresultPoint.ToBytes(result)\n\treturn\n}",
"func SToH(utfString string) string {\n\treturn hex.EncodeToString([]byte(utfString))\n}",
"func HADDPD(mx, x operand.Op) { ctx.HADDPD(mx, x) }",
"func hash(key string) int{\n\tvar num = 0\n\t// get the lenght of the key\n\tvar length = len(key)\n\n\t// add the ascii character value to creat a sum \n\tfor i := 0; i < length; i++{\n\n\t\tnum += int(key[i])\n\t}\n\t\n\t// square in the middle hash method\n\tvar avg = num * int((math.Pow(5.0, 0.5) - 1)) / 2\n\tvar numeric = avg - int(math.Floor(float64(avg)))\n\n\n\t// hash value to place into the table slice between -1 and CAPACITY - 1\n\treturn int(math.Floor(float64(numeric * CAPACITY)))\n}",
"func computeHPrime(H []*Point, y *big.Int) []*Point {\n\t// y^{-n} == (y^{-1})^n\n\tyExpNegN := powers(Inv(y), len(H))\n\n\t// H' is a set of generators derived from H. (64)\n\tHprime := make([]*Point, len(H))\n\tfor i := range H {\n\t\tHprime[i] = ScalarMulPoint(H[i], yExpNegN[i])\n\t}\n\n\treturn Hprime\n}",
"func (s *CountMinSketch) baseHashes(key []byte) [2]uint32 {\n\ts.h.Reset()\n\ts.h.Write(key)\n\tsum := s.h.Sum(nil)\n\treturn [2]uint32{binary.BigEndian.Uint32(sum[0:4]), binary.BigEndian.Uint32(sum[4:8])}\n}",
"func NewH1(text string) Element {\n\treturn NewHeader(1, text)\n}",
"func HashFileSha1(hs string) []byte {\n\tfile, err := os.Open(hs)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tdefer func() {\n\t\terr := file.Close()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tdata := sha1.New()\n\tif _, err := io.Copy(data, file); err != nil {\n\t\tpanic(err)\n\t}\n\n\thash := data.Sum(nil)\n\treturn hash[:]\n}",
"func HADDPS(mx, x operand.Op) { ctx.HADDPS(mx, x) }",
"func (self *State)Atanh(a any)any{\n self.IncOperations(self.coeff[\"atanh\"]+self.off[\"atanh\"])\n return wrap1(a,math.Atanh)\n}",
"func hash(m datasource.Metric) uint64 {\n\thash := fnv.New64a()\n\tlabels := m.Labels\n\tsort.Slice(labels, func(i, j int) bool {\n\t\treturn labels[i].Name < labels[j].Name\n\t})\n\tfor _, l := range labels {\n\t\t// drop __name__ to be consistent with Prometheus alerting\n\t\tif l.Name == \"__name__\" {\n\t\t\tcontinue\n\t\t}\n\t\thash.Write([]byte(l.Name))\n\t\thash.Write([]byte(l.Value))\n\t\thash.Write([]byte(\"\\xff\"))\n\t}\n\treturn hash.Sum64()\n}",
"func pkcs1v15HashInfo(hash crypto.Hash, inLen int) (hashLen int, prefix []byte, err error) {\n\t// Special case: crypto.Hash(0) is used to indicate that the data is\n\t// signed directly.\n\tif hash == 0 {\n\t\treturn inLen, nil, nil\n\t}\n\n\thashLen = hash.Size()\n\tif inLen != hashLen {\n\t\treturn 0, nil, errors.New(\"input must be hashed message\")\n\t}\n\n\t// CHANGE: Using our private prefix table as it contains the new SHA3 algorithm IDs\n\tprefix, ok := pkcs1Prefix[hash]\n\tif !ok {\n\t\treturn 0, nil, errors.New(\"unsupported hash function\")\n\t}\n\treturn\n}",
"func (curve *EdCurve) ToMontgomeryPointForm1(sqrtB *big.Int, p *EcPoint) (p1, p2 *EcPoint) {\n\toneSubY := new(big.Int).Sub(ONE, p.Y) // 1-y\n\toneAddY := new(big.Int).Add(ONE, p.Y) // 1+y\n\tp1, p2 = NewPoint(), NewPoint()\n\tp1.X = ModFraction(oneAddY, oneSubY, curve.P) // (1+y)/(1-y)\n\tp1.Y = ModFraction(p1.X, p.X, curve.P) // u/x\n\tp1.Y.Mul(p1.Y, sqrtB) // sqrtB * u/x\n\tp1.Y.Mod(p1.Y, curve.P)\n\n\tp2.X = ModFraction(oneSubY, oneAddY, curve.P) // (1-y)/(1+y)\n\tp2.Y = ModFraction(p2.X, p.X, curve.P) // u/x\n\tp2.Y.Mul(p2.Y, sqrtB) // sqrtB * u/x\n\tp2.Y.Mod(p2.Y, curve.P)\n\treturn\n}",
"func (c *core) HashPoints(points ...*point) *big.Int {\n\thasher := c.getHasher()\n\thasher.Write([]byte{c.SuiteString, 0x2})\n\tfor _, pt := range points {\n\t\thasher.Write(c.Marshal(pt))\n\t}\n\treturn bits2int(hasher.Sum(nil), c.N()*8)\n}",
"func HashPoint(x, xmin, xdel []float64, tol float64) int {\n\tif tol < 1e-15 {\n\t\tchk.Panic(\"HashPoint: minimum tolerance must be 1e-15. %v is invalid\", tol)\n\t}\n\tcoefs := []float64{11, 101, 1001}\n\tn := utl.Imin(len(x), 3)\n\tvar hash, xbar float64\n\tfor i := 0; i < n; i++ {\n\t\tif x[i] < xmin[i] {\n\t\t\tchk.Panic(\"HashPoint: coordinate is outside range: %v < %v\", x[i], xmin[i])\n\t\t}\n\t\tif x[i] > xmin[i]+xdel[i] {\n\t\t\tchk.Panic(\"HashPoint: coordinate is outside range: %v > %v\", x[i], xmin[i]+xdel[i])\n\t\t}\n\t\tif xdel[i] > 0 {\n\t\t\txbar = (x[i] - xmin[i]) / xdel[i]\n\t\t\tif xbar < 0 {\n\t\t\t\txbar = 0\n\t\t\t}\n\t\t\tif xbar > 1 {\n\t\t\t\txbar = 1\n\t\t\t}\n\t\t\thash += (xbar / tol) * coefs[i]\n\t\t}\n\t}\n\treturn int(hash)\n}",
"func (t *openAddressing) hash(key string, round int) uint32 {\n\tnum := uint(stringToInt(key))\n\tmax := uint(len(t.values) - 1)\n\treturn uint32((hashDivision(num, max) + uint(round)*hashDivision2(num, max)) % max)\n}",
"func memhash(p unsafe.Pointer, h, s uintptr) uintptr",
"func memhash(p unsafe.Pointer, h, s uintptr) uintptr",
"func GenerateOneTime_VOTE(key *edwards.PublicKey, hasher hash.Hash, Curve elliptic.Curve) (P, R edwards.PublicKey) {\n\n\tif Curve == nil {\n\t\tCurve = defaultCurve\n\t}\n\n\tif hasher == nil {\n\t\thasher = defaultHasher\n\t} else if hasher.Size() != 32 {\n\t\tpanic(\"only hashes with outputsize of 32 bytes allowed!\", )\n\t}\n\n\thasher.Reset()\n\tr := RandFieldElement(Curve)\n\t// X1,y1 = Hs(rA)G\n\tPx, Py := Curve.ScalarMult(key.X, key.Y, r.Bytes())\n\tre := hasher.Sum(append(Px.Bytes()[:], Py.Bytes()[:]...))\n\tra := new(big.Int).SetBytes(re[:])\n\tra.Mod(ra, Curve.Params().N)\n\tPx, Py = Curve.ScalarBaseMult(ra.Bytes())\n\tP = edwards.PublicKey{X: Px, Y: Py}\n\tRx, Ry := Curve.ScalarBaseMult(r.Bytes())\n\tR = edwards.PublicKey{X: Rx, Y: Ry}\n\treturn\n}",
"func MOVHPS(mx, mx1 operand.Op) { ctx.MOVHPS(mx, mx1) }",
"func (h *Histogram) copyHDataFrom(src *Histogram) {\n\tif h.Divider == src.Divider && h.Offset == src.Offset {\n\t\tfor i := 0; i < len(h.Hdata); i++ {\n\t\t\th.Hdata[i] += src.Hdata[i]\n\t\t}\n\t\treturn\n\t}\n\n\thData := src.Export()\n\tfor _, data := range hData.Data {\n\t\th.record((data.Start+data.End)/2, int(data.Count))\n\t}\n}"
] | [
"0.670059",
"0.6191052",
"0.60214895",
"0.59542876",
"0.58978754",
"0.585371",
"0.5751907",
"0.5730881",
"0.57265484",
"0.56613463",
"0.5578008",
"0.5534485",
"0.54807895",
"0.54439586",
"0.5409997",
"0.5383044",
"0.5313983",
"0.53073287",
"0.5304563",
"0.5226101",
"0.5205866",
"0.5202658",
"0.5182499",
"0.517529",
"0.5146467",
"0.51308787",
"0.5126647",
"0.5089565",
"0.5081696",
"0.5041892",
"0.50393045",
"0.50325924",
"0.50063884",
"0.49845472",
"0.49734113",
"0.49688593",
"0.4958525",
"0.49168682",
"0.4908639",
"0.4908639",
"0.49001205",
"0.4884286",
"0.48840994",
"0.48796174",
"0.48760396",
"0.48738897",
"0.48650077",
"0.48610687",
"0.48581108",
"0.4843927",
"0.4839314",
"0.48245984",
"0.48083338",
"0.48082006",
"0.48078644",
"0.48046607",
"0.4792597",
"0.47901624",
"0.47864193",
"0.47651386",
"0.47533637",
"0.47529027",
"0.47499633",
"0.47395414",
"0.4736165",
"0.4727827",
"0.472598",
"0.47247043",
"0.47073066",
"0.4706501",
"0.46929687",
"0.46856675",
"0.4679661",
"0.4671708",
"0.4663699",
"0.4660283",
"0.46593162",
"0.46568796",
"0.46523973",
"0.4645791",
"0.46416643",
"0.46389243",
"0.46333185",
"0.46248087",
"0.46216178",
"0.46184957",
"0.46149132",
"0.45988256",
"0.45957255",
"0.45902634",
"0.4588229",
"0.45821214",
"0.4580715",
"0.45785505",
"0.45765594",
"0.45711353",
"0.45711353",
"0.45600137",
"0.4553724",
"0.454745"
] | 0.7891151 | 0 |
H2 hashes to an integer [1,N-1] | func H2(m []byte) *big.Int {
// NIST SP 800-90A § A.5.1: Simple discard method.
byteLen := (curve.BitSize + 7) >> 3
h := sha512.New()
for i := uint32(0); ; i++ {
// TODO: Use a NIST specified DRBG.
h.Reset()
if err := binary.Write(h, binary.BigEndian, i); err != nil {
panic(err)
}
if _, err := h.Write(m); err != nil {
panic(err)
}
b := h.Sum(nil)
k := new(big.Int).SetBytes(b[:byteLen])
if k.Cmp(new(big.Int).Sub(curve.N, one)) == -1 {
return k.Add(k, one)
}
}
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func hashToInt(hash []byte) *big.Int {\n\torderBits := S256().Params().N.BitLen()\n\torderBytes := (orderBits + 7) / 8\n\tif len(hash) > orderBytes {\n\t\thash = hash[:orderBytes]\n\t}\n\n\tret := new(big.Int).SetBytes(hash)\n\texcess := len(hash)*8 - orderBits\n\tif excess > 0 {\n\t\tret.Rsh(ret, uint(excess))\n\t}\n\treturn ret\n}",
"func hashToInt(hash []byte, c elliptic.Curve) *big.Int {\n\torderBits := c.Params().N.BitLen()\n\torderBytes := (orderBits + 7) / 8\n\tif len(hash) > orderBytes {\n\t\thash = hash[:orderBytes]\n\t}\n\n\tret := new(big.Int).SetBytes(hash)\n\texcess := len(hash)*8 - orderBits\n\tif excess > 0 {\n\t\tret.Rsh(ret, uint(excess))\n\t}\n\treturn ret\n}",
"func H2(m []byte) *big.Int {\n\t// NIST SP 800-90A § A.5.1: Simple discard method.\n\tbyteLen := (params.BitSize + 7) >> 3\n\th := sha512.New()\n\tfor i := uint32(0); ; i++ {\n\t\t// TODO: Use a NIST specified DRBG.\n\t\th.Reset()\n\t\tbinary.Write(h, binary.BigEndian, i)\n\t\th.Write(m)\n\t\tb := h.Sum(nil)\n\t\tk := new(big.Int).SetBytes(b[:byteLen])\n\t\tif k.Cmp(new(big.Int).Sub(params.N, one)) == -1 {\n\t\t\treturn k.Add(k, one)\n\t\t}\n\t}\n}",
"func hash(key string) int{\n\tvar num = 0\n\t// get the lenght of the key\n\tvar length = len(key)\n\n\t// add the ascii character value to creat a sum \n\tfor i := 0; i < length; i++{\n\n\t\tnum += int(key[i])\n\t}\n\t\n\t// square in the middle hash method\n\tvar avg = num * int((math.Pow(5.0, 0.5) - 1)) / 2\n\tvar numeric = avg - int(math.Floor(float64(avg)))\n\n\n\t// hash value to place into the table slice between -1 and CAPACITY - 1\n\treturn int(math.Floor(float64(numeric * CAPACITY)))\n}",
"func hashToBig(hash [32]byte) *big.Int {\n\t// A Hash is in little-endian, but the big package wants the bytes in\n\t// big-endian, so reverse them.\n\tfor i := 0; i < 32/2; i++ {\n\t\thash[i], hash[32-1-i] = hash[32-1-i], hash[i]\n\t}\n\n\treturn new(big.Int).SetBytes(hash[:])\n}",
"func (s *ShardMap) hash(v interface{}) int {\n\tswitch s.Type {\n\tcase \"string\":\n\t\tval, ok := v.(string)\n\t\tif !ok {\n\t\t\treturn -1\n\t\t}\n\n\t\thash := fnv.New32()\n\t\thash.Write([]byte(val))\n\t\treturn int(hash.Sum32() % NumShards)\n\tcase \"int32\":\n\t\t// Values that come as numbers in JSON are of type float64.\n\t\tval, ok := v.(float64)\n\t\tif !ok {\n\t\t\treturn -1\n\t\t}\n\n\t\treturn int(int32(val) % NumShards)\n\tdefault:\n\t\treturn -1\n\t}\n}",
"func (b *BloomFilter) hash2(value []byte) uint32 {\n\tf := crc32.NewIEEE()\n\tf.Write(value)\n\thash := f.Sum32()\n\treturn hash\n}",
"func (h Hash20) Big() *big.Int { return new(big.Int).SetBytes(h[:]) }",
"func hash_func(x, y, n HashValue) (HashValue) {\n return (x*1640531513 ^ y*2654435789) % n\n}",
"func (in *Instance) hash(x, y, mu *big.Int, T uint64) *big.Int {\n\tb := sha512.New()\n\tb.Write(x.Bytes())\n\tb.Write(y.Bytes())\n\tb.Write(mu.Bytes())\n\tbits := make([]byte, 8)\n\tbinary.LittleEndian.PutUint64(bits, T)\n\tb.Write(bits)\n\tres := new(big.Int).SetBytes(b.Sum(nil))\n\tres.Mod(res, in.rsaModulus)\n\treturn res\n}",
"func IntHashSha256(input []byte) *big.Int {\n\th := sha256.New()\n\th.Write(input)\n\treturn new(big.Int).SetBytes(h.Sum(nil))\n}",
"func hashint(a ...[]byte) *big.Int {\n\ti := big.NewInt(0)\n\tb := hashbyte(a...)\n\ti.SetBytes(b)\n\treturn i\n}",
"func (h Hash) Big() *big.Int { return new(big.Int).SetBytes(h[:]) }",
"func (_Ethdkg *EthdkgCaller) HashToG1(opts *bind.CallOpts, message []byte) ([2]*big.Int, error) {\n\tvar (\n\t\tret0 = new([2]*big.Int)\n\t)\n\tout := ret0\n\terr := _Ethdkg.contract.Call(opts, out, \"HashToG1\", message)\n\treturn *ret0, err\n}",
"func (s *SRP) hashint(a ...[]byte) *big.Int {\n\ti := big.NewInt(0)\n\tb := s.hashbyte(a...)\n\ti.SetBytes(b)\n\treturn i\n}",
"func Hash(value int64) uint64 {\n\treturn FNVHash64(uint64(value))\n}",
"func (t *openAddressing) hash(key string, round int) uint32 {\n\tnum := uint(stringToInt(key))\n\tmax := uint(len(t.values) - 1)\n\treturn uint32((hashDivision(num, max) + uint(round)*hashDivision2(num, max)) % max)\n}",
"func hash(addr mino.Address) *big.Int {\n\tsha := sha256.New()\n\tmarshalled, err := addr.MarshalText()\n\tif err != nil {\n\t\tmarshalled = []byte(addr.String())\n\t}\n\t// A hack to accommodate for minogrpc's design:\n\t// 1) the first byte is used to indicate if a node is orchestrator or not\n\t// 2) the only way to reach the orchestrator is to route a message to nil\n\t// from its server side, which has the same address but orchestrator byte\n\t// set to f.\n\t// We therefore have to ignore if a node is the orchestrator to be able to\n\t// route the message first to its server side, then from the server side to\n\t// the client side.\n\tsha.Write(marshalled[1:])\n\treturn byteArrayToBigInt(sha.Sum(nil))\n}",
"func IntHashInfo() {\n\tvar sp map[uint64]uint64 = make(map[uint64]uint64)\n\tprintMemStats()\n\ts := fnv.New64a()\n\tfor i := 0; i < 100000; i++ {\n\t\ts.Reset()\n\t\ts.Write([]byte(strconv.Itoa(i)))\n\t\ta := hex.EncodeToString(s.Sum(nil))\n\n\t\tas, error := strconv.ParseUint(a, 16, 64)\n\t\tif error != nil {\n\t\t\treturn\n\t\t}\n\t\tsp[as] = as\n\t\t//fmt.Println(as)\n\n\t}\n\tprintMemStats()\n\tfmt.Println(unsafe.Sizeof(sp))\n\tfmt.Println(len(sp))\n}",
"func hash(k Key) int {\n\tkey := fmt.Sprintf(\"%s\", k)\n\th := 0\n\tfor i := 0; i < len(key); i++ {\n\t\th = 31 * h + int(key[i])\n\t}\n\treturn h\n}",
"func (n Number) Hash() int {\n\tf, err := json.Number(n).Float64()\n\tif err != nil {\n\t\tbs := []byte(n)\n\t\th := xxhash.Checksum64(bs)\n\t\treturn int(h)\n\t}\n\treturn int(f)\n}",
"func GetHashNumber(a *big.Int, b *big.Int, index int, bitlen uint) *big.Int {\n\ttmp := []*big.Int{}\n\tif a != nil {\n\t\ttmp = append(tmp, a)\n\t}\n\tif b != nil {\n\t\ttmp = append(tmp, b)\n\t}\n\ttmp = append(tmp, big.NewInt(int64(index)))\n\tcountIdx := len(tmp)\n\ttmp = append(tmp, big.NewInt(0))\n\n\tk := uint(0)\n\tres := big.NewInt(0)\n\tfor k < bitlen {\n\t\tcur := HashCommit(tmp, false)\n\t\tcur.Lsh(cur, uint(k))\n\t\tres.Add(res, cur)\n\t\tk += 256\n\t\ttmp[countIdx].Add(tmp[countIdx], big.NewInt(1))\n\t}\n\n\treturn res\n}",
"func (o *ObjectIndex) Hash() uint32 {\n\tvar h uint32 = 17\n\n\tvar str string\n\tstr += fmt.Sprintf(\"%08x\", o.machine)\n\tstr += fmt.Sprintf(\"%04x\", o.pid)\n\tstr += fmt.Sprintf(\"%08x\", o.id)\n\tstr += fmt.Sprintf(\"%08x\", o.Rand)\n\tfor _, v := range str {\n\t\th += h*23 + uint32(v)\n\t}\n\treturn h\n}",
"func Hash(mem []byte) uint64 {\n\tvar hash uint64 = 5381\n\tfor _, b := range mem {\n\t\thash = (hash << 5) + hash + uint64(b)\n\t}\n\treturn hash\n}",
"func getHashedValue(iStr string, prefix int64, blockId int) *big.Int {\n\t// the first block, no prefix\n\tif blockId == 0 {\n\t\tiStrBytes := []byte(iStr) // convert string to byte\n\t\thmac_ins := hmac.New(sha256.New, k) // create an HMAC instance by key k\n\t\thmac_ins.Write(iStrBytes[:]) // generate the HMAC data for iStr\n\t\thashed := hmac_ins.Sum(nil)\n\t\thashedValue := new(big.Int).SetBytes(hashed[:]) // convert bytes to big.Int\n\t\treturn hashedValue\n\t} else { // include the prefix\n\t\t// hash the prefix by SHA256\n\t\tprefixBytes := []byte(strconv.FormatInt(prefix, 10))\n\t\thashedPrefix := sha256.Sum256(prefixBytes[:])\n\t\tiStrByte := []byte(iStr) // convert string to byte\n\n\t\t// combine hashedPrefix and iStrByte to finalBytes\n\t\tvar buffer bytes.Buffer\n\t\tbuffer.Write(hashedPrefix[:])\n\t\tbuffer.Write(iStrByte[:])\n\t\tfinalBytes := buffer.Bytes()\n\n\t\t// generate HMAC data for finalBytes\n\t\thmac_ins := hmac.New(sha256.New, k) // create an HMAC instance by key k\n\t\thmac_ins.Write(finalBytes[:]) // generate HMAC data for finalBytes\n\t\thashed := hmac_ins.Sum(nil)\n\t\thashedValue := new(big.Int).SetBytes(hashed[:]) // convert bytes to big.Int\n\t\treturn hashedValue\n\t}\n}",
"func hash(key string) int {\n\tsum := 0\n\tfor _, v := range key {\n\t\tsum += int(v)\n\t}\n\n\treturn sum\n}",
"func d2h(val uint64) (result *Key) {\n\tresult = new(Key)\n\tfor i := 0; val > 0; i++ {\n\t\tresult[i] = byte(val & 0xFF)\n\t\tval /= 256\n\t}\n\treturn\n}",
"func incHash(h common.Hash) common.Hash {\n\tvar a uint256.Int\n\ta.SetBytes32(h[:])\n\ta.AddUint64(&a, 1)\n\treturn common.Hash(a.Bytes32())\n}",
"func (_Ethdkg *EthdkgSession) HashToG1(message []byte) ([2]*big.Int, error) {\n\treturn _Ethdkg.Contract.HashToG1(&_Ethdkg.CallOpts, message)\n}",
"func hash(data []byte) uint32 {\n\tvar h uint32 = binary.LittleEndian.Uint32(data) * kDictHashMul32\n\n\t/* The higher bits contain more mixture from the multiplication,\n\t so we take our results from there. */\n\treturn h >> uint(32-kDictNumBits)\n}",
"func HashCommit(values []*big.Int, issig bool) *big.Int {\n\t// The first element is the number of elements\n\tvar tmp []interface{}\n\toffset := 0\n\tif issig {\n\t\ttmp = make([]interface{}, len(values)+2)\n\t\ttmp[0] = true\n\t\toffset++\n\t} else {\n\t\ttmp = make([]interface{}, len(values)+1)\n\t}\n\ttmp[offset] = gobig.NewInt(int64(len(values)))\n\toffset++\n\tfor i, v := range values {\n\t\ttmp[i+offset] = v.Go()\n\t}\n\tr, err := asn1.Marshal(tmp)\n\tif err != nil {\n\t\tpanic(err) // Marshal should never error, so panic if it does\n\t}\n\n\tsha := sha256.Sum256(r)\n\treturn new(big.Int).SetBytes(sha[:])\n}",
"func getHashFromEntry(data []byte) uint64 {\n\treturn binary.LittleEndian.Uint64(data[timestampSizeInBytes:])\n}",
"func (_L1Block *L1BlockCaller) Hash(opts *bind.CallOpts) ([32]byte, error) {\n\tvar out []interface{}\n\terr := _L1Block.contract.Call(opts, &out, \"hash\")\n\n\tif err != nil {\n\t\treturn *new([32]byte), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new([32]byte)).(*[32]byte)\n\n\treturn out0, err\n\n}",
"func (num Number) Hash() int {\n\tf, err := json.Number(num).Float64()\n\tif err != nil {\n\t\tbs := []byte(num)\n\t\th := xxhash.Checksum64(bs)\n\t\treturn int(h)\n\t}\n\treturn int(f)\n}",
"func (l *Link) ToHash() common.Hash {\n\treturn common.BigToHash((*big.Int)(l))\n}",
"func HashToBig(hash *chainhash.Hash) *big.Int {\n\t// A Hash is in little-endian, but the big package wants the bytes in big-endian, so reverse them.\n\tbuf := *hash\n\tblen := len(buf)\n\tfor i := 0; i < blen/2; i++ {\n\t\tbuf[i], buf[blen-1-i] = buf[blen-1-i], buf[i]\n\t}\n\t// buf := hash.CloneBytes()\n\treturn new(big.Int).SetBytes(buf[:])\n}",
"func computeHash(nstObj megav1.NamespaceTemplate) uint64 {\n\thash, err := hashstructure.Hash(nstObj, nil)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tfmt.Printf(\"computeHash: %d\\n\", hash)\n\treturn hash\n}",
"func encodeSizeHashPairsWrapper(obj *hashPairsWrapper) uint64 {\n\ti0 := uint64(0)\n\n\t// obj.HashPairs\n\ti0 += 4\n\t{\n\t\ti1 := uint64(0)\n\n\t\t// x.Hash\n\t\ti1 += 32\n\n\t\t// x.PrevHash\n\t\ti1 += 32\n\n\t\ti0 += uint64(len(obj.HashPairs)) * i1\n\t}\n\n\treturn i0\n}",
"func hash(x []byte) uint32 {\n\treturn crc32.ChecksumIEEE(x)\n}",
"func hashmapHash(data []byte) uint32 {\n\tvar result uint32 = 2166136261 // FNV offset basis\n\tfor _, c := range data {\n\t\tresult ^= uint32(c)\n\t\tresult *= 16777619 // FNV prime\n\t}\n\treturn result\n}",
"func (_Ethdkg *EthdkgCallerSession) HashToG1(message []byte) ([2]*big.Int, error) {\n\treturn _Ethdkg.Contract.HashToG1(&_Ethdkg.CallOpts, message)\n}",
"func getRelHash(r string) [8]int {\n\th1 := sha1.New()\n\th1.Write([]byte(r))\n\tb1 := h1.Sum(nil)\n\tdata1 := b1[0]\n\tid1 := data1 % 4\n\tidint := int(id1)\n\tvar nodelist [8]int\n\tfor k := 0; k < 8; k++ {\n\t\tnodelist[k] = (k * 4) + idint\n\t}\n\t//fmt.Println(\"Nodelist for given relation\", nodelist)\n\treturn nodelist\n}",
"func HashToBig(hash *chainhash.Hash) *big.Int {\n\t// A Hash is in little-endian, but the big package wants the bytes in\n\t// big-endian, so reverse them.\n\tbuf := *hash\n\tblen := len(buf)\n\tfor i := 0; i < blen/2; i++ {\n\t\tbuf[i], buf[blen-1-i] = buf[blen-1-i], buf[i]\n\t}\n\n\treturn new(big.Int).SetBytes(buf[:])\n}",
"func H1(m []byte) (x, y *big.Int) {\n\th := sha512.New()\n\tvar i uint32\n\tbyteLen := (curve.BitSize + 7) >> 3\n\tfor x == nil && i < 100 {\n\t\t// TODO: Use a NIST specified DRBG.\n\t\th.Reset()\n\t\tif err := binary.Write(h, binary.BigEndian, i); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tif _, err := h.Write(m); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tr := []byte{2} // Set point encoding to \"compressed\", y=0.\n\t\tr = h.Sum(r)\n\t\tx, y = Unmarshal(curve, r[:byteLen+1])\n\t\ti++\n\t}\n\treturn\n}",
"func space_hash(x, y, n uint64) (SpaceMapKey) {\n return SpaceMapKey((x*1640531513 ^ y*2654435789) % n)\n}",
"func BytesToHash(b []byte) Hash32 {\n\tvar h Hash32\n\th.SetBytes(b)\n\treturn h\n}",
"func checkHashSize(hexStr string) int {\n\tmaxint64 := 9223372036854775807\n\tminint64 := -9223372036854775807\n\tint64val := hex2int(hexStr)\n\n\tfor len(hexStr) > 0 && int64val >= maxint64 || int64val <= minint64 {\n\t\thexStr = hexStr[:len(hexStr)-1]\n\t\tint64val = hex2int(hexStr)\n\t}\n\treturn int64val\n}",
"func Hash(s int, o Orientation) (int, error) {\n\n\tvar errVal int = 10\n\n\tif !(s >= 0 && s <= palletWidth*palletLength) {\n\t\treturn errVal, ErrSize\n\t}\n\tif o != HORIZONTAL && o != VERTICAL && o != SQUAREGRID {\n\t\treturn errVal, ErrOrient\n\t}\n\n\tvar hash int\n\n\tswitch s {\n\tcase 1, 2, 3, 6:\n\t\thash = s - 1\n\tcase 4:\n\t\tif o == SQUAREGRID {\n\t\t\thash = s\n\t\t} else {\n\t\t\thash = s - 1\n\t\t}\n\tcase 8:\n\t\thash = 6\n\tcase 9:\n\t\thash = 7\n\tcase 12:\n\t\thash = 8\n\tcase 16:\n\t\thash = 9\n\tdefault:\n\t\treturn errVal, ErrSize\n\t}\n\n\treturn hash, nil\n}",
"func encodeHash(x uint64, p, pPrime uint) (hashCode uint64) {\n\tif x&onesFromTo(64-pPrime, 63-p) == 0 {\n\t\tr := rho(extractShift(x, 0, 63-pPrime))\n\t\treturn concat([]concatInput{\n\t\t\t{x, 64 - pPrime, 63},\n\t\t\t{uint64(r), 0, 5},\n\t\t\t{1, 0, 0}, // this just adds a 1 bit at the end\n\t\t})\n\t} else {\n\t\treturn concat([]concatInput{\n\t\t\t{x, 64 - pPrime, 63},\n\t\t\t{0, 0, 0}, // this just adds a 0 bit at the end\n\t\t})\n\t}\n}",
"func Hash(length int, key string) int64 {\n\tif key == \"\" {\n\t\treturn 0\n\t}\n\thc := hashCode(key)\n\treturn (hc ^ (hc >> 16)) % int64(length)\n}",
"func hash(key uint64) uint64 {\r\n\tkey ^= key >> 33\r\n\tkey *= 0xff51afd7ed558ccd\r\n\tkey ^= key >> 33\r\n\tkey *= 0xc4ceb9fe1a85ec53\r\n\tkey ^= key >> 33\r\n\treturn key\r\n}",
"func hashFieldNameToNumber(s string) uint8 {\n\th := fnv.New32a()\n\th.Write([]byte(s))\n\treturn uint8(h.Sum32() % 2047)\n}",
"func genHash2(resp io.ReadCloser) (i uint32){\n\n\t//\tCalculate a hash based on the URL'd file contents.\n\t//\tDon't use encode / decode machinery from an existing\n\t//\tpackage (JPEG / PNG / etc.) because we don't reliably\n\t//\tknow the file format AKA image type. We only know\n\t//\tthat we have a file as a starting point - specifically,\n\t//\tthe\n\t//\n\t//\t\tresponse.Body\n\t//\n\t//\tvalue. We can't assume anything about the file type - only\n\t//\tthat we start(ed) with a genuine file.\n\t//\n\t//\tTransform the response.Body value into a []byte slice\n\t//\tin the localBuf variable, and proceed . . .\n\n\tvar buf bytes.Buffer\n\tgz := gzip.NewWriter(&buf)\n\tif _, err := io.Copy(gz, resp); err != nil {\n\t\tfmt.Println(err.Error())\n\t}\n\tgz.Close()\n\n\tvar localBuf = buf.Bytes()\n\th := fnv.New32a()\n\th.Write(localBuf)\n\ti = h.Sum32()\n\treturn i\n}",
"func hash(s string) int {\n\th := fnv.New32a()\n\tif _, err := h.Write([]byte(s)); err != nil {\n\t\tpanic(err) // should never happen\n\t}\n\n\treturn int(h.Sum32() & 0x7FFFFFFF) // mask MSB of uint32 as this will be sign bit\n}",
"func hashInt(s string) uint32 {\n\tb := []byte(s)\n\th := crc32.ChecksumIEEE(b)\n\treturn h\n}",
"func ToHash(s []byte) Hash {\n\tif len(s) == 0 || len(s) > _Hash_maxLen {\n\t\treturn 0\n\t}\n\th := uint32(_Hash_hash0)\n\tfor i := 0; i < len(s); i++ {\n\t\th ^= uint32(s[i])\n\t\th *= 16777619\n\t}\n\tif i := _Hash_table[h&uint32(len(_Hash_table)-1)]; int(i&0xff) == len(s) {\n\t\tt := _Hash_text[i>>8 : i>>8+i&0xff]\n\t\tfor i := 0; i < len(s); i++ {\n\t\t\tif t[i] != s[i] {\n\t\t\t\tgoto NEXT\n\t\t\t}\n\t\t}\n\t\treturn i\n\t}\nNEXT:\n\tif i := _Hash_table[(h>>16)&uint32(len(_Hash_table)-1)]; int(i&0xff) == len(s) {\n\t\tt := _Hash_text[i>>8 : i>>8+i&0xff]\n\t\tfor i := 0; i < len(s); i++ {\n\t\t\tif t[i] != s[i] {\n\t\t\t\treturn 0\n\t\t\t}\n\t\t}\n\t\treturn i\n\t}\n\treturn 0\n}",
"func hash(s string) string {\n\thash := fnv.New32a()\n\thash.Write([]byte(s))\n\tintHash := hash.Sum32()\n\tresult := fmt.Sprintf(\"%08x\", intHash)\n\treturn result\n}",
"func hash8(u uint64, h uint8) uint32 {\n\treturn uint32((u * prime8bytes) >> ((64 - h) & 63))\n}",
"func (s *CountMinSketch) baseHashes(key []byte) [2]uint32 {\n\ts.h.Reset()\n\ts.h.Write(key)\n\tsum := s.h.Sum(nil)\n\treturn [2]uint32{binary.BigEndian.Uint32(sum[0:4]), binary.BigEndian.Uint32(sum[4:8])}\n}",
"func hash(key, value string) int64 {\n\thash := siphash.New(sipConst)\n\thash.Write([]byte(key + \":::\" + value))\n\treturn int64(hash.Sum64())\n}",
"func (this *XXHash64) Hash(data []byte) uint64 {\n\tend := len(data)\n\tvar h64 uint64\n\tn := 0\n\n\tif end >= 32 {\n\t\tend32 := end - 32\n\t\tv1 := this.seed + _XXHASH_PRIME64_1 + _XXHASH_PRIME64_2\n\t\tv2 := this.seed + _XXHASH_PRIME64_2\n\t\tv3 := this.seed\n\t\tv4 := this.seed - _XXHASH_PRIME64_1\n\n\t\tfor n <= end32 {\n\t\t\tbuf := data[n : n+32]\n\t\t\tv1 = xxHash64Round(v1, binary.LittleEndian.Uint64(buf[0:8]))\n\t\t\tv2 = xxHash64Round(v2, binary.LittleEndian.Uint64(buf[8:16]))\n\t\t\tv3 = xxHash64Round(v3, binary.LittleEndian.Uint64(buf[16:24]))\n\t\t\tv4 = xxHash64Round(v4, binary.LittleEndian.Uint64(buf[24:32]))\n\t\t\tn += 32\n\t\t}\n\n\t\th64 = ((v1 << 1) | (v1 >> 31)) + ((v2 << 7) | (v2 >> 25)) +\n\t\t\t((v3 << 12) | (v3 >> 20)) + ((v4 << 18) | (v4 >> 14))\n\n\t\th64 = xxHash64MergeRound(h64, v1)\n\t\th64 = xxHash64MergeRound(h64, v2)\n\t\th64 = xxHash64MergeRound(h64, v3)\n\t\th64 = xxHash64MergeRound(h64, v4)\n\t} else {\n\t\th64 = this.seed + _XXHASH_PRIME64_5\n\t}\n\n\th64 += uint64(end)\n\n\tfor n+8 <= end {\n\t\th64 ^= xxHash64Round(0, binary.LittleEndian.Uint64(data[n:n+8]))\n\t\th64 = ((h64<<27)|(h64>>37))*_XXHASH_PRIME64_1 + _XXHASH_PRIME64_4\n\t\tn += 8\n\t}\n\n\tfor n+4 <= end {\n\t\th64 ^= (uint64(binary.LittleEndian.Uint32(data[n:n+4])) * _XXHASH_PRIME64_1)\n\t\th64 = ((h64<<23)|(h64>>41))*_XXHASH_PRIME64_2 + _XXHASH_PRIME64_3\n\t\tn += 4\n\t}\n\n\tfor n < end {\n\t\th64 += (uint64(data[n]) * _XXHASH_PRIME64_5)\n\t\th64 = ((h64 << 11) | (h64 >> 53)) * _XXHASH_PRIME64_1\n\t\tn++\n\t}\n\n\th64 ^= (h64 >> 33)\n\th64 *= _XXHASH_PRIME64_2\n\th64 ^= (h64 >> 29)\n\th64 *= _XXHASH_PRIME64_3\n\treturn h64 ^ (h64 >> 32)\n}",
"func hash(m datasource.Metric) uint64 {\n\thash := fnv.New64a()\n\tlabels := m.Labels\n\tsort.Slice(labels, func(i, j int) bool {\n\t\treturn labels[i].Name < labels[j].Name\n\t})\n\tfor _, l := range labels {\n\t\t// drop __name__ to be consistent with Prometheus alerting\n\t\tif l.Name == \"__name__\" {\n\t\t\tcontinue\n\t\t}\n\t\thash.Write([]byte(l.Name))\n\t\thash.Write([]byte(l.Value))\n\t\thash.Write([]byte(\"\\xff\"))\n\t}\n\treturn hash.Sum64()\n}",
"func Hash(k0, k1 uint64, p []byte) uint64 {\n\tvar d digest\n\td.size = Size\n\td.k0 = k0\n\td.k1 = k1\n\td.Reset()\n\td.Write(p)\n\treturn d.Sum64()\n}",
"func hash(elements ...[32]byte) [32]byte {\n\tvar hash []byte\n\tfor i := range elements {\n\t\thash = append(hash, elements[i][:]...)\n\t}\n\treturn sha256.Sum256(hash)\n}",
"func (t *Target) hash() uint64 {\n\th := fnv.New64a()\n\n\t//nolint: errcheck\n\th.Write([]byte(fmt.Sprintf(\"%016d\", t.labels.Hash())))\n\t//nolint: errcheck\n\th.Write([]byte(t.URL().String()))\n\n\treturn h.Sum64()\n}",
"func Hash(b []byte) uint32 {\n\tconst (\n\t\tseed = 0xbc9f1d34\n\t\tm = 0xc6a4a793\n\t)\n\th := uint32(seed) ^ uint32(len(b))*m\n\tfor ; len(b) >= 4; b = b[4:] {\n\t\th += uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24\n\t\th *= m\n\t\th ^= h >> 16\n\t}\n\tswitch len(b) {\n\tcase 3:\n\t\th += uint32(b[2]) << 16\n\t\tfallthrough\n\tcase 2:\n\t\th += uint32(b[1]) << 8\n\t\tfallthrough\n\tcase 1:\n\t\th += uint32(b[0])\n\t\th *= m\n\t\th ^= h >> 24\n\t}\n\treturn h\n}",
"func hash(data []byte) [32]byte {\n\tvar hash [32]byte\n\n\th := sha256.New()\n\t// The hash interface never returns an error, for that reason\n\t// we are not handling the error below. For reference, it is\n\t// stated here https://golang.org/pkg/hash/#Hash\n\t// #nosec G104\n\th.Write(data)\n\th.Sum(hash[:0])\n\n\treturn hash\n}",
"func getKeyHash(k string) [4]int {\n\th1 := sha1.New()\n\th1.Write([]byte(k))\n\tb1 := h1.Sum(nil)\n\tdata1 := b1[0]\n\tid1 := data1 % 8\n\tid1 = id1 * 4\n\tidint := int(id1)\n\tvar nodelist [4]int\n\tfor k := 0; k < 4; k++ {\n\t\tnodelist[k] = k + idint\n\t}\n\n\t//fmt.Println(\"Nodelist for given key\", nodelist)\n\treturn nodelist\n}",
"func getHash(p Point, precision Accuracy) (uint64, error) {\n\thash, err := hashstructure.Hash(p.truncate(precision), nil)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"unable to get hash point\")\n\t}\n\treturn hash, nil\n}",
"func hashIt(s string, bit int) int {\n\th := sha1.New()\n\th.Write([]byte(s))\n\tbs := h.Sum(nil)\n\thashValue := math.Mod(float64(bs[len(bs)-1]), math.Exp2(float64(bit)))\n\treturn int(hashValue)\n}",
"func Hashes(r Roller, p []byte) []uint64 {\n\tn := r.Len()\n\tif len(p) < n {\n\t\treturn nil\n\t}\n\th := make([]uint64, len(p)-n+1)\n\tfor i := 0; i < n-1; i++ {\n\t\tr.RollByte(p[i])\n\t}\n\tfor i := range h {\n\t\th[i] = r.RollByte(p[i+n-1])\n\t}\n\treturn h\n}",
"func HashASM(k0, k1 uint64, p []byte) uint64",
"func op_BLOCKHASH(pc *uint64, in *interpreter, ctx *callCtx) uint64 {\n\tnum := ctx.stack.Peek()\n\tnum64, overflow := num.Uint64WithOverflow()\n\tif overflow {\n\t\tnum.Clear()\n\t}\n\tvar upper, lower uint64\n\tupper = in.evm.block.NumberU64()\n\tif upper < 257 {\n\t\tlower = 0\n\t} else {\n\t\tlower = upper - 256\n\t}\n\tif num64 >= lower && num64 < upper {\n\t\tnum.SetBytes(in.evm.block.Hash().Bytes())\n\t} else {\n\t\tnum.Clear()\n\t}\n\treturn 0\n}",
"func (t Tuple1) Hash() uint32 {\n\tif t.E1 == nil {\n\t\treturn 0\n\t}\n\treturn t.E1.Hash()\n}",
"func htkey_hash_str(k1 voidptr, len int) usize {\n\tvar k1p byteptr = (byteptr)(k1)\n\tvar hash usize\n\n\thash = 0 + 5381 + len + 1\n\tfor i := 0; i < len; i++ {\n\t\tc := k1p[i]\n\t\thash = ((hash << 5) + hash) ^ usize(c)\n\t}\n\n\treturn hash\n}",
"func (source *Source) Hash() int {\n\tvar hash int\n\n\tif len(source.Prefix) > 0 {\n\t\tfor _, b := range source.Prefix {\n\t\t\thash = int(b*31) + hash\n\t\t}\n\t}\n\n\thash = int(source.PrefixLen*31) + hash\n\thash = int(source.RouterId*31) + hash\n\n\treturn hash\n}",
"func byteshash(p *[]byte, h uintptr) uintptr",
"func Hash(strings ...string) uint32 {\n\tdigester := fnv.New32()\n\tfor _, s := range strings {\n\t\t_, _ = io.WriteString(digester, s)\n\t}\n\treturn digester.Sum32()\n}",
"func HashNumber(leafs Nodes, l Level, n Nodes) int64 {\n\tsum := Nodes(0)\n\tfor i := Level(0); i < l; i++ {\n\t\tsum += LevelWidth(leafs, i)\n\t}\n\treturn int64(sum + n)\n}",
"func (s *CountMinSketch) baseHashes(key []byte) (a uint32, b uint32) {\n\ts.hasher.Reset()\n\ts.hasher.Write(key)\n\tsum := s.hasher.Sum(nil)\n\tupper := sum[0:4]\n\tlower := sum[4:8]\n\ta = binary.BigEndian.Uint32(lower)\n\tb = binary.BigEndian.Uint32(upper)\n\treturn\n}",
"func hash(values ...[]byte) ([]byte, error) {\n\th := swarm.NewHasher()\n\tfor _, v := range values {\n\t\t_, err := h.Write(v)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn h.Sum(nil), nil\n}",
"func hash(value string) uint32 {\n\th := fnv.New32a()\n\th.Write([]byte(value))\n\n\treturn h.Sum32()\n}",
"func Hash(b []byte, seed uint64) uint64",
"func (h KeyImage) Big() *big.Int { return new(big.Int).SetBytes(h[:]) }",
"func (l Integer) HashCode() uint64 {\n\treturn intHash * uint64(l)\n}",
"func hash5(u uint64, h uint8) uint32 {\n\treturn uint32(((u << (64 - 40)) * prime5bytes) >> ((64 - h) & 63))\n}",
"func (h *hasht) hash(input string) uint64 {\n\tvar hash uint64 = FNVOffset\n\tfor _, char := range input {\n\t\thash ^= uint64(char)\n\t\thash *= FNVPrime\n\t}\n\treturn hash\n}",
"func Hex(s string) Integer { return integer{x: bigint.MustHex(s)} }",
"func (ph *PHash) computeHash(img [][]float32) hashtype.Binary {\n\t// TODO: Remove magic numbers\n\thash := make(hashtype.Binary, 8)\n\tvar c uint\n\tfor i := range img {\n\t\tfor j := range img[i] {\n\t\t\tif img[i][j] != 0 {\n\t\t\t\thash.Set(c)\n\t\t\t}\n\t\t\tc++\n\t\t}\n\t}\n\treturn hash\n}",
"func (p *Processor) getHash(x *mat.Dense) int {\n\th := x.T().Mul(p.r.Value())\n\tconcat := mat.ConcatV(h, h.ProdScalar(-1.0))\n\treturn f64utils.ArgMax(concat.Data())\n}",
"func NextHash(h Hash) Hash {\n\tnext := h\n\tfor i := HashLen - 1; i >= 0; i-- {\n\t\tnext[i] += 1\n\t\tif next[i] != 0 {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn next\n}",
"func HashToBig(buf []byte) *big.Int {\n\t// A Hash is in little-endian, but the big package wants the bytes in\n\t// big-endian, so reverse them.\n\tblen := len(buf)\n\tfor i := 0; i < blen/2; i++ {\n\t\tbuf[i], buf[blen-1-i] = buf[blen-1-i], buf[i]\n\t}\n\treturn new(big.Int).SetBytes(buf[:])\n}",
"func (s *SecondStateMachine) GetHash() uint64 {\n\t// the only state we have is that Count variable. that uint64 value pretty much\n\t// represents the state of this IStateMachine\n\treturn s.Count\n}",
"func hasher(s string) []byte {\n\tval := sha256.Sum256([]byte(s))\n\tvar hex []string\n\n\t// Iterate through the bytes.\n\tfor i := 0; i < len(val); i++ {\n\t\t// We want each number to be represented by 2 chars.\n\t\tplaceHolder := []string{\"0\"}\n\t\tvalue := strconv.FormatInt(int64(val[i]), 16)\n\n\t\tif len(value) != 2 {\n\t\t\tplaceHolder = append(placeHolder, value)\n\t\t\thex = append(hex, strings.Join(placeHolder, \"\"))\n\t\t} else {\n\t\t\thex = append(hex, value)\n\t\t}\n\t}\n\treturn []byte(strings.Join(hex, \"\"))\n\n}",
"func ihash(key string) int {\n\th := fnv.New32a()\n\th.Write([]byte(key))\n\treturn int(h.Sum32() & 0x7fffffff) % 10\n}",
"func hash4x64(u uint64, h uint8) uint32 {\n\treturn (uint32(u) * prime4bytes) >> ((32 - h) & 31)\n}",
"func Hash(data []byte) (string, int64) {\n\thasher := adler32.New()\n\tb, e := hasher.Write(data)\n\tif e != nil {\n\t\tlogs.WithFields(logs.Fields{\n\t\t\t\"Error\": e,\n\t\t}).Error(\"Unable to write chunk of data via hasher.Write\", e)\n\t}\n\treturn hex.EncodeToString(hasher.Sum(nil)), int64(b)\n}",
"func Hash(hash string, n int) string {\n\tfmt.Printf(\"Hashing %s%s%s %d times...\\n\", chalk.Magenta, hash, chalk.Reset, n)\n\n\tfor i := 0; i < n; i++ {\n\t\t// SHA256 hash\n\t\thashByte := sha256.Sum256([]byte(hash))\n\t\thash = hex.EncodeToString(hashByte[:])\n\t}\n\n\tfmt.Printf(\"%sHash :: %s%s\\n\", chalk.Blue, chalk.Reset, hash)\n\treturn hash\n}",
"func (t *Table) hash(s string) int {\n\t// Good enough.\n\th := fnv.New32()\n\th.Write([]byte(s))\n\treturn int(h.Sum32()) % t.m\n}",
"func Hash(key []byte) uint64 {\n\treturn murmur3.Sum64(key)\n}"
] | [
"0.6886269",
"0.6631975",
"0.63154674",
"0.6127057",
"0.6072592",
"0.6045243",
"0.6018208",
"0.60083294",
"0.5991375",
"0.59856206",
"0.59772563",
"0.5945282",
"0.5902907",
"0.5884424",
"0.58625615",
"0.585118",
"0.58473116",
"0.58305854",
"0.5804139",
"0.58038306",
"0.57929933",
"0.57920486",
"0.57836473",
"0.57727224",
"0.57579046",
"0.5751352",
"0.5709625",
"0.5697522",
"0.56929696",
"0.56675106",
"0.5653771",
"0.5636962",
"0.5636658",
"0.5633033",
"0.5609694",
"0.56088036",
"0.5605244",
"0.55955267",
"0.5583737",
"0.5572549",
"0.5559417",
"0.5558344",
"0.5553519",
"0.5551056",
"0.5538801",
"0.55340266",
"0.5522692",
"0.5508348",
"0.5507088",
"0.54915994",
"0.54827046",
"0.5479538",
"0.54744834",
"0.5461077",
"0.5450507",
"0.5430539",
"0.5425892",
"0.5422061",
"0.54210037",
"0.5411878",
"0.54105574",
"0.5401859",
"0.5401665",
"0.5394097",
"0.5392664",
"0.53904593",
"0.5390147",
"0.538937",
"0.5378421",
"0.53662264",
"0.53644824",
"0.5362572",
"0.53583974",
"0.535117",
"0.5345055",
"0.5343222",
"0.5342874",
"0.53412753",
"0.5339141",
"0.53353584",
"0.5329328",
"0.5324605",
"0.53229195",
"0.5312707",
"0.5306927",
"0.53022563",
"0.5300072",
"0.52938884",
"0.5293235",
"0.52920055",
"0.5290643",
"0.5290302",
"0.5286863",
"0.5286535",
"0.5282201",
"0.5277892",
"0.5277838",
"0.52754545",
"0.52702826",
"0.5270087"
] | 0.6020945 | 6 |
Evaluate returns the verifiable unpredictable function evaluated at m | func (k PrivateKey) Evaluate(m []byte) (index [32]byte, proof []byte) {
nilIndex := [32]byte{}
// Prover chooses r <-- [1,N-1]
r, _, _, err := elliptic.GenerateKey(curve, rand.Reader)
if err != nil {
return nilIndex, nil
}
ri := new(big.Int).SetBytes(r)
// H = H1(m)
Hx, Hy := H1(m)
// VRF_k(m) = [k]H
sHx, sHy := curve.ScalarMult(Hx, Hy, k.D.Bytes())
vrf := elliptic.Marshal(curve, sHx, sHy) // 65 bytes.
// G is the base point
// s = H2(G, H, [k]G, VRF, [r]G, [r]H)
rGx, rGy := curve.ScalarBaseMult(r)
rHx, rHy := curve.ScalarMult(Hx, Hy, r)
var b bytes.Buffer
if _, err := b.Write(elliptic.Marshal(curve, curve.Gx, curve.Gy)); err != nil {
panic(err)
}
if _, err := b.Write(elliptic.Marshal(curve, Hx, Hy)); err != nil {
panic(err)
}
if _, err := b.Write(elliptic.Marshal(curve, k.PublicKey.X, k.PublicKey.Y)); err != nil {
panic(err)
}
if _, err := b.Write(vrf); err != nil {
panic(err)
}
if _, err := b.Write(elliptic.Marshal(curve, rGx, rGy)); err != nil {
panic(err)
}
if _, err := b.Write(elliptic.Marshal(curve, rHx, rHy)); err != nil {
panic(err)
}
s := H2(b.Bytes())
// t = r−s*k mod N
t := new(big.Int).Sub(ri, new(big.Int).Mul(s, k.D))
t.Mod(t, curve.N)
// Index = H(vrf)
index = sha256.Sum256(vrf)
// Write s, t, and vrf to a proof blob. Also write leading zeros before s and t
// if needed.
var buf bytes.Buffer
if _, err := buf.Write(make([]byte, 32-len(s.Bytes()))); err != nil {
panic(err)
}
if _, err := buf.Write(s.Bytes()); err != nil {
panic(err)
}
if _, err := buf.Write(make([]byte, 32-len(t.Bytes()))); err != nil {
panic(err)
}
if _, err := buf.Write(t.Bytes()); err != nil {
panic(err)
}
if _, err := buf.Write(vrf); err != nil {
panic(err)
}
return index, buf.Bytes()
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (s *System) Evaluate(state []float32) []float32 {\n\tif len(state) > 0 {\n\t\treturn s.function(state, s.parametersVector)\n\t} else {\n\t\treturn s.function(s.stateVector, s.parametersVector)\n\t}\n}",
"func (m *Multiplication) Evaluate(left, right EvalResult) (EvalResult, error) {\n\treturn multiplyNumericWithError(left, right)\n}",
"func (e *ExpressionAtom) Evaluate(dataContext IDataContext, memory *WorkingMemory) (reflect.Value, error) {\n\tif e.Evaluated == true {\n\t\treturn e.Value, nil\n\t}\n\tif e.Variable != nil {\n\t\tval, err := e.Variable.Evaluate(dataContext, memory)\n\t\tif err != nil {\n\t\t\treturn reflect.Value{}, err\n\t\t}\n\t\te.Value = val\n\t\te.Evaluated = true\n\t\treturn val, err\n\t}\n\tif e.FunctionCall != nil {\n\t\tvalueNode := dataContext.Get(\"DEFUNC\")\n\t\targs, err := e.FunctionCall.EvaluateArgumentList(dataContext, memory)\n\t\tif err != nil {\n\t\t\treturn reflect.Value{}, err\n\t\t}\n\t\tret, err := valueNode.CallFunction(e.FunctionCall.FunctionName, args...)\n\t\tif err != nil {\n\t\t\treturn reflect.Value{}, err\n\t\t}\n\t\te.Value = ret\n\t\te.Evaluated = true\n\t\treturn ret, err\n\t}\n\tpanic(\"should not be reached\")\n}",
"func (v Variable) Evaluate() Expression {\n\treturn v\n}",
"func (f *functionQuery) Evaluate(t iterator) interface{} {\n\treturn f.Func(f.Input, t)\n}",
"func Evaluate(expression *[]string, dispatchTable DispatchTable, stack *Stack) interface{} {\n\n\tfor idx, token := range *expression {\n\t\tvar dispatchFunction DispatchFunc\n\n\t\tif _, err := strconv.ParseFloat(token, 64); err == nil {\n\t\t\tdispatchFunction = dispatchTable[\"FLOAT\"]\n\t\t} else {\n\t\t\tvar evalsOk bool\n\t\t\tif dispatchFunction, evalsOk = dispatchTable[token]; !evalsOk {\n\t\t\t\tdispatchFunction = dispatchTable[\"__DEFAULT__\"]\n\t\t\t\t// delete token from expression\n\t\t\t\tcopy((*expression)[idx:], (*expression)[idx+1:])\n\t\t\t\t(*expression)[len(*expression)-1] = \"\"\n\t\t\t\t(*expression) = (*expression)[:len(*expression)-1]\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tdispatchFunction(token, stack)\n\t}\n\treturn stack.Pop()\n}",
"func (e *ExpressionAtom) Evaluate() (reflect.Value, error) {\n\tvar val reflect.Value\n\tvar err error\n\tif e.Variable != nil {\n\t\tval, err = e.Variable.Evaluate()\n\t} else if e.FunctionCall != nil {\n\t\tval, err = e.FunctionCall.Evaluate()\n\t} else if e.MethodCall != nil {\n\t\tval, err = e.MethodCall.Evaluate()\n\t} else if e.Constant != nil {\n\t\tval, err = e.Constant.Evaluate()\n\t}\n\tif err == nil {\n\t\te.Value = val\n\t}\n\treturn val, err\n}",
"func (a Application) Evaluate() Expression {\n\tvar f = a.Function.Evaluate()\n\tif l, ok := f.(Abstraction); ok {\n\t\treturn l.Body.Substitute(l.Argument, a.Argument).Evaluate()\n\t}\n\treturn Application{f, a.Argument}\n}",
"func Evaluate(enode *lang.ExprNode, env *E) (*Value, error) {\n\tval, err := evalExpr(enode, env)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif enode.ResultVar != nil {\n\t\tif val.VType == MVar && enode.ResultVar.TType == lang.TTSVar {\n\t\t\treturn nil, fmt.Errorf(\"cannot assign a matrix value to a scalar variable\")\n\t\t}\n\n\t\tif val.VType == SVar && enode.ResultVar.TType == lang.TTMVar {\n\t\t\treturn nil, fmt.Errorf(\"cannot assign a scalar value to a matrix variable\")\n\t\t}\n\n\t\tv := rune(enode.ResultVar.Literal[0])\n\n\t\tswitch val.VType {\n\t\tcase MVar:\n\t\t\tenv.SetMVar(v, val.MValue)\n\t\tcase SVar:\n\t\t\tenv.SetSVar(v, val.SValue)\n\t\t}\n\t}\n\n\treturn val, nil\n}",
"func (s *candidate) Evaluate() (float64, error) {\n\tr, err := s.Schedule()\n\treturn r.Evaluate(), err\n}",
"func Evaluate(expression string) (float64, error) {\n\ttree, err := parse(expression)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn tree.evaluate()\n}",
"func Evaluate(c a.Context, s a.Sequence) a.Sequence {\n\treturn a.Map(c, compiler.Compile(c, s), evaluator)\n}",
"func (a Abstraction) Evaluate() Expression {\n\treturn Abstraction{a.Argument, a.Body.Evaluate()}\n}",
"func evaluate(expression []string, actions ActionTable, stack *Stack) interface{} {\n\tfor _, t := range expression {\n\t\tvar action ActionFunc\n\t\tif _, err := strconv.ParseFloat(t, 64); err == nil {\n\t\t\taction = actions[\"NUMBER\"]\n\t\t} else {\n\t\t\tvar ok bool\n\t\t\tif action, ok = actions[t]; !ok {\n\t\t\t\taction = actions[\"__DEFAULT__\"]\n\t\t\t}\n\t\t}\n\t\taction(t, stack)\n\t}\n\treturn stack.Pop()\n}",
"func (m *Model) Evaluate(seq Sequence) {\n\tm.EvaluateAt(seq, 0)\n}",
"func (f *CallExpression) Evaluate(ctx *Context) Value {\n\tcallable := f.Callable.Evaluate(ctx)\n\n\tif callable.Type == vtVariable {\n\t\tcallable = callable.Evaluate(ctx)\n\t}\n\n\tif callable.isCallable() {\n\t\tnewCtx := NewContext(\"\", nil)\n\t\targs := f.Args.EvaluateAll(ctx)\n\t\treturn callable.callable().Execute(newCtx, &args)\n\t}\n\n\tpanic(NewNotCallableError(callable))\n}",
"func EvaluateFuncs(exp string) string {\n exp = EvaluateFunc(exp, \"abs\")\n exp = EvaluateFunc(exp, \"sin\")\n exp = EvaluateFunc(exp, \"cos\")\n exp = EvaluateFunc(exp, \"tan\")\n return exp\n}",
"func (e *Exp) Eval() float64 {\n\te.init()\n\tresult, _ := e.eval(e.opTree)\n\treturn result\n}",
"func Evaluate(e ast.Node, genCtx *GenCtx) parser_driver.ValueExpr {\n\tswitch t := e.(type) {\n\tcase *ast.ParenthesesExpr:\n\t\treturn Evaluate(t.Expr, genCtx)\n\tcase *ast.BinaryOperationExpr:\n\t\tres, err := operator.BinaryOps.Eval(t.Op.String(), Evaluate(t.L, genCtx), Evaluate(t.R, genCtx))\n\t\tif err != nil {\n\t\t\tpanic(fmt.Sprintf(\"error occurred on eval: %+v\", err))\n\t\t}\n\t\treturn res\n\tcase *ast.UnaryOperationExpr:\n\t\tres, err := operator.UnaryOps.Eval(t.Op.String(), Evaluate(t.V, genCtx))\n\t\tif err != nil {\n\t\t\tpanic(fmt.Sprintf(\"error occurred on eval: %+v\", err))\n\t\t}\n\t\treturn res\n\tcase *ast.IsNullExpr:\n\t\tsubResult := Evaluate(t.Expr, genCtx)\n\t\tc := ConvertToBoolOrNull(subResult)\n\t\tr := parser_driver.ValueExpr{}\n\t\tr.SetInt64(0)\n\t\tif c == -1 {\n\t\t\tr.SetInt64(1)\n\t\t}\n\t\treturn r\n\tcase *ast.ColumnNameExpr:\n\t\tfor key, value := range genCtx.unwrapPivotRows {\n\t\t\toriginTableName := t.Name.Table.L\n\t\t\tfor k, v := range genCtx.TableAlias {\n\t\t\t\tif v == originTableName {\n\t\t\t\t\toriginTableName = k\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t\toriginColumnName := t.Name.Name.L\n\t\t\tif key == fmt.Sprintf(\"%s.%s\", originTableName, originColumnName) {\n\t\t\t\tv := parser_driver.ValueExpr{}\n\t\t\t\tv.SetValue(value)\n\t\t\t\tif tmpTable, ok := genCtx.TableAlias[t.Name.Table.L]; ok {\n\t\t\t\t\tt.Name.Table = model.NewCIStr(tmpTable)\n\t\t\t\t}\n\t\t\t\treturn v\n\t\t\t}\n\t\t}\n\t\tpanic(fmt.Sprintf(\"no such col %s in table %s\", t.Name, t.Name.Table))\n\tcase ast.ValueExpr:\n\t\tv := parser_driver.ValueExpr{}\n\t\tv.SetValue(t.GetValue())\n\t\tv.SetType(t.GetType())\n\t\treturn v\n\t}\n\n\t// is useless?\n\t// if e == nil {\n\t// \treturn trueValueExpr()\n\t// }\n\n\tpanic(\"not reachable\")\n\tv := parser_driver.ValueExpr{}\n\tv.SetNull()\n\treturn v\n}",
"func evaluate(node ast.Node, ext vmExtMap, tla vmExtMap, nativeFuncs map[string]*NativeFunction,\n\tmaxStack int, ic *importCache, traceOut io.Writer, stringOutputMode bool) (string, error) {\n\n\ti, err := buildInterpreter(ext, nativeFuncs, maxStack, ic, traceOut)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tresult, err := evaluateAux(i, node, tla)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tvar buf bytes.Buffer\n\ti.stack.setCurrentTrace(manifestationTrace())\n\tif stringOutputMode {\n\t\terr = i.manifestString(&buf, result)\n\t} else {\n\t\terr = i.manifestAndSerializeJSON(&buf, result, true, \"\")\n\t}\n\ti.stack.clearCurrentTrace()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tbuf.WriteString(\"\\n\")\n\treturn buf.String(), nil\n}",
"func (a *AlwaysReturn) Evaluate(r table.Row) (bool, error) {\n\treturn a.V, nil\n}",
"func (this *Mod) Evaluate(item value.Value, context Context) (value.Value, error) {\n\treturn this.BinaryEval(this, item, context)\n}",
"func eval(sc *scope, e sexpr) sexpr {\n\te = transform(sc, e)\n\tswitch e := e.(type) {\n\tcase cons: // a function to evaluate\n\t\tcons := e\n\t\tcar := eval(sc, cons.car)\n\t\tif !isFunction(car) && !isPrimitive(car) {\n\t\t\tpanic(\"Attempted application on non-function\")\n\t\t}\n\t\tcdr := cons.cdr\n\t\targs := flatten(cdr)\n\t\tif isPrimitive(car) {\n\t\t\treturn (car.(primitive))(sc, args)\n\t\t}\n\t\tf := car.(function)\n\t\t// This is a function - evaluate all arguments\n\t\tfor i, a := range args {\n\t\t\targs[i] = eval(sc, a)\n\t\t}\n\t\treturn f(sc, args)\n\tcase sym:\n\t\treturn sc.lookup(e)\n\t}\n\treturn e\n}",
"func (r *Resolver) Evaluate(args struct{ Expr string }) (Result, error) {\n\tvar result Result\n\tamount, err := calc.CalculateAmount(args.Expr)\n\tif err != nil {\n\t\treturn result, err\n\t}\n\n\tunitName := amount.Units\n\tunit, err := NewUnit(unitName)\n\tif err != nil {\n\t\treturn result, err\n\t}\n\n\tresult = Result{amount.Value, unit, args.Expr}\n\tlog.Info(fmt.Sprintf(\"evaluate(%s) = %.2f %s\", args.Expr, result.value, result.units.pluralName))\n\treturn result, nil\n}",
"func (m *Message) Eval(vm *VM, locals Interface) (result Interface) {\n\treturn m.Send(vm, locals, locals)\n}",
"func (session Runtime) Evaluate(code string, async bool, returnByValue bool) (interface{}, error) {\n\tresult, err := session.evaluate(code, session.currentContext(), async, returnByValue)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn result.Value, nil\n}",
"func Evaluate(query string, values map[string]interface{}) interface{} {\n\ttokens := Parser(query)\n\trpn := ToPostfix(tokens)\n\touts := SolvePostfix(rpn, values)\n\treturn outs\n}",
"func (r *RegexpOp) Evaluate(left, right EvalResult) (EvalResult, error) {\n\tpanic(\"implement me\")\n}",
"func EvaluateFunc(exp string, f string) string {\n exp = strings.Replace(exp, \"~\" + f + \"[\", \"~1\" + f +\"[\", 1)\n for strings.Index(exp, f + \"[\") != -1 {\n var openFunc int = strings.Index(exp, f + \"[\")\n var closeFunc int = FindClosingBracket(exp, openFunc + len(f))\n var wholeFunc string = exp[openFunc:closeFunc + 1]\n if openFunc != 0 && strings.Index(numbers, exp[openFunc-1:openFunc]) != -1 {\n exp = exp[0:openFunc] + \"*\" + exp[openFunc:]\n openFunc++\n closeFunc++\n }\n if closeFunc != len(exp)-1 && strings.Index(numbers, exp[closeFunc+1:closeFunc+2]) != -1 {\n exp = exp[0:closeFunc+1] + \"*\" + exp[closeFunc+1:]\n }\n var inFunc string = wholeFunc[len(f)+1:len(wholeFunc)-1]\n var result string = Pemdas(inFunc)\n if f == \"abs\" {\n if result[0] == '~' {\n result = result[1:]\n }\n } else if f == \"sin\" {\n result = fmt.Sprintf(\"%f\", math.Sin(NotateToDouble(result)))\n } else if f == \"cos\" {\n result = fmt.Sprintf(\"%f\", math.Cos(NotateToDouble(result)))\n } else if f == \"tan\" {\n result = fmt.Sprintf(\"%f\", math.Tan(NotateToDouble(result)))\n }\n exp = exp[0:openFunc] + NegativeNotate(result) + exp[closeFunc + 1:]\n }\n return exp\n}",
"func Evaluate(item interface{}, passedContext interface{}) map[string]float64 {\n\t//fmt.Fprintf(os.Stderr, \"eval:: %v %T\\n\", item, item)\n\n\tif item != nil {\n\t\tswitch passedContext.(type) {\n\t\tcase *DimContext:\n\t\t\t{\n\t\t\t\t//fmt.Fprintf(os.Stderr, \"here before processMember %v\\n\", item)\n\t\t\t\tprocessMember(item, passedContext)\n\t\t\t\t//fmt.Fprintf(os.Stderr, \"here after processMember %v %T\\n\", item, item)\n\t\t\t}\n\t\t}\n\t\tswitch v := item.(type) {\n\t\tcase hasResults:\n\t\t\t{\n\t\t\t\treturn v.Results()\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}",
"func (s *Subtraction) Evaluate(left, right EvalResult) (EvalResult, error) {\n\treturn subtractNumericWithError(left, right)\n}",
"func (me *lmsEvaluator) Eval(game c4.State, p c4.Piece) float64 {\n\tvar bestScore, knownScore float64\n\n\t// Copy out the coefficients to reduce lock contention\n\tme.coeffsMutex.RLock()\n\tmyCoeffs := me.Coeffs\n\tme.coeffsMutex.RUnlock()\n\n\t// Estimate the game state's utility\n\tapproxScore, currentFeatures := BetterEval(myCoeffs, game, p)\n\n\t// Try to get a better estimate of the utility by looking one move ahead\n\t// with proven weights\n\tif game.GetTurn() != p {\n\t\tbestScore = math.Inf(-1)\n\t} else {\n\t\tbestScore = math.Inf(+1)\n\t}\n\n\tfor col := 0; col < c4.MaxColumns; col++ {\n\t\tif nextBoard, err := game.AfterMove(game.GetTurn(),\n\t\t\tcol); err == nil {\n\t\t\tnextScore, _ := BetterEval(\n\t\t\t\tmyCoeffs,\n\t\t\t\tnextBoard,\n\t\t\t\tnextBoard.GetTurn())\n\n\t\t\tif game.GetTurn() != p {\n\t\t\t\tif nextScore > bestScore {\n\t\t\t\t\tbestScore = nextScore\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tif nextScore < bestScore {\n\t\t\t\t\tbestScore = nextScore\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\t// Use the evolved weights as a reference to prevent divergence\n\tknownScore, _ = BetterEval([6]float64{\n\t\t0.2502943943301069,\n\t\t-0.4952316649483701,\n\t\t0.3932539700819625,\n\t\t-0.2742452616759889,\n\t\t0.4746881137884282,\n\t\t0.2091091127191147}, game, p)\n\n\t// Change the coefficients according to the error\n\tme.count++\n\tif me.count%100000 == 0 {\n\t\tfmt.Println(me.count)\n\t\tfmt.Println(me.Coeffs)\n\t}\n\t// if !math.IsInf(bestScore, 0) {\n\t// \tfor j := 0; j < 6; j++ {\n\t// \t\tme.Coeffs[j] +=\n\t// \t\t\tmu * (bestScore - approxScore) * currentFeatures[j]\n\t// \t}\n\t// }\n\tgo func() {\n\t\tif !math.IsInf(bestScore, 0) {\n\t\t\tme.coeffsMutex.Lock()\n\t\t\tfor j := 0; j < 6; j++ {\n\t\t\t\tme.Coeffs[j] +=\n\t\t\t\t\tmu * (knownScore - approxScore) * currentFeatures[j]\n\t\t\t}\n\t\t\tme.coeffsMutex.Unlock()\n\t\t}\n\t}()\n\n\treturn approxScore\n}",
"func (a *Addition) Evaluate(left, right EvalResult) (EvalResult, error) {\n\treturn addNumericWithError(left, right)\n}",
"func (dn DoNothing) Evaluate(environment map[string]expr.Expression) map[string]expr.Expression {\n\treturn environment\n}",
"func (d Decomposition) Eval() *big.Int {\n\tresult := big.NewInt(0)\n\tfor _, m := range d.monomes {\n\t\tresult.Add(result, m.eval())\n\t}\n\treturn result\n}",
"func (sch *Scheduler) MrwsEvaluate(mrwsUsed *[PHYNUM][DIMENSION + 1]float64, podReq PodRequest, weightPod *[DIMENSION + 1]float64) int {\n\tvar fitInd int\n\tfitInd = -1\n\t// get the physical resource and pod idle rate\n\tvar mrwsIdle [PHYNUM][DIMENSION]float64\n\tfor i := 0; i < PHYNUM; i++ {\n\t\tfor j := 0; j < DIMENSION; j++ {\n\t\t\tmrwsIdle[i][j] = (sch.reTotal[j] - mrwsUsed[i][j] - podReq.resReq[j]) / sch.reTotal[j]\n\t\t}\n\t}\n\tvar podSum float64\n\tvar podIdle [PHYNUM]float64\n\tfor i := 0; i < PHYNUM; i++ {\n\t\tpodSum = podSum + mrwsUsed[i][DIMENSION]\n\t}\n\tfor i := 0; i < PHYNUM; i++ {\n\t\tpodIdle[i] = 1.0 - mrwsUsed[i][DIMENSION]/podSum\n\n\t}\n\n\t// get the satisfy physical machine index and calculate the max value node\n\tsaInd := sch.ResourceSatisfy(&mrwsIdle)\n\tif saInd != nil {\n\t\tsaLen := len(saInd)\n\t\t//calculate the satisfy index physical machine podMean and resMean\n\n\t\tvar resVal [DIMENSION]float64 // cal the sum and mean value\n\t\tvar resMean [DIMENSION]float64\n\t\tvar podVal float64\n\t\tfor i := 0; i < saLen; i++ {\n\t\t\tpodVal = podVal + podIdle[saInd[i]]\n\t\t\tfor j := 0; j < DIMENSION; j++ {\n\t\t\t\tresVal[j] = resVal[j] + mrwsIdle[saInd[i]][j]\n\t\t\t}\n\t\t}\n\t\tpodMean := podVal / podSum\n\t\tfor j := 0; j < DIMENSION; j++ {\n\t\t\tresMean[j] = resVal[j] / (float64)(saLen)\n\t\t}\n\n\t\tvar maxScore float64\n\t\tmaxScore = -1.0\n\t\tfitInd = saInd[0]\n\t\tvar bi, vi float64\n\t\tfor i := 0; i < saLen; i++ {\n\t\t\tvi = 0.0\n\t\t\tbi = 0.0\n\t\t\tfor j := 0; j < DIMENSION; j++ {\n\t\t\t\tvi = vi + mrwsIdle[saInd[i]][j]*weightPod[j]\n\t\t\t\tbi = bi + (mrwsIdle[saInd[i]][j]/resMean[j])*weightPod[DIMENSION]\n\t\t\t}\n\t\t\tvi = vi + podIdle[i]*weightPod[DIMENSION]\n\t\t\tbi = bi + (podIdle[i]/podMean)*weightPod[DIMENSION]\n\t\t\t// fmt.Printf(\"vi and bi %.3f %.3f \\n\", vi, bi)\n\t\t\t// bi = 0.0\n\t\t\tscoreVi := vi + bi\n\t\t\tif scoreVi > maxScore {\n\t\t\t\tfitInd = saInd[i]\n\t\t\t\tmaxScore = scoreVi\n\t\t\t}\n\t\t}\n\t\t// fmt.Printf(\"%.3f %d \\n\", maxScore, fitInd)\n\t}\n\treturn fitInd\n}",
"func (m monome) eval() *big.Int {\n\tc := big.NewInt(int64(m.coeff))\n\tb := big.NewInt(int64(m.base))\n\n\tresult := big.NewInt(0)\n\tresult.Exp(b, m.exponent.Eval(), nil)\n\tresult.Mul(c, result)\n\treturn result\n}",
"func (k *VrfablePrivateKey) Evaluate(m []byte) (index [32]byte, proof []byte) {\n\tnilIndex := [32]byte{}\n\t// Prover chooses r <-- [1,N-1]\n\tr, _, _, err := generateKeyFromCurve(curve, rand.Reader)\n\tif err != nil {\n\t\treturn nilIndex, nil\n\t}\n\tri := new(big.Int).SetBytes(r)\n\n\t// H = H1(m)\n\tHx, Hy := H1(m)\n\n\t// VRF_k(m) = [k]H\n\tsHx, sHy := curve.ScalarMult(Hx, Hy, k.D.Bytes())\n\n\t// vrf := elliptic.Marshal(curve, sHx, sHy) // 65 bytes.\n\tvrf := curve.Marshal(sHx, sHy) // 65 bytes.\n\n\t// G is the base point\n\t// s = H2(G, H, [k]G, VRF, [r]G, [r]H)\n\trGx, rGy := curve.ScalarBaseMult(r)\n\trHx, rHy := curve.ScalarMult(Hx, Hy, r)\n\tvar b bytes.Buffer\n\tb.Write(curve.Marshal(params.Gx, params.Gy))\n\tb.Write(curve.Marshal(Hx, Hy))\n\tb.Write(curve.Marshal(k.PublicKey.X, k.PublicKey.Y))\n\tb.Write(vrf)\n\tb.Write(curve.Marshal(rGx, rGy))\n\tb.Write(curve.Marshal(rHx, rHy))\n\ts := H2(b.Bytes())\n\n\t// t = r−s*k mod N\n\tt := new(big.Int).Sub(ri, new(big.Int).Mul(s, k.D))\n\tt.Mod(t, params.N)\n\n\t// Index = H(vrf)\n\tindex = sha256.Sum256(vrf)\n\n\t// Write s, t, and vrf to a proof blob. Also write leading zeros before s and t\n\t// if needed.\n\tvar buf bytes.Buffer\n\tbuf.Write(make([]byte, 32-len(s.Bytes())))\n\tbuf.Write(s.Bytes())\n\tbuf.Write(make([]byte, 32-len(t.Bytes())))\n\tbuf.Write(t.Bytes())\n\tbuf.Write(vrf)\n\n\treturn index, buf.Bytes()\n}",
"func Eval(ctx context.Context, e Expr, vs Values) (interface{}, error) {\r\n\tfn, err := FuncOf(ctx, e, vs)\r\n\tif err != nil {\r\n\t\treturn nil, err\r\n\t}\r\n\treturn fn.Call(ctx, vs)\r\n}",
"func (ev *evaluator) eval(expr Expr) model.Value {\n\t// This is the top-level evaluation method.\n\t// Thus, we check for timeout/cancellation here.\n\tif err := contextDone(ev.ctx, \"expression evaluation\"); err != nil {\n\t\tev.error(err)\n\t}\n\n\tswitch e := expr.(type) {\n\tcase *AggregateExpr:\n\t\tvector := ev.evalVector(e.Expr)\n\t\treturn ev.aggregation(e.Op, e.Grouping, e.Without, e.KeepCommonLabels, e.Param, vector)\n\n\tcase *BinaryExpr:\n\t\tlhs := ev.evalOneOf(e.LHS, model.ValScalar, model.ValVector)\n\t\trhs := ev.evalOneOf(e.RHS, model.ValScalar, model.ValVector)\n\n\t\tswitch lt, rt := lhs.Type(), rhs.Type(); {\n\t\tcase lt == model.ValScalar && rt == model.ValScalar:\n\t\t\treturn &model.Scalar{\n\t\t\t\tValue: scalarBinop(e.Op, lhs.(*model.Scalar).Value, rhs.(*model.Scalar).Value),\n\t\t\t\tTimestamp: ev.Timestamp,\n\t\t\t}\n\n\t\tcase lt == model.ValVector && rt == model.ValVector:\n\t\t\tswitch e.Op {\n\t\t\tcase itemLAND:\n\t\t\t\treturn ev.vectorAnd(lhs.(vector), rhs.(vector), e.VectorMatching)\n\t\t\tcase itemLOR:\n\t\t\t\treturn ev.vectorOr(lhs.(vector), rhs.(vector), e.VectorMatching)\n\t\t\tcase itemLUnless:\n\t\t\t\treturn ev.vectorUnless(lhs.(vector), rhs.(vector), e.VectorMatching)\n\t\t\tdefault:\n\t\t\t\treturn ev.vectorBinop(e.Op, lhs.(vector), rhs.(vector), e.VectorMatching, e.ReturnBool)\n\t\t\t}\n\t\tcase lt == model.ValVector && rt == model.ValScalar:\n\t\t\treturn ev.vectorScalarBinop(e.Op, lhs.(vector), rhs.(*model.Scalar), false, e.ReturnBool)\n\n\t\tcase lt == model.ValScalar && rt == model.ValVector:\n\t\t\treturn ev.vectorScalarBinop(e.Op, rhs.(vector), lhs.(*model.Scalar), true, e.ReturnBool)\n\t\t}\n\n\tcase *Call:\n\t\treturn e.Func.Call(ev, e.Args)\n\n\tcase *MatrixSelector:\n\t\treturn ev.matrixSelector(e)\n\n\tcase *NumberLiteral:\n\t\treturn &model.Scalar{Value: e.Val, Timestamp: ev.Timestamp}\n\n\tcase *ParenExpr:\n\t\treturn ev.eval(e.Expr)\n\n\tcase *StringLiteral:\n\t\treturn &model.String{Value: e.Val, Timestamp: ev.Timestamp}\n\n\tcase *UnaryExpr:\n\t\tse := ev.evalOneOf(e.Expr, model.ValScalar, model.ValVector)\n\t\t// Only + and - are possible operators.\n\t\tif e.Op == itemSUB {\n\t\t\tswitch v := se.(type) {\n\t\t\tcase *model.Scalar:\n\t\t\t\tv.Value = -v.Value\n\t\t\tcase vector:\n\t\t\t\tfor i, sv := range v {\n\t\t\t\t\tv[i].Value = -sv.Value\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn se\n\n\tcase *VectorSelector:\n\t\treturn ev.vectorSelector(e)\n\t}\n\tpanic(fmt.Errorf(\"unhandled expression of type: %T\", expr))\n}",
"func (a *AST) Evaluate(table ...map[string]ContextVar) *Expression {\n\tt := make(map[string]ContextVar)\n\tif len(table) > 0 && table[0] != nil {\n\t\tt = table[0]\n\t}\n\treturn a.Root.Evaluate(t)\n}",
"func TestEvaluatorValues(t *testing.T) {\n\tvar values = make(map[string]int)\n\tvalues[\"x\"] = 1\n\tvalues[\"y\"] = 2\n\texpression := \"x+y*2\"\n\n\tresult, err := evaluator.Evaluate(expression, values)\n\n\tassert.Nil(t, err, \"unexpected error\")\n\tassert.Equal(t, 5, result)\n}",
"func (l *LikeOp) Evaluate(left, right EvalResult) (EvalResult, error) {\n\tpanic(\"implement me\")\n}",
"func (d *Division) Evaluate(left, right EvalResult) (EvalResult, error) {\n\treturn divideNumericWithError(left, right)\n}",
"func (m MultilinearByValues) Eval(cs *frontend.ConstraintSystem, xs []frontend.Variable) frontend.Variable {\n\tf := m.DeepCopy()\n\tfor _, x := range xs {\n\t\t// Repeatedly fold the table\n\t\tf.Fold(cs, x)\n\t}\n\treturn f.Table[0]\n}",
"func (net *Network) Evaluate(inputValues []float64) float64 {\n\tinputLength := len(inputValues)\n\tfor i, n := range net.InputNodes {\n\t\tif i < inputLength {\n\t\t\tn.SetValue(inputValues[i])\n\t\t}\n\t}\n\tmaxIterationCounter := net.maxIterations\n\tif maxIterationCounter == 0 {\n\t\t// If max iterations has not been configured, use 100\n\t\tmaxIterationCounter = 100\n\t}\n\tresult, _ := net.OutputNode.evaluate(net.Weight, &maxIterationCounter)\n\treturn result\n}",
"func (c Chain) Evaluate(input Input) (string, string, bool) {\n\tfor _, policyFunc := range c {\n\t\treason, message, violationFound := policyFunc(input)\n\t\tif violationFound {\n\t\t\treturn reason, message, violationFound\n\t\t}\n\t}\n\treturn \"\", \"\", false\n}",
"func (c Chain) Evaluate(input Input) (string, string, bool) {\n\tfor _, policyFunc := range c {\n\t\treason, message, violationFound := policyFunc(input)\n\t\tif violationFound {\n\t\t\treturn reason, message, violationFound\n\t\t}\n\t}\n\treturn \"\", \"\", false\n}",
"func (s *CallLuaExpr) Evaluate(ns NS) interface{} {\n\tfunName := s.fun.Evaluate(ns).(String)\n\t// argStr := s.arg.Evaluate(ns).(String)\n\n\targs := make([]interface{}, len(s.argList))\n\tfor i, expr := range s.argList {\n\t\targs[i] = expr.Evaluate(ns)\n\t}\n\n\t// if err := L.DoString(LuaFun); err != nil {\n\t// \tpanic(err)\n\n\t// }\n\n\targList := []string{}\n\tfor _, v := range args {\n\t\tvs := fmt.Sprintf(\"%s\", v)\n\t\targList = append(argList, vs)\n\t}\n\n\t// r := callLua(fmt.Sprintf(\"%s\", funName), fmt.Sprintf(\"%s\", argStr))\n\t// r := callLua(fmt.Sprintf(\"%s\", funName), fmt.Sprintf(\"%s\", argStr))\n\t// r := callLua(fmt.Sprintf(\"%s\", funName), argList...)\n\tr := LuaPools.CallFunction(fmt.Sprintf(\"%s\", funName), argList...)\n\treturn String(r)\n}",
"func (i *InOp) Evaluate(left, right EvalResult) (EvalResult, error) {\n\tpanic(\"implement me\")\n}",
"func EvaluateView(mod *sysl.Module, appName, viewName string, s Scope) *sysl.Value {\n\ttxApp := mod.Apps[appName]\n\tview := txApp.Views[viewName]\n\tif view.Expr.Type == nil {\n\t\tview.Expr.Type = view.RetType\n\t}\n\treturn Eval(txApp, s, view.Expr)\n}",
"func (this *Element) Evaluate(item value.Value, context Context) (value.Value, error) {\n\treturn this.BinaryEval(this, item, context)\n}",
"func (c *Context) Evaluate(language, code string, stdins []string) ([]string, Message) {\n\tstdinGlob := glob(stdins)\n\tresults, msg := c.run(language, code, stdinGlob)\n\n\treturn unglob(results), msg\n}",
"func FunctionEvaluator(fn string) (Function, error) {\n\te := meval.New()\n\te.SetVar(\"x\", 0)\n\t//Test if function is valid\n\tif _, err := e.Eval(fn); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn func(x float64) float64 {\n\t\te.SetVar(\"x\", x)\n\t\tv, err := e.Eval(fn)\n\n\t\t//Eval method should error may not depend on the value of x\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\treturn v\n\t}, nil\n}",
"func ExampleEval() {\n\tfmt.Println(Eval(\"5\"))\n\tfmt.Println(Eval(\"1 + 2\"))\n\tfmt.Println(Eval(\"1 - 2 + 3\"))\n\tfmt.Println(Eval(\"3 * ( 3 + 1 * 3 ) / 2\"))\n\tfmt.Println(Eval(\"3 * ( ( 3 + 1 ) * 3 ) / 2\"))\n\t//OutPut:\n\t//5\n\t//3\n\t//2\n\t//9\n\t//18\n}",
"func Evaluate(thing interface{}, env Environment) (error, Value, Environment) {\n\tswitch thing.(type) {\n\tcase Value:\n\t\treturn EvaluateValue(thing.(Value), env)\n\tcase SExpression:\n\t\tsexp := thing.(SExpression)\n\t\tif isSpecialForm(sexp.FormName.Contained) {\n\t\t\treturn EvaluateSpecialForm(sexp, env)\n\t\t} else {\n\t\t\treturn EvaluateSexp(thing.(SExpression), env)\n\t\t}\n\tdefault:\n\t\treturn errors.New(fmt.Sprintf(\"No way to evaluate %v\\n\", thing)), Value{}, env\n\t}\n}",
"func (ev *evaluator) evalMatrix(e Expr) matrix {\n\tval := ev.eval(e)\n\tmat, ok := val.(matrix)\n\tif !ok {\n\t\tev.errorf(\"expected range vector but got %s\", documentedType(val.Type()))\n\t}\n\treturn mat\n}",
"func (e *Evaluator) Evaluate(node ast.Node, env *object.Environment) object.Object {\n\te.Ctxt = node.Context()\n\tswitch node.(type) {\n\tcase *ast.Program:\n\t\tres := &object.StmtResults{}\n\t\tres.Results = []object.Object{}\n\n\t\t// adding statements\n\t\tfor _, stmt := range node.(*ast.Program).Statements {\n\t\t\tif ret, ok := stmt.(*ast.ReturnStatement); ok {\n\t\t\t\treturn e.Evaluate(ret, env)\n\t\t\t}\n\t\t\tresult := e.Evaluate(stmt, env)\n\t\t\tres.Results = append(res.Results, result)\n\t\t}\n\n\t\t// adding functions\n\t\t//todo: this should function differently than closures\n\t\tfor _, fn := range node.(*ast.Program).Functions {\n\t\t\tbody := fn.Body\n\t\t\tparams := fn.Params\n\t\t\tenv.Data[fn.Name.Value] = &object.Function{\n\t\t\t\tParams: params,\n\t\t\t\tBody: body,\n\t\t\t\tEnv: env,\n\t\t\t}\n\t\t}\n\n\t\t//todo: adding classes\n\n\t\treturn res\n\n\tcase ast.Statement:\n\t\tstmt := node.(ast.Statement)\n\n\t\tswitch node.(ast.Statement).(type) {\n\t\tcase *ast.LetStatement:\n\t\t\tletstmt := stmt.(*ast.LetStatement)\n\t\t\tval := e.Evaluate(letstmt.Value, env)\n\t\t\tenv.Set(letstmt.Name.Value, val)\n\t\t\treturn NULL\n\n\t\tcase *ast.ExprStatement:\n\t\t\texpr := stmt.(*ast.ExprStatement)\n\t\t\treturn e.Evaluate(expr.Expression, env)\n\n\t\tcase *ast.ReturnStatement:\n\t\t\tretstmt := stmt.(*ast.ReturnStatement)\n\t\t\tres := e.Evaluate(retstmt.Value, env)\n\t\t\treturn &object.Return{Inner: res}\n\n\t\tcase *ast.WhileStatement:\n\t\t\te.loopcount++\n\t\t\twhilestmt := stmt.(*ast.WhileStatement)\n\n\t\t\tvar result object.Object\n\n\t\t\tfor {\n\t\t\t\tval := e.Evaluate(whilestmt.Condition, env)\n\t\t\t\tif !evaluateTruthiness(val) {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tresult = e.evalBlockStmt(whilestmt.Body, env)\n\t\t\t\tif object.IsErr(result) || object.IsBreak(result) {\n\t\t\t\t\tif object.IsBreak(result) {\n\t\t\t\t\t\te.loopcount--\n\t\t\t\t\t\treturn NULL\n\t\t\t\t\t}\n\t\t\t\t\treturn result\n\t\t\t\t}\n\t\t\t}\n\n\t\t\te.loopcount--\n\t\t\treturn result\n\n\t\tcase *ast.BreakStatement:\n\t\t\tif e.loopcount == 0 {\n\t\t\t\treturn &object.Exception{\n\t\t\t\t\tMsg: \"Cannot use break outside of loop\",\n\t\t\t\t\tCon: node.(ast.Statement).Context(),\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn &object.Break{}\n\n\t\tcase *ast.BlockStatement:\n\t\t\tblkstmt := stmt.(*ast.BlockStatement)\n\t\t\treturn e.evalBlockStmt(blkstmt, env)\n\n\t\tdefault:\n\t\t\treturn NULL\n\t\t}\n\n\tcase ast.Expression:\n\t\texpr := node.(ast.Expression)\n\n\t\tswitch node.(ast.Expression).(type) {\n\t\tcase *ast.Identifier:\n\t\t\tident := expr.(*ast.Identifier)\n\t\t\tif data, ok := env.Get(ident.Value); ok {\n\t\t\t\treturn data\n\t\t\t}\n\t\t\tif bltn, ok := builtins[ident.Value]; ok {\n\t\t\t\treturn bltn\n\t\t\t}\n\t\t\treturn &object.Exception{\n\t\t\t\tMsg: fmt.Sprintf(\"Could not find symbol %s\", ident.Value),\n\t\t\t\tCon: ident.Context(),\n\t\t\t}\n\n\t\tcase *ast.PrefixExpr:\n\t\t\tpexpr := expr.(*ast.PrefixExpr)\n\t\t\treturn e.evalPrefixExpr(pexpr, env)\n\n\t\tcase *ast.InfixExpr:\n\t\t\tiexpr := expr.(*ast.InfixExpr)\n\t\t\treturn e.evalInfixExpr(iexpr, env)\n\n\t\tcase *ast.IfExpression:\n\t\t\tifexpr := expr.(*ast.IfExpression)\n\t\t\tcondition := e.Evaluate(ifexpr.Condition, env)\n\t\t\tif condition == nil {\n\t\t\t\treturn &object.Exception{\n\t\t\t\t\tMsg: \"If condition returned nil\",\n\t\t\t\t\tCon: ifexpr.Context(),\n\t\t\t\t}\n\t\t\t}\n\t\t\tif evaluateTruthiness(condition) {\n\t\t\t\treturn e.Evaluate(ifexpr.Result, env)\n\t\t\t}\n\t\t\tif ifexpr.Alternative 
!= nil {\n\t\t\t\tswitch ifexpr.Alternative.(type) {\n\t\t\t\tcase *ast.BlockStatement:\n\t\t\t\t\treturn e.Evaluate(ifexpr.Alternative.(*ast.BlockStatement), env)\n\t\t\t\tcase *ast.IfExpression:\n\t\t\t\t\treturn e.Evaluate(ifexpr.Alternative.(*ast.IfExpression), env)\n\t\t\t\tdefault:\n\t\t\t\t\treturn &object.Exception{\n\t\t\t\t\t\tMsg: \"Invalid else branch\",\n\t\t\t\t\t\tCon: ifexpr.Alternative.Context(),\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\tcase *ast.FnLiteral:\n\t\t\tfnlit := expr.(*ast.FnLiteral)\n\t\t\tparams := fnlit.Params\n\t\t\tbody := fnlit.Body\n\t\t\treturn &object.Function{Params: params, Env: env, Body: body}\n\n\t\tcase *ast.FunctionCall:\n\t\t\t// asserting type\n\t\t\tfncall := expr.(*ast.FunctionCall)\n\n\t\t\t// resolving to object\n\t\t\tfunction := e.Evaluate(fncall.Ident, env)\n\t\t\tif object.IsErr(function) {\n\t\t\t\treturn function\n\t\t\t}\n\n\t\t\targs := e.evalExpressions(fncall.Params, env)\n\t\t\tif len(args) == 1 && object.IsErr(args[0]) {\n\t\t\t\treturn args[0]\n\t\t\t}\n\n\t\t\treturn e.applyFunction(function, args)\n\n\t\tcase *ast.DotExpression:\n\t\t\t//todo\n\t\t\treturn &object.Exception{\n\t\t\t\tMsg: \"DotExpr: unimplemented\",\n\t\t\t\tCon: node.Context(),\n\t\t\t}\n\n\t\tcase *ast.Int:\n\t\t\tintexpr := node.(ast.Expression).(*ast.Int)\n\t\t\treturn &object.Integer{Value: intexpr.Inner}\n\t\tcase *ast.Flt:\n\t\t\tfltexpr := node.(ast.Expression).(*ast.Flt)\n\t\t\treturn &object.Float{Value: fltexpr.Inner}\n\t\tcase *ast.Str:\n\t\t\tstrexpr := node.(ast.Expression).(*ast.Str)\n\t\t\treturn &object.String{Value: strexpr.Inner}\n\t\tcase *ast.Bool:\n\t\t\tboolexpr := node.(ast.Expression).(*ast.Bool)\n\t\t\treturn nativeBooltoObj(boolexpr.Inner)\n\t\tcase *ast.Array:\n\t\t\tarray := node.(ast.Expression).(*ast.Array)\n\t\t\tarr := &object.Array{}\n\n\t\t\t// preallocating so we don't have to waste cycles\n\t\t\t// reallocating every time we append\n\t\t\telements := make([]object.Object, 0, len(array.Elements))\n\n\t\t\tfor _, elem := range array.Elements {\n\t\t\t\telements = append(elements, e.Evaluate(elem, env))\n\t\t\t}\n\t\t\tarr.Elements = elements\n\n\t\t\treturn arr\n\n\t\tcase *ast.Map:\n\t\t\thash := node.(ast.Expression).(*ast.Map)\n\t\t\tnewmap := &object.Map{}\n\t\t\tnewmap.Elements = make(map[object.HashKey]object.Object)\n\n\t\t\tfor key, val := range hash.Elements {\n\t\t\t\tnkey, nval := e.Evaluate(key, env), e.Evaluate(val, env)\n\n\t\t\t\tif object.IsErr(nkey) {\n\t\t\t\t\treturn nkey\n\t\t\t\t}\n\t\t\t\tif object.IsErr(nval) {\n\t\t\t\t\treturn nval\n\t\t\t\t}\n\n\t\t\t\thashable, ok := nkey.(object.Hashable)\n\n\t\t\t\tif !ok {\n\t\t\t\t\treturn &object.Exception{\n\t\t\t\t\t\tMsg: fmt.Sprintf(\"Cannot use type %T as key for Map\", nkey),\n\t\t\t\t\t\tCon: hash.Context(),\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tnewmap.Elements[hashable.HashKey()] = nval\n\t\t\t}\n\n\t\t\treturn newmap\n\n\t\tcase *ast.IndexExpr:\n\t\t\tidx := node.(ast.Expression).(*ast.IndexExpr)\n\t\t\treturn e.evalIndexExpr(idx, env)\n\n\t\tdefault:\n\t\t\treturn NULL\n\t\t}\n\tdefault:\n\t\treturn &object.Exception{\n\t\t\tMsg: \"Unimplemented type\",\n\t\t\tCon: node.Context(),\n\t\t}\n\t}\n\treturn &object.Exception{\n\t\tMsg: fmt.Sprintf(\"Evaluate: unreachable code, got %T\", node),\n\t\tCon: node.Context(),\n\t}\n}",
"func (p Print) Evaluate(vars map[string]interface{}, ctx interface{}, funcs FunctionMap, quotes []string) (map[string]interface{}, interface{}, error) {\n\tvars, v, err := p.Node.Evaluate(vars, ctx, funcs, quotes)\n\tif err != nil {\n\t\treturn vars, ctx, err\n\t}\n\tfmt.Println(TryFormatLiteral(v, quotes, false, 0))\n\treturn vars, ctx, nil\n}",
"func Evaluate(tpl string, data interface{}) (string, error) {\n\tt, err := Parse(tpl)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn Execute(t, data)\n}",
"func invoke(f func(float64, float64) float64) float64 {\n\treturn f(0.23, 0.9)\n}",
"func (z *Int) Exp(x, y, m *Int) *Int {}",
"func (f *Function) Eval(inputs ...interface{}) (args.Const, error) {\n\tlenInputs := len(inputs)\n\tif lenInputs != f.numVars {\n\t\treturn nil, errors.New(\"Number of inputs is not equal to the number of variables in function\")\n\t}\n\n\tvar operand1 args.Const\n\tvar operand2 args.Const\n\tvar operandStack []args.Const\n\n\ti := 0\n\tfor i < len(f.Args) {\n\t\tif f.typeInput(i) == args.Constant || f.typeInput(i) == args.Variable {\n\t\t\tvariable, err := f.getVar(i)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tif lenInputs != 0 {\n\t\t\t\toperand, err := variable.Eval(inputs[f.varNum[variable]])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\n\t\t\t\toperandStack = append(operandStack, operand)\n\t\t\t} else {\n\t\t\t\t// If length inputs is 0, then all variables must be constant.\n\t\t\t\t// This code assumes variable is a constant and so uses 0 as an input\n\t\t\t\t// to MustEval as it will never fail as the input does not matter for constants\n\t\t\t\toperandStack = append(operandStack, variable.MustEval(0))\n\t\t\t}\n\t\t} else if f.typeInput(i) == args.Operation {\n\t\t\toperation, err := f.getOp(i)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tif h, ok := unaryFuncs[operation]; ok {\n\t\t\t\tif len(operandStack) == 0 {\n\t\t\t\t\treturn nil, errors.New(\"Not enough operands\")\n\t\t\t\t}\n\n\t\t\t\toperand1, operandStack = operandStack[len(operandStack)-1], operandStack[:len(operandStack)-1]\n\t\t\t\tresult, err := h(operand1)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\n\t\t\t\toperandStack = append(operandStack, result)\n\t\t\t} else if h, ok := binaryFuncs[operation]; ok {\n\t\t\t\tif len(operandStack) < 2 {\n\t\t\t\t\treturn nil, errors.New(\"Not enough operands\")\n\t\t\t\t}\n\n\t\t\t\toperand2, operandStack = operandStack[len(operandStack)-1], operandStack[:len(operandStack)-1]\n\t\t\t\toperand1, operandStack = operandStack[len(operandStack)-1], operandStack[:len(operandStack)-1]\n\t\t\t\tresult, err := h(operand1, operand2)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\n\t\t\t\toperandStack = append(operandStack, result)\n\t\t\t} else {\n\t\t\t\treturn nil, errors.New(\"Operation not supported\")\n\t\t\t}\n\t\t}\n\t\ti++\n\t}\n\n\tif len(operandStack) > 1 {\n\t\treturn nil, errors.New(\"To many operands left over after calculation\")\n\t}\n\n\treturn operandStack[0], nil\n}",
"func Evaluate(expr string, contextVars map[string]logol.Match) bool {\n\tlogger.Debugf(\"Evaluate expression: %s\", expr)\n\n\tre := regexp.MustCompile(\"[$@#]+\\\\w+\")\n\tres := re.FindAllString(expr, -1)\n\t// msg, _ := json.Marshal(contextVars)\n\t// logger.Errorf(\"CONTEXT: %s\", msg)\n\tparameters := make(map[string]interface{}, 8)\n\tvarIndex := 0\n\tfor _, val := range res {\n\t\tt := strconv.Itoa(varIndex)\n\t\tvarName := \"VAR\" + t\n\t\tr := strings.NewReplacer(val, varName)\n\t\texpr = r.Replace(expr)\n\t\tvarIndex++\n\t\tcValue, cerr := getValueFromExpression(val, contextVars)\n\t\tif cerr {\n\t\t\tlogger.Debugf(\"Failed to get value from expression %s\", val)\n\t\t\treturn false\n\t\t}\n\t\tparameters[varName] = cValue\n\t}\n\tlogger.Debugf(\"New expr: %s with params %v\", expr, parameters)\n\n\texpression, err := govaluate.NewEvaluableExpression(expr)\n\tif err != nil {\n\t\tlogger.Errorf(\"Failed to evaluate expression %s\", expr)\n\t\treturn false\n\t}\n\tresult, _ := expression.Evaluate(parameters)\n\tif result == true {\n\t\treturn true\n\t}\n\treturn false\n}",
"func Evaluate(input string) (decimal.Decimal, error) {\n\tvar stack []decimal.Decimal\n\tinputs := strings.Split(input, \" \")\n\n\tfor _, command := range inputs {\n\t\tswitch command {\n\t\tcase \"+\", \"-\", \"*\", \"/\", \"%\", \"^\":\n\t\t\tif len(stack) < 2 {\n\t\t\t\treturn decimal.Zero, errors.New(\"stack overflow\")\n\t\t\t}\n\t\t\tlhs := stack[len(stack)-2]\n\t\t\trhs := stack[len(stack)-1]\n\t\t\tstack = stack[:len(stack)-1]\n\t\t\tswitch command {\n\t\t\tcase \"+\":\n\t\t\t\trhs = lhs.Add(rhs)\n\t\t\tcase \"-\":\n\t\t\t\trhs = lhs.Sub(rhs)\n\t\t\tcase \"*\":\n\t\t\t\trhs = lhs.Mul(rhs)\n\t\t\tcase \"/\":\n\t\t\t\trhs = lhs.Div(rhs)\n\t\t\tcase \"%\":\n\t\t\t\trhs = lhs.Mod(rhs)\n\t\t\tcase \"^\":\n\t\t\t\trhs = lhs.Pow(rhs)\n\t\t\t}\n\t\t\tstack[len(stack)-1] = rhs\n\t\tcase \"abs\", \"atan\", \"ceil\", \"cos\", \"floor\", \"neg\", \"sin\", \"tan\":\n\t\t\tif len(stack) < 1 {\n\t\t\t\treturn decimal.Zero, errors.New(\"stack overflow\")\n\t\t\t}\n\t\t\tval := stack[len(stack)-1]\n\t\t\tswitch command {\n\t\t\tcase \"abs\":\n\t\t\t\tval = val.Abs()\n\t\t\tcase \"atan\":\n\t\t\t\tval = val.Atan()\n\t\t\tcase \"ceil\":\n\t\t\t\tval = val.Ceil()\n\t\t\tcase \"cos\":\n\t\t\t\tval = val.Cos()\n\t\t\tcase \"floor\":\n\t\t\t\tval = val.Floor()\n\t\t\tcase \"neg\":\n\t\t\t\tval = val.Neg()\n\t\t\tcase \"sin\":\n\t\t\t\tval = val.Sin()\n\t\t\tcase \"tan\":\n\t\t\t\tval = val.Tan()\n\t\t\t}\n\t\t\tstack[len(stack)-1] = val\n\t\tdefault:\n\t\t\tval, err := decimal.NewFromString(command)\n\t\t\tif err != nil {\n\t\t\t\treturn val, err\n\t\t\t}\n\t\t\tstack = append(stack, val)\n\t\t}\n\t}\n\n\tif len(stack) != 1 {\n\t\treturn decimal.Zero, errors.New(\"unclean stack\")\n\t}\n\treturn stack[0], nil\n}",
"func evaluate(arg1 *vector.Vector, oper *vector.Vector, arg2 *vector.Vector) *vector.Vector {\n\t//Store the operator in a temp string, to save typing it out\n\tvar operS string\n\toperS = oper.At(0).(string)\n\tvar val1, val2 int \n\tvar err1, err2 os.Error\n\tval1, err1 = strconv.Atoi(arg1.At(0).(string))\n\tval2, err2 = strconv.Atoi(arg2.At(0).(string))\n\t//screens for consecutive operators\n\tif(err1 != nil || err2 != nil){\n\t\tfmt.Println(\"expr: syntax error\")\n\t\tos.Exit(-2)\n\t}\n\tvar result int = -1\n\t//Evaluate based on the operator\n\tif operS == \"+\" {\n\t\tresult = val1 + val2\n\t} else if operS == \"-\" {\n\t\tresult = val1 - val2\n\t} else if operS == \"/\" {\n\t\tresult = val1 / val2\n\t} else if operS == \"*\" {\n\t\tresult = val1 * val2\n\t} else if operS == \"%\" {\n\t\tresult = val1 % val2\n\t}\n\t//Clear the arg1 vector and add the result to it, then return\n\t//(saves memory by not creating a new vector)\n\targ1.Cut(0, arg1.Len())\n\targ1.Push(strconv.Itoa(result))\n\treturn arg1\n}",
"func (f *FunctionLike) Eval(env types.Env) (types.Expr, error) {\n\treturn nil, fmt.Errorf(\"interpreterTypes.FunctionLike: cannot eval a function-like: %s\", f)\n}",
"func Eval(input string, context map[string]interface{}) float64 {\n\tnode, err := Parse(input)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\texpr := &expression{node, context}\n\treturn expr.eval(expr.ast)\n}",
"func (n *NullSafeEqualOp) Evaluate(left, right EvalResult) (EvalResult, error) {\n\tpanic(\"implement me\")\n}",
"func (n *NotLikeOp) Evaluate(left, right EvalResult) (EvalResult, error) {\n\tpanic(\"implement me\")\n}",
"func (p *prog) Eval(input any) (v ref.Val, det *EvalDetails, err error) {\n\t// Configure error recovery for unexpected panics during evaluation. Note, the use of named\n\t// return values makes it possible to modify the error response during the recovery\n\t// function.\n\tdefer func() {\n\t\tif r := recover(); r != nil {\n\t\t\tswitch t := r.(type) {\n\t\t\tcase interpreter.EvalCancelledError:\n\t\t\t\terr = t\n\t\t\tdefault:\n\t\t\t\terr = fmt.Errorf(\"internal error: %v\", r)\n\t\t\t}\n\t\t}\n\t}()\n\t// Build a hierarchical activation if there are default vars set.\n\tvar vars interpreter.Activation\n\tswitch v := input.(type) {\n\tcase interpreter.Activation:\n\t\tvars = v\n\tcase map[string]any:\n\t\tvars = activationPool.Setup(v)\n\t\tdefer activationPool.Put(vars)\n\tdefault:\n\t\treturn nil, nil, fmt.Errorf(\"invalid input, wanted Activation or map[string]any, got: (%T)%v\", input, input)\n\t}\n\tif p.defaultVars != nil {\n\t\tvars = interpreter.NewHierarchicalActivation(p.defaultVars, vars)\n\t}\n\tv = p.interpretable.Eval(vars)\n\t// The output of an internal Eval may have a value (`v`) that is a types.Err. This step\n\t// translates the CEL value to a Go error response. This interface does not quite match the\n\t// RPC signature which allows for multiple errors to be returned, but should be sufficient.\n\tif types.IsError(v) {\n\t\terr = v.(*types.Err)\n\t}\n\treturn\n}",
"func (this *ObjectValues) Evaluate(item value.Value, context Context) (value.Value, error) {\n\treturn this.UnaryEval(this, item, context)\n}",
"func (this *ObjectValues) Evaluate(item value.Value, context Context) (value.Value, error) {\n\treturn this.UnaryEval(this, item, context)\n}",
"func (f *function) Eval(a *Apl) (Value, error) {\n\tvar err error\n\tvar l, r Value\n\n\t// The right argument must be evaluated first.\n\t// Otherwise this A←1⋄A+(A←2) evaluates to 3,\n\t// but it should evaluate to 4.\n\tr, err = f.right.Eval(a)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif f.left != nil {\n\n\t\t// Special case for modified assignments.\n\t\t// Defer evaluation of the left argument.\n\t\tif d, ok := f.Function.(*derived); ok && d.op == \"←\" {\n\t\t\tl = assignment{f.left}\n\t\t} else {\n\t\t\tl, err = f.left.Eval(a)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t}\n\n\t// Special case: the last function in a selective assignment uses Select instead of Call.\n\tif _, ok := f.right.(numVar); ok && f.selection {\n\t\tif d, ok := f.Function.(*derived); ok == true {\n\t\t\treturn d.Select(a, l, r)\n\t\t} else if p, ok := f.Function.(Primitive); ok == false {\n\t\t\treturn nil, fmt.Errorf(\"cannot use %T in selective assignment\", f.Function)\n\t\t} else {\n\t\t\treturn p.Select(a, l, r)\n\t\t}\n\t}\n\treturn f.Function.Call(a, l, r)\n}",
"func (this *Self) Evaluate(item value.Value, context Context) (value.Value, error) {\n\treturn item, nil\n}",
"func (u *comboUtility) Evaluate() float64 {\n\t// If the utility was reset, reevaluate it\n\tif u.reset {\n\t\tu.output = math.Min(1, math.Max(0, u.combinator(u.srcA.Evaluate(), u.srcB.Evaluate())))\n\t\tu.reset = false\n\t}\n\t// Return the currently set value\n\treturn u.output\n}",
"func Eval(txApp *sysl.Application, assign Scope, e *sysl.Expr) *sysl.Value {\n\tswitch x := e.Expr.(type) {\n\tcase *sysl.Expr_Transform_:\n\t\treturn evalTransform(txApp, assign, x, e)\n\tcase *sysl.Expr_Binexpr:\n\t\treturn evalBinExpr(txApp, assign, x.Binexpr)\n\tcase *sysl.Expr_Call_:\n\t\treturn evalCall(txApp, assign, x)\n\tcase *sysl.Expr_Name:\n\t\treturn evalName(assign, x)\n\tcase *sysl.Expr_GetAttr_:\n\t\treturn evalGetAttr(txApp, assign, x)\n\tcase *sysl.Expr_Ifelse:\n\t\treturn evalIfelse(txApp, assign, x)\n\tcase *sysl.Expr_Literal:\n\t\treturn x.Literal\n\tcase *sysl.Expr_Set:\n\t\treturn evalSet(txApp, assign, x)\n\tcase *sysl.Expr_List_:\n\t\treturn evalList(txApp, assign, x)\n\tcase *sysl.Expr_Unexpr:\n\t\treturn evalUnaryFunc(x.Unexpr.Op, Eval(txApp, assign, x.Unexpr.Arg))\n\tdefault:\n\t\tlogrus.Warnf(\"Skipping Expr of type %T\\n\", x)\n\t\treturn nil\n\t}\n}",
"func Evaluate(decision *Decision, maximiser bool) {\n\ts := whichStrategy(maximiser)\n\tdecision.Score = s.initial\n\tfor _, r := range decision.Responses {\n\t\tif r.Responses != nil {\n\t\t\t//\n\t\t\t// Evaluate the response from the opposite perspective.\n\t\t\t//\n\t\t\tEvaluate(r, !maximiser)\n\t\t}\n\t\tif s.compare(r.Score, decision.Score) {\n\t\t\tdecision.Score = r.Score\n\t\t}\n\t}\n}",
"func (e *EqualOp) Evaluate(left, right EvalResult) (EvalResult, error) {\n\tif out, err := e.IsTrue(left, right); err != nil || !out {\n\t\treturn resultFalse, err\n\t}\n\treturn resultTrue, nil\n}",
"func (e *Evaluator) Evaluate(expression string) (*string, error) {\n\tinfixExpression, err := e.tknzr.Tokenize(expression)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// a bit of syntax sugar: if expression contains only atoms\n\t// consider it as just a string literal\n\tif e.onlyAtoms(infixExpression) {\n\t\treturn &expression, nil\n\t}\n\n\tpostfixExpression, err := e.cnvtr.Convert(infixExpression)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn e.evaluateExpression(postfixExpression)\n}",
"func (r Result) Evaluation() float32 {\n\tswitch r {\n\tcase BlackWin:\n\t\treturn -100\n\tcase WhiteWin:\n\t\treturn 100\n\tcase Draw:\n\t\treturn 0\n\t}\n\tpanic(\"Should not be evaluating in play position for win/loss/draw\")\n\treturn 0\n}",
"func (e *Implementation) Evaluate(template string) (string, error) {\n\tp := tmpl.Parameters{\n\t\tTestNamespace: e.TestNamespace,\n\t\tDependencyNamespace: e.DependencyNamespace,\n\t}\n\n\treturn tmpl.Evaluate(template, p)\n}",
"func (b *BaseIntent) Evaluate(ctx context.Context, evaluateCtx *EvaluateContext) error {\n\treturn nil\n}",
"func (b *BinaryExpr) Evaluate(env ExpressionEnv) (EvalResult, error) {\n\tlVal, err := b.Left.Evaluate(env)\n\tif err != nil {\n\t\treturn EvalResult{}, err\n\t}\n\trVal, err := b.Right.Evaluate(env)\n\tif err != nil {\n\t\treturn EvalResult{}, err\n\t}\n\treturn b.Op.Evaluate(lVal, rVal)\n}",
"func compute(fn func(float64, float64) float64) float64 {\n\treturn fn(3, 4)\n}",
"func (e *binaryExprEvaluator) eval(lhs, rhs interface{}) interface{} {\n\tswitch e.op {\n\tcase ADD:\n\t\treturn lhs.(float64) + rhs.(float64)\n\tcase SUB:\n\t\treturn lhs.(float64) - rhs.(float64)\n\tcase MUL:\n\t\treturn lhs.(float64) * rhs.(float64)\n\tcase DIV:\n\t\trhs := rhs.(float64)\n\t\tif rhs == 0 {\n\t\t\treturn float64(0)\n\t\t}\n\t\treturn lhs.(float64) / rhs\n\tdefault:\n\t\t// TODO: Validate operation & data types.\n\t\tpanic(\"invalid operation: \" + e.op.String())\n\t}\n}",
"func (fn NoArgFunc) Eval(ctx *Context, r Row) (interface{}, error) {\n\treturn fn.Logic(ctx, r)\n}",
"func (lscript *Scripting) Eval(luacmd string, arguments ...interface{}) (*ScriptingReturnValues, error) {\n\targs := asScriptingArgs(arguments...)\n\tlargs := forLua(args)\n\tfor _, larg := range largs {\n\t\tlscript.Push(larg)\n\t}\n\tvar r *ScriptingReturnValues\n\terr := lscript.DoString(luacmd)\n\tif err != nil {\n\t\tT().P(\"script\", \"lua\").Errorf(\"scripting error: %s\", err.Error())\n\t} else {\n\t\tif err == nil {\n\t\t\tT().P(\"lua\", \"eval\").Debugf(\"%d return values on the stack\", lscript.GetTop())\n\t\t\tr = lscript.returnFromScripting(lscript.GetTop()) // return all values on the stack\n\t\t}\n\t}\n\treturn r, err\n}",
"func (deme *Deme) evaluate(ff FitnessFunction) {\n\tfor i := range deme.Individuals {\n\t\tdeme.Individuals[i].Evaluate(ff)\n\t}\n}",
"func (n *NotRegexpOp) Evaluate(left, right EvalResult) (EvalResult, error) {\n\tpanic(\"implement me\")\n}",
"func Evaluate(clients []*Client, truth [][]time.Duration) (stats Stats) {\n\tnodes := len(clients)\n\tcount := 0\n\tfor i := 0; i < nodes; i++ {\n\t\tfor j := i + 1; j < nodes; j++ {\n\t\t\test := clients[i].DistanceTo(clients[j].GetCoordinate()).Seconds()\n\t\t\tactual := truth[i][j].Seconds()\n\t\t\terror := math.Abs(est-actual) / actual\n\t\t\tstats.ErrorMax = math.Max(stats.ErrorMax, error)\n\t\t\tstats.ErrorAvg += error\n\t\t\tcount += 1\n\t\t}\n\t}\n\n\tstats.ErrorAvg /= float64(count)\n\tfmt.Printf(\"Error avg=%9.6f max=%9.6f\\n\", stats.ErrorAvg, stats.ErrorMax)\n\treturn\n}",
"func (m *Month) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {\n\treturn getDatePart(ctx, m.UnaryExpression, row, month)\n}",
"func (i *IntNode) Eval(m memory.M) dragonscript.Value {\n\treturn dragonscript.Integer(i.value)\n}",
"func (this *ObjectInnerValues) Evaluate(item value.Value, context Context) (value.Value, error) {\n\treturn this.UnaryEval(this, item, context)\n}",
"func TestEvaluatorWrongInput(t *testing.T) {\n\tvar values = make(map[string]int)\n\ttestCases := []TestCase{\n\t\t{\n\t\t\tname: \"mul instead of right operand\",\n\t\t\texpression: \"1**\",\n\t\t\texpectedError: true,\n\t\t},\n\t\t{\n\t\t\tname: \"forgot closing parenthesis\",\n\t\t\texpression: \"(1+2*\",\n\t\t\texpectedError: true,\n\t\t},\n\t\t{\n\t\t\tname: \"no operands\",\n\t\t\texpression: \"+\",\n\t\t\texpectedError: true,\n\t\t},\n\t\t{\n\t\t\tname: \"no right operand\",\n\t\t\texpression: \"2+\",\n\t\t\texpectedError: true,\n\t\t},\n\t\t{\n\t\t\tname: \"no left operand\",\n\t\t\texpression: \"+2\",\n\t\t\texpectedError: true,\n\t\t},\n\t\t{\n\t\t\tname: \"no left operand (minus)\",\n\t\t\texpression: \"-2\",\n\t\t\texpectedError: true,\n\t\t},\n\t\t{\n\t\t\tname: \"== typo\",\n\t\t\texpression: \"0=0\",\n\t\t\texpectedError: true,\n\t\t},\n\t\t{\n\t\t\tname: \"zero division\",\n\t\t\texpression: \"1/0\",\n\t\t\texpectedError: true,\n\t\t},\n\t}\n\n\tfor _, tc := range testCases {\n\t\tt.Run(tc.name, func(t *testing.T) {\n\t\t\tif tc.expectedError {\n\t\t\t\tresult, err := evaluator.Evaluate(tc.expression, values)\n\t\t\t\tassert.Error(t, err, \"error is expected\")\n\t\t\t\tassert.Equal(t, -1, result)\n\t\t\t}\n\t\t})\n\t}\n}",
"func evaluateExpression(c *Context, exp interface{}) interface{} {\r\n var val interface{}\r\n\r\n // fmt.Printf(\"Evaluating type %T, \\n\", exp)\r\n switch t := exp.(type) {\r\n case int:\r\n // fmt.Printf(\"Returning int %d\\n\", t)\r\n val = t\r\n case *Integer:\r\n val = t.Number\r\n case *StringPrimitive:\r\n val = t.str\r\n case string:\r\n val = t\r\n case []interface{}:\r\n val = t\r\n case *InfixExpression:\r\n // fmt.Printf(\"Evaluating infix expresison %T l: %T, r:%T\\n\", t,t.leftNode.Exp, t.rightNode.Exp)\r\n //Get the value of the left node and right\r\n lVal := evaluateExpression(c, t.leftNode.Exp)\r\n rVal := evaluateExpression(c, t.rightNode.Exp)\r\n\r\n\r\n //then apply the correct infix operator to the values\r\n val = evaluateInfixExpression(c, t.opType, lVal, rVal)\r\n\r\n case *Identifier:\r\n // fmt.Printf(\"Was identifier returning %v\\n\", t.id)\r\n if(t.id == \"nil\") {\r\n val = NewNil(0)\r\n } else {\r\n // fmt.Printf(\"Posssible indeitEifer %T\\n\", c.values[t.id])\r\n val = evaluateExpression(c, c.values[t.id])\r\n }\r\n case *CallExpression:\r\n // fmt.Printf(\"Evaluation call to %s\\n\",t.callee)\r\n\r\n //get declaration of call\r\n callDec := c.lookup(t.callee).(*FuncDeclaration)\r\n if(callDec.returnType == \"\") { //no rreturn type = unit\r\n val = &UnitType{}\r\n } else { //Evaluate the expression of the body for a value\r\n //This should produce a value and will execute all\r\n //of the code of the body as well\r\n for i, _ := range callDec.paramNodes {\r\n paramDec := callDec.paramNodes[i].Exp.(*Param)\r\n paramValue := t.paramNodes[i].Exp\r\n c.values[paramDec.id] = evaluateExpression(c, paramValue)\r\n val = c.values[paramDec.id]\r\n }\r\n\r\n }\r\n\r\n if(t.callee == \"printi\") {\r\n invokePrintI(c, t)\r\n } else if(t.callee == \"print\") {\r\n invokePrint(c, t)\r\n } else if(t.callee == \"not\") {\r\n invokeNot(c, t)\r\n } else { //Regular other user defined function do your thing!\r\n //invoke the body\r\n //Get the declaration of the calling function so we can execute it\r\n callDec := c.lookup(t.callee).(*FuncDeclaration)\r\n // fmt.Printf(\"Invoking random func \\n\")\r\n evaluateExpression(c, callDec.body.Exp)\r\n }\r\n case *IfThenElseExpression:\r\n condition := evaluateExpression(c, t.condNode.Exp).(bool)\r\n // fmt.Printf(\"Cond was %v \\n\", condition)\r\n //If else is nil then its an IfThen Exp\r\n if(t.elseNode == nil) {\r\n val = &UnitType{}\r\n if(condition) { //if the condition is true evaluatie the code inside\r\n evaluateExpression(c, t.thenNode.Exp)\r\n }\r\n } else { //otherwise its and ifThenElse\r\n if(condition) {\r\n val = evaluateExpression(c, t.thenNode.Exp)\r\n } else {\r\n val = evaluateExpression(c, t.elseNode.Exp)\r\n }\r\n }\r\n case *SeqExpression:\r\n // Value is equivalent to the last node of the seqence expression\r\n if(len(t.nodes) == 0) {\r\n val = &UnitType{}\r\n } else {\r\n // fmt.Printf(\"Seq type was %T\\n\", t.nodes[len(t.nodes)-1].Exp)\r\n val = evaluateExpression(c, t.nodes[len(t.nodes)-1].Exp)\r\n }\r\n case *Nil:\r\n val = NewNil(0)\r\n case *ArrayExp:\r\n arrType := getType(c, c.lookup(t.typeId)).(*Identifier)\r\n val = c.lookup(arrType.id)\r\n case *ForExpression:\r\n val = &UnitType{}\r\n case *LetExpression:\r\n if(len(t.exps) == 0) {\r\n val = &UnitType{}\r\n } else {\r\n // fmt.Printf(\"%T is last exp type\\n\", t.exps[len(t.exps)-1].Exp)\r\n // val = getType(c, t.exps[len(t.exps)-1].Exp)\r\n }\r\n case *Assignment:\r\n val = &UnitType{}\r\n case *RecordExp:\r\n var slc []interface{}\r\n for 
_, fcNode := range t.fieldCreateNodes {\r\n if b, isABinding := fcNode.Exp.(*Binding); isABinding {\r\n slc = append(slc, evaluateExpression(c, b.exp.Exp))\r\n }\r\n }\r\n val = slc\r\n default:\r\n fmt.Fprintf(os.Stderr, \"Could not evaluate exp %T\\n\", t)\r\n os.Exit(4)\r\n }\r\n\r\n return val\r\n}",
"func (p Polynomial) Eval(arg int) ed25519.Scalar {\n\tx := ed25519.New_scalar(*big.NewInt(int64(arg)))\n\tresult := p.coeffs[0].Add(p.coeffs[1].Mul(x))\n\tx_pow := x.Copy()\n\tfor i := 2; i < len(p.coeffs); i++ {\n\t\tx_pow = x_pow.Mul(x)\n\t\tresult = result.Add(p.coeffs[i].Mul(x_pow))\n\t}\n\treturn result\n}",
"func (NullEvaluator) Evaluate(ctx context.Context, input *EvaluationInput) (*EvaluationOutput, error) {\n\treturn &EvaluationOutput{\n\t\tTraits: input.Traits,\n\t}, nil\n}",
"func (m *MockFloatTexture) Evaluate(si *SurfaceInteraction) float64 {\n\tret := m.ctrl.Call(m, \"Evaluate\", si)\n\tret0, _ := ret[0].(float64)\n\treturn ret0\n}",
"func (maxFn) Eval(params ...interface{}) (interface{}, error) {\n\tif maxFnLogger.DebugEnabled() {\n\t\tmaxFnLogger.Debugf(\"Entering function max (eval) with param: %+v\", params[0])\n\t}\n\n\tinputParamValue := params[0]\n\tvar outputValue interface{}\n\n\tinputArray, ok := inputParamValue.([]interface{})\n\tif !ok {\n\t\tif maxFnLogger.DebugEnabled() {\n\t\t\tmaxFnLogger.Debugf(\"First argument is not an array. Argument Type is: %T. Will return error.\", inputParamValue)\n\t\t}\n\t\treturn nil, fmt.Errorf(\"First argument is not an array. Argument Type is: %T\", inputParamValue)\n\t}\n\n\tif inputArray == nil || len(inputArray) == 0 {\n\t\t//Do nothing\n\t\tif maxFnLogger.DebugEnabled() {\n\t\t\tmaxFnLogger.Debugf(\"Input arguments are nil or empty. Will return nil as output.\")\n\t\t}\n\t\treturn nil, nil\n\t}\n\n\tmaxValue := 0.0\n\tindexForMaxValue := 0\n\n\tfor j := 0; j < len(inputArray); j++ {\n\t\tvalueToCompare := inputArray[j]\n\n\t\tif maxFnLogger.DebugEnabled() {\n\t\t\tmaxFnLogger.Debugf(\"[%+v]: Value at index [%+v] is [%+v], of type %T.\", j, j, valueToCompare, valueToCompare)\n\t\t\tmaxFnLogger.Debugf(\"[%+v]: Attempt to coerce the value to float64.\", j)\n\t\t}\n\n\t\ttempValueToCompare, err := coerce.ToFloat64(valueToCompare)\n\n\t\tif err != nil {\n\t\t\tif maxFnLogger.DebugEnabled() {\n\t\t\t\tmaxFnLogger.Debugf(\"[%+v]: Value at index [%+v] is [%+v], which is of type %T, and is not a number.\", j, j, valueToCompare, valueToCompare)\n\t\t\t\tmaxFnLogger.Debugf(\"[%+v]: Array is not an array of go number types. Cannot compute max.\")\n\t\t\t}\n\t\t\treturn nil, fmt.Errorf(\"Value at index [%+v] is [%+v], which is of type %T, and cannot be coerced to float64. \"+\n\t\t\t\t\"Array is not an array of go number types. Cannot compute max.\", j, valueToCompare, valueToCompare)\n\t\t}\n\n\t\tif maxFnLogger.DebugEnabled() {\n\t\t\tmaxFnLogger.Debugf(\"[%+v]: Successfully coerced the value to float64.\", j)\n\t\t\tmaxFnLogger.Debugf(\"[%+v]: Coerced value is = [%+v]\", j, tempValueToCompare)\n\t\t}\n\n\t\tif j == 0 {\n\t\t\tmaxValue = tempValueToCompare\n\t\t} else {\n\t\t\tif maxValue < tempValueToCompare {\n\t\t\t\tmaxValue = tempValueToCompare\n\t\t\t\tindexForMaxValue = j\n\t\t\t}\n\t\t}\n\n\t\tif maxFnLogger.DebugEnabled() {\n\t\t\tmaxFnLogger.Debugf(\"[%+v]: Current max value is [%+v].\", j, maxValue)\n\t\t\tmaxFnLogger.Debugf(\"[%+v]: Current max index is [%+v].\", j, indexForMaxValue)\n\t\t}\n\n\t}\n\n\toutputValue = inputArray[indexForMaxValue]\n\n\tif maxFnLogger.DebugEnabled() {\n\t\tmaxFnLogger.Debugf(\"Final output value = %+v\", outputValue)\n\t}\n\n\tif maxFnLogger.DebugEnabled() {\n\t\tmaxFnLogger.Debugf(\"Exiting function max (eval)\")\n\t}\n\n\treturn outputValue, nil\n}"
] | [
"0.651853",
"0.6338464",
"0.6131312",
"0.61241156",
"0.60935414",
"0.60788083",
"0.6030784",
"0.60034543",
"0.6002316",
"0.5988152",
"0.597255",
"0.59357506",
"0.5845846",
"0.583995",
"0.58133984",
"0.5776667",
"0.5771244",
"0.5756347",
"0.57353276",
"0.5718127",
"0.5705097",
"0.57029766",
"0.5637356",
"0.5629114",
"0.55853665",
"0.5562175",
"0.55506706",
"0.5547969",
"0.55462676",
"0.5542525",
"0.5517861",
"0.5479303",
"0.5476308",
"0.54680854",
"0.54581994",
"0.5452519",
"0.54502964",
"0.5433895",
"0.5424243",
"0.5420689",
"0.54051304",
"0.54031754",
"0.53995275",
"0.53859276",
"0.5367255",
"0.53660655",
"0.53496975",
"0.53496975",
"0.5319196",
"0.52943456",
"0.5274579",
"0.5256423",
"0.5248485",
"0.5243544",
"0.5239524",
"0.52316475",
"0.52220964",
"0.5204502",
"0.5200289",
"0.5198345",
"0.51959693",
"0.5182932",
"0.5175901",
"0.51643646",
"0.5143628",
"0.51301694",
"0.512891",
"0.5124991",
"0.51239127",
"0.5121535",
"0.51142895",
"0.51109284",
"0.51109284",
"0.5092843",
"0.50814044",
"0.50798213",
"0.50744355",
"0.50682867",
"0.50646013",
"0.5033846",
"0.5020904",
"0.50198597",
"0.5016089",
"0.501105",
"0.50101894",
"0.49993768",
"0.4985884",
"0.49857593",
"0.49833038",
"0.49806416",
"0.49682838",
"0.49588478",
"0.4951295",
"0.49508324",
"0.49418494",
"0.49383998",
"0.49382907",
"0.4937479",
"0.4931718",
"0.49197742"
] | 0.56331724 | 23 |
ProofToHash asserts that proof is correct for m and outputs index. | func (pk *PublicKey) ProofToHash(m, proof []byte) (index [32]byte, err error) {
nilIndex := [32]byte{}
// verifier checks that s == H2(m, [t]G + [s]([k]G), [t]H1(m) + [s]VRF_k(m))
if got, want := len(proof), 64+65; got != want {
return nilIndex, ErrInvalidVRF
}
// Parse proof into s, t, and vrf.
s := proof[0:32]
t := proof[32:64]
vrf := proof[64 : 64+65]
uHx, uHy := elliptic.Unmarshal(curve, vrf)
if uHx == nil {
return nilIndex, ErrInvalidVRF
}
// [t]G + [s]([k]G) = [t+ks]G
tGx, tGy := curve.ScalarBaseMult(t)
ksGx, ksGy := curve.ScalarMult(pk.X, pk.Y, s)
tksGx, tksGy := curve.Add(tGx, tGy, ksGx, ksGy)
// H = H1(m)
// [t]H + [s]VRF = [t+ks]H
Hx, Hy := H1(m)
tHx, tHy := curve.ScalarMult(Hx, Hy, t)
sHx, sHy := curve.ScalarMult(uHx, uHy, s)
tksHx, tksHy := curve.Add(tHx, tHy, sHx, sHy)
// H2(G, H, [k]G, VRF, [t]G + [s]([k]G), [t]H + [s]VRF)
// = H2(G, H, [k]G, VRF, [t+ks]G, [t+ks]H)
// = H2(G, H, [k]G, VRF, [r]G, [r]H)
var b bytes.Buffer
if _, err := b.Write(elliptic.Marshal(curve, curve.Gx, curve.Gy)); err != nil {
panic(err)
}
if _, err := b.Write(elliptic.Marshal(curve, Hx, Hy)); err != nil {
panic(err)
}
if _, err := b.Write(elliptic.Marshal(curve, pk.X, pk.Y)); err != nil {
panic(err)
}
if _, err := b.Write(vrf); err != nil {
panic(err)
}
if _, err := b.Write(elliptic.Marshal(curve, tksGx, tksGy)); err != nil {
panic(err)
}
if _, err := b.Write(elliptic.Marshal(curve, tksHx, tksHy)); err != nil {
panic(err)
}
h2 := H2(b.Bytes())
// Left pad h2 with zeros if needed. This will ensure that h2 is padded
// the same way s is.
var buf bytes.Buffer
if _, err := buf.Write(make([]byte, 32-len(h2.Bytes()))); err != nil {
panic(err)
}
if _, err := buf.Write(h2.Bytes()); err != nil {
panic(err)
}
if !hmac.Equal(s, buf.Bytes()) {
return nilIndex, ErrInvalidVRF
}
return sha256.Sum256(vrf), nil
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (pk *VrfablePublicKey) ProofToHash(m, proof []byte) (index [32]byte, err error) {\n\tnilIndex := [32]byte{}\n\t// verifier checks that s == H2(m, [t]G + [s]([k]G), [t]H1(m) + [s]VRF_k(m))\n\tif got, want := len(proof), 64+65; got != want {\n\t\treturn nilIndex, ErrInvalidVRF\n\t}\n\n\t// Parse proof into s, t, and vrf.\n\ts := proof[0:32]\n\tt := proof[32:64]\n\tvrf := proof[64 : 64+65]\n\n\t// uHx, uHy := elliptic.Unmarshal(curve, vrf)\n\tuHx, uHy := curve.Unmarshal(vrf) //////???\n\tif uHx == nil {\n\t\treturn nilIndex, ErrInvalidVRF\n\t}\n\n\t// [t]G + [s]([k]G) = [t+ks]G\n\ttGx, tGy := curve.ScalarBaseMult(t)\n\tksGx, ksGy := curve.ScalarMult(pk.X, pk.Y, s)\n\ttksGx, tksGy := curve.Add(tGx, tGy, ksGx, ksGy)\n\n\t// H = H1(m)\n\t// [t]H + [s]VRF = [t+ks]H\n\tHx, Hy := H1(m)\n\ttHx, tHy := curve.ScalarMult(Hx, Hy, t)\n\tsHx, sHy := curve.ScalarMult(uHx, uHy, s)\n\ttksHx, tksHy := curve.Add(tHx, tHy, sHx, sHy)\n\n\t// H2(G, H, [k]G, VRF, [t]G + [s]([k]G), [t]H + [s]VRF)\n\t// = H2(G, H, [k]G, VRF, [t+ks]G, [t+ks]H)\n\t// = H2(G, H, [k]G, VRF, [r]G, [r]H)\n\tvar b bytes.Buffer\n\tb.Write(curve.Marshal(params.Gx, params.Gy))\n\tb.Write(curve.Marshal(Hx, Hy))\n\tb.Write(curve.Marshal(pk.X, pk.Y))\n\tb.Write(vrf)\n\tb.Write(curve.Marshal(tksGx, tksGy))\n\tb.Write(curve.Marshal(tksHx, tksHy))\n\th2 := H2(b.Bytes())\n\n\t// Left pad h2 with zeros if needed. This will ensure that h2 is padded\n\t// the same way s is.\n\tvar buf bytes.Buffer\n\tbuf.Write(make([]byte, 32-len(h2.Bytes())))\n\tbuf.Write(h2.Bytes())\n\n\tif !hmac.Equal(s, buf.Bytes()) {\n\t\treturn nilIndex, ErrInvalidVRF\n\t}\n\treturn sha256.Sum256(vrf), nil\n}",
"func (b *Set) hashIndex(item interface{}) (index int) {\n\n\trepString := \"\"\n\tswitch item.(type) {\n\tcase string:\n\t\trepString, _ = item.(string)\n\tcase int:\n\t\ttempString, _ := item.(int)\n\t\trepString = strconv.Itoa(tempString)\n\tdefault:\n\t\tpanic(\"type not support\")\n\t}\n\n\th := fnv.New32()\n\th.Write([]byte(repString))\n\tindex = int(h.Sum32() % uint32(b.m))\n\treturn\n}",
"func ProofToHash(proof *[PROOFBYTES]byte) (*[OUTPUTBYTES]byte, error) {\n\toutput := [OUTPUTBYTES]byte{}\n\toutputPtr := (*C.uchar)(unsafe.Pointer(&output))\n\tproofPtr := (*C.uchar)(unsafe.Pointer(proof))\n\tif C.crypto_vrf_proof_to_hash(outputPtr, proofPtr) != 0 {\n\t\treturn nil, errors.New(fmt.Sprintf(\n\t\t\t\"given proof isn't legitimately generated: proof=%s\", hex.EncodeToString(proof[:])))\n\t}\n\treturn &output, nil\n}",
"func memhash(p unsafe.Pointer, h, s uintptr) uintptr",
"func memhash(p unsafe.Pointer, h, s uintptr) uintptr",
"func hashmapHash(data []byte) uint32 {\n\tvar result uint32 = 2166136261 // FNV offset basis\n\tfor _, c := range data {\n\t\tresult ^= uint32(c)\n\t\tresult *= 16777619 // FNV prime\n\t}\n\treturn result\n}",
"func CheckMerkleProof(proof []*chainhash.Hash, hash, expectedRoot *chainhash.Hash, hashIdx int) bool {\n\ttreeHeight := uint(len(proof))\n\n\tfor _, h := range proof {\n\t\tvar newHash chainhash.Hash\n\t\tif hashIdx&1 == 1 {\n\t\t\tnewHash = chainhash.DoubleHashH(append(h[:], hash[:]...))\n\t\t} else {\n\t\t\tnewHash = chainhash.DoubleHashH(append(hash[:], h[:]...))\n\t\t}\n\t\thash = &newHash\n\t\thashIdx = (hashIdx >> 1) | (1 << treeHeight)\n\t}\n\n\treturn bytes.Equal(hash[:], expectedRoot[:])\n}",
"func (o *ObjectIndex) Hash() uint32 {\n\tvar h uint32 = 17\n\n\tvar str string\n\tstr += fmt.Sprintf(\"%08x\", o.machine)\n\tstr += fmt.Sprintf(\"%04x\", o.pid)\n\tstr += fmt.Sprintf(\"%08x\", o.id)\n\tstr += fmt.Sprintf(\"%08x\", o.Rand)\n\tfor _, v := range str {\n\t\th += h*23 + uint32(v)\n\t}\n\treturn h\n}",
"func (t *openAddressing) hash(key string, round int) uint32 {\n\tnum := uint(stringToInt(key))\n\tmax := uint(len(t.values) - 1)\n\treturn uint32((hashDivision(num, max) + uint(round)*hashDivision2(num, max)) % max)\n}",
"func IfaceHash(i interface{F()}, seed uintptr) uintptr",
"func runtime_memhash(p unsafe.Pointer, seed, s uintptr) uintptr",
"func (c *Cmd) GetProofByHash(w io.Writer, r io.Reader) error {\n\tvar request *GetProofByHashRequest\n\n\tif err := json.NewDecoder(r).Decode(&request); err != nil {\n\t\treturn fmt.Errorf(\"decode GetEntries request: %w\", err)\n\t}\n\n\tif err := request.Validate(); err != nil {\n\t\treturn fmt.Errorf(\"validate GetProofByHash request: %w\", err)\n\t}\n\n\tleafHash, err := base64.StdEncoding.DecodeString(request.Hash)\n\tif err != nil {\n\t\treturn errors.NewBadRequestError(fmt.Errorf(\"invalid base64 hash: %w\", err))\n\t}\n\n\treq := trillian.GetInclusionProofByHashRequest{\n\t\tLogId: c.logID,\n\t\tLeafHash: leafHash,\n\t\tTreeSize: request.TreeSize,\n\t\tOrderBySequence: true,\n\t}\n\n\tresp, err := c.client.GetInclusionProofByHash(context.Background(), &req)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"get leaves by range: %w\", err)\n\t}\n\n\tvar currentRoot types.LogRootV1\n\tif err := currentRoot.UnmarshalBinary(resp.GetSignedLogRoot().GetLogRoot()); err != nil {\n\t\treturn fmt.Errorf(\"%w: unmarshal binary: %v\", errors.ErrInternal, resp.GetSignedLogRoot().GetLogRoot())\n\t}\n\n\tif currentRoot.TreeSize < uint64(request.TreeSize) {\n\t\treturn fmt.Errorf(\"%w: got tree size: %d but we expected: %d\",\n\t\t\terrors.ErrNotFound, currentRoot.TreeSize, request.TreeSize,\n\t\t)\n\t}\n\n\tif len(resp.Proof) == 0 {\n\t\treturn fmt.Errorf(\"%w: no proof\", errors.ErrNotFound)\n\t}\n\n\treturn json.NewEncoder(w).Encode(GetProofByHashResponse{ // nolint: wrapcheck\n\t\tLeafIndex: resp.Proof[0].LeafIndex,\n\t\tAuditPath: resp.Proof[0].Hashes,\n\t})\n}",
"func EfaceHash(i interface{}, seed uintptr) uintptr",
"func (_Verifier *VerifierSession) VerifyProof(a [2]*big.Int, b [2][2]*big.Int, c [2]*big.Int, input [3]*big.Int) (bool, error) {\n\treturn _Verifier.Contract.VerifyProof(&_Verifier.CallOpts, a, b, c, input)\n}",
"func pHash(result, secret, seed []byte, hash hash.Hash) {\n\th := hmac.New(hash, secret);\n\th.Write(seed);\n\ta := h.Sum();\n\n\tj := 0;\n\tfor j < len(result) {\n\t\th.Reset();\n\t\th.Write(a);\n\t\th.Write(seed);\n\t\tb := h.Sum();\n\t\ttodo := len(b);\n\t\tif j+todo > len(result) {\n\t\t\ttodo = len(result) - j\n\t\t}\n\t\tbytes.Copy(result[j:j+todo], b);\n\t\tj += todo;\n\n\t\th.Reset();\n\t\th.Write(a);\n\t\ta = h.Sum();\n\t}\n}",
"func (_Verifier *VerifierCallerSession) VerifyProof(a [2]*big.Int, b [2][2]*big.Int, c [2]*big.Int, input [3]*big.Int) (bool, error) {\n\treturn _Verifier.Contract.VerifyProof(&_Verifier.CallOpts, a, b, c, input)\n}",
"func TestHasher(t *testing.T) {\n\tfor _, tt := range []struct {\n\t\tkey uint64\n\t\tbucket []int\n\t}{\n\t\t// Generated from the reference C++ code\n\t\t{0, []int{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}},\n\t\t{1, []int{0, 0, 0, 0, 0, 0, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 17, 17}},\n\t\t{0xdeadbeef, []int{0, 1, 2, 3, 3, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 16, 16, 16}},\n\t\t{0x0ddc0ffeebadf00d, []int{0, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 15, 15, 15, 15}},\n\t} {\n\t\tfor i, v := range tt.bucket {\n\t\t\thasher := &jmphasher{}\n\t\t\tif got := hasher.Hash(tt.key, i+1); got != v {\n\t\t\t\tt.Errorf(\"hash(%v,%v)=%v, want %v\", tt.key, i+1, got, v)\n\t\t\t}\n\t\t}\n\t}\n}",
"func computeHashWithProofOfWork(data string, difficulty string) (int64, string) {\r\n\tnonce := int64(0)\r\n\tfor {\r\n\t\thash := CalcHash(IntToStr(nonce) + data)\r\n\t\tif strings.HasPrefix(hash, difficulty) {\r\n\t\t\treturn nonce, hash\r\n\t\t}\r\n\t\tnonce++\r\n\t}\r\n}",
"func (a *Accumulator) Verify(elemIndex uint64, elemHash, expectedRootHash HashValue) error {\r\n\tif a.Hasher == nil {\r\n\t\treturn errors.New(\"nil hasher\")\r\n\t}\r\n\r\n\tbm := bitmap.NewFromUint64(elemIndex)\r\n\tif bm.Cap() < len(a.Siblings) {\r\n\t\treturn errors.New(\"merkle tree proof has too many siblings\")\r\n\t}\r\n\r\n\t// log.Printf(\"target hash: %s\", hex.EncodeToString(expectedRootHash))\r\n\thash := elemHash\r\n\t// log.Printf(\"initial hash: %s\", hex.EncodeToString(hash))\r\n\thasher := a.Hasher\r\n\tfor i := bm.BitsRev(); i.Next(); {\r\n\t\tidx, b := i.Bit()\r\n\t\tif idx >= len(a.Siblings) {\r\n\t\t\tbreak\r\n\t\t}\r\n\t\thasher.Reset()\r\n\t\tif b {\r\n\t\t\thasher.Write(a.Siblings[idx])\r\n\t\t\thasher.Write(hash)\r\n\t\t} else {\r\n\t\t\thasher.Write(hash)\r\n\t\t\thasher.Write(a.Siblings[idx])\r\n\t\t}\r\n\t\thash = hasher.Sum([]byte{})\r\n\t\t// log.Printf(\"new hash: %s\", hex.EncodeToString(hash))\r\n\t}\r\n\tif !sha3libra.Equal(hash, expectedRootHash) {\r\n\t\treturn errors.New(\"root hashes do not match\")\r\n\t}\r\n\treturn nil\r\n}",
"func HashASM(k0, k1 uint64, p []byte) uint64",
"func (_Verifier *VerifierCaller) VerifyProof(opts *bind.CallOpts, a [2]*big.Int, b [2][2]*big.Int, c [2]*big.Int, input [3]*big.Int) (bool, error) {\n\tvar out []interface{}\n\terr := _Verifier.contract.Call(opts, &out, \"verifyProof\", a, b, c, input)\n\n\tif err != nil {\n\t\treturn *new(bool), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(bool)).(*bool)\n\n\treturn out0, err\n\n}",
"func TestRefHasher(t *testing.T) {\n\tt.Parallel()\n\n\t// the test struct is used to specify the expected BMT hash for\n\t// segment counts between from and to and lengths from 1 to datalength\n\tfor _, x := range []struct {\n\t\tfrom int\n\t\tto int\n\t\texpected func([]byte) []byte\n\t}{\n\t\t// all lengths in [0,64] should be:\n\t\t//\n\t\t// sha3hash(data)\n\t\t//\n\t\t{\n\t\t\tfrom: 1,\n\t\t\tto: 2,\n\t\t\texpected: func(d []byte) []byte {\n\t\t\t\tdata := make([]byte, 64)\n\t\t\t\tcopy(data, d)\n\t\t\t\treturn sha3hash(t, data)\n\t\t\t},\n\t\t},\n\t\t// all lengths in [3,4] should be:\n\t\t//\n\t\t// sha3hash(\n\t\t// sha3hash(data[:64])\n\t\t// sha3hash(data[64:])\n\t\t// )\n\t\t//\n\t\t{\n\t\t\tfrom: 3,\n\t\t\tto: 4,\n\t\t\texpected: func(d []byte) []byte {\n\t\t\t\tdata := make([]byte, 128)\n\t\t\t\tcopy(data, d)\n\t\t\t\treturn sha3hash(t, sha3hash(t, data[:64]), sha3hash(t, data[64:]))\n\t\t\t},\n\t\t},\n\t\t// all bmttestutil.SegmentCounts in [5,8] should be:\n\t\t//\n\t\t// sha3hash(\n\t\t// sha3hash(\n\t\t// sha3hash(data[:64])\n\t\t// sha3hash(data[64:128])\n\t\t// )\n\t\t// sha3hash(\n\t\t// sha3hash(data[128:192])\n\t\t// sha3hash(data[192:])\n\t\t// )\n\t\t// )\n\t\t//\n\t\t{\n\t\t\tfrom: 5,\n\t\t\tto: 8,\n\t\t\texpected: func(d []byte) []byte {\n\t\t\t\tdata := make([]byte, 256)\n\t\t\t\tcopy(data, d)\n\t\t\t\treturn sha3hash(t, sha3hash(t, sha3hash(t, data[:64]), sha3hash(t, data[64:128])), sha3hash(t, sha3hash(t, data[128:192]), sha3hash(t, data[192:])))\n\t\t\t},\n\t\t},\n\t} {\n\t\tfor segCount := x.from; segCount <= x.to; segCount++ {\n\t\t\tfor length := 1; length <= segCount*32; length++ {\n\t\t\t\tlength, segCount, x := length, segCount, x\n\n\t\t\t\tt.Run(fmt.Sprintf(\"%d_segments_%d_bytes\", segCount, length), func(t *testing.T) {\n\t\t\t\t\tt.Parallel()\n\n\t\t\t\t\tdata := make([]byte, length)\n\t\t\t\t\t_, err := io.ReadFull(crand.Reader, data)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tt.Fatal(err)\n\t\t\t\t\t}\n\t\t\t\t\texpected := x.expected(data)\n\t\t\t\t\tactual, err := reference.NewRefHasher(sha3.NewLegacyKeccak256(), segCount).Hash(data)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tt.Fatal(err)\n\t\t\t\t\t}\n\t\t\t\t\tif !bytes.Equal(actual, expected) {\n\t\t\t\t\t\tt.Fatalf(\"expected %x, got %x\", expected, actual)\n\t\t\t\t\t}\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\t}\n}",
"func byteshash(p *[]byte, h uintptr) uintptr",
"func (mh *Median) computeHash(img *image.Gray, median uint) hashtype.Binary {\n\tsize := mh.width * mh.height / 8\n\thash := make(hashtype.Binary, size)\n\tbnds := img.Bounds()\n\tvar c uint\n\tfor i := bnds.Min.Y; i < bnds.Max.Y; i++ {\n\t\tfor j := bnds.Min.X; j < bnds.Max.X; j++ {\n\t\t\tpix := img.GrayAt(j, i).Y\n\t\t\tif uint(pix) > median {\n\t\t\t\thash.Set(c)\n\t\t\t}\n\t\t\tc++\n\t\t}\n\t}\n\treturn hash\n}",
"func TmMerkleHash(chunks []Chunk) Digest { panic(\"\") }",
"func (p *Proof) Hash() []byte {\n\thash := blake2b.Sum256(p.Bytes())\n\treturn hash[:]\n}",
"func Hash(s int, o Orientation) (int, error) {\n\n\tvar errVal int = 10\n\n\tif !(s >= 0 && s <= palletWidth*palletLength) {\n\t\treturn errVal, ErrSize\n\t}\n\tif o != HORIZONTAL && o != VERTICAL && o != SQUAREGRID {\n\t\treturn errVal, ErrOrient\n\t}\n\n\tvar hash int\n\n\tswitch s {\n\tcase 1, 2, 3, 6:\n\t\thash = s - 1\n\tcase 4:\n\t\tif o == SQUAREGRID {\n\t\t\thash = s\n\t\t} else {\n\t\t\thash = s - 1\n\t\t}\n\tcase 8:\n\t\thash = 6\n\tcase 9:\n\t\thash = 7\n\tcase 12:\n\t\thash = 8\n\tcase 16:\n\t\thash = 9\n\tdefault:\n\t\treturn errVal, ErrSize\n\t}\n\n\treturn hash, nil\n}",
"func (rh *RHash) OffsetHash(begin, length int) uint64 {\n\treturn rhCalcMod(\n\t\trh.hash[begin+length] + _RH_POSITIVIZER - rhMul(rh.hash[begin], _rhPowMemo[length]),\n\t)\n}",
"func checkProofOfWorkHash(powHash *chainhash.Hash, target *big.Int) error {\n\t// The proof of work hash must be less than the target difficulty.\n\thashNum := HashToBig(powHash)\n\tif hashNum.Cmp(target) > 0 {\n\t\tstr := fmt.Sprintf(\"proof of work hash %064x is higher than \"+\n\t\t\t\"expected max of %064x\", hashNum, target)\n\t\treturn ruleError(ErrHighHash, str)\n\t}\n\n\treturn nil\n}",
"func (i *Index) Hash() (uint32, error) {\n\treturn 0, fmt.Errorf(\"unhashable: %s\", i.Type())\n}",
"func hash_func(x, y, n HashValue) (HashValue) {\n return (x*1640531513 ^ y*2654435789) % n\n}",
"func ExampleMustHashTrytes() {}",
"func MimcHash(cs *frontend.ConstraintSystem, stream ...frontend.Variable) frontend.Variable {\n\tstate := cs.Constant(0)\n\tfor _, m := range stream {\n\t\toldState := state\n\t\tfor i := 0; i < hash.MimcRounds; i++ {\n\t\t\t// keys := cs.Constant(hash.Arks[i])\n\t\t\tstate = cs.Add(m, state, cs.Constant(hash.Arks[i]))\n\t\t\t// Raise to the power 7\n\t\t\ttmp := cs.Mul(state, state) // ^2\n\t\t\ttmp = cs.Mul(state, tmp) // ^3\n\t\t\ttmp = cs.Mul(tmp, tmp) // ^6\n\t\t\tstate = cs.Mul(state, tmp) // ^7\n\t\t}\n\t\t// Readd the oldState and the message as part of the Miyaguchi-Preenel construct\n\t\tstate = cs.Add(state, oldState, m)\n\t}\n\treturn state\n}",
"func (v *RealVerifier) Index(vrfProof []byte, directoryID, userID string) ([]byte, error) {\n\tindex, err := v.vrf.ProofToHash([]byte(userID), vrfProof)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"vrf.ProofToHash(): %v\", err)\n\t}\n\treturn index[:], nil\n}",
"func HashVerifier(b []byte) []byte {\n\th := sha256.New()\n\th.Write(b)\n\n\treturn h.Sum(nil)\n}",
"func HashIndexFunction(env *Zlisp, name string, args []Sexp) (Sexp, error) {\n\tQ(\"in HashIndexFunction, with %v args = '%#v', env=%p\",\n\t\tlen(args), args, env)\n\tfor i := range args {\n\t\tQ(\"in HashIndexFunction, args[%v] = '%v'\", i, args[i].SexpString(nil))\n\t}\n\tnarg := len(args)\n\tif narg != 2 {\n\t\treturn SexpNull, WrongNargs\n\t}\n\ttmp, err := env.ResolveDotSym([]Sexp{args[0]})\n\tif err != nil {\n\t\treturn SexpNull, err\n\t}\n\targs[0] = tmp[0]\n\tQ(\"HashIndexFunction: past dot resolve, args[0] is now type %T/val='%v'\",\n\t\targs[0], args[0].SexpString(nil))\n\n\tvar hash *SexpHash\n\tswitch ar0 := args[0].(type) {\n\tcase *SexpHash:\n\t\thash = ar0\n\tcase *SexpArray:\n\t\tQ(\"HashIndexFunction: args[0] is an array, defering to ArrayIndexFunction\")\n\t\treturn ArrayIndexFunction(env, name, args)\n\tcase Selector:\n\t\tx, err := ar0.RHS(env)\n\t\tQ(\"ar0.RHS() returned x = %#v\", x)\n\t\tif err != nil {\n\t\t\tQ(\"HashIndexFunction: Selector error: '%v'\", err)\n\t\t\treturn SexpNull, err\n\t\t}\n\t\tswitch xH := x.(type) {\n\t\tcase *SexpHash:\n\t\t\thash = xH\n\t\tcase *SexpHashSelector:\n\t\t\tx, err := xH.RHS(env)\n\t\t\tif err != nil {\n\t\t\t\tQ(\"HashIndexFunction: hash retreival from \"+\n\t\t\t\t\t\"SexpHashSelector gave error: '%v'\", err)\n\t\t\t\treturn SexpNull, err\n\t\t\t}\n\t\t\tswitch xHash2 := x.(type) {\n\t\t\tcase *SexpHash:\n\t\t\t\thash = xHash2\n\t\t\tdefault:\n\t\t\t\treturn SexpNull, fmt.Errorf(\"bad (hashidx h2 index) call: h2 was a hashidx itself, but it did not resolve to an hash, instead '%s'/type %T\", x.SexpString(nil), x)\n\t\t\t}\n\t\tcase *SexpArray:\n\t\t\tQ(\"HashIndexFunction sees args[0] is Selector\"+\n\t\t\t\t\" that resolved to an array '%v'\", xH.SexpString(nil))\n\t\t\treturn ArrayIndexFunction(env, name, []Sexp{xH, args[1]})\n\t\tdefault:\n\t\t\treturn SexpNull, fmt.Errorf(\"bad (hashidx h index) call: h did not resolve to a hash, instead '%s'/type %T\", x.SexpString(nil), x) // failing here with x a *SexpStr\n\t\t}\n\tdefault:\n\t\treturn SexpNull, fmt.Errorf(\"bad (hashidx h index) call: h was not a hashmap, instead '%s'/type %T\",\n\t\t\targs[0].SexpString(nil), args[0])\n\t}\n\n\tsel := args[1]\n\tswitch x := sel.(type) {\n\tcase *SexpSymbol:\n\t\tsel = x\n\t\t/*\n\t\t\tif x.isDot {\n\t\t\t\tQ(\"hashidx sees dot symbol: '%s', removing any prefix dot\", x.name)\n\t\t\t\tif len(x.name) >= 2 && x.name[0] == '.' {\n\t\t\t\t\tselSym := env.MakeSymbol(x.name[1:])\n\t\t\t\t\t//selSym.isDot = true\n\t\t\t\t\tsel = selSym\n\t\t\t\t}\n\t\t\t}\n\t\t*/\n\tdefault:\n\t\t// okay to have SexpArray/other as selector\n\t}\n\n\tret := SexpHashSelector{\n\t\tSelect: sel,\n\t\tContainer: hash,\n\t}\n\tQ(\"HashIndexFunction: returning without error, ret.Select = '%v'\", args[1].SexpString(nil))\n\treturn &ret, nil\n}",
"func (T *SparseMerkleTree) GenerateProofDB(index string) (Proof) {\n\treadDB, err := GetReadAngelaDB()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tdefer readDB.Close()\n\tproofResult := Proof{}\n\tproofResult.QueryID = index\n\t\n\tvar proof_t ProofType\n\tvar currID string\n\tvar startingIndex string\n\t_, ok := T.getLatestNode(index)\n\n\t// _, ok := T.cache[index]\n\tif !ok {\n\t\t// fmt.Println(\"Entering NONMEMBERSHIP\")\n\t\tproof_t = NONMEMBERSHIP\n\t\tancestorIds := make([]string, 0)\n\t\tancestor := index\n\t\t// Our stopping condition is length > 0 so we don't add the root to the copath\n\t\tfor ; len(ancestor) > 0; ancestor = getParent(ancestor) {\n\t\t\tancestorIds = append(ancestorIds, ancestor)\n\t\t}\n\t // fmt.Println(\"number of ancestors\", len(ancestorIds))\n\n\t\tcopathPairs, err := readDB.retrieveLatestCopathDigests(ancestorIds)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\tfor j := 0; j < len(copathPairs); j++ {\n\t\t\tT.cache[copathPairs[j].ID] = &copathPairs[j].Digest\n\t\t}\n\t\tstartingIndex = T.getEmptyAncestor(index)\n\t} else {\n\t\tproof_t = MEMBERSHIP\n\t\tstartingIndex = index\n\t}\n\t// fmt.Println(\"startingIndex is\", startingIndex)\n\tproofResult.ProofType = proof_t\n\tproofResult.ProofID = startingIndex\n\tCoPath := make([]CoPathPair, 0)\n\n\tcurrID = startingIndex\n\tids := make([]string, 0)\n\t// Our stopping condition is length > 0 so we don't add the root to the copath\n\tfor ; len(currID) > 0; currID = getParent(currID) {\n\t\tsiblingID, _ := getSibling(currID)\n\t\tids = append(ids, siblingID)\n\t}\n\n\tcopathPairs, err := readDB.retrieveLatestCopathDigests(ids)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\tproofResult.CoPath = CoPath\n\t\treturn proofResult\n\t}\n\n\tfor j := 0; j < len(copathPairs); j++ {\n\t\tT.cache[copathPairs[j].ID] = &copathPairs[j].Digest\n\t}\n\n\tcurrID = startingIndex\n\t// Our stopping condition is length > 0 so we don't add the root to the copath\n\tfor ; len(currID) > 0; currID = getParent(currID) {\n\t\t// Append the sibling to the copath and advance current node\n\t\tsiblingID, _ := getSibling(currID)\n\t\tsiblingDigestPointer, ok := T.cache[siblingID]\n\t\tvar siblingDigest digest\n\t\tif !ok {\n\t\t\tsiblingDigest = T.getEmpty(T.depth - len(siblingID))\n\t\t} else {\n\t\t\tsiblingDigest = *siblingDigestPointer\n\t\t}\n\t\tCoPathNode := CoPathPair{siblingID, siblingDigest}\n\t\tCoPath = append(CoPath, CoPathNode)\n\t}\n\t// fmt.Println(\"Length of CoPath\", len(CoPath))\n\t// 4 metadata fields and 2 times the copath length\n\tproofResult.ProofLength = len(CoPath)*2+4\n\tproofResult.CoPath = CoPath\n\treturn proofResult\t\n}",
"func IntHashInfo() {\n\tvar sp map[uint64]uint64 = make(map[uint64]uint64)\n\tprintMemStats()\n\ts := fnv.New64a()\n\tfor i := 0; i < 100000; i++ {\n\t\ts.Reset()\n\t\ts.Write([]byte(strconv.Itoa(i)))\n\t\ta := hex.EncodeToString(s.Sum(nil))\n\n\t\tas, error := strconv.ParseUint(a, 16, 64)\n\t\tif error != nil {\n\t\t\treturn\n\t\t}\n\t\tsp[as] = as\n\t\t//fmt.Println(as)\n\n\t}\n\tprintMemStats()\n\tfmt.Println(unsafe.Sizeof(sp))\n\tfmt.Println(len(sp))\n}",
"func (z *Block) MarshalHash() (o []byte, err error) {\n\tvar b []byte\n\to = hsp.Require(b, z.Msgsize())\n\t// map header, size 4\n\to = append(o, 0x84)\n\to = hsp.AppendArrayHeader(o, uint32(len(z.Acks)))\n\tfor za0003 := range z.Acks {\n\t\tif z.Acks[za0003] == nil {\n\t\t\to = hsp.AppendNil(o)\n\t\t} else {\n\t\t\tif oTemp, err := z.Acks[za0003].MarshalHash(); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t} else {\n\t\t\t\to = hsp.AppendBytes(o, oTemp)\n\t\t\t}\n\t\t}\n\t}\n\to = hsp.AppendArrayHeader(o, uint32(len(z.FailedReqs)))\n\tfor za0001 := range z.FailedReqs {\n\t\tif z.FailedReqs[za0001] == nil {\n\t\t\to = hsp.AppendNil(o)\n\t\t} else {\n\t\t\tif oTemp, err := z.FailedReqs[za0001].MarshalHash(); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t} else {\n\t\t\t\to = hsp.AppendBytes(o, oTemp)\n\t\t\t}\n\t\t}\n\t}\n\to = hsp.AppendArrayHeader(o, uint32(len(z.QueryTxs)))\n\tfor za0002 := range z.QueryTxs {\n\t\tif z.QueryTxs[za0002] == nil {\n\t\t\to = hsp.AppendNil(o)\n\t\t} else {\n\t\t\tif oTemp, err := z.QueryTxs[za0002].MarshalHash(); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t} else {\n\t\t\t\to = hsp.AppendBytes(o, oTemp)\n\t\t\t}\n\t\t}\n\t}\n\t// map header, size 2\n\to = append(o, 0x82)\n\tif oTemp, err := z.SignedHeader.Header.MarshalHash(); err != nil {\n\t\treturn nil, err\n\t} else {\n\t\to = hsp.AppendBytes(o, oTemp)\n\t}\n\tif oTemp, err := z.SignedHeader.HSV.MarshalHash(); err != nil {\n\t\treturn nil, err\n\t} else {\n\t\to = hsp.AppendBytes(o, oTemp)\n\t}\n\treturn\n}",
"func (expr *Expr) Hash() int {\n\ts := expr.Index\n\tswitch ts := expr.Terms.(type) {\n\tcase []*Term:\n\t\tfor _, t := range ts {\n\t\t\ts += t.Value.Hash()\n\t\t}\n\tcase *Term:\n\t\ts += ts.Value.Hash()\n\t}\n\tif expr.Negated {\n\t\ts++\n\t}\n\treturn s\n}",
"func (mp *MerkleProof) ToBytes() ([]byte, error) {\r\n\tindex := bt.VarInt(mp.Index)\r\n\r\n\ttxOrID, err := hex.DecodeString(mp.TxOrID)\r\n\tif err != nil {\r\n\t\treturn nil, err\r\n\t}\r\n\ttxOrID = bt.ReverseBytes(txOrID)\r\n\r\n\ttarget, err := hex.DecodeString(mp.Target)\r\n\tif err != nil {\r\n\t\treturn nil, err\r\n\t}\r\n\ttarget = bt.ReverseBytes(target)\r\n\r\n\tnodeCount := len(mp.Nodes)\r\n\r\n\tnodes := []byte{}\r\n\r\n\tfor _, n := range mp.Nodes {\r\n\t\tif n == \"*\" {\r\n\t\t\tnodes = append(nodes, []byte{1}...)\r\n\t\t\tcontinue\r\n\t\t}\r\n\r\n\t\tnodes = append(nodes, []byte{0}...)\r\n\t\tnb, err := hex.DecodeString(n)\r\n\t\tif err != nil {\r\n\t\t\treturn nil, err\r\n\t\t}\r\n\t\tnodes = append(nodes, bt.ReverseBytes(nb)...)\r\n\r\n\t}\r\n\r\n\tvar flags uint8\r\n\r\n\tvar txLength []byte\r\n\tif len(mp.TxOrID) > 64 { // tx bytes instead of txid\r\n\t\t// set bit at index 0\r\n\t\tflags |= (1 << 0)\r\n\r\n\t\ttxLength = bt.VarInt(uint64(len(txOrID)))\r\n\t}\r\n\r\n\tif mp.TargetType == \"header\" {\r\n\t\t// set bit at index 1\r\n\t\tflags |= (1 << 1)\r\n\t} else if mp.TargetType == \"merkleRoot\" {\r\n\t\t// set bit at index 2\r\n\t\tflags |= (1 << 2)\r\n\t}\r\n\r\n\t// ignore proofType and compositeType for this version\r\n\r\n\tbytes := []byte{}\r\n\tbytes = append(bytes, flags)\r\n\tbytes = append(bytes, index...)\r\n\tbytes = append(bytes, txLength...)\r\n\tbytes = append(bytes, txOrID...)\r\n\tbytes = append(bytes, target...)\r\n\tbytes = append(bytes, byte(nodeCount))\r\n\tbytes = append(bytes, nodes...)\r\n\r\n\treturn bytes, nil\r\n}",
"func (t *Table) hash(s string) int {\n\t// Good enough.\n\th := fnv.New32()\n\th.Write([]byte(s))\n\treturn int(h.Sum32()) % t.m\n}",
"func (f *Forest) reHash(dirt []uint64) error {\n\tif f.height == 0 || len(dirt) == 0 { // nothing to hash\n\t\treturn nil\n\t}\n\ttops, topheights := getTopsReverse(f.numLeaves, f.height)\n\t// fmt.Printf(\"nl %d f.h %d tops %v\\n\", f.numLeaves, f.height, tops)\n\n\tdirty2d := make([][]uint64, f.height)\n\th := uint8(0)\n\tdirtyRemaining := 0\n\tfor _, pos := range dirt {\n\t\tif pos > f.numLeaves {\n\t\t\treturn fmt.Errorf(\"Dirt %d exceeds numleaves %d\", pos, f.numLeaves)\n\t\t}\n\t\tdHeight := detectHeight(pos, f.height)\n\t\t// increase height if needed\n\t\tfor h < dHeight {\n\t\t\th++\n\t\t}\n\t\tif h > f.height {\n\t\t\treturn fmt.Errorf(\"postion %d at height %d but forest only %d high\",\n\t\t\t\tpos, h, f.height)\n\t\t}\n\t\t// if bridgeVerbose {\n\t\t// fmt.Printf(\"h %d\\n\", h)\n\t\t// }\n\t\tdirty2d[h] = append(dirty2d[h], pos)\n\t\tdirtyRemaining++\n\t}\n\n\t// this is basically the same as VerifyBlockProof. Could maybe split\n\t// it to a separate function to reduce redundant code..?\n\t// nah but pretty different beacuse the dirtyMap has stuff that appears\n\t// halfway up...\n\n\tvar currentRow, nextRow []uint64\n\n\t// floor by floor\n\tfor h = uint8(0); h < f.height; h++ {\n\t\tif bridgeVerbose {\n\t\t\tfmt.Printf(\"dirty %v\\ncurrentRow %v\\n\", dirty2d[h], currentRow)\n\t\t}\n\n\t\t// merge nextRow and the dirtySlice. They're both sorted so this\n\t\t// should be quick. Seems like a CS class kindof algo but who knows.\n\t\t// Should be O(n) anyway.\n\n\t\tcurrentRow = mergeSortedSlices(currentRow, dirty2d[h])\n\t\tdirtyRemaining -= len(dirty2d[h])\n\t\tif dirtyRemaining == 0 && len(currentRow) == 0 {\n\t\t\t// done hashing early\n\t\t\tbreak\n\t\t}\n\n\t\tfor i, pos := range currentRow {\n\t\t\t// skip if next is sibling\n\t\t\tif i+1 < len(currentRow) && currentRow[i]|1 == currentRow[i+1] {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif len(tops) == 0 {\n\t\t\t\treturn fmt.Errorf(\n\t\t\t\t\t\"currentRow %v no tops remaining, this shouldn't happen\",\n\t\t\t\t\tcurrentRow)\n\t\t\t}\n\t\t\t// also skip if this is a top\n\t\t\tif pos == tops[0] {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tright := pos | 1\n\t\t\tleft := right ^ 1\n\t\t\tparpos := up1(left, f.height)\n\n\t\t\t//\t\t\t\tfmt.Printf(\"bridge hash %d %04x, %d %04x -> %d\\n\",\n\t\t\t//\t\t\t\t\tleft, leftHash[:4], right, rightHash[:4], parpos)\n\t\t\tif f.data.read(left) == empty || f.data.read(right) == empty {\n\t\t\t\tf.data.write(parpos, empty)\n\t\t\t} else {\n\t\t\t\tpar := Parent(f.data.read(left), f.data.read(right))\n\t\t\t\tf.HistoricHashes++\n\t\t\t\tf.data.write(parpos, par)\n\t\t\t}\n\t\t\tnextRow = append(nextRow, parpos)\n\t\t}\n\t\tif topheights[0] == h {\n\t\t\ttops = tops[1:]\n\t\t\ttopheights = topheights[1:]\n\t\t}\n\t\tcurrentRow = nextRow\n\t\tnextRow = []uint64{}\n\t}\n\n\treturn nil\n}",
"func sendProof(kx *KX, mk, ek *[32]byte, parts ...[]byte) ([]byte, error) {\n\th := hmac.New(sha256.New, mk[:])\n\tfor _, p := range parts {\n\t\th.Write(p)\n\t}\n\td := h.Sum(nil)\n\terr := kx.writeWithKey(d, ek)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn d, err\n}",
"func encodeHash(x uint64, p, pPrime uint) (hashCode uint64) {\n\tif x&onesFromTo(64-pPrime, 63-p) == 0 {\n\t\tr := rho(extractShift(x, 0, 63-pPrime))\n\t\treturn concat([]concatInput{\n\t\t\t{x, 64 - pPrime, 63},\n\t\t\t{uint64(r), 0, 5},\n\t\t\t{1, 0, 0}, // this just adds a 1 bit at the end\n\t\t})\n\t} else {\n\t\treturn concat([]concatInput{\n\t\t\t{x, 64 - pPrime, 63},\n\t\t\t{0, 0, 0}, // this just adds a 0 bit at the end\n\t\t})\n\t}\n}",
"func hash(key string) int{\n\tvar num = 0\n\t// get the lenght of the key\n\tvar length = len(key)\n\n\t// add the ascii character value to creat a sum \n\tfor i := 0; i < length; i++{\n\n\t\tnum += int(key[i])\n\t}\n\t\n\t// square in the middle hash method\n\tvar avg = num * int((math.Pow(5.0, 0.5) - 1)) / 2\n\tvar numeric = avg - int(math.Floor(float64(avg)))\n\n\n\t// hash value to place into the table slice between -1 and CAPACITY - 1\n\treturn int(math.Floor(float64(numeric * CAPACITY)))\n}",
"func newMultiProof(hashes map[uint64][]byte, indices []uint64, values uint64) *MultiProof {\n\treturn &MultiProof{\n\t\tValues: values,\n\t\tHashes: hashes,\n\t\tIndices: indices,\n\t}\n}",
"func hash(m datasource.Metric) uint64 {\n\thash := fnv.New64a()\n\tlabels := m.Labels\n\tsort.Slice(labels, func(i, j int) bool {\n\t\treturn labels[i].Name < labels[j].Name\n\t})\n\tfor _, l := range labels {\n\t\t// drop __name__ to be consistent with Prometheus alerting\n\t\tif l.Name == \"__name__\" {\n\t\t\tcontinue\n\t\t}\n\t\thash.Write([]byte(l.Name))\n\t\thash.Write([]byte(l.Value))\n\t\thash.Write([]byte(\"\\xff\"))\n\t}\n\treturn hash.Sum64()\n}",
"func SimpleHash(i int) string {\n\treturn fmt.Sprint(i % 50)\n}",
"func VerifyMultiProofUsing(data [][]byte, salt bool, proof *MultiProof, root []byte, hashType HashType) (bool, error) {\n\t// Step 1 create hashes for all values\n\tvar proofHash []byte\n\tindexSalt := make([]byte, 4)\n\tfor i, index := range proof.Indices {\n\t\tif salt {\n\t\t\tbinary.BigEndian.PutUint32(indexSalt, uint32(index))\n\t\t\tproofHash = hashType.Hash(data[i], indexSalt)\n\t\t} else {\n\t\t\tproofHash = hashType.Hash(data[i])\n\t\t}\n\t\tproof.Hashes[index+proof.Values] = proofHash\n\t}\n\n\t// Step 2 calculate values up the tree\n\tfor i := proof.Values - 1; i > 0; i-- {\n\t\t_, exists := proof.Hashes[i]\n\t\tif !exists {\n\t\t\tchild1, exists := proof.Hashes[i*2]\n\t\t\tif exists {\n\t\t\t\tchild2, exists := proof.Hashes[i*2+1]\n\t\t\t\tif exists {\n\t\t\t\t\tproof.Hashes[i] = hashType.Hash(child1, child2)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif !bytes.Equal(proof.Hashes[1], root) {\n\t\treturn false, nil\n\t}\n\treturn true, nil\n}",
"func (s SampleList) Hash(i int) []byte {\n\tres := md5.Sum(s[i])\n\treturn res[:]\n}",
"func (bc *Blockchain) ProofOfWorkCalc(proof int, previous_proof int, Timestamp int64) string {\n // calculate the proof of work function\n var hash_PoW = sha256.New()\n result := (proof * proof) - (previous_proof * previous_proof) - int(Timestamp)\n hash_PoW.Write([]byte(strconv.Itoa(result)))\n hashed_PoW := hash_PoW.Sum(nil)\n result_hash := hex.EncodeToString(hashed_PoW)\n return result_hash\n}",
"func (i *ItemMeta) hash() int {\n\treturn hash(\n\t\ti.ItemName,\n\t\ti.ManufacturerItemDescription,\n\t\ti.UnitQuantity,\n\t\ti.IsWeighted,\n\t\ti.QuantityInPackage,\n\t\ti.AllowDiscount,\n\t\ti.ItemStatus,\n\t)\n}",
"func memhash(p unsafe.Pointer, seed, s uintptr) uintptr {\n\tif unsafe.Sizeof(uintptr(0)) > 4 {\n\t\treturn uintptr(hash64(p, s, seed))\n\t}\n\treturn uintptr(hash32(p, s, seed))\n}",
"func testProof(previousProof, newProof uint64) bool {\n\t// Concatenate the previous proof with the new proof\n\t// Calculate sha256 of the data\n\tshaResult := crypto.Sha256([]byte(fmt.Sprintf(\"%v%v\", previousProof, newProof)))\n\treturn reflect.DeepEqual(shaResult[:2], proofTest)\n}",
"func (hm *HashMap) getIndex(key string) uint64 {\n\thasher := hm.hasher.Get().(hash.Hash64)\n\thasher.Reset()\n\thasher.Write([]byte(key))\n\tindex := hasher.Sum64() % hm.size\n\thm.hasher.Put(hasher)\n\treturn index\n}",
"func hash(addr mino.Address) *big.Int {\n\tsha := sha256.New()\n\tmarshalled, err := addr.MarshalText()\n\tif err != nil {\n\t\tmarshalled = []byte(addr.String())\n\t}\n\t// A hack to accommodate for minogrpc's design:\n\t// 1) the first byte is used to indicate if a node is orchestrator or not\n\t// 2) the only way to reach the orchestrator is to route a message to nil\n\t// from its server side, which has the same address but orchestrator byte\n\t// set to f.\n\t// We therefore have to ignore if a node is the orchestrator to be able to\n\t// route the message first to its server side, then from the server side to\n\t// the client side.\n\tsha.Write(marshalled[1:])\n\treturn byteArrayToBigInt(sha.Sum(nil))\n}",
"func resMemoIn(n int) int {\n\thash := make(map[int]int)\n\treturn resMemo(n, hash)\n}",
"func Hash(mem []byte) uint64 {\n\tvar hash uint64 = 5381\n\tfor _, b := range mem {\n\t\thash = (hash << 5) + hash + uint64(b)\n\t}\n\treturn hash\n}",
"func (k *VrfablePrivateKey) Evaluate(m []byte) (index [32]byte, proof []byte) {\n\tnilIndex := [32]byte{}\n\t// Prover chooses r <-- [1,N-1]\n\tr, _, _, err := generateKeyFromCurve(curve, rand.Reader)\n\tif err != nil {\n\t\treturn nilIndex, nil\n\t}\n\tri := new(big.Int).SetBytes(r)\n\n\t// H = H1(m)\n\tHx, Hy := H1(m)\n\n\t// VRF_k(m) = [k]H\n\tsHx, sHy := curve.ScalarMult(Hx, Hy, k.D.Bytes())\n\n\t// vrf := elliptic.Marshal(curve, sHx, sHy) // 65 bytes.\n\tvrf := curve.Marshal(sHx, sHy) // 65 bytes.\n\n\t// G is the base point\n\t// s = H2(G, H, [k]G, VRF, [r]G, [r]H)\n\trGx, rGy := curve.ScalarBaseMult(r)\n\trHx, rHy := curve.ScalarMult(Hx, Hy, r)\n\tvar b bytes.Buffer\n\tb.Write(curve.Marshal(params.Gx, params.Gy))\n\tb.Write(curve.Marshal(Hx, Hy))\n\tb.Write(curve.Marshal(k.PublicKey.X, k.PublicKey.Y))\n\tb.Write(vrf)\n\tb.Write(curve.Marshal(rGx, rGy))\n\tb.Write(curve.Marshal(rHx, rHy))\n\ts := H2(b.Bytes())\n\n\t// t = r−s*k mod N\n\tt := new(big.Int).Sub(ri, new(big.Int).Mul(s, k.D))\n\tt.Mod(t, params.N)\n\n\t// Index = H(vrf)\n\tindex = sha256.Sum256(vrf)\n\n\t// Write s, t, and vrf to a proof blob. Also write leading zeros before s and t\n\t// if needed.\n\tvar buf bytes.Buffer\n\tbuf.Write(make([]byte, 32-len(s.Bytes())))\n\tbuf.Write(s.Bytes())\n\tbuf.Write(make([]byte, 32-len(t.Bytes())))\n\tbuf.Write(t.Bytes())\n\tbuf.Write(vrf)\n\n\treturn index, buf.Bytes()\n}",
"func (t *largeFlatTable) Hash() hash.Hash { return t.hash }",
"func space_hash(x, y, n uint64) (SpaceMapKey) {\n return SpaceMapKey((x*1640531513 ^ y*2654435789) % n)\n}",
"func (v Vertex) HashCode() int64 {\n\tprime := 31\n\tresult := 1\n\treturn int64(result * prime)\n}",
"func (eln *EmptyLeafNode) CountHashesRequiredForGetHash() int {\n\treturn 0\n}",
"func (k PrivateKey) Evaluate(m []byte) (index [32]byte, proof []byte) {\n\tnilIndex := [32]byte{}\n\t// Prover chooses r <-- [1,N-1]\n\tr, _, _, err := elliptic.GenerateKey(curve, rand.Reader)\n\tif err != nil {\n\t\treturn nilIndex, nil\n\t}\n\tri := new(big.Int).SetBytes(r)\n\n\t// H = H1(m)\n\tHx, Hy := H1(m)\n\n\t// VRF_k(m) = [k]H\n\tsHx, sHy := curve.ScalarMult(Hx, Hy, k.D.Bytes())\n\tvrf := elliptic.Marshal(curve, sHx, sHy) // 65 bytes.\n\n\t// G is the base point\n\t// s = H2(G, H, [k]G, VRF, [r]G, [r]H)\n\trGx, rGy := curve.ScalarBaseMult(r)\n\trHx, rHy := curve.ScalarMult(Hx, Hy, r)\n\tvar b bytes.Buffer\n\tif _, err := b.Write(elliptic.Marshal(curve, curve.Gx, curve.Gy)); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(elliptic.Marshal(curve, Hx, Hy)); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(elliptic.Marshal(curve, k.PublicKey.X, k.PublicKey.Y)); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(vrf); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(elliptic.Marshal(curve, rGx, rGy)); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := b.Write(elliptic.Marshal(curve, rHx, rHy)); err != nil {\n\t\tpanic(err)\n\t}\n\ts := H2(b.Bytes())\n\n\t// t = r−s*k mod N\n\tt := new(big.Int).Sub(ri, new(big.Int).Mul(s, k.D))\n\tt.Mod(t, curve.N)\n\n\t// Index = H(vrf)\n\tindex = sha256.Sum256(vrf)\n\n\t// Write s, t, and vrf to a proof blob. Also write leading zeros before s and t\n\t// if needed.\n\tvar buf bytes.Buffer\n\tif _, err := buf.Write(make([]byte, 32-len(s.Bytes()))); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := buf.Write(s.Bytes()); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := buf.Write(make([]byte, 32-len(t.Bytes()))); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := buf.Write(t.Bytes()); err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err := buf.Write(vrf); err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn index, buf.Bytes()\n}",
"func (p *Processor) getHash(x *mat.Dense) int {\n\th := x.T().Mul(p.r.Value())\n\tconcat := mat.ConcatV(h, h.ProdScalar(-1.0))\n\treturn f64utils.ArgMax(concat.Data())\n}",
"func getIndexAndFingerprint(data []byte, bucketPow uint) (uint, fingerprint) {\n\thash := metro.Hash64(data, 1337)\n\tfp := getFingerprint(hash)\n\t// Use most significant bits for deriving index.\n\ti1 := uint(hash>>32) & masks[bucketPow]\n\treturn i1, fingerprint(fp)\n}",
"func testProver(t *testing.T, n uint64, l uint, postFileName string, merkleFileName string) {\n\n\tcurrFolder, err := os.Getwd()\n\tif err != nil {\n\t\tassert.NoError(t, err, \"can't get path of executable\")\n\t}\n\n\tf := filepath.Join(currFolder, postFileName)\n\tmf := filepath.Join(currFolder, merkleFileName)\n\n\t// Initial commitment\n\t//id := util.Rnd(t, 32)\n\tseed, _ := new(big.Int).SetString(\"3b05a45e418666973c19aaccdf2547ba8d33e9610f547b31a0735d95d45469b5\", 16)\n\tid := seed.Bytes()\n\n\t// H(id) to be used for iPoW\n\th := hashing.NewHashFunc(id)\n\n\t// Generate a proof for a challenge\n\n\tpv, err := prover.NewProver(id, n, l, h, f, mf)\n\n\tchallenge := util.Rnd1(32)\n\n\tt1 := time.Now()\n\tproof, err := pv.Prove(challenge)\n\te1 := time.Since(t1)\n\tt.Logf(\"Proof generated in %s seconds.\\n\", e1)\n\n\tassert.NoError(t, err)\n\tfor i, n := range proof.Nonces {\n\t\tfmt.Printf(\"[%d] : %d\\n\", i, n)\n\t}\n\n\tassert.NoError(t, err)\n}",
"func siphash(k0, k1, m uint64) uint64 {\n\t// Initialization.\n\tv0 := k0 ^ 0x736f6d6570736575\n\tv1 := k1 ^ 0x646f72616e646f6d\n\tv2 := k0 ^ 0x6c7967656e657261\n\tv3 := k1 ^ 0x7465646279746573\n\tt := uint64(8) << 56\n\n\t// Compression.\n\tv3 ^= m\n\n\t// Round 1.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\t// Round 2.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\tv0 ^= m\n\n\t// Compress last block.\n\tv3 ^= t\n\n\t// Round 1.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\t// Round 2.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\tv0 ^= t\n\n\t// Finalization.\n\tv2 ^= 0xff\n\n\t// Round 1.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\t// Round 2.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\t// Round 3.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\t// Round 4.\n\tv0 += v1\n\tv1 = v1<<13 | v1>>(64-13)\n\tv1 ^= v0\n\tv0 = v0<<32 | v0>>(64-32)\n\n\tv2 += v3\n\tv3 = v3<<16 | v3>>(64-16)\n\tv3 ^= v2\n\n\tv0 += v3\n\tv3 = v3<<21 | v3>>(64-21)\n\tv3 ^= v0\n\n\tv2 += v1\n\tv1 = v1<<17 | v1>>(64-17)\n\tv1 ^= v2\n\tv2 = v2<<32 | v2>>(64-32)\n\n\treturn v0 ^ v1 ^ v2 ^ v3\n}",
"func (sc *SetComprehension) Hash() int {\n\treturn sc.Term.Hash() + sc.Body.Hash()\n}",
"func TestSignVerifyHash(t *testing.T) {\n\tt.Parallel()\n\t// Create some random data.\n\tdata := fastrand.Bytes(100)\n\t// Generate a keypair.\n\tsk, pk := GenerateED25519KeyPair()\n\t// Hash the data.\n\thash := blake2b.Sum256(data)\n\t// Sign the data.\n\tsig := signHash(hash, sk)\n\t// Verify signature\n\tif !verifyHash(hash, pk, sig) {\n\t\tt.Fatal(\"signature wasn't verified\")\n\t}\n}",
"func main() {\n\tvalues := []string{\"ABC\", \"ACB\", \"BAC\", \"BCA\", \"CAB\", \"CBA\"}\n\t// values := []string{\"to\", \"to\", \"top\", \"ton\", \"tom\"}\n\tfactor := []int{100, 10, 1}\n\n\t// 65x100 + 66x10 + 67x1 = 7227\n\thashKey := 0\n\tfor v := range values {\n\t\tbytes := []byte(values[v])\n\t\tf := 0\n\t\thashKey = 0\n\t\tfor i := range bytes {\n\t\t\tfmt.Print(bytes[i], \" \")\n\t\t\thashKey += int(bytes[i]) * factor[f]\n\t\t\tf++\n\t\t}\n\t\tfmt.Printf(\" (hashKey: %d) \\n\", hashKey)\n\t}\n}",
"func computeHash(nstObj megav1.NamespaceTemplate) uint64 {\n\thash, err := hashstructure.Hash(nstObj, nil)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tfmt.Printf(\"computeHash: %d\\n\", hash)\n\treturn hash\n}",
"func (ph *PHash) computeHash(img [][]float32) hashtype.Binary {\n\t// TODO: Remove magic numbers\n\thash := make(hashtype.Binary, 8)\n\tvar c uint\n\tfor i := range img {\n\t\tfor j := range img[i] {\n\t\t\tif img[i][j] != 0 {\n\t\t\t\thash.Set(c)\n\t\t\t}\n\t\t\tc++\n\t\t}\n\t}\n\treturn hash\n}",
"func jumpHash(key uint64, numBuckets int) int32 {\n\tvar b int64 = -1\n\tvar j int64\n\n\tfor j < int64(numBuckets) {\n\t\tb = j\n\t\tkey = key*2862933555777941757 + 1\n\t\tj = int64(float64(b+1) * (float64(int64(1)<<31) / float64((key>>33)+1)))\n\t}\n\n\treturn int32(b)\n}",
"func (pr PolicyReport) ResultHash() string {\n\tlist := make([]string, 0, len(pr.Results))\n\n\tfor id := range pr.Results {\n\t\tlist = append(list, id)\n\t}\n\n\tsort.Strings(list)\n\n\th := sha1.New()\n\th.Write([]byte(strings.Join(list, \"\")))\n\n\treturn hex.EncodeToString(h.Sum(nil))\n}",
"func (s *ShardMap) hash(v interface{}) int {\n\tswitch s.Type {\n\tcase \"string\":\n\t\tval, ok := v.(string)\n\t\tif !ok {\n\t\t\treturn -1\n\t\t}\n\n\t\thash := fnv.New32()\n\t\thash.Write([]byte(val))\n\t\treturn int(hash.Sum32() % NumShards)\n\tcase \"int32\":\n\t\t// Values that come as numbers in JSON are of type float64.\n\t\tval, ok := v.(float64)\n\t\tif !ok {\n\t\t\treturn -1\n\t\t}\n\n\t\treturn int(int32(val) % NumShards)\n\tdefault:\n\t\treturn -1\n\t}\n}",
"func (m *UnsyncListMock) CalculateHash() (r []byte, r1 error) {\n\tcounter := atomic.AddUint64(&m.CalculateHashPreCounter, 1)\n\tdefer atomic.AddUint64(&m.CalculateHashCounter, 1)\n\n\tif len(m.CalculateHashMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.CalculateHashMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to UnsyncListMock.CalculateHash.\")\n\t\t\treturn\n\t\t}\n\n\t\tresult := m.CalculateHashMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the UnsyncListMock.CalculateHash\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\t\tr1 = result.r1\n\n\t\treturn\n\t}\n\n\tif m.CalculateHashMock.mainExpectation != nil {\n\n\t\tresult := m.CalculateHashMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the UnsyncListMock.CalculateHash\")\n\t\t}\n\n\t\tr = result.r\n\t\tr1 = result.r1\n\n\t\treturn\n\t}\n\n\tif m.CalculateHashFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to UnsyncListMock.CalculateHash.\")\n\t\treturn\n\t}\n\n\treturn m.CalculateHashFunc()\n}",
"func MortonHash(lon, lat float64) uint64 {\n\treturn numeric.Interleave(scaleLon(lon), scaleLat(lat))\n}",
"func (i *indexBack) getHashKey() uint64 {\n\treturn i.hashKey\n}",
"func (pow *ProofOfWork) Run() (int, []byte) {\r\n\tvar intHash big.Int\r\n\tvar hash [32]byte\r\n\r\n\tnonce := 0\r\n\r\n\tfor nonce < math.MaxInt64 {\r\n\t\tdata := pow.InitData(nonce)\r\n\t\thash = sha256.Sum256(data)\r\n\r\n\t\tfmt.Printf(\"\\r%x\", hash)\r\n\t\tintHash.SetBytes(hash[:])\r\n\r\n\t\tif intHash.Cmp(pow.Target) == -1 {\r\n\t\t\tbreak\r\n\t\t} else {\r\n\t\t\tnonce++\r\n\t\t}\r\n\r\n\t}\r\n\tfmt.Println()\r\n\r\n\treturn nonce, hash[:]\r\n}",
"func (pow *ProofOfWork) Run() (int, []byte) {\n\tvar intHash big.Int\n\tvar hash [32]byte\n\n\tnonce := 0\n\n\t// run forever (virtually)\n\tfor nonce < math.MaxInt64 {\n\t\t// joins the previous hash, the current hash, the nonce and the difficulty into a 2d slice of bytes\n\t\tdata := pow.InitData(nonce)\n\t\t// hash the bytes\n\t\thash = sha256.Sum256(data)\n\n\t\tfmt.Printf(\"\\r%x\", hash)\n\n\t\t// set the result to the big integer\n\t\tintHash.SetBytes(hash[:])\n\n\t\t// less than the target we are looking for. Block is signed\n\t\tif intHash.Cmp(pow.Target) == -1 {\n\t\t\tbreak\n\t\t}\n\t\tnonce++\n\n\t}\n\n\tfmt.Println()\n\treturn nonce, hash[:]\n}",
"func (ac *ArrayComprehension) Hash() int {\n\treturn ac.Term.Hash() + ac.Body.Hash()\n}",
"func VerifyProof(rootHash common.Hash, key []byte, value []byte, proof [][]byte) (bool, error) {\n\tproofDB := NewMemDB()\n\tfor _, node := range proof {\n\t\tkey := crypto.Keccak256(node)\n\t\tproofDB.Put(key, node)\n\t}\n\tpath := crypto.Keccak256(key)\n\n\tres, err := trie.VerifyProof(rootHash, path, proofDB)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\treturn bytes.Equal(value, res), nil\n}",
"func (t *smallFlatTable) Hash() hash.Hash { return t.hash }",
"func (self *ResTransaction)GetHash()string{\n hb := new(utils.HashBuilder)\n hb.Add(self.Creator)\n hb.Add(self.Timestamp.Format(\"2006-01-02 15:04:05\"))\n hb.Add(self.JobBlock)\n hb.Add(self.JobTrans)\n hb.Add(self.Output)\n for i:=0;i<len(self.Inputs);i++{\n hb.Add(self.Inputs[i])\n }\n hb.Add(self.HashSol)\n hb.Add(self.Evaluation)\n hb.Add(self.IsMin)\n return fmt.Sprintf(\"%x\",hb.GetHash())\n}",
"func verifyPvtdataHash(n *nwo.Network, chaincodeQueryCmd commands.ChaincodeQuery, peers []*nwo.Peer, expected []byte) {\n\tfor _, peer := range peers {\n\t\tsess, err := n.PeerUserSession(peer, \"User1\", chaincodeQueryCmd)\n\t\tExpect(err).NotTo(HaveOccurred())\n\t\tEventually(sess, n.EventuallyTimeout).Should(gexec.Exit(0))\n\t\tactual := sess.Buffer().Contents()\n\t\t// verify actual bytes contain expected bytes - cannot use equal because session may contain extra bytes\n\t\tExpect(bytes.Contains(actual, expected)).To(Equal(true))\n\t}\n}",
"func verifyPvtdataHash(n *nwo.Network, chaincodeQueryCmd commands.ChaincodeQuery, peers []*nwo.Peer, expected []byte) {\n\tfor _, peer := range peers {\n\t\tsess, err := n.PeerUserSession(peer, \"User1\", chaincodeQueryCmd)\n\t\tExpect(err).NotTo(HaveOccurred())\n\t\tEventually(sess, n.EventuallyTimeout).Should(gexec.Exit(0))\n\t\tactual := sess.Buffer().Contents()\n\t\t// verify actual bytes contain expected bytes - cannot use equal because session may contain extra bytes\n\t\tExpect(bytes.Contains(actual, expected)).To(Equal(true))\n\t}\n}",
"func NewProof(hash *hash.Hash, public Public, private Private) *Proof {\n\tn := public.Pedersen.N\n\tphi := private.Phi\n\n\ta := make([]*big.Int, params.StatParam)\n\tA := make([]*big.Int, params.StatParam)\n\n\tfor i := 0; i < params.StatParam; i++ {\n\t\t// aᵢ ∈ mod ϕ(N)\n\t\ta[i] = sample.IntervalLN(rand.Reader)\n\t\ta[i].Mod(a[i], phi)\n\n\t\t// Aᵢ = tᵃ mod N\n\t\tA[i] = new(big.Int).Exp(public.Pedersen.T, a[i], n)\n\t}\n\n\tes := challenge(hash, public, A)\n\n\tZ := make([]*big.Int, params.StatParam)\n\tfor i := 0; i < params.StatParam; i++ {\n\t\tz := a[i]\n\t\tif es[i] {\n\t\t\tz.Add(z, private.Lambda)\n\t\t\tz.Mod(z, phi)\n\t\t}\n\t\tZ[i] = z\n\t}\n\n\treturn &Proof{\n\t\tA: &A,\n\t\tZ: &Z,\n\t}\n}",
"func (bol Boolean) Hash() int {\n\tif bol {\n\t\treturn 1\n\t}\n\treturn 0\n}",
"func (_SweetToken *SweetTokenCaller) ContractHash(opts *bind.CallOpts) ([32]byte, error) {\n\tvar (\n\t\tret0 = new([32]byte)\n\t)\n\tout := ret0\n\terr := _SweetToken.contract.Call(opts, out, \"contractHash\")\n\treturn *ret0, err\n}",
"func op_BLOCKHASH(pc *uint64, in *interpreter, ctx *callCtx) uint64 {\n\tnum := ctx.stack.Peek()\n\tnum64, overflow := num.Uint64WithOverflow()\n\tif overflow {\n\t\tnum.Clear()\n\t}\n\tvar upper, lower uint64\n\tupper = in.evm.block.NumberU64()\n\tif upper < 257 {\n\t\tlower = 0\n\t} else {\n\t\tlower = upper - 256\n\t}\n\tif num64 >= lower && num64 < upper {\n\t\tnum.SetBytes(in.evm.block.Hash().Bytes())\n\t} else {\n\t\tnum.Clear()\n\t}\n\treturn 0\n}",
"func (index *tableIndex) hashcode(value []interface{}, columnIndices ...int64) string {\n\treflectedValue := reflect.ValueOf(value)\n\ttestValue := \"\"\n\tsort.Slice(columnIndices, func(i, j int) bool {\n\t\treturn columnIndices[i] < columnIndices[j]\n\t})\n\n\tfor _, idx := range columnIndices {\n\t\tfield := reflectedValue.Index(int(idx))\n\t\t// 특별 예외: bool은 대문자로 변환\n\t\tvar msg string\n\t\tif boolValue, ok := field.Interface().(bool); ok {\n\t\t\tmsg = fmt.Sprintf(\"%v%v\", idx, trueOrFalse[boolValue])\n\t\t} else {\n\t\t\tmsg = fmt.Sprintf(\"%v%v\", idx, field.Interface())\n\t\t}\n\n\t\ttestValue += msg\n\t}\n\n\thashed := getIndexKey(testValue)\n\treturn hashed\n}",
"func (t *TrillianLogRPCServer) GetInclusionProofByHash(ctx context.Context, req *trillian.GetInclusionProofByHashRequest) (*trillian.GetInclusionProofByHashResponse, error) {\n\tctx, spanEnd := spanFor(ctx, \"GetInclusionProofByHash\")\n\tdefer spanEnd()\n\n\ttree, hasher, err := t.getTreeAndHasher(ctx, req.LogId, optsLogRead)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctx = trees.NewContext(ctx, tree)\n\n\tif err := validateGetInclusionProofByHashRequest(req, hasher); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Next we need to make sure the requested tree size corresponds to an STH, so that we\n\t// have a usable tree revision\n\ttx, err := t.snapshotForTree(ctx, tree, \"GetInclusionProofByHash\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer t.closeAndLog(ctx, tree.TreeId, tx, \"GetInclusionProofByHash\")\n\n\t// Find the leaf index of the supplied hash\n\tleafHashes := [][]byte{req.LeafHash}\n\tleaves, err := tx.GetLeavesByHash(ctx, leafHashes, req.OrderBySequence)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tslr, err := tx.LatestSignedLogRoot(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar root types.LogRootV1\n\tif err := root.UnmarshalBinary(slr.LogRoot); err != nil {\n\t\treturn nil, status.Errorf(codes.Internal, \"Could not read current log root: %v\", err)\n\t}\n\n\t// TODO(Martin2112): Need to define a limit on number of results or some form of paging etc.\n\tproofs := make([]*trillian.Proof, 0, len(leaves))\n\tfor _, leaf := range leaves {\n\t\t// Don't include leaves that aren't in the requested TreeSize.\n\t\tif leaf.LeafIndex >= req.TreeSize {\n\t\t\tcontinue\n\t\t}\n\t\tproof, err := getInclusionProofForLeafIndex(ctx, tx, hasher, uint64(req.TreeSize), uint64(leaf.LeafIndex))\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tproofs = append(proofs, proof)\n\t\tt.recordIndexPercent(leaf.LeafIndex, root.TreeSize)\n\t}\n\n\tif err := tx.Commit(ctx); err != nil {\n\t\treturn nil, err\n\t}\n\tif len(proofs) < 1 {\n\t\treturn nil, status.Errorf(codes.NotFound,\n\t\t\t\"No leaf found for hash: %x in tree size %v\", req.LeafHash, req.TreeSize)\n\t}\n\n\t// TODO(gbelvin): Rename \"Proof\" -> \"Proofs\"\n\treturn &trillian.GetInclusionProofByHashResponse{\n\t\tSignedLogRoot: slr,\n\t\tProof: proofs,\n\t}, nil\n}",
"func (in *Instance) hash(x, y, mu *big.Int, T uint64) *big.Int {\n\tb := sha512.New()\n\tb.Write(x.Bytes())\n\tb.Write(y.Bytes())\n\tb.Write(mu.Bytes())\n\tbits := make([]byte, 8)\n\tbinary.LittleEndian.PutUint64(bits, T)\n\tb.Write(bits)\n\tres := new(big.Int).SetBytes(b.Sum(nil))\n\tres.Mod(res, in.rsaModulus)\n\treturn res\n}",
"func (ref Ref) Hash() int {\n\treturn termSliceHash(ref)\n}",
"func (ctx *Context) hashMessage(pad scratchPad, msg io.Reader,\n\tR, root []byte, idx uint64) ([]byte, error) {\n\tret := make([]byte, ctx.p.N)\n\terr := ctx.hashMessageInto(pad, msg, R, root, idx, ret)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n}",
"func rangeProofVerify(n int, V *Point, proof BulletProof, g, h *Point, G,\n\tH []*Point) bool {\n\ty, z, x, ux, xs := ComputeChallenges(V, g, proof)\n\n\tyN := powers(y, n)\n\ttwoN := powers(big.NewInt(2), n)\n\tzz := Square(z)\n\tzzz := Mul(z, zz)\n\tHprime := computeHPrime(H, y)\n\n\t// Ensure that t(x) = L · R.\n\tdelta := new(big.Int).Sub(\n\t\tMul(new(big.Int).Sub(z, zz), Sum(yN...)),\n\t\tMul(zzz, Sum(twoN...)))\n\tdelta.Mod(delta, curve.N)\n\n\trhs65 := SumPoints(\n\t\tScalarMulPoint(V, Square(z)),\n\t\tScalarMulPoint(g, delta),\n\t\tScalarMulPoint(proof.T1, x),\n\t\tScalarMulPoint(proof.T2, Square(x)))\n\n\t// Check that the prover constructed the inner product honestly.\n\tif !commit(Neg(proof.negTaux), h, proof.tHat, g).Equals(rhs65) {\n\t\treturn false\n\t}\n\n\t// P above contains mu, so construct a P without it.\n\tP := SumPoints(\n\t\tproof.A,\n\t\tScalarMulPoint(proof.S, x),\n\t\tScalarMultAll(Neg(z), G...),\n\t\tScalarMulPoints(AddVectors(ScalarMul(yN, z), ScalarMul(twoN, Square(z))), Hprime),\n\t\tScalarMulPoint(h, proof.negMu))\n\n\tPprime := SumPoints(P, ScalarMulPoint(h, Mul(ux, proof.tHat)))\n\n\treturn verifyInnerProductProof(0, n, xs, proof.Ls, proof.Rs,\n\t\tproof.a, proof.b, Pprime, ScalarMulPoint(h, ux), G, Hprime)\n}",
"func (n NamespacedMerkleTree) Prove(index int) (Proof, error) {\n\tisMaxNsIgnored := n.treeHasher.IsMaxNamespaceIDIgnored()\n\tn.computeLeafHashesIfNecessary()\n\tsubTreeHasher := internal.NewCachedSubtreeHasher(n.leafHashes, n.treeHasher)\n\t// TODO: store nodes and re-use the hashes instead recomputing parts of the tree here\n\tproof, err := merkletree.BuildRangeProof(index, index+1, subTreeHasher)\n\tif err != nil {\n\t\treturn NewEmptyRangeProof(isMaxNsIgnored), err\n\t}\n\n\treturn NewInclusionProof(index, index+1, proof, isMaxNsIgnored), nil\n}",
"func TestFuzzBlockHash(t *testing.T) {\n\tf := fuzz.New()\n\tf.NilChance(0)\n\tfor i := 0; i < 10000; i++ {\n\t\tvar testBlock Block\n\t\tf.Fuzz(&testBlock)\n\t\ttestBlock.Justify = CreateQuorumCert(&testBlock)\n\t\tnumSigs, _ := rand.Int(rand.Reader, big.NewInt(10))\n\t\tfor j := int64(0); j < numSigs.Int64(); j++ {\n\t\t\tvar sig PartialSig\n\t\t\tf.Fuzz(&sig)\n\t\t\tid, _ := rand.Int(rand.Reader, big.NewInt(1000))\n\t\t\trID := config.ReplicaID(id.Int64())\n\t\t\tsig.ID = rID\n\t\t\tsig.R, _ = rand.Int(rand.Reader, big.NewInt(math.MaxInt64))\n\t\t\tsig.S, _ = rand.Int(rand.Reader, big.NewInt(math.MaxInt64))\n\t\t\ttestBlock.Justify.Sigs[rID] = sig\n\t\t}\n\t\thash1 := testBlock.Hash()\n\t\thash2 := testBlock.Hash()\n\t\tif !bytes.Equal(hash1[:], hash2[:]) {\n\t\t\tt.Fatalf(\"Non-determinism in hash function detected:\\nBlock: %s\\nHash1: %s\\nHash2: %s\", testBlock, hash1, hash2)\n\t\t}\n\t}\n}"
] | [
"0.6469084",
"0.58733064",
"0.5705257",
"0.5511857",
"0.5511857",
"0.5510959",
"0.54576856",
"0.5450013",
"0.5369648",
"0.5359943",
"0.5351932",
"0.5337506",
"0.53114235",
"0.53063196",
"0.52800083",
"0.5264212",
"0.52385473",
"0.5237696",
"0.5209671",
"0.51887167",
"0.51771456",
"0.5172485",
"0.51500165",
"0.51493",
"0.5144492",
"0.51423043",
"0.51403815",
"0.5137703",
"0.51258844",
"0.51234144",
"0.51005185",
"0.50814563",
"0.507443",
"0.5070591",
"0.5060165",
"0.50546575",
"0.50476956",
"0.5033876",
"0.50161576",
"0.5013702",
"0.5009913",
"0.4983418",
"0.49770966",
"0.4972729",
"0.4964868",
"0.49469018",
"0.4942832",
"0.4928432",
"0.49281836",
"0.49276704",
"0.49207544",
"0.49166402",
"0.49145737",
"0.4908994",
"0.49044397",
"0.49043825",
"0.489548",
"0.48953745",
"0.48878568",
"0.48871124",
"0.4880395",
"0.4875479",
"0.487396",
"0.48679784",
"0.48659658",
"0.48563653",
"0.48549327",
"0.48472872",
"0.48372132",
"0.48326123",
"0.4832496",
"0.48268816",
"0.48265514",
"0.48224983",
"0.48171952",
"0.48166442",
"0.48064825",
"0.4803661",
"0.47888115",
"0.4786078",
"0.47859487",
"0.47854796",
"0.47854233",
"0.47829178",
"0.47764987",
"0.47692263",
"0.47657585",
"0.47657585",
"0.47655717",
"0.47654602",
"0.47601652",
"0.47569695",
"0.47548074",
"0.47536507",
"0.4748021",
"0.4737319",
"0.47359508",
"0.47333205",
"0.47326854",
"0.4725481"
] | 0.69053805 | 0 |
Public returns the corresponding public key as bytes. | func (k PrivateKey) Public() crypto.PublicKey {
return &k.PublicKey
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (priv *PrivateKey) Public() crypto.PublicKey",
"func (priv *PrivateKey) Public() crypto.PublicKey",
"func (j *JWK) PublicKeyBytes() ([]byte, error) {\n\tif isSecp256k1(j.Kty, j.Crv) {\n\t\tvar ecPubKey *ecdsa.PublicKey\n\n\t\tecPubKey, ok := j.Key.(*ecdsa.PublicKey)\n\t\tif !ok {\n\t\t\tecPubKey = &j.Key.(*ecdsa.PrivateKey).PublicKey\n\t\t}\n\n\t\tpubKey := &btcec.PublicKey{\n\t\t\tCurve: btcec.S256(),\n\t\t\tX: ecPubKey.X,\n\t\t\tY: ecPubKey.Y,\n\t\t}\n\n\t\treturn pubKey.SerializeCompressed(), nil\n\t}\n\n\tswitch pubKey := j.Public().Key.(type) {\n\tcase *ecdsa.PublicKey, ed25519.PublicKey:\n\t\tpubKBytes, err := x509.MarshalPKIXPublicKey(pubKey)\n\t\tif err != nil {\n\t\t\treturn nil, errors.New(\"failed to read public key bytes\")\n\t\t}\n\n\t\treturn pubKBytes, nil\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"unsupported public key type in kid '%s'\", j.KeyID)\n\t}\n}",
"func (c CertificateKey) Public() string {\n\treturn c.public\n}",
"func (k *RSAPrivKey) Public() PubKey {\n\treturn &RSAPubKey{\n\t\tkey: &k.key.PublicKey,\n\t}\n}",
"func MarshalPublic(key *ecdsa.PublicKey) (string, error) {\n\tif key == nil || key.Curve == nil || key.X == nil || key.Y == nil {\n\t\treturn \"\", fmt.Errorf(\"key or part of key is nil: %+v\", key)\n\t}\n\n\tkey.Curve = fixCurve(key.Curve)\n\n\trawPriv, err := x509.MarshalPKIXPublicKey(key)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tkeyBlock := &pem.Block{\n\t\tType: \"PUBLIC KEY\",\n\t\tBytes: rawPriv,\n\t}\n\n\treturn string(pem.EncodeToMemory(keyBlock)), nil\n}",
"func (p *PublicKey) Serialize() []byte {\n\treturn (*btcec.PublicKey)(p).SerializeCompressed()\n}",
"func (s NativeSigner) PublicKey() ([]byte, error) {\n\tkeybuf := new(bytes.Buffer)\n\tif err := (*openpgp.Entity)(&s).Serialize(keybuf); err != nil {\n\t\treturn nil, err\n\t}\n\treturn keybuf.Bytes(), nil\n}",
"func (priv *PrivateKey) Public() crypto.PublicKey {\n\treturn &priv.PublicKey\n}",
"func PublicKeyToBytes(publicKey *rsa.PublicKey) []byte {\n\tblock := &pem.Block{\n\t\tType: \"RSA PUBLIC KEY\",\n\t\tBytes: x509.MarshalPKCS1PublicKey(publicKey),\n\t}\n\n\tpublic := pem.EncodeToMemory(block)\n\n\treturn public\n}",
"func (k *PrivateKey) Public() crypto.PublicKey {\n\treturn k.PublicKey()\n}",
"func (p PrivateKey) Public() crypto.PublicKey {\n\tpub, _ := p.PublicKey()\n\treturn pub\n}",
"func (s GPGSigner) PublicKey() ([]byte, error) {\n\tgpg2 := exec.Command(s.gpgExecutable, \"--export\", s.GPGUserName)\n\tif err := s.Rewriter(gpg2); err != nil {\n\t\treturn nil, fmt.Errorf(\"Error invoking Rewrite: %v\", err)\n\t}\n\tout, err := gpg2.StdoutPipe()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Error getting stdout pipe: %v\", err)\n\t}\n\tif err := gpg2.Start(); err != nil {\n\t\treturn nil, fmt.Errorf(\"Error starting gpg command: %v\", err)\n\t}\n\tpubkey, err := ioutil.ReadAll(out)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Error reading pubkey data: %v\", err)\n\t}\n\tif err := gpg2.Wait(); err != nil {\n\t\treturn nil, fmt.Errorf(\"Error running gpg: %v\", err)\n\t}\n\treturn pubkey, nil\n}",
"func (account *ED25519Account) PublicKeyBytes() []byte {\n\treturn account.PublicKey[:]\n}",
"func (account *NothingAccount) PublicKeyBytes() []byte {\n\treturn account.PublicKey[:]\n}",
"func (h PublicKey) String() string {\n\treturn h.Hex()\n}",
"func (s *SigningIdentity) Public() crypto.PublicKey {\n\treturn s.Certificate.PublicKey\n}",
"func (a *managedAddress) pubKeyBytes() []byte {\n\tif a.compressed {\n\t\treturn a.pubKey.SerializeCompressed()\n\t}\n\treturn a.pubKey.SerializeUncompressed()\n}",
"func (priv *PKCS11PrivateKeyECDSA) Public() crypto.PublicKey {\n\treturn priv.key.PubKey\n}",
"func (s *Signer) Public() crypto.PublicKey {\n\treturn s.publicKey\n}",
"func (priv *PKCS11PrivateKeyRSA) Public() crypto.PublicKey {\n\treturn priv.key.PubKey\n}",
"func GetPublicKey(pub ssh.PublicKey) []byte {\n\tmarshaled := ssh.MarshalAuthorizedKey(pub)\n\t// Strip trailing newline\n\treturn marshaled[:len(marshaled)-1]\n}",
"func (c *CertInfo) PublicKey() []byte {\n\tdata := c.KeyPair().Certificate[0]\n\treturn pem.EncodeToMemory(&pem.Block{Type: \"CERTIFICATE\", Bytes: data})\n}",
"func (p *PublicKey) Serialize() []byte {\n\treturn p.pk().SerializeCompressed()\n}",
"func PublicKeyToBytes(pub *rsa.PublicKey) []byte {\n\tpubASN1, err := x509.MarshalPKIXPublicKey(pub)\n\tif err != nil {\n\t\tlogrus.Error(err)\n\t}\n\n\tpubBytes := pem.EncodeToMemory(&pem.Block{\n\t\tType: \"RSA PUBLIC KEY\",\n\t\tBytes: pubASN1,\n\t})\n\n\treturn pubBytes\n}",
"func (priv *PrivateKey) Public() (*PublicKey, error) {\n\tslice, err := curve25519.X25519(priv[:], curve25519.Basepoint)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tp, _ := PublicKeyFromSlice(slice)\n\treturn p, nil\n}",
"func (key PublicKey) String() string {\n\treturn base58.Encode(key[:])\n}",
"func PublicKeyToBytes(pub *rsa.PublicKey) []byte {\n\tpubASN1, err := x509.MarshalPKIXPublicKey(pub)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tpubBytes := pem.EncodeToMemory(&pem.Block{\n\t\tType: \"RSA PUBLIC KEY\",\n\t\tBytes: pubASN1,\n\t})\n\n\treturn pubBytes\n}",
"func (x *X25519) PublicKey() []byte {\n\treturn x.publicKey[:]\n}",
"func (_BondedECDSAKeep *BondedECDSAKeepSession) PublicKey() ([]byte, error) {\n\treturn _BondedECDSAKeep.Contract.PublicKey(&_BondedECDSAKeep.CallOpts)\n}",
"func (pk PublicKey) Encode() []byte {\n\treturn pk.publicKey.Encode()\n}",
"func (k PublicKey) String() string {\n\treturn hexutil.Encode(k[:])\n}",
"func publicKeyToBytes(pub interface{}) []byte {\r\n\tvar b []byte\r\n\tswitch k := pub.(type) {\r\n\tcase *rsa.PublicKey:\r\n\t\tb = k.N.Bytes()\r\n\tcase *dsa.PublicKey:\r\n\t\tb = k.Y.Bytes()\r\n\tcase *ecdsa.PublicKey:\r\n\t\tb = append(k.X.Bytes(), k.Y.Bytes()...)\r\n\t}\r\n\treturn b\r\n}",
"func (pk _Ed25519PublicKey) _Bytes() []byte {\n\treturn pk.keyData\n}",
"func marshalPublicKey(pub interface{}) ([]byte, error) {\n\tvar publicKeyBytes []byte\n\tvar err error\n\tswitch p := pub.(type) {\n\tcase *ecdsa.PublicKey:\n\t\t// Stolen from https://golang.org/src/crypto/x509/x509.go?s=2771:2829#L87\n\t\tpublicKeyBytes = elliptic.Marshal(p.Curve, p.X, p.Y)\n\tcase *rsa.PublicKey:\n\t\t// TODO: Append exponent\n\t\tpublicKeyBytes = p.N.Bytes()\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Unsupported public key type: %T\", pub)\n\t}\n\treturn publicKeyBytes, err\n}",
"func (k *Ed25519PublicKey) Bytes() ([]byte, error) {\n\treturn MarshalPublicKey(k)\n}",
"func (k *VrfablePrivateKey) Public() PublicKey {\n\treturn &VrfablePublicKey{&k.PublicKey}\n}",
"func (k *Ed25519PrivateKey) GetPublic() PubKey {\n\treturn &Ed25519PublicKey{k: k.pubKeyBytes()}\n}",
"func (s *Signer) Public() crypto.PublicKey {\n\treturn s.Signer.Public()\n}",
"func (_BondedECDSAKeep *BondedECDSAKeepCallerSession) PublicKey() ([]byte, error) {\n\treturn _BondedECDSAKeep.Contract.PublicKey(&_BondedECDSAKeep.CallOpts)\n}",
"func (c *publicKey) Raw() ([]byte, error) {\n\tif c.ki == nil {\n\t\treturn nil, errors.ErrNilPointerValue()\n\t}\n\n\treturn c.ki.Raw()\n}",
"func (d *DocsCrypto) GetPublic() *rsa.PublicKey {\n\td.Debug(\"gettting public key\")\n\treturn d.privateKey.Public().(*rsa.PublicKey)\n}",
"func (c *Session) GetAKPublic() []byte {\n\treturn c.akPub\n}",
"func (pk *PrivateKey) GetPublicKey() *PublicKey {\n var publicKeyBytes PublicKey\n copy(publicKeyBytes[:], pk[32:])\n return &publicKeyBytes\n}",
"func (keyRing *KeyRing) GetPublicKey() (b []byte, err error) {\n\tvar outBuf bytes.Buffer\n\tif err = keyRing.WritePublicKey(&outBuf); err != nil {\n\t\treturn\n\t}\n\n\tb = outBuf.Bytes()\n\treturn\n}",
"func (s PublicKey) String() string {\n\treturn awsutil.Prettify(s)\n}",
"func (d *Device) PublicKey() string {\n\treturn d.pubKey\n}",
"func (sk *opensslPrivateKey) GetPublic() PubKey {\n\treturn &opensslPublicKey{key: sk.key}\n}",
"func (kt KeyType) PublicKey() string {\n\treturn fmt.Sprintf(\"%s.pub\", kt.KeyBaseName)\n}",
"func (pk PrivateKey) PublicKey() hotstuff.PublicKey {\n\treturn pk.Public()\n}",
"func (s Sig) PublicKey() ([]byte, error) {\n\treturn nil, fmt.Errorf(\"not implemented\")\n}",
"func (pk *PublicKey) Key() string {\n\treturn string(pk.PublicKeyHex.Value)\n}",
"func (c *publicKey) String() string {\n\tb, err := c.Raw()\n\tif err != nil {\n\t\treturn \"<nil>\"\n\t}\n\n\treturn hex.EncodeToString(b)\n}",
"func (o PlaybackKeyPairOutput) PublicKey() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *PlaybackKeyPair) pulumi.StringOutput { return v.PublicKey }).(pulumi.StringOutput)\n}",
"func encodePublicKey(public *rsa.PublicKey) ([]byte, error) {\n\tpublicBytes, err := x509.MarshalPKIXPublicKey(public)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn pem.EncodeToMemory(&pem.Block{\n\t\tBytes: publicBytes,\n\t\tType: \"PUBLIC KEY\",\n\t\tHeaders: nil,\n\t}), nil\n}",
"func (pk _Ed25519PublicKey) _BytesRaw() []byte {\n\treturn pk.keyData\n}",
"func (p publicKey) MarshalBinary() (data []byte, err error) {\n\treturn p.PublicKey.Bytes(), nil\n}",
"func (pk *PublicKey) MarshalBinary() ([]byte, error) {\n\treturn pk.Bytes(), nil\n}",
"func PubKey(key []byte) ([]byte, error) {\n\tcmd := exec.Command(\"wg\", \"pubkey\")\n\tstdin, err := cmd.StdinPipe()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to open pipe to stdin: %v\", err)\n\t}\n\n\tgo func() {\n\t\tdefer stdin.Close()\n\t\tstdin.Write(key)\n\t}()\n\n\tpublic, err := cmd.Output()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to generate public key: %v\", err)\n\t}\n\treturn bytes.Trim(public, \"\\n\"), nil\n}",
"func (i *Identity) Public() (*ecdsa.PublicKey, error) {\n\treturn crypto.ParseECDSAPublicKey(i.PublicKey)\n}",
"func (k *PublicKey) Bytes() []byte {\n\t// Copy the public key to a fixed size buffer that can get allocated on the\n\t// caller's stack after inlining.\n\tvar buf [133]byte\n\treturn append(buf[:0], k.publicKey...)\n}",
"func (a *Account) PublicKey() *PubKey {\n\tk := new(PubKey)\n\tcopy(k[:], a.pub[:])\n\treturn k\n}",
"func (priv PrivateKey) Public() crypto.PublicKey {\n\tpub := ed25519.PrivateKey(priv).Public().(ed25519.PublicKey)\n\treturn PublicKey(pub)\n}",
"func (k *JSONWebKey) Public() JSONWebKey {\n\tif k.IsPublic() {\n\t\treturn *k\n\t}\n\tret := *k\n\tswitch key := k.Key.(type) {\n\tcase *ecdsa.PrivateKey:\n\t\tret.Key = key.Public()\n\tcase *rsa.PrivateKey:\n\t\tret.Key = key.Public()\n\tcase ed25519.PrivateKey:\n\t\tret.Key = key.Public()\n\tdefault:\n\t\treturn JSONWebKey{} // returning invalid key\n\t}\n\treturn ret\n}",
"func (w *Wallet) PublicKeyString() string {\n\treturn fmt.Sprintf(\"%x%x\", w.publicKey.X, w.publicKey.Y)\n}",
"func (sk *PrivateKey) Public() crypto.PublicKey {\n\treturn &PublicKey{\n\t\tsk.e.Public().(ed25519.PublicKey),\n\t\t*sk.d.Public().(*mode2.PublicKey),\n\t}\n}",
"func (v *Validator) PublicKeyBytes() []byte {\n\tif v.pubBytes == nil || len(v.pubBytes) == 0 {\n\t\tv.pubBytes = keys.FromPublicKey(&v.Key.PublicKey)\n\t}\n\treturn v.pubBytes\n}",
"func (pk *PublicKey) String() string {\n\treturn pk.Algorithm.String() + \":\" + pk.Key.String()\n}",
"func (k *RSAPubKey) Serialize() ([]byte, error) {\n\tasn1Bytes, err := x509.MarshalPKIXPublicKey(k.key)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpemKey := &pem.Block{\n\t\tType: \"PUBLIC KEY\",\n\t\tBytes: asn1Bytes,\n\t}\n\treturn pem.EncodeToMemory(pemKey), nil\n}",
"func (s *p11Signer) Public() crypto.PublicKey {\n\tswitch s.keyType {\n\tcase crypki.RSA:\n\t\treturn publicRSA(s)\n\tcase crypki.ECDSA:\n\t\treturn publicECDSA(s)\n\tdefault: // RSA is the default\n\t\treturn publicRSA(s)\n\t}\n}",
"func (priv ECDHPrivate) PublicKey() ECDHPublic {\n\ttoret := make([]byte, ECDHKeyLength)\n\tC.crypto_scalarmult_base((*C.uchar)(&toret[0]),\n\t\t(*C.uchar)(&priv[0]))\n\treturn toret\n}",
"func (c *publicKey) Encode() (*pb.PublicKey, error) {\n\tif c.ki == nil {\n\t\treturn nil, ErrPublicKeyCannotBeNil()\n\t}\n\n\tblob, err := crypto.MarshalPublicKey(c.ki)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpbuf := pb.PublicKey{Blob: blob}\n\n\treturn &pbuf, nil\n}",
"func (c *Client) GetPublicKey(scope ...string) (map[string]interface{}, error) {\n\tlog.info(\"========== GET PUBLIC KEY ==========\")\n\turl := buildURL(path[\"client\"])\n\tdefaultScope := \"OAUTH|POST,USERS|POST,USERS|GET,USER|GET,USER|PATCH,SUBSCRIPTIONS|GET,SUBSCRIPTIONS|POST,SUBSCRIPTION|GET,SUBSCRIPTION|PATCH,CLIENT|REPORTS,CLIENT|CONTROLS\"\n\n\tif len(scope) > 0 {\n\t\tdefaultScope = scope[0]\n\t}\n\n\tqp := []string{\"issue_public_key=YES&scope=\" + defaultScope}\n\n\tif len(scope) > 1 {\n\t\tuserId := scope[1]\n\t\tqp[0] += \"&user_id=\" + userId\n\t}\n\n\treturn c.do(\"GET\", url, \"\", qp)\n}",
"func BytesToPublicKey(public []byte) *rsa.PublicKey {\n\n\tblock, _ := pem.Decode(public)\n\tresult, _ := x509.ParsePKCS1PublicKey(block.Bytes)\n\treturn result\n}",
"func (pk PublicKey) Bytes() []byte {\n\tbuf := elliptic.Marshal(pk.PubKey.Curve, pk.PubKey.X, pk.PubKey.Y)\n\t//fmt.Printf(\"end pub key marshal, len=%v, data=%v\\n\", len(buf), buf)\n\treturn buf\n}",
"func (priv *DHPrivateKey) Public() *DHPublicKey {\n\treturn &priv.DHPublicKey\n}",
"func ToPublic(sigType crypto.SigType, pk []byte) ([]byte, error) {\n\tsv, ok := sigs[sigType]\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"cannot generate public key of unsupported type: %v\", sigType)\n\t}\n\n\treturn sv.ToPublic(pk)\n}",
"func (d Dispatcher) PublicKey() string {\n\treturn d.GetPubString()\n}",
"func PublicKeyToProto(pub interfaces.PublicKey) *PublicKey {\n\tif pub == nil {\n\t\treturn nil\n\t}\n\tpb := NewEmptyPublicKey()\n\tpb.Raw = pub.SerializeCompressed()\n\treturn pb\n}",
"func (o *DKSharesInfo) GetPublicKey() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.PublicKey\n}",
"func GetPublicKey() (string, error) {\n\tkeybytes, err := x509.MarshalPKIXPublicKey(publicKey)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tblock := &pem.Block{\n\t\tType: \"PUBLIC KEY\",\n\t\tBytes: keybytes,\n\t}\n\tkeybuffer := pem.EncodeToMemory(block)\n\treturn string(keybuffer), nil\n}",
"func PublicKey(pemkey []byte) (pub []byte, err error) {\n\tvar (\n\t\tpkey *rsa.PrivateKey\n\t)\n\n\tblk, _ := pem.Decode(pemkey) // assumes a single valid pem encoded key.\n\n\tif pkey, err = x509.ParsePKCS1PrivateKey(blk.Bytes); err != nil {\n\t\treturn pub, err\n\t}\n\n\treturn x509.MarshalPKCS1PublicKey(&pkey.PublicKey), nil\n}",
"func generatePublicKey(privatekey *rsa.PublicKey) ([]byte, error) {\n\tpublicRsaKey, err := ssh.NewPublicKey(privatekey)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpubKeyBytes := ssh.MarshalAuthorizedKey(publicRsaKey)\n\treturn pubKeyBytes, nil\n\n}",
"func (pk PublicKey) PublicKeyBase58() string {\n\treturn stringEntry(pk[PublicKeyBase58Property])\n}",
"func (p PubKey) Bytes() []byte {\n\treturn crypto.PubKeyEd25519(p).Bytes()\n}",
"func PrivateKeyPublic(priv *rsa.PrivateKey,) crypto.PublicKey",
"func (a *managedAddress) PubKey() chainec.PublicKey {\n\treturn a.pubKey\n}",
"func (ca *clientAuthWrapper) Public() crypto.PublicKey {\n\tca.finalizeClientAuth()\n\tcert := ca.certificate\n\tif cert.Leaf == nil {\n\t\treturn nil\n\t}\n\treturn cert.Leaf.PublicKey\n}",
"func (k otherKey) Public() crypto.PublicKey {\n\treturn nil\n}",
"func (nr NodeRecord) GetPublicKey() (string, error) {\n\treturn nr.Record.PublicKey, nil\n}",
"func (_BondedECDSAKeep *BondedECDSAKeepSession) GetPublicKey() ([]byte, error) {\n\treturn _BondedECDSAKeep.Contract.GetPublicKey(&_BondedECDSAKeep.CallOpts)\n}",
"func publicKey(privateKey string) (string, error) {\n\tdecoded, _ := pem.Decode([]byte(privateKey))\n\tif decoded == nil {\n\t\treturn \"\", errors.New(\"no PEM data found\")\n\t}\n\n\tprivKey, err := x509.ParsePKCS1PrivateKey(decoded.Bytes)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tder, err := x509.MarshalPKIXPublicKey(privKey.Public())\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tblock := pem.Block{\n\t\tType: \"PUBLIC KEY\",\n\t\tBytes: der,\n\t}\n\treturn string(pem.EncodeToMemory(&block)), nil\n}",
"func (e *Domain) Public() *PublicKey {\n\tif e.PublicKey != nil {\n\t\treturn e.PublicKey\n\t}\n\n\tif e.ClearPrivateKey != nil {\n\t\treturn e.ClearPrivateKey.Public()\n\t}\n\treturn nil\n}",
"func MarshalPublicKey(pubkey *rsa.PublicKey) string {\n\tpk, err := ssh.NewPublicKey(pubkey)\n\tif err != nil {\n\t\treturn \"\"\n\t}\n\n\treturn string(ssh.MarshalAuthorizedKey(pk))\n}",
"func (_BondedECDSAKeep *BondedECDSAKeepCaller) PublicKey(opts *bind.CallOpts) ([]byte, error) {\n\tvar (\n\t\tret0 = new([]byte)\n\t)\n\tout := ret0\n\terr := _BondedECDSAKeep.contract.Call(opts, out, \"publicKey\")\n\treturn *ret0, err\n}",
"func (pk *PublicKey) ToString() (string, error) {\n\ttypeStr, err := keyTypeToString(pk.Type)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"converting key type to string: %v\", err)\n\t}\n\treturn fmt.Sprintf(\"%s:%s\", typeStr, base58.Encode(pk.Data)), nil\n}",
"func (s Slot) Public() crypto.PublicKey {\n\treturn s.PublicKey\n}",
"func (p *PrivateKey) PubKey() *PublicKey {\n\treturn (*PublicKey)(&p.PublicKey)\n}",
"func (pk PublicKey) MarshalJSON() ([]byte, error) {\n\treturn json.Marshal(pk.String())\n}",
"func (s *SMJWT) PublicKey() *rsa.PublicKey {\n\treturn s.publicKey\n}"
] | [
"0.73033845",
"0.73033845",
"0.7199707",
"0.7082733",
"0.7060463",
"0.70405895",
"0.70381963",
"0.70164436",
"0.7010828",
"0.69798744",
"0.6968808",
"0.6966117",
"0.6949062",
"0.69171274",
"0.69095975",
"0.68865144",
"0.6865919",
"0.6858021",
"0.6843556",
"0.6826548",
"0.68192935",
"0.6793445",
"0.67890465",
"0.6776205",
"0.6771147",
"0.67632324",
"0.67620206",
"0.67574126",
"0.675263",
"0.6750905",
"0.6749351",
"0.67468935",
"0.6721042",
"0.6714349",
"0.6694967",
"0.66940767",
"0.6691155",
"0.6666571",
"0.6660408",
"0.66574955",
"0.6649601",
"0.66138625",
"0.6594621",
"0.65898556",
"0.657945",
"0.65761006",
"0.6562174",
"0.65621096",
"0.6554147",
"0.6554099",
"0.6548242",
"0.6538028",
"0.6537708",
"0.65370214",
"0.6522095",
"0.65144897",
"0.65113515",
"0.65046793",
"0.6496414",
"0.6479304",
"0.6467684",
"0.64607626",
"0.64368653",
"0.6426294",
"0.6423145",
"0.64229983",
"0.64212054",
"0.64197814",
"0.6402388",
"0.6391433",
"0.6383612",
"0.6372232",
"0.6366566",
"0.63579476",
"0.6355476",
"0.63533276",
"0.63446987",
"0.6335713",
"0.63295156",
"0.6326361",
"0.6323525",
"0.630809",
"0.6294691",
"0.62945575",
"0.6293482",
"0.62926173",
"0.62882143",
"0.6251728",
"0.62491435",
"0.6239499",
"0.6238214",
"0.62355876",
"0.62262875",
"0.6224801",
"0.62213075",
"0.6216135",
"0.62099206",
"0.6207436",
"0.6202426",
"0.6170463"
] | 0.71303064 | 3 |
Wait waits for the process to exit. Wait releases any resources associated with the Process | func (p Process) Wait() (*os.ProcessState, error) {
if p.ops == nil {
return nil, errInvalidProcess
}
return p.ops.wait()
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (proc *Process) Wait() {\n\terr := proc.Cmd.Wait()\n\tif err != nil {\n\t\t// fmt.Printf(\"Process exit: %v\\n\", err)\n\t\tif exitError, ok := err.(*exec.ExitError); ok {\n\t\t\tproc.ExitState = exitError.ProcessState\n\t\t}\n\t}\n\tproc.ExitState = proc.Cmd.ProcessState\n\tproc.EndTime = time.Now() // TODO make this goroutine-safe\n}",
"func ProcessWait(p *os.Process,) (*os.ProcessState, error)",
"func (p *process) Wait(ctx context.Context) error {\n\tselect {\n\tcase err, ok := <-p.waitC:\n\t\t// Process exited\n\t\tif ok {\n\t\t\treturn err\n\t\t}\n\t\treturn errWaitAlreadyCalled\n\tcase <-ctx.Done():\n\t\t// Timed out. Send a kill signal and release our handle to it.\n\t\treturn multierr.Combine(ctx.Err(), p.cmd.Process.Kill())\n\t}\n}",
"func (a *ExternalAgentProcess) Wait() error {\n\treturn a.cmd.Wait()\n}",
"func (b *BoatHandle) Wait() (*os.ProcessState, error) { return b.cmd.Process.Wait() }",
"func (ep *ExpectProcess) Wait() {\n\tep.wg.Wait()\n}",
"func (p *process) Wait(ctx context.Context) error {\n\treturn WaitContext(ctx, p.cmd)\n}",
"func (p *process) Wait() {\n\tif p.cmdDoneCh != nil {\n\t\t<-p.cmdDoneCh\n\t}\n}",
"func (p *Process) waitOnProcess() (*os.ProcessState, error) {\n\tif p.command == nil || p.command.Process == nil {\n\t\treturn &os.ProcessState{}, nil\n\t}\n\n\treturn p.command.Process.Wait()\n}",
"func (n *mockAgent) waitProcess(c *Container, processID string) (int32, error) {\n\treturn 0, nil\n}",
"func WaitProcess(pid int, w *Waitmsg) (err error) {\n\tprocs.Lock()\n\tch := procs.waits[pid]\n\tprocs.Unlock()\n\n\tvar wmsg *waitErr\n\tif ch != nil {\n\t\twmsg = <-ch\n\t\tprocs.Lock()\n\t\tif procs.waits[pid] == ch {\n\t\t\tdelete(procs.waits, pid)\n\t\t}\n\t\tprocs.Unlock()\n\t}\n\tif wmsg == nil {\n\t\t// ch was missing or ch is closed\n\t\treturn NewError(\"process not found\")\n\t}\n\tif wmsg.err != nil {\n\t\treturn wmsg.err\n\t}\n\tif w != nil {\n\t\t*w = wmsg.Waitmsg\n\t}\n\treturn nil\n}",
"func (pg *ProcessGroup) Wait() {\n\n\t\t// With every public method calling setup, we avoid the need for a constructor function and it becomes safe\n\t\t// to allow end users to be able to acess the process group itself (ProcessGroup instead of processGroup)\n\t\tpg.setup()\n\n\t\t// Use the wait function of our wait group\n\t\tpg.group.Wait()\n\t}",
"func (container *container) Wait() error {\r\n\terr := container.system.Wait()\r\n\tif err == nil {\r\n\t\terr = container.system.ExitError()\r\n\t}\r\n\treturn convertSystemError(err, container)\r\n}",
"func (g *GenericPilot) waitForProcess() <-chan struct{} {\n\tout := make(chan struct{})\n\tgo func() {\n\t\tdefer close(out)\n\t\tfor {\n\t\t\tif g.process != nil {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\ttime.Sleep(2)\n\t\t}\n\t\t<-g.process.Wait()\n\t}()\n\treturn out\n}",
"func (c *Cmd) Wait() error {\n\treturn c.cmd.Wait()\n}",
"func (c *Cmd) Wait() error {\n\treturn c.Cmd.Wait()\n}",
"func (r *reaper) wait(pid int, proc waitProcess) (int, error) {\n\texitCodeCh, err := r.getExitCodeCh(pid)\n\tif err != nil {\n\t\treturn -1, err\n\t}\n\n\t// Wait for the subreaper to receive the SIGCHLD signal. Once it gets\n\t// it, this channel will be notified by receiving the exit code of the\n\t// corresponding process.\n\texitCode := <-exitCodeCh\n\n\t// Ignore errors since the process has already been reaped by the\n\t// subreaping loop. This call is only used to make sure libcontainer\n\t// properly cleans up its internal structures and pipes.\n\tproc.wait()\n\n\tr.deleteExitCodeCh(pid)\n\n\treturn exitCode, nil\n}",
"func (c *Cmd) Wait(opts ...RunOption) error {\n\tif c.Process == nil {\n\t\treturn errNotStarted\n\t}\n\tif c.ProcessState != nil {\n\t\treturn errAlreadyWaited\n\t}\n\n\twerr := c.Cmd.Wait()\n\tcerr := c.ctx.Err()\n\n\tc.watchdogStop <- true\n\n\tif (werr != nil || cerr != nil) && hasOpt(DumpLogOnError, opts) {\n\t\t// Ignore the DumpLog intentionally, because the primary error\n\t\t// here is either werr or cerr. Note that, practically, the\n\t\t// error from DumpLog is returned when ProcessState is nil,\n\t\t// so it shouldn't happen here, because it should be assigned\n\t\t// in Wait() above.\n\t\tc.DumpLog(c.ctx)\n\t}\n\n\tif cerr != nil {\n\t\tc.timedOut = true\n\t\treturn cerr\n\t}\n\treturn werr\n}",
"func Wait() {\n\twaitGroup.Wait()\n}",
"func (c *Config) Wait(cErr chan error) error {\n\t// Exec should not return until the process is actually running\n\tselect {\n\tcase <-c.waitStart:\n\tcase err := <-cErr:\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (p *Processor) Wait() []error {\n\tclose(p.Input)\n\tp.wg.Wait()\n\tclose(p.output)\n\tp.hwg.Wait()\n\treturn p.herr\n}",
"func (e *dockerExec) Wait(ctx context.Context) error {\n\treturn e.WaitUntil(execComplete)\n}",
"func waitOnProcess(processID int) error {\n\tproxyProc, err := findProcess(processID)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Try to kill the pid with sigterm\n\tif runtime.GOOS != \"windows\" { // FIXME: temporary work around because signals are lame in windows\n\t\tif err := proxyProc.Signal(syscall.SIGTERM); err != nil {\n\t\t\tif err == syscall.ESRCH {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\treturn err\n\t\t}\n\n\t\tif err := backoffForProcess(processID); err == nil {\n\t\t\treturn nil\n\t\t}\n\t}\n\n\t// sigterm has not killed it yet, lets send a sigkill\n\tproxyProc, err = findProcess(processID)\n\tif proxyProc == nil && err != nil {\n\t\t// process is killed, gone\n\t\treturn nil //nolint: nilerr\n\t}\n\tif err := proxyProc.Signal(syscall.SIGKILL); err != nil {\n\t\tif err == syscall.ESRCH {\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t}\n\treturn backoffForProcess(processID)\n}",
"func (p *Pipeline) Wait() error {\n\tp.Stop()\n\tp.wait.Wait()\n\treturn p.err\n}",
"func (s *sshSessionExternal) Wait() error {\n\tif s.exited() {\n\t\treturn nil\n\t}\n\terr := s.cmd.Wait()\n\tif err == nil {\n\t\tfs.Debugf(s.f, \"ssh external: command exited OK\")\n\t} else {\n\t\tfs.Debugf(s.f, \"ssh external: command exited with error: %v\", err)\n\t}\n\treturn err\n}",
"func (s *Server) Wait() { <-s.exited; s.BaseService.Wait() }",
"func (c *gcsCore) WaitProcess(pid int) (<-chan int, chan<- bool, error) {\n\tc.processCacheMutex.Lock()\n\tentry, ok := c.processCache[pid]\n\tif !ok {\n\t\tc.processCacheMutex.Unlock()\n\t\treturn nil, nil, gcserr.NewHresultError(gcserr.HrErrNotFound)\n\t}\n\tc.processCacheMutex.Unlock()\n\n\t// If we are an init process waiter increment our count for this waiter.\n\tif entry.isInitProcess {\n\t\tentry.writersSyncRoot.Lock()\n\t\tlogrus.Debugf(\"gcscore::WaitProcess Incrementing waitgroup as isInitProcess\")\n\t\tentry.writersWg.Add(1)\n\t\tentry.writersSyncRoot.Unlock()\n\t}\n\n\texitCodeChan := make(chan int, 1)\n\tdoneChan := make(chan bool)\n\n\tgo func() {\n\t\tbgExitCodeChan := make(chan int, 1)\n\t\tgo func() {\n\t\t\tentry.exitWg.Wait()\n\t\t\tbgExitCodeChan <- entry.exitCode\n\t\t}()\n\n\t\t// Wait for the exit code or the caller to stop waiting.\n\t\tselect {\n\t\tcase exitCode := <-bgExitCodeChan:\n\t\t\tlogrus.Debugf(\"gcscore::WaitProcess got an exitCode %d\", exitCode)\n\t\t\t// We got an exit code tell our caller.\n\t\t\texitCodeChan <- exitCode\n\n\t\t\t// Wait for the caller to tell us they have issued the write and\n\t\t\t// release the writers count.\n\t\t\tselect {\n\t\t\tcase <-doneChan:\n\t\t\t\tif entry.isInitProcess {\n\t\t\t\t\tentry.writersSyncRoot.Lock()\n\t\t\t\t\t// Decrement this waiter\n\t\t\t\t\tlogrus.Debugf(\"-1 writersWg [gcsCore::WaitProcess] exitCode from bgExitCodeChan doneChan\")\n\t\t\t\t\tentry.writersWg.Done()\n\t\t\t\t\tif !entry.writersCalled {\n\t\t\t\t\t\t// Decrement the container exited waiter now that we\n\t\t\t\t\t\t// know we have successfully written at least 1\n\t\t\t\t\t\t// WaitProcess on the init process.\n\t\t\t\t\t\tlogrus.Debugf(\"-1 writersWg [gcsCore::WaitProcess] exitCode from bgExitCodeChan, !writersCalled\")\n\t\t\t\t\t\tentry.writersCalled = true\n\t\t\t\t\t\tentry.writersWg.Done()\n\t\t\t\t\t}\n\t\t\t\t\tentry.writersSyncRoot.Unlock()\n\t\t\t\t}\n\t\t\t}\n\t\tcase <-doneChan:\n\t\t\tlogrus.Debugf(\"gcscore::WaitProcess done channel\")\n\t\t\t// This case means that the waiter decided to stop waiting before\n\t\t\t// the process had an exit code. In this case we need to cleanup\n\t\t\t// just our waiter because the no response was written.\n\t\t\tif entry.isInitProcess {\n\t\t\t\tlogrus.Debugf(\"-1 writersWg [gcsCore::WaitProcess] doneChan\")\n\t\t\t\tentry.writersSyncRoot.Lock()\n\t\t\t\tentry.writersWg.Done()\n\t\t\t\tentry.writersSyncRoot.Unlock()\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn exitCodeChan, doneChan, nil\n}",
"func (s *SeleniumServer) Wait() {\n\terr := s.cmd.Wait()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}",
"func (c *qemuCmd) Wait() (int, error) {\n\terr := c.cmd.Wait()\n\n\texitStatus := -1\n\topAPI := c.cmd.Get()\n\tif opAPI.Metadata != nil {\n\t\texitStatusRaw, ok := opAPI.Metadata[\"return\"].(float64)\n\t\tif ok {\n\t\t\texitStatus = int(exitStatusRaw)\n\n\t\t\t// Convert special exit statuses into errors.\n\t\t\tswitch exitStatus {\n\t\t\tcase 127:\n\t\t\t\terr = ErrExecCommandNotFound\n\t\t\tcase 126:\n\t\t\t\terr = ErrExecCommandNotExecutable\n\t\t\t}\n\t\t}\n\t}\n\n\tif err != nil {\n\t\treturn exitStatus, err\n\t}\n\n\t<-c.dataDone\n\n\tif c.cleanupFunc != nil {\n\t\tdefer c.cleanupFunc()\n\t}\n\n\treturn exitStatus, nil\n}",
"func (m *mware) Wait() error {\n\treturn m.cmd.Wait()\n}",
"func wait(s *suite.Suite, data *runData, expectedExitStatus int) {\n\terr := data.cmd.Wait()\n\tif expectedExitStatus == 0 {\n\t\ts.NoError(err)\n\t} else {\n\t\tstatus := err.(*exec.ExitError).ProcessState.Sys().(syscall.WaitStatus)\n\t\ts.Equal(expectedExitStatus, status.ExitStatus())\n\t}\n}",
"func (r *reaper) wait(exitCodeCh <-chan int, proc waitProcess) (int, error) {\n\t// Wait for the subreaper to receive the SIGCHLD signal. Once it gets\n\t// it, this channel will be notified by receiving the exit code of the\n\t// corresponding process.\n\texitCode := <-exitCodeCh\n\n\t// Ignore errors since the process has already been reaped by the\n\t// subreaping loop. This call is only used to make sure libcontainer\n\t// properly cleans up its internal structures and pipes.\n\tproc.wait()\n\n\treturn exitCode, nil\n}",
"func (f *FFmpeg) Wait() error {\n\t// Verify ffmpeg is running\n\tif !f.started {\n\t\treturn ErrFFmpegNotStarted\n\t}\n\n\t// Wait for exit\n\tif err := f.ffmpeg.Wait(); err != nil {\n\t\treturn err\n\t}\n\n\t// Stopped!\n\tf.started = false\n\treturn nil\n}",
"func Wait() {\n\t<-wait\n}",
"func (e *Executor) Wait() { <-e.exit }",
"func (cmd *Command) Wait() error {\n\t// According to https://github.com/golang/go/issues/28461,\n\t// exec.Cmd#Wait is not thread-safe, so we need to implement\n\t// our own version.\n\tcmd.waitOnce.Do(func() {\n\t\tcmd.waitResult = cmd.c.Wait()\n\t\tclose(cmd.waitDoneCh)\n\t})\n\treturn cmd.waitResult\n}",
"func WaitTimeout(c *exec.Cmd, timeout time.Duration) error {\n\ttimer := time.AfterFunc(timeout, func() {\n\t\terr := c.Process.Kill()\n\t\tif err != nil {\n\t\t\tlog.Printf(\"E! [agent] Error killing process: %s\", err)\n\t\t\treturn\n\t\t}\n\t})\n\n\terr := c.Wait()\n\n\t// Shutdown all timers\n\ttermSent := !timer.Stop()\n\n\t// If the process exited without error treat it as success. This allows a\n\t// process to do a clean shutdown on signal.\n\tif err == nil {\n\t\treturn nil\n\t}\n\n\t// If SIGTERM was sent then treat any process error as a timeout.\n\tif termSent {\n\t\treturn ErrTimeout\n\t}\n\n\t// Otherwise there was an error unrelated to termination.\n\treturn err\n}",
"func (c *Cmd) CmdWait() {\n\tc.Wg.Wait()\n\tc.ExitError = c.Cmd.Wait()\n\tc.GetExitCode()\n}",
"func (ep *ExpectProcess) Close() error {\n\tep.wg.Wait()\n\n\tep.mu.Lock()\n\tdefer ep.mu.Unlock()\n\n\t// this signals to other funcs that the process has finished\n\tep.cmd = nil\n\treturn ep.exitErr\n}",
"func wait() os.Signal {\n\tsigC := make(chan os.Signal, 1)\n\tsignal.Notify(\n\t\tsigC,\n\t\tsyscall.SIGINT,\n\t\tsyscall.SIGTERM,\n\t\tsyscall.SIGQUIT)\n\tsignal := <-sigC\n\treturn signal\n}",
"func (p *Probe) wait() {\n\tp.waitGroup.Wait()\n}",
"func (bw *BinaryWaiter) Wait() error {\n\terr := bw.cmd.Wait()\n\t<-bw.logsWritten\n\treturn err\n}",
"func (r *Runsc) Wait(context context.Context, id string) (int, error) {\n\tdata, stderr, err := cmdOutput(r.command(context, \"wait\", id), false)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"%w: %s\", err, stderr)\n\t}\n\tvar res waitResult\n\tif err := json.Unmarshal(data, &res); err != nil {\n\t\treturn 0, err\n\t}\n\treturn res.ExitStatus, nil\n}",
"func (s *Session) Wait() error {\n\t<-s.exited\n\treturn s.exitErr\n}",
"func Wait() {\n\twg.Wait()\n}",
"func Wait(exitChannel chan error, cancel context.CancelFunc) error {\n\terr := <-exitChannel\n\t// cancel the context\n\tcancel()\n\treturn err\n}",
"func (c *CmdReal) Wait() error {\n\treturn c.cmd.Wait()\n}",
"func (s *Supervisor) Wait() error {\n\ts.wg.Wait()\n\treturn nil\n}",
"func (p *Pool) Wait() {\n\tclose(p.workers)\n\t<-p.wait\n}",
"func (muxer *Muxer) Wait() error {\n\tif muxer.cmd == nil {\n\t\treturn errors.New(\"ffmpeg dash: not started\")\n\t}\n\n\terr := muxer.cmd.Wait()\n\n\t// Ignore 255 status -- just indicates that we exited early\n\tif err != nil && err.Error() == \"exit status 255\" {\n\t\terr = nil\n\t}\n\n\treturn err\n}",
"func (t *Terminal) Wait() {\n\tfor <-t.stopChan {\n\t\treturn\n\t}\n}",
"func wait() {\n\tsig := make(chan os.Signal, 1)\n\tsignal.Notify(sig, os.Interrupt, os.Kill)\n\t<-sig\n\tfmt.Println()\n}",
"func wait() {\n\tsig := make(chan os.Signal, 1)\n\tsignal.Notify(sig, os.Interrupt, os.Kill)\n\t<-sig\n\tfmt.Println()\n}",
"func (a *Application) Wait() {\n\t<-a.terminated\n}",
"func (c *Client) Wait(containerId, execId string, noHang bool) (int32, error) {\n\tctx, cancel := getContextWithTimeout(hyperContextTimeout)\n\tdefer cancel()\n\n\treq := types.WaitRequest{\n\t\tContainer: containerId,\n\t\tProcessId: execId,\n\t\tNoHang: noHang,\n\t}\n\n\tresp, err := c.client.Wait(ctx, &req)\n\tif err != nil {\n\t\treturn -1, err\n\t}\n\n\treturn resp.ExitCode, nil\n}",
"func (p *Pool) Wait() {\n\tp.WaitGroup.Wait()\n}",
"func (k *KubeletExecutor) Wait(containerID string) error {\n\treturn k.cli.WaitForTermination(containerID, 0)\n}",
"func (r *Runner) Wait(t *time.Timer, signals []os.Signal) bool {\n\tif !r.Started() {\n\t\treturn true\n\t}\n\tfor _, sig := range signals {\n\t\tselect {\n\t\tcase err, ok := <-r.errCh:\n\t\t\tif ok {\n\t\t\t\tr.setError(err, ifNotSet)\n\t\t\t}\n\t\t\treturn true\n\t\tcase <-t.C:\n\t\t\tr.setError(timeoutErr, ifNotSet)\n\t\t\tr.Cmd.Process.Signal(sig)\n\t\t\tt.Reset(r.Killout)\n\t\t}\n\t}\n\tr.setError(fmt.Errorf(\"unkillable child: %v\", r.Cmd.Process))\n\treturn false\n}",
"func (wg *WaitGroup) Wait() {\n\twg.Wg.Wait()\n}",
"func (b *buildandrun) Wait() {\n\t<-b.done\n}",
"func (wg *WaitGroup) Wait() {\n\twg.waitGroup.Wait()\n}",
"func (app *App) Wait() error {\n\terr := app.group.Wait()\n\tif err == ErrDone {\n\t\treturn nil\n\t}\n\treturn err\n}",
"func (e *WindowsEvent) Wait() {\n\te.WaitTimeout(-1)\n}",
"func (n *Node) Wait() (int, error) {\n\tctx := context.TODO()\n\n\tclient, err := client.NewEnvClient()\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn client.ContainerWait(ctx, n.id)\n}",
"func WaitContext(ctx context.Context, cmd *exec.Cmd) error {\n\t// We use cmd.Process.Wait instead of cmd.Wait because cmd.Wait is not reenterable\n\tc := make(chan error, 1)\n\tgo func() {\n\t\tif cmd == nil || cmd.Process == nil {\n\t\t\tc <- nil\n\t\t} else {\n\t\t\t_, err := cmd.Process.Wait()\n\t\t\tc <- err\n\t\t}\n\t}()\n\tselect {\n\tcase <-ctx.Done():\n\t\treturn ErrorWaitTimeout\n\tcase err := <-c:\n\t\treturn err\n\t}\n}",
"func (self *ShadowRedisSlave) wait() {\n\tsigChan := make(chan os.Signal, 1)\n\tsignal.Notify(sigChan, os.Interrupt, os.Kill)\n\n\t// Block until a signal is received.\n\ts := <-sigChan\n\tlog.Debugf(\"Got signal:%v\", s)\n\tself.Close()\n}",
"func Wait() {\n\tselect {}\n}",
"func (s *service) waitForExit(cmd *exec.Cmd) {\n\tif err := cmd.Wait(); err != nil {\n\t\tlogrus.Debugf(\"Envoy terminated: %v\", err.Error())\n\t} else {\n\t\tlogrus.Debug(\"Envoy process exited\")\n\t}\n\n\ts.mutex.Lock()\n\tdefer s.mutex.Unlock()\n\tdelete(s.cmdMap, cmd)\n}",
"func (g *Group) Wait() error",
"func (s *Server) Wait() {\n\ts.wg.Wait()\n}",
"func (s *Server) Wait() {\n\ts.wg.Wait()\n}",
"func (manager *Manager) Wait() {\n\tlogger.FromCtx(manager.ctx, logger.Flow).WithField(\"flow\", manager.Name).Info(\"Awaiting till all processes are completed\")\n\tmanager.wg.Wait()\n}",
"func (p *Pool) Wait() {\n\tp.wg.Wait()\n}",
"func (p *Pool) Wait() {\n\tp.wg.Wait()\n}",
"func (f *FakeCmdRunner) Wait() error {\n\treturn f.Err\n}",
"func (kw *osKillWait) KillAndWait(command Commander, waitCh chan error) error {\n\tprocess := command.Process()\n\tif process == nil {\n\t\treturn ErrProcessNotStarted\n\t}\n\n\tlog := kw.logger.WithFields(logrus.Fields{\n\t\t\"PID\": process.Pid,\n\t})\n\n\tprocessKiller := newProcessKiller(log, command)\n\tprocessKiller.Terminate()\n\n\tselect {\n\tcase err := <-waitCh:\n\t\treturn err\n\tcase <-time.After(kw.gracefulKillTimeout):\n\t\tprocessKiller.ForceKill()\n\n\t\tselect {\n\t\tcase err := <-waitCh:\n\t\t\treturn err\n\t\tcase <-time.After(kw.forceKillTimeout):\n\t\t\treturn &KillProcessError{pid: process.Pid}\n\t\t}\n\t}\n}",
"func (s *Stopper) Wait() {\n\ts.wg.Wait()\n}",
"func (m *Machine) Wait(ctx context.Context) error {\n\tselect {\n\tcase <-ctx.Done():\n\t\treturn ctx.Err()\n\tcase <-m.exitCh:\n\t\treturn m.fatalErr\n\t}\n}",
"func (str *Stream) Wait() error {\n\treturn str.close(false)\n}",
"func (l *CommandQueueStatusListener) Wait() {\n\t<-l.signal\n}",
"func (p *ParallelManager) wait() {\n\tp.wg.Wait()\n\tclose(p.stopMonitorCh)\n}",
"func WaitForExit(matcher Matcher, wait time.Duration, delay time.Duration, log Log) ([]ps.Process, error) {\n\tbreakFn := func(procs []ps.Process) bool {\n\t\treturn len(procs) == 0\n\t}\n\treturn findProcesses(matcher, breakFn, wait, delay, log)\n}",
"func (j *JournalTailer) Wait() error {\n\terr := j.cmd.Wait()\n\tif err == nil {\n\t\tglog.V(2).Infof(\"Journal tailing of %s stopped, get them again with: %s\", j.GetUnitName(), j.GetCommandLine())\n\t\treturn nil\n\t}\n\tglog.Errorf(\"Journal tailing of %s stopped with unexpected error: %s\", j.GetUnitName(), err)\n\treturn err\n}",
"func (p *process) Monitor() {\n\tgo func() {\n\t\tp.waitC <- p.cmd.Wait()\n\t\tclose(p.waitC)\n\t\tp.done()\n\t}()\n}",
"func (p *Init) Wait() {\n\t<-p.waitBlock\n}",
"func (c *C) Wait() {\n\tc.wg.Wait()\n}",
"func (g *Group) Wait() error {\n\treturn g.parent.Wait()\n}",
"func (a *Application) Wait() {\n\t<-a.terminated\n\tlog.Printf(\"[TEST] thats all folks\")\n}",
"func (w *WaitGroup) Wait() {\n\tw.wg.Wait()\n}",
"func waitForCanclation(returnChn, abortWait, cancel chan bool, cmd *exec.Cmd) {\n\tselect {\n\tcase <-cancel:\n\t\tcmd.Process.Kill()\n\t\treturnChn <- true\n\tcase <-abortWait:\n\t}\n}",
"func (w *CommandExecutedWaiter) Wait(ctx context.Context, params *GetCommandInvocationInput, maxWaitDur time.Duration, optFns ...func(*CommandExecutedWaiterOptions)) error {\n\t_, err := w.WaitForOutput(ctx, params, maxWaitDur, optFns...)\n\treturn err\n}",
"func (p *Input) Wait() {\n\tp.Stop()\n}",
"func (f *FileSnapshot) Wait() {\n\tf.wg.Wait()\n}",
"func (a *Agent) Wait() error {\n\ta.init()\n\treturn <-a.waitCh\n}",
"func (c *Container) Wait() error {\n\tif c.id == \"\" {\n\t\treturn fmt.Errorf(\"container %s absent\", c.id)\n\t}\n\t_, err := c.cli.ContainerWait(c.ctx, c.id)\n\treturn err\n}",
"func WaitPid(t *kernel.Task, sysno uintptr, args arch.SyscallArguments) (uintptr, *kernel.SyscallControl, error) {\n\tpid := int(args[0].Int())\n\tstatusAddr := args[1].Pointer()\n\toptions := int(args[2].Uint())\n\n\tn, err := wait4(t, pid, statusAddr, options, 0)\n\treturn n, nil, err\n}",
"func (w *InstanceTerminatedWaiter) Wait(ctx context.Context, params *DescribeInstancesInput, maxWaitDur time.Duration, optFns ...func(*InstanceTerminatedWaiterOptions)) error {\n\t_, err := w.WaitForOutput(ctx, params, maxWaitDur, optFns...)\n\treturn err\n}",
"func (ts *TaskService) Wait(requestCtx context.Context, req *taskAPI.WaitRequest) (*taskAPI.WaitResponse, error) {\n\tdefer logPanicAndDie(log.G(requestCtx))\n\tlog.G(requestCtx).WithFields(logrus.Fields{\"id\": req.ID, \"exec_id\": req.ExecID}).Debug(\"wait\")\n\n\tresp, err := ts.runcService.Wait(requestCtx, req)\n\tif err != nil {\n\t\tlog.G(requestCtx).WithError(err).Error(\"wait failed\")\n\t\treturn nil, err\n\t}\n\n\tlog.G(requestCtx).WithField(\"exit_status\", resp.ExitStatus).Debug(\"wait succeeded\")\n\treturn resp, nil\n}",
"func (b *basebackup) Wait() {\n\tb.wg.Wait()\n}",
"func (wg *WaitGroup) Wait() error {\n\twg.wg.Wait()\n\treturn wg.err\n}"
] | [
"0.81128645",
"0.7869257",
"0.7748207",
"0.76745343",
"0.75808007",
"0.75490844",
"0.7530024",
"0.746755",
"0.6854481",
"0.6778684",
"0.6717715",
"0.6611814",
"0.661017",
"0.66004753",
"0.6592647",
"0.65859425",
"0.65060246",
"0.64455724",
"0.6444631",
"0.64402",
"0.64355767",
"0.6434956",
"0.6430905",
"0.6426059",
"0.64169985",
"0.64080256",
"0.63928735",
"0.6389095",
"0.63818276",
"0.6361206",
"0.6339109",
"0.63257223",
"0.6298538",
"0.62938607",
"0.629086",
"0.627942",
"0.62621325",
"0.62447834",
"0.6189046",
"0.6176536",
"0.61753017",
"0.6168836",
"0.6144178",
"0.6123357",
"0.6106821",
"0.6106336",
"0.6104284",
"0.61030823",
"0.61019564",
"0.6089234",
"0.60813475",
"0.6070602",
"0.6070602",
"0.6043367",
"0.6031513",
"0.60189754",
"0.60177374",
"0.60006624",
"0.5998005",
"0.5991488",
"0.596271",
"0.5953822",
"0.5952512",
"0.5946896",
"0.59428805",
"0.5942362",
"0.5942053",
"0.5938179",
"0.5932961",
"0.59243864",
"0.59243864",
"0.590499",
"0.5880848",
"0.5880848",
"0.5878627",
"0.58782876",
"0.58682525",
"0.5868021",
"0.5865584",
"0.5864287",
"0.58625335",
"0.58593875",
"0.5859307",
"0.5857044",
"0.5844497",
"0.58417773",
"0.58407325",
"0.58269393",
"0.58250153",
"0.5820062",
"0.5809898",
"0.57966375",
"0.5788311",
"0.5780087",
"0.5773335",
"0.57660276",
"0.5760708",
"0.5760441",
"0.5746321",
"0.57425106"
] | 0.7441739 | 8 |
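The Wait record above shows a wrapper method that delegates to an internal ops handle. A minimal, self-contained Go sketch of how such a wrapper might be wired up and used follows; only the Wait method body comes from the record — the processOps and osProcess types, the errInvalidProcess message, and the exec.Command target are assumptions for illustration, not part of the dataset.

package main

import (
	"errors"
	"fmt"
	"os"
	"os/exec"
)

// assumption: the record references errInvalidProcess without defining it.
var errInvalidProcess = errors.New("invalid process")

// processOps is the internal handle the wrapper delegates to (assumed shape).
type processOps interface {
	wait() (*os.ProcessState, error)
}

// osProcess adapts *os.Process to the assumed processOps interface.
type osProcess struct{ p *os.Process }

func (o osProcess) wait() (*os.ProcessState, error) { return o.p.Wait() }

// Process mirrors the wrapper in the record: a nil ops handle is invalid.
type Process struct{ ops processOps }

func (p Process) Wait() (*os.ProcessState, error) {
	if p.ops == nil {
		return nil, errInvalidProcess
	}
	return p.ops.wait()
}

func main() {
	// assumption: "true" is available, as on most Unix-like systems.
	cmd := exec.Command("true")
	if err := cmd.Start(); err != nil {
		fmt.Println("start:", err)
		return
	}
	p := Process{ops: osProcess{p: cmd.Process}}
	state, err := p.Wait()
	fmt.Println(state, err) // e.g. "exit status 0 <nil>"
}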
Pid returns the process ID | func (p Process) Pid() (int, error) {
	// math.MinInt32 is returned here because it's an invalid value
	// for the kill() system call.
if p.ops == nil {
return math.MinInt32, errInvalidProcess
}
return p.ops.pid(), nil
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func Getpid() int",
"func Pid() int {\n\treturn processPid\n}",
"func (p *process) Pid() int {\n\treturn p.cmd.Process.Pid\n}",
"func (p *procBase) Pid() int {\n\tif !p.Running() {\n\t\treturn 0\n\t}\n\treturn p.cmd.Process.Pid\n}",
"func (c *D) Pid() (int, error) {\n\tif !c.IsRunning() {\n\t\treturn 0, ErrNotRunning\n\t}\n\treturn c.cmd.Process.Pid, nil\n}",
"func (proc *Proc) GetPid() int {\n\treturn proc.Pid\n}",
"func (p *Proc) PID() int {\n\treturn int(p.proc.Pid)\n}",
"func PID() int {\n\treturn os.Getpid()\n}",
"func (nm *NodeMonitor) GetPid() int {\n\treturn nm.GetProcess().Pid\n}",
"func libc_getpid() int32",
"func (dc *DockerContainer) Pid() int {\n\treturn dc.Cmd.Process.Pid\n}",
"func (p *process) ProcessID() int {\n\tproc := p.p\n\tif proc != nil {\n\t\treturn proc.Pid\n\t}\n\treturn 0\n}",
"func ProcessStatePid(p *os.ProcessState,) int",
"func (s *PerProcessStat) Pid() string {\n\treturn s.pid\n}",
"func (d *Daemon) Pid() (int, error) {\n\tpid, err := ioutil.ReadFile(d.PidFileName)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tp, err := strconv.Atoi(string(bytes.TrimSpace(pid)))\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"%s: invalid process id\", d.PidFileName)\n\t}\n\treturn p, nil\n}",
"func (m *Machine) PID() (int, error) {\n\tif m.cmd == nil || m.cmd.Process == nil {\n\t\treturn 0, fmt.Errorf(\"machine is not running\")\n\t}\n\tselect {\n\tcase <-m.exitCh:\n\t\treturn 0, fmt.Errorf(\"machine process has exited\")\n\tdefault:\n\t}\n\treturn m.cmd.Process.Pid, nil\n}",
"func (r *Runtime) PID() (int, bool) {\n\tif r.cmd.Process == nil {\n\t\treturn 0, false\n\t}\n\n\treturn r.cmd.Process.Pid, true\n}",
"func (x *RpcExector) getPid(rpcc *rpc.XmlRPCClient, process string) {\n\tprocInfo, err := rpcc.GetProcessInfo(process)\n\tif err != nil {\n\t\tfmt.Printf(\"program '%s' not found\\n\", process)\n\t\tos.Exit(1)\n\t} else {\n\t\tfmt.Printf(\"%d\\n\", procInfo.Pid)\n\t}\n}",
"func (g *Goer) getPid() (int, error) {\n\tif _, err := os.Stat(g.PidFile); err == nil {\n\t\tdata, err := ioutil.ReadFile(g.PidFile)\n\t\tif err != nil {\n\t\t\treturn 0, errors.New(\"Goes not run.\")\n\t\t}\n\t\tprocessPid, err := strconv.Atoi(string(data))\n\t\tif err != nil {\n\t\t\treturn 0, errors.New(\"Unable to read and parse process pid.\")\n\t\t}\n\t\treturn processPid, nil\n\t}\n\n\treturn 0, errors.New(\"Goes not run.\")\n}",
"func (c *Container) PID() (int, error) {\n\tc.lock.Lock()\n\tdefer c.lock.Unlock()\n\n\tif err := c.syncContainer(); err != nil {\n\t\treturn -1, err\n\t}\n\n\treturn c.state.PID, nil\n}",
"func (x *CtlCommand) getPid(rpcc *xmlrpcclient.XMLRPCClient, process string) {\n\tprocInfo, err := rpcc.GetProcessInfo(process)\n\tif err != nil {\n\t\tfmt.Printf(\"program '%s' not found\\n\", process)\n\t\tos.Exit(1)\n\t} else {\n\t\tfmt.Printf(\"%d\\n\", procInfo.Pid)\n\t}\n}",
"func (w *Worker) GetPID() int {\n\tif w.cmd != nil && w.cmd.Process != nil {\n\t\treturn w.cmd.Process.Pid\n\t}\n\treturn -1\n}",
"func (x *CtlCommand) getPid(rpcc *rpcclient.RPCClient, process string) {\n\tret, err := rpcc.GetProcessInfo(&rpcclient.GetProcessInfoArg{process})\n\tif err != nil {\n\t\tfmt.Printf(\"program '%s' not found\\n\", process)\n\t\tos.Exit(1)\n\t\treturn\n\t}\n\tfmt.Printf(\"%d\\n\", ret.ProcessInfo.Pid)\n}",
"func (o *EquipmentFanModule) GetPid() string {\n\tif o == nil || o.Pid == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Pid\n}",
"func Getpid() int {\n\treturn syscall.Getpid()\n}",
"func Getpid() int {\n\treturn syscall.Getpid()\n}",
"func (o *EquipmentIoCardBase) GetPid() string {\n\tif o == nil || o.Pid == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Pid\n}",
"func Getppid() int",
"func Getpid(t *kernel.Task, sysno uintptr, args arch.SyscallArguments) (uintptr, *kernel.SyscallControl, error) {\n\treturn uintptr(t.ThreadGroup().ID()), nil, nil\n}",
"func (p *Init) Pid() int {\n\treturn p.pid\n}",
"func (c *Client) GetPID() (int, error) {\n\tvar pid int\n\terr := c.Call(\"supervisor.getPID\", nil, &pid)\n\n\treturn pid, err\n}",
"func (hm *HM) GetProcID() uint8 {\n\treturn hm.pid\n}",
"func rcPid(ctx context.Context, in Params) (out Params, err error) {\n\tout = make(Params)\n\tout[\"pid\"] = os.Getpid()\n\treturn out, nil\n}",
"func (o *TechsupportmanagementEndPointAllOf) GetPid() string {\n\tif o == nil || o.Pid == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Pid\n}",
"func (n *Namespace) GetProcessID(cmd *exec.Cmd) {\n\tcmd.SysProcAttr = &syscall.SysProcAttr{\n\t\tCloneflags: syscall.CLONE_NEWPID,\n\t}\n}",
"func (p *DockerPod) GetPid() int {\n\tif p.netTask == nil {\n\t\treturn 0\n\t}\n\treturn p.netTask.RuntimeConf.Pid\n}",
"func pid(instance int) (pid string, err error) {\n file, err := os.Open(pidFileName(instance))\n if err != nil {\n return\n }\n\n defer file.Close()\n\n scanner := bufio.NewScanner(file)\n scanner.Scan()\n pid = scanner.Text()\n return\n}",
"func (c *Config) PID() string {\n\treturn c.RunDir + \"/pid.txt\"\n}",
"func (o *StoragePhysicalDiskAllOf) GetPid() string {\n\tif o == nil || o.Pid == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Pid\n}",
"func (m *Message) PID() (*PID, error) {\n\tps, err := m.Parse(\"PID\")\n\tpst, ok := ps.(*PID)\n\tif ok {\n\t\treturn pst, err\n\t}\n\treturn nil, err\n}",
"func (metadata EventMetadata) GetPID() int {\n\treturn int(metadata.Pid)\n}",
"func (p *Process) Ppid() (int32, error) {\n\treturn p.PpidWithContext(context.Background())\n}",
"func (c *qemuCmd) PID() int {\n\treturn c.attachedChildPid\n}",
"func (n *ParDo) GetPID() string {\n\treturn n.PID\n}",
"func PPid() int {\n\tif !IsChild() {\n\t\treturn Pid()\n\t}\n\tppidValue := os.Getenv(envKeyPPid)\n\tif ppidValue != \"\" && ppidValue != \"0\" {\n\t\treturn gconv.Int(ppidValue)\n\t}\n\treturn PPidOS()\n}",
"func (o *StoragePhysicalDisk) GetPid() string {\n\tif o == nil || o.Pid == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Pid\n}",
"func GetPidByPort(port int) (pid string) {\n\t// lsof -i:5050 |grep -v PID| awk '{print $2}'\n\to, err := sh.Command(\"lsof\", fmt.Sprintf(\"-i:%d\", port)).\n\t\tCommand(\"grep\", \"-v\", \"PID\").\n\t\tCommand(\"awk\", `{print $2}`).\n\t\tOutput()\n\tif err != nil {\n\t\treturn\n\t}\n\n\treturn strings.TrimSpace(string(o))\n}",
"func Getppid() int {\n\treturn syscall.Getppid()\n}",
"func (o *InlineResponse20027Person) GetPid() string {\n\tif o == nil || o.Pid == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Pid\n}",
"func GetPidFrom(pidFilePath string) (pid int, err error) {\n\n\tif pidFilePath == \"\" {\n\t\tpidFilePath = types.MosnPidDefaultFileName\n\t}\n\n\tvar pf io.Reader\n\tif pf, err = os.Open(pidFilePath); err != nil {\n\t\treturn\n\t}\n\n\tvar bs []byte\n\tif bs, err = ioutil.ReadAll(pf); err != nil {\n\t\treturn\n\t}\n\n\tpid, err = strconv.Atoi(strings.TrimRight(string(bs), \"\\n\"))\n\treturn\n}",
"func GetServicePid(serviceCmd string)int{\n\tres := GetServicePsInfo(serviceCmd)\n\tif \"\" == res{\n\t\treturn 0\n\t}\n\tresSplit := strings.Split(res,\" \")\n\tif len(resSplit) < 2{\n\t\treturn 0\n\t}\n\tval,_ := strconv.Atoi(resSplit[1])\n\treturn val\n}",
"func (o *os) GetProcessId() gdnative.Int {\n\to.ensureSingleton()\n\t//log.Println(\"Calling _OS.GetProcessId()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"_OS\", \"get_process_id\")\n\n\t// Call the parent method.\n\t// int\n\tretPtr := gdnative.NewEmptyInt()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := gdnative.NewIntFromPointer(retPtr)\n\treturn ret\n}",
"func Getppid(t *kernel.Task, sysno uintptr, args arch.SyscallArguments) (uintptr, *kernel.SyscallControl, error) {\n\tparent := t.Parent()\n\tif parent == nil {\n\t\treturn 0, nil, nil\n\t}\n\treturn uintptr(t.PIDNamespace().IDOfThreadGroup(parent.ThreadGroup())), nil, nil\n}",
"func (m *RegistryKeyState) GetProcessId()(*int32) {\n return m.processId\n}",
"func (o *CapabilitySiocModuleManufacturingDef) GetPid() string {\n\tif o == nil || o.Pid == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Pid\n}",
"func getPpid(pid uint64) uint64 {\n\tf, err := os.OpenFile(fmt.Sprintf(\"/proc/%d/status\", pid), os.O_RDONLY, os.ModePerm)\n\tif err != nil {\n\t\treturn 0\n\t}\n\tdefer f.Close()\n\n\tsc := bufio.NewScanner(f)\n\tfor sc.Scan() {\n\t\ttext := sc.Text()\n\t\tif strings.Contains(text, \"PPid:\") {\n\t\t\tf := strings.Fields(text)\n\t\t\ti, _ := strconv.ParseUint(f[len(f)-1], 10, 64)\n\t\t\treturn i\n\t\t}\n\t}\n\treturn 0\n}",
"func getProcessPidFromName(t *testing.T) (string, int32) {\n\tpp, err := process.Processes()\n\tif err != nil {\n\t\tt.Fatalf(\"failed to get the processes with err: %v\", err)\n\t\treturn \"\", 0\n\t}\n\tp := pp[0]\n\tif len(pp) == 0 {\n\t\tt.Skip(\"no available processes\")\n\t\treturn \"\", 0\n\t}\n\tname, err := p.Name()\n\tif err != nil {\n\t\tt.Fatalf(\"failed to get process name with error: %v\", err)\n\t}\n\treturn name, p.Pid\n}",
"func (t *TableNode) PID() string {\n\treturn t.PUID\n}",
"func GetPID(runtime, containerID, socketPath string) (int, error) {\n\tvar PID int\n\n\tswitch runtime {\n\tcase \"docker\":\n\t\thost := \"unix://\" + socketPath\n\t\t// deriving pid from the inspect out of target container\n\t\tout, err := exec.Command(\"sudo\", \"docker\", \"--host\", host, \"inspect\", containerID).CombinedOutput()\n\t\tif err != nil {\n\t\t\tlog.Error(fmt.Sprintf(\"[docker]: Failed to run docker inspect: %s\", string(out)))\n\t\t\treturn 0, err\n\t\t}\n\t\t// parsing data from the json output of inspect command\n\t\tPID, err = parsePIDFromJSON(out, runtime)\n\t\tif err != nil {\n\t\t\tlog.Error(fmt.Sprintf(\"[docker]: Failed to parse json from docker inspect output: %s\", string(out)))\n\t\t\treturn 0, err\n\t\t}\n\tcase \"containerd\", \"crio\":\n\t\t// deriving pid from the inspect out of target container\n\t\tendpoint := \"unix://\" + socketPath\n\t\tout, err := exec.Command(\"sudo\", \"crictl\", \"-i\", endpoint, \"-r\", endpoint, \"inspect\", containerID).CombinedOutput()\n\t\tif err != nil {\n\t\t\tlog.Error(fmt.Sprintf(\"[cri]: Failed to run crictl: %s\", string(out)))\n\t\t\treturn 0, err\n\t\t}\n\t\t// parsing data from the json output of inspect command\n\t\tPID, err = parsePIDFromJSON(out, runtime)\n\t\tif err != nil {\n\t\t\tlog.Errorf(fmt.Sprintf(\"[cri]: Failed to parse json from crictl output: %s\", string(out)))\n\t\t\treturn 0, err\n\t\t}\n\tdefault:\n\t\treturn 0, errors.Errorf(\"%v container runtime not suported\", runtime)\n\t}\n\n\tlog.Info(fmt.Sprintf(\"[Info]: Container ID=%s has process PID=%d\", containerID, PID))\n\n\treturn PID, nil\n}",
"func (p propEncoding) ProcessID() int {\n\tpanic(\"not implemented\")\n}",
"func GetPID(experimentDetails *experimentTypes.ExperimentDetails, clients clients.ClientSets) (int, error) {\n\n\tpod, err := clients.KubeClient.CoreV1().Pods(experimentDetails.AppNS).Get(experimentDetails.TargetPods, v1.GetOptions{})\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\tvar containerID string\n\n\t// filtering out the container id from the details of containers inside containerStatuses of the given pod\n\t// container id is present in the form of <runtime>://<container-id>\n\tfor _, container := range pod.Status.ContainerStatuses {\n\t\tif container.Name == experimentDetails.TargetContainer {\n\t\t\tcontainerID = strings.Split(container.ContainerID, \"//\")[1]\n\t\t\tbreak\n\t\t}\n\t}\n\n\tlog.Infof(\"containerid: %v\", containerID)\n\n\t// deriving pid from the inspect out of target container\n\tout, err := exec.Command(\"crictl\", \"inspect\", containerID).CombinedOutput()\n\tif err != nil {\n\t\tlog.Error(fmt.Sprintf(\"[cri]: Failed to run crictl: %s\", string(out)))\n\t\treturn 0, err\n\t}\n\t// parsing data from the json output of inspect command\n\tPID, err := parsePIDFromJSON(out, experimentDetails.ContainerRuntime)\n\tif err != nil {\n\t\tlog.Error(fmt.Sprintf(\"[cri]: Failed to parse json from crictl output: %s\", string(out)))\n\t\treturn 0, err\n\t}\n\n\tlog.Info(fmt.Sprintf(\"[cri]: Container ID=%s has process PID=%d\", containerID, PID))\n\n\treturn PID, nil\n\n}",
"func libc_getppid() int32",
"func (c *Container) SandboxPid(ctx context.Context) (int, error) {\n\tresp, err := c.client.ContainerInspect(ctx, c.id)\n\tif err != nil {\n\t\treturn -1, err\n\t}\n\treturn resp.ContainerJSONBase.State.Pid, nil\n}",
"func (f *FFlags) ReadPid(procSign string) int {\n\tpidFile := f.getPidFile(procSign)\n\tif util.FileExists(pidFile) {\n\t\tdata, err := ioutil.ReadFile(pidFile)\n\t\tif err != nil {\n\t\t\treturn 0\n\t\t}\n\t\t// II. get pid\n\t\tpid, err := strconv.Atoi(string(data))\n\t\tif err != nil {\n\t\t\treturn 0\n\t\t}\n\t\treturn pid\n\t}\n\treturn 0\n}",
"func FindProcess(pid int) (*os.Process, error)",
"func (a ProblemAdapter) GetPID() string {\n\treturn a.event.PID\n}",
"func FindPidByPs(procname string) (int, error) {\n\tcmd := exec.Command(\"ps\", \"aux\")\n\tvar out bytes.Buffer\n\tcmd.Stdout = &out\n\terr := cmd.Run()\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tfor {\n\t\tline, err := out.ReadString('\\n')\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t\tif strings.Contains(line, procname) {\n\t\t\tfields := strings.Split(line, \" \")\n\t\t\tfiter := make([]string, 0)\n\t\t\tfor _, v := range fields {\n\t\t\t\tif v != \"\" && v != \"\\t\" {\n\t\t\t\t\tfiter = append(fiter, v)\n\t\t\t\t}\n\t\t\t}\n\t\t\tpid, err := strconv.Atoi(fiter[1])\n\t\t\tif err != nil {\n\t\t\t\treturn 0, err\n\t\t\t}\n\t\t\treturn pid, nil\n\t\t} else {\n\t\t\tcontinue\n\t\t}\n\t}\n\treturn 0, ErrNotExist\n}",
"func (c *Client) containerPID(ctx context.Context, id string) (int, error) {\n\tpack, err := c.watch.get(id)\n\tif err != nil {\n\t\treturn -1, err\n\t}\n\treturn int(pack.task.Pid()), nil\n}",
"func (c *GlobalConfig) GetPidPath() string {\n\treturn c.PidFilePath\n}",
"func (s *Server) PIDPath() string { return fmt.Sprintf(\"%s/harp/%s/app.pid\", s.Home, cfg.App.Name) }",
"func GetPidByName(name string) (pid int, err error) {\n\terr = filepath.Walk(\"/proc\", func(path string, info os.FileInfo, err error) error {\n\t\t// using the truth that file `/proc/[pid]/status` store information for\n\t\t// a running app with pid\n\t\tif strings.Contains(path, \"/status\") && strings.Count(path, \"/\") == 3 {\n\t\t\tf, err := os.Open(path)\n\t\t\tif err != nil {\n\t\t\t\treturn err // maybe just return a nil\n\t\t\t}\n\t\t\tdefer f.Close()\n\n\t\t\trd := bufio.NewReader(f)\n\t\t\t// the first line contains name, so just read this line\n\t\t\tfor i := 0; i < 1; i++ {\n\t\t\t\tline, err := rd.ReadString('\\n')\n\t\t\t\tif err != nil || io.EOF == err {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tif strings.Contains(line, name) {\n\t\t\t\t\t// get the pid from file path\n\t\t\t\t\ttarget := strings.Split(path, \"/\")[2]\n\t\t\t\t\tvar err error\n\t\t\t\t\tpid, err = strconv.Atoi(target)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t})\n\n\treturn pid, err\n}",
"func getPidCmd(cmdName string) func() (int, error) {\n\t// prometheus' process collector only works on linux anyway. let them do the\n\t// process detection, if we return an error here we just get 0 metrics and it\n\t// does not log / blow up (that's fine!) it's also likely we hit permissions\n\t// errors here for many installations, we want to do similar and ignore (we\n\t// just want for prod).\n\n\tvar pid int\n\n\treturn func() (int, error) {\n\t\tif pid != 0 {\n\t\t\t// make sure it's our pid.\n\t\t\tif isPidMatchCmd(cmdName, pid) {\n\t\t\t\treturn pid, nil\n\t\t\t}\n\t\t\tpid = 0 // reset to go search\n\t\t}\n\n\t\tif pids, err := getPidList(); err == nil {\n\t\t\tfor _, test := range pids {\n\t\t\t\tif isPidMatchCmd(cmdName, test) {\n\t\t\t\t\tpid = test\n\t\t\t\t\treturn pid, nil\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\treturn pid, io.EOF\n\t}\n}",
"func PID(id ID) uint16 {\n\treturn dgen.PID(id)\n}",
"func findPid (sc ServerConfig, pid int) int {\n\tfor i := 0; i < len(sc.Mypeers); i++ {\n\t\tif pid == sc.Mypeers[i] {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}",
"func pidPrefix() string {\n\treturn fmt.Sprintf(\"[%d] \", os.Getpid())\n}",
"func (c *Client) GetAgentPid() (string, error) {\n\treturn c.get(\"SSH_AGENT_PID\")\n}",
"func newFilePid(filestring string) (Pid, error) {\n\tps := strings.Split(filestring, \":\")\n\t// Check to see if process alias has been configured, if not use actual process name\n\tvar pn string\n\tif len(ps) > 1 {\n\t\tpn = ps[1]\n\t} else {\n\t\tpn = \"\"\n\t}\n\tf := filePid{filepath: ps[0], name: pn, failed: true}\n\treturn &f, nil\n}",
"func (o *InlineResponse20027Person) SetPid(v string) {\n\to.Pid = &v\n}",
"func GetNetStatPid(rw http.ResponseWriter, protocol string, process string) error {\n\tpid, err := strconv.ParseInt(process, 10, 32)\n\tif err != nil || protocol == \"\" || pid == 0 {\n\t\treturn errors.New(\"Can't parse request\")\n\t}\n\n\tconn, err := net.ConnectionsPid(protocol, int32(pid))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn share.JSONResponse(conn, rw)\n}",
"func (p *PodmanTestIntegration) PodmanPID(args []string) (*PodmanSessionIntegration, int) {\n\t// TODO\n\treturn nil, 0\n}",
"func (s *TXPoolServer) GetPID(actor tc.ActorType) *actor.PID {\n\tif actor < tc.TxActor || actor >= tc.MaxActor {\n\t\treturn nil\n\t}\n\n\treturn s.actors[actor]\n}",
"func (o *TechsupportmanagementEndPointAllOf) SetPid(v string) {\n\to.Pid = &v\n}",
"func (o *EquipmentIoCardBase) SetPid(v string) {\n\to.Pid = &v\n}",
"func GetProcessInfo(rw http.ResponseWriter, proc string, property string) error {\n\tpid, err := strconv.ParseInt(proc, 10, 32)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tp, err := process.NewProcess(int32(pid))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tswitch property {\n\tcase \"pid-connections\":\n\t\tconn, err := p.Connections()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn share.JSONResponse(conn, rw)\n\n\tcase \"pid-rlimit\":\n\t\trlimit, err := p.Rlimit()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn share.JSONResponse(rlimit, rw)\n\n\tcase \"pid-rlimit-usage\":\n\t\trlimit, err := p.RlimitUsage(true)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn share.JSONResponse(rlimit, rw)\n\n\tcase \"pid-status\":\n\t\ts, err := p.Status()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn share.JSONResponse(s, rw)\n\n\tcase \"pid-username\":\n\t\tu, err := p.Username()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn share.JSONResponse(u, rw)\n\n\tcase \"pid-open-files\":\n\t\tf, err := p.OpenFiles()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn share.JSONResponse(f, rw)\n\n\tcase \"pid-fds\":\n\t\tf, err := p.NumFDs()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn share.JSONResponse(f, rw)\n\n\tcase \"pid-name\":\n\t\tn, err := p.Name()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn share.JSONResponse(n, rw)\n\n\tcase \"pid-memory-percent\":\n\t\tm, err := p.MemoryPercent()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn share.JSONResponse(m, rw)\n\n\tcase \"pid-memory-maps\":\n\t\tm, err := p.MemoryMaps(true)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn share.JSONResponse(m, rw)\n\n\tcase \"pid-memory-info\":\n\t\tm, err := p.MemoryInfo()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn share.JSONResponse(m, rw)\n\n\tcase \"pid-io-counters\":\n\t\tm, err := p.IOCounters()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn share.JSONResponse(m, rw)\n\t}\n\n\treturn nil\n}",
"func CheckPidFromFile(filename string) error {\n\tfile, err := os.OpenFile(filename,os.O_RDONLY,0644)\n\tif err != nil {\n\t\t// log.Warnf(\"open file failed !\")\n\t\treturn err\n\t}\n\tdefer file.Close()\n\tbody, err := ioutil.ReadAll(file)\n\tif err != nil {\n\t\t// log.Warnf(\"ReadAll\", err)\n\t\t\treturn err\n\t}\n\tdata := strings.Split(string(body),\"=\")\n\t// log.Debugf(\"get pid file: %v\",data)\n\tpid,err := strconv.Atoi(data[1])\n\t// log.Debugf(\"get pid = %d\",pid)\n\n\treturn process.FindProcess(pid)\n}",
"func (c *Config) GetContainerPid() int {\n\treturn int(c.config.container.pid)\n}",
"func (g *Generator) PID(id ID) uint16 {\n\tif g.mode == Sequential {\n\t\treturn binary.BigEndian.Uint16(id[7:9])\n\t} else {\n\t\treturn binary.BigEndian.Uint16(id[6:8])\n\t}\n}",
"func (c *PIDController) PID() (p, i, d float64) {\n\treturn c.p, c.i, c.d\n}",
"func (config *Configuration) PIDFileName() string {\n name := \"~/.run/\" + config.ServiceName + \".pid\"\n name = Util.AbsolutePath(name)\n return name\n}",
"func (o *EquipmentFanModule) SetPid(v string) {\n\to.Pid = &v\n}",
"func (file *LockFile) ReadPid() (pid int, err error) {\n\tif _, err = file.Seek(0, os.SEEK_SET); err != nil {\n\t\treturn\n\t}\n\t_, err = fmt.Fscan(file, &pid)\n\treturn\n}",
"func getCryptohomedPID(ctx context.Context, r hwsec.CmdRunner) (int, error) {\n\traw, err := r.Run(ctx, \"pidof\", \"cryptohomed\")\n\tif err != nil {\n\t\treturn -1, errors.Wrap(err, \"failed to run pidof to get cryptohomed pid\")\n\t}\n\tout := strings.TrimSpace(string(raw))\n\tpid, err := strconv.Atoi(out)\n\tif err != nil {\n\t\treturn -1, errors.Wrapf(err, \"failed to parse pid from str %q\", out)\n\t}\n\treturn pid, nil\n}",
"func ProcInfo(pid int) (*os.Process, error) {\n\treturn os.FindProcess(pid)\n}",
"func NewPid(pid int) (Capabilities, error) {\n\treturn newPid(pid)\n}",
"func getProcessPidAndPort(t *testing.T) (int32, uint32) {\n\tcc, err := net.Connections(\"\")\n\tif err != nil {\n\t\tt.Fatalf(\"failed to get the connections with err: %v\", err)\n\t\treturn 0, 0\n\t}\n\tif len(cc) == 0 {\n\t\tt.Skip(\"no available connection\")\n\t\treturn 0, 0\n\t}\n\tc := cc[0]\n\treturn c.Pid, c.Laddr.Port\n}",
"func VMPID(ctx context.Context) (pid int, err error) {\n\t_, _, pid, err = upstart.JobStatus(ctx, wilcoVMJob)\n\n\treturn pid, err\n}",
"func processPid(pid string) (string, string, string, error) {\n\tif x, y, z, err := processIiifPid(pid); err == nil {\n\t\treturn x, y, z, err\n\t}\n\n\tif x, y, z, err := processMandalaPid(pid); err == nil {\n\t\treturn x, y, z, err\n\t}\n\n\treturn \"\", \"\", \"\", errors.New(\"PID is neither IIIF nor Mandala\")\n}",
"func (c *Config) GetContainerPid() int {\n\treturn int(c.config.containerPid)\n}",
"func New(pathfile string) *Pidfile {\n\tfile, err := fileutil.OpenFile(pathfile, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, fileutil.DefaultFileMode)\n\tif err != nil {\n\t\tgolog.Warnf(\"pidfile: failed to open %s (%s)\", pathfile, err)\n\t\treturn nil\n\t}\n\tdefer file.Close()\n\tpid := strconv.Itoa(os.Getpid())\n\tgolog.Infof(\"start process pid:%v --> %s\", pid, pathfile)\n\tfile.WriteString(pid)\n\treturn &Pidfile{pathfile}\n}",
"func GetProcessByPID(PID int) (Process, error) {\n\t// Try to open the folder to see if it exists and if we have access to it.\n\tf, err := os.Open(fmt.Sprintf(\"/proc/%d\", PID))\n\tdefer f.Close()\n\tif err != nil {\n\t\treturn Process{}, errors.New(fmt.Sprintf(\"Error opening /proc/%d\", PID))\n\t}\n\n\treturn Process{PID: uint64(PID)}, nil\n}"
] | [
"0.838524",
"0.8062947",
"0.8008295",
"0.7893553",
"0.7868351",
"0.7649986",
"0.7641024",
"0.75601506",
"0.7502672",
"0.74702924",
"0.7432728",
"0.73989254",
"0.73691005",
"0.7308452",
"0.73001015",
"0.7295917",
"0.72577465",
"0.71821296",
"0.7167185",
"0.7123233",
"0.70919085",
"0.7079286",
"0.705523",
"0.7047575",
"0.69953066",
"0.69953066",
"0.69658864",
"0.6959497",
"0.6950113",
"0.6901526",
"0.6900627",
"0.68190056",
"0.67918926",
"0.678406",
"0.6780403",
"0.6769465",
"0.67479765",
"0.67432684",
"0.67095363",
"0.66818637",
"0.6635649",
"0.66248196",
"0.66203886",
"0.65983737",
"0.6591517",
"0.6580135",
"0.6541533",
"0.6523729",
"0.6502996",
"0.6501535",
"0.6423651",
"0.63327974",
"0.6331908",
"0.63291055",
"0.6326431",
"0.6302831",
"0.6252091",
"0.6246696",
"0.6230171",
"0.6189189",
"0.6146155",
"0.6142475",
"0.6129415",
"0.61292326",
"0.6087438",
"0.6060436",
"0.60289717",
"0.60037106",
"0.59832275",
"0.5967807",
"0.595157",
"0.59303087",
"0.58842015",
"0.58744943",
"0.5868833",
"0.5864518",
"0.5855196",
"0.5850814",
"0.5809778",
"0.57917434",
"0.57823366",
"0.5772774",
"0.577084",
"0.5766806",
"0.5766651",
"0.57356",
"0.5733496",
"0.57324344",
"0.57078743",
"0.5700852",
"0.57007444",
"0.5692138",
"0.5672366",
"0.5666379",
"0.5650212",
"0.5646034",
"0.5621376",
"0.560924",
"0.55810463",
"0.55650055"
] | 0.77644545 | 5 |
Signal sends a signal to the Process. | func (p Process) Signal(sig os.Signal) error {
	if p.ops == nil {
		return errInvalidProcess
	}
	return p.ops.signal(sig)
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (p *Process) Signal(sig os.Signal) error {\n return p.Process.Signal(sig)\n}",
"func ProcessSignal(p *os.Process, sig os.Signal,) error",
"func (p *process) Signal(s os.Signal) error {\n\treturn syscall.Kill(p.pid, s.(syscall.Signal))\n}",
"func (p *Process) SendSignal(sig Signal) error {\n\treturn p.SendSignalWithContext(context.Background(), sig)\n}",
"func (b *BoatHandle) Signal(sig os.Signal) error { return b.cmd.Process.Signal(sig) }",
"func (c *qemuCmd) Signal(sig unix.Signal) error {\n\tcommand := api.InstanceExecControl{\n\t\tCommand: \"signal\",\n\t\tSignal: int(sig),\n\t}\n\n\t// Check handler hasn't finished.\n\tselect {\n\tcase <-c.dataDone:\n\t\treturn fmt.Errorf(\"no such process\") // Aligns with error retured from unix.Kill in lxc's Signal().\n\tdefault:\n\t}\n\n\tc.controlSendCh <- command\n\terr := <-c.controlResCh\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlogger.Debugf(`Forwarded signal \"%d\" to lxd-agent`, sig)\n\treturn nil\n}",
"func (s *Session) Signal(sig os.Signal) {\n\ts.command.Process.Signal(sig)\n}",
"func (d *Daemon) Signal(sig os.Signal) error {\n\tprocess, err := d.Process()\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn process.Signal(sig)\n}",
"func (c *D) Signal(signal os.Signal) error {\n\tif !c.IsRunning() {\n\t\treturn ErrNotRunning\n\t}\n\treturn c.cmd.Process.Signal(signal)\n}",
"func (ep *ExpectProcess) Signal(sig os.Signal) error {\n\tep.mu.Lock()\n\tdefer ep.mu.Unlock()\n\n\tif ep.cmd == nil {\n\t\treturn errors.New(\"expect process already closed\")\n\t}\n\n\treturn ep.cmd.Process.Signal(sig)\n}",
"func (c *Cmd) Signal(sig os.Signal) error {\n\treturn signal(c.cmd.Process, sig)\n}",
"func (c *Cmd) Signal(signal syscall.Signal) error {\n\tif c.Process == nil {\n\t\treturn errNotStarted\n\t}\n\tif c.ProcessState != nil {\n\t\treturn errAlreadyWaited\n\t}\n\n\t// Negative PID means the process group led by the process.\n\treturn syscall.Kill(-c.Process.Pid, signal)\n}",
"func (x *RpcExector) signal(rpcc *rpc.XmlRPCClient, sig_name string, processes []string) {\n\tfor _, process := range processes {\n\t\tif process == \"all\" {\n\t\t\treply, err := rpcc.SignalAll(process)\n\t\t\tif err == nil {\n\t\t\t\tx.showProcessInfo(&reply, make(map[string]bool))\n\t\t\t} else {\n\t\t\t\tfmt.Printf(\"Fail to send signal %s to all process\", sig_name)\n\t\t\t\tos.Exit(1)\n\t\t\t}\n\t\t} else {\n\t\t\treply, err := rpcc.SignalProcess(sig_name, process)\n\t\t\tif err == nil && reply.Success {\n\t\t\t\tfmt.Printf(\"Succeed to send signal %s to process %s\\n\", sig_name, process)\n\t\t\t} else {\n\t\t\t\tfmt.Printf(\"Fail to send signal %s to process %s\\n\", sig_name, process)\n\t\t\t\tos.Exit(1)\n\t\t\t}\n\t\t}\n\t}\n}",
"func (x *CtlCommand) signal(rpcc *xmlrpcclient.XMLRPCClient, sigName string, processes []string) {\n\tfor _, process := range processes {\n\t\tif process == \"all\" {\n\t\t\treply, err := rpcc.SignalAll(process)\n\t\t\tif err == nil {\n\t\t\t\tx.showProcessInfo(&reply, make(map[string]bool))\n\t\t\t} else {\n\t\t\t\tfmt.Printf(\"Fail to send signal %s to all process\", sigName)\n\t\t\t\tos.Exit(1)\n\t\t\t}\n\t\t} else {\n\t\t\treply, err := rpcc.SignalProcess(sigName, process)\n\t\t\tif err == nil && reply.Success {\n\t\t\t\tfmt.Printf(\"Succeed to send signal %s to process %s\\n\", sigName, process)\n\t\t\t} else {\n\t\t\t\tfmt.Printf(\"Fail to send signal %s to process %s\\n\", sigName, process)\n\t\t\t\tos.Exit(1)\n\t\t\t}\n\t\t}\n\t}\n}",
"func (x *CtlCommand) signal(rpcc *rpcclient.RPCClient, sig_name string, processes []string) {\n\tfor _, process := range processes {\n\t\tif process == \"all\" {\n\t\t\treply, err := rpcc.SignalAllProcesses(&rpcclient.SignalAllProcessesArg{\n\t\t\t\tSignal: sig_name,\n\t\t\t})\n\t\t\tif err == nil {\n\t\t\t\tx.showProcessInfo(reply.AllProcessInfo, make(map[string]bool))\n\t\t\t} else {\n\t\t\t\tfmt.Printf(\"Fail to send signal %s to all process\", sig_name)\n\t\t\t\tos.Exit(1)\n\t\t\t}\n\t\t} else {\n\t\t\treply, err := rpcc.SignalProcess(&rpcclient.SignalProcessArg{\n\t\t\t\tProcName: process,\n\t\t\t\tSignal: sig_name,\n\t\t\t})\n\t\t\tif err == nil && reply.Success {\n\t\t\t\tfmt.Printf(\"Succeed to send signal %s to process %s\\n\", sig_name, process)\n\t\t\t} else {\n\t\t\t\tfmt.Printf(\"Fail to send signal %s to process %s\\n\", sig_name, process)\n\t\t\t\tos.Exit(1)\n\t\t\t}\n\t\t}\n\t}\n}",
"func (c *gcsCore) SignalProcess(pid int, options prot.SignalProcessOptions) error {\n\tc.processCacheMutex.Lock()\n\tif _, ok := c.processCache[pid]; !ok {\n\t\tc.processCacheMutex.Unlock()\n\t\treturn gcserr.NewHresultError(gcserr.HrErrNotFound)\n\t}\n\tc.processCacheMutex.Unlock()\n\n\t// Interpret signal value 0 as SIGKILL.\n\t// TODO: Remove this special casing when we are not worried about breaking\n\t// older Windows builds which don't support sending signals.\n\tvar signal syscall.Signal\n\tif options.Signal == 0 {\n\t\tsignal = unix.SIGKILL\n\t} else {\n\t\tsignal = syscall.Signal(options.Signal)\n\t}\n\n\tif err := syscall.Kill(pid, signal); err != nil {\n\t\treturn errors.Wrapf(err, \"failed call to kill on process %d with signal %d\", pid, options.Signal)\n\t}\n\n\treturn nil\n}",
"func (l *Libvirt) DomainSendProcessSignal(Dom Domain, PidValue int64, Signum uint32, Flags uint32) (err error) {\n\tvar buf []byte\n\n\targs := DomainSendProcessSignalArgs {\n\t\tDom: Dom,\n\t\tPidValue: PidValue,\n\t\tSignum: Signum,\n\t\tFlags: Flags,\n\t}\n\n\tbuf, err = encode(&args)\n\tif err != nil {\n\t\treturn\n\t}\n\n\n\t_, err = l.requestStream(295, constants.Program, buf, nil, nil)\n\tif err != nil {\n\t\treturn\n\t}\n\n\treturn\n}",
"func (p *Process) signalToProcess(signal os.Signal) error {\n\tif p.command == nil || p.command.Process == nil {\n\t\terr := errors.Errorf(\"attempt to send signal to non-running process\")\n\t\tp.log.Error(err)\n\t\treturn err\n\t}\n\n\treturn p.command.Process.Signal(signal)\n}",
"func (n *mockAgent) signalProcess(c *Container, processID string, signal syscall.Signal, all bool) error {\n\treturn nil\n}",
"func (k *KACollector) signal(sig syscall.Signal) error {\n\tps, err := process.Processes()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar pid int32\n\tfor _, p := range ps {\n\t\tname, err := p.Name()\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\n\t\tif name == \"keepalived\" {\n\t\t\tpid = p.Pid\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif pid == 0 {\n\t\treturn fmt.Errorf(\"cannot find pid\")\n\t}\n\n\tproc, err := os.FindProcess(int(pid))\n\tif err != nil {\n\t\treturn fmt.Errorf(\"process %v: %v\", pid, err)\n\t}\n\n\terr = proc.Signal(sig)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"signal %v: %v\", sig, err)\n\t}\n\n\ttime.Sleep(100 * time.Millisecond)\n\treturn nil\n}",
"func signal(s os.Signal) {\n\tp, _ := os.FindProcess(os.Getpid())\n\t_ = p.Signal(s)\n\t// Sleep so test won't finish and signal will be received.\n\ttime.Sleep(999)\n}",
"func (s *sidecar) signalProcess() (err error) {\n\tif atomic.LoadInt32(&s.processRunning) == 0 {\n\t\tcmd := exec.Command(s.config.Cmd, strings.Split(s.config.CmdArgs, \" \")...)\n\t\tcmd.Stdout = os.Stdout\n\t\tcmd.Stderr = os.Stderr\n\t\terr = cmd.Start()\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error executing process: %v\\n%v\", s.config.Cmd, err)\n\t\t}\n\t\ts.process = cmd.Process\n\t\tgo s.checkProcessExit()\n\t} else {\n\t\t// Signal to reload certs\n\t\tsig, err := getSignal(s.config.RenewSignal)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error getting signal: %v\\n%v\", s.config.RenewSignal, err)\n\t\t}\n\n\t\terr = s.process.Signal(sig)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error signaling process with signal: %v\\n%v\", sig, err)\n\t\t}\n\t}\n\n\treturn nil\n}",
"func (c *Cond) Signal() {\n\tc.Do(func() {})\n}",
"func (s *countingSemaphore) Signal() {\n\ts.sem <- 1\n}",
"func sendSignal(cmd *exec.Cmd, ch <-chan error, sig syscall.Signal, timeout time.Duration) bool {\n\tif cmd.Process == nil {\n\t\tlog.Debug(\"Not terminating process, it seems to have not started yet\")\n\t\treturn false\n\t}\n\t// This is a bit of a fiddle. We want to wait for the process to exit but only for just so\n\t// long (we do not want to get hung up if it ignores our SIGTERM).\n\tlog.Debug(\"Sending signal %s to -%d\", sig, cmd.Process.Pid)\n\tsyscall.Kill(-cmd.Process.Pid, sig) // Kill the group - we always set one in ExecCommand.\n\n\tselect {\n\tcase <-ch:\n\t\treturn true\n\tcase <-time.After(timeout):\n\t\treturn false\n\t}\n}",
"func (o *V0037JobProperties) SetSignal(v string) {\n\to.Signal = &v\n}",
"func (o *ContainerSignalParams) SetSignal(signal int64) {\n\to.Signal = signal\n}",
"func (q *queue) Signal() {\n\tq.notEmpty.Broadcast()\n}",
"func (o *Wireless) SetSignal(v int32) {\n\to.Signal = &v\n}",
"func PidfdSendSignal(pidfd uintptr, signum unix.Signal) error {\n\t// the runtime OS thread must be locked to safely enter namespaces.\n\truntime.LockOSThread()\n\tdefer runtime.UnlockOSThread()\n\t// setns with pidfd requires at least kernel version 5.8.0\n\terr := unix.Setns(int(pidfd), unix.CLONE_NEWPID)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// pifd_send_signal was introduced in kernel version 5.3\n\t_, _, e1 := unix.Syscall(unix.SYS_PIDFD_SEND_SIGNAL, pidfd, uintptr(signum), 0)\n\tif e1 != 0 {\n\t\treturn e1\n\t}\n\treturn nil\n}",
"func (s *ShutdownManager) SignalShutdown() {\n\ts.ShutdownState = true\n}",
"func NotifySignal(c chan<- Signal, sig ...Signal) error {\n\tif c == nil {\n\t\treturn fmt.Errorf(\"NotifySignal using nil channel\")\n\t}\n\n\tvar pid = os.Getpid()\n\tevts := make([]windows.Handle, 0, len(sig))\n\n\tfor _, s := range sig {\n\t\tname, err := windows.UTF16PtrFromString(eventName(s, pid))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\th, err := windows.CreateEvent(nil, 1, 0, name)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tevts = append(evts, h)\n\t}\n\n\tgo func() {\n\t\tfor {\n\t\t\tev, err := windows.WaitForMultipleObjects(evts, false, windows.INFINITE)\n\n\t\t\tif err != nil {\n\t\t\t\tlog.Printf(\"WaitForMultipleObjects failed: %v\", err)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\toffset := ev - windows.WAIT_OBJECT_0\n\t\t\tc <- sig[offset]\n\t\t\tif err := windows.ResetEvent(evts[offset]); err != nil {\n\t\t\t\tlog.Printf(\"ResetEvent failed: %v\", err)\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn nil\n}",
"func (s *Service) onSignal(sig os.Signal) {\n\tswitch sig {\n\tcase syscall.SIGTERM:\n\t\tfallthrough\n\tcase syscall.SIGINT:\n\t\ts.xlog.Infof(\"received signal %s, exiting...\", sig.String())\n\t\ts.Close()\n\t\tos.Exit(0)\n\t}\n}",
"func Example_signal() {\n\tevents.Listen(&events.Listener{\n\t\tEventName: SignalHello,\n\t\tHandler: func(e events.Event) {\n\t\t\tfmt.Println(e)\n\t\t},\n\t})\n\tevents.Emit(events.Signal(SignalHello))\n\t// Output: Hello world\n}",
"func signal() {\n\tnoEvents = true\n}",
"func (mr *MockOSProcessMockRecorder) Signal(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Signal\", reflect.TypeOf((*MockOSProcess)(nil).Signal), arg0)\n}",
"func PrintSignal(l Logger, signal os.Signal) {\n\tfmt.Println(\"\")\n\tl.Println(strings.ToUpper(signal.String()))\n}",
"func (a *AbstractSessionChannelHandler) OnSignal(\n\t_ uint64,\n\t_ string,\n) error {\n\treturn fmt.Errorf(\"not supported\")\n}",
"func (mr *MockProcessMockRecorder) Signal(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Signal\", reflect.TypeOf((*MockProcess)(nil).Signal), arg0)\n}",
"func (p *promise) Signal(waitChan chan Controller) Promise {\n\tp.Always(func(p2 Controller) {\n\t\twaitChan <- p2\n\t})\n\n\treturn p\n}",
"func (g *Goer) installSignal() {\n\tch := make(chan os.Signal, 1)\n\tsignal.Notify(ch, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT, syscall.SIGUSR1, syscall.SIGUSR2)\n\tfor signalType := range ch {\n\t\tswitch signalType {\n\t\t// stop process in debug mode with Ctrl+c.\n\t\tcase syscall.SIGINT:\n\t\t\tg.stopAll(ch, signalType)\n\t\t// kill signal in bash shell.\n\t\tcase syscall.SIGKILL | syscall.SIGTERM:\n\t\t\tg.stopAll(ch, signalType)\n\t\t// graceful reload\n\t\tcase syscall.SIGQUIT:\n\t\t\tsignal.Stop(ch)\n\t\t\tg.reload()\n\t\t\tos.Exit(0)\n\t\t}\n\t}\n}",
"func doSignal() {\n\tstop := signals.NewStopChannel()\n\t<-stop\n\tlog.Println(\"Exit signal received. Shutting down.\")\n\tos.Exit(0)\n}",
"func (s *Signal) SendSig(recv, sender Ki, sig int64, data any) {\n\ts.Mu.RLock()\n\tfun := s.Cons[recv]\n\ts.Mu.RUnlock()\n\tif fun != nil {\n\t\tfun(recv, sender, sig, data)\n\t}\n}",
"func (e *Exit) Signal() {\n\te.once.Do(func() {\n\t\tclose(e.c)\n\n\t\te.mtx.Lock()\n\t\texits := e.exits\n\t\te.exits = nil\n\t\te.mtx.Unlock()\n\n\t\tfor i := len(exits); i > 0; i-- {\n\t\t\texits[i-1]()\n\t\t}\n\t})\n}",
"func Signal(val string) error {\n\t_, err := signals.Parse(val)\n\tif err != nil {\n\t\treturn err //nolint: wrapcheck // error string formed in external package is styled correctly\n\t}\n\n\treturn nil\n}",
"func (k Keeper) SetSignal(ctx sdk.Context, protocol uint64, address string) {\n\tkvStore := ctx.KVStore(k.storeKey)\n\tkvStore.Set(types.GetSignalKey(protocol, address), k.cdc.MustMarshalBinaryLengthPrefixed(true))\n}",
"func Signal(signs ...os.Signal) Option {\n\treturn func(o *options) { o.signs = signs }\n}",
"func Kill(sig os.Signal) {\n go func() {\n signals.ch <- sig\n }()\n}",
"func (srv *Server) handleSignal(msg *Message) {\n\tsrv.opsLock.Lock()\n\t// Ignore incoming signals during shutdown\n\tif srv.shutdown {\n\t\tsrv.opsLock.Unlock()\n\t\treturn\n\t}\n\tsrv.currentOps++\n\tsrv.opsLock.Unlock()\n\n\tsrv.hooks.OnSignal(context.WithValue(context.Background(), Msg, *msg))\n\n\t// Mark signal as done and shutdown the server if scheduled and no ops are left\n\tsrv.opsLock.Lock()\n\tsrv.currentOps--\n\tif srv.shutdown && srv.currentOps < 1 {\n\t\tclose(srv.shutdownRdy)\n\t}\n\tsrv.opsLock.Unlock()\n}",
"func X__sysv_signal(tls *TLS, signum int32, handler uintptr) {\n\tch := make(chan os.Signal)\n\tgo func() {\n\t\t<-ch\n\t\t(*(*func(*TLS, int32))(unsafe.Pointer(&handler)))(tls, signum)\n\t}()\n\tsignal.Notify(ch, syscall.Signal(signum))\n}",
"func RaiseSignal(pid int, sig Signal) error {\n\tname, err := windows.UTF16PtrFromString(eventName(sig, pid))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tev, err := windows.OpenEvent(windows.EVENT_MODIFY_STATE, false, name)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefer windows.CloseHandle(ev)\n\treturn windows.SetEvent(ev)\n}",
"func (em *EventMgr) SendSig(recv, sender ki.Ki, evi oswin.Event) {\n\tet := evi.Type()\n\tfor pri := HiPri; pri < EventPrisN; pri++ {\n\t\tem.EventSigs[et][pri].SendSig(recv, sender, int64(et), evi)\n\t}\n}",
"func OnSignal(handler func(os.Signal), signals ...os.Signal) {\n\tif handler == nil || len(signals) == 0 {\n\t\treturn\n\t}\n\n\tsh := &sigHandler{\n\t\tsignals: signals,\n\t\ttarget: handler,\n\t}\n\tsh.Start()\n}",
"func (srv *Server) handleSignal(msg *Message) {\n\tsrv.hooks.OnSignal(context.WithValue(context.Background(), Msg, *msg))\n}",
"func sendSignal(status string) {\n\tcf := cloudformation.New(session.New(&aws.Config{Region: ®ion}))\n\tparams := &cloudformation.SignalResourceInput{\n\t\tLogicalResourceId: &resource,\n\t\tStackName: &stack,\n\t\tStatus: &status,\n\t\tUniqueId: &uniqueID,\n\t}\n\t_, err := cf.SignalResource(params)\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to signal CloudFormation: %q.\\n\", err.Error())\n\t}\n\tlog.Printf(\"Sent a %q signal to CloudFormation.\\n\", status)\n\treturn\n}",
"func (t *Broadcaster) Signal(ctx context.Context) error {\n\tif !t.mutex.RTryLock(ctx) {\n\t\treturn context.DeadlineExceeded\n\t}\n\tdefer t.mutex.RUnlock()\n\n\tselect {\n\tcase <-ctx.Done():\n\t\treturn context.DeadlineExceeded\n\tcase t.channel <- struct{}{}:\n\tdefault:\n\t}\n\n\treturn nil\n}",
"func (g *Pin) Notify(sig ...os.Signal) {\n\tc := make(chan os.Signal)\n\tsignal.Notify(c, sig...)\n\tgo func() {\n\t\tn := 0\n\t\tfor sig := range c {\n\t\t\tif n == 1 {\n\t\t\t\tpanic(\"got too many signals\")\n\t\t\t}\n\t\t\tg.Pull(fmt.Errorf(\"Recieved signal %s\", sig))\n\t\t\tn++\n\t\t}\n\t}()\n}",
"func (o *TechnicalAnalysis) SetSignal(v string) {\n\to.Signal = &v\n}",
"func (s WorkerSemaphore) Signal(n int) {\n\te := empty{}\n\tfor i := 0; i < n; i++ {\n\t\ts.permits <- e\n\t}\n}",
"func TestSignals(t *testing.T) {\n\tseq := make(chan int)\n\twait := make(chan int)\n\tfreq := make(chan time.Time)\n\n\tqueue := &WaitQueue{\n\t\tsem: new(sync.WaitGroup),\n\t\tseq: seq,\n\t\twait: wait,\n\t}\n\n\t// begin listening\n\tgo waitListen(queue, freq, seq)\n\n\t// send a tick, this should start a call to Poll()\n\tfreq <- time.Now()\n\n\t// when that call starts, we should get `1` on the sequence channel\n\tval := <-seq\n\trequire.Equal(t, val, 1)\n\n\t// send a signal, this should start the graceful exit\n\tsignals <- os.Interrupt\n\n\t// tell Poll() that it can exit\n\twait <- 1\n\n\t// first Poll() should exit\n\tval = <-seq\n\trequire.Equal(t, val, 2)\n\n\t// then Listen() should exit\n\tval = <-seq\n\trequire.Equal(t, val, 3)\n}",
"func StartSignal(myPhone string, targetGroupID string, writer io.Writer) (*exec.Cmd, io.ReadCloser) {\n\tcmd := exec.Command(\"signal-cli\", \"-u\", myPhone, \"receive\", \"-t\", \"-1\", \"--json\")\n\tstdout, err := cmd.StdoutPipe()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tif err := cmd.Start(); err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\treturn cmd, stdout\n}",
"func TestSignal(t *testing.T) {\n\t// Ask for SIGHUP\n\tc := make(chan os.Signal, 1)\n\tNotify(c, syscall.SIGHUP)\n\tdefer Stop(c)\n\n\t// Send this process a SIGHUP\n\tt.Logf(\"sighup...\")\n\tsyscall.Kill(syscall.Getpid(), syscall.SIGHUP)\n\twaitSig(t, c, syscall.SIGHUP)\n\n\t// Ask for everything we can get. The buffer size has to be\n\t// more than 1, since the runtime might send SIGURG signals.\n\t// Using 10 is arbitrary.\n\tc1 := make(chan os.Signal, 10)\n\tNotify(c1)\n\t// Stop relaying the SIGURG signals. See #49724\n\tReset(syscall.SIGURG)\n\tdefer Stop(c1)\n\n\t// Send this process a SIGWINCH\n\tt.Logf(\"sigwinch...\")\n\tsyscall.Kill(syscall.Getpid(), syscall.SIGWINCH)\n\twaitSigAll(t, c1, syscall.SIGWINCH)\n\n\t// Send two more SIGHUPs, to make sure that\n\t// they get delivered on c1 and that not reading\n\t// from c does not block everything.\n\tt.Logf(\"sighup...\")\n\tsyscall.Kill(syscall.Getpid(), syscall.SIGHUP)\n\twaitSigAll(t, c1, syscall.SIGHUP)\n\tt.Logf(\"sighup...\")\n\tsyscall.Kill(syscall.Getpid(), syscall.SIGHUP)\n\twaitSigAll(t, c1, syscall.SIGHUP)\n\n\t// The first SIGHUP should be waiting for us on c.\n\twaitSig(t, c, syscall.SIGHUP)\n}",
"func setupSignal(d chan int) {\n\tc := make(chan os.Signal, 1)\n\tsignal.Notify(c, os.Interrupt)\n\tgo func() {\n\t\tfor sig := range c {\n\t\t\tfmt.Printf(\"\\nCaptured signal %v\\n\", sig)\n\t\t\tfmt.Printf(\"Output in %v\\n\", \"proc.log\")\n\t\t\tos.Exit(1) // Will exit immediately.\n\t\t\td <- 0\n\t\t\tos.Exit(1)\n\t\t}\n\t}()\n\n}",
"func SignalSelf(sig os.Signal) error {\n\tself, err := os.FindProcess(os.Getpid())\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn self.Signal(sig)\n}",
"func (a *App) SignalShutdown() {\n\ta.shutdown <- syscall.SIGTERM\n}",
"func (a *App) SignalShutdown() {\n\ta.shutdown <- syscall.SIGTERM\n}",
"func (re RunError) Signal() string {\n\treturn re.signal\n}",
"func (m *MockProcess) Signal(arg0 os.Signal) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Signal\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func (s *Supervisor) SetShutdownSignal(signal chan struct{}) {\n\tif signal != nil {\n\t\ts.shutdown = signal\n\t}\n}",
"func Signal(sigs ...os.Signal) Option {\n\treturn func(o *options) { o.sigs = sigs }\n}",
"func AddSignal(sig ...os.Signal) {\n\tsignal.Notify(sigchan, sig...)\n}",
"func (h *DriverHandle) SetKillSignal(signal string) {\n\th.killSignal = signal\n}",
"func (m *MockOSProcess) Signal(arg0 os.Signal) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Signal\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}",
"func (r *ResetReady) Signal() {\n\tr.lock.Lock()\n\tr.ready.Signal()\n\tr.lock.Unlock()\n}",
"func (m *Manager) SendSignals() {\n\tnow := time.Now()\n\tfor m.notifications.Len() > 0 {\n\t\tpeek := m.notifications.Peek()\n\t\tif now.Before(peek.timestamp) {\n\t\t\treturn\n\t\t}\n\t\tnote := heap.Pop(&m.notifications).(*Notification)\n\t\tnote.sensor.Notify(note.signal)\n\t}\n}",
"func (lc *Closer) Signal() {\n\tlc.cancel()\n}",
"func initSigHandle(c *os.Process) {\n\tLog(robot.Info, \"Starting pid 1 signal handler\")\n\tsigs := make(chan os.Signal, 1)\n\n\tsignal.Notify(sigs, unix.SIGINT, unix.SIGTERM)\n\n\tfor {\n\t\tselect {\n\t\tcase sig := <-sigs:\n\t\t\tsignal.Stop(sigs)\n\t\t\tLog(robot.Info, \"Caught signal '%s', propagating to child pid %d\", sig, c.Pid)\n\t\t\tc.Signal(sig)\n\t\t}\n\t}\n}",
"func HandleSignal(c chan os.Signal, arg interface{}) {\n\t// Block until a signal is received.\n\tfor {\n\t\ts := <-c\n\t\tLog.Info(\"Get a signal %s\", s.String())\n\t\tswitch s {\n\t\tcase syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGSTOP, syscall.SIGINT:\n\t\t\t// Exit\n\t\t\treturn\n\t\tcase syscall.SIGHUP:\n\t\t\t// TODO\n\t\t\t// Reload\n\t\t\t// return\n\t\tdefault:\n\t\t\treturn\n\t\t}\n\t}\n}",
"func handleSignal(onSignal func()) {\n\tsigChan := make(chan os.Signal, 10)\n\tsignal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM, syscall.SIGPIPE)\n\tfor signo := range sigChan {\n\t\tswitch signo {\n\t\tcase syscall.SIGINT, syscall.SIGTERM:\n\t\t\tlog.Infof(\"received signal %d (%v)\", signo, signo)\n\t\t\tonSignal()\n\t\t\treturn\n\t\tcase syscall.SIGPIPE:\n\t\t\t// By default systemd redirects the stdout to journald. When journald is stopped or crashes we receive a SIGPIPE signal.\n\t\t\t// Go ignores SIGPIPE signals unless it is when stdout or stdout is closed, in this case the agent is stopped.\n\t\t\t// We never want the agent to stop upon receiving SIGPIPE, so we intercept the SIGPIPE signals and just discard them.\n\t\tdefault:\n\t\t\tlog.Warnf(\"unhandled signal %d (%v)\", signo, signo)\n\t\t}\n\t}\n}",
"func (c *Conn) WriteSignal(m3 messages.M3UA) (n int, err error) {\n\tn = m3.MarshalLen()\n\tbuf := make([]byte, n)\n\tif err := m3.MarshalTo(buf); err != nil {\n\t\treturn 0, fmt.Errorf(\"failed to create %T: %w\", m3, err)\n\t}\n\n\tsctpInfo := c.sctpInfo\n\tif m3.MessageClass() != messages.MsgClassTransfer {\n\t\tsctpInfo.Stream = 0\n\t}\n\n\tnn, err := c.sctpConn.SCTPWrite(buf, sctpInfo)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"failed to write M3UA: %w\", err)\n\t}\n\n\tn += nn\n\treturn\n}",
"func HandleSignal(b bool) Option {\n\treturn func(o *Options) {\n\t\to.Signal = b\n\t}\n}",
"func (d *Driver) SignalTask(taskID string, signal string) error {\n\thandle, ok := d.tasks.Get(taskID)\n\tif !ok {\n\t\treturn drivers.ErrTaskNotFound\n\t}\n\n\treturn d.podman.ContainerKill(d.ctx, handle.containerID, signal)\n}",
"func handleSignal(env *Environment) {\n\tch := make(chan os.Signal, 2)\n\tsignal.Notify(ch, stopSignals...)\n\n\tgo func() {\n\t\ts := <-ch\n\t\tdelay := getDelaySecondsFromEnv()\n\t\tlog.Warn(\"well: got signal\", map[string]interface{}{\n\t\t\t\"signal\": s.String(),\n\t\t\t\"delay\": delay,\n\t\t})\n\t\ttime.Sleep(time.Duration(delay) * time.Second)\n\t\tenv.Cancel(errSignaled)\n\t}()\n}",
"func handlerSignal() {\n\texpectedSignals := make(chan os.Signal, 1)\n\tdoneSignals := make(chan bool, 1)\n\n\t// register channel to receive 2 signals\n\tsignal.Notify(expectedSignals, syscall.SIGTERM, syscall.SIGINT)\n\n\t// this routine is blocking, i.e. when it gets one signal it prints it and notifies the program that it can finish\n\tgo func() {\n\t\tsig := <-expectedSignals\n\t\tfmt.Println()\n\t\tfmt.Println(sig.String())\n\t\tdoneSignals <- true\n\t}()\n\n\tfmt.Println(\"awaiting signal...\")\n\n\t<-doneSignals\n\n\tfmt.Println(\"exiting...\")\n}",
"func writeCrashSignal(crashSig string) {\n\tif err := ioutil.WriteFile(crashSig, []byte{}, 0644); err != nil {\n\t\tlog.Fatalf(\"failed to write crash signal: %v\", err)\n\t}\n}",
"func (wc *workflowClient) SignalWorkflow(ctx context.Context, workflowID string, runID string, signalName string, arg interface{}) error {\n\tinput, err := encodeArg(wc.dataConverter, arg)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn signalWorkflow(ctx, wc.workflowService, wc.identity, wc.domain, workflowID, runID, signalName, input, wc.featureFlags)\n}",
"func (t *SignalTable)StartSignalHandle() {\n go t.signalHandle()\n}",
"func (a *Agent) trapSignal() {\n\tch := make(chan os.Signal, 1)\n\tsignal.Notify(ch, os.Interrupt)\n\tsignal.Notify(ch, syscall.SIGTERM)\n\tgo func() {\n\t\t<-ch\n\t\tlog.Println(\"\\nagent received SIGTERM signal\")\n\t\ta.stop()\n\t\tos.Exit(1)\n\t}()\n}",
"func Kill(pid int, signal syscall.Signal) error {\n\treturn syscall.Kill(pid, signal)\n}",
"func Kill(pid int, signal syscall.Signal) error {\n\treturn syscall.Kill(pid, signal)\n}",
"func TrapSignal(cleanupFunc func()) {\n\tsigs := make(chan os.Signal, 1)\n\tsignal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)\n\n\tgo func() {\n\t\tsig := <-sigs\n\n\t\tif cleanupFunc != nil {\n\t\t\tcleanupFunc()\n\t\t}\n\t\texitCode := 128\n\n\t\tswitch sig {\n\t\tcase syscall.SIGINT:\n\t\t\texitCode += int(syscall.SIGINT)\n\t\tcase syscall.SIGTERM:\n\t\t\texitCode += int(syscall.SIGTERM)\n\t\t}\n\n\t\tos.Exit(exitCode)\n\t}()\n}",
"func (s *SFUSignalBridge) Signal(sstream sfu.SFU_SignalServer) error {\n\tvar peer *Peer\n\tvar cstream sfu.SFU_SignalClient = nil\n\treqCh := make(chan *sfu.SignalRequest)\n\trepCh := make(chan *sfu.SignalReply)\n\terrCh := make(chan error)\n\n\tdefer func() {\n\t\tif cstream != nil {\n\t\t\terr := cstream.CloseSend()\n\t\t\tif err != nil {\n\t\t\t\tlog.Errorf(\"cstream.CloseSend() failed %v\", err)\n\t\t\t}\n\t\t}\n\t\tclose(errCh)\n\t\tlog.Infof(\"SFU.Signal loop done\")\n\t}()\n\n\tgo func() {\n\t\tdefer close(reqCh)\n\t\tfor {\n\t\t\treq, err := sstream.Recv()\n\t\t\tif err != nil {\n\t\t\t\tlog.Errorf(\"Singal server stream.Recv() err: %v\", err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\treqCh <- req\n\t\t}\n\t}()\n\n\tfor {\n\t\tselect {\n\t\tcase err := <-errCh:\n\t\t\treturn err\n\t\tcase req, ok := <-reqCh:\n\n\t\t\tif !ok {\n\t\t\t\treturn io.EOF\n\t\t\t}\n\n\t\t\tif cstream != nil {\n\t\t\t\terr := cstream.Send(req)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Errorf(\"cstream.Send(req) failed %v\", err)\n\t\t\t\t}\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tswitch payload := req.Payload.(type) {\n\t\t\tcase *sfu.SignalRequest_Join:\n\t\t\t\t//TODO: Check if you have permission to connect to the SFU node\n\t\t\t\tr := s.BizServer.getRoom(payload.Join.Sid)\n\t\t\t\tif r != nil {\n\t\t\t\t\tpeer = r.getPeer(payload.Join.Uid)\n\t\t\t\t\tif peer != nil {\n\t\t\t\t\t\t// Use nats-grpc or grpc\n\t\t\t\t\t\t// TODO: change to util.NewGRPCClientConnForNode.\n\t\t\t\t\t\tcli := sfu.NewSFUClient(nrpc.NewClient(s.BizServer.nc, r.sfunid))\n\t\t\t\t\t\tvar err error\n\t\t\t\t\t\tcstream, err = cli.Signal(context.Background())\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\tlog.Errorf(\"Singal cli.Signal() err: %v\", err)\n\t\t\t\t\t\t\treturn err\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tgo func() {\n\t\t\t\t\t\t\tdefer close(repCh)\n\t\t\t\t\t\t\tfor {\n\t\t\t\t\t\t\t\treply, err := cstream.Recv()\n\t\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\t\tlog.Errorf(\"Singal client stream.Recv() err: %v\", err)\n\t\t\t\t\t\t\t\t\treturn\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\trepCh <- reply\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}()\n\n\t\t\t\t\t\terr = cstream.Send(req)\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\treturn fmt.Errorf(\"cstream.Send(req) failed %v\", err)\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn fmt.Errorf(\"peer [%v] not found\", payload.Join.Uid)\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\treturn fmt.Errorf(\"session [%v] not found\", payload.Join.Sid)\n\t\t\t\t}\n\t\t\t}\n\t\tcase reply, ok := <-repCh:\n\t\t\tif ok {\n\t\t\t\terr := sstream.Send(reply)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"sstream.Send(reply) failed %v\", err)\n\t\t\t\t}\n\t\t\t\tbreak\n\t\t\t}\n\t\t\treturn io.EOF\n\t\t}\n\t}\n}",
"func LogSignal(sig os.Signal) {\n\tDebug(\"Encoutered signal: \", sig.String())\n\t// Handle exit signals.\n\tif _, found := ExitSignals[sig]; found {\n\t\tDebug(\"Exiting from signal: \", sig.String())\n\t\t// If we get here, an exit signal was seen. We must handle this by forcing\n\t\t// the program to exit. Without this, the program would ignore all exit signals\n\t\t// except SIGKILL.\n\t\tStop()\n\t}\n}",
"func signalHandler(server net.PacketConn) {\n\tsigCh := make(chan os.Signal, 1)\n\tsignal.Notify(sigCh, os.Interrupt, syscall.SIGTERM, syscall.SIGHUP)\n\tfor {\n\t\tsig := <-sigCh\n\t\tswitch sig {\n\t\tcase syscall.SIGHUP:\n\t\t\tlog.Info(\"SIGHUP received, reloading route map\")\n\t\t\tif err := routeMapReload(); err != nil {\n\t\t\t\tlog.Error(err)\n\t\t\t}\n\t\tcase os.Interrupt, syscall.SIGTERM:\n\t\t\tlog.Info(sig)\n\t\t\tserver.Close()\n\t\t\treturn\n\t\t}\n\t}\n}",
"func (rc *SignalCommand) Execute(args []string) error {\n\tsigName, processes := args[0], args[1:]\n\tctlCommand.signal(ctlCommand.createRPCClient(), sigName, processes)\n\treturn nil\n}",
"func (e *AutoResetEvent) Signal() {\n\te.l.Lock()\n\tif len(e.c) == 0 {\n\t\te.c <- struct{}{}\n\t}\n\te.l.Unlock()\n}",
"func (em *EventMgr) SendEventSignal(evi oswin.Event, popup bool) {\n\tet := evi.Type()\n\tif et > oswin.EventTypeN || et < 0 {\n\t\treturn // can't handle other types of events here due to EventSigs[et] size\n\t}\n\n\tem.EventMu.Lock()\n\n\tsend := em.Master.EventTopNode()\n\n\t// fmt.Printf(\"got event type: %v\\n\", et)\n\tfor pri := HiPri; pri < EventPrisN; pri++ {\n\t\tif pri != LowRawPri && evi.IsProcessed() { // someone took care of it\n\t\t\tcontinue\n\t\t}\n\n\t\t// we take control of signal process to sort elements by depth, and\n\t\t// dispatch to inner-most one first\n\t\trvs := make(WinEventRecvList, 0, 10)\n\n\t\tesig := &em.EventSigs[et][pri]\n\n\t\tesig.Mu.RLock()\n\t\tfor recv, fun := range esig.Cons {\n\t\t\tif recv.IsDestroyed() {\n\t\t\t\t// fmt.Printf(\"ki.Signal deleting destroyed receiver: %v type %T\\n\", recv.Name(), recv)\n\t\t\t\tdelete(esig.Cons, recv)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif recv.IsDeleted() {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tesig.Mu.RUnlock()\n\t\t\tcont := em.SendEventSignalFunc(evi, popup, &rvs, recv, fun)\n\t\t\tesig.Mu.RLock()\n\t\t\tif !cont {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tesig.Mu.RUnlock()\n\n\t\tif len(rvs) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\t// deepest first\n\t\tsort.Slice(rvs, func(i, j int) bool {\n\t\t\treturn rvs[i].Data > rvs[j].Data\n\t\t})\n\n\t\tfor _, rr := range rvs {\n\t\t\tswitch evi.(type) {\n\t\t\tcase *mouse.DragEvent:\n\t\t\t\tif em.Dragging == nil {\n\t\t\t\t\trr.Recv.SetFlag(int(NodeDragging)) // PROVISIONAL!\n\t\t\t\t}\n\t\t\t}\n\t\t\tem.EventMu.Unlock()\n\t\t\trr.Call(send, int64(et), evi) // could call further event loops..\n\t\t\tem.EventMu.Lock()\n\t\t\tif pri != LowRawPri && evi.IsProcessed() { // someone took care of it\n\t\t\t\tswitch evi.(type) { // only grab events if processed\n\t\t\t\tcase *mouse.DragEvent:\n\t\t\t\t\tif em.Dragging == nil {\n\t\t\t\t\t\tem.Dragging = rr.Recv\n\t\t\t\t\t\trr.Recv.SetFlag(int(NodeDragging))\n\t\t\t\t\t}\n\t\t\t\tcase *mouse.ScrollEvent:\n\t\t\t\t\tif em.Scrolling == nil {\n\t\t\t\t\t\tem.Scrolling = rr.Recv\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak\n\t\t\t} else {\n\t\t\t\tswitch evi.(type) {\n\t\t\t\tcase *mouse.DragEvent:\n\t\t\t\t\tif em.Dragging == nil {\n\t\t\t\t\t\trr.Recv.ClearFlag(int(NodeDragging)) // clear provisional\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tem.EventMu.Unlock()\n}",
"func (a *Application) AwaitSignal() {\n\tc := make(chan os.Signal, 1)\n\tsignal.Reset(syscall.SIGTERM, syscall.SIGINT)\n\tsignal.Notify(c, syscall.SIGTERM, syscall.SIGINT)\n\n\ts := <-c\n\ta.logger.Info(\"receive a signal\", zap.String(\"signal\", s.String()))\n\tif a.httpServer != nil {\n\t\tif err := a.httpServer.Stop(); err != nil {\n\t\t\ta.logger.Warn(\"stop http server error\", zap.Error(err))\n\t\t}\n\t}\n\tif a.grpcServer != nil {\n\t\tif err := a.grpcServer.Stop(); err != nil {\n\t\t\ta.logger.Warn(\"stop grpc server error\", zap.Error(err))\n\t\t}\n\t}\n\n\tos.Exit(0)\n}",
"func ForwardSignals(cmd *exec.Cmd, logFn func(error), shutdownCh <-chan struct{}) {\n\tgo func() {\n\t\tsignalCh := make(chan os.Signal, 10)\n\t\tsignal.Notify(signalCh, forwardSignals...)\n\t\tdefer signal.Stop(signalCh)\n\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase sig := <-signalCh:\n\t\t\t\tif err := cmd.Process.Signal(sig); err != nil {\n\t\t\t\t\tlogFn(fmt.Errorf(\"failed to send signal %q: %v\", sig, err))\n\t\t\t\t}\n\n\t\t\tcase <-shutdownCh:\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n}",
"func WaitSignal(stop chan struct{}) {\n\tsigs := make(chan os.Signal, 1)\n\tsignal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)\n\t<-sigs\n\tglog.Warningln(\"Finishing with signal handling.\")\n\tclose(stop)\n}"
] | [
"0.83232",
"0.8034805",
"0.7861894",
"0.74374187",
"0.7412237",
"0.7406529",
"0.7346595",
"0.7263789",
"0.7231644",
"0.7139125",
"0.7114482",
"0.70723146",
"0.69304484",
"0.6903541",
"0.6882808",
"0.6860277",
"0.672449",
"0.6675943",
"0.66536725",
"0.66462773",
"0.65615785",
"0.6388967",
"0.63263637",
"0.6281123",
"0.6151132",
"0.60918707",
"0.6052646",
"0.6044288",
"0.6027649",
"0.59939134",
"0.5952206",
"0.59139156",
"0.5902753",
"0.58987415",
"0.58297926",
"0.57796067",
"0.57676214",
"0.5745831",
"0.57200825",
"0.57168776",
"0.5676381",
"0.56523365",
"0.56432337",
"0.5631405",
"0.56308323",
"0.56076163",
"0.558622",
"0.5580091",
"0.5566486",
"0.55657864",
"0.5563368",
"0.5559864",
"0.5556962",
"0.5548634",
"0.55471194",
"0.5546349",
"0.55350876",
"0.55313045",
"0.55242306",
"0.55009216",
"0.550066",
"0.54906493",
"0.5490136",
"0.54842776",
"0.54768544",
"0.54768544",
"0.5470025",
"0.54513276",
"0.5441005",
"0.5439754",
"0.5390488",
"0.5383804",
"0.5379123",
"0.53716624",
"0.5371291",
"0.5312582",
"0.53075886",
"0.5304986",
"0.5304564",
"0.5271448",
"0.526366",
"0.526329",
"0.52506644",
"0.5249129",
"0.52454966",
"0.52313447",
"0.5225182",
"0.5224539",
"0.51959956",
"0.51959956",
"0.5189482",
"0.5176516",
"0.5169386",
"0.51665074",
"0.5161765",
"0.5158466",
"0.5145282",
"0.5129188",
"0.51185626",
"0.50910085"
] | 0.7709792 | 3 |
operate on the accumulation buffer | func Accum(op uint32, value float32) {
C.glowAccum(gpAccum, (C.GLenum)(op), (C.GLfloat)(value))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (d *pmac) processBuffer() {\n\txor(d.offset[:], d.l[bits.TrailingZeros(d.ctr+1)][:])\n\txor(d.buf[:], d.offset[:])\n\td.ctr++\n\n\td.buf.Encrypt(d.c)\n\txor(d.digest[:], d.buf[:])\n\td.pos = 0\n}",
"func Accumulate(data []float64, initValue float64, f Operation) float64 {\r\n\tres := initValue\r\n\tfor _, v := range data {\r\n\t\tres = f(res, v)\r\n\t}\r\n\treturn res\r\n}",
"func Accum(op uint32, value float32) {\n\tsyscall.Syscall(gpAccum, 2, uintptr(op), uintptr(math.Float32bits(value)), 0)\n}",
"func Accum(op uint32, value float32) {\n C.glowAccum(gpAccum, (C.GLenum)(op), (C.GLfloat)(value))\n}",
"func (s *Scanner) accum(r rune, valid func(rune) bool) {\n\ts.buf.Reset()\n\tfor {\n\t\ts.buf.WriteRune(r)\n\t\tr = s.read()\n\t\tif r == -1 {\n\t\t\treturn\n\t\t}\n\t\tif !valid(r) {\n\t\t\treturn\n\t\t}\n\t}\n}",
"func sum(arr []int, c chan int){\n\tsum := 0\n\tfor _, v:= range arr{\n\t\tsum += v\n\t}\n\tc <- sum\n}",
"func (mm *MetricManager) accumulate(metric config.CarbonMetric) {\n\tconfig.G.Log.System.LogDebug(\"MetricManager::accumulate %s=%v\", metric.Path, metric.Value)\n\n\t// Locate the metric in the map.\n\tvar currentRollup *rollup\n\tvar found bool\n\tif currentRollup, found = mm.byPath[metric.Path]; !found {\n\n\t\t// Initialize, and insert the new rollup into both maps.\n\t\tcurrentRollup = mm.addToMaps(metric.Path)\n\n\t\t// Send the entry off for writing to the path index.\n\t\tconfig.G.Channels.IndexStore <- metric\n\t}\n\n\t// Apply the incoming metric to each rollup bucket.\n\tfor i, v := range currentRollup.value {\n\t\tcurrentRollup.value[i] = mm.applyMethod(\n\t\t\tmm.rollup[currentRollup.expr].Method, v, metric.Value, currentRollup.count[i])\n\t\tcurrentRollup.count[i]++\n\t}\n}",
"func (m *metricMysqlBufferPoolOperations) emit(metrics pmetric.MetricSlice) {\n\tif m.settings.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func (c *StepLookbackAccumulator) BufferStep() {\n\t// Update earliest lookback then remove stale values for the next\n\t// iteration of the datapoint set.\n\tc.earliestLookback = c.earliestLookback.Add(c.stepSize)\n\tif len(c.datapoints) == 0 {\n\t\tc.unconsumed = append(c.unconsumed, nil)\n\t\treturn\n\t}\n\n\taccumulated := make([]xts.Datapoint, len(c.datapoints))\n\tcopy(accumulated, c.datapoints)\n\tc.datapoints = c.datapoints[:0]\n\tc.unconsumed = append(c.unconsumed, accumulated)\n}",
"func (phStats *passwordHasherStats) accumulateStats() {\n\tphStats.logger.Print(\"Collecting stats...\")\n\tok := true\n\tfor ok {\n\t\tvar ms microseconds\n\t\tif ms, ok = <-phStats.queue; ok {\n\t\t\tphStats.logger.Printf(\"Elapsed time: %dms\", ms)\n\n\t\t\t// block reads while appending/resizing/reallocating\n\t\t\tphStats.lock.Lock()\n\t\t\tphStats.times = append(phStats.times, ms)\n\t\t\tphStats.lock.Unlock()\n\t\t}\n\t}\n\tphStats.logger.Print(\"Done collecting stats\")\n}",
"func (r *ringBufferProvider) Feed(elem interface{}) {\n\trbs := uint64(len(r.ringBuffer))\n\tfor i := 0; ; i++ {\n\t\tringRead := atomic.LoadUint64(&r.ringRead)\n\t\tringWrite := atomic.LoadUint64(&r.ringWrite)\n\t\tringWAlloc := atomic.LoadUint64(&r.ringWAlloc)\n\t\tringWriteNextVal := ringWrite + 1\n\t\tif ringWrite == ringWAlloc && ringWriteNextVal-uint64(len(r.ringBuffer)) != ringRead {\n\t\t\tif atomic.CompareAndSwapUint64(&r.ringWAlloc, ringWAlloc, ringWriteNextVal) {\n\t\t\t\tr.ringBuffer[ringWrite%rbs] = elem\n\t\t\t\tif !atomic.CompareAndSwapUint64(&r.ringWrite, ringWrite, ringWriteNextVal) {\n\t\t\t\t\tpanic(\"failed to commit allocated write in ring-buffer\")\n\t\t\t\t}\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\truntime.Gosched()\n\t\ttime.Sleep(time.Duration(i) * time.Nanosecond) // notice nanos vs micros\n\t}\n}",
"func (b *buffer) inc(n int) {\n\tb.offset += n\n}",
"func (q Query) Accumulate(a Accumulator, starting interface{}) (interface{}, error) {\n\tstartTime := time.Now()\n\tlog.WithField(\"startTime\", startTime).Info(\"Starting accumulator\")\n\tdefer log.WithFields(log.Fields{\n\t\t\"duration\": time.Since(startTime),\n\t\t\"finished\": time.Now(),\n\t}).Info(\"Finished accumulator\")\n\n\tbuffer := q.MakeBuffer()\n\n\tcurrent := starting\n\tfor q.result.Next() {\n\t\tlog.Info(\"Starting next accumulator iteration\")\n\n\t\terr := q.result.Scan(buffer...)\n\t\tif err != nil {\n\t\t\tlog.WithField(\"message\", err.Error()).Errorf(\"Failed to scan query results: %v\", err)\n\t\t\treturn nil, err\n\t\t}\n\n\t\tcurrent, err = a.Accumulate(current, buffer)\n\t\tif err != nil {\n\t\t\tlog.WithField(\"message\", err.Error()).Errorf(\"Error calling accumulator: %v\", err)\n\t\t\treturn starting, err\n\t\t}\n\t}\n\n\treturn current, nil\n}",
"func looper(number int) (result []int, sumChan int) {\n\tx := MakeArr(number)\n\n\t//making buffered channel\n\tchanresult := make(chan int, 0)\n\tvar acc int\n\n\t// close channel when done\n\tdefer close(chanresult)\n\n\t// creating channels to calculate the total sum of each array value\n\tfor acc = 0; acc < len(x); acc++ {\n\t\tgo summer(x[acc], chanresult)\n\t\tx := <-chanresult\n\t\tresult = append(result, x)\n\t\tsumChan += x\n\t}\n\n\treturn\n}",
"func (m *metricFlinkJvmMemoryMappedTotalCapacity) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func (r *IntegerMovingAverageReducer) Emit() []FloatPoint {\n\tif len(r.buf) != cap(r.buf) {\n\t\treturn []FloatPoint{}\n\t}\n\treturn []FloatPoint{\n\t\t{\n\t\t\tValue: float64(r.sum) / float64(len(r.buf)),\n\t\t\tTime: r.time,\n\t\t\tAggregated: uint32(len(r.buf)),\n\t\t},\n\t}\n}",
"func (a *Avg) Accum(_ io.TimeBucketKey, argMap *functions.ArgumentMap, cols io.ColumnInterface,\n) (*io.ColumnSeries, error) {\n\tif cols.Len() == 0 {\n\t\treturn a.Output(), nil\n\t}\n\tinputColDSV := argMap.GetMappedColumns(requiredColumns[0].Name)\n\tinputColName := inputColDSV[0].Name\n\tinputCol, err := uda.ColumnToFloat32(cols, inputColName)\n\tif err != nil {\n\t\tlog.Debug(\"COLS: \", cols)\n\t\treturn nil, err\n\t}\n\n\tfor _, value := range inputCol {\n\t\ta.Avg += float64(value)\n\t\ta.Count++\n\t}\n\treturn a.Output(), nil\n}",
"func computeValues(o *Operation) {\n\to.Values[o.Size-1] = o.cumm[o.Size-1]\n\tfor i := o.Size - 2; i >= 0; i-- {\n\t\to.Values[i] = o.Values[i+1] + o.cumm[i]\n\t}\n}",
"func (m *metricFlinkJvmMemoryDirectTotalCapacity) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func (c *StepLookbackAccumulator) AccumulateAndMoveToNext() []xts.Datapoint {\n\tif len(c.unconsumed) == 0 {\n\t\treturn nil\n\t}\n\n\tval := c.unconsumed[0]\n\tremaining := c.unconsumed[1:]\n\n\tif len(remaining) > 0 {\n\t\t// Move any unconsumed values to the front of unconsumed.\n\t\tc.unconsumed = c.buffer[:len(remaining)]\n\t\tcopy(c.unconsumed, remaining)\n\t} else {\n\t\t// Otherwise just repoint to the start of the buffer.\n\t\tc.unconsumed = c.buffer[:0]\n\t}\n\n\treturn val\n}",
"func (b *buffer) grow() {\n\t// ugh all these atomics\n\tatomic.AddUint32(&b.free, uint32(len(b.data)))\n\tatomic.AddUint32(&b.mask, atomic.LoadUint32(&b.mask))\n\tatomic.AddUint32(&b.mask, 1)\n\tatomic.AddUint32(&b.bits, 1)\n\n\tnext := make([]unsafe.Pointer, 2*len(b.data))\n\tcopy(next, b.data)\n\n\t// UGH need to do this with atomics. one pointer + 2 uint64 calls?\n\tb.data = next\n}",
"func sum(a []int, channel chan int) {\n\ttotal := 0\n\tfor _, value := range a {\n\t\ttotal += value\n\t}\n\tchannel <- total // insert into the channel\n}",
"func (m *metricFlinkMemoryManagedTotal) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func (i *Inc) Execute(mem turing.Memory, _ turing.Cache) error {\n\t// borrow slice\n\tbuf, ref := fpack.Borrow(int64Len)\n\tdefer ref.Release()\n\n\t// encode count\n\tbuf = buf[:0]\n\tbuf = strconv.AppendInt(buf, i.Value, 10)\n\n\t// add value\n\terr := mem.Merge(i.Key, buf, Add)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func Accumulate_int(input <-chan int) int {\n\tsum := 0\n\tfor x := range input {\n\t\tsum += x\n\t}\n\treturn sum\n}",
"func summer(num int, result chan int) {\n\tvar total int\n\n\tvar i int\n\t// returns the sum of the array\n\tfor i = 0; i < num; i++ {\n\t\ttotal += i\n\t}\n\t// returning value to channel\n\tresult <- total\n}",
"func (o *DataAccumulator) Publish() {\n\t// lock for prohibit concurrency\n\to.Lock()\n\t//copy current data\n\ttmpBuffer := o.current.Clone()\n\t//swap the DataBuffers\n\to.current, o.previous = o.previous, o.current\n\t//start a new collection\n\to.current.StartCollection()\n\t//end the old collection\n\ttmpBuffer.EndCollection()\n\t//publish the old collection\n\to.accumulator.publish(tmpBuffer)\n\t//unlock\n\to.Unlock()\n}",
"func (this *channelMeterStruct) process(buffer []float64, sampleRate uint32) {\n\tthis.mutex.RLock()\n\tenabled := this.enabled\n\tthis.mutex.RUnlock()\n\n\t/*\n\t * Only perform processing if this channel is enabled.\n\t */\n\tif enabled {\n\t\tthis.mutex.RLock()\n\t\tcurrentValue := this.currentValue\n\t\tpeakValue := this.peakValue\n\t\tsampleCounter := this.sampleCounter\n\t\tthis.mutex.RUnlock()\n\t\tsampleRateFloat := float64(sampleRate)\n\t\tholdTimeSamples := uint64(PEAK_HOLD_TIME_SECONDS * sampleRateFloat)\n\t\tdecayExp := -1.0 / (TIME_CONSTANT * sampleRateFloat)\n\t\tdecayFactor := math.Pow(10.0, decayExp)\n\n\t\t/*\n\t\t * Process each sample.\n\t\t */\n\t\tfor _, sample := range buffer {\n\t\t\tcurrentValue *= decayFactor\n\n\t\t\t/*\n\t\t\t * If we're above the hold time, let the peak indicator decay,\n\t\t\t * otherwise increment sample counter.\n\t\t\t */\n\t\t\tif sampleCounter > holdTimeSamples {\n\t\t\t\tpeakValue *= decayFactor\n\t\t\t} else {\n\t\t\t\tsampleCounter++\n\t\t\t}\n\n\t\t\tsampleAbs := math.Abs(sample)\n\n\t\t\t/*\n\t\t\t * If we got a sample with larger amplitude, update current value.\n\t\t\t */\n\t\t\tif sampleAbs > currentValue {\n\t\t\t\tcurrentValue = sampleAbs\n\t\t\t}\n\n\t\t\t/*\n\t\t\t * If we got a sample with larger or equal amplitude, update peak value.\n\t\t\t */\n\t\t\tif sampleAbs >= peakValue {\n\t\t\t\tpeakValue = sampleAbs\n\t\t\t\tsampleCounter = 0\n\t\t\t}\n\n\t\t}\n\n\t\tthis.mutex.Lock()\n\t\tthis.currentValue = currentValue\n\t\tthis.peakValue = peakValue\n\t\tthis.sampleCounter = sampleCounter\n\t\tthis.mutex.Unlock()\n\t}\n\n}",
"func (p *pipe) outputAdvance(count int) {\n\tp.outPos += int32(count)\n\tif p.outPos >= p.size {\n\t\tp.outPos -= p.size\n\t}\n\tatomic.AddInt32(&p.free, int32(count))\n\n\tselect {\n\tcase p.inWake <- struct{}{}:\n\tdefault:\n\t}\n}",
"func (b *mpgBuff) buffer() *concBuff {\n\tif !b.cur {\n\t\treturn b.bufA\n\t}\n\treturn b.bufB\n}",
"func (r *FloatMovingAverageReducer) AggregateFloat(p *FloatPoint) {\n\tif len(r.buf) != cap(r.buf) {\n\t\tr.buf = append(r.buf, p.Value)\n\t} else {\n\t\tr.sum -= r.buf[r.pos]\n\t\tr.buf[r.pos] = p.Value\n\t}\n\tr.sum += p.Value\n\tr.time = p.Time\n\tr.pos++\n\tif r.pos >= cap(r.buf) {\n\t\tr.pos = 0\n\t}\n}",
"func (d *pmac) Sum(in []byte) []byte {\n\tif d.finished {\n\t\tpanic(\"pmac: already finished\")\n\t}\n\n\tif d.pos == block.Size {\n\t\txor(d.digest[:], d.buf[:])\n\t\txor(d.digest[:], d.lInv[:])\n\t} else {\n\t\txor(d.digest[:], d.buf[:d.pos])\n\t\td.digest[d.pos] ^= 0x80\n\t}\n\n\td.digest.Encrypt(d.c)\n\td.finished = true\n\n\treturn append(in, d.digest[:]...)\n}",
"func (p *movingAverageProcessor) getBufferData(index int, namespace string) interface{} {\n\n\treturn p.movingAverageMap[namespace].movingAverageBuf[index]\n}",
"func (i *Intel8080) Accumulator() byte {\n\treturn i.r[A]\n}",
"func (ctx Context) Add(inputs ...chan float64) (output chan float64) {\n\toutput = make(chan float64, ctx.StreamBufferSize)\n\n\tgo func() {\n\t\tdefer close(output)\n\n\t\tfor len(inputs) > 0 {\n\t\t\tsum := 0.0\n\n\t\t\tfor i := 0; i < len(inputs); i++ {\n\t\t\t\tx, ok := <-inputs[i]\n\t\t\t\tif !ok {\n\t\t\t\t\tcopy(inputs[i:], inputs[i+1:])\n\t\t\t\t\tinputs = inputs[:len(inputs)-1]\n\t\t\t\t\ti--\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tsum += x\n\t\t\t}\n\n\t\t\toutput <- sum\n\t\t}\n\t}()\n\n\treturn output\n}",
"func Accumulate(s []*big.Int) (r *big.Int) {\n\tr = big.NewInt(0)\n\tfor _, e := range s {\n\t\tr.Add(r, e)\n\t}\n\treturn\n}",
"func (m *metricMysqlBufferPoolUsage) emit(metrics pmetric.MetricSlice) {\n\tif m.settings.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func (p *pipe) inputAdvance(count int) {\n\tp.inPos += int32(count)\n\tif p.inPos >= p.size {\n\t\tp.inPos -= p.size\n\t}\n\tatomic.AddInt32(&p.free, -int32(count))\n\n\tselect {\n\tcase p.outWake <- struct{}{}:\n\tdefault:\n\t}\n}",
"func sum(a []int, c chan int) {\n\tsum := 0\n\tfor _, v := range a {\n\t\tsum += v }\n\tc <- sum // send sum to c\n}",
"func (b *Buffer) Add(series ...*influxdb.Series) {\n\tif b.capacity == 0 {\n\t\tb.fn(series)\n\t\treturn\n\t}\n\n\tfor _, item := range series {\n\t\tb.in <- item\n\t}\n}",
"func (b *Buffer) addBuf() {\n\tif b.curBufIdx < len(b.bufs)-1 {\n\t\tb.curBufIdx++\n\t\tb.curBuf = b.bufs[b.curBufIdx]\n\t} else {\n\t\tbuf := b.pool.acquire()\n\t\tb.bufs = append(b.bufs, buf)\n\t\tb.curBuf = buf\n\t\tb.curBufIdx = len(b.bufs) - 1\n\t}\n\n\tb.curBufLen = len(b.curBuf)\n\tb.curIdx = 0\n}",
"func consume(ctx context.Context, count int, p *payload) []int {\n\taccumulator := make([]int, 0)\n\tvisited := make(map[int]struct{})\n\tfor i := 0; i < count; i++ {\n\t\tselect {\n\t\tcase res := <-p.res:\n\t\t\tfor _, val := range res.Numbers {\n\t\t\t\tif _, ok := visited[val]; !ok {\n\t\t\t\t\taccumulator = append(accumulator, val)\n\t\t\t\t\tvisited[val] = struct{}{}\n\t\t\t\t}\n\t\t\t}\n\t\tcase err := <-p.err:\n\t\t\tlog.Println(err)\n\t\tcase <-ctx.Done():\n\t\t\tlog.Println(ctx.Err())\n\t\t\tsort.Ints(accumulator)\n\t\t\treturn accumulator\n\t\t}\n\t}\n\tsort.Ints(accumulator)\n\treturn accumulator\n}",
"func (d *digest) Sum(buf []byte) []byte {\n\tif d.buflen > BlockSize {\n\t\td.incrementCounter(BlockSize)\n\t\td.compress()\n\t\td.buflen -= BlockSize\n\t\tcopy(d.buf[:d.buflen], d.buf[BlockSize:])\n\t}\n\td.incrementCounter(uint32(d.buflen))\n\td.f[0] = 0xffffffff;\n\tj := 2*BlockSize - d.buflen\n\tfor i := 0; i < j; i++ {\n\t\td.buf[i+d.buflen] = 0\n\t}\n\td.compress()\n\tbuffer := make([]byte, 32)\n\tfor i := 0; i < 8; i++ {\n\t\tbinary.LittleEndian.PutUint32(buffer[i*4:], d.h[i])\n\t}\n\treturn append(buf, buffer[:]...)\n}",
"func (r *FloatMovingAverageReducer) Emit() []FloatPoint {\n\tif len(r.buf) != cap(r.buf) {\n\t\treturn []FloatPoint{}\n\t}\n\treturn []FloatPoint{\n\t\t{\n\t\t\tValue: r.sum / float64(len(r.buf)),\n\t\t\tTime: r.time,\n\t\t\tAggregated: uint32(len(r.buf)),\n\t\t},\n\t}\n}",
"func (m *metricRedisClientsMaxInputBuffer) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Gauge().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func Accumulate(input []string, operation func(string) string) (output []string) {\n\n\tdefer trackTime.TrackTime(time.Now())\n\n\tfor _, v := range input {\n\t\toutput = append(output, operation(v))\n\t}\n\n\treturn\n}",
"func (a512srv *Avx512Server) Sum(uid uint64, p []byte) [32]byte {\n\tsumCh := make(chan [32]byte)\n\ta512srv.blocksCh <- blockInput{uid: uid, msg: p, final: true, sumCh: sumCh}\n\treturn <-sumCh\n}",
"func (m *metricMysqlBufferPoolLimit) emit(metrics pmetric.MetricSlice) {\n\tif m.settings.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func incBuffer(bz []byte) bool {\n\tfor i := 0; i < len(bz); i++ {\n\t\tif bz[i] == 0xFF {\n\t\t\tbz[i] = 0x00\n\t\t} else {\n\t\t\tbz[i]++\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}",
"func (m *MemStats) Receive(e slf.Event) {\n\tif e.Type == slf.TypeInc {\n\t\tm.m.Lock()\n\t\tprev, _ := m.values[e.Content]\n\t\tm.values[e.Content] = prev + e.I64\n\t\tm.m.Unlock()\n\t} else if e.Type == slf.TypeGauge {\n\t\tm.m.Lock()\n\t\tm.values[e.Content] = e.I64\n\t\tm.m.Unlock()\n\t}\n}",
"func (e *Engine) callback(in []float32, out [][]float32) {\n\tfor k := 0; k < e.chunks; k++ {\n\t\tif msg := e.messages.Receive(); msg != nil {\n\t\t\te.handle(msg)\n\t\t}\n\n\t\tvar (\n\t\t\tframeSize = e.frameSize\n\t\t\toffset = frameSize * k\n\t\t\tinput = e.graph.in\n\t\t\tleftOut = e.graph.leftOut\n\t\t\trightOut = e.graph.rightOut\n\t\t\tgain = e.gain\n\t\t)\n\t\tfor i := 0; i < frameSize; i++ {\n\t\t\tinput[i] = float64(in[offset+i])\n\t\t}\n\t\tfor _, p := range e.graph.Processors() {\n\t\t\tp.ProcessFrame(frameSize)\n\t\t}\n\t\tfor i := range out {\n\t\t\tfor j := 0; j < frameSize; j++ {\n\t\t\t\tif i%2 == 0 {\n\t\t\t\t\tout[i][offset+j] = float32(leftOut[j]) * gain\n\t\t\t\t} else {\n\t\t\t\t\tout[i][offset+j] = float32(rightOut[j]) * gain\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}",
"func sum(b *bolt.Bucket, fn func([]byte) int) (int, error) {\n\tsum := 0\n\terr := b.ForEach(func(_, v []byte) error {\n\t\tsum += fn(v)\n\t\treturn nil\n\t})\n\treturn sum, err\n}",
"func (ctl *Controller) updateBuffer(position int, colour Colour) {\n\tbufferOffset := headerSize + position*ledPacketSize\n\t// Write out the brightness\n\tbrightness := colour.L\n\tif ctl.brightness != 255 {\n\t\tbrightness = uint8(float32(ctl.brightness) * float32(brightness) / 255)\n\t}\n\tif ctl.gammaFunc != nil {\n\t\t// Apply gamma correction.\n\t\tcolour = ctl.gammaFunc(colour)\n\t}\n\tctl.buffer[bufferOffset] = brightness>>3 | brightnessHeader\n\tctl.buffer[bufferOffset+ctl.rOffset] = colour.R\n\tctl.buffer[bufferOffset+ctl.bOffset] = colour.B\n\tctl.buffer[bufferOffset+ctl.gOffset] = colour.G\n}",
"func (m *metricFlinkMemoryManagedUsed) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func (c *RingBuffer) consume(n int) int {\n\tif n > c.len {\n\t\tn = c.len\n\t}\n\tc.index = (c.index + n) % len(c.buf)\n\tc.addLen(-n)\n\treturn n\n}",
"func processAckBuffer(s *server, ConnID int) {\n\tfor s.clientMap[ConnID].ackClientQueue.Len() > 0 && s.clientMap[ConnID].ackSequenceMap+1 == s.clientMap[ConnID].ackClientQueue.Front().Value.(int) {\n\t\tfront := s.clientMap[ConnID].ackClientQueue.Front()\n\t\ts.clientMap[ConnID].ackClientQueue.Remove(front)\n\t\ts.clientMap[ConnID].ackSequenceMap = s.clientMap[ConnID].ackSequenceMap + 1\n\t\tif verbose {\n\t\t\tfmt.Printf(\" new target ack is: %v\\n\", s.clientMap[ConnID].ackSequenceMap+1)\n\t\t}\n\t}\n}",
"func advanceBuffer(buff *bytes.Buffer, num int) {\n\tbuff.Next(num)\n\t// move buffer from num offset to 0\n\tbytearr := buff.Bytes()\n\tbuff.Reset()\n\tbuff.Write(bytearr)\n}",
"func (p *movingAverageProcessor) addBufferData(index int, data interface{}, namespace string) error {\n\tif _, ok := p.movingAverageMap[namespace]; ok {\n\t\tif index >= len(p.movingAverageMap[namespace].movingAverageBuf) {\n\t\t\treturn errors.New(\"Incorrect value of index, trying to access non-existing element of buffer\")\n\t\t}\n\t\tp.movingAverageMap[namespace].movingAverageBuf[index] = data\n\t\treturn nil\n\t} else {\n\t\treturn errors.New(\"Namespace is not present in the map\")\n\t}\n}",
"func (rb *ringBuffer) read(f func([]byte)) {\n\tvar dataHead, dataTail uint64\n\n\tdataTail = rb.metadata.DataTail\n\tdataHead = atomic.LoadUint64(&rb.metadata.DataHead)\n\n\tfor dataTail < dataHead {\n\t\tdataBegin := dataTail % uint64(len(rb.data))\n\t\tdataEnd := dataHead % uint64(len(rb.data))\n\n\t\tvar data []byte\n\t\tif dataEnd >= dataBegin {\n\t\t\tdata = rb.data[dataBegin:dataEnd]\n\t\t} else {\n\t\t\tdata = rb.data[dataBegin:]\n\t\t\tdata = append(data, rb.data[:dataEnd]...)\n\t\t}\n\n\t\tf(data)\n\n\t\t//\n\t\t// Write dataHead to dataTail to let kernel know that we've\n\t\t// consumed the data up to it.\n\t\t//\n\t\tdataTail = dataHead\n\t\tatomic.StoreUint64(&rb.metadata.DataTail, dataTail)\n\n\t\t// Update dataHead in case it has been advanced in the interim\n\t\tdataHead = atomic.LoadUint64(&rb.metadata.DataHead)\n\t}\n}",
"func (m *metricFlinkJvmMemoryMappedUsed) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func consume(buffer chan item) {\n\tfor elem := range buffer {\n\t\tfmt.Printf(\"%d %d\\n\", elem.producer, elem.value)\n\t}\n}",
"func (ch *RingChannel) ringBuffer() {\n\tvar input, output chan dynamic.Message\n\tvar next dynamic.Message\n\tinput = ch.input\n\n\tfor input != nil || output != nil {\n\t\tselect {\n\t\t// Prefer to write if possible, which is surprisingly effective in reducing\n\t\t// dropped elements due to overflow. The naive read/write select chooses randomly\n\t\t// when both channels are ready, which produces unnecessary drops 50% of the time.\n\t\tcase output <- next:\n\t\t\tch.buffer = nil\n\t\tdefault:\n\t\t\tselect {\n\t\t\tcase elem, open := <-input:\n\t\t\t\tif !open {\n\t\t\t\t\tinput = nil\n\t\t\t\t\tbreak\n\t\t\t\t}\n\n\t\t\t\tch.buffer = &elem\n\t\t\tcase output <- next:\n\t\t\t\tch.buffer = nil\n\t\t\t}\n\t\t}\n\n\t\tif ch.buffer == nil {\n\t\t\toutput = nil\n\t\t\tcontinue\n\t\t}\n\n\t\toutput = ch.output\n\t\tnext = *ch.buffer\n\t}\n\n\tclose(ch.output)\n}",
"func TestPacketAccumulator(t *testing.T) {\n\tb, _ := hex.DecodeString(\"474064100002b0ba0001c10000e065f00b0504435545490e03c03dd01be065f016970028046400283fe907108302808502800e03c0392087e066f0219700050445414333cc03c0c2100a04656e6700e907108302808502800e03c000f087e067f0219700050445414333cc03c0c4100a0473706100e907108302808502800e03c001e00fe068f01697000a04656e6700e907108302808502800e03c000f00fe069f01697000a0473706100e907108302808502800e03c000f086e0dc\")\n\tfirstPacket := &Packet{}\n\tcopy(firstPacket[:], b)\n\tb, _ = hex.DecodeString(\"47006411f0002b59bc22ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\")\n\tsecondPacket := &Packet{}\n\tcopy(secondPacket[:], b)\n\n\tvar called = false\n\n\tpackets := []*Packet{firstPacket, secondPacket}\n\t// Just a simple func to accumulate two packets\n\tdFunc := func(b []byte) (bool, error) {\n\t\tif len(b) <= PacketSize {\n\t\t\treturn false, nil\n\t\t}\n\n\t\tcalled = true\n\n\t\treturn true, nil\n\t}\n\n\tacc := NewAccumulator(dFunc)\n\tfor _, pkt := range packets {\n\t\t_, err := acc.WritePacket(pkt)\n\t\tif err == gots.ErrAccumulatorDone {\n\t\t\t// Accumulation is done\n\t\t\tbreak\n\t\t} else if err != nil {\n\t\t\tt.Errorf(\"Unexpected accumulator error: %s\", err)\n\t\t}\n\t}\n\n\tif !called {\n\t\tt.Error(\"Expected Accumulator doneFunc to be called\")\n\t}\n}",
"func (r *IntegerMovingAverageReducer) AggregateInteger(p *IntegerPoint) {\n\tif len(r.buf) != cap(r.buf) {\n\t\tr.buf = append(r.buf, p.Value)\n\t} else {\n\t\tr.sum -= r.buf[r.pos]\n\t\tr.buf[r.pos] = p.Value\n\t}\n\tr.sum += p.Value\n\tr.time = p.Time\n\tr.pos++\n\tif r.pos >= cap(r.buf) {\n\t\tr.pos = 0\n\t}\n}",
"func (m *metricFlinkJvmMemoryDirectUsed) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func (tr *traverser) Inc() {\n\ttr.idx++\n\tif tr.ConsumedAll() {\n\t\ttr.SetDone()\n\t}\n}",
"func (h *batchHistogram) update(his *metrics.Float64Histogram, sum float64) {\n\tcounts, buckets := his.Counts, his.Buckets\n\n\th.mu.Lock()\n\tdefer h.mu.Unlock()\n\n\t// Clear buckets.\n\tfor i := range h.counts {\n\t\th.counts[i] = 0\n\t}\n\t// Copy and reduce buckets.\n\tvar j int\n\tfor i, count := range counts {\n\t\th.counts[j] += count\n\t\tif buckets[i+1] == h.buckets[j+1] {\n\t\t\tj++\n\t\t}\n\t}\n\tif h.hasSum {\n\t\th.sum = sum\n\t}\n}",
"func (m *metricRedisCommandsProcessed) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func (z *Big) Reduce() *Big { return z.Context.Reduce(z) }",
"func routineBuffer(){\n\tchannel := make(chan int,4)//make channel and add buffer\n\twg.Add(2)\n\t//receive from channel\n\tgo func(channel <- chan int){\n\t\t//loop throug the channel\n\t\tfor i := range channel{\n\t\t\tfmt.Println(i)\n\t\t}\n\t\twg.Done()\n\t}(channel)\n\t//sending to channel\n\tgo func(channel chan <- int){\n\t\tchannel <- 100\n\t\tchannel <- 200\n\t\tclose(channel) //after sending close the channel \n\t\twg.Done()\n\t}(channel)\n\twg.Wait()\n}",
"func sum(numbers chan int) int {\n\tacc := 0\n\tfor v := range numbers {\n\t\tfmt.Printf(\"Got doubled value %d \\n\", v)\n\t\tacc += v\n\t}\n\treturn acc\n}",
"func (cc computer) acc() int { return cc.accumulator }",
"func sumWorker(nums chan int, out chan int) {\n\t// TODO: implement me\n\t// HINT: use for loop over `nums`\n\tvar tot int\n\tfor i := range nums {\n\t\ttot += i\n\t}\n\tout <- tot\n}",
"func (shp *SHPImpl) ComputMoveGainWithBuffer() {\n\tfor bucketI := uint64(0); bucketI < shp.bucketSize; bucketI++ {\n\t\tfor bucketJ := uint64(0); bucketJ < shp.bucketSize; bucketJ++ {\n\t\t\tshp.vertexTrans[bucketI][bucketJ] = 0\n\t\t}\n\t}\n\tfor vertex := uint64(0); vertex < shp.vertexSize; vertex++ {\n\t\tminGain, target := shp.calcSingleGain(shp.graph.Nodes[vertex])\n\n\t\tif minGain < 0 {\n\t\t\tshp.vertex2Target[vertex] = target\n\t\t\tshp.vertexTrans[shp.vertex2Bucket[vertex]][target]++\n\t\t\tshp.tf.buffer[shp.tf.bufferSize] = vertex\n\t\t\tshp.tf.bufferSize++\n\t\t}\n\t}\n}",
"func (b *Ring) add(val interface{}) error {\n\tif b.size >= len(b.buf) {\n\t\treturn ErrFull\n\t}\n\tb.lock.Lock()\n\tdefer b.lock.Unlock()\n\n\tnext := Next(1, b.head, len(b.buf))\n\tb.buf[next] = val\n\tb.head = next\n\tb.size++ // increase the inner size\n\treturn nil\n}",
"func (m *metricFlinkJvmMemoryMetaspaceCommitted) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func processAudio(out []float32) {\n\tfor i := range out {\n\t\tout[i] = 0\n\t\tfor _, t := range toneMap {\n\t\t\tout[i] += t.next()\n\t\t}\n\t}\n}",
"func (m *metricFlinkJvmMemoryNonheapCommitted) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func (m *metricBigipPoolRequestCount) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func (tb *TelemetryBuffer) BufferAndPushData(intervalms time.Duration) {\n\tdefer tb.close()\n\tif !tb.FdExists {\n\t\ttelemetryLogger.Printf(\"[Telemetry] Buffer telemetry data and send it to host\")\n\t\tif intervalms < DefaultInterval {\n\t\t\tintervalms = DefaultInterval\n\t\t}\n\n\t\tinterval := time.NewTicker(intervalms).C\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-interval:\n\t\t\t\t// Send payload to host and clear cache when sent successfully\n\t\t\t\t// To-do : if we hit max slice size in payload, write to disk and process the logs on disk on future sends\n\t\t\t\ttelemetryLogger.Printf(\"[Telemetry] send data to host\")\n\t\t\t\tif err := tb.sendToHost(); err == nil {\n\t\t\t\t\ttb.payload.reset()\n\t\t\t\t} else {\n\t\t\t\t\ttelemetryLogger.Printf(\"[Telemetry] sending to host failed with error %+v\", err)\n\t\t\t\t}\n\t\t\tcase report := <-tb.data:\n\t\t\t\ttelemetryLogger.Printf(\"[Telemetry] Got data..Append it to buffer\")\n\t\t\t\ttb.payload.push(report)\n\t\t\tcase <-tb.cancel:\n\t\t\t\tgoto EXIT\n\t\t\t}\n\t\t}\n\t} else {\n\t\t<-tb.cancel\n\t}\n\nEXIT:\n}",
"func Accumulate(collection []string, operation func(string) string) []string {\n\tfor i, element := range collection {\n\t\tcollection[i] = operation(element)\n\t}\n\treturn collection\n}",
"func (m *metricActiveDirectoryDsOperationRate) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func (a *MovAvg) push(t time.Time, v []int) {\n\ta.q[a.w].t = t\n\tcopy(a.q[a.w].v, v)\n\tfor i := range v {\n\t\ta.sum[i] += v[i]\n\t}\n\tif a.w++; a.w == len(a.q) {\n\t\ta.w = 0\n\t}\n}",
"func (s *Stats) Inc(d *Data) {\n\ts.Lock()\n\tdefer s.Unlock()\n\n\ts.StatusCode[d.StatusCode]++\n\ts.Method[d.Method]++\n\ts.Path[d.Path]++\n\ts.InBytes += d.InBytes\n\ts.OutBytes += d.OutBytes\n}",
"func Reduce[I, A any](input <-chan I, reduceFunc func(accum A, element I) A) A {\n\tvar accum A\n\tfor element := range input {\n\t\taccum = reduceFunc(accum, element)\n\t}\n\n\treturn accum\n}",
"func (m *metricBigipPoolDataTransmitted) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func (a *Int64) Accumulate(i int, err error) {\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\t*a += Int64(i)\n}",
"func sumWorker(nums chan int, out chan int) {\n\t// TODO: implement me\n\t// HINT: use for loop over `nums`\n\tvar sum int\n\tdefer wg.Done()\n\tcounterA:= len(nums)\n\tfor i := 0; i<counterA; i++ {\n\t\telement := <- nums\n\t\tsum+=element\n\t}\n\tout <- sum\n}",
"func (m *metricRedisMemoryUsed) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Gauge().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func sum(s []int, c chan int) {\n\tsum := 0\n\t// adds the numbers in the slice\n\tfor _, v := range s {\n\t\tsum += v\n\t}\n\tfmt.Println(\"Computation of\", s, \"must be:\", sum)\n\t// sends sum to channel c\n\tc <- sum\n}",
"func (client *Client) OnsConsumerAccumulateWithCallback(request *OnsConsumerAccumulateRequest, callback func(response *OnsConsumerAccumulateResponse, err error)) <-chan int {\n\tresult := make(chan int, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tvar response *OnsConsumerAccumulateResponse\n\t\tvar err error\n\t\tdefer close(result)\n\t\tresponse, err = client.OnsConsumerAccumulate(request)\n\t\tcallback(response, err)\n\t\tresult <- 1\n\t})\n\tif err != nil {\n\t\tdefer close(result)\n\t\tcallback(nil, err)\n\t\tresult <- 0\n\t}\n\treturn result\n}",
"func (c *CandleCandler) Accum(_ io.TimeBucketKey, argMap *functions.ArgumentMap, cols io.ColumnInterface,\n) (*io.ColumnSeries, error) {\n\tif cols.Len() == 0 {\n\t\treturn nil, fmt.Errorf(\"empty input to Accum\")\n\t}\n\t/*\n\t\tGet the input column for \"Price\"\n\t*/\n\topenCols := argMap.GetMappedColumns(requiredColumns[0].Name)\n\thighCols := argMap.GetMappedColumns(requiredColumns[1].Name)\n\tlowCols := argMap.GetMappedColumns(requiredColumns[2].Name)\n\tcloseCols := argMap.GetMappedColumns(requiredColumns[3].Name)\n\topen, err := candler.GetAverageColumnFloat32(cols, openCols)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\thigh, err := candler.GetAverageColumnFloat32(cols, highCols)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlow, err := candler.GetAverageColumnFloat32(cols, lowCols)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tclos, err := candler.GetAverageColumnFloat32(cols, closeCols)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t/*\n\t\tGet the time column\n\t*/\n\tts, err := cols.GetTime()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t/*\n\t\tUpdate each candle\n\t\tPrepare a consolidated map of columns for use in updating sums\n\t*/\n\tvar sumCols map[string][]float32\n\tif len(c.AccumSumNames) != 0 {\n\t\tsumCols = make(map[string][]float32)\n\t\tfor _, name := range c.AccumSumNames {\n\t\t\tsumCols[name], err = uda.ColumnToFloat32(cols, name)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t}\n\tvar candle *candler.Candle\n\tfor i, t := range ts {\n\t\tcandle = c.GetCandle(t, candle)\n\t\tcandle.AddCandle(t, open[i], high[i], low[i], clos[i])\n\t\t/*\n\t\t\tIterate over the candle's named columns that need sums\n\t\t*/\n\t\tfor _, name := range c.AccumSumNames {\n\t\t\tcandle.SumMap[name] += float64(sumCols[name][i])\n\t\t}\n\t\tcandle.Count++\n\t}\n\treturn c.Output()\n}",
"func (m *metricFlinkOperatorRecordCount) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func Accumulate(collect []string, operation func(string) string) []string {\n\n\toutput := []string{}\n\n\tfor i := range collect {\n\t\toutput = append(output, operation(collect[i]))\n\t}\n\treturn output\n}",
"func (m *metricActiveDirectoryDsReplicationSyncRequestCount) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func sum(s []int, c chan int) {\n\tsum := 0\n\tfor _, v := range s {\n\t\tsum += v\n\t}\n\tc <- sum // send sum to c\n}",
"func sum(msg string, values ...int) *int {\n\t// fmt.Println(values)\n\t// fmt.Printf(\"%T\\n\", values)\n\tresult := 0\n\tfor _, v := range values {\n\t\tresult += v\n\t}\n\t// fmt.Println(msg, result)\n\t// return result\n\n\t// In other languages the local stack of a function is destroyed when the function is done\n\t// executing. Golang saves this on a shared memory(heap memory) before destroying the local stack\n\treturn &result\n}",
"func (m *metricActiveDirectoryDsReplicationOperationPending) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func (m *metricBigipNodeRequestCount) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}",
"func bzReduce(combine bzConsumer, start float64, L, d int, fn BzFunc) float64 {\n\tpoints := bzPoints(L, d)\n\ttotal := start\n\tfor i := 0; i < len(points); i++ {\n\t\tk := points[i]\n\t\ttotal = combine(fn(k), total)\n\t}\n\treturn total\n}"
] | [
"0.62614363",
"0.6201176",
"0.61862767",
"0.6068468",
"0.57331413",
"0.56892174",
"0.56844246",
"0.56659853",
"0.5638753",
"0.55710655",
"0.55572665",
"0.554525",
"0.55062646",
"0.54850453",
"0.5482546",
"0.5482173",
"0.5472898",
"0.5457682",
"0.5447925",
"0.5440757",
"0.5434414",
"0.5434123",
"0.54272914",
"0.54119694",
"0.5410957",
"0.5387552",
"0.53760624",
"0.53617215",
"0.53575605",
"0.5353574",
"0.53521484",
"0.53501165",
"0.5341517",
"0.53163916",
"0.5310258",
"0.5307371",
"0.5293641",
"0.52860814",
"0.52534676",
"0.52482",
"0.52423614",
"0.5232144",
"0.5230235",
"0.522898",
"0.5227996",
"0.52278435",
"0.52269775",
"0.52183545",
"0.5217752",
"0.52169245",
"0.5207118",
"0.5203333",
"0.5200513",
"0.5193989",
"0.5190965",
"0.51876897",
"0.51865065",
"0.5186261",
"0.5185027",
"0.5179705",
"0.5166453",
"0.5163109",
"0.5161887",
"0.5138447",
"0.51374817",
"0.51280385",
"0.512442",
"0.5123931",
"0.5122143",
"0.5116034",
"0.51123846",
"0.5109327",
"0.5096083",
"0.5091755",
"0.5087457",
"0.50809634",
"0.5078593",
"0.50736207",
"0.5066236",
"0.50650555",
"0.5064008",
"0.5058089",
"0.50542414",
"0.50531787",
"0.50530666",
"0.50504285",
"0.50478905",
"0.50456303",
"0.50430995",
"0.5041966",
"0.5035384",
"0.5033685",
"0.5030689",
"0.50279284",
"0.502575",
"0.50237083",
"0.5020915",
"0.50171226",
"0.50145227",
"0.5011364"
] | 0.6006751 | 4 |
set the active program object for a program pipeline object | func ActiveShaderProgram(pipeline uint32, program uint32) {
C.glowActiveShaderProgram(gpActiveShaderProgram, (C.GLuint)(pipeline), (C.GLuint)(program))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (m *ProgramControl) SetProgram(value Programable)() {\n err := m.GetBackingStore().Set(\"program\", value)\n if err != nil {\n panic(err)\n }\n}",
"func ActiveShaderProgram(pipeline uint32, program uint32) {\n\tsyscall.Syscall(gpActiveShaderProgram, 2, uintptr(pipeline), uintptr(program), 0)\n}",
"func BindProgramPipeline(pipeline uint32) {\n\tsyscall.Syscall(gpBindProgramPipeline, 1, uintptr(pipeline), 0, 0)\n}",
"func (m *ProgramControl) SetProgramId(value *string)() {\n err := m.GetBackingStore().Set(\"programId\", value)\n if err != nil {\n panic(err)\n }\n}",
"func ActiveShaderProgram(pipeline uint32, program uint32) {\n C.glowActiveShaderProgram(gpActiveShaderProgram, (C.GLuint)(pipeline), (C.GLuint)(program))\n}",
"func (t *cliTransHandler) SetPipeline(p *remote.TransPipeline) {\n\tt.transPipe = p\n}",
"func BindProgramPipeline(pipeline uint32) {\n C.glowBindProgramPipeline(gpBindProgramPipeline, (C.GLuint)(pipeline))\n}",
"func UseProgramStages(pipeline uint32, stages uint32, program uint32) {\n\tsyscall.Syscall(gpUseProgramStages, 3, uintptr(pipeline), uintptr(stages), uintptr(program))\n}",
"func (s *BaselimboListener) EnterProgram(ctx *ProgramContext) {}",
"func BindProgramPipeline(pipeline uint32) {\n\tC.glowBindProgramPipeline(gpBindProgramPipeline, (C.GLuint)(pipeline))\n}",
"func BindProgramPipeline(pipeline uint32) {\n\tC.glowBindProgramPipeline(gpBindProgramPipeline, (C.GLuint)(pipeline))\n}",
"func (p* ScreenManager)ChangeProg(forward bool){\r\n if(len(p.RunningApps)<2){//Impossibru!!! -_-\r\n fmt.Printf(\"ERROR: Only one or zero progs running\\n\")\r\n return\r\n }\r\n names,index:=p.ProgNameList()\r\n //Deattach current prog\r\n if(index>-1){\r\n if(forward){\r\n p.ActiveApp=names[(index+1)%len(p.RunningApps)]\r\n }else{\r\n p.RunningApps[p.ActiveApp].Display=make(chan gomonochromebitmap.MonoBitmap,1) //Make active app writing to another chan TODO copy here when chancing\r\n if(index>0){\r\n p.ActiveApp=names[index-1]\r\n }else{\r\n p.ActiveApp=names[len(p.RunningApps)-1]\r\n }\r\n }\r\n }else{\r\n p.ActiveApp=names[0]\r\n }\r\n fmt.Printf(\"\\n\\n!!!!! ACTIVE APP IS NOW %v !!!!!\\n\",p.ActiveApp)\r\n}",
"func (e *Executor) SetPipeline(v pipeline.Build) {\n\t// return if Executor type is nil\n\tif e == nil {\n\t\treturn\n\t}\n\n\te.Pipeline = &v\n}",
"func (context *Pipeline) setCurrentProcessor(name string) {\n\tcontext.currentProcessor = name\n}",
"func UseProgram(p Program) {\n\tgl.UseProgram(p.Value)\n}",
"func UseProgram(program uint32) {\n\tsyscall.Syscall(gpUseProgram, 1, uintptr(program), 0, 0)\n}",
"func SetProcess(proc string) {\n process = proc\n}",
"func setProcess(p *os.Process) {\n lock.Lock()\n defer lock.Unlock()\n proc = p\n if group != nil {\n group.Invoke()\n }\n group = newGrouper(time.Millisecond * 2500, func() error {\n return term(p)\n })\n}",
"func (ans *answer) setPipelineCaller(c *lockedConn, m capnp.Method, pcall capnp.PipelineCaller) {\n\tc.assertIs(ans.c)\n\n\tif !ans.flags.Contains(resultsReady) {\n\t\tans.pcall = pcall\n\t\tans.promise = capnp.NewPromise(m, pcall)\n\t}\n}",
"func (s *BaseAspidaListener) EnterProgram(ctx *ProgramContext) {}",
"func (s *BaseednListener) EnterProgram(ctx *ProgramContext) {}",
"func UseProgramStages(pipeline uint32, stages uint32, program uint32) {\n C.glowUseProgramStages(gpUseProgramStages, (C.GLuint)(pipeline), (C.GLbitfield)(stages), (C.GLuint)(program))\n}",
"func (s *BasemumpsListener) EnterProgram(ctx *ProgramContext) {}",
"func Program(name string, env []string) Runner {\n\treturn &program{\n\t\tname: name,\n\t\tenv: append(os.Environ(), env...),\n\t}\n}",
"func (b *ballotMaster) setBallotInProg(value bool) bool {\n\tb.mutex.Lock()\n\tdefer b.mutex.Unlock()\n\n\tif b.inProg == value {\n\t\treturn false\n\t}\n\n\tb.inProg = value\n\treturn true\n}",
"func (launcher *Launcher) setProcess(proc *os.Process) {\n\tlauncher.Mutex.Lock()\n\tlauncher.process = proc\n\tlauncher.Mutex.Unlock()\n}",
"func (r *Registers) SetProgramCounter(address uint16) {\n\tr.pc = address\n}",
"func newProgram(e *Env, ast *Ast, opts []ProgramOption) (Program, error) {\n\t// Build the dispatcher, interpreter, and default program value.\n\tdisp := interpreter.NewDispatcher()\n\n\t// Ensure the default attribute factory is set after the adapter and provider are\n\t// configured.\n\tp := &prog{\n\t\tEnv: e,\n\t\tdecorators: []interpreter.InterpretableDecorator{},\n\t\tdispatcher: disp,\n\t}\n\n\t// Configure the program via the ProgramOption values.\n\tvar err error\n\tfor _, opt := range opts {\n\t\tp, err = opt(p)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// Add the function bindings created via Function() options.\n\tfor _, fn := range e.functions {\n\t\tbindings, err := fn.bindings()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\terr = disp.Add(bindings...)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// Set the attribute factory after the options have been set.\n\tvar attrFactory interpreter.AttributeFactory\n\tif p.evalOpts&OptPartialEval == OptPartialEval {\n\t\tattrFactory = interpreter.NewPartialAttributeFactory(e.Container, e.adapter, e.provider)\n\t} else {\n\t\tattrFactory = interpreter.NewAttributeFactory(e.Container, e.adapter, e.provider)\n\t}\n\tinterp := interpreter.NewInterpreter(disp, e.Container, e.provider, e.adapter, attrFactory)\n\tp.interpreter = interp\n\n\t// Translate the EvalOption flags into InterpretableDecorator instances.\n\tdecorators := make([]interpreter.InterpretableDecorator, len(p.decorators))\n\tcopy(decorators, p.decorators)\n\n\t// Enable interrupt checking if there's a non-zero check frequency\n\tif p.interruptCheckFrequency > 0 {\n\t\tdecorators = append(decorators, interpreter.InterruptableEval())\n\t}\n\t// Enable constant folding first.\n\tif p.evalOpts&OptOptimize == OptOptimize {\n\t\tdecorators = append(decorators, interpreter.Optimize())\n\t\tp.regexOptimizations = append(p.regexOptimizations, interpreter.MatchesRegexOptimization)\n\t}\n\t// Enable regex compilation of constants immediately after folding constants.\n\tif len(p.regexOptimizations) > 0 {\n\t\tdecorators = append(decorators, interpreter.CompileRegexConstants(p.regexOptimizations...))\n\t}\n\t// Enable compile-time checking of syntax/cardinality for string.format calls.\n\tif p.evalOpts&OptCheckStringFormat == OptCheckStringFormat {\n\t\tvar isValidType func(id int64, validTypes ...*types.TypeValue) (bool, error)\n\t\tif ast.IsChecked() {\n\t\t\tisValidType = func(id int64, validTypes ...*types.TypeValue) (bool, error) {\n\t\t\t\tt, err := ExprTypeToType(ast.typeMap[id])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn false, err\n\t\t\t\t}\n\t\t\t\tif t.kind == DynKind {\n\t\t\t\t\treturn true, nil\n\t\t\t\t}\n\t\t\t\tfor _, vt := range validTypes {\n\t\t\t\t\tk, err := typeValueToKind(vt)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn false, err\n\t\t\t\t\t}\n\t\t\t\t\tif k == t.kind {\n\t\t\t\t\t\treturn true, nil\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn false, nil\n\t\t\t}\n\t\t} else {\n\t\t\t// if the AST isn't type-checked, short-circuit validation\n\t\t\tisValidType = func(id int64, validTypes ...*types.TypeValue) (bool, error) {\n\t\t\t\treturn true, nil\n\t\t\t}\n\t\t}\n\t\tdecorators = append(decorators, interpreter.InterpolateFormattedString(isValidType))\n\t}\n\n\t// Enable exhaustive eval, state tracking and cost tracking last since they require a factory.\n\tif p.evalOpts&(OptExhaustiveEval|OptTrackState|OptTrackCost) != 0 {\n\t\tfactory := func(state interpreter.EvalState, costTracker *interpreter.CostTracker) 
(Program, error) {\n\t\t\tcostTracker.Estimator = p.callCostEstimator\n\t\t\tcostTracker.Limit = p.costLimit\n\t\t\t// Limit capacity to guarantee a reallocation when calling 'append(decs, ...)' below. This\n\t\t\t// prevents the underlying memory from being shared between factory function calls causing\n\t\t\t// undesired mutations.\n\t\t\tdecs := decorators[:len(decorators):len(decorators)]\n\t\t\tvar observers []interpreter.EvalObserver\n\n\t\t\tif p.evalOpts&(OptExhaustiveEval|OptTrackState) != 0 {\n\t\t\t\t// EvalStateObserver is required for OptExhaustiveEval.\n\t\t\t\tobservers = append(observers, interpreter.EvalStateObserver(state))\n\t\t\t}\n\t\t\tif p.evalOpts&OptTrackCost == OptTrackCost {\n\t\t\t\tobservers = append(observers, interpreter.CostObserver(costTracker))\n\t\t\t}\n\n\t\t\t// Enable exhaustive eval over a basic observer since it offers a superset of features.\n\t\t\tif p.evalOpts&OptExhaustiveEval == OptExhaustiveEval {\n\t\t\t\tdecs = append(decs, interpreter.ExhaustiveEval(), interpreter.Observe(observers...))\n\t\t\t} else if len(observers) > 0 {\n\t\t\t\tdecs = append(decs, interpreter.Observe(observers...))\n\t\t\t}\n\n\t\t\treturn p.clone().initInterpretable(ast, decs)\n\t\t}\n\t\treturn newProgGen(factory)\n\t}\n\treturn p.initInterpretable(ast, decorators)\n}",
"func UseProgramStages(pipeline uint32, stages uint32, program uint32) {\n\tC.glowUseProgramStages(gpUseProgramStages, (C.GLuint)(pipeline), (C.GLbitfield)(stages), (C.GLuint)(program))\n}",
"func UseProgramStages(pipeline uint32, stages uint32, program uint32) {\n\tC.glowUseProgramStages(gpUseProgramStages, (C.GLuint)(pipeline), (C.GLbitfield)(stages), (C.GLuint)(program))\n}",
"func (s *BaseLittleDuckListener) EnterPrograma(ctx *ProgramaContext) {}",
"func SetActiveTexture(texture Enum) {\n\tctexture, _ := (C.GLenum)(texture), cgoAllocsUnknown\n\tC.glActiveTexture(ctexture)\n}",
"func NewProgram(cfg *client.Config, parentName string) *tea.Program {\n\tm := NewModel(cfg)\n\tm.standalone = true\n\tm.parentName = parentName\n\treturn tea.NewProgram(m)\n}",
"func (s *BaseBrainfuckListener) EnterProgram(ctx *ProgramContext) {}",
"func IsProgramPipeline(pipeline uint32) bool {\n\tret := C.glowIsProgramPipeline(gpIsProgramPipeline, (C.GLuint)(pipeline))\n\treturn ret == TRUE\n}",
"func IsProgramPipeline(pipeline uint32) bool {\n\tret := C.glowIsProgramPipeline(gpIsProgramPipeline, (C.GLuint)(pipeline))\n\treturn ret == TRUE\n}",
"func UseProgram(program uint32) {\n\tC.glowUseProgram(gpUseProgram, (C.GLuint)(program))\n}",
"func UseProgram(program uint32) {\n\tC.glowUseProgram(gpUseProgram, (C.GLuint)(program))\n}",
"func (m *ProgramControl) SetResource(value ProgramResourceable)() {\n err := m.GetBackingStore().Set(\"resource\", value)\n if err != nil {\n panic(err)\n }\n}",
"func IsProgramPipeline(pipeline uint32) bool {\n ret := C.glowIsProgramPipeline(gpIsProgramPipeline, (C.GLuint)(pipeline))\n return ret == TRUE\n}",
"func (b *B) SetParallelism(p int)",
"func (coll *Collection) DetachProgram(name string) *Program {\n\tp := coll.Programs[name]\n\tdelete(coll.Programs, name)\n\treturn p\n}",
"func (b *ProgramControlRequestBuilder) Program() *ProgramRequestBuilder {\n\tbb := &ProgramRequestBuilder{BaseRequestBuilder: b.BaseRequestBuilder}\n\tbb.baseURL += \"/program\"\n\treturn bb\n}",
"func (m *ProgramControl) GetProgram()(Programable) {\n val, err := m.GetBackingStore().Get(\"program\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(Programable)\n }\n return nil\n}",
"func (self Context) SetActive(active bool) {\n\tif active {\n\t\tC.sfContext_setActive(self.Cref, C.sfBool(1))\n\t} else {\n\t\tC.sfContext_setActive(self.Cref, C.sfBool(0))\n\t}\n}",
"func (o *ExportUsingGETParams) SetPipeline(pipeline *string) {\n\to.Pipeline = pipeline\n}",
"func (program Program) Use() {\n\tgl.UseProgram(uint32(program))\n}",
"func (w *Workflow) setCore(c *Core) {\n\tw.c = c\n}",
"func IsProgramPipeline(pipeline uint32) bool {\n\tret, _, _ := syscall.Syscall(gpIsProgramPipeline, 1, uintptr(pipeline), 0, 0)\n\treturn ret != 0\n}",
"func NewProgramControl()(*ProgramControl) {\n m := &ProgramControl{\n Entity: *NewEntity(),\n }\n return m\n}",
"func (s *SearchRecord) SetPipeline(v *Pipeline) *SearchRecord {\n\ts.Pipeline = v\n\treturn s\n}",
"func (debugging *debuggingOpenGL) UseProgram(program uint32) {\n\tdebugging.recordEntry(\"UseProgram\", program)\n\tdebugging.gl.UseProgram(program)\n\tdebugging.recordExit(\"UseProgram\")\n}",
"func UseProgram(program uint32) {\n C.glowUseProgram(gpUseProgram, (C.GLuint)(program))\n}",
"func (m *ProgramControl) SetOwner(value UserIdentityable)() {\n err := m.GetBackingStore().Set(\"owner\", value)\n if err != nil {\n panic(err)\n }\n}",
"func (w *worker) registerActive(parent *worker) {\n\t// Only needed for bots not created by IncomingMessage\n\tif w.maps == nil {\n\t\tcurrentUCMaps.Lock()\n\t\tw.maps = currentUCMaps.ucmap\n\t\tcurrentUCMaps.Unlock()\n\t}\n\tif len(w.ProtocolUser) == 0 && len(w.User) > 0 {\n\t\tif idRegex.MatchString(w.User) {\n\t\t\tw.ProtocolUser = w.User\n\t\t} else if ui, ok := w.maps.user[w.User]; ok {\n\t\t\tw.ProtocolUser = bracket(ui.UserID)\n\t\t\tw.BotUser = ui.BotUser\n\t\t} else {\n\t\t\tw.ProtocolUser = w.User\n\t\t}\n\t}\n\tif len(w.ProtocolChannel) == 0 && len(w.Channel) > 0 {\n\t\tif idRegex.MatchString(w.Channel) {\n\t\t\tw.ProtocolChannel = w.Channel\n\t\t} else if ci, ok := w.maps.channel[w.Channel]; ok {\n\t\t\tw.ProtocolChannel = bracket(ci.ChannelID)\n\t\t} else {\n\t\t\tw.ProtocolChannel = w.Channel\n\t\t}\n\t}\n\n\tactivePipelines.Lock()\n\tif len(w.eid) == 0 {\n\t\tvar eid string\n\t\tfor {\n\t\t\t// 4 bytes of entropy per pipeline\n\t\t\tb := make([]byte, 4)\n\t\t\trand.Read(b)\n\t\t\teid = fmt.Sprintf(\"%02x%02x%02x%02x\", b[0], b[1], b[2], b[3])\n\t\t\tif _, ok := activePipelines.eids[eid]; !ok {\n\t\t\t\tactivePipelines.eids[eid] = struct{}{}\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tw.eid = eid\n\t}\n\tif parent != nil {\n\t\tparent._child = w\n\t\tw._parent = parent\n\t}\n\tactivePipelines.i[w.id] = w\n\tactivePipelines.Unlock()\n\tw.active = true\n}",
"func (a *amplifier) init(program, input []int) {\n\ta.state = make([]int, len(program))\n\tcopy(a.state, program)\n\ta.input = input\n}",
"func serializeProgram(prgrm *CXProgram, s *SerializedCXProgram) {\n\ts.Program = serializedProgram{}\n\tsPrgrm := &s.Program\n\tsPrgrm.PackagesOffset = int64(0)\n\tsPrgrm.PackagesSize = int64(len(prgrm.Packages))\n\n\tif pkgOff, found := s.PackagesMap[prgrm.CurrentPackage.Name]; found {\n\t\tsPrgrm.CurrentPackageOffset = int64(pkgOff)\n\t} else {\n\t\tpanic(\"package reference not found\")\n\t}\n\n\tsPrgrm.InputsOffset, sPrgrm.InputsSize = serializeSliceOfArguments(prgrm.ProgramInput, s)\n\tsPrgrm.OutputsOffset, sPrgrm.OutputsSize = serializeSliceOfArguments(prgrm.ProgramOutput, s)\n\n\tsPrgrm.CallStackOffset, sPrgrm.CallStackSize = serializeCalls(prgrm.CallStack[:prgrm.CallCounter], s)\n\n\tsPrgrm.CallCounter = int64(prgrm.CallCounter)\n\n\tsPrgrm.MemoryOffset = int64(0)\n\tsPrgrm.MemorySize = int64(len(PROGRAM.Memory))\n\n\tsPrgrm.HeapPointer = int64(prgrm.HeapPointer)\n\tsPrgrm.StackPointer = int64(prgrm.StackPointer)\n\tsPrgrm.StackSize = int64(prgrm.StackSize)\n\tsPrgrm.DataSegmentSize = int64(prgrm.DataSegmentSize)\n\tsPrgrm.DataSegmentStartsAt = int64(prgrm.DataSegmentStartsAt)\n\tsPrgrm.HeapSize = int64(prgrm.HeapSize)\n\tsPrgrm.HeapStartsAt = int64(prgrm.HeapStartsAt)\n\n\tsPrgrm.Terminated = serializeBoolean(prgrm.Terminated)\n\tsPrgrm.VersionOffset, sPrgrm.VersionSize = serializeString(prgrm.Version, s)\n}",
"func (obj *Device) SetSoftwareVertexProcessing(software bool) Error {\n\tret, _, _ := syscall.Syscall(\n\t\tobj.vtbl.SetSoftwareVertexProcessing,\n\t\t2,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptrBool(software),\n\t\t0,\n\t)\n\treturn toErr(ret)\n}",
"func (c *CmdBuff) SetActive(b bool) {\n\tc.mx.Lock()\n\t{\n\t\tc.active = b\n\t}\n\tc.mx.Unlock()\n\n\tc.fireActive(c.active)\n}",
"func (s *BaselimboListener) ExitProgram(ctx *ProgramContext) {}",
"func (coll *Collection) AttachCgroupProgram(secName string, cgroupPath string) error {\n\tprog, ok := coll.Programs[secName]\n\tif !ok {\n\t\treturn errors.Wrapf(\n\t\t\terrors.New(\"section not found\"),\n\t\t\t\"couldn't attach program %s\",\n\t\t\tsecName,\n\t\t)\n\t}\n\tif prog.IsCgroupProgram() {\n\t\treturn prog.AttachCgroup(cgroupPath)\n\t}\n\treturn errors.Wrapf(\n\t\terrors.New(\"not a cgroup program\"),\n\t\t\"couldn't attach program %s\",\n\t\tsecName,\n\t)\n}",
"func (my *Driver) UseLaunchExecutable(l launchFn) {\n\tmy.launchExe = l\n}",
"func (gl *WebGL) UseProgram(shaderProgram WebGLShaderProgram) {\n\tgl.context.Call(\"useProgram\", shaderProgram)\n}",
"func UseProgram(program Uint) {\n\tcprogram, _ := (C.GLuint)(program), cgoAllocsUnknown\n\tC.glUseProgram(cprogram)\n}",
"func (native *OpenGL) UseProgram(program uint32) {\n\tgl.UseProgram(program)\n}",
"func (c *Cras) SetActiveNode(ctx context.Context, node CrasNode) error {\n\tcmd := \"SetActiveOutputNode\"\n\tif node.IsInput {\n\t\tcmd = \"SetActiveInputNode\"\n\t}\n\treturn c.call(ctx, cmd, node.ID).Err\n}",
"func NewProgram(data []byte) *Program {\n\tp := new(Program)\n\tp.data = make([]byte, len(data))\n\tcopy(data, p.data)\n\tp.Pc = 0\n\treturn p\n}",
"func (m *CommunicationsIdentitySet) SetApplicationInstance(value Identityable)() {\n err := m.GetBackingStore().Set(\"applicationInstance\", value)\n if err != nil {\n panic(err)\n }\n}",
"func (c *Vrouter) SetInstanceActive(client client.Client, activeStatus *bool, ds *appsv1.DaemonSet, request reconcile.Request, object runtime.Object) error {\n\tif err := client.Get(context.TODO(), types.NamespacedName{Name: ds.Name, Namespace: request.Namespace},\n\t\tds); err != nil {\n\t\treturn err\n\t}\n\tactive := false\n\tif ds.Status.DesiredNumberScheduled == ds.Status.NumberReady {\n\t\tactive = true\n\t}\n\n\t*activeStatus = active\n\tif err := client.Status().Update(context.TODO(), object); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (r *LaunchPlanRepo) SetActive(\n\tctx context.Context, toEnable models.LaunchPlan, toDisable *models.LaunchPlan) error {\n\ttimer := r.launchPlanMetrics.SetActiveDuration.Start()\n\tdefer timer.Stop()\n\t// Use a transaction to guarantee no partial updates.\n\ttx := r.db.Begin()\n\n\t// There is a launch plan to disable as part of this transaction\n\tif toDisable != nil {\n\t\ttx.Model(&toDisable).UpdateColumns(toDisable)\n\t\tif err := tx.Error; err != nil {\n\t\t\ttx.Rollback()\n\t\t\treturn r.errorTransformer.ToFlyteAdminError(err)\n\t\t}\n\t}\n\n\t// And update the desired version.\n\ttx.Model(&toEnable).UpdateColumns(toEnable)\n\tif err := tx.Error; err != nil {\n\t\ttx.Rollback()\n\t\treturn r.errorTransformer.ToFlyteAdminError(err)\n\t}\n\tif err := tx.Commit().Error; err != nil {\n\t\treturn r.errorTransformer.ToFlyteAdminError(err)\n\t}\n\treturn nil\n}",
"func (c *Context) UseProgram(p gfx.Program) gfx.ContextStateValue {\n\treturn s.CSV{\n\t\tValue: p,\n\t\tDefaultValue: nil, // TODO(slimsag): verify\n\t\tKey: csUseProgram,\n\t\tGLCall: c.glUseProgram,\n\t}\n}",
"func (m *Workbook) SetApplication(value WorkbookApplicationable)() {\n m.application = value\n}",
"func ProgramIsolatedTransformer(args ...string) IsolatedTransformer {\n\treturn func(ctx context.Context, dir string) error {\n\t\tlogging.Infof(ctx, \"Invoking transform_program: %q\", args)\n\t\ttProg := exec.CommandContext(ctx, args[0], args[1:]...)\n\t\ttProg.Stdout = os.Stderr\n\t\ttProg.Stderr = os.Stderr\n\t\ttProg.Dir = dir\n\t\treturn errors.Annotate(tProg.Run(), \"running transform_program\").Err()\n\t}\n}",
"func (r *Reconciler) setApplicationObjects(\n\tctx context.Context,\n\tinstance *v1alpha1.ServiceBindingRequest,\n\tobjs []string,\n) error {\n\tinstance.Status.BindingStatus = bindingSuccess\n\tinstance.Status.ApplicationObjects = objs\n\treturn r.client.Status().Update(ctx, instance)\n}",
"func (inst *DeprecatedCreateMasterEdition) SetSystemProgramAccount(systemProgram ag_solanago.PublicKey) *DeprecatedCreateMasterEdition {\n\tinst.AccountMetaSlice[10] = ag_solanago.Meta(systemProgram)\n\treturn inst\n}",
"func NewProgram() *Program {\n\treturn &Program{\n\t\tframe: EmptyFrame(),\n\t}\n}",
"func (win *Window) SetActive() {\n\tC.sfRenderWindow_setActive(win.win, C.sfTrue)\n}",
"func (nm *NodeMonitor) setProcess(proc *os.Process) {\n\tnm.mutex.Lock()\n\tnm.process = proc\n\tnm.mutex.Unlock()\n}",
"func NewApplicationSetCommand(clientOpts *argocdclient.ClientOptions) *cobra.Command {\n\tvar (\n\t\tappOpts appOptions\n\t)\n\tvar command = &cobra.Command{\n\t\tUse: \"set\",\n\t\tShort: fmt.Sprintf(\"%s app set APPNAME\", cliName),\n\t\tRun: func(c *cobra.Command, args []string) {\n\t\t\tif len(args) != 1 {\n\t\t\t\tc.HelpFunc()(c, args)\n\t\t\t\tos.Exit(1)\n\t\t\t}\n\t\t\tappName := args[0]\n\t\t\tconn, appIf := argocdclient.NewClientOrDie(clientOpts).NewApplicationClientOrDie()\n\t\t\tdefer util.Close(conn)\n\t\t\tapp, err := appIf.Get(context.Background(), &application.ApplicationQuery{Name: appName})\n\t\t\terrors.CheckError(err)\n\t\t\tvisited := 0\n\t\t\tc.Flags().Visit(func(f *pflag.Flag) {\n\t\t\t\tvisited++\n\t\t\t\tswitch f.Name {\n\t\t\t\tcase \"repo\":\n\t\t\t\t\tapp.Spec.Source.RepoURL = appOpts.repoURL\n\t\t\t\tcase \"path\":\n\t\t\t\t\tapp.Spec.Source.Path = appOpts.appPath\n\t\t\t\tcase \"env\":\n\t\t\t\t\tapp.Spec.Source.Environment = appOpts.env\n\t\t\t\tcase \"revision\":\n\t\t\t\t\tapp.Spec.Source.TargetRevision = appOpts.revision\n\t\t\t\tcase \"dest-server\":\n\t\t\t\t\tif app.Spec.Destination == nil {\n\t\t\t\t\t\tapp.Spec.Destination = &argoappv1.ApplicationDestination{}\n\t\t\t\t\t}\n\t\t\t\t\tapp.Spec.Destination.Server = appOpts.destServer\n\t\t\t\tcase \"dest-namespace\":\n\t\t\t\t\tif app.Spec.Destination == nil {\n\t\t\t\t\t\tapp.Spec.Destination = &argoappv1.ApplicationDestination{}\n\t\t\t\t\t}\n\t\t\t\t\tapp.Spec.Destination.Namespace = appOpts.destNamespace\n\t\t\t\t}\n\t\t\t})\n\t\t\tif visited == 0 {\n\t\t\t\tlog.Error(\"Please set at least one option to update\")\n\t\t\t\tc.HelpFunc()(c, args)\n\t\t\t\tos.Exit(1)\n\t\t\t}\n\t\t\tsetParameterOverrides(app, appOpts.parameters)\n\t\t\t_, err = appIf.Update(context.Background(), app)\n\t\t\terrors.CheckError(err)\n\t\t},\n\t}\n\taddAppFlags(command, &appOpts)\n\treturn command\n}",
"func main() {\n\ta := &computer{\"Apple\"}\n\tb := a\n\tchange(b)\n\tchange(b)\n}",
"func (gp *GenginePool) SetExecModel(execModel int) error {\n\tgp.updateLock.Lock()\n\tdefer gp.updateLock.Unlock()\n\tif execModel != SortModel && execModel != ConcurrentModel && execModel != MixModel && execModel != InverseMixModel {\n\t\treturn errors.New(fmt.Sprintf(\"exec model must be SORT_MODEL(1) or CONCOURRENT_MODEL(2) or MIX_MODEL(3) or INVERSE_MIX_MODEL(4), now it is %d\", execModel))\n\t} else {\n\t\tgp.execModel = execModel\n\t}\n\treturn nil\n}",
"func (s *UpdatePipelineNotificationsOutput) SetPipeline(v *Pipeline) *UpdatePipelineNotificationsOutput {\n\ts.Pipeline = v\n\treturn s\n}",
"func (c *CmdReal) SetProcess(process *os.Process) {\n\tc.cmd.Process = process\n}",
"func (f *Filter) Program() unsafe.Pointer {\n return unsafe.Pointer(&f.program)\n}",
"func ValidateProgramPipeline(pipeline uint32) {\n\tsyscall.Syscall(gpValidateProgramPipeline, 1, uintptr(pipeline), 0, 0)\n}",
"func (s *Service) SetPipeline(v *ServicePipeline) *Service {\n\ts.Pipeline = v\n\treturn s\n}",
"func (p *MasterWorker) SetActivo(key string, act bool) {\n\tif p.ValidWork(key) {\n\t\tp.workers[key].SetActivo(act)\n\t}\n}",
"func (a *PipelineControllerApiService) ResumePipelineUsingPUT(ctx _context.Context, id string) apiResumePipelineUsingPUTRequest {\n\treturn apiResumePipelineUsingPUTRequest{\n\t\tapiService: a,\n\t\tctx: ctx,\n\t\tid: id,\n\t}\n}",
"func (app *App) Set(a *teoapi.Application) (uuid gocql.UUID, err error) {\n\tstmt, names := qb.Update(\"applications\").Set(\n\t\t\"name\", \"descr\", \"author\", \"license\", \"goget\", \"git\",\n\t).Where(qb.Eq(\"uuid\")).ToCql()\n\tq := gocqlx.Query(app.tre.session.Query(stmt), names).BindStruct(a)\n\tif err = q.ExecRelease(); err != nil {\n\t\tfmt.Printf(\"List Error: %s\\n\", err.Error())\n\t\treturn\n\t}\n\tuuid = a.UUID\n\treturn\n}",
"func (am *Manager) AddProgram(set ShaderSet, prog uint32) error {\n\tif _, ok := am.GetProgram(set); ok {\n\t\treturn fmt.Errorf(\"asset.Manager.AddProgram error: Program '%v' already exists\", set)\n\t}\n\n\tLogger.Printf(\"Manager: adding Program '%v'\\n\", set)\n\tam.Programs[set] = prog\n\n\treturn nil\n}",
"func (env *Environment) SetFocus(module *Module) {\n\tif env != module.env {\n\t\tpanic(\"SetFocus to module from another environment\")\n\t}\n\tC.EnvFocus(env.env, module.modptr)\n}",
"func (pool *Pool) Pipeline() *Pipeline {\n\treturn BlankPipeline(int64(pool.DB))\n}",
"func (d *DSP) SetActive(active bool) error {\n\tres := C.FMOD_DSP_SetActive(d.cptr, getBool(active))\n\treturn errs[res]\n}",
"func (z *Zest) SetCli(cli cli.App) {\n\t*z.cli = cli\n}",
"func (s *CancelServicePipelineDeploymentOutput) SetPipeline(v *ServicePipeline) *CancelServicePipelineDeploymentOutput {\n\ts.Pipeline = v\n\treturn s\n}",
"func Pipeline(g *graph.Graph, id string, factory *Factory, top Values) executor.Pipeline {\n\tp := pipelineGen{Graph: g, RenderingPlant: factory, Top: top, ID: id}\n\treturn executor.NewPipeline().\n\t\tAndThen(p.maybeTransformRoot).\n\t\tAndThen(p.prepareNode).\n\t\tAndThen(p.wrapTask)\n}",
"func PipelineFromLibrary(p *library.Pipeline) *Pipeline {\n\tpipeline := &Pipeline{\n\t\tID: sql.NullInt64{Int64: p.GetID(), Valid: true},\n\t\tRepoID: sql.NullInt64{Int64: p.GetRepoID(), Valid: true},\n\t\tCommit: sql.NullString{String: p.GetCommit(), Valid: true},\n\t\tFlavor: sql.NullString{String: p.GetFlavor(), Valid: true},\n\t\tPlatform: sql.NullString{String: p.GetPlatform(), Valid: true},\n\t\tRef: sql.NullString{String: p.GetRef(), Valid: true},\n\t\tType: sql.NullString{String: p.GetType(), Valid: true},\n\t\tVersion: sql.NullString{String: p.GetVersion(), Valid: true},\n\t\tExternalSecrets: sql.NullBool{Bool: p.GetExternalSecrets(), Valid: true},\n\t\tInternalSecrets: sql.NullBool{Bool: p.GetInternalSecrets(), Valid: true},\n\t\tServices: sql.NullBool{Bool: p.GetServices(), Valid: true},\n\t\tStages: sql.NullBool{Bool: p.GetStages(), Valid: true},\n\t\tSteps: sql.NullBool{Bool: p.GetSteps(), Valid: true},\n\t\tTemplates: sql.NullBool{Bool: p.GetTemplates(), Valid: true},\n\t\tData: p.GetData(),\n\t}\n\n\treturn pipeline.Nullify()\n}",
"func (job *JobObject) Assign(pid uint32) error {\n\tjob.handleLock.RLock()\n\tdefer job.handleLock.RUnlock()\n\n\tif job.handle == 0 {\n\t\treturn ErrAlreadyClosed\n\t}\n\n\tif pid == 0 {\n\t\treturn errors.New(\"invalid pid: 0\")\n\t}\n\thProc, err := windows.OpenProcess(winapi.PROCESS_ALL_ACCESS, true, pid)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer windows.Close(hProc)\n\treturn windows.AssignProcessToJobObject(job.handle, hProc)\n}",
"func (b *CobblerBackend) setProfile(hosts []string, profile string) error {\n\trunner := DefaultRunner(func(host string) error {\n\t\t_, err := processWrapper(\"cobbler\", \"system\", \"edit\", \"--name=\"+host, \"--profile=\"+profile)\n\t\treturn err\n\t})\n\n\tif err := runner.RunAll(hosts); err != nil {\n\t\treturn fmt.Errorf(\"unable to set cobbler profile: %v\", err)\n\t}\n\n\treturn nil\n}"
] | [
"0.68859893",
"0.62889445",
"0.596266",
"0.59011215",
"0.58729166",
"0.5695654",
"0.5693641",
"0.5625199",
"0.5574988",
"0.5567587",
"0.5567587",
"0.55162305",
"0.5494934",
"0.54705536",
"0.5433244",
"0.53992385",
"0.53609896",
"0.5354863",
"0.5314572",
"0.52216446",
"0.5211734",
"0.51600075",
"0.5071708",
"0.50573766",
"0.5046422",
"0.49867857",
"0.49583778",
"0.49492297",
"0.49420077",
"0.49420077",
"0.49193776",
"0.48748",
"0.48587355",
"0.48537847",
"0.48491496",
"0.48491496",
"0.4845402",
"0.4845402",
"0.4841569",
"0.4834056",
"0.48316634",
"0.48129228",
"0.48102158",
"0.4796979",
"0.4794257",
"0.4790948",
"0.47668612",
"0.47658885",
"0.47553995",
"0.47418827",
"0.47339568",
"0.47305655",
"0.473028",
"0.47184765",
"0.4712613",
"0.47084802",
"0.4700777",
"0.46931207",
"0.4659487",
"0.46577707",
"0.46570286",
"0.46483776",
"0.46212506",
"0.46207356",
"0.46163413",
"0.4611836",
"0.46030378",
"0.45991012",
"0.45951423",
"0.45905647",
"0.45832306",
"0.4582209",
"0.45721945",
"0.45716712",
"0.45651072",
"0.4546449",
"0.45457777",
"0.4539275",
"0.4527576",
"0.45207787",
"0.4496722",
"0.44925514",
"0.4488521",
"0.44705236",
"0.44647872",
"0.44642177",
"0.4462095",
"0.44482505",
"0.44402844",
"0.4420543",
"0.44114697",
"0.4409711",
"0.43994233",
"0.43931866",
"0.4389598",
"0.4388195",
"0.43871444",
"0.4386075",
"0.4367469"
] | 0.57121 | 6 |
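The record above (query "set the active program object for a program pipeline object") pairs the go-gl wrapper for glActiveShaderProgram with a set of near-miss negatives such as UseProgram, UseProgramStages and BindProgramPipeline. The distinction the retriever has to learn is narrow: ActiveShaderProgram only selects which program of a bound pipeline object receives subsequent plain glUniform* calls; it does not decide which shader stages execute. A minimal usage sketch follows, assuming a current OpenGL 4.1+ context, the go-gl bindings (the v4.5-core import path is an assumption) and two already-linked separable program objects — the setupPipeline, vertProg and fragProg names are hypothetical, not part of the record:

package glpipeline

import "github.com/go-gl/gl/v4.5-core/gl"

// setupPipeline wires two separable programs into a single program pipeline
// object and then marks fragProg as the pipeline's active program, so that
// subsequent plain gl.Uniform* calls (those without an explicit program
// argument) are routed to fragProg. Both programs are assumed to be linked
// with PROGRAM_SEPARABLE enabled; creating them is out of scope here.
func setupPipeline(vertProg, fragProg uint32) uint32 {
	var pipeline uint32
	gl.GenProgramPipelines(1, &pipeline)
	gl.BindProgramPipeline(pipeline)

	// Each UseProgramStages call attaches a program to the stages it provides.
	gl.UseProgramStages(pipeline, gl.VERTEX_SHADER_BIT, vertProg)
	gl.UseProgramStages(pipeline, gl.FRAGMENT_SHADER_BIT, fragProg)

	// The call the query describes: it does not change which stages run, it
	// only sets the program that receives uniform updates while this pipeline
	// is bound and no program has been made current via gl.UseProgram.
	gl.ActiveShaderProgram(pipeline, fragProg)
	return pipeline
}

In practice many codebases bypass ActiveShaderProgram and set uniforms with the glProgramUniform* family, which takes an explicit program id, so the call mainly matters for code that still relies on plain glUniform* updates — which is what makes separating this wrapper from the other pipeline-related snippets above a genuinely fine-grained distinction.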
select active texture unit | func ActiveTexture(texture uint32) {
C.glowActiveTexture(gpActiveTexture, (C.GLenum)(texture))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func ActiveTexture(texture uint32) {\n C.glowActiveTexture(gpActiveTexture, (C.GLenum)(texture))\n}",
"func ActiveTexture(texture Enum) {\n\tgl.ActiveTexture(uint32(texture))\n}",
"func ActiveTexture(texture uint32) {\n\tsyscall.Syscall(gpActiveTexture, 1, uintptr(texture), 0, 0)\n}",
"func (gl *WebGL) ActiveTexture(target GLEnum) {\n\tgl.context.Call(\"activeTexture\", target)\n}",
"func ClientActiveTexture(texture uint32) {\n C.glowClientActiveTexture(gpClientActiveTexture, (C.GLenum)(texture))\n}",
"func (native *OpenGL) ActiveTexture(texture uint32) {\n\tgl.ActiveTexture(texture)\n}",
"func SetActiveTexture(texture Enum) {\n\tctexture, _ := (C.GLenum)(texture), cgoAllocsUnknown\n\tC.glActiveTexture(ctexture)\n}",
"func (debugging *debuggingOpenGL) ActiveTexture(texture uint32) {\n\tdebugging.recordEntry(\"ActiveTexture\", texture)\n\tdebugging.gl.ActiveTexture(texture)\n\tdebugging.recordExit(\"ActiveTexture\")\n}",
"func (bm Blendmap) Texture() *gl.Texture {\n\treturn bm.Map.id\n}",
"func ClientActiveTexture(texture uint32) {\n\tsyscall.Syscall(gpClientActiveTexture, 1, uintptr(texture), 0, 0)\n}",
"func GetActiveUniform(program uint32, index uint32, bufSize int32, length *int32, size *int32, xtype *uint32, name *int8) {\n C.glowGetActiveUniform(gpGetActiveUniform, (C.GLuint)(program), (C.GLuint)(index), (C.GLsizei)(bufSize), (*C.GLsizei)(unsafe.Pointer(length)), (*C.GLint)(unsafe.Pointer(size)), (*C.GLenum)(unsafe.Pointer(xtype)), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func (self *TileSprite) SetTexture1O(texture *Texture, destroy bool) {\n self.Object.Call(\"setTexture\", texture, destroy)\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTextureUnit, 2, uintptr(unit), uintptr(texture), 0)\n}",
"func (self *TileSprite) TilingTexture() *PIXITexture{\n return &PIXITexture{self.Object.Get(\"tilingTexture\")}\n}",
"func GetActiveUniform(program uint32, index uint32, bufSize int32, length *int32, size *int32, xtype *uint32, name *uint8) {\n\tC.glowGetActiveUniform(gpGetActiveUniform, (C.GLuint)(program), (C.GLuint)(index), (C.GLsizei)(bufSize), (*C.GLsizei)(unsafe.Pointer(length)), (*C.GLint)(unsafe.Pointer(size)), (*C.GLenum)(unsafe.Pointer(xtype)), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func GetActiveUniform(program uint32, index uint32, bufSize int32, length *int32, size *int32, xtype *uint32, name *uint8) {\n\tC.glowGetActiveUniform(gpGetActiveUniform, (C.GLuint)(program), (C.GLuint)(index), (C.GLsizei)(bufSize), (*C.GLsizei)(unsafe.Pointer(length)), (*C.GLint)(unsafe.Pointer(size)), (*C.GLenum)(unsafe.Pointer(xtype)), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func (self *TileSprite) SetTextureA(member *Texture) {\n self.Object.Set(\"texture\", member)\n}",
"func (self *TileSprite) TintedTexture() *Canvas{\n return &Canvas{self.Object.Get(\"tintedTexture\")}\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n C.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func (t *Three) Texture() *Texture {\n\tp := t.ctx.Get(\"Texture\")\n\treturn TextureFromJSObject(p)\n}",
"func ClientActiveTexture(texture uint32) {\n\tC.glowClientActiveTexture(gpClientActiveTexture, (C.GLenum)(texture))\n}",
"func (self *TileSprite) SetTilingTextureA(member *PIXITexture) {\n self.Object.Set(\"tilingTexture\", member)\n}",
"func (self *TileSprite) Texture() *Texture{\n return &Texture{self.Object.Get(\"texture\")}\n}",
"func (self *GameObjectCreator) RenderTexture1O(width int) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", width)}\n}",
"func (self *Graphics) GenerateTexture1O(resolution int) *Texture{\n return &Texture{self.Object.Call(\"generateTexture\", resolution)}\n}",
"func (md MetalDrawable) Texture() mtl.Texture {\n\treturn mtl.NewTexture(C.MetalDrawable_Texture(md.metalDrawable))\n}",
"func (tx *TextureBase) Activate(sc *Scene, texNo int) {\n\tif tx.Tex != nil {\n\t\ttx.Tex.SetBotZero(tx.Bot0)\n\t\ttx.Tex.Activate(texNo)\n\t}\n}",
"func (self *TileSprite) SetTexture(texture *Texture) {\n self.Object.Call(\"setTexture\", texture)\n}",
"func (self *TileSprite) SetTintedTextureA(member *Canvas) {\n self.Object.Set(\"tintedTexture\", member)\n}",
"func (self *TileSprite) LoadTexture1O(key interface{}, frame interface{}) {\n self.Object.Call(\"loadTexture\", key, frame)\n}",
"func (f *Font) GetTexture() *Texture { return f.texture }",
"func GetActiveUniform(program uint32, index uint32, bufSize int32, length *int32, size *int32, xtype *uint32, name *uint8) {\n\tsyscall.Syscall9(gpGetActiveUniform, 7, uintptr(program), uintptr(index), uintptr(bufSize), uintptr(unsafe.Pointer(length)), uintptr(unsafe.Pointer(size)), uintptr(unsafe.Pointer(xtype)), uintptr(unsafe.Pointer(name)), 0, 0)\n}",
"func GetActiveUniform(program Uint, index Uint, bufSize Sizei, length *Sizei, size *Int, kind *Enum, name []byte) {\n\tcprogram, _ := (C.GLuint)(program), cgoAllocsUnknown\n\tcindex, _ := (C.GLuint)(index), cgoAllocsUnknown\n\tcbufSize, _ := (C.GLsizei)(bufSize), cgoAllocsUnknown\n\tclength, _ := (*C.GLsizei)(unsafe.Pointer(length)), cgoAllocsUnknown\n\tcsize, _ := (*C.GLint)(unsafe.Pointer(size)), cgoAllocsUnknown\n\tckind, _ := (*C.GLenum)(unsafe.Pointer(kind)), cgoAllocsUnknown\n\tcname, _ := (*C.GLchar)(unsafe.Pointer((*sliceHeader)(unsafe.Pointer(&name)).Data)), cgoAllocsUnknown\n\tC.glGetActiveUniform(cprogram, cindex, cbufSize, clength, csize, ckind, cname)\n}",
"func (c *Canvas) Texture() *glhf.Texture {\n\treturn c.gf.Texture()\n}",
"func GetActiveUniformName(program uint32, uniformIndex uint32, bufSize int32, length *int32, uniformName *int8) {\n C.glowGetActiveUniformName(gpGetActiveUniformName, (C.GLuint)(program), (C.GLuint)(uniformIndex), (C.GLsizei)(bufSize), (*C.GLsizei)(unsafe.Pointer(length)), (*C.GLchar)(unsafe.Pointer(uniformName)))\n}",
"func GetActiveUniformsiv(program uint32, uniformCount int32, uniformIndices *uint32, pname uint32, params *int32) {\n C.glowGetActiveUniformsiv(gpGetActiveUniformsiv, (C.GLuint)(program), (C.GLsizei)(uniformCount), (*C.GLuint)(unsafe.Pointer(uniformIndices)), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func GetTextureSubImage(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, bufSize int32, pixels unsafe.Pointer) {\n\tsyscall.Syscall12(gpGetTextureSubImage, 12, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(zoffset), uintptr(width), uintptr(height), uintptr(depth), uintptr(format), uintptr(xtype), uintptr(bufSize), uintptr(pixels))\n}",
"func GetActiveUniformBlockName(program uint32, uniformBlockIndex uint32, bufSize int32, length *int32, uniformBlockName *int8) {\n C.glowGetActiveUniformBlockName(gpGetActiveUniformBlockName, (C.GLuint)(program), (C.GLuint)(uniformBlockIndex), (C.GLsizei)(bufSize), (*C.GLsizei)(unsafe.Pointer(length)), (*C.GLchar)(unsafe.Pointer(uniformBlockName)))\n}",
"func (self *GameObjectCreator) RenderTexture3O(width int, height int, key string) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", width, height, key)}\n}",
"func BindTexture(target Enum, t Texture) {\n\tgl.BindTexture(uint32(target), t.Value)\n}",
"func (tx *TextureFile) Activate(sc *Scene, texNo int) {\n\tif tx.Tex == nil {\n\t\ttx.Init(sc)\n\t}\n\ttx.Tex.SetBotZero(tx.Bot0)\n\ttx.Tex.Activate(texNo)\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n\tsyscall.Syscall9(gpTextureView, 8, uintptr(texture), uintptr(target), uintptr(origtexture), uintptr(internalformat), uintptr(minlevel), uintptr(numlevels), uintptr(minlayer), uintptr(numlayers), 0)\n}",
"func (fnt *Font) Texture() *Texture {\n\treturn fnt.texture\n}",
"func GetTextureSubImage(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, bufSize int32, pixels unsafe.Pointer) {\n\tC.glowGetTextureSubImage(gpGetTextureSubImage, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), (C.GLsizei)(bufSize), pixels)\n}",
"func GetTextureSubImage(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, bufSize int32, pixels unsafe.Pointer) {\n\tC.glowGetTextureSubImage(gpGetTextureSubImage, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), (C.GLsizei)(bufSize), pixels)\n}",
"func GetActiveUniformBlockiv(program uint32, uniformBlockIndex uint32, pname uint32, params *int32) {\n\tC.glowGetActiveUniformBlockiv(gpGetActiveUniformBlockiv, (C.GLuint)(program), (C.GLuint)(uniformBlockIndex), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func GetActiveUniformBlockiv(program uint32, uniformBlockIndex uint32, pname uint32, params *int32) {\n\tC.glowGetActiveUniformBlockiv(gpGetActiveUniformBlockiv, (C.GLuint)(program), (C.GLuint)(uniformBlockIndex), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n C.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func (self *Graphics) GenerateTexture3O(resolution int, scaleMode int, padding int) *Texture{\n return &Texture{self.Object.Call(\"generateTexture\", resolution, scaleMode, padding)}\n}",
"func (self *TileSprite) LoadTexture(key interface{}) {\n self.Object.Call(\"loadTexture\", key)\n}",
"func (self *Graphics) GenerateTexture() *Texture{\n return &Texture{self.Object.Call(\"generateTexture\")}\n}",
"func TexStorage2D(target uint32, levels int32, internalformat uint32, width int32, height int32) {\n C.glowTexStorage2D(gpTexStorage2D, (C.GLenum)(target), (C.GLsizei)(levels), (C.GLenum)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func IsTexture(texture uint32) bool {\n ret := C.glowIsTexture(gpIsTexture, (C.GLuint)(texture))\n return ret == TRUE\n}",
"func (self *TileSprite) BlendMode() int{\n return self.Object.Get(\"blendMode\").Int()\n}",
"func (am *Manager) GetTexture(name string) (*Texture, bool) {\n\tif tex, ok := am.Textures[name]; ok {\n\t\treturn tex, ok\n\t}\n\n\tif am.Parent != nil {\n\t\treturn am.Parent.GetTexture(name)\n\t}\n\n\treturn nil, false\n}",
"func GetActiveUniformName(program uint32, uniformIndex uint32, bufSize int32, length *int32, uniformName *uint8) {\n\tC.glowGetActiveUniformName(gpGetActiveUniformName, (C.GLuint)(program), (C.GLuint)(uniformIndex), (C.GLsizei)(bufSize), (*C.GLsizei)(unsafe.Pointer(length)), (*C.GLchar)(unsafe.Pointer(uniformName)))\n}",
"func GetActiveUniformName(program uint32, uniformIndex uint32, bufSize int32, length *int32, uniformName *uint8) {\n\tC.glowGetActiveUniformName(gpGetActiveUniformName, (C.GLuint)(program), (C.GLuint)(uniformIndex), (C.GLsizei)(bufSize), (*C.GLsizei)(unsafe.Pointer(length)), (*C.GLchar)(unsafe.Pointer(uniformName)))\n}",
"func GetActiveUniformBlockName(program uint32, uniformBlockIndex uint32, bufSize int32, length *int32, uniformBlockName *uint8) {\n\tC.glowGetActiveUniformBlockName(gpGetActiveUniformBlockName, (C.GLuint)(program), (C.GLuint)(uniformBlockIndex), (C.GLsizei)(bufSize), (*C.GLsizei)(unsafe.Pointer(length)), (*C.GLchar)(unsafe.Pointer(uniformBlockName)))\n}",
"func GetActiveUniformBlockName(program uint32, uniformBlockIndex uint32, bufSize int32, length *int32, uniformBlockName *uint8) {\n\tC.glowGetActiveUniformBlockName(gpGetActiveUniformBlockName, (C.GLuint)(program), (C.GLuint)(uniformBlockIndex), (C.GLsizei)(bufSize), (*C.GLsizei)(unsafe.Pointer(length)), (*C.GLchar)(unsafe.Pointer(uniformBlockName)))\n}",
"func (obj *Device) GetTexture(stage uint32) (*BaseTexture, Error) {\n\tvar tex *BaseTexture\n\tret, _, _ := syscall.Syscall(\n\t\tobj.vtbl.GetTexture,\n\t\t3,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(stage),\n\t\tuintptr(unsafe.Pointer(&tex)),\n\t)\n\treturn tex, toErr(ret)\n}",
"func GetActiveAttrib(program uint32, index uint32, bufSize int32, length *int32, size *int32, xtype *uint32, name *int8) {\n C.glowGetActiveAttrib(gpGetActiveAttrib, (C.GLuint)(program), (C.GLuint)(index), (C.GLsizei)(bufSize), (*C.GLsizei)(unsafe.Pointer(length)), (*C.GLint)(unsafe.Pointer(size)), (*C.GLenum)(unsafe.Pointer(xtype)), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func FramebufferTexture(target uint32, attachment uint32, texture uint32, level int32) {\n C.glowFramebufferTexture(gpFramebufferTexture, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level))\n}",
"func TexSubImage1D(target uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage1D(gpTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage3D(gpTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func GetActiveUniformsiv(program uint32, uniformCount int32, uniformIndices *uint32, pname uint32, params *int32) {\n\tC.glowGetActiveUniformsiv(gpGetActiveUniformsiv, (C.GLuint)(program), (C.GLsizei)(uniformCount), (*C.GLuint)(unsafe.Pointer(uniformIndices)), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func GetActiveUniformsiv(program uint32, uniformCount int32, uniformIndices *uint32, pname uint32, params *int32) {\n\tC.glowGetActiveUniformsiv(gpGetActiveUniformsiv, (C.GLuint)(program), (C.GLsizei)(uniformCount), (*C.GLuint)(unsafe.Pointer(uniformIndices)), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func ActiveShaderProgram(pipeline uint32, program uint32) {\n C.glowActiveShaderProgram(gpActiveShaderProgram, (C.GLuint)(pipeline), (C.GLuint)(program))\n}",
"func TextureSubImage1D(texture uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall9(gpTextureSubImage1D, 7, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(width), uintptr(format), uintptr(xtype), uintptr(pixels), 0, 0)\n}",
"func (self *TileSprite) LoadTexture2O(key interface{}, frame interface{}, stopAnimation bool) {\n self.Object.Call(\"loadTexture\", key, frame, stopAnimation)\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n\tC.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n\tC.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func (self *GameObjectCreator) RenderTexture2O(width int, height int) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", width, height)}\n}",
"func (self *GameObjectCreator) RenderTexture4O(width int, height int, key string, addToCache bool) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", width, height, key, addToCache)}\n}",
"func GetActiveUniform(p Program, index uint32) (name string, size int, ty Enum) {\n\tvar length, si int32\n\tvar typ uint32\n\tname = strings.Repeat(\"\\x00\", 256)\n\tcname := gl.Str(name)\n\tgl.GetActiveUniform(p.Value, uint32(index), int32(len(name)-1), &length, &si, &typ, cname)\n\tname = name[:strings.IndexRune(name, 0)]\n\treturn name, int(si), Enum(typ)\n}",
"func (obj *Device) GetCurrentTexturePalette() (paletteNumber uint, err Error) {\n\tret, _, _ := syscall.Syscall(\n\t\tobj.vtbl.GetCurrentTexturePalette,\n\t\t2,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(unsafe.Pointer(&paletteNumber)),\n\t\t0,\n\t)\n\terr = toErr(ret)\n\treturn\n}",
"func BindTexture(target Enum, texture Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tctexture, _ := (C.GLuint)(texture), cgoAllocsUnknown\n\tC.glBindTexture(ctarget, ctexture)\n}",
"func (self *GameObjectCreator) RenderTexture() *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\")}\n}",
"func (tex Texture) Sub(ctx gl.Context, lvl int, width int, height int, data []byte) {\n\tctx.TexSubImage2D(gl.TEXTURE_2D, lvl, 0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, data)\n\tif lvl > 0 {\n\t\tctx.GenerateMipmap(gl.TEXTURE_2D)\n\t}\n}",
"func GetActiveUniformBlockiv(program uint32, uniformBlockIndex uint32, pname uint32, params *int32) {\n\tsyscall.Syscall6(gpGetActiveUniformBlockiv, 4, uintptr(program), uintptr(uniformBlockIndex), uintptr(pname), uintptr(unsafe.Pointer(params)), 0, 0)\n}",
"func TexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage2D(gpTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func GetActiveUniformsiv(program uint32, uniformCount int32, uniformIndices *uint32, pname uint32, params *int32) {\n\tsyscall.Syscall6(gpGetActiveUniformsiv, 5, uintptr(program), uintptr(uniformCount), uintptr(unsafe.Pointer(uniformIndices)), uintptr(pname), uintptr(unsafe.Pointer(params)), 0)\n}",
"func (gstr *GlyphString) GetTexture() *Texture { return gstr.font.GetTexture() }",
"func EGLImageTargetTextureStorageEXT(texture uint32, image unsafe.Pointer, attrib_list *int32) {\n\tsyscall.Syscall(gpEGLImageTargetTextureStorageEXT, 3, uintptr(texture), uintptr(image), uintptr(unsafe.Pointer(attrib_list)))\n}",
"func (self *Graphics) GenerateTexture2O(resolution int, scaleMode int) *Texture{\n return &Texture{self.Object.Call(\"generateTexture\", resolution, scaleMode)}\n}",
"func TexBufferRange(target uint32, internalformat uint32, buffer uint32, offset int, size int) {\n C.glowTexBufferRange(gpTexBufferRange, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func TexStorage1D(target uint32, levels int32, internalformat uint32, width int32) {\n C.glowTexStorage1D(gpTexStorage1D, (C.GLenum)(target), (C.GLsizei)(levels), (C.GLenum)(internalformat), (C.GLsizei)(width))\n}",
"func (self *TileSprite) SetTextureDebugA(member bool) {\n self.Object.Set(\"textureDebug\", member)\n}",
"func (spriteBatch *SpriteBatch) SetTexture(newtexture ITexture) {\n\tspriteBatch.texture = newtexture\n}",
"func (obj *Device) SetTexture(sampler uint32, texture BaseTextureImpl) Error {\n\tvar base uintptr\n\tif texture != nil {\n\t\tbase = texture.baseTexturePointer()\n\t}\n\tret, _, _ := syscall.Syscall(\n\t\tobj.vtbl.SetTexture,\n\t\t3,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(sampler),\n\t\tbase,\n\t)\n\treturn toErr(ret)\n}",
"func (self *TileSprite) SetRefreshTextureA(member bool) {\n self.Object.Set(\"refreshTexture\", member)\n}",
"func (this *RectangleShape) GetTexture() *Texture {\n\treturn this.texture\n}",
"func BindTexture(target uint32, texture uint32) {\n C.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func Make(width, height int, internalformat int32, format, pixelType uint32,\n\tdata unsafe.Pointer, min, mag, s, t int32) Texture {\n\n\ttexture := Texture{0, gl.TEXTURE_2D, 0}\n\n\t// generate and bind texture\n\tgl.GenTextures(1, &texture.handle)\n\ttexture.Bind(0)\n\n\t// set texture properties\n\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, min)\n\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, mag)\n\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, s)\n\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, t)\n\n\t// specify a texture image\n\tgl.TexImage2D(gl.TEXTURE_2D, 0, internalformat, int32(width), int32(height),\n\t\t0, format, pixelType, data)\n\n\t// unbind texture\n\ttexture.Unbind()\n\n\treturn texture\n}",
"func TexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexSubImage3D(gpTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexSubImage3D(gpTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func (self *TileSprite) SetTextureI(args ...interface{}) {\n self.Object.Call(\"setTexture\", args)\n}",
"func TexImage2D(target uint32, level int32, internalformat int32, width int32, height int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexImage2D(gpTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}"
] | [
"0.6987255",
"0.6659273",
"0.6604428",
"0.6587843",
"0.63703996",
"0.6236901",
"0.6154774",
"0.60388523",
"0.5987232",
"0.5789151",
"0.57844275",
"0.5777845",
"0.57609534",
"0.57609534",
"0.5628775",
"0.55807346",
"0.55703926",
"0.55703926",
"0.5533289",
"0.5532886",
"0.5528525",
"0.5514004",
"0.54720837",
"0.54566807",
"0.5419754",
"0.54165983",
"0.5410714",
"0.53856367",
"0.53603595",
"0.5309446",
"0.5307062",
"0.5302133",
"0.5276813",
"0.52537996",
"0.52528507",
"0.5237042",
"0.5219494",
"0.51970005",
"0.5193814",
"0.5187465",
"0.5182816",
"0.5176764",
"0.5172267",
"0.51706386",
"0.5143093",
"0.5136382",
"0.5136382",
"0.5136158",
"0.5136158",
"0.5106296",
"0.508716",
"0.50757796",
"0.5055069",
"0.50335085",
"0.50302476",
"0.5025034",
"0.5020287",
"0.5011211",
"0.5011211",
"0.4984484",
"0.4984484",
"0.4975448",
"0.49703982",
"0.49670148",
"0.49653572",
"0.4965172",
"0.4964914",
"0.4964914",
"0.49535206",
"0.4944856",
"0.49417588",
"0.49314663",
"0.49314663",
"0.4921283",
"0.4919224",
"0.49184203",
"0.49176535",
"0.49153075",
"0.4912328",
"0.4906364",
"0.49062482",
"0.48948687",
"0.4887098",
"0.4868327",
"0.4867605",
"0.48622414",
"0.48510107",
"0.4848432",
"0.48476362",
"0.48450375",
"0.4842146",
"0.48146746",
"0.48091775",
"0.48070323",
"0.47972322",
"0.47868186",
"0.47868186",
"0.47858715",
"0.47758397"
] | 0.6225811 | 7 |
specify the alpha test function | func AlphaFunc(xfunc uint32, ref float32) {
C.glowAlphaFunc(gpAlphaFunc, (C.GLenum)(xfunc), (C.GLfloat)(ref))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (t *T) Alpha(name string, f interface{}) bool {\n\tt.Helper()\n\treturn t.invokeFeature(feature.Alpha, name, f)\n}",
"func AlphaFunc(xfunc uint32, ref float32) {\n\tsyscall.Syscall(gpAlphaFunc, 2, uintptr(xfunc), uintptr(math.Float32bits(ref)), 0)\n}",
"func SampleAlpha(alpha string) {\n\n}",
"func (n *Node) Alpha(alpha float64) *Node {\n\treturn n.setAttr(\"alpha\", fmt.Sprintf(\"%.0f\", 65535.0*alpha))\n}",
"func AlphaFunc(xfunc uint32, ref float32) {\n C.glowAlphaFunc(gpAlphaFunc, (C.GLenum)(xfunc), (C.GLfloat)(ref))\n}",
"func (p *BW) HasAlpha() bool {\n\treturn false\n}",
"func ALPHA() operators.Operator {\n\treturn operators.Alts(\n\t\t\"ALPHA\",\n\t\toperators.Range(\"%x41-5A\", []byte{65}, []byte{90}),\n\t\toperators.Range(\"%x61-7A\", []byte{97}, []byte{122}),\n\t)\n}",
"func (s *Surface) Alpha() float64 {\n\treturn s.Ctx.Get(\"globalAlpha\").Float()\n}",
"func CheckAlpha(alpha float64) error {\n\tif alpha <= 0 || alpha >= 1 || math.IsNaN(alpha) || math.IsInf(alpha, 0) {\n\t\treturn fmt.Errorf(\"Alpha is %f, must be within (0, 1) and finite\", alpha)\n\t}\n\treturn nil\n}",
"func TestEDDiscretionaryDataAlphaNumeric(t *testing.T) {\n\ttestEDDiscretionaryDataAlphaNumeric(t)\n}",
"func TestEDdfiAccountNumberAlphaNumeric(t *testing.T) {\n\ttestEDdfiAccountNumberAlphaNumeric(t)\n}",
"func isAlpha(fl FieldLevel) bool {\n\treturn alphaRegex.MatchString(fl.Field().String())\n}",
"func TestBetaToAlphaConversion(t *testing.T) {\n\ttests := []struct {\n\t\tname string\n\t\tinput *v1beta1.ArgoCD\n\t\texpectedOutput *ArgoCD\n\t}{\n\t\t{\n\t\t\tname: \"ArgoCD Example - Empty\",\n\t\t\tinput: makeTestArgoCDBeta(func(cr *v1beta1.ArgoCD) {}),\n\t\t\texpectedOutput: makeTestArgoCDAlpha(func(cr *ArgoCD) {}),\n\t\t},\n\t\t{\n\t\t\tname: \"ArgoCD Example - Image + ExtraConfig\",\n\t\t\tinput: makeTestArgoCDBeta(func(cr *v1beta1.ArgoCD) {\n\t\t\t\tcr.Spec.Image = \"test-image\"\n\t\t\t\tcr.Spec.ExtraConfig = map[string]string{\n\t\t\t\t\t\"ping\": \"pong\",\n\t\t\t\t}\n\t\t\t}),\n\t\t\texpectedOutput: makeTestArgoCDAlpha(func(cr *ArgoCD) {\n\t\t\t\tcr.Spec.Image = \"test-image\"\n\t\t\t\tcr.Spec.ExtraConfig = map[string]string{\n\t\t\t\t\t\"ping\": \"pong\",\n\t\t\t\t}\n\t\t\t}),\n\t\t},\n\t\t{\n\t\t\tname: \"ArgoCD Example - Dex + RBAC\",\n\t\t\tinput: makeTestArgoCDBeta(func(cr *v1beta1.ArgoCD) {\n\t\t\t\tcr.Spec.SSO = &v1beta1.ArgoCDSSOSpec{\n\t\t\t\t\tProvider: v1beta1.SSOProviderTypeDex,\n\t\t\t\t\tDex: &v1beta1.ArgoCDDexSpec{\n\t\t\t\t\t\tOpenShiftOAuth: true,\n\t\t\t\t\t},\n\t\t\t\t}\n\n\t\t\t\tdefaultPolicy := \"role:readonly\"\n\t\t\t\tpolicy := \"g, system:cluster-admins, role:admin\"\n\t\t\t\tscope := \"[groups]\"\n\t\t\t\tcr.Spec.RBAC = v1beta1.ArgoCDRBACSpec{\n\t\t\t\t\tDefaultPolicy: &defaultPolicy,\n\t\t\t\t\tPolicy: &policy,\n\t\t\t\t\tScopes: &scope,\n\t\t\t\t}\n\n\t\t\t\tcr.Spec.Server = v1beta1.ArgoCDServerSpec{\n\t\t\t\t\tRoute: v1beta1.ArgoCDRouteSpec{\n\t\t\t\t\t\tEnabled: true,\n\t\t\t\t\t},\n\t\t\t\t}\n\t\t\t}),\n\t\t\texpectedOutput: makeTestArgoCDAlpha(func(cr *ArgoCD) {\n\t\t\t\tcr.Spec.SSO = &ArgoCDSSOSpec{\n\t\t\t\t\tProvider: SSOProviderTypeDex,\n\t\t\t\t\tDex: &ArgoCDDexSpec{\n\t\t\t\t\t\tOpenShiftOAuth: true,\n\t\t\t\t\t},\n\t\t\t\t}\n\n\t\t\t\tdefaultPolicy := \"role:readonly\"\n\t\t\t\tpolicy := \"g, system:cluster-admins, role:admin\"\n\t\t\t\tscope := \"[groups]\"\n\t\t\t\tcr.Spec.RBAC = ArgoCDRBACSpec{\n\t\t\t\t\tDefaultPolicy: &defaultPolicy,\n\t\t\t\t\tPolicy: &policy,\n\t\t\t\t\tScopes: &scope,\n\t\t\t\t}\n\n\t\t\t\tcr.Spec.Server = ArgoCDServerSpec{\n\t\t\t\t\tRoute: ArgoCDRouteSpec{\n\t\t\t\t\t\tEnabled: true,\n\t\t\t\t\t},\n\t\t\t\t}\n\t\t\t}),\n\t\t},\n\t}\n\tfor _, test := range tests {\n\t\tt.Run(test.name, func(t *testing.T) {\n\n\t\t\t// Add input v1beta1 object in Hub\n\t\t\tvar hub conversion.Hub = test.input\n\n\t\t\tresult := &ArgoCD{}\n\t\t\t// Call ConvertFrom function to convert v1beta1 version to v1alpha\n\t\t\tresult.ConvertFrom(hub)\n\n\t\t\t// Compare converted object with expected.\n\t\t\tassert.Equal(t, test.expectedOutput, result)\n\t\t})\n\t}\n}",
"func test(a, b int) float64 {\n\treturn 5.5\n}",
"func testAbTest(t *testing.T, s *Service) {\n\tp := &model.ArgAbTest{\n\t\tGroups: \"不显示热门tab,显示热门tab\",\n\t\tIP: \"127.0.0.1\",\n\t}\n\tres, err := s.AbTest(context.TODO(), p)\n\tif err != nil {\n\t\tt.Logf(\"testAbTest error(%v) \\n\", err)\n\t\treturn\n\t}\n\tt.Logf(\"testAbTest res: %+v \\n\", res)\n}",
"func TestCheckValidCurve(t *testing.T) {\n\tfmt.Println(CheckValidCurve())\n}",
"func isAlpha(b byte) bool {\n\treturn (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z')\n}",
"func isAlpha(b byte) bool {\n\treturn (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z')\n}",
"func PTEST(mx, x operand.Op) { ctx.PTEST(mx, x) }",
"func testabc(a,b int){\n\n}",
"func Alpha(comp query.TokenType, a, b string) bool {\n\tswitch comp {\n\tcase query.Equals:\n\t\treturn a == b\n\tcase query.NotEquals:\n\t\treturn a != b\n\tcase query.Like:\n\t\tif b[0] == '%' && b[len(b)-1] == '%' {\n\t\t\treturn strings.Contains(a, b[1:len(b)-1])\n\t\t}\n\n\t\tif b[0] == '%' {\n\t\t\treturn strings.HasSuffix(a, b[1:])\n\t\t}\n\n\t\tif b[len(b)-1] == '%' {\n\t\t\treturn strings.HasPrefix(a, b[:len(b)-1])\n\t\t}\n\n\t\treturn strings.Contains(a, b)\n\tcase query.RLike:\n\t\treturn regexp.MustCompile(b).MatchString(a)\n\t}\n\treturn false\n}",
"func LRNAlpha(value float32) LRNAttr {\n\treturn func(m optionalAttr) {\n\t\tm[\"alpha\"] = value\n\t}\n}",
"func LeakyReluAlpha(value float32) LeakyReluAttr {\n\treturn func(m optionalAttr) {\n\t\tm[\"alpha\"] = value\n\t}\n}",
"func LRNGradAlpha(value float32) LRNGradAttr {\n\treturn func(m optionalAttr) {\n\t\tm[\"alpha\"] = value\n\t}\n}",
"func (c *Color) Alpha() float32 {\n\treturn c.a\n}",
"func TestA(t *testing.T) {}",
"func TESTB(ir, amr operand.Op) { ctx.TESTB(ir, amr) }",
"func Alpha(prefix string, length int) string {\n\treturn Runes(prefix, length, AlphaRunes)\n}",
"func VPTEST(mxy, xy operand.Op) { ctx.VPTEST(mxy, xy) }",
"func TestEe00b(t *testing.T) {\n\tconst fname = \"Ee00b\"\n\tvar ee float64\n\n\ttests := []struct {\n\t\tref string\n\t\tfn func(a1,a2 float64) float64\t\n\t}{\n\t\t{\"cgo\", CgoEe00b},\n\t\t{\"go\", GoEe00b},\n\t}\n\n\tfor _, test := range tests {\n\t\ttname := fname + \" \" + test.ref\n\t\tee = test.fn(2400000.5, 53736.0)\n\n\t\tvvd(t, ee, -0.8835700060003032831e-5, 1e-18, tname, \"\")\n\t}\n}",
"func (s *Surface) SetAlpha(a float64) {\n\ts.Ctx.Set(\"globalAlpha\", a)\n}",
"func VTESTPS(mxy, xy operand.Op) { ctx.VTESTPS(mxy, xy) }",
"func TestHd2ae(t *testing.T) {\n\tconst fname = \"Hd2ae\"\n\tvar h, d, p, a, e float64\n\n\th = 1.1\n\td = 1.2\n\tp = 0.3\n\n\ttests := []struct {\n\t\tref string\n\t\tfn func(a1, a2, a3 float64) (c1, c2 float64)\n\t}{\n\t\t{\"cgo\", CgoHd2ae},\n\t\t{\"go\", GoHd2ae},\n\t}\n\n\tfor _, test := range tests {\n\t\ttname := fname + \" \" + test.ref\n\n\t\ta, e = test.fn(h, d, p)\n\n\t\tvvd(t, a, 5.916889243730066194, 1e-13, tname, \"a\")\n\t\tvvd(t, e, 0.4472186304990486228, 1e-14, tname, \"e\")\n\t}\n}",
"func (c *TestClient) CreateInstanceAlpha(project, zone string, i *computeAlpha.Instance) error {\n\tif c.CreateInstanceBetaFn != nil {\n\t\treturn c.CreateInstanceAlphaFn(project, zone, i)\n\t}\n\treturn c.client.CreateInstanceAlpha(project, zone, i)\n}",
"func (r *ImageRef) HasAlpha() bool {\n\treturn vipsHasAlpha(r.image)\n}",
"func (t *T) Beta(name string, f interface{}) bool {\n\tt.Helper()\n\treturn t.invokeFeature(feature.Beta, name, f)\n}",
"func (s *Scanner) isAlpha(c byte) bool {\n\tre := regexp.MustCompile(alpha)\n\treturn re.MatchString(string(c))\n}",
"func TestAnpm(t *testing.T) {\n\tconst fname = \"Anpm\"\n\ttests := []struct {\n\t\tref string\n\t\tfn func(float64) float64\n\t}{\n\t\t{\"cgo\", CgoAnpm},\n\t\t{\"go\", GoAnpm},\n\t}\n\tfor _, test := range tests {\n\t\ttname := fname + \" \" + test.ref\n\t\tvvd(t, test.fn(-4.0), 2.283185307179586477,\n\t\t\t1e-12, tname, \"\")\n\t}\n}",
"func TestComplexifyRandom(t *testing.T) {\n\n}",
"func NewAlpha(r Rectangle) *Alpha {\n\treturn &Alpha{\n\t\tPix: make([]uint8, pixelBufferLength(1, r, \"Alpha\")),\n\t\tStride: 1 * r.Dx(),\n\t\tRect: r,\n\t}\n}",
"func TestAdd(t *testing.T) {\n x, y := float32(3), float32(5)\n args := []float32{x, y}\n if float32(x+y) != Add(args) {\n panic(\"Simple function is not working.\")\n }\n}",
"func test(t *testing.T, scene string, f func(*testing.T)) {\n\tif t.Failed() {\n\t\treturn\n\t}\n\tConvey(scene, t, func() {\n\t\tf(t)\n\t})\n}",
"func (x *Float) Acc() Accuracy {}",
"func (self *Sax) alphabetize(paaX []float64) string {\n alphabetizedX := \"\"\n for i := 0; i < len(paaX); i++ {\n letterFound := false\n\n for j := 0; j < len(self.beta); j++ {\n if paaX[i] < self.beta[j] {\n alphabetizedX += string(rune(int(self.aOffset) + j))\n letterFound = true\n break\n }\n\n }\n if !letterFound {\n alphabetizedX += string(rune(int(self.aOffset) + len(self.beta)))\n }\n\n }\n return alphabetizedX\n\n}",
"func (pw *PixelWand) SetAlpha(alpha float64) {\n\tC.PixelSetAlpha(pw.pw, C.double(alpha))\n\truntime.KeepAlive(pw)\n}",
"func KTESTB(k, k1 operand.Op) { ctx.KTESTB(k, k1) }",
"func (s *BasePCREListener) EnterAlpha_nums(ctx *Alpha_numsContext) {}",
"func TestEDIdentificationNumberAlphaNumeric(t *testing.T) {\n\ttestEDIdentificationNumberAlphaNumeric(t)\n}",
"func (t *T) AFact() {}",
"func (self *Graphics) FillAlpha() int{\n return self.Object.Get(\"fillAlpha\").Int()\n}",
"func TestNextZeta(t *testing.T) {\n\tfmt.Println(\"test NextZeta\")\n\tconst nIter = 1e7\n\tx := 0.0\n\ty := ZetaMean(2.152)\n\tfor i := 0; i < nIter; i++ {\n\t\tx += float64(NextZeta(2.152))\n\t}\n\tx /= nIter\n\tif !check(x, y) {\n\t\tt.Error()\n\t\tfmt.Println(x, y)\n\t}\n}",
"func Test1174(t *testing.T) {\n\ttest.TestGroup(new(ExtendedCurve).Init(Param1174(), false))\n}",
"func VTESTPD(mxy, xy operand.Op) { ctx.VTESTPD(mxy, xy) }",
"func (t *KValidator) IsAlpha() bool {\n\treturn alphaRegex.MatchString(t.data.String())\n}",
"func value(player int, board [8][8]int, alpha float32, beta float32, depth int) float32 {\n\tval, _ := AlphaBeta(enemy(player), board, -beta, -alpha, depth-1)\n\n\treturn -val\n}",
"func TestFullOrder1174(t *testing.T) {\n\ttest.TestGroup(new(ExtendedCurve).Init(Param1174(), true))\n}",
"func testEDdfiAccountNumberAlphaNumeric(t testing.TB) {\n\ted := mockEntryDetail()\n\ted.DFIAccountNumber = \"®\"\n\terr := ed.Validate()\n\tif !base.Match(err, ErrNonAlphanumeric) {\n\t\tt.Errorf(\"%T: %s\", err, err)\n\t}\n}",
"func TestEncrypt() {\n\n}",
"func TestGetCaptcha(t *testing.T) {}",
"func (self *Graphics) SetFillAlphaA(member int) {\n self.Object.Set(\"fillAlpha\", member)\n}",
"func IsAlpha(r rune) bool {\n\tif (r < 'a' || r > 'z') && (r < 'A' || r > 'Z') {\n\t\treturn false\n\t}\n\treturn true\n}",
"func TestEDIndividualNameAlphaNumeric(t *testing.T) {\n\ttestEDIndividualNameAlphaNumeric(t)\n}",
"func (c *Color) SetAlpha(a float32) error {\n\tif a < 0 || a > 1 {\n\t\treturn ErrNotColor\n\t}\n\tc.a = a\n\treturn nil\n}",
"func Test(t *testing.T) {\n}",
"func (s *SmartContract) FindBestAlpha(ctx contractapi.TransactionContextInterface, receiveMsg string) error {\n\tfmt.Println(\"[FIND BEST ALPHA MSG] Received\")\n\treceiveMsgBytes := []byte(receiveMsg)\n\trecMsg := new(HttpAccAlphaMessage)\n\t_ = json.Unmarshal(receiveMsgBytes, recMsg)\n\n\tvar accAlphaMap = map[string]AccAlpha{}\n\taccAlphaInterface, err := readAsMap(ctx, \"accAlphaMap\", recMsg.Epochs)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to read acc_test and alpha map from state. %s\", err.Error())\n\t}\n\taccAlphaString, err := json.Marshal(accAlphaInterface)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to marshal accAlpha interface: %s\", err.Error())\n\t}\n\terr = json.Unmarshal(accAlphaString, &accAlphaMap)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to unmarshal accAlpha interface to accAlphaMap: %s\", err.Error())\n\t}\n\t// count accAlpha map length. If gathered all of the acc_test, choose the best alpha according to the policy\n\t// (findMaxAccAvg or findMinAccVar), release alpha and w\n\tif len(accAlphaMap) == userNum {\n\t\tfmt.Println(\"gathered enough acc_test and alpha, choose the best alpha according to the policy\")\n\t\talpha, acc := findMaxAccAvg(accAlphaMap)\n\t\t// release alpha and accuracy\n\t\tdata := make(map[string]interface{})\n\t\tdata[\"alpha\"] = alpha // alpha is included in data\n\t\tdata[\"accuracy\"] = acc // accuracy for alpha is included in data\n\t\tsendMsg := new(HttpMessage)\n\t\tsendMsg.Message = \"best_alpha\"\n\t\tsendMsg.Data = data\n\t\tsendMsg.Uuid = myuuid\n\t\tsendMsg.Epochs = recMsg.Epochs\n\t\tsendMsgAsBytes, _ := json.Marshal(sendMsg)\n\n\t\tgo sendPostRequest(sendMsgAsBytes, \"BEST_ALPHA\")\n\t} else {\n\t\tfmt.Println(\"not gathered enough acc_test and alpha [\" + strconv.Itoa(len(accAlphaMap)) + \"], do nothing\")\n\t}\n\n\treturn nil\n}",
"func NewTestPingFunc(sequence map[string][]bool) func(string) (bool, error) {\n\ti := make(map[string]int, len(sequence))\n\tfor host := range sequence {\n\t\ti[host] = 0 // initialize counter\n\t}\n\n\tvar m sync.Mutex\n\treturn func(host string) (bool, error) {\n\t\tdefer func() {\n\t\t\tm.Unlock()\n\t\t\trecover()\n\t\t}()\n\n\t\tm.Lock()\n\t\tvalue := sequence[host][i[host]]\n\t\ti[host]++\n\t\treturn value, nil\n\t}\n}",
"func TestTorusInterior(t *testing.T) {\n p := []float64{0, 0, 0}\n v := []float64{0, 0, 1}\n var R, r float64 = 2, 1\n\n torus := NewTorus(p, v, R, r)\n\n if torus == nil {\n t.Error(\"torus error: torus should exist but is nil!!!!\") \n return \n }\n\n if surface.SurfaceInterior(torus, p) {\n t.Error(\"torus error: torus is inside-out 1! \", torus.F(p)) \n }\n\n if !surface.SurfaceInterior(torus, []float64{2, 0, 0}) {\n t.Error(\"torus error: torus is inside-out 2! \", torus.F([]float64{2, 0, 0})) \n }\n}",
"func (obj *material) Alpha() Alpha {\n\treturn obj.alpha\n}",
"func (a *AGI) SayAlpha(label string, escapeDigits string) (digit string, err error) {\n\treturn a.Command(\"SAY ALPHA\", label, escapeDigits).Val()\n}",
"func Test0(par0 bool) {\n}",
"func (c *Color) SetAlphaPreMult() bool {\n\tif c.A == 255 {\n\t\treturn false\n\t}\n\tr, g, b, a := c.ToFloat32()\n\tc.SetNPFloat32(r, g, b, a)\n\treturn true\n}",
"func alphaBeta(position *position, alpha int, beta int, depth int) int {\n\t// At the bottom of the tree, return the score of the position for the attacking player.\n\tif depth == 0 {\n\t\treturn evaluate(*position)\n\t}\n\n\t// Otherwise, generate all possible moves.\n\tmoves := generateLegalMoves(*position)\n\tfor _, move := range moves {\n\n\t\t// Make the move.\n\t\tartifacts := makeMove(position, move)\n\n\t\t// Recursively call the search function to determine the move's score.\n\t\tscore := -alphaBeta(position, -beta, -alpha, depth-1)\n\n\t\t// If the score is higher than the beta cutoff, the rest of the search\n\t\t// tree is irrelevant and the cutoff is returned.\n\t\tif score >= beta {\n\t\t\tunmakeMove(position, move, artifacts)\n\t\t\treturn beta\n\t\t}\n\n\t\t// Otherwise, replace the alpha if the new score is higher.\n\t\tif score > alpha {\n\t\t\talpha = score\n\t\t}\n\n\t\t// Restore the pre-move state of the board.\n\t\tunmakeMove(position, move, artifacts)\n\t}\n\n\treturn alpha\n}",
"func (r *ImageRef) AddAlpha() error {\n\tif vipsHasAlpha(r.image) {\n\t\treturn nil\n\t}\n\n\tout, err := vipsAddAlpha(r.image)\n\tif err != nil {\n\t\treturn err\n\t}\n\tr.setImage(out)\n\treturn nil\n}",
"func TestCallFunc_function(t *testing.T) {\n\n}",
"func TESTW(ir, amr operand.Op) { ctx.TESTW(ir, amr) }",
"func main() {\n\ttest_plain_background()\n\ttest_cloud()\n\ttest_enemy()\n\ttest_move_background()\n\ttest_display_score()\n}",
"func Alpha(s string) bool {\n\tfor _, v := range s {\n\t\tif ('Z' < v || v < 'A') && ('z' < v || v < 'a') {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }",
"func Test(t *testing.T) { TestingT(t) }"
] | [
"0.7365797",
"0.6467262",
"0.5990663",
"0.5964319",
"0.5907028",
"0.58390313",
"0.5737841",
"0.5732525",
"0.5585304",
"0.55348474",
"0.545238",
"0.54403746",
"0.5396641",
"0.5375207",
"0.53567845",
"0.5353082",
"0.5320442",
"0.5320442",
"0.5308834",
"0.5270199",
"0.5252198",
"0.52374125",
"0.52201927",
"0.52035105",
"0.51659226",
"0.5141136",
"0.5139648",
"0.51341504",
"0.51338243",
"0.50906414",
"0.5066236",
"0.5051422",
"0.50448585",
"0.5030276",
"0.50288165",
"0.50264937",
"0.4998581",
"0.49935272",
"0.4992905",
"0.49893323",
"0.49790478",
"0.49545196",
"0.49431497",
"0.491786",
"0.49162018",
"0.49005264",
"0.48985845",
"0.48908785",
"0.48901644",
"0.48896992",
"0.4888487",
"0.48884654",
"0.4881922",
"0.4879831",
"0.48780802",
"0.48633808",
"0.4862299",
"0.48615855",
"0.4848579",
"0.48470455",
"0.4844332",
"0.48434973",
"0.48415825",
"0.48296517",
"0.48224986",
"0.48176724",
"0.47999948",
"0.47989714",
"0.47986364",
"0.4797554",
"0.47968665",
"0.4791674",
"0.47676212",
"0.4764512",
"0.4758823",
"0.47579417",
"0.47532207",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772",
"0.47459772"
] | 0.60668135 | 2 |
determine if textures are loaded in texture memory | func AreTexturesResident(n int32, textures *uint32, residences *bool) bool {
ret := C.glowAreTexturesResident(gpAreTexturesResident, (C.GLsizei)(n), (*C.GLuint)(unsafe.Pointer(textures)), (*C.GLboolean)(unsafe.Pointer(residences)))
return ret == TRUE
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func AreTexturesResident(n int32, textures *uint32, residences *bool) bool {\n\tret, _, _ := syscall.Syscall(gpAreTexturesResident, 3, uintptr(n), uintptr(unsafe.Pointer(textures)), uintptr(unsafe.Pointer(residences)))\n\treturn ret != 0\n}",
"func IsTexture(texture uint32) bool {\n ret := C.glowIsTexture(gpIsTexture, (C.GLuint)(texture))\n return ret == TRUE\n}",
"func AreTexturesResident(n int32, textures *uint32, residences *bool) bool {\n ret := C.glowAreTexturesResident(gpAreTexturesResident, (C.GLsizei)(n), (*C.GLuint)(unsafe.Pointer(textures)), (*C.GLboolean)(unsafe.Pointer(residences)))\n return ret == TRUE\n}",
"func IsTexture(texture uint32) bool {\n\tret, _, _ := syscall.Syscall(gpIsTexture, 1, uintptr(texture), 0, 0)\n\treturn ret != 0\n}",
"func (texture Texture) IsTexture() bool {\n\treturn gl.IsTexture(uint32(texture))\n}",
"func IsTexture(texture Uint) Boolean {\n\tctexture, _ := (C.GLuint)(texture), cgoAllocsUnknown\n\t__ret := C.glIsTexture(ctexture)\n\t__v := (Boolean)(__ret)\n\treturn __v\n}",
"func IsTexture(t Texture) bool {\n\treturn gl.IsTexture(t.Value)\n}",
"func IsLoaded() bool {\n\treturn len(mapping) > 0\n}",
"func IsTexture(texture uint32) bool {\n\tret := C.glowIsTexture(gpIsTexture, (C.GLuint)(texture))\n\treturn ret == TRUE\n}",
"func IsTexture(texture uint32) bool {\n\tret := C.glowIsTexture(gpIsTexture, (C.GLuint)(texture))\n\treturn ret == TRUE\n}",
"func LoadTextures(eng sprite.Engine) map[string]sprite.SubTex {\n\tallTexs := make(map[string]sprite.SubTex)\n\tboundedImgs := []string{\"Clubs-2.png\", \"Clubs-3.png\", \"Clubs-4.png\", \"Clubs-5.png\", \"Clubs-6.png\", \"Clubs-7.png\", \"Clubs-8.png\",\n\t\t\"Clubs-9.png\", \"Clubs-10.png\", \"Clubs-Jack.png\", \"Clubs-Queen.png\", \"Clubs-King.png\", \"Clubs-Ace.png\",\n\t\t\"Diamonds-2.png\", \"Diamonds-3.png\", \"Diamonds-4.png\", \"Diamonds-5.png\", \"Diamonds-6.png\", \"Diamonds-7.png\", \"Diamonds-8.png\",\n\t\t\"Diamonds-9.png\", \"Diamonds-10.png\", \"Diamonds-Jack.png\", \"Diamonds-Queen.png\", \"Diamonds-King.png\", \"Diamonds-Ace.png\",\n\t\t\"Spades-2.png\", \"Spades-3.png\", \"Spades-4.png\", \"Spades-5.png\", \"Spades-6.png\", \"Spades-7.png\", \"Spades-8.png\",\n\t\t\"Spades-9.png\", \"Spades-10.png\", \"Spades-Jack.png\", \"Spades-Queen.png\", \"Spades-King.png\", \"Spades-Ace.png\",\n\t\t\"Hearts-2.png\", \"Hearts-3.png\", \"Hearts-4.png\", \"Hearts-5.png\", \"Hearts-6.png\", \"Hearts-7.png\", \"Hearts-8.png\",\n\t\t\"Hearts-9.png\", \"Hearts-10.png\", \"Hearts-Jack.png\", \"Hearts-Queen.png\", \"Hearts-King.png\", \"Hearts-Ace.png\", \"BakuSquare.png\",\n\t}\n\tunboundedImgs := []string{\"Club.png\", \"Diamond.png\", \"Spade.png\", \"Heart.png\", \"gray.jpeg\", \"blue.png\", \"trickDrop.png\",\n\t\t\"trickDropBlue.png\", \"player0.jpeg\", \"player1.jpeg\", \"player2.jpeg\", \"player3.jpeg\", \"laptopIcon.png\", \"watchIcon.png\",\n\t\t\"phoneIcon.png\", \"tabletIcon.png\", \"A-Upper.png\", \"B-Upper.png\", \"C-Upper.png\", \"D-Upper.png\", \"E-Upper.png\", \"F-Upper.png\",\n\t\t\"G-Upper.png\", \"H-Upper.png\", \"I-Upper.png\", \"J-Upper.png\", \"K-Upper.png\", \"L-Upper.png\", \"M-Upper.png\", \"N-Upper.png\",\n\t\t\"O-Upper.png\", \"P-Upper.png\", \"Q-Upper.png\", \"R-Upper.png\", \"S-Upper.png\", \"T-Upper.png\", \"U-Upper.png\", \"V-Upper.png\",\n\t\t\"W-Upper.png\", \"X-Upper.png\", \"Y-Upper.png\", \"Z-Upper.png\", \"A-Lower.png\", \"B-Lower.png\", \"C-Lower.png\", \"D-Lower.png\",\n\t\t\"E-Lower.png\", \"F-Lower.png\", \"G-Lower.png\", \"H-Lower.png\", \"I-Lower.png\", \"J-Lower.png\", \"K-Lower.png\", \"L-Lower.png\",\n\t\t\"M-Lower.png\", \"N-Lower.png\", \"O-Lower.png\", \"P-Lower.png\", \"Q-Lower.png\", \"R-Lower.png\", \"S-Lower.png\", \"T-Lower.png\",\n\t\t\"U-Lower.png\", \"V-Lower.png\", \"W-Lower.png\", \"X-Lower.png\", \"Y-Lower.png\", \"Z-Lower.png\", \"Space.png\", \"Colon.png\", \"Bang.png\",\n\t\t\"Apostrophe.png\", \"1.png\", \"2.png\", \"3.png\", \"4.png\", \"5.png\", \"6.png\", \"7.png\", \"8.png\", \"9.png\", \"0.png\", \"1-Red.png\",\n\t\t\"2-Red.png\", \"3-Red.png\", \"4-Red.png\", \"5-Red.png\", \"6-Red.png\", \"7-Red.png\", \"8-Red.png\", \"9-Red.png\", \"0-Red.png\",\n\t\t\"1-DBlue.png\", \"2-DBlue.png\", \"3-DBlue.png\", \"4-DBlue.png\", \"5-DBlue.png\", \"6-DBlue.png\", \"7-DBlue.png\", \"8-DBlue.png\",\n\t\t\"9-DBlue.png\", \"0-DBlue.png\", \"A-Upper-DBlue.png\", \"B-Upper-DBlue.png\",\n\t\t\"C-Upper-DBlue.png\", \"D-Upper-DBlue.png\", \"E-Upper-DBlue.png\", \"F-Upper-DBlue.png\", \"G-Upper-DBlue.png\", \"H-Upper-DBlue.png\",\n\t\t\"I-Upper-DBlue.png\", \"J-Upper-DBlue.png\", \"K-Upper-DBlue.png\", \"L-Upper-DBlue.png\", \"M-Upper-DBlue.png\", \"N-Upper-DBlue.png\",\n\t\t\"O-Upper-DBlue.png\", \"P-Upper-DBlue.png\", \"Q-Upper-DBlue.png\", \"R-Upper-DBlue.png\", \"S-Upper-DBlue.png\", \"T-Upper-DBlue.png\",\n\t\t\"U-Upper-DBlue.png\", \"V-Upper-DBlue.png\", \"W-Upper-DBlue.png\", \"X-Upper-DBlue.png\", \"Y-Upper-DBlue.png\", 
\"Z-Upper-DBlue.png\",\n\t\t\"A-Lower-DBlue.png\", \"B-Lower-DBlue.png\", \"C-Lower-DBlue.png\", \"D-Lower-DBlue.png\", \"E-Lower-DBlue.png\", \"F-Lower-DBlue.png\",\n\t\t\"G-Lower-DBlue.png\", \"H-Lower-DBlue.png\", \"I-Lower-DBlue.png\", \"J-Lower-DBlue.png\", \"K-Lower-DBlue.png\", \"L-Lower-DBlue.png\",\n\t\t\"M-Lower-DBlue.png\", \"N-Lower-DBlue.png\", \"O-Lower-DBlue.png\", \"P-Lower-DBlue.png\", \"Q-Lower-DBlue.png\", \"R-Lower-DBlue.png\",\n\t\t\"S-Lower-DBlue.png\", \"T-Lower-DBlue.png\", \"U-Lower-DBlue.png\", \"V-Lower-DBlue.png\", \"W-Lower-DBlue.png\", \"X-Lower-DBlue.png\",\n\t\t\"Y-Lower-DBlue.png\", \"Z-Lower-DBlue.png\", \"Apostrophe-DBlue.png\", \"Space-DBlue.png\", \"A-Upper-LBlue.png\", \"B-Upper-LBlue.png\",\n\t\t\"C-Upper-LBlue.png\", \"D-Upper-LBlue.png\", \"E-Upper-LBlue.png\", \"F-Upper-LBlue.png\", \"G-Upper-LBlue.png\", \"H-Upper-LBlue.png\",\n\t\t\"I-Upper-LBlue.png\", \"J-Upper-LBlue.png\", \"K-Upper-LBlue.png\", \"L-Upper-LBlue.png\", \"M-Upper-LBlue.png\", \"N-Upper-LBlue.png\",\n\t\t\"O-Upper-LBlue.png\", \"P-Upper-LBlue.png\", \"Q-Upper-LBlue.png\", \"R-Upper-LBlue.png\", \"S-Upper-LBlue.png\", \"T-Upper-LBlue.png\",\n\t\t\"U-Upper-LBlue.png\", \"V-Upper-LBlue.png\", \"W-Upper-LBlue.png\", \"X-Upper-LBlue.png\", \"Y-Upper-LBlue.png\", \"Z-Upper-LBlue.png\",\n\t\t\"A-Lower-LBlue.png\", \"B-Lower-LBlue.png\", \"C-Lower-LBlue.png\", \"D-Lower-LBlue.png\", \"E-Lower-LBlue.png\", \"F-Lower-LBlue.png\",\n\t\t\"G-Lower-LBlue.png\", \"H-Lower-LBlue.png\", \"I-Lower-LBlue.png\", \"J-Lower-LBlue.png\", \"K-Lower-LBlue.png\", \"L-Lower-LBlue.png\",\n\t\t\"M-Lower-LBlue.png\", \"N-Lower-LBlue.png\", \"O-Lower-LBlue.png\", \"P-Lower-LBlue.png\", \"Q-Lower-LBlue.png\", \"R-Lower-LBlue.png\",\n\t\t\"S-Lower-LBlue.png\", \"T-Lower-LBlue.png\", \"U-Lower-LBlue.png\", \"V-Lower-LBlue.png\", \"W-Lower-LBlue.png\", \"X-Lower-LBlue.png\",\n\t\t\"Y-Lower-LBlue.png\", \"Z-Lower-LBlue.png\", \"A-Upper-Gray.png\", \"B-Upper-Gray.png\", \"C-Upper-Gray.png\", \"D-Upper-Gray.png\",\n\t\t\"E-Upper-Gray.png\", \"F-Upper-Gray.png\", \"G-Upper-Gray.png\", \"H-Upper-Gray.png\", \"I-Upper-Gray.png\", \"J-Upper-Gray.png\",\n\t\t\"K-Upper-Gray.png\", \"L-Upper-Gray.png\", \"M-Upper-Gray.png\", \"N-Upper-Gray.png\", \"O-Upper-Gray.png\", \"P-Upper-Gray.png\",\n\t\t\"Q-Upper-Gray.png\", \"R-Upper-Gray.png\", \"S-Upper-Gray.png\", \"T-Upper-Gray.png\", \"U-Upper-Gray.png\", \"V-Upper-Gray.png\",\n\t\t\"W-Upper-Gray.png\", \"X-Upper-Gray.png\", \"Y-Upper-Gray.png\", \"Z-Upper-Gray.png\", \"A-Lower-Gray.png\", \"B-Lower-Gray.png\",\n\t\t\"C-Lower-Gray.png\", \"D-Lower-Gray.png\", \"E-Lower-Gray.png\", \"F-Lower-Gray.png\", \"G-Lower-Gray.png\", \"H-Lower-Gray.png\",\n\t\t\"I-Lower-Gray.png\", \"J-Lower-Gray.png\", \"K-Lower-Gray.png\", \"L-Lower-Gray.png\", \"M-Lower-Gray.png\", \"N-Lower-Gray.png\",\n\t\t\"O-Lower-Gray.png\", \"P-Lower-Gray.png\", \"Q-Lower-Gray.png\", \"R-Lower-Gray.png\", \"S-Lower-Gray.png\", \"T-Lower-Gray.png\",\n\t\t\"U-Lower-Gray.png\", \"V-Lower-Gray.png\", \"W-Lower-Gray.png\", \"X-Lower-Gray.png\", \"Y-Lower-Gray.png\", \"Z-Lower-Gray.png\",\n\t\t\"Space-Gray.png\", \"RoundedRectangle-DBlue.png\", \"RoundedRectangle-LBlue.png\", \"RoundedRectangle-Gray.png\", \"Rectangle-LBlue.png\",\n\t\t\"Rectangle-DBlue.png\", \"HorizontalPullTab.png\", \"VerticalPullTab.png\", \"NewGamePressed.png\", \"NewGameUnpressed.png\",\n\t\t\"NewRoundPressed.png\", \"NewRoundUnpressed.png\", \"JoinGamePressed.png\", \"JoinGameUnpressed.png\", \"Period.png\",\n\t\t\"SitSpotPressed.png\", 
\"SitSpotUnpressed.png\", \"WatchSpotPressed.png\", \"WatchSpotUnpressed.png\", \"StartBlue.png\", \"StartGray.png\",\n\t\t\"StartBluePressed.png\", \"Restart.png\", \"Visibility.png\", \"VisibilityOff.png\", \"QuitPressed.png\", \"QuitUnpressed.png\",\n\t\t\"PassPressed.png\", \"PassUnpressed.png\", \"RightArrowBlue.png\", \"LeftArrowBlue.png\", \"AcrossArrowBlue.png\", \"RightArrowGray.png\",\n\t\t\"LeftArrowGray.png\", \"AcrossArrowGray.png\", \"TakeTrickTableUnpressed.png\", \"TakeTrickTablePressed.png\", \"TakeTrickHandPressed.png\",\n\t\t\"TakeTrickHandUnpressed.png\", \"android.png\", \"cat.png\", \"man.png\", \"woman.png\", \"TakeUnpressed.png\", \"TakePressed.png\",\n\t\t\"UnplayedBorder1.png\", \"UnplayedBorder2.png\", \"RejoinPressed.png\", \"RejoinUnpressed.png\",\n\t}\n\tfor _, f := range boundedImgs {\n\t\ta, err := asset.Open(f)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\timg, _, err := image.Decode(a)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tt, err := eng.LoadTexture(img)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\timgWidth, imgHeight := t.Bounds()\n\t\tallTexs[f] = sprite.SubTex{t, image.Rect(0, 0, imgWidth, imgHeight)}\n\t\ta.Close()\n\t}\n\tfor _, f := range unboundedImgs {\n\t\ta, err := asset.Open(f)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\timg, _, err := image.Decode(a)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tt, err := eng.LoadTexture(img)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\timgWidth, imgHeight := t.Bounds()\n\t\tallTexs[f] = sprite.SubTex{t, image.Rect(1, 1, imgWidth-1, imgHeight-1)}\n\t\ta.Close()\n\t}\n\treturn allTexs\n}",
"func (wa *WzAES) IsLoaded() bool {\n\treturn wa.key != nil\n}",
"func IsLoaded(filename string) bool {\n\tloadedLock.RLock()\n\t_, ok := loaded[filename]\n\tloadedLock.RUnlock()\n\treturn ok\n}",
"func (self *TileSprite) TextureDebug() bool{\n return self.Object.Get(\"textureDebug\").Bool()\n}",
"func loadTextures() {\n\tfor i := 0; i < 7; i++ {\n\n\t\ttextures[i], _, _ = ebutil.NewImageFromFile(\"assets/image/\"+colors[i]+\".png\", eb.FilterDefault)\n\t}\n\ttextures[7], _, _ = ebutil.NewImageFromFile(\"assets/image/tetris_backgraund.png\", eb.FilterDefault)\n}",
"func (DrawTexture) IsDrawAction() {}",
"func loadAllReferenceTextures(compMesh *component.Mesh) {\n\tfor _, texFile := range compMesh.Material.Textures {\n\t\tdoLoadTexture(texFile)\n\t}\n\tif len(compMesh.Material.DiffuseTexture) > 0 {\n\t\tdoLoadTexture(compMesh.Material.DiffuseTexture)\n\t}\n\tif len(compMesh.Material.NormalsTexture) > 0 {\n\t\tdoLoadTexture(compMesh.Material.NormalsTexture)\n\t}\n\tif len(compMesh.Material.SpecularTexture) > 0 {\n\t\tdoLoadTexture(compMesh.Material.SpecularTexture)\n\t}\n}",
"func (lvl *LevelDB) IsLoadedChunk(x, y int) bool {\n\tlvl.mutex.RLock()\n\t_, ok := lvl.chunks[lvl.at(x, y)]\n\tlvl.mutex.RUnlock()\n\n\treturn ok\n}",
"func (s *State) HasResources() bool {\n\tif s.Empty() {\n\t\treturn false\n\t}\n\n\tfor _, mod := range s.Modules {\n\t\tif len(mod.Resources) > 0 {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}",
"func (self *TileSprite) RefreshTexture() bool{\n return self.Object.Get(\"refreshTexture\").Bool()\n}",
"func (provider *ChunkProvider) IsChunkLoaded(x, z int32) bool {\n\tprovider.mutex.RLock()\n\tvar _, ok = provider.chunks[provider.GetChunkIndex(x, z)]\n\tprovider.mutex.RUnlock()\n\treturn ok\n}",
"func IsSampler(sampler uint32) bool {\n ret := C.glowIsSampler(gpIsSampler, (C.GLuint)(sampler))\n return ret == TRUE\n}",
"func (am *Manager) GetTexture(name string) (*Texture, bool) {\n\tif tex, ok := am.Textures[name]; ok {\n\t\treturn tex, ok\n\t}\n\n\tif am.Parent != nil {\n\t\treturn am.Parent.GetTexture(name)\n\t}\n\n\treturn nil, false\n}",
"func (m *MMU) Loaded() bool {\n\treturn m.hasBoot || m.hasROM\n}",
"func (bm Blendmap) Texture() *gl.Texture {\n\treturn bm.Map.id\n}",
"func (obj *Device) GetAvailableTextureMem() uint {\n\tret, _, _ := syscall.Syscall(\n\t\tobj.vtbl.GetAvailableTextureMem,\n\t\t1,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\t0,\n\t\t0,\n\t)\n\treturn uint(ret)\n}",
"func (r pciResource) isMem() bool {\n\treturn r.flags&IORESOURCE_TYPE_BITS == IORESOURCE_MEM\n}",
"func (s SnapshotState) HasInMemory() bool {\n\treturn uint32(s)&1 != 0\n}",
"func IsSampler(sampler uint32) bool {\n\tret, _, _ := syscall.Syscall(gpIsSampler, 1, uintptr(sampler), 0, 0)\n\treturn ret != 0\n}",
"func doLoadTexture(texFile string) error {\n\tprefixDir := getComponentPrefix()\n\ttexFilepath := prefixDir + texFile\n\t_, err := textureMan.LoadTexture(texFile, texFilepath)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Failed to load texture %s: %v\", texFile, err)\n\t}\n\n\tfmt.Printf(\"Loaded texture: %s\\n\", texFile)\n\treturn nil\n}",
"func Loaded(source string) bool {\n\treturn source != \"\"\n}",
"func (am *Manager) LoadTextures(names ...string) ([]*Texture, error) {\n\tvar (\n\t\ttextures = make([]*Texture, len(names))\n\t\terr error\n\t)\n\n\tfor i, name := range names {\n\t\ttextures[i], err = am.LoadTexture(name)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn textures, nil\n}",
"func LoadImageAsTexture(name string, path string) error {\n\tif Textures == nil {\n\t\tlog.Print(\"Initialize resource manager\")\n\t\tTextures = make(map[string]*ebiten.Image)\n\t}\n\timg, err := LoadImage(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tTextures[name] = img\n\treturn nil\n}",
"func (s Resource) GetTextures() ([3]uint32, SurfaceFormat, error) {\n\tvar outTextures [3]C.uint32_t\n\tvar format C.enum_wlc_surface_format\n\tval := bool(C.wlc_surface_get_textures(C.wlc_resource(s), &outTextures[0], &format))\n\tif val {\n\t\tvar textures [3]uint32\n\t\tfor i, t := range outTextures {\n\t\t\ttextures[i] = uint32(t)\n\t\t}\n\t\treturn textures, SurfaceFormat(format), nil\n\t}\n\n\treturn [3]uint32{}, 0, fmt.Errorf(\"invalid surface\")\n}",
"func IsRenderbuffer(renderbuffer uint32) bool {\n ret := C.glowIsRenderbuffer(gpIsRenderbuffer, (C.GLuint)(renderbuffer))\n return ret == TRUE\n}",
"func (am *AssetManager) LoadTexture(name, iname string) {\n\tif strings.Contains(name, \".png\") {\n\t\tpic, err := LoadPng(am.texturesDir + name)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tam.Textures[iname] = pic\n\t} else {\n\t\tlog.Fatal(\"unable to find texture \" + (am.modelsDir + name))\n\t}\n}",
"func (loader *Loader) HasChunkInUse(chunkX, chunkZ int32) bool {\n\tloader.mutex.RLock()\n\tvar _, ok = loader.loadedChunks[loader.GetChunkHash(chunkX, chunkZ)]\n\tloader.mutex.RUnlock()\n\treturn ok\n}",
"func EGLImageTargetTextureStorageEXT(texture uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTextureStorageEXT(gpEGLImageTargetTextureStorageEXT, (C.GLuint)(texture), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func EGLImageTargetTextureStorageEXT(texture uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTextureStorageEXT(gpEGLImageTargetTextureStorageEXT, (C.GLuint)(texture), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func (o *AddOn) HasExternalResources() bool {\n\tif o != nil && o.bitmap_&64 != 0 {\n\t\treturn o.hasExternalResources\n\t}\n\treturn false\n}",
"func (cache *FrameCache) Texture(key FrameCacheKey) *BitmapTexture {\n\treturn cache.textures[key]\n}",
"func EGLImageTargetTextureStorageEXT(texture uint32, image unsafe.Pointer, attrib_list *int32) {\n\tsyscall.Syscall(gpEGLImageTargetTextureStorageEXT, 3, uintptr(texture), uintptr(image), uintptr(unsafe.Pointer(attrib_list)))\n}",
"func IsList(list uint32) bool {\n ret := C.glowIsList(gpIsList, (C.GLuint)(list))\n return ret == TRUE\n}",
"func (level *Level) Textures() (result []int) {\n\tblockData := level.store.Get(res.ResourceID(4000 + level.id*100 + 7)).BlockData(0)\n\treader := bytes.NewReader(blockData)\n\tvar ids [54]uint16\n\n\tbinary.Read(reader, binary.LittleEndian, &ids)\n\tfor _, id := range ids {\n\t\tresult = append(result, int(id))\n\t}\n\n\treturn\n}",
"func (ChunkLoader *ChunkLoader) HasChunkInUse(chunkX, chunkZ int32) bool {\n\tChunkLoader.mutex.RLock()\n\tvar _, ok = ChunkLoader.loadedchunk[GetChunkIndex(chunkX, chunkZ)]\n\tChunkLoader.mutex.RUnlock()\n\treturn ok\n}",
"func (t *Type) IsRegularMemory() bool",
"func (am *Manager) LoadTexture(name string) (*Texture, error) {\n\tif tex, ok := am.GetTexture(name); ok {\n\t\treturn tex, nil\n\t}\n\n\tvar (\n\t\terr error\n\t\tf *os.File\n\t)\n\n\tif f, err = os.Open(\"assets/textures/\" + name); err != nil {\n\t\treturn nil, err\n\t}\n\tdefer f.Close()\n\n\tvar img image.Image\n\n\tif img, _, err = image.Decode(f); err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar tex *Texture\n\n\tif tex, err = NewTextureFromImage(name, img); err != nil {\n\t\treturn nil, err\n\t}\n\n\tam.AddTexture(tex)\n\n\treturn tex, nil\n}",
"func (self *TileSprite) LoadTexture2O(key interface{}, frame interface{}, stopAnimation bool) {\n self.Object.Call(\"loadTexture\", key, frame, stopAnimation)\n}",
"func (self *TileSprite) LoadTexture(key interface{}) {\n self.Object.Call(\"loadTexture\", key)\n}",
"func (t *Three) Texture() *Texture {\n\tp := t.ctx.Get(\"Texture\")\n\treturn TextureFromJSObject(p)\n}",
"func (*myScene) Preload() {\n engo.Files.Load(\"textures/cumulus.png\")\n}",
"func getTextureIds(texInfos []TexInfo) map[string]int {\n\ttextureIds := make(map[string]int)\n\tnextId := 0\n\tfor i := 0; i < len(texInfos); i++ {\n\t\ttexInfo := texInfos[i]\n\n\t\t// convert filename byte array to string\n\t\tfilename := \"\"\n\t\tfor j := 0; j < len(texInfo.TextureName); j++ {\n\t\t\t// end of string\n\t\t\tif texInfo.TextureName[j] == 0 {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tfilename += string(texInfo.TextureName[j])\n\t\t}\n\n\t\t// generate new id for texture if necessary\n\t\t_, exists := textureIds[filename]\n\t\tif !exists {\n\t\t\ttextureIds[filename] = nextId\n\t\t\tnextId++\n\t\t}\n\t}\n\treturn textureIds\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n C.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (f *Font) GetTexture() *Texture { return f.texture }",
"func (w *Worley) GenerateTexture(tex *texture.Texture) {\n\tgl.BindImageTexture(0, tex.GetHandle(), 0, false, 0, gl.READ_WRITE, gl.RGBA32F)\n\tgl.BindImageTexture(1, w.noisetexture.GetHandle(), 0, false, 0, gl.READ_ONLY, gl.RGBA32F)\n\n\tw.computeshader.Use()\n\tw.computeshader.UpdateInt32(\"uWidth\", w.width)\n\tw.computeshader.UpdateInt32(\"uHeight\", w.height)\n\tw.computeshader.UpdateInt32(\"uResolution\", w.resolution)\n\tw.computeshader.UpdateInt32(\"uOctaves\", w.octaves)\n\tw.computeshader.UpdateFloat32(\"uRadius\", w.radius)\n\tw.computeshader.UpdateFloat32(\"uRadiusScale\", w.radiusscale)\n\tw.computeshader.UpdateFloat32(\"uBrightness\", w.brightness)\n\tw.computeshader.UpdateFloat32(\"uContrast\", w.contrast)\n\tw.computeshader.UpdateFloat32(\"uScale\", w.scale)\n\tw.computeshader.UpdateFloat32(\"uPersistance\", w.persistance)\n\tw.computeshader.Compute(uint32(w.width), uint32(w.height), 1)\n\tw.computeshader.Compute(1024, 1024, 1)\n\tw.computeshader.Release()\n\n\tgl.MemoryBarrier(gl.ALL_BARRIER_BITS)\n\n\tgl.BindImageTexture(0, 0, 0, false, 0, gl.WRITE_ONLY, gl.RGBA32F)\n\tgl.BindImageTexture(1, 0, 0, false, 0, gl.READ_ONLY, gl.RGBA32F)\n}",
"func (t *Texture) Loader() TextureLoader {\n\treturn t.loader\n}",
"func (self *TileSprite) LoadTexture1O(key interface{}, frame interface{}) {\n self.Object.Call(\"loadTexture\", key, frame)\n}",
"func IsBuffer(buffer uint32) bool {\n ret := C.glowIsBuffer(gpIsBuffer, (C.GLuint)(buffer))\n return ret == TRUE\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n C.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (self *TileSprite) Exists() bool{\n return self.Object.Get(\"exists\").Bool()\n}",
"func EGLImageTargetTexStorageEXT(target uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTexStorageEXT(gpEGLImageTargetTexStorageEXT, (C.GLenum)(target), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func EGLImageTargetTexStorageEXT(target uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTexStorageEXT(gpEGLImageTargetTexStorageEXT, (C.GLenum)(target), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func (self *TileSprite) Texture() *Texture{\n return &Texture{self.Object.Get(\"texture\")}\n}",
"func IsSampler(sampler uint32) bool {\n\tret := C.glowIsSampler(gpIsSampler, (C.GLuint)(sampler))\n\treturn ret == TRUE\n}",
"func IsSampler(sampler uint32) bool {\n\tret := C.glowIsSampler(gpIsSampler, (C.GLuint)(sampler))\n\treturn ret == TRUE\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func (p *Plugins) IsLoaded(t string, name string) (bool, error) {\n\t// Get all (running: false) plugins\n\tplugins := p.pClient.GetPlugins(false)\n\tif plugins.Err != nil {\n\t\treturn false, errors.Wrap(plugins.Err, \"could not obtain loaded plugins\")\n\t}\n\n\tfor _, lp := range plugins.LoadedPlugins {\n\t\tif t == lp.Type && name == lp.Name {\n\t\t\treturn true, nil\n\t\t}\n\t}\n\n\treturn false, nil\n}",
"func hasMemoryAndCPUInstUsage(info *cadvisorapiv2.ContainerInfo) bool {\n\tif !info.Spec.HasCpu || !info.Spec.HasMemory {\n\t\treturn false\n\t}\n\tcstat, found := latestContainerStats(info)\n\tif !found {\n\t\treturn false\n\t}\n\tif cstat.CpuInst == nil {\n\t\treturn false\n\t}\n\treturn cstat.CpuInst.Usage.Total != 0 && cstat.Memory.RSS != 0\n}",
"func (md MetalDrawable) Texture() mtl.Texture {\n\treturn mtl.NewTexture(C.MetalDrawable_Texture(md.metalDrawable))\n}",
"func GetTexImage(target uint32, level int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowGetTexImage(gpGetTexImage, (C.GLenum)(target), (C.GLint)(level), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func LoadTextureFileHandle(n *scene.Node, file io.ReadCloser) texture.Type {\n\t// Create texture object using no texture\n\ttex2 := texture.New()\n\n\t// Spawn goroutine to do texture loading\n\tgo func() {\n\t\tdefer file.Close()\n\n\t\t// Decode image\n\t\tsrcImage, _, err := image.Decode(file)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Load():\", err)\n\t\t}\n\n\t\t// Check to see if we need to resize image because it is too large.\n\t\tmaxSize := int(renderer.MaxTextureSize(n))\n\t\tsz := srcImage.Bounds().Size()\n\t\tif sz.X > maxSize || sz.Y > maxSize {\n\t\t\t// Cannot upload texture larger than max texture size, we must resize\n\t\t\t// it first.\n\t\t\tnewWidth := sz.X\n\t\t\tif newWidth > maxSize {\n\t\t\t\tnewWidth = maxSize\n\t\t\t}\n\t\t\tnewHeight := sz.Y\n\t\t\tif newHeight > maxSize {\n\t\t\t\tnewHeight = maxSize\n\t\t\t}\n\n\t\t\tsrcImage = resize.Resample(srcImage, srcImage.Bounds(), newWidth, newHeight)\n\t\t}\n\n\t\trgbaImage, ok := srcImage.(*image.RGBA)\n\t\tif !ok {\n\t\t\t// Convert image to RGBA\n\t\t\tb := srcImage.Bounds()\n\t\t\trgbaImage = image.NewRGBA(image.Rect(0, 0, b.Dx(), b.Dy()))\n\t\t\tdraw.Draw(rgbaImage, rgbaImage.Bounds(), srcImage, b.Min, draw.Src)\n\t\t}\n\n\t\t// Assign image to texture\n\t\ttex2.SetImage(rgbaImage)\n\n\t\t// Tell renderer to load the texture\n\t\trenderer.LoadTexture(n, tex2)\n\t}()\n\n\treturn tex2\n}",
"func (manager *Manager) IsLevelLoaded(levelName string) bool {\n\tmanager.mutex.RLock()\n\tvar _, ok = manager.levels[levelName]\n\tmanager.mutex.RUnlock()\n\treturn ok\n}",
"func (o *DeviceNode) HasResources() bool {\n\tif o != nil && o.Resources != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n\tsyscall.Syscall9(gpTextureView, 8, uintptr(texture), uintptr(target), uintptr(origtexture), uintptr(internalformat), uintptr(minlevel), uintptr(numlevels), uintptr(minlayer), uintptr(numlayers), 0)\n}",
"func ActiveTexture(texture uint32) {\n C.glowActiveTexture(gpActiveTexture, (C.GLenum)(texture))\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n C.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func GetTexImage(target uint32, level int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall6(gpGetTexImage, 5, uintptr(target), uintptr(level), uintptr(format), uintptr(xtype), uintptr(pixels), 0)\n}",
"func (renderbuffer Renderbuffer) IsRenderbuffer() bool {\n\treturn gl.IsRenderbuffer(uint32(renderbuffer))\n}",
"func (o *V0037JobProperties) HasMemoryPerGpu() bool {\n\tif o != nil && o.MemoryPerGpu != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func (fnt *Font) Texture() *Texture {\n\treturn fnt.texture\n}",
"func (b *GoGLBackend) CanUseAsImage(b2 backendbase.Backend) bool {\n\t_, ok := b2.(*GoGLBackendOffscreen)\n\treturn ok\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func HasResources(containers []v1.Container) bool {\n\tfor _, container := range containers {\n\t\thas := hasCPUAndMemory(container.Resources.Limits)\n\t\tif !has {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}",
"func (m *stateManager) Loaded() bool {\n\treturn m.stateLoaded\n}",
"func GetTextureSubImage(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, bufSize int32, pixels unsafe.Pointer) {\n\tsyscall.Syscall12(gpGetTextureSubImage, 12, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(zoffset), uintptr(width), uintptr(height), uintptr(depth), uintptr(format), uintptr(xtype), uintptr(bufSize), uintptr(pixels))\n}",
"func IsRenderbuffer(renderbuffer Uint) Boolean {\n\tcrenderbuffer, _ := (C.GLuint)(renderbuffer), cgoAllocsUnknown\n\t__ret := C.glIsRenderbuffer(crenderbuffer)\n\t__v := (Boolean)(__ret)\n\treturn __v\n}",
"func (texture Texture) Delete() {\n\t// TODO: Is it somehow possible to get &uint32(texture) without assigning it to textures?\n\ttextures := uint32(texture)\n\tgl.DeleteTextures(1, &textures)\n}",
"func (adapter *LevelAdapter) LevelTextureIDs() []int {\n\treturn *adapter.levelTextures.get().(*[]int)\n}",
"func (o *DynamicFont) GetUseMipmaps() gdnative.Bool {\n\t//log.Println(\"Calling DynamicFont.GetUseMipmaps()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"DynamicFont\", \"get_use_mipmaps\")\n\n\t// Call the parent method.\n\t// bool\n\tretPtr := gdnative.NewEmptyBool()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := gdnative.NewBoolFromPointer(retPtr)\n\treturn ret\n}",
"func GetTextureImage(texture uint32, level int32, format uint32, xtype uint32, bufSize int32, pixels unsafe.Pointer) {\n\tsyscall.Syscall6(gpGetTextureImage, 6, uintptr(texture), uintptr(level), uintptr(format), uintptr(xtype), uintptr(bufSize), uintptr(pixels))\n}",
"func ActiveTexture(texture Enum) {\n\tgl.ActiveTexture(uint32(texture))\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n C.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func GetTextureSubImage(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, bufSize int32, pixels unsafe.Pointer) {\n\tC.glowGetTextureSubImage(gpGetTextureSubImage, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), (C.GLsizei)(bufSize), pixels)\n}",
"func GetTextureSubImage(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, bufSize int32, pixels unsafe.Pointer) {\n\tC.glowGetTextureSubImage(gpGetTextureSubImage, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), (C.GLsizei)(bufSize), pixels)\n}",
"func IsNotLoaded(err error) bool {\n\tif err == nil {\n\t\treturn false\n\t}\n\tvar e *NotLoadedError\n\treturn errors.As(err, &e)\n}",
"func WasInit() bool {\n\treturn int(C.TTF_WasInit()) != 0\n}",
"func (c *Button) loadTextureFromTTF() {\n\tvar err error\n\tc.font = engosdl.GetFontManager().CreateFont(c.GetName(), c.FontFile, c.FontSize)\n\tc.texture = c.font.GetTextureFromFont(c.Message, c.Color)\n\t_, _, c.width, c.height, err = c.texture.Query()\n\tif err != nil {\n\t\tengosdl.Logger.Error().Err(err).Msg(\"Query error\")\n\t\tpanic(err)\n\t}\n\tc.GetEntity().GetTransform().SetDim(engosdl.NewVector(float64(c.width), float64(c.height)))\n}",
"func IsPluginLoaded(id string) bool {\n\tfor _, pl := range loadedPlugins {\n\t\tif pl.Plugin != nil && pl.Manifest.Id == id {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}",
"func IsRenderbuffer(renderbuffer uint32) bool {\n\tret := C.glowIsRenderbuffer(gpIsRenderbuffer, (C.GLuint)(renderbuffer))\n\treturn ret == TRUE\n}"
] | [
"0.68152726",
"0.6739268",
"0.66833866",
"0.66781384",
"0.6520428",
"0.65059024",
"0.6449533",
"0.6223648",
"0.6177797",
"0.6177797",
"0.60325664",
"0.5914812",
"0.5843292",
"0.5765134",
"0.57647234",
"0.57006824",
"0.56956804",
"0.5619371",
"0.5588844",
"0.55697125",
"0.55187196",
"0.5480212",
"0.54710215",
"0.5455568",
"0.5298807",
"0.52522236",
"0.52517575",
"0.5251301",
"0.52433085",
"0.523334",
"0.52118945",
"0.5189131",
"0.5128858",
"0.5127275",
"0.5121958",
"0.51059717",
"0.5105866",
"0.5103308",
"0.5103308",
"0.50921494",
"0.50737065",
"0.5073299",
"0.50677866",
"0.50672126",
"0.505268",
"0.50501704",
"0.50462574",
"0.5043177",
"0.5040154",
"0.50395215",
"0.5023498",
"0.49758965",
"0.49710265",
"0.4967123",
"0.49664646",
"0.49502444",
"0.49450552",
"0.49414968",
"0.49374813",
"0.49155468",
"0.4914869",
"0.4914869",
"0.49110472",
"0.49004042",
"0.49004042",
"0.4895977",
"0.48473367",
"0.48461676",
"0.48434803",
"0.48416972",
"0.48301938",
"0.48194146",
"0.4819084",
"0.4812356",
"0.48062977",
"0.4791636",
"0.47899824",
"0.47790992",
"0.47739342",
"0.47703475",
"0.4765929",
"0.47575516",
"0.47575516",
"0.47532785",
"0.47511673",
"0.4749254",
"0.47448787",
"0.47399956",
"0.47378695",
"0.47312286",
"0.472217",
"0.4717949",
"0.47022057",
"0.46973535",
"0.46973535",
"0.46941125",
"0.4684364",
"0.4680006",
"0.46766177",
"0.4675876"
] | 0.6508359 | 5 |
render a vertex using the specified vertex array element | func ArrayElement(i int32) {
C.glowArrayElement(gpArrayElement, (C.GLint)(i))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tsyscall.Syscall(gpVertexArrayElementBuffer, 2, uintptr(vaobj), uintptr(buffer), 0)\n}",
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tC.glowVertexArrayElementBuffer(gpVertexArrayElementBuffer, (C.GLuint)(vaobj), (C.GLuint)(buffer))\n}",
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tC.glowVertexArrayElementBuffer(gpVertexArrayElementBuffer, (C.GLuint)(vaobj), (C.GLuint)(buffer))\n}",
"func (vao *VAO) Render() {\n\tgl.BindVertexArray(vao.handle)\n\tif vao.indexBuffer != nil {\n\t\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, vao.indexBuffer.GetHandle())\n\t\tgl.DrawElements(vao.mode, vao.indexBuffer.Len(), gl.UNSIGNED_SHORT, nil)\n\t\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, 0)\n\t} else {\n\t\tgl.DrawArrays(vao.mode, 0, vao.vertexBuffers[0].Len())\n\t}\n\tgl.BindVertexArray(0)\n}",
"func VertexArrayAttribFormat(vaobj uint32, attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tsyscall.Syscall6(gpVertexArrayAttribFormat, 6, uintptr(vaobj), uintptr(attribindex), uintptr(size), uintptr(xtype), boolToUintptr(normalized), uintptr(relativeoffset))\n}",
"func DrawElementsBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, basevertex int32) {\n\tsyscall.Syscall6(gpDrawElementsBaseVertex, 5, uintptr(mode), uintptr(count), uintptr(xtype), uintptr(indices), uintptr(basevertex), 0)\n}",
"func (obj *Device) DrawIndexedPrimitive(\n\ttyp PRIMITIVETYPE,\n\tbaseVertexIndex int,\n\tminIndex uint,\n\tnumVertices uint,\n\tstartIndex uint,\n\tprimitiveCount uint,\n) Error {\n\tret, _, _ := syscall.Syscall9(\n\t\tobj.vtbl.DrawIndexedPrimitive,\n\t\t7,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(typ),\n\t\tuintptr(baseVertexIndex),\n\t\tuintptr(minIndex),\n\t\tuintptr(numVertices),\n\t\tuintptr(startIndex),\n\t\tuintptr(primitiveCount),\n\t\t0,\n\t\t0,\n\t)\n\treturn toErr(ret)\n}",
"func VertexArrayVertexBuffer(vaobj uint32, bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tsyscall.Syscall6(gpVertexArrayVertexBuffer, 5, uintptr(vaobj), uintptr(bindingindex), uintptr(buffer), uintptr(offset), uintptr(stride), 0)\n}",
"func DrawElementsBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, basevertex int32) {\n C.glowDrawElementsBaseVertex(gpDrawElementsBaseVertex, (C.GLenum)(mode), (C.GLsizei)(count), (C.GLenum)(xtype), indices, (C.GLint)(basevertex))\n}",
"func VertexArrayVertexBuffers(vaobj uint32, first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tsyscall.Syscall6(gpVertexArrayVertexBuffers, 6, uintptr(vaobj), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(strides)))\n}",
"func (va *VertexArray) Draw() {\n\tgl.BindVertexArray(va.vao)\n\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, va.ibo)\n\tgl.DrawElements(gl.TRIANGLES, int32(va.vertices), gl.UNSIGNED_INT, nil)\n}",
"func VertexArrayAttribFormat(vaobj uint32, attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tC.glowVertexArrayAttribFormat(gpVertexArrayAttribFormat, (C.GLuint)(vaobj), (C.GLuint)(attribindex), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLuint)(relativeoffset))\n}",
"func VertexArrayAttribFormat(vaobj uint32, attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tC.glowVertexArrayAttribFormat(gpVertexArrayAttribFormat, (C.GLuint)(vaobj), (C.GLuint)(attribindex), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLuint)(relativeoffset))\n}",
"func VertexArrayVertexBuffer(vaobj uint32, bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowVertexArrayVertexBuffer(gpVertexArrayVertexBuffer, (C.GLuint)(vaobj), (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func VertexArrayVertexBuffer(vaobj uint32, bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowVertexArrayVertexBuffer(gpVertexArrayVertexBuffer, (C.GLuint)(vaobj), (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func VertexAttribFormat(attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tsyscall.Syscall6(gpVertexAttribFormat, 5, uintptr(attribindex), uintptr(size), uintptr(xtype), boolToUintptr(normalized), uintptr(relativeoffset), 0)\n}",
"func VertexPointer(size int32, xtype uint32, stride int32, pointer unsafe.Pointer) {\n C.glowVertexPointer(gpVertexPointer, (C.GLint)(size), (C.GLenum)(xtype), (C.GLsizei)(stride), pointer)\n}",
"func EnableVertexAttribArray(index uint32) {\n C.glowEnableVertexAttribArray(gpEnableVertexAttribArray, (C.GLuint)(index))\n}",
"func VertexArrayVertexBuffers(vaobj uint32, first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowVertexArrayVertexBuffers(gpVertexArrayVertexBuffers, (C.GLuint)(vaobj), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func VertexArrayVertexBuffers(vaobj uint32, first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowVertexArrayVertexBuffers(gpVertexArrayVertexBuffers, (C.GLuint)(vaobj), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindVertexArray(array uint32) {\n C.glowBindVertexArray(gpBindVertexArray, (C.GLuint)(array))\n}",
"func DrawElementsBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, basevertex int32) {\n\tC.glowDrawElementsBaseVertex(gpDrawElementsBaseVertex, (C.GLenum)(mode), (C.GLsizei)(count), (C.GLenum)(xtype), indices, (C.GLint)(basevertex))\n}",
"func DrawElementsBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, basevertex int32) {\n\tC.glowDrawElementsBaseVertex(gpDrawElementsBaseVertex, (C.GLenum)(mode), (C.GLsizei)(count), (C.GLenum)(xtype), indices, (C.GLint)(basevertex))\n}",
"func PackVertex(v *gdbi.Vertex) map[string]interface{} {\n\treturn map[string]interface{}{\n\t\t\"gid\": v.ID,\n\t\t\"label\": v.Label,\n\t\t\"data\": v.Data,\n\t}\n}",
"func ArrayElement(i int32) {\n C.glowArrayElement(gpArrayElement, (C.GLint)(i))\n}",
"func (v Vertex) String() string {\n\treturn \"Vertex: \" + strconv.Itoa(v.X) + \",\" + strconv.Itoa(v.Y)\n}",
"func (r *Renderer) Render() {\n\tgl.DrawArrays(gl.TRIANGLES, 0, int32(len(r.RawRenderer)*4))\n}",
"func VertexPointer(size int32, xtype uint32, stride int32, pointer unsafe.Pointer) {\n\tsyscall.Syscall6(gpVertexPointer, 4, uintptr(size), uintptr(xtype), uintptr(stride), uintptr(pointer), 0, 0)\n}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n\tsyscall.Syscall6(gpMultiDrawElementsBaseVertex, 6, uintptr(mode), uintptr(unsafe.Pointer(count)), uintptr(xtype), uintptr(unsafe.Pointer(indices)), uintptr(drawcount), uintptr(unsafe.Pointer(basevertex)))\n}",
"func VertexAttrib1fv(index uint32, value []float32) {\n\tgl.VertexAttrib1fv(index, &value[0])\n}",
"func (native *OpenGL) DrawElements(mode uint32, count int32, elementType uint32, indices uintptr) {\n\tgl.DrawElements(mode, count, elementType, unsafe.Pointer(indices)) // nolint: govet,gas\n}",
"func EnableVertexArrayAttrib(vaobj uint32, index uint32) {\n\tC.glowEnableVertexArrayAttrib(gpEnableVertexArrayAttrib, (C.GLuint)(vaobj), (C.GLuint)(index))\n}",
"func EnableVertexArrayAttrib(vaobj uint32, index uint32) {\n\tC.glowEnableVertexArrayAttrib(gpEnableVertexArrayAttrib, (C.GLuint)(vaobj), (C.GLuint)(index))\n}",
"func VertexAttribFormat(attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n C.glowVertexAttribFormat(gpVertexAttribFormat, (C.GLuint)(attribindex), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLuint)(relativeoffset))\n}",
"func DrawElementsInstancedBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, instancecount int32, basevertex int32) {\n C.glowDrawElementsInstancedBaseVertex(gpDrawElementsInstancedBaseVertex, (C.GLenum)(mode), (C.GLsizei)(count), (C.GLenum)(xtype), indices, (C.GLsizei)(instancecount), (C.GLint)(basevertex))\n}",
"func DrawElementsInstancedBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, instancecount int32, basevertex int32) {\n\tsyscall.Syscall6(gpDrawElementsInstancedBaseVertex, 6, uintptr(mode), uintptr(count), uintptr(xtype), uintptr(indices), uintptr(instancecount), uintptr(basevertex))\n}",
"func ProgramUniform1fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform1fv(gpProgramUniform1fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func (v Vertex) Print() {\n fmt.Printf(\"Vertice: %d\\n Feromona Ini: %f\\n Feromona Act: %f\\n\", v.index, v.pheromone_init, v.pheromone)\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n C.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func initFontVbo() {\n\tvar vertexAttributes = make([]float32, 5*6*len(charDatas))\n\ti := 0\n\tfor _, charData := range charDatas {\n\t\ttop := float32(charData.ty+charData.h) / 256\n\t\tbottom := float32(charData.ty) / 256\n\t\tright := float32(charData.tx+charData.w) / 256\n\t\tleft := float32(charData.tx) / 256\n\n\t\tw := float32(charData.w) / 256\n\t\th := float32(charData.h) / 256\n\n\t\t// tri 1\n\t\tvertexAttributes[i] = w\n\t\tvertexAttributes[i+1] = h\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = right\n\t\tvertexAttributes[i+4] = bottom\n\t\ti += 5\n\n\t\tvertexAttributes[i] = w\n\t\tvertexAttributes[i+1] = 0\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = right\n\t\tvertexAttributes[i+4] = top\n\t\ti += 5\n\n\t\tvertexAttributes[i] = 0\n\t\tvertexAttributes[i+1] = h\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = left\n\t\tvertexAttributes[i+4] = bottom\n\t\ti += 5\n\n\t\t// tri 2\n\t\tvertexAttributes[i] = w\n\t\tvertexAttributes[i+1] = 0\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = right\n\t\tvertexAttributes[i+4] = top\n\t\ti += 5\n\n\t\tvertexAttributes[i] = 0\n\t\tvertexAttributes[i+1] = 0\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = left\n\t\tvertexAttributes[i+4] = top\n\t\ti += 5\n\n\t\tvertexAttributes[i] = 0\n\t\tvertexAttributes[i+1] = h\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = left\n\t\tvertexAttributes[i+4] = bottom\n\t\ti += 5\n\t}\n\n\tgl.GenBuffers(1, &vbo)\n\tgl.GenVertexArrays(1, &vao)\n\tgl.BindVertexArray(vao)\n\tgl.EnableVertexAttribArray(0)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(\n\t\tgl.ARRAY_BUFFER,\n\t\t4*len(vertexAttributes),\n\t\tgl.Ptr(vertexAttributes),\n\t\tgl.STATIC_DRAW,\n\t)\n\tgl.VertexAttribPointer(0, 3, gl.FLOAT, false, 5*4, gl.PtrOffset(0))\n\tgl.EnableVertexAttribArray(0)\n\tgl.VertexAttribPointer(1, 2, gl.FLOAT, false, 5*4, gl.PtrOffset(4*3))\n\tgl.EnableVertexAttribArray(1)\n\t//unbind\n\tgl.BindVertexArray(0)\n\n}",
"func VertexAttrib3fv(index uint32, value []float32) {\n\tgl.VertexAttrib3fv(index, &value[0])\n}",
"func VertexAttribFormat(attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tC.glowVertexAttribFormat(gpVertexAttribFormat, (C.GLuint)(attribindex), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLuint)(relativeoffset))\n}",
"func VertexAttribFormat(attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tC.glowVertexAttribFormat(gpVertexAttribFormat, (C.GLuint)(attribindex), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLuint)(relativeoffset))\n}",
"func Draw(bp *BoundProgram) {\n\t// TODO might still need the buffer contents to be pushed:\n\t/*\n\t\tgl.BufferData(gl.ARRAY_BUFFER, len(cube_vertices)*4, gl.Ptr(cube_vertices), gl.STATIC_DRAW)\n\t*/\n\t// TODO might still need textures to be bound for the call:\n\t/*\n\t\tgl.ActiveTexture(gl.TEXTURE0)\n\t\tgl.BindTexture(gl.TEXTURE_2D, tCache.Get(\"placeholder\"))\n\t*/\n\t// TODO draw calls are themselves still specialized and param'd:\n\t/*\n\t\tgl.DrawArrays(gl.TRIANGLES, 0, 6*2*3)\n\t*/\n}",
"func VertexPointer(size int32, xtype uint32, stride int32, pointer unsafe.Pointer) {\n\tC.glowVertexPointer(gpVertexPointer, (C.GLint)(size), (C.GLenum)(xtype), (C.GLsizei)(stride), pointer)\n}",
"func (g *Geometry) Draw() {\n\tgl.BindVertexArray(g.handle)\n\tif g.hasIndices {\n\t\tgl.DrawElements(\n\t\t\tg.PrimType,\n\t\t\tg.numIndices,\n\t\t\tg.IndexBuffer.ComponentType,\n\t\t\tgl.PtrOffset(g.IndexBuffer.ByteOffset),\n\t\t)\n\t} else {\n\t\tgl.DrawArrays(g.PrimType, 0, g.numIndices)\n\t}\n}",
"func VertexArrayBindingDivisor(vaobj uint32, bindingindex uint32, divisor uint32) {\n\tsyscall.Syscall(gpVertexArrayBindingDivisor, 3, uintptr(vaobj), uintptr(bindingindex), uintptr(divisor))\n}",
"func ProgramUniform3fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform3fv(gpProgramUniform3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func (gl *WebGL) DrawElements(mode GLEnum, count int, valueType GLEnum, offset int) {\n\tgl.context.Call(\"drawElements\", mode, count, valueType, offset)\n}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n C.glowMultiDrawElementsBaseVertex(gpMultiDrawElementsBaseVertex, (C.GLenum)(mode), (*C.GLsizei)(unsafe.Pointer(count)), (C.GLenum)(xtype), indices, (C.GLsizei)(drawcount), (*C.GLint)(unsafe.Pointer(basevertex)))\n}",
"func ProgramUniformMatrix4x3fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix4x3fv(gpProgramUniformMatrix4x3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func GenVertexArrays(n int32, arrays *uint32) {\n C.glowGenVertexArrays(gpGenVertexArrays, (C.GLsizei)(n), (*C.GLuint)(unsafe.Pointer(arrays)))\n}",
"func (self *Graphics) _renderWebGLI(args ...interface{}) {\n self.Object.Call(\"_renderWebGL\", args)\n}",
"func EnableVertexArrayAttrib(vaobj uint32, index uint32) {\n\tsyscall.Syscall(gpEnableVertexArrayAttrib, 2, uintptr(vaobj), uintptr(index), 0)\n}",
"func VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, pointer unsafe.Pointer) {\n C.glowVertexAttribPointer(gpVertexAttribPointer, (C.GLuint)(index), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLsizei)(stride), pointer)\n}",
"func ProgramUniform4fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform4fv(gpProgramUniform4fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, pointer unsafe.Pointer) {\n\tsyscall.Syscall6(gpVertexAttribPointer, 6, uintptr(index), uintptr(size), uintptr(xtype), boolToUintptr(normalized), uintptr(stride), uintptr(pointer))\n}",
"func (c *Calculator) AddVertex(id string) {\n\tc.g.AddVertex(id, nil)\n}",
"func ProgramUniformMatrix3x4fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix3x4fv(gpProgramUniformMatrix3x4fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n\tsyscall.Syscall(gpVertexAttribBinding, 2, uintptr(attribindex), uintptr(bindingindex), 0)\n}",
"func (obj *Device) DrawIndexedPrimitiveUP(\n\ttyp PRIMITIVETYPE,\n\tminVertexIndex uint,\n\tnumVertices uint,\n\tprimitiveCount uint,\n\tindexData uintptr,\n\tindexDataFormat FORMAT,\n\tvertexStreamZeroData uintptr,\n\tvertexStreamZeroStride uint,\n) Error {\n\tret, _, _ := syscall.Syscall9(\n\t\tobj.vtbl.DrawIndexedPrimitiveUP,\n\t\t9,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(typ),\n\t\tuintptr(minVertexIndex),\n\t\tuintptr(numVertices),\n\t\tuintptr(primitiveCount),\n\t\tindexData,\n\t\tuintptr(indexDataFormat),\n\t\tvertexStreamZeroData,\n\t\tuintptr(vertexStreamZeroStride),\n\t)\n\treturn toErr(ret)\n}",
"func ProvokingVertex(mode uint32) {\n C.glowProvokingVertex(gpProvokingVertex, (C.GLenum)(mode))\n}",
"func GenVertexArrays(n int32, arrays *uint32) {\n\tsyscall.Syscall(gpGenVertexArrays, 2, uintptr(n), uintptr(unsafe.Pointer(arrays)), 0)\n}",
"func DrawRangeElementsBaseVertex(mode uint32, start uint32, end uint32, count int32, xtype uint32, indices unsafe.Pointer, basevertex int32) {\n\tsyscall.Syscall9(gpDrawRangeElementsBaseVertex, 7, uintptr(mode), uintptr(start), uintptr(end), uintptr(count), uintptr(xtype), uintptr(indices), uintptr(basevertex), 0, 0)\n}",
"func VertexArrayBindingDivisor(vaobj uint32, bindingindex uint32, divisor uint32) {\n\tC.glowVertexArrayBindingDivisor(gpVertexArrayBindingDivisor, (C.GLuint)(vaobj), (C.GLuint)(bindingindex), (C.GLuint)(divisor))\n}",
"func VertexArrayBindingDivisor(vaobj uint32, bindingindex uint32, divisor uint32) {\n\tC.glowVertexArrayBindingDivisor(gpVertexArrayBindingDivisor, (C.GLuint)(vaobj), (C.GLuint)(bindingindex), (C.GLuint)(divisor))\n}",
"func (vao *VAO) RenderInstanced(instancecount int32) {\n\tgl.BindVertexArray(vao.handle)\n\tif vao.indexBuffer != nil {\n\t\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, vao.indexBuffer.GetHandle())\n\t\tgl.DrawElementsInstanced(vao.mode, vao.indexBuffer.Len(), gl.UNSIGNED_SHORT, nil, instancecount)\n\t\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, 0)\n\t} else {\n\t\tgl.DrawArraysInstanced(vao.mode, 0, vao.vertexBuffers[0].Len(), instancecount)\n\t}\n\tgl.BindVertexArray(0)\n}",
"func (obj *Device) DrawPrimitive(\n\ttyp PRIMITIVETYPE,\n\tstartVertex uint,\n\tprimitiveCount uint,\n) Error {\n\tret, _, _ := syscall.Syscall6(\n\t\tobj.vtbl.DrawPrimitive,\n\t\t4,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(typ),\n\t\tuintptr(startVertex),\n\t\tuintptr(primitiveCount),\n\t\t0,\n\t\t0,\n\t)\n\treturn toErr(ret)\n}",
"func makeVao(data []float32) uint32 {\n\tvar vbo uint32\n\tgl.GenBuffers(1, &vbo)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(gl.ARRAY_BUFFER, 4*len(data), gl.Ptr(data), gl.STATIC_DRAW)\n\n\tvar vao uint32\n\tgl.GenVertexArrays(1, &vao)\n\tgl.BindVertexArray(vao)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tvar offset int\n\n\t// position attribute\n\tgl.VertexAttribPointer(0, 3, gl.FLOAT, false, 8*4, gl.PtrOffset(offset))\n\tgl.EnableVertexAttribArray(0)\n\toffset += 3 * 4\n\n\t// color attribute\n\tgl.VertexAttribPointer(1, 3, gl.FLOAT, false, 8*4, gl.PtrOffset(offset))\n\tgl.EnableVertexAttribArray(1)\n\toffset += 3 * 4\n\n\t// texture coord attribute\n\tgl.VertexAttribPointer(2, 2, gl.FLOAT, false, 8*4, gl.PtrOffset(offset))\n\tgl.EnableVertexAttribArray(2)\n\toffset += 2 * 4\n\n\treturn vao\n}",
"func (v Vertex) GetIndex() int {\n return v.index\n}",
"func ProgramUniform1iv(program uint32, location int32, count int32, value *int32) {\n C.glowProgramUniform1iv(gpProgramUniform1iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func VertexAttrib1fv(index Uint, v []Float) {\n\tcindex, _ := (C.GLuint)(index), cgoAllocsUnknown\n\tcv, _ := (*C.GLfloat)(unsafe.Pointer((*sliceHeader)(unsafe.Pointer(&v)).Data)), cgoAllocsUnknown\n\tC.glVertexAttrib1fv(cindex, cv)\n}",
"func VertexAttrib4fv(index uint32, value []float32) {\n\tgl.VertexAttrib4fv(index, &value[0])\n}",
"func EnableVertexAttribArray(index Uint) {\n\tcindex, _ := (C.GLuint)(index), cgoAllocsUnknown\n\tC.glEnableVertexAttribArray(cindex)\n}",
"func (c Cell) Vertex(k int) Point {\n\treturn Point{faceUVToXYZ(int(c.face), c.uv.Vertices()[k].X, c.uv.Vertices()[k].Y).Normalize()}\n}",
"func ProgramUniform1f(program uint32, location int32, v0 float32) {\n C.glowProgramUniform1f(gpProgramUniform1f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0))\n}",
"func EnableVertexAttribArray(index uint32) {\n\tgl.EnableVertexAttribArray(index)\n}",
"func ProgramUniform4f(program uint32, location int32, v0 float32, v1 float32, v2 float32, v3 float32) {\n C.glowProgramUniform4f(gpProgramUniform4f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1), (C.GLfloat)(v2), (C.GLfloat)(v3))\n}",
"func VertexBindingDivisor(bindingindex uint32, divisor uint32) {\n\tsyscall.Syscall(gpVertexBindingDivisor, 2, uintptr(bindingindex), uintptr(divisor), 0)\n}",
"func p(v *Vertex, depth int, showPointer bool) string {\n\tstr := \"\"\n\tlevels := \"\"\n\t// Calculate our depth string\n\t// levels := strings.Repeat(\" \", depth)\n\tfor i := depth; i >= 0; i-- {\n\t\tlevels = fmt.Sprintf(\"%s%s\", levels, \" \")\n\t}\n\tstr = fmt.Sprintf(\"%s\\n\", str)\n\tstr = fmt.Sprintf(\"%s%sDepth : %d\\n\", str, levels, depth)\n\tstr = fmt.Sprintf(\"%s%sName : %s\\n\", str, levels, v.Name)\n\tstr = fmt.Sprintf(\"%s%sValue : %d\\n\", str, levels, v.Value)\n\tif showPointer == true {\n\t\tstr = fmt.Sprintf(\"%s%sLocation : %p\\n\", str, levels, v)\n\t}\n\tstr = fmt.Sprintf(\"%s\\n\", str)\n\treturn str\n}",
"func (t *TextRenderer) Print(text string, x, y float32, scale float32) error {\n\tindices := []rune(text)\n\tif len(indices) == 0 {\n\t\treturn nil\n\t}\n\tt.shader.Use()\n\n\tlowChar := rune(32)\n\n\tgl.ActiveTexture(gl.TEXTURE0)\n\tgl.BindVertexArray(t.vao)\n\n\tfor i := range indices {\n\t\truneIndex := indices[i]\n\n\t\tif int(runeIndex)-int(lowChar) > len(t.fontChar) || runeIndex < lowChar {\n\t\t\tcontinue\n\t\t}\n\n\t\tch := t.fontChar[runeIndex-lowChar]\n\n\t\txpos := x + float32(ch.bearingH)*scale\n\t\typos := y - float32(ch.height-ch.bearingV)*scale\n\t\tw := float32(ch.width) * scale\n\t\th := float32(ch.height) * scale\n\n\t\tvar vertices = []float32{\n\t\t\txpos, ypos + h, 0.0, 1.0,\n\t\t\txpos + w, ypos, 1.0, 0.0,\n\t\t\txpos, ypos, 0.0, 0.0,\n\t\t\txpos, ypos + h, 0.0, 1.0,\n\t\t\txpos + w, ypos + h, 1.0, 1.0,\n\t\t\txpos + w, ypos, 1.0, 0.0,\n\t\t}\n\n\t\tgl.BindTexture(gl.TEXTURE_2D, ch.textureID)\n\t\tgl.BindBuffer(gl.ARRAY_BUFFER, t.vbo)\n\t\tgl.BufferSubData(gl.ARRAY_BUFFER, 0, len(vertices)*4, gl.Ptr(vertices))\n\n\t\tgl.BindBuffer(gl.ARRAY_BUFFER, 0)\n\t\tgl.DrawArrays(gl.TRIANGLES, 0, 6)\n\n\t\tx += float32(ch.advance>>6) * scale\n\t}\n\tgl.BindVertexArray(0)\n\tgl.BindTexture(gl.TEXTURE_2D, 0)\n\tgl.UseProgram(0)\n\treturn nil\n}",
"func (debugging *debuggingOpenGL) DrawElements(mode uint32, count int32, elementType uint32, indices uintptr) {\n\tdebugging.recordEntry(\"DrawElements\", mode, count, elementType, indices)\n\tdebugging.gl.DrawElements(mode, count, elementType, indices)\n\tdebugging.recordExit(\"DrawElements\")\n}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n\tC.glowMultiDrawElementsBaseVertex(gpMultiDrawElementsBaseVertex, (C.GLenum)(mode), (*C.GLsizei)(unsafe.Pointer(count)), (C.GLenum)(xtype), indices, (C.GLsizei)(drawcount), (*C.GLint)(unsafe.Pointer(basevertex)))\n}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n\tC.glowMultiDrawElementsBaseVertex(gpMultiDrawElementsBaseVertex, (C.GLenum)(mode), (*C.GLsizei)(unsafe.Pointer(count)), (C.GLenum)(xtype), indices, (C.GLsizei)(drawcount), (*C.GLint)(unsafe.Pointer(basevertex)))\n}",
"func (s *Shader) VertexFormat() AttrFormat {\n\treturn s.vertexFmt\n}",
"func makeVao(points []float32) uint32 {\n\tvar vbo uint32\n\tvar vao uint32\n\tvar stride int32\n\n\t//points only 9\n\t//points and colors 18\n\tstride = int32(4 * len(points) / 3)\n\tprintln(\"stride: \", stride)\n\n\tgl.GenVertexArrays(1, &vao)\n\tgl.GenBuffers(1, &vbo)\n\tgl.BindVertexArray(vao)\n\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(gl.ARRAY_BUFFER, 4*len(points), gl.Ptr(points), gl.STATIC_DRAW)\n\n\tgl.EnableVertexAttribArray(0)\n\tgl.VertexAttribPointer(0, 3, gl.FLOAT, false, stride, gl.PtrOffset(0))\n\tprintln(\"triangle length: \", len(points))\n\tif len(points) >= 18 {\n\t\tlog.Println(\"In if\")\n\t\tgl.EnableVertexAttribArray(1)\n\t\tgl.VertexAttribPointer(1, 3, gl.FLOAT, false, stride, gl.PtrOffset(3*4))\n\t}\n\treturn vao\n}",
"func GetVertexAttribfv(index uint32, pname uint32, params *float32) {\n C.glowGetVertexAttribfv(gpGetVertexAttribfv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLfloat)(unsafe.Pointer(params)))\n}",
"func DrawElementsInstancedBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, instancecount int32, basevertex int32) {\n\tC.glowDrawElementsInstancedBaseVertex(gpDrawElementsInstancedBaseVertex, (C.GLenum)(mode), (C.GLsizei)(count), (C.GLenum)(xtype), indices, (C.GLsizei)(instancecount), (C.GLint)(basevertex))\n}",
"func DrawElementsInstancedBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, instancecount int32, basevertex int32) {\n\tC.glowDrawElementsInstancedBaseVertex(gpDrawElementsInstancedBaseVertex, (C.GLenum)(mode), (C.GLsizei)(count), (C.GLenum)(xtype), indices, (C.GLsizei)(instancecount), (C.GLint)(basevertex))\n}",
"func DrawElements(mode GLenum, count int, typ GLenum, indices interface{}) {\n\tC.glDrawElements(C.GLenum(mode), C.GLsizei(count), C.GLenum(typ),\n\t\tptr(indices))\n}",
"func VertexAttrib3fv(index Uint, v []Float) {\n\tcindex, _ := (C.GLuint)(index), cgoAllocsUnknown\n\tcv, _ := (*C.GLfloat)(unsafe.Pointer((*sliceHeader)(unsafe.Pointer(&v)).Data)), cgoAllocsUnknown\n\tC.glVertexAttrib3fv(cindex, cv)\n}",
"func EnableVertexAttribArray(index uint32) {\n\tC.glowEnableVertexAttribArray(gpEnableVertexAttribArray, (C.GLuint)(index))\n}",
"func EnableVertexAttribArray(index uint32) {\n\tC.glowEnableVertexAttribArray(gpEnableVertexAttribArray, (C.GLuint)(index))\n}",
"func ProvokingVertex(mode uint32) {\n\tC.glowProvokingVertex(gpProvokingVertex, (C.GLenum)(mode))\n}",
"func ProvokingVertex(mode uint32) {\n\tC.glowProvokingVertex(gpProvokingVertex, (C.GLenum)(mode))\n}",
"func ProgramUniformMatrix4x2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix4x2fv(gpProgramUniformMatrix4x2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform4iv(program uint32, location int32, count int32, value *int32) {\n C.glowProgramUniform4iv(gpProgramUniform4iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform1uiv(program uint32, location int32, count int32, value *uint32) {\n C.glowProgramUniform1uiv(gpProgramUniform1uiv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform3f(program uint32, location int32, v0 float32, v1 float32, v2 float32) {\n C.glowProgramUniform3f(gpProgramUniform3f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1), (C.GLfloat)(v2))\n}",
"func (quad *Quad) Draw() {\n\tgl.BindVertexArray(quad.vao)\n\tgl.DrawArrays(gl.TRIANGLES, 0, 6)\n}"
] | [
"0.65441036",
"0.6367963",
"0.6367963",
"0.6162593",
"0.57998973",
"0.5791173",
"0.57730454",
"0.5724036",
"0.5672329",
"0.5670119",
"0.56397915",
"0.5591109",
"0.5591109",
"0.556642",
"0.556642",
"0.55547774",
"0.5543371",
"0.5539181",
"0.5487333",
"0.5487333",
"0.5472863",
"0.5444109",
"0.5444109",
"0.54218155",
"0.5394738",
"0.5393712",
"0.53862613",
"0.5373471",
"0.5360846",
"0.5303586",
"0.53020424",
"0.5296102",
"0.5296102",
"0.5287832",
"0.5284662",
"0.5278463",
"0.52604353",
"0.52546006",
"0.5250028",
"0.52497315",
"0.52454495",
"0.5238137",
"0.5238137",
"0.52280563",
"0.5197623",
"0.5197539",
"0.5171673",
"0.5169724",
"0.51687455",
"0.5153393",
"0.5146261",
"0.51424116",
"0.5131579",
"0.51290554",
"0.5128659",
"0.51231843",
"0.5122518",
"0.51224655",
"0.5113081",
"0.5112979",
"0.5111585",
"0.5110444",
"0.508911",
"0.508713",
"0.50870794",
"0.50870794",
"0.50865954",
"0.50845647",
"0.5081821",
"0.5064638",
"0.50641835",
"0.506026",
"0.5052015",
"0.5047515",
"0.5027597",
"0.5024585",
"0.5022744",
"0.50093627",
"0.5007686",
"0.50003564",
"0.49913946",
"0.4987876",
"0.49791297",
"0.49791297",
"0.4978117",
"0.497671",
"0.49760097",
"0.4969349",
"0.4969349",
"0.496626",
"0.4965062",
"0.49642342",
"0.49642342",
"0.49641064",
"0.49641064",
"0.49638566",
"0.49586317",
"0.49579862",
"0.49532163",
"0.4946831"
] | 0.5283302 | 35 |
Attaches a shader object to a program object | func AttachShader(program uint32, shader uint32) {
C.glowAttachShader(gpAttachShader, (C.GLuint)(program), (C.GLuint)(shader))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func AttachShader(program uint32, shader uint32) {\n C.glowAttachShader(gpAttachShader, (C.GLuint)(program), (C.GLuint)(shader))\n}",
"func AttachShader(program uint32, shader uint32) {\n\tsyscall.Syscall(gpAttachShader, 2, uintptr(program), uintptr(shader), 0)\n}",
"func (program Program) AttachShader(shader Shader) {\n\tgl.AttachShader(uint32(program), uint32(shader))\n}",
"func AttachShader(p Program, s Shader) {\n\tgl.AttachShader(p.Value, s.Value)\n}",
"func (native *OpenGL) AttachShader(program uint32, shader uint32) {\n\tgl.AttachShader(program, shader)\n}",
"func (gl *WebGL) AttachShader(shaderProgram WebGLShaderProgram, shader WebGLShader) {\n\tgl.context.Call(\"attachShader\", shaderProgram, shader)\n}",
"func (debugging *debuggingOpenGL) AttachShader(program uint32, shader uint32) {\n\tdebugging.recordEntry(\"AttachShader\", program, shader)\n\tdebugging.gl.AttachShader(program, shader)\n\tdebugging.recordExit(\"AttachShader\")\n}",
"func AttachShader(program Uint, shader Uint) {\n\tcprogram, _ := (C.GLuint)(program), cgoAllocsUnknown\n\tcshader, _ := (C.GLuint)(shader), cgoAllocsUnknown\n\tC.glAttachShader(cprogram, cshader)\n}",
"func (self *TileSprite) SetShaderA(member *AbstractFilter) {\n self.Object.Set(\"shader\", member)\n}",
"func ActiveShaderProgram(pipeline uint32, program uint32) {\n C.glowActiveShaderProgram(gpActiveShaderProgram, (C.GLuint)(pipeline), (C.GLuint)(program))\n}",
"func UseProgram(program uint32) {\n C.glowUseProgram(gpUseProgram, (C.GLuint)(program))\n}",
"func LinkProgram(program uint32) {\n C.glowLinkProgram(gpLinkProgram, (C.GLuint)(program))\n}",
"func (gl *WebGL) UseProgram(shaderProgram WebGLShaderProgram) {\n\tgl.context.Call(\"useProgram\", shaderProgram)\n}",
"func (s *Shader) Use() {\n\tgl.UseProgram(s.programID)\n}",
"func (c *Context) BindShader(shader *ShaderProgram) {\n\tif c.currentShaderProgram == nil || shader.id != c.currentShaderProgram.id {\n\t\tgl.UseProgram(shader.id)\n\t\tc.currentShaderProgram = shader\n\t}\n}",
"func UseProgram(p Program) {\n\tgl.UseProgram(p.Value)\n}",
"func (program Program) Link() {\n\tgl.LinkProgram(uint32(program))\n}",
"func ActiveShaderProgram(pipeline uint32, program uint32) {\n\tsyscall.Syscall(gpActiveShaderProgram, 2, uintptr(pipeline), uintptr(program), 0)\n}",
"func DetachShader(program uint32, shader uint32) {\n C.glowDetachShader(gpDetachShader, (C.GLuint)(program), (C.GLuint)(shader))\n}",
"func (am *Manager) AddShader(name string, shader uint32) error {\n\tif _, ok := am.GetShader(name); ok {\n\t\treturn fmt.Errorf(\"asset.Manager.AddShader error: Shader '%s' already exists\", name)\n\t}\n\n\tLogger.Printf(\"Manager: adding Shader '%s'\\n\", name)\n\tam.Shaders[name] = shader\n\n\treturn nil\n}",
"func (am *Manager) AddProgram(set ShaderSet, prog uint32) error {\n\tif _, ok := am.GetProgram(set); ok {\n\t\treturn fmt.Errorf(\"asset.Manager.AddProgram error: Program '%v' already exists\", set)\n\t}\n\n\tLogger.Printf(\"Manager: adding Program '%v'\\n\", set)\n\tam.Programs[set] = prog\n\n\treturn nil\n}",
"func ActiveShaderProgram(pipeline uint32, program uint32) {\n\tC.glowActiveShaderProgram(gpActiveShaderProgram, (C.GLuint)(pipeline), (C.GLuint)(program))\n}",
"func ActiveShaderProgram(pipeline uint32, program uint32) {\n\tC.glowActiveShaderProgram(gpActiveShaderProgram, (C.GLuint)(pipeline), (C.GLuint)(program))\n}",
"func ProgramUniform3f(program uint32, location int32, v0 float32, v1 float32, v2 float32) {\n C.glowProgramUniform3f(gpProgramUniform3f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1), (C.GLfloat)(v2))\n}",
"func (s *Shader) setUniform(name string, value int32) {\n location:=gl.GetUniformLocation(s.idPrograma, gl.Str(name + \"\\x00\"))\n if location != -1 { // Si existe ese nombre de variable\n gl.Uniform1i(location, value)\n }\n}",
"func (native *OpenGL) LinkProgram(program uint32) {\n\tgl.LinkProgram(program)\n}",
"func (native *OpenGL) UseProgram(program uint32) {\n\tgl.UseProgram(program)\n}",
"func (p *Prog) BuildProgram() {\n\tvar (\n\t\tprogram gl.Program\n\t\tvshader gl.Shader\n\t\tfshader gl.Shader\n\t)\n\tprogram = gl.CreateProgram()\n\t//vertex shader\n\tvshader = gl.CreateShader(gl.VERTEX_SHADER)\n\tgl.ShaderSource(vshader, p.Vs)\n\tgl.CompileShader(vshader)\n\tif gl.GetShaderi(vshader, gl.COMPILE_STATUS) == gl.FALSE {\n\t\tlog.Printf(\"glprog: VS compilation failed: %v\", gl.GetShaderInfoLog(vshader))\n\t}\n\t//fragment shader\n\tfshader = gl.CreateShader(gl.FRAGMENT_SHADER)\n\tgl.ShaderSource(fshader, p.Fs)\n\tgl.CompileShader(fshader)\n\tif gl.GetShaderi(fshader, gl.COMPILE_STATUS) == gl.FALSE {\n\t\tlog.Printf(\"glprog: FS compilation failed: %v\", gl.GetShaderInfoLog(fshader))\n\t}\n\t//link program\n\tgl.AttachShader(program, vshader)\n\tgl.AttachShader(program, fshader)\n\tgl.LinkProgram(program)\n\tif gl.GetProgrami(program, gl.LINK_STATUS) == gl.FALSE {\n\t\tlog.Printf(\"glprog: LinkProgram failed: %v\", gl.GetProgramInfoLog(program))\n\t\tgl.DeleteProgram(program)\n\t}\n\t//mark shaders for deletion when program is unlinked\n\tgl.DeleteShader(vshader)\n\tgl.DeleteShader(fshader)\n\n\tp.P = program\n\tfor i := range p.Uniforms {\n\t\tp.Uniforms[i].Location = gl.GetUniformLocation(p.P, p.Uniforms[i].Name)\n\t}\n}",
"func BindProgramPipeline(pipeline uint32) {\n C.glowBindProgramPipeline(gpBindProgramPipeline, (C.GLuint)(pipeline))\n}",
"func (program Program) Use() {\n\tgl.UseProgram(uint32(program))\n}",
"func (r *device) LoadShader(s *gfx.Shader, done chan *gfx.Shader) {\n\t// If we are sharing assets with another renderer, allow it to load the\n\t// shader instead.\n\tr.shared.RLock()\n\tif r.shared.device != nil {\n\t\tr.shared.device.LoadShader(s, done)\n\t\tr.shared.RUnlock()\n\t\treturn\n\t}\n\tr.shared.RUnlock()\n\n\t// Perform pre-load checks on the shader.\n\tdoLoad, err := glutil.PreLoadShader(s, done)\n\tif err != nil {\n\t\tr.warner.Warnf(\"%v\\n\", err)\n\t\treturn\n\t}\n\tif !doLoad {\n\t\treturn\n\t}\n\n\tr.renderExec <- func() bool {\n\t\tnative := &nativeShader{\n\t\t\tr: r.rsrcManager,\n\t\t}\n\n\t\t// Compile vertex shader.\n\t\tnative.vertex = gl.CreateShader(gl.VERTEX_SHADER)\n\t\tsources, free := gl.Strs(string(s.GLSL.Vertex) + \"\\x00\")\n\t\tgl.ShaderSource(native.vertex, 1, sources, nil) // TODO(slimsag): use length parameter instead of null terminator\n\t\tgl.CompileShader(native.vertex)\n\t\tfree()\n\n\t\t// Check if the shader compiled or not.\n\t\tlog, compiled := shaderCompilerLog(native.vertex)\n\t\tif !compiled {\n\t\t\t// Just for sanity.\n\t\t\tnative.vertex = 0\n\n\t\t\t// Append the errors.\n\t\t\ts.Error = append(s.Error, []byte(s.Name+\" | Vertex shader errors:\\n\")...)\n\t\t\ts.Error = append(s.Error, log...)\n\t\t}\n\t\tif len(log) > 0 {\n\t\t\t// Send the compiler log to the debug writer.\n\t\t\tr.warner.Warnf(\"%s | Vertex shader errors:\\n\", s.Name)\n\t\t\tr.warner.Warnf(string(log))\n\t\t}\n\n\t\t// Compile fragment shader.\n\t\tnative.fragment = gl.CreateShader(gl.FRAGMENT_SHADER)\n\t\tsources, free = gl.Strs(string(s.GLSL.Fragment) + \"\\x00\")\n\t\tgl.ShaderSource(native.fragment, 1, sources, nil) // TODO(slimsag): use length parameter instead of null terminator\n\t\tgl.CompileShader(native.fragment)\n\t\tfree()\n\n\t\t// Check if the shader compiled or not.\n\t\tlog, compiled = shaderCompilerLog(native.fragment)\n\t\tif !compiled {\n\t\t\t// Just for sanity.\n\t\t\tnative.fragment = 0\n\n\t\t\t// Append the errors.\n\t\t\ts.Error = append(s.Error, []byte(s.Name+\" | Fragment shader errors:\\n\")...)\n\t\t\ts.Error = append(s.Error, log...)\n\t\t}\n\t\tif len(log) > 0 {\n\t\t\t// Send the compiler log to the debug writer.\n\t\t\tr.warner.Warnf(\"%s | Fragment shader errors:\\n\", s.Name)\n\t\t\tr.warner.Warnf(string(log))\n\t\t}\n\n\t\t// Create the shader program if all went well with the vertex and\n\t\t// fragment shaders.\n\t\tif native.vertex != 0 && native.fragment != 0 {\n\t\t\tnative.program = gl.CreateProgram()\n\t\t\tgl.AttachShader(native.program, native.vertex)\n\t\t\tgl.AttachShader(native.program, native.fragment)\n\t\t\tgl.LinkProgram(native.program)\n\n\t\t\t// Grab the linker's log.\n\t\t\tvar (\n\t\t\t\tlogSize int32\n\t\t\t\tlog []byte\n\t\t\t)\n\t\t\tgl.GetProgramiv(native.program, gl.INFO_LOG_LENGTH, &logSize)\n\n\t\t\tif logSize > 0 {\n\t\t\t\tlog = make([]byte, logSize)\n\t\t\t\tgl.GetProgramInfoLog(native.program, logSize, nil, &log[0])\n\n\t\t\t\t// Strip the null-termination byte.\n\t\t\t\tlog = log[:len(log)-1]\n\t\t\t}\n\n\t\t\t// Check for linker errors.\n\t\t\tvar ok int32\n\t\t\tgl.GetProgramiv(native.program, gl.LINK_STATUS, &ok)\n\t\t\tif ok == 0 {\n\t\t\t\t// Just for sanity.\n\t\t\t\tnative.program = 0\n\n\t\t\t\t// Append the errors.\n\t\t\t\ts.Error = append(s.Error, []byte(s.Name+\" | Linker errors:\\n\")...)\n\t\t\t\ts.Error = append(s.Error, log...)\n\t\t\t}\n\t\t\tif len(log) > 0 {\n\t\t\t\t// Send the linker log to the debug writer.\n\t\t\t\tr.warner.Warnf(\"%s | Linker errors:\\n\", 
s.Name)\n\t\t\t\tr.warner.Warnf(string(log))\n\t\t\t}\n\t\t}\n\n\t\t// Mark the shader as loaded if there were no errors.\n\t\tif len(s.Error) == 0 {\n\t\t\tnative.LocationCache = &glutil.LocationCache{\n\t\t\t\tGetAttribLocation: func(name string) int {\n\t\t\t\t\treturn int(gl.GetAttribLocation(native.program, gl.Str(name+\"\\x00\")))\n\t\t\t\t},\n\t\t\t\tGetUniformLocation: func(name string) int {\n\t\t\t\t\treturn int(gl.GetUniformLocation(native.program, gl.Str(name+\"\\x00\")))\n\t\t\t\t},\n\t\t\t}\n\n\t\t\ts.Loaded = true\n\t\t\ts.NativeShader = native\n\t\t\ts.ClearData()\n\n\t\t\t// Attach a finalizer to the shader that will later free it.\n\t\t\truntime.SetFinalizer(native, finalizeShader)\n\t\t}\n\n\t\t// Finish not Flush, see http://higherorderfun.com/blog/2011/05/26/multi-thread-opengl-texture-loading/\n\t\tgl.Finish()\n\n\t\t// Signal completion and return.\n\t\tselect {\n\t\tcase done <- s:\n\t\tdefault:\n\t\t}\n\t\treturn false // no frame rendered.\n\t}\n}",
"func (s *Shader) Init(shader io.Reader) (err error) {\n\ts.uniformLocs = make(map[string]int32)\n\ts.uniformBIndices = make(map[string]uint32)\n\ts.uniformBOs = make(map[string]uint32)\n\tshaders := []uint32{}\n\n\treader := bufio.NewReader(shader)\n\n\tshaderTypeLine, err := reader.ReadString('\\n')\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// eof gets set to true if we reached the end of the file\n\teof := false\n\n\tfor {\n\t\tshader := \"\"\n\n\t\t// decide on the shader type\n\t\tvar shaderType uint32\n\t\ttypeStr := strings.Split(shaderTypeLine, \" \")[1]\n\t\tswitch strings.ToLower(typeStr) {\n\t\tcase \"vertex\\n\":\n\t\t\tshaderType = gl.VERTEX_SHADER\n\t\tcase \"fragment\\n\":\n\t\t\tshaderType = gl.FRAGMENT_SHADER\n\t\tdefault:\n\t\t\terr = errors.New(\"Shader type \" + typeStr + \" not known.\")\n\t\t\treturn err\n\t\t}\n\n\t\tfor {\n\t\t\tline, err := reader.ReadString('\\n')\n\t\t\tif err == io.EOF {\n\t\t\t\teof = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t// start a new shader string with a new type if the line starts with \"#shader\"\n\t\t\tif strings.HasPrefix(line, \"#shader\") {\n\t\t\t\t// tell the next iteration the information on shader type we read\n\t\t\t\tshaderTypeLine = line\n\t\t\t\tbreak\n\t\t\t} else {\n\t\t\t\tshader += line\n\t\t\t}\n\t\t}\n\n\t\t// if the shader is not empty, compile it\n\t\tif len(shader) > 0 {\n\t\t\tshaderptr, err := s.compile(shader+\"\\x00\", shaderType)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tshaders = append(shaders, shaderptr)\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\n\t\tif eof {\n\t\t\tbreak\n\t\t}\n\t}\n\n\t// link shaders\n\ts.Program = gl.CreateProgram()\n\tfor _, shader := range shaders {\n\t\tgl.AttachShader(s.Program, shader)\n\t}\n\tgl.LinkProgram(s.Program)\n\n\t// delete the singke shaders. we won't need them anymore\n\tfor _, shader := range shaders {\n\t\tgl.DeleteShader(shader)\n\t}\n\n\treturn\n}",
"func (debugging *debuggingOpenGL) LinkProgram(program uint32) {\n\tdebugging.recordEntry(\"LinkProgram\", program)\n\tdebugging.gl.LinkProgram(program)\n\tdebugging.recordExit(\"LinkProgram\")\n}",
"func ProgramUniform4f(program uint32, location int32, v0 float32, v1 float32, v2 float32, v3 float32) {\n C.glowProgramUniform4f(gpProgramUniform4f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1), (C.GLfloat)(v2), (C.GLfloat)(v3))\n}",
"func (am *Manager) LoadProgram(vfile, ffile, gfile string) (uint32, error) {\n\tvar (\n\t\tset ShaderSet\n\t\terr error\n\t)\n\n\tif set.Vs, err = am.LoadShader(gl.VERTEX_SHADER, vfile); err != nil {\n\t\treturn 0, err\n\t}\n\tif set.Fs, err = am.LoadShader(gl.FRAGMENT_SHADER, ffile); err != nil {\n\t\treturn 0, err\n\t}\n\tif len(gfile) > 0 {\n\t\tif set.Gs, err = am.LoadShader(gl.GEOMETRY_SHADER, gfile); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tif prog, ok := am.GetProgram(set); ok {\n\t\treturn prog, nil\n\t}\n\n\tLogger.Printf(\"Manager: loading Program '%v'\\n\", set)\n\n\tvar prog = gl.CreateProgram()\n\tgl.AttachShader(prog, set.Vs)\n\tgl.AttachShader(prog, set.Fs)\n\tif set.Gs > 0 {\n\t\tgl.AttachShader(prog, set.Gs)\n\t}\n\tgl.LinkProgram(prog)\n\n\tvar infoLogLen int32\n\tgl.GetProgramiv(prog, gl.INFO_LOG_LENGTH, &infoLogLen)\n\n\tif infoLogLen > 1 {\n\t\tvar log = make([]uint8, infoLogLen)\n\t\tgl.GetProgramInfoLog(prog, infoLogLen, nil, &log[0])\n\t\treturn 0, errors.New(string(log))\n\t}\n\n\tam.AddProgram(set, prog)\n\n\treturn prog, nil\n}",
"func (sh *ShaderStd) PostRender() error { return nil }",
"func (debugging *debuggingOpenGL) UseProgram(program uint32) {\n\tdebugging.recordEntry(\"UseProgram\", program)\n\tdebugging.gl.UseProgram(program)\n\tdebugging.recordExit(\"UseProgram\")\n}",
"func (p *Plane) Setup(mat Material, mod Model, name string, collide bool, reflective int, refractionIndex float32) error {\n\n\tp.vertexValues.Vertices = []float32{\n\t\t0.0, 0.5, 0.5,\n\t\t0.0, 0.5, 0.0,\n\t\t0.5, 0.5, 0.0,\n\t\t0.5, 0.5, 0.5,\n\t}\n\n\tp.vertexValues.Faces = []uint32{\n\t\t0, 2, 1, 2, 0, 3,\n\t}\n\n\tp.vertexValues.Normals = []float32{\n\t\t0.0, -1.0, 0.0,\n\t\t0.0, -1.0, 0.0,\n\t\t0.0, -1.0, 0.0,\n\t\t0.0, -1.0, 0.0,\n\t}\n\tp.vertexValues.Uvs = []float32{\n\t\t0.0, 0.0,\n\t\t5.0, 0.0,\n\t\t5.0, 5.0,\n\t\t0.0, 5.0,\n\t}\n\n\tp.name = name\n\tp.programInfo = ProgramInfo{}\n\tp.material = mat\n\n\tvar shaderVals map[string]bool\n\tshaderVals = make(map[string]bool)\n\n\tif mat.ShaderType == 0 {\n\t\tshaderVals[\"aPosition\"] = true\n\t\tbS := &shader.BasicShader{}\n\t\tbS.Setup()\n\t\tp.shaderVal = bS\n\t\tp.programInfo.Program = InitOpenGL(p.shaderVal.GetVertShader(), p.shaderVal.GetFragShader(), p.shaderVal.GetGeometryShader())\n\t\tp.programInfo.attributes = Attributes{\n\t\t\tposition: 0,\n\t\t\tnormal: 1,\n\t\t}\n\n\t\tSetupAttributesMap(&p.programInfo, shaderVals)\n\n\t\tp.buffers.Vao = CreateTriangleVAO(&p.programInfo, p.vertexValues.Vertices, nil, nil, nil, nil, p.vertexValues.Faces)\n\n\t} else if mat.ShaderType == 1 {\n\t\tshaderVals[\"aPosition\"] = true\n\t\tshaderVals[\"aNormal\"] = true\n\t\tshaderVals[\"diffuseVal\"] = true\n\t\tshaderVals[\"ambientVal\"] = true\n\t\tshaderVals[\"specularVal\"] = true\n\t\tshaderVals[\"nVal\"] = true\n\t\tshaderVals[\"uProjectionMatrix\"] = true\n\t\tshaderVals[\"uViewMatrix\"] = true\n\t\tshaderVals[\"uModelMatrix\"] = true\n\t\tshaderVals[\"pointLights\"] = true\n\t\tshaderVals[\"cameraPosition\"] = true\n\n\t\tbS := &shader.BlinnNoTexture{}\n\t\tbS.Setup()\n\t\tp.shaderVal = bS\n\t\tp.programInfo.Program = InitOpenGL(p.shaderVal.GetVertShader(), p.shaderVal.GetFragShader(), p.shaderVal.GetGeometryShader())\n\t\tp.programInfo.attributes = Attributes{\n\t\t\tposition: 0,\n\t\t\tnormal: 1,\n\t\t}\n\n\t\tSetupAttributesMap(&p.programInfo, shaderVals)\n\n\t\tp.buffers.Vao = CreateTriangleVAO(&p.programInfo, p.vertexValues.Vertices, p.vertexValues.Normals, nil, nil, nil, p.vertexValues.Faces)\n\n\t} else if mat.ShaderType == 2 {\n\t\tp.programInfo.Program = InitOpenGL(p.shaderVal.GetVertShader(), p.shaderVal.GetFragShader(), p.shaderVal.GetGeometryShader())\n\t\tp.programInfo.attributes = Attributes{\n\t\t\tposition: 0,\n\t\t\tnormal: 1,\n\t\t\tuv: 2,\n\t\t}\n\n\t} else if mat.ShaderType == 3 {\n\t\tshaderVals[\"aPosition\"] = true\n\t\tshaderVals[\"aNormal\"] = true\n\t\tshaderVals[\"aUV\"] = true\n\t\tshaderVals[\"diffuseVal\"] = true\n\t\tshaderVals[\"ambientVal\"] = true\n\t\tshaderVals[\"specularVal\"] = true\n\t\tshaderVals[\"nVal\"] = true\n\t\tshaderVals[\"uProjectionMatrix\"] = true\n\t\tshaderVals[\"uViewMatrix\"] = true\n\t\tshaderVals[\"uModelMatrix\"] = true\n\t\tshaderVals[\"pointLights\"] = true\n\t\tshaderVals[\"cameraPosition\"] = true\n\t\tshaderVals[\"uDiffuseTexture\"] = true\n\n\t\tbS := &shader.BlinnDiffuseTexture{}\n\t\tbS.Setup()\n\t\tp.shaderVal = bS\n\t\tp.programInfo.Program = InitOpenGL(p.shaderVal.GetVertShader(), p.shaderVal.GetFragShader(), p.shaderVal.GetGeometryShader())\n\t\tp.programInfo.attributes = Attributes{\n\t\t\tposition: 0,\n\t\t\tnormal: 1,\n\t\t\tuv: 2,\n\t\t}\n\t\ttexture0, err := texture.NewTextureFromFile(\"../Editor/materials/\"+p.material.DiffuseTexture,\n\t\t\tgl.REPEAT, gl.REPEAT)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tp.diffuseTexture = 
texture0\n\n\t\tSetupAttributesMap(&p.programInfo, shaderVals)\n\t\tp.buffers.Vao = CreateTriangleVAO(&p.programInfo, p.vertexValues.Vertices, p.vertexValues.Normals, p.vertexValues.Uvs, nil, nil, p.vertexValues.Faces)\n\n\t} else if mat.ShaderType == 4 {\n\t\tshaderVals[\"aPosition\"] = true\n\t\tshaderVals[\"aNormal\"] = true\n\t\tshaderVals[\"aUV\"] = true\n\t\tshaderVals[\"diffuseVal\"] = true\n\t\tshaderVals[\"ambientVal\"] = true\n\t\tshaderVals[\"specularVal\"] = true\n\t\tshaderVals[\"nVal\"] = true\n\t\tshaderVals[\"uProjectionMatrix\"] = true\n\t\tshaderVals[\"uViewMatrix\"] = true\n\t\tshaderVals[\"uModelMatrix\"] = true\n\t\tshaderVals[\"pointLights\"] = true\n\t\tshaderVals[\"cameraPosition\"] = true\n\t\tshaderVals[\"uDiffuseTexture\"] = true\n\n\t\t//calculate tangents and bitangents\n\t\ttangents, bitangents := CalculateBitangents(p.vertexValues.Vertices, p.vertexValues.Uvs)\n\n\t\tbS := &shader.BlinnDiffuseAndNormal{}\n\t\tbS.Setup()\n\t\tp.shaderVal = bS\n\t\tp.programInfo.Program = InitOpenGL(p.shaderVal.GetVertShader(), p.shaderVal.GetFragShader(), p.shaderVal.GetGeometryShader())\n\t\tp.programInfo.attributes = Attributes{\n\t\t\tposition: 0,\n\t\t\tnormal: 1,\n\t\t\tuv: 2,\n\t\t\ttangent: 3,\n\t\t\tbitangent: 4,\n\t\t}\n\t\t//load diffuse texture\n\t\ttexture0, err := texture.NewTextureFromFile(\"../Editor/materials/\"+p.material.DiffuseTexture,\n\t\t\tgl.REPEAT, gl.REPEAT)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\t//load normal texture\n\t\ttexture1, err := texture.NewTextureFromFile(\"../Editor/materials/\"+p.material.NormalTexture,\n\t\t\tgl.REPEAT, gl.REPEAT)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tp.diffuseTexture = texture0\n\t\tp.normalTexture = texture1\n\n\t\tSetupAttributesMap(&p.programInfo, shaderVals)\n\t\tp.buffers.Vao = CreateTriangleVAO(&p.programInfo, p.vertexValues.Vertices, p.vertexValues.Normals, p.vertexValues.Uvs, tangents, bitangents, p.vertexValues.Faces)\n\n\t}\n\n\tp.boundingBox = GetBoundingBox(p.vertexValues.Vertices)\n\n\tif collide {\n\t\tp.boundingBox.Collide = true\n\t} else {\n\t\tp.boundingBox.Collide = false\n\t}\n\tp.Scale(mod.Scale)\n\tp.boundingBox = ScaleBoundingBox(p.boundingBox, mod.Scale)\n\tp.model.Position = mod.Position\n\tp.boundingBox = TranslateBoundingBox(p.boundingBox, mod.Position)\n\tp.model.Rotation = mod.Rotation\n\tp.centroid = CalculateCentroid(p.vertexValues.Vertices, p.model.Scale)\n\tp.onCollide = func(box BoundingBox) {}\n\tp.reflective = reflective\n\tp.refractionIndex = refractionIndex\n\n\treturn nil\n}",
"func UseProgram(program Uint) {\n\tcprogram, _ := (C.GLuint)(program), cgoAllocsUnknown\n\tC.glUseProgram(cprogram)\n}",
"func (s *Shader) Begin() {\n\ts.program.bind()\n}",
"func (obj *material) Render(\n\tdelta time.Duration,\n\tpos Position,\n\torientation Orientation,\n\tactiveScene Scene,\n\tprogram uint32,\n) error {\n\t// use the program:\n\tgl.UseProgram(program)\n\n\t// loop the layers:\n\tfor _, oneLayer := range obj.layers {\n\t\terr := oneLayer.Render(delta, pos, orientation, activeScene, program)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// fetch the unform variable on the alpha, and update it:\n\talphaValue := obj.alpha.Value()\n\talphaVar := obj.alpha.Variable()\n\talphaVarName := fmt.Sprintf(glStrPattern, alphaVar)\n\talphaVarUniform := gl.GetUniformLocation(program, gl.Str(alphaVarName))\n\tgl.Uniform1f(alphaVarUniform, alphaValue)\n\n\t// update the viewport:\n\trect := obj.viewport.Rectangle()\n\trectPos := rect.Position()\n\trectDim := rect.Dimension()\n\tviewportVar := obj.viewport.Variable()\n\tviewportVarname := fmt.Sprintf(glStrPattern, viewportVar)\n\tviewportUiform := gl.GetUniformLocation(program, gl.Str(viewportVarname))\n\tgl.Uniform4i(viewportUiform, int32(rectPos.X()), int32(rectPos.Y()), int32(rectDim.X()), int32(rectDim.Y()))\n\n\treturn nil\n}",
"func EnableVertexAttribArray(index uint32) {\n C.glowEnableVertexAttribArray(gpEnableVertexAttribArray, (C.GLuint)(index))\n}",
"func ProgramUniform3fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform3fv(gpProgramUniform3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func finalizeShader(n *nativeShader) {\n\tn.r.Lock()\n\n\t// If the shader program is zero, it has already been free'd.\n\tif n.program == 0 {\n\t\tn.r.Unlock()\n\t\treturn\n\t}\n\tn.r.shaders = append(n.r.shaders, n)\n\tn.r.Unlock()\n}",
"func UseProgramStages(pipeline uint32, stages uint32, program uint32) {\n C.glowUseProgramStages(gpUseProgramStages, (C.GLuint)(pipeline), (C.GLbitfield)(stages), (C.GLuint)(program))\n}",
"func UseProgram(program uint32) {\n\tC.glowUseProgram(gpUseProgram, (C.GLuint)(program))\n}",
"func UseProgram(program uint32) {\n\tC.glowUseProgram(gpUseProgram, (C.GLuint)(program))\n}",
"func LinkProgram(p Program) {\n\tgl.LinkProgram(p.Value)\n}",
"func ProgramUniformMatrix4x3fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix4x3fv(gpProgramUniformMatrix4x3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func LinkProgram(program uint32) {\n\tC.glowLinkProgram(gpLinkProgram, (C.GLuint)(program))\n}",
"func LinkProgram(program uint32) {\n\tC.glowLinkProgram(gpLinkProgram, (C.GLuint)(program))\n}",
"func ProgramUniform2f(program uint32, location int32, v0 float32, v1 float32) {\n C.glowProgramUniform2f(gpProgramUniform2f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1))\n}",
"func ProgramUniformMatrix3fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix3fv(gpProgramUniformMatrix3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func (program Program) DetachShader(shader Shader) {\n\tgl.DetachShader(uint32(program), uint32(shader))\n}",
"func (am *AssetManager) LoadRenderProgram(vertexShader, fragmentShader, iname string) {\n\t//program, err := LoadProgram(am.shadersDir+\"standard.vert\", am.shadersDir+\"standard.frag\")\n\t//if err != nil {\n\t//\tlog.Fatal(err)\n\t//}\n\t//am.Programs[iname] = program\n}",
"func (gl *WebGL) LinkProgram(shaderProgram WebGLShaderProgram) error {\n\tgl.context.Call(\"linkProgram\", shaderProgram)\n\n\tif !gl.GetProgramParameter(shaderProgram, GlLinkStatus).Bool() {\n\t\terr := errors.New(gl.GetProgramInfoLog(shaderProgram))\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func CompileShader(shader uint32) {\n C.glowCompileShader(gpCompileShader, (C.GLuint)(shader))\n}",
"func (self *AbstractFilter) SyncUniforms() {\n self.Object.Call(\"syncUniforms\")\n}",
"func ReleaseShaderCompiler() {\n C.glowReleaseShaderCompiler(gpReleaseShaderCompiler)\n}",
"func DetachShader(program uint32, shader uint32) {\n\tsyscall.Syscall(gpDetachShader, 2, uintptr(program), uintptr(shader), 0)\n}",
"func (prg Program) Pointer(ctx gl.Context, a gl.Attrib, size int) {\n\tctx.EnableVertexAttribArray(a)\n\tctx.VertexAttribPointer(a, size, gl.FLOAT, false, 0, 0)\n}",
"func initOpenGL() uint32 {\n\tif err := gl.Init(); err != nil {\n\t\tpanic(err)\n\t}\n\tversion := gl.GoStr(gl.GetString(gl.VERSION))\n\tlog.Println(\"OpenGL version\", version)\n\n\tvar vertexShaderSource string\n\tvar fragmentShaderSource string\n\n\tvertexShaderSource = `\n\t#version 410\n\tlayout (location=0) in vec3 position;\n\tlayout (location=1) in vec2 texcoord;\n\tout vec2 tCoord;\n\tuniform mat4 projection;\n\tuniform mat4 world;\n\tuniform mat4 view;\n\tuniform vec2 texScale;\n\tuniform vec2 texOffset;\n\tvoid main() {\n\t\tgl_Position = projection * world * vec4(position, 1.0);\n\t\ttCoord = (texcoord+texOffset) * texScale;\n\t}\n\t` + \"\\x00\"\n\t//gl_Position = vec4(position, 10.0, 1.0) * camera * projection;\n\n\tfragmentShaderSource = `\n\t#version 410\n\tin vec2 tCoord;\n\tout vec4 frag_colour;\n\tuniform sampler2D ourTexture;\n\tuniform vec4 color;\n\tvoid main() {\n\t\t\tfrag_colour = texture(ourTexture, tCoord) * color;\n\t}\n\t` + \"\\x00\"\n\n\tprog := CreateProgram(vertexShaderSource, fragmentShaderSource)\n\n\tgl.UseProgram(prog)\n\tgl.Uniform2f(\n\t\tgl.GetUniformLocation(prog, gl.Str(\"texScale\\x00\")),\n\t\t1.0, 1.0,\n\t)\n\tgl.Uniform4f(\n\t\tgl.GetUniformLocation(prog, gl.Str(\"color\\x00\")),\n\t\t1, 1, 1, 1,\n\t)\n\n\t// line opengl program\n\tvertexShaderSource = `\n\t#version 330 core\n\tlayout (location = 0) in vec3 aPos;\n\tuniform mat4 uProjection;\n\tuniform mat4 uWorld;\n\n\tvoid main()\n\t{\n\t gl_Position = uProjection * vec4(aPos, 1.0);\n\t}` + \"\\x00\"\n\n\tfragmentShaderSource = `\n\t#version 330 core\n\tout vec4 FragColor;\n\tuniform vec3 uColor;\n\n\tvoid main()\n\t{\n\t FragColor = vec4(uColor, 1.0f);\n\t}` + \"\\x00\"\n\n\tlineProgram = CreateProgram(vertexShaderSource, fragmentShaderSource)\n\n\treturn prog\n}",
"func LinkProgram(program Uint) {\n\tcprogram, _ := (C.GLuint)(program), cgoAllocsUnknown\n\tC.glLinkProgram(cprogram)\n}",
"func ProgramUniformMatrix3x2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix3x2fv(gpProgramUniformMatrix3x2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func (self *TileSprite) Shader() *AbstractFilter{\n return &AbstractFilter{self.Object.Get(\"shader\")}\n}",
"func (debugging *debuggingOpenGL) ShaderSource(shader uint32, source string) {\n\tdebugging.recordEntry(\"ShaderSource\", shader, source)\n\tdebugging.gl.ShaderSource(shader, source)\n\tdebugging.recordExit(\"ShaderSource\")\n}",
"func ProgramUniformMatrix3x4fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix3x4fv(gpProgramUniformMatrix3x4fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniformMatrix2x3fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix2x3fv(gpProgramUniformMatrix2x3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func (s *Scene) Setup(ctx *app.Context) error {\n\ts.AmbientColor = mgl32.Vec4{0.2, 0.2, 0.2, 1.0}\n\ts.LightPos = mgl32.Vec3{0.0, 0.0, 10.0}\n\ts.LightColor = mgl32.Vec4{0.7, 0.7, 0.7}\n\ts.LightPower = 500\n\n\tshaders := []shader.Info{}\n\tfor i := range s.VertFiles {\n\t\tshaders = append(shaders, shader.Info{Type: gl.VERTEX_SHADER, Filename: s.VertFiles[i]})\n\t}\n\tfor i := range s.FragFiles {\n\t\tshaders = append(shaders, shader.Info{Type: gl.FRAGMENT_SHADER, Filename: s.FragFiles[i]})\n\t}\n\n\tprogram, err := shader.Load(&shaders)\n\tif err != nil {\n\t\treturn err\n\t}\n\ts.Programs[progID] = program\n\n\tgl.UseProgram(s.Programs[progID])\n\n\tgl.Enable(gl.CULL_FACE)\n\tgl.Enable(gl.DEPTH_TEST)\n\n\ts.ProjMatrix = mgl32.Perspective(mgl32.DegToRad(45.0), float32(ctx.ScreenWidth)/float32(ctx.ScreenHeight), 0.1, 10.0)\n\ts.ProjMatrixLoc = gl.GetUniformLocation(s.Programs[progID], gl.Str(\"ProjMatrix\\x00\"))\n\tgl.UniformMatrix4fv(s.ProjMatrixLoc, 1, false, &s.ProjMatrix[0])\n\n\ts.ViewMatrix = mgl32.LookAtV(mgl32.Vec3{3, 3, 3}, mgl32.Vec3{0, 0, 0}, mgl32.Vec3{0, 1, 0})\n\ts.ViewMatrixLoc = gl.GetUniformLocation(s.Programs[progID], gl.Str(\"ViewMatrix\\x00\"))\n\tgl.UniformMatrix4fv(s.ViewMatrixLoc, 1, false, &s.ViewMatrix[0])\n\n\tmodelMatrix := mgl32.Ident4()\n\ts.ModelMatrixLoc = gl.GetUniformLocation(s.Programs[progID], gl.Str(\"ModelMatrix\\x00\"))\n\tgl.UniformMatrix4fv(s.ModelMatrixLoc, 1, false, &modelMatrix[0])\n\n\ts.UseColorMapLoc = gl.GetUniformLocation(s.Programs[progID], gl.Str(\"UseColorMap\\x00\"))\n\tif s.ColorFile != \"\" {\n\t\tgl.Uniform1i(s.UseColorMapLoc, 1)\n\t\ts.ColorMapLoc = gl.GetUniformLocation(s.Programs[progID], gl.Str(\"ColorMap\\x00\"))\n\t\tgl.Uniform1i(s.ColorMapLoc, 0)\n\t\tcolorMap, err := os.Open(s.ColorFile)\n\t\tdefer colorMap.Close()\n\t\tif err != nil {\n\t\t\tlog.Fatalln(\"failed to open tex:\", err)\n\t\t}\n\t\tif _, err := loadTex(colorMap, gl.TEXTURE0); err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\t} else {\n\t\tgl.Uniform1i(s.UseColorMapLoc, 0)\n\t}\n\n\tif s.NormalFile != \"\" {\n\t\ts.NormalMapLoc = gl.GetUniformLocation(s.Programs[progID], gl.Str(\"NormalMap\\x00\"))\n\t\tgl.Uniform1i(s.NormalMapLoc, 1)\n\t\tnormalMap, err := os.Open(s.NormalFile)\n\t\tdefer normalMap.Close()\n\t\tif err != nil {\n\t\t\tlog.Fatalln(\"failed to open tex:\", err)\n\t\t}\n\t\tif _, err := loadTex(normalMap, gl.TEXTURE1); err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\t}\n\n\tgl.BindFragDataLocation(s.Programs[progID], 0, gl.Str(\"FragColor\\x00\"))\n\n\tmdlReader, err := os.Open(s.ModelFile)\n\tdefer mdlReader.Close()\n\tif err != nil {\n\t\tlog.Fatalln(\"could not open model:\", err)\n\t}\n\ts.Model = model.New()\n\tif err := s.Model.Load(mdlReader); err != nil {\n\t\tlog.Fatalln(\"could not load model:\", err)\n\t}\n\n\t// Configure the vertex data\n\tgl.GenVertexArrays(numVAOs, &s.VAOs[0])\n\tgl.BindVertexArray(s.VAOs[triangleName])\n\n\tgl.GenBuffers(numBuffers, &s.Buffers[0])\n\tgl.BindBuffer(gl.ARRAY_BUFFER, s.Buffers[aBufferName])\n\tgl.BufferData(gl.ARRAY_BUFFER, len(s.Model.VertexData)*8, gl.Ptr(s.Model.VertexData), gl.STATIC_DRAW)\n\n\tmcVertexLoc := uint32(gl.GetAttribLocation(s.Programs[progID], gl.Str(\"MCVertex\\x00\")))\n\tgl.EnableVertexAttribArray(mcVertexLoc)\n\tgl.VertexAttribPointer(mcVertexLoc, 3, gl.FLOAT, false, 8*4, gl.PtrOffset(0))\n\n\tmcNormalLoc := uint32(gl.GetAttribLocation(s.Programs[progID], gl.Str(\"MCNormal\\x00\")))\n\tgl.EnableVertexAttribArray(mcNormalLoc)\n\tgl.VertexAttribPointer(mcNormalLoc, 3, gl.FLOAT, false, 
8*4, gl.PtrOffset(3*4))\n\n\ttexCoordLoc := uint32(gl.GetAttribLocation(s.Programs[progID], gl.Str(\"TexCoord0\\x00\")))\n\tgl.EnableVertexAttribArray(texCoordLoc)\n\tgl.VertexAttribPointer(texCoordLoc, 2, gl.FLOAT, false, 8*4, gl.PtrOffset(6*4))\n\n\tvar fbo uint32\n\tgl.GenBuffers(1, &fbo)\n\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, fbo)\n\tgl.BufferData(gl.ELEMENT_ARRAY_BUFFER, len(s.Model.FaceData)*4, gl.Ptr(s.Model.FaceData), gl.STATIC_DRAW)\n\n\ts.LightPosLoc = gl.GetUniformLocation(s.Programs[progID], gl.Str(\"LightPos\\x00\"))\n\tgl.Uniform3f(s.LightPosLoc, s.LightPos[0], s.LightPos[1], s.LightPos[2])\n\n\ts.AmbientColorLoc = gl.GetUniformLocation(s.Programs[progID], gl.Str(\"AmbientColor\\x00\"))\n\tgl.Uniform4f(s.AmbientColorLoc, s.AmbientColor[0], s.AmbientColor[1], s.AmbientColor[2], s.AmbientColor[3])\n\n\ts.LightColorLoc = gl.GetUniformLocation(s.Programs[progID], gl.Str(\"LightColor\\x00\"))\n\tgl.Uniform4f(s.LightColorLoc, s.LightColor[0], s.LightColor[1], s.LightColor[2], s.LightColor[3])\n\n\ts.LightPowerLoc = gl.GetUniformLocation(s.Programs[progID], gl.Str(\"LightPower\\x00\"))\n\tgl.Uniform1f(s.LightPowerLoc, s.LightPower)\n\n\treturn nil\n}",
"func ProgramUniformMatrix4x2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix4x2fv(gpProgramUniformMatrix4x2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func (gl *WebGL) ShaderSource(shader WebGLShader, source string) {\n\tgl.context.Call(\"shaderSource\", shader, source)\n}",
"func ProgramUniform2fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform2fv(gpProgramUniform2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func (s *s2d) Pre() error {\n\treturn Exec(func() error {\n\t\tflush_errors(\"pre: start\")\n\t\ts.vao = gl.GenVertexArray()\n\t\ts.vao.Bind()\n\n\t\ts.vertvbo = gl.GenBuffer()\n\t\ts.vertvbo.Bind(gl.ARRAY_BUFFER)\n\t\ts.quad = []float32{\n\t\t\t0.9, 0.9,\n\t\t\t0.9, -0.9,\n\t\t\t-0.9, -0.9,\n\t\t\t-0.9, 0.9,\n\t\t}\n\t\t// unsafe.Sizeof(quad) seems to think quad is 24 bytes, which is absurd.\n\t\t// so we just calculate the size manually.\n\t\tgl.BufferData(gl.ARRAY_BUFFER, len(s.quad)*4, s.quad, gl.STATIC_DRAW)\n\n\t\tflush_errors(\"creating program\")\n\t\ts.program = s2dprogram()\n\t\tflush_errors(\"program created, gen'ing texturing\")\n\n\t\ts.texture = gl.GenTexture()\n\t\tgl.ActiveTexture(gl.TEXTURE0)\n\t\ts.texture.Bind(gl.TEXTURE_2D)\n\n\t\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR)\n\t\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR)\n\t\t// never ever use anything but clamp to edge; others do not make any sense.\n\t\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_R, gl.CLAMP_TO_EDGE)\n\t\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)\n\t\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)\n\t\ttxs2d := s.program.GetUniformLocation(tex2dname)\n\t\ttxs2d.Uniform1i(0)\n\t\tflush_errors(\"setting '\" + tex2dname + \"' uniform.\")\n\n\t\ts.fldmaxloc = s.program.GetUniformLocation(\"fieldmax\")\n\t\tgfx.Trace(\"field max loc is: %v\\n\", s.fldmaxloc)\n\t\treturn nil\n\t})\n}",
"func ProgramUniform1f(program uint32, location int32, v0 float32) {\n C.glowProgramUniform1f(gpProgramUniform1f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0))\n}",
"func ProgramUniform4fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform4fv(gpProgramUniform4fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func BindAttribLocation(program uint32, index uint32, name *int8) {\n C.glowBindAttribLocation(gpBindAttribLocation, (C.GLuint)(program), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func NewShader(vertexFmt, uniformFmt AttrFormat, vertexShader, fragmentShader string) (*Shader, error) {\n\tshader := &Shader{\n\t\tprogram: binder{\n\t\t\trestoreLoc: gl.CURRENT_PROGRAM,\n\t\t\tbindFunc: func(obj uint32) {\n\t\t\t\tgl.UseProgram(obj)\n\t\t\t},\n\t\t},\n\t\tvertexFmt: vertexFmt,\n\t\tuniformFmt: uniformFmt,\n\t\tuniformLoc: make([]int32, len(uniformFmt)),\n\t}\n\n\tvar vshader, fshader uint32\n\n\t// vertex shader\n\t{\n\t\tvshader = gl.CreateShader(gl.VERTEX_SHADER)\n\t\tsrc, free := gl.Strs(vertexShader)\n\t\tdefer free()\n\t\tlength := int32(len(vertexShader))\n\t\tgl.ShaderSource(vshader, 1, src, &length)\n\t\tgl.CompileShader(vshader)\n\n\t\tvar success int32\n\t\tgl.GetShaderiv(vshader, gl.COMPILE_STATUS, &success)\n\t\tif success == gl.FALSE {\n\t\t\tvar logLen int32\n\t\t\tgl.GetShaderiv(vshader, gl.INFO_LOG_LENGTH, &logLen)\n\n\t\t\tinfoLog := make([]byte, logLen)\n\t\t\tgl.GetShaderInfoLog(vshader, logLen, nil, &infoLog[0])\n\t\t\treturn nil, fmt.Errorf(\"error compiling vertex shader: %s\", string(infoLog))\n\t\t}\n\n\t\tdefer gl.DeleteShader(vshader)\n\t}\n\n\t// fragment shader\n\t{\n\t\tfshader = gl.CreateShader(gl.FRAGMENT_SHADER)\n\t\tsrc, free := gl.Strs(fragmentShader)\n\t\tdefer free()\n\t\tlength := int32(len(fragmentShader))\n\t\tgl.ShaderSource(fshader, 1, src, &length)\n\t\tgl.CompileShader(fshader)\n\n\t\tvar success int32\n\t\tgl.GetShaderiv(fshader, gl.COMPILE_STATUS, &success)\n\t\tif success == gl.FALSE {\n\t\t\tvar logLen int32\n\t\t\tgl.GetShaderiv(fshader, gl.INFO_LOG_LENGTH, &logLen)\n\n\t\t\tinfoLog := make([]byte, logLen)\n\t\t\tgl.GetShaderInfoLog(fshader, logLen, nil, &infoLog[0])\n\t\t\treturn nil, fmt.Errorf(\"error compiling fragment shader: %s\", string(infoLog))\n\t\t}\n\n\t\tdefer gl.DeleteShader(fshader)\n\t}\n\n\t// shader program\n\t{\n\t\tshader.program.obj = gl.CreateProgram()\n\t\tgl.AttachShader(shader.program.obj, vshader)\n\t\tgl.AttachShader(shader.program.obj, fshader)\n\t\tgl.LinkProgram(shader.program.obj)\n\n\t\tvar success int32\n\t\tgl.GetProgramiv(shader.program.obj, gl.LINK_STATUS, &success)\n\t\tif success == gl.FALSE {\n\t\t\tvar logLen int32\n\t\t\tgl.GetProgramiv(shader.program.obj, gl.INFO_LOG_LENGTH, &logLen)\n\n\t\t\tinfoLog := make([]byte, logLen)\n\t\t\tgl.GetProgramInfoLog(shader.program.obj, logLen, nil, &infoLog[0])\n\t\t\treturn nil, fmt.Errorf(\"error linking shader program: %s\", string(infoLog))\n\t\t}\n\t}\n\n\t// uniforms\n\tfor i, uniform := range uniformFmt {\n\t\tloc := gl.GetUniformLocation(shader.program.obj, gl.Str(uniform.Name+\"\\x00\"))\n\t\tshader.uniformLoc[i] = loc\n\t}\n\n\truntime.SetFinalizer(shader, (*Shader).delete)\n\n\treturn shader, nil\n}",
"func ProgramUniform1fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform1fv(gpProgramUniform1fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func EnableVertexArrayAttrib(vaobj uint32, index uint32) {\n\tC.glowEnableVertexArrayAttrib(gpEnableVertexArrayAttrib, (C.GLuint)(vaobj), (C.GLuint)(index))\n}",
"func EnableVertexArrayAttrib(vaobj uint32, index uint32) {\n\tC.glowEnableVertexArrayAttrib(gpEnableVertexArrayAttrib, (C.GLuint)(vaobj), (C.GLuint)(index))\n}",
"func ProgramUniformMatrix4fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix4fv(gpProgramUniformMatrix4fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func (va *VertexArray) SetLayout(layout VertexLayout) {\n\tif len(va.layout.layout) != 0 {\n\t\treturn\n\t}\n\n\tva.layout = layout\n\n\t// generate and bind the vertex array\n\tgl.GenVertexArrays(1, &va.vao) // generates the vertex array (or multiple)\n\tgl.BindVertexArray(va.vao) // binds the vertex array\n\n\t// make vertex array pointer attributes\n\t// offset is the offset in bytes to the first attribute\n\toffset := 0\n\n\t// calculate vertex stride\n\tstride := 0\n\tfor _, elem := range va.layout.layout {\n\t\tstride += elem.getByteSize()\n\n\t}\n\n\t// Vertex Buffer Object\n\tgl.GenBuffers(1, &va.vbo) // generates the buffer (or multiple)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, va.vbo)\n\n\tfor i, elem := range va.layout.layout {\n\n\t\t// define an array of generic vertex attribute data\n\t\t// index, size, type, normalized, stride of vertex (in bytes), pointer (offset)\n\t\t// point positions\n\t\tgl.VertexAttribPointer(uint32(i), int32(elem.getSize()),\n\t\t\telem.getGLType(), false, int32(stride), gl.PtrOffset(offset))\n\t\tgl.EnableVertexAttribArray(uint32(i))\n\t\toffset += elem.getByteSize()\n\t}\n\n}",
"func BindProgramPipeline(pipeline uint32) {\n\tC.glowBindProgramPipeline(gpBindProgramPipeline, (C.GLuint)(pipeline))\n}",
"func BindProgramPipeline(pipeline uint32) {\n\tC.glowBindProgramPipeline(gpBindProgramPipeline, (C.GLuint)(pipeline))\n}",
"func EnableVertexAttribArray(index uint32) {\n\tgl.EnableVertexAttribArray(index)\n}",
"func CreateShaderProgramv(xtype uint32, count int32, strings **int8) uint32 {\n ret := C.glowCreateShaderProgramv(gpCreateShaderProgramv, (C.GLenum)(xtype), (C.GLsizei)(count), (**C.GLchar)(unsafe.Pointer(strings)))\n return (uint32)(ret)\n}",
"func ProgramUniform3i(program uint32, location int32, v0 int32, v1 int32, v2 int32) {\n C.glowProgramUniform3i(gpProgramUniform3i, (C.GLuint)(program), (C.GLint)(location), (C.GLint)(v0), (C.GLint)(v1), (C.GLint)(v2))\n}",
"func UseProgram() gl.Uint {\n\tprogram, err := CreateProgram(\n\t\tReadVertexShader(\"Demo.glsl\"),\n\t\tReadFragmentShader(\"Demo.glsl\"))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tgl.UseProgram(program)\n\n\treturn program\n}",
"func ProgramUniformMatrix2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix2fv(gpProgramUniformMatrix2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func DetachShader(program uint32, shader uint32) {\n\tC.glowDetachShader(gpDetachShader, (C.GLuint)(program), (C.GLuint)(shader))\n}",
"func DetachShader(program uint32, shader uint32) {\n\tC.glowDetachShader(gpDetachShader, (C.GLuint)(program), (C.GLuint)(shader))\n}",
"func ProgramUniform4i(program uint32, location int32, v0 int32, v1 int32, v2 int32, v3 int32) {\n C.glowProgramUniform4i(gpProgramUniform4i, (C.GLuint)(program), (C.GLint)(location), (C.GLint)(v0), (C.GLint)(v1), (C.GLint)(v2), (C.GLint)(v3))\n}",
"func (gl *WebGL) EnableVertexAttribArray(position WebGLAttributeLocation) {\n\tgl.context.Call(\"enableVertexAttribArray\", position)\n}",
"func DetachShader(p Program, s Shader) {\n\tgl.DetachShader(p.Value, s.Value)\n}",
"func EnableVertexAttribArray(index uint32) {\n\tsyscall.Syscall(gpEnableVertexAttribArray, 1, uintptr(index), 0, 0)\n}",
"func BindVertexArray(array uint32) {\n C.glowBindVertexArray(gpBindVertexArray, (C.GLuint)(array))\n}",
"func (p *Plane) SetShader(vertShader string, fragShader string) error {\n\n\tif vertShader != \"\" && fragShader != \"\" {\n\t\tp.fragShader = fragShader\n\t\tp.vertShader = vertShader\n\t\treturn nil\n\t}\n\treturn errors.New(\"Error setting the shader, shader code must not be blank\")\n}",
"func ShaderProgramFill(r, g, b, a byte) *shaderir.Program {\n\tir, err := graphics.CompileShader([]byte(fmt.Sprintf(`//kage:unit pixels\n\npackage main\n\nfunc Fragment(position vec4, texCoord vec2, color vec4) vec4 {\n\treturn vec4(%0.9f, %0.9f, %0.9f, %0.9f)\n}\n`, float64(r)/0xff, float64(g)/0xff, float64(b)/0xff, float64(a)/0xff)))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn ir\n}",
"func EnableVertexArrayAttrib(vaobj uint32, index uint32) {\n\tsyscall.Syscall(gpEnableVertexArrayAttrib, 2, uintptr(vaobj), uintptr(index), 0)\n}"
] | [
"0.7530386",
"0.7152642",
"0.7089462",
"0.7000029",
"0.6843556",
"0.67589235",
"0.6711889",
"0.6614953",
"0.6595897",
"0.65101314",
"0.6135568",
"0.6095363",
"0.60770255",
"0.601223",
"0.60121894",
"0.60080314",
"0.5971357",
"0.5923006",
"0.5922547",
"0.5917164",
"0.5842594",
"0.58016133",
"0.58016133",
"0.57838476",
"0.5770627",
"0.57698834",
"0.5757943",
"0.575591",
"0.5747873",
"0.5703903",
"0.5689792",
"0.5688194",
"0.5685162",
"0.5672616",
"0.5623306",
"0.56155187",
"0.56149614",
"0.5604697",
"0.55990654",
"0.5595181",
"0.55891705",
"0.55765736",
"0.5572456",
"0.5567258",
"0.55631256",
"0.55560946",
"0.55560946",
"0.5555867",
"0.5547888",
"0.55311763",
"0.55311763",
"0.55300146",
"0.55242515",
"0.5520572",
"0.55182904",
"0.55144435",
"0.55131155",
"0.55030423",
"0.548133",
"0.5475615",
"0.5466783",
"0.5463449",
"0.54530334",
"0.544888",
"0.54390156",
"0.54371697",
"0.5434717",
"0.5425755",
"0.5413555",
"0.54089737",
"0.54086477",
"0.5403312",
"0.53993255",
"0.53777194",
"0.5374121",
"0.5372055",
"0.53703123",
"0.53648156",
"0.53552973",
"0.53552973",
"0.5354928",
"0.53412575",
"0.5312706",
"0.5312706",
"0.5302546",
"0.5302145",
"0.53012747",
"0.52993566",
"0.52952164",
"0.5288589",
"0.5288589",
"0.52759385",
"0.52737325",
"0.5262707",
"0.52625334",
"0.5253025",
"0.52523476",
"0.5252063",
"0.52478564"
] | 0.687415 | 5 |
delimit the vertices of a primitive or a group of like primitives | func Begin(mode uint32) {
C.glowBegin(gpBegin, (C.GLenum)(mode))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (p plane) splitPolygon(poly polygon, coplanarFront, coplanarBack, front, back *[]polygon) {\n\tconst (\n\t\tcoplanarType = 0\n\t\tfrontType = 1\n\t\tbackType = 2\n\t\tspanningType = 3\n\t)\n\tpolygonType := 0\n\tvar types []int\n\tfor _, v := range poly.vertices {\n\t\tt := p.normal.dot(v.pos) - p.w\n\t\tvar pType int\n\t\tif t < -planeEpsilon {\n\t\t\tpType = backType\n\t\t} else {\n\t\t\tif t > planeEpsilon {\n\t\t\t\tpType = frontType\n\t\t\t} else {\n\t\t\t\tpType = coplanarType\n\t\t\t}\n\t\t}\n\t\tpolygonType |= pType\n\t\ttypes = append(types, pType)\n\t}\n\tswitch polygonType {\n\tcase coplanarType:\n\t\tif p.normal.dot(poly.plane.normal) > 0 {\n\t\t\t*coplanarFront = append(*coplanarFront, poly)\n\t\t} else {\n\t\t\t*coplanarBack = append(*coplanarBack, poly)\n\t\t}\n\tcase frontType:\n\t\t*front = append(*front, poly)\n\tcase backType:\n\t\t*back = append(*back, poly)\n\tcase spanningType:\n\t\tvar f, b []vertex\n\t\tfor i, vi := range poly.vertices {\n\t\t\tj := (i + 1) % len(poly.vertices) // next vertex of polygon (wraps over)\n\t\t\tti := types[i]\n\t\t\ttj := types[j]\n\t\t\tvj := poly.vertices[j]\n\t\t\tif ti != backType {\n\t\t\t\tf = append(f, vi)\n\t\t\t}\n\t\t\tif ti != frontType {\n\t\t\t\tb = append(b, vi)\n\t\t\t}\n\t\t\tif (ti | tj) == spanningType {\n\t\t\t\tt := (p.w - p.normal.dot(vi.pos)) / p.normal.dot(vj.pos.minus(vi.pos))\n\t\t\t\tv := vi.interpolated(vj, t)\n\t\t\t\tf = append(f, v)\n\t\t\t\tb = append(b, v)\n\t\t\t}\n\t\t}\n\t\tif len(f) >= 3 {\n\t\t\t*front = append(*front, newPolygon(f, poly.shared))\n\t\t}\n\t\tif len(b) >= 3 {\n\t\t\t*back = append(*back, newPolygon(b, poly.shared))\n\t\t}\n\t}\n}",
"func (v *Vertex) flatten() []string {\n\n\tlineage := []string{}\n\n\tp := v\n\tfor p != nil {\n\t\t// Prepend the lineage\n\t\tlineage = append([]string{p.Identifier}, lineage...)\n\t\tp = p.Parent\n\t}\n\n\treturn lineage\n}",
"func parseVertex(t []string) []float32 {\n\tx, _ := strconv.ParseFloat(t[0], 32)\n\ty, _ := strconv.ParseFloat(t[1], 32)\n\tz, _ := strconv.ParseFloat(t[2], 32)\n\n\treturn []float32{float32(x), float32(y), float32(z)}\n}",
"func (outer outer) Shape() []pos.Rel {\r\n\tl := make([]pos.Rel, 2*(outer.Xlen+outer.Ylen))\r\n\tfor i := 0; i < outer.Xlen; i++ {\r\n\t\tl[i] = pos.Rel{Z: i, W: -1}\r\n\t\tl[outer.Xlen+i] = pos.Rel{Z: i, W: outer.Ylen}\r\n\t}\r\n\tfor j := 0; j < outer.Ylen; j++ {\r\n\t\tl[2*outer.Xlen+j] = pos.Rel{Z: -1, W: j}\r\n\t\tl[2*outer.Xlen+outer.Ylen+j] = pos.Rel{Z: outer.Xlen, W: j}\r\n\t}\r\n\treturn l\r\n}",
"func parseFace(line string, verts, normals []Vector3, material Material) Triangle {\n\tgroups := strings.Split(line, \" \")\n\tif len(groups) > 4 {\n\t\tlog.Fatal(\".obj models should be triangulated\")\n\t}\n\n\tvertexIndices := make([]int, 3)\n\tnormalIndices := make([]int, 3)\n\tfor i := 1; i < 4; i++ {\n\t\tsplitGroup := strings.Split(groups[i], \"/\")\n\n\t\tvertexIndex, err1 := strconv.Atoi(splitGroup[0])\n\t\tif err1 != nil {\n\t\t\tlog.Fatal(\"Couldn't parse vertex index as integer\")\n\t\t}\n\t\tvertexIndices[i-1] = vertexIndex\n\n\t\tnormalIndex, err2 := strconv.Atoi(splitGroup[2])\n\t\tif err2 != nil {\n\t\t\tlog.Fatal(\"Couldn't parse normal index as integer\")\n\t\t}\n\t\tnormalIndices[i-1] = normalIndex\n\t}\n\n\treturn Triangle{\n\t\tV0: verts[vertexIndices[0]-1],\n\t\tV1: verts[vertexIndices[1]-1],\n\t\tV2: verts[vertexIndices[2]-1],\n\t\tN0: normals[normalIndices[0]-1],\n\t\tN1: normals[normalIndices[1]-1],\n\t\tN2: normals[normalIndices[2]-1],\n\t\tMaterial: material,\n\t}\n}",
"func TestVertexSplit(t *testing.T) {\n\tt.Parallel()\n\tt.Run(\"TestVertexSplitWhenMatchingSubstringReturnsPrefixAndRest\", func(t *testing.T) {\n\t\ts := \"a.b.c.d.e\"\n\t\tg := graph.New()\n\t\tg.Add(node.New(\"a\", \"a\"))\n\t\tg.Add(node.New(\"a.b\", \"a.b.\"))\n\t\tg.Add(node.New(\"a.c.d.\", \"a.c.d\"))\n\t\tg.Add(node.New(\"a.b.c\", \"a.b.c\"))\n\t\tpfx, rest, found := preprocessor.VertexSplit(g, s)\n\t\tassert.Equal(t, \"a.b.c\", pfx)\n\t\tassert.Equal(t, \"d.e\", rest)\n\t\tassert.True(t, found)\n\t})\n\n\tt.Run(\"TestVertexSplitWhenExactMatchReturnsPrefix\", func(t *testing.T) {\n\t\ts := \"a.b.c\"\n\t\tg := graph.New()\n\t\tg.Add(node.New(\"a.b.c\", \"a.b.c\"))\n\t\tpfx, rest, found := preprocessor.VertexSplit(g, s)\n\t\tassert.Equal(t, \"a.b.c\", pfx)\n\t\tassert.Equal(t, \"\", rest)\n\t\tassert.True(t, found)\n\t})\n\n\tt.Run(\"TestVertexSplitWhenNoMatchReturnsRest\", func(t *testing.T) {\n\t\ts := \"x.y.z\"\n\t\tg := graph.New()\n\t\tg.Add(node.New(\"a.b.c\", \"a.b.c\"))\n\t\tpfx, rest, found := preprocessor.VertexSplit(g, s)\n\t\tassert.Equal(t, \"\", pfx)\n\t\tassert.Equal(t, \"x.y.z\", rest)\n\t\tassert.False(t, found)\n\t})\n}",
"func (shape *Shape) PolyShapeSetVertsRaw(verts []Vect) {\n\tC.cpPolyShapeSetVertsRaw(\n\t\t(*C.cpShape)(unsafe.Pointer(shape)),\n\t\tC.int(len(verts)),\n\t\t(*C.cpVect)(unsafe.Pointer(&verts[0])),\n\t)\n}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n\tsyscall.Syscall6(gpMultiDrawElementsBaseVertex, 6, uintptr(mode), uintptr(unsafe.Pointer(count)), uintptr(xtype), uintptr(unsafe.Pointer(indices)), uintptr(drawcount), uintptr(unsafe.Pointer(basevertex)))\n}",
"func ShapeToVertexList(s string) (vertexList []shared.Point2d, length int) {\n\tvar stringArray = StringToStringArray(s)\n\tvar currentPosX = 0\n\tvar currentPosY = 0\n\tvar startPosX = 0\n\tvar startPosY = 0\n\tvar index = 0\n\tvar l float64\n\n\tfor index < len(stringArray) {\n\t\tswitch stringArray[index] {\n\t\tcase \"M\", \"L\", \"m\", \"l\":\n\t\t\ttempX, err := strconv.Atoi(stringArray[index+1])\n\t\t\ttempY, err := strconv.Atoi(stringArray[index+2])\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Invalid SVG String\")\n\t\t\t\t// TODO: return InvalidShapeSvgStringError\n\t\t\t}\n\n\t\t\t// Uppercase = absolute pos, lowercase = relative pos\n\t\t\tif stringArray[index] == \"M\" {\n\t\t\t\tstartPosX = tempX\n\t\t\t\tstartPosY = tempY\n\t\t\t\tcurrentPosX = tempX\n\t\t\t\tcurrentPosY = tempY\n\t\t\t} else if stringArray[index] == \"m\" {\n\t\t\t\tstartPosX += tempX\n\t\t\t\tstartPosY += tempY\n\t\t\t\tcurrentPosX += tempX\n\t\t\t\tcurrentPosY += tempY\n\t\t\t} else if stringArray[index] == \"L\" {\n\t\t\t\t// need to calculate the length of x and y\n\t\t\t\tx := math.Abs(float64(currentPosX - tempX))\n\t\t\t\ty := math.Abs(float64(currentPosY - tempY))\n\t\t\t\tl += math.Sqrt(x*x + y*y)\n\t\t\t\tcurrentPosX = tempX\n\t\t\t\tcurrentPosY = tempY\n\t\t\t} else {\n\t\t\t\tl += math.Sqrt(float64(tempX*tempX + tempY*tempY))\n\t\t\t\tcurrentPosX += tempX\n\t\t\t\tcurrentPosY += tempY\n\t\t\t}\n\n\t\t\tindex += 3\n\t\tcase \"H\", \"h\":\n\t\t\ttempX, err := strconv.Atoi(stringArray[index+1])\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Invalid SVG String\")\n\t\t\t\t// TODO: return InvalidShapeSvgStringError\n\t\t\t}\n\n\t\t\tif stringArray[index] == \"H\" {\n\t\t\t\tx := math.Abs(float64(currentPosX - tempX))\n\t\t\t\tl += x\n\t\t\t\tcurrentPosX = tempX\n\t\t\t} else {\n\t\t\t\tl += math.Abs(float64(tempX))\n\t\t\t\tcurrentPosX += tempX\n\t\t\t}\n\n\t\t\tindex += 2\n\t\tcase \"V\", \"v\":\n\t\t\ttempY, err := strconv.Atoi(stringArray[index+1])\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Invalid SVG String\")\n\t\t\t\t// TODO: return InvalidShapeSvgStringError\n\t\t\t}\n\n\t\t\tif stringArray[index] == \"V\" {\n\t\t\t\ty := math.Abs(float64(currentPosY - tempY))\n\t\t\t\tl += y\n\t\t\t\tcurrentPosY = tempY\n\t\t\t} else {\n\t\t\t\tl += math.Abs(float64(tempY))\n\t\t\t\tcurrentPosY += tempY\n\t\t\t}\n\n\t\t\tindex += 2\n\t\tcase \"Z\", \"z\":\n\t\t\tx := math.Abs(float64(currentPosX - startPosX))\n\t\t\ty := math.Abs(float64(currentPosY - startPosY))\n\t\t\tl += math.Sqrt(x*x + y*y)\n\t\t\tcurrentPosX = startPosX\n\t\t\tcurrentPosY = startPosY\n\n\t\t\tindex++\n\t\tdefault:\n\t\t\tfmt.Println(\"unsupported svg command\")\n\t\t\t// TODO: return InvalidShapeSvgStringError\n\t\t\tindex++\n\t\t}\n\t\t// Adding a new vertex\n\t\tpoint := shared.Point2d{X: currentPosX, Y: currentPosY}\n\t\tvertexList = append(vertexList, point)\n\t}\n\tlength = int(math.Ceil(l))\n\treturn vertexList, length\n}",
"func DecodeGeometry(geom []uint32) [][][]int {\n\tpos := 0\n\tfirstpt, currentpt := []int{}, []int{}\n\tnewline := [][]int{}\n\tlines := [][][]int{}\n\tfor pos < len(geom) {\n\t\tgeomval := geom[pos]\n\n\t\tcmd, length := Get_Command_Length(geomval)\n\n\t\t// conde for a move to cmd\n\t\tif cmd == 1 {\n\t\t\txdelta := DecodeDelta(geom[pos+1])\n\t\t\tydelta := DecodeDelta(geom[pos+2])\n\t\t\tfirstpt = []int{xdelta, ydelta}\n\t\t\tcurrentpt = firstpt\n\t\t\t//fmt.Println(firstpt)\n\t\t\tpos += 2\n\n\t\t\tif pos == len(geom)-1 {\n\t\t\t\tlines = append(lines, [][]int{currentpt})\n\t\t\t}\n\t\t} else if cmd == 2 {\n\t\t\tnewline = [][]int{firstpt}\n\t\t\tcurrentpos := pos + 1\n\t\t\tendpos := currentpos + int(length*2)\n\t\t\tfor currentpos < endpos {\n\t\t\t\txdelta := DecodeDelta(geom[currentpos])\n\t\t\t\tydelta := DecodeDelta(geom[currentpos+1])\n\t\t\t\tcurrentpt = []int{currentpt[0] + xdelta, currentpt[1] + ydelta}\n\t\t\t\tnewline = append(newline, currentpt)\n\t\t\t\tcurrentpos += 2\n\t\t\t}\n\n\t\t\tpos = currentpos - 1\n\t\t\tlines = append(lines, newline)\n\n\t\t} else if cmd == 7 {\n\t\t\tnewline := lines[len(lines)-1]\n\t\t\tnewline = append(newline, newline[0])\n\t\t\tlines[len(lines)-1] = newline\n\t\t}\n\n\t\t//fmt.Println(cmd,length)\n\t\tpos += 1\n\t}\n\treturn lines\n}",
"func (r *wavefrontSceneReader) parseFace(lineTokens []string, relVertexOffset, relUvOffset, relNormalOffset int) ([]*input.Primitive, error) {\n\tif len(lineTokens) < 4 || len(lineTokens) > 5 {\n\t\treturn nil, fmt.Errorf(`unsupported syntax for \"f\"; expected 3 arguments for triangular face or 4 arguments for a quad face; got %d. Select the triangulation option in your exporter`, len(lineTokens)-1)\n\t}\n\n\tvar vertices [4]types.Vec3\n\tvar normals [4]types.Vec3\n\tvar uv [4]types.Vec2\n\tvar vOffset int\n\tvar err error\n\texpIndices := 0\n\thasNormals := false\n\tfor arg := 0; arg < len(lineTokens)-1; arg++ {\n\t\tvTokens := strings.Split(lineTokens[arg+1], \"/\")\n\n\t\t// The first arg defines the format for the following args\n\t\tif arg == 0 {\n\t\t\texpIndices = len(vTokens)\n\t\t} else if len(vTokens) != expIndices {\n\t\t\treturn nil, fmt.Errorf(\"expected each face argument to contain %d indices; arg %d contains %d indices\", expIndices, arg, len(vTokens))\n\t\t}\n\n\t\t// Faces must at least define a vertex coord\n\t\tif vTokens[0] == \"\" {\n\t\t\treturn nil, fmt.Errorf(\"face argument %d does not include a vertex index\", arg)\n\t\t}\n\n\t\tvOffset, err = selectFaceCoordIndex(vTokens[0], len(r.vertexList), relVertexOffset)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"could not parse vertex coord for face argument %d: %s\", arg, err.Error())\n\t\t}\n\t\tvertices[arg] = r.vertexList[vOffset]\n\n\t\t// Parse UV coords if specified\n\t\tif expIndices > 1 && vTokens[1] != \"\" {\n\t\t\tvOffset, err = selectFaceCoordIndex(vTokens[1], len(r.uvList), relUvOffset)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"could not parse tex coord for face argument %d: %s\", arg, err.Error())\n\t\t\t}\n\t\t\tuv[arg] = r.uvList[vOffset]\n\t\t}\n\n\t\t// Parse normal coords if specified\n\t\tif expIndices > 2 && vTokens[2] != \"\" {\n\t\t\tvOffset, err = selectFaceCoordIndex(vTokens[2], len(r.normalList), relNormalOffset)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"could not parse normal coord for face argument %d: %s\", arg, err.Error())\n\t\t\t}\n\t\t\tnormals[arg] = r.normalList[vOffset]\n\t\t\thasNormals = true\n\t\t}\n\t}\n\n\t// If no material defined select the default. 
Also flag the current material\n\t// as being in use so we don't prune it later.\n\tif r.curMaterial == nil {\n\t\tr.curMaterial = r.defaultMaterial()\n\t}\n\tr.curMaterial.Used = true\n\n\t// If no normals are available generate them from the vertices\n\tif !hasNormals {\n\t\te01 := vertices[1].Sub(vertices[0])\n\t\te02 := vertices[2].Sub(vertices[0])\n\t\tfaceNormal := e01.Cross(e02).Normalize()\n\t\tnormals[0] = faceNormal\n\t\tnormals[1] = faceNormal\n\t\tnormals[2] = faceNormal\n\t\tnormals[3] = faceNormal\n\t}\n\n\t// Assemble vertices into one or two primitives depending on whether we are parsing a triangular or a quad face\n\tprimitives := make([]*input.Primitive, 0)\n\tindiceList := [][3]int{{0, 1, 2}}\n\tif len(lineTokens) == 5 {\n\t\tindiceList = append(indiceList, [3]int{0, 2, 3})\n\t}\n\n\tvar triVerts [3]types.Vec3\n\tvar triNormals [3]types.Vec3\n\tvar triUVs [3]types.Vec2\n\tfor _, indices := range indiceList {\n\t\t// copy vertices for this triangle\n\t\tfor triIndex, selectIndex := range indices {\n\t\t\ttriVerts[triIndex] = vertices[selectIndex]\n\t\t\ttriNormals[triIndex] = normals[selectIndex]\n\t\t\ttriUVs[triIndex] = uv[selectIndex]\n\t\t}\n\n\t\tprim := &input.Primitive{\n\t\t\tVertices: triVerts,\n\t\t\tNormals: triNormals,\n\t\t\tUVs: triUVs,\n\t\t\tMaterialIndex: r.matNameToIndex[r.curMaterial.Name],\n\t\t}\n\t\tprim.SetBBox(\n\t\t\t[2]types.Vec3{\n\t\t\t\ttypes.MinVec3(triVerts[0], types.MinVec3(triVerts[1], triVerts[2])),\n\t\t\t\ttypes.MaxVec3(triVerts[0], types.MaxVec3(triVerts[1], triVerts[2])),\n\t\t\t},\n\t\t)\n\t\tprim.SetCenter(triVerts[0].Add(triVerts[1]).Add(triVerts[2]).Mul(1.0 / 3.0))\n\t\tprimitives = append(primitives, prim)\n\t}\n\n\treturn primitives, nil\n}",
"func MultiGeometry(children ...Element) *CompoundElement { return newCE(\"MultiGeometry\", children) }",
"func splitPolygon(a, b *node) *node {\n\ta2 := newNode(a.i, a.x, a.y)\n\tb2 := newNode(b.i, b.x, b.y)\n\tan := a.next\n\tbp := b.prev\n\n\ta.next = b\n\tb.prev = a\n\n\ta2.next = an\n\tan.prev = a2\n\n\tb2.next = a2\n\ta2.prev = b2\n\n\tbp.next = b2\n\tb2.prev = bp\n\n\treturn b2\n}",
"func Union(elements ...Shape) Shape {\n\treturn Group{space.NoTransformation(), elements}\n}",
"func Decode_Polygon(geom []uint32) [][][][]int {\n\tpos := 0\n\tcurrentpt := []int{0, 0}\n\tnewline := [][]int{}\n\tpolygons := [][][][]int{}\n\tfor pos < len(geom) {\n\t\tgeomval := geom[pos]\n\n\t\tcmd, length := Get_Command_Length(geomval)\n\n\t\t// conde for a move to cmd\n\t\tif cmd == 1 {\n\t\t\txdelta := DecodeDelta(geom[pos+1])\n\t\t\tydelta := DecodeDelta(geom[pos+2])\n\t\t\tcurrentpt = []int{currentpt[0] + xdelta, currentpt[1] + ydelta}\n\t\t\t//fmt.Println(firstpt)\n\t\t\tpos += 2\n\n\t\t} else if cmd == 2 {\n\t\t\tnewline = [][]int{currentpt}\n\t\t\tcurrentpos := pos + 1\n\t\t\tendpos := currentpos + int(length*2)\n\t\t\tfor currentpos < endpos {\n\t\t\t\txdelta := DecodeDelta(geom[currentpos])\n\t\t\t\tydelta := DecodeDelta(geom[currentpos+1])\n\t\t\t\tcurrentpt = []int{currentpt[0] + xdelta, currentpt[1] + ydelta}\n\t\t\t\tnewline = append(newline, currentpt)\n\t\t\t\tcurrentpos += 2\n\t\t\t}\n\n\t\t\tpos = currentpos - 1\n\n\t\t} else if cmd == 7 {\n\t\t\t//newline = append(newline,newline[0])\n\t\t\tif Exterior_Ring(newline) == false {\n\t\t\t\tpolygons = append(polygons, [][][]int{newline})\n\t\t\t\tnewline = [][]int{}\n\t\t\t} else {\n\t\t\t\tif len(polygons) == 0 {\n\t\t\t\t\tpolygons = append(polygons, [][][]int{newline})\n\n\t\t\t\t} else {\n\t\t\t\t\tpolygons[len(polygons)-1] = append(polygons[len(polygons)-1], newline)\n\n\t\t\t\t}\n\t\t\t\tnewline = [][]int{}\n\t\t\t}\n\n\t\t}\n\n\t\t//fmt.Println(cmd,length)\n\t\tpos += 1\n\t}\n\n\treturn polygons\n}",
"func getVertices(shapeSVGString string) (vertices []Coordinates, err error) {\n\t// https://www.w3.org/TR/SVG2/paths.html\n\t// ex. M 0 0 L 0 5\n\n\tpoints := []Coordinates{}\n\tr, err := regexp.Compile(`[MmHhVvLlZz][ \\-0-9]*`)\n\tif err != nil {\n\t\tfmt.Println(\"Error getting vertices.\", err)\n\t\treturn nil, err\n\t}\n\tres := r.FindAllString(shapeSVGString, -1)\n\n\tvar x_start, y_start, x_current, y_current float64\n\tfor i := range res {\n\t\tvar tmp int64\n\t\targs := strings.Fields(res[i])\n\t\tif args[0] == \"M\" {\n\t\t\t// Move to location given\n\t\t\ttmp, _ = strconv.ParseInt(args[1], 0, 8)\n\t\t\tx_start = float64(tmp)\n\t\t\tx_current = x_start\n\t\t\ttmp, _ = strconv.ParseInt(args[2], 0, 8)\n\t\t\ty_start = float64(tmp)\n\t\t\ty_current = y_start\n\n\t\t} else if args[0] == \"L\" {\n\t\t\t// Draw line from start pos to given pos\n\t\t\ttmp, _ = strconv.ParseInt(args[1], 0, 8)\n\t\t\tx_current = math.Abs(float64(tmp))\n\t\t\ttmp, _ = strconv.ParseInt(args[2], 0, 8)\n\t\t\ty_current = math.Abs(float64(tmp))\n\n\t\t} else if args[0] == \"l\" {\n\t\t\t// Draw line from current pos to given pos\n\t\t\ttmp, _ = strconv.ParseInt(args[1], 0, 8)\n\t\t\tx_current = math.Abs(float64(tmp) + x_current)\n\t\t\ttmp, _ = strconv.ParseInt(args[2], 0, 8)\n\t\t\ty_current = math.Abs(float64(tmp) + y_current)\n\n\t\t} else if args[0] == \"H\" {\n\t\t\t// Draw horizontal line from start pos to given pos\n\t\t\ttmp, _ := strconv.ParseInt(args[1], 0, 8)\n\n\t\t\tx_current = math.Abs(float64(tmp))\n\n\t\t} else if args[0] == \"h\" {\n\t\t\t// Draw horizontal line from current pos to given pos\n\t\t\ttmp, _ = strconv.ParseInt(args[1], 0, 8)\n\t\t\tx_current = math.Abs(float64(tmp) + x_current)\n\n\t\t} else if args[0] == \"V\" {\n\t\t\t// Draw vertical line from start pos to given pos\n\t\t\ttmp, _ = strconv.ParseInt(args[1], 0, 8)\n\n\t\t\ty_current = math.Abs(float64(tmp))\n\n\t\t} else if args[0] == \"v\" {\n\t\t\t// Draw vertical line from current pos to given pos\n\t\t\ttmp, _ = strconv.ParseInt(args[1], 0, 8)\n\t\t\ty_current = math.Abs(float64(tmp) + y_current)\n\n\t\t} else if args[0] == \"Z\" || args[0] == \"z\" {\n\t\t\t// Return to start pos\n\t\t\tx_current = x_start\n\t\t\ty_current = y_start\n\t\t}\n\n\t\t// Check that vertices are not out of bounds\n\t\tif x_current < 0 || x_current > float64(Settings.CanvasXMax) {\n\t\t\treturn nil, OutOfBoundsError{}\n\t\t}\n\t\tif y_current < 0 || y_current > float64(Settings.CanvasYMax) {\n\t\t\treturn nil, OutOfBoundsError{}\n\t\t}\n\t\tpoints = append(points, Coordinates{int(x_current), int(y_current)})\n\t}\n\treturn points, nil\n}",
"func Destructure(ctx context.Context, cmp pkgcmp.Compare, clipbox *geom.Extent, multipolygon *geom.MultiPolygon) ([]geom.Line, error) {\n\n\tsegments, err := asSegments(*multipolygon)\n\tif err != nil {\n\t\tif debug {\n\t\t\tlog.Printf(\"asSegments returned error: %v\", err)\n\t\t}\n\t\treturn nil, err\n\t}\n\tgext, err := geom.NewExtentFromGeometry(multipolygon)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Let's see if our clip box is bigger then our polygon.\n\t// if it is we don't need the clip box.\n\thasClipbox := clipbox != nil && !clipbox.Contains(gext)\n\t// Let's get the edges of our clipbox; as segments and add it to the begining.\n\tif hasClipbox {\n\t\tedges := clipbox.Edges(nil)\n\t\tsegments = append([]geom.Line{\n\t\t\tgeom.Line(edges[0]), geom.Line(edges[1]),\n\t\t\tgeom.Line(edges[2]), geom.Line(edges[3]),\n\t\t}, segments...)\n\t}\n\tipts := make(map[int][][2]float64)\n\n\t// Lets find all the places we need to split the lines on.\n\teq := intersect.NewEventQueue(segments)\n\teq.FindIntersects(ctx, true, func(src, dest int, pt [2]float64) error {\n\t\tipts[src] = append(ipts[src], pt)\n\t\tipts[dest] = append(ipts[dest], pt)\n\t\treturn nil\n\t})\n\n\t// Time to start splitting lines. if we have a clip box we can ignore the first 4 (0,1,2,3) lines.\n\n\tnsegs := make([]geom.Line, 0, len(segments))\n\n\tfor i := 0; i < len(segments); i++ {\n\t\tpts := append([][2]float64{segments[i][0], segments[i][1]}, ipts[i]...)\n\n\t\t// Normalize the direction of the points.\n\t\tsort.Sort(ByXYPoint(pts))\n\n\t\tfor j := 1; j < len(pts); j++ {\n\t\t\tif cmp.PointEqual(pts[j-1], pts[j]) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tnl := geom.Line{pts[j-1], pts[j]}\n\t\t\tif hasClipbox && !clipbox.ContainsLine(nl) {\n\t\t\t\t// Not in clipbox discard segment.\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tnsegs = append(nsegs, nl)\n\t\t}\n\t\tif ctx.Err() != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tunique(nsegs)\n\treturn nsegs, nil\n}",
"func Polygon(children ...Element) *CompoundElement { return newCE(\"Polygon\", children) }",
"func GxTourPrimitive(children ...Element) *CompoundElement { return newCE(\"gx:TourPrimitive\", children) }",
"func NewPrimitives() Primitives {\n\treturn Primitives{\n\t\t\"vtxs\": NewAttributes(Int, 0),\n\t}\n}",
"func (shape *Shape) PolyShapeSetVerts(verts []Vect, transform Transform) {\n\tC.cpPolyShapeSetVerts(\n\t\t(*C.cpShape)(unsafe.Pointer(shape)),\n\t\tC.int(len(verts)),\n\t\t(*C.cpVect)(unsafe.Pointer(&verts[0])),\n\t\ttransform.c(),\n\t)\n}",
"func DrawElementsBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, basevertex int32) {\n\tsyscall.Syscall6(gpDrawElementsBaseVertex, 5, uintptr(mode), uintptr(count), uintptr(xtype), uintptr(indices), uintptr(basevertex), 0)\n}",
"func TestBinaryOp(t *testing.T) {\n\tfor i, geomCase := range []struct {\n\t\tinput1, input2 string\n\t\tunion, inter, fwdDiff, revDiff, symDiff, relate string\n\t}{\n\t\t{\n\t\t\t/*\n\t\t\t /\\\n\t\t\t / \\\n\t\t\t / \\\n\t\t\t / \\\n\t\t\t / /\\ \\\n\t\t\t / / \\ \\\n\t\t\t / / \\ \\\n\t\t\t +---/------\\---+\n\t\t\t / \\\n\t\t\t / \\\n\t\t\t / \\\n\t\t\t +--------------+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,1 2,2 0,0 0))\",\n\t\t\tinput2: \"POLYGON((0 1,2 1,1 3,0 1))\",\n\t\t\tunion: \"POLYGON((0 0,0.5 1,0 1,1 3,2 1,1.5 1,2 0,0 0))\",\n\t\t\tinter: \"POLYGON((0.5 1,1 2,1.5 1,0.5 1))\",\n\t\t\tfwdDiff: \"POLYGON((0 0,2 0,1.5 1,0.5 1,0 0))\",\n\t\t\trevDiff: \"POLYGON((1 3,2 1,1.5 1,1 2,0.5 1,0 1,1 3))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((0 0,2 0,1.5 1,0.5 1,0 0)),((0 1,0.5 1,1 2,1.5 1,2 1,1 3,0 1)))\",\n\t\t\trelate: \"212101212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +-----------+\n\t\t\t | |\n\t\t\t | |\n\t\t\t +-----+-----+ |\n\t\t\t | | | |\n\t\t\t | | | |\n\t\t\t | +-----+-----+\n\t\t\t | |\n\t\t\t | |\n\t\t\t +-----------+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,2 0,2 2,0 2,0 0))\",\n\t\t\tinput2: \"POLYGON((1 1,3 1,3 3,1 3,1 1))\",\n\t\t\tunion: \"POLYGON((0 0,2 0,2 1,3 1,3 3,1 3,1 2,0 2,0 0))\",\n\t\t\tinter: \"POLYGON((1 1,2 1,2 2,1 2,1 1))\",\n\t\t\tfwdDiff: \"POLYGON((0 0,2 0,2 1,1 1,1 2,0 2,0 0))\",\n\t\t\trevDiff: \"POLYGON((2 1,3 1,3 3,1 3,1 2,2 2,2 1))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((0 0,2 0,2 1,1 1,1 2,0 2,0 0)),((2 1,3 1,3 3,1 3,1 2,2 2,2 1)))\",\n\t\t\trelate: \"212101212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +-----+\n\t\t\t | |\n\t\t\t | |\n\t\t\t +-----+\n\n\n\t\t\t +-----+\n\t\t\t | |\n\t\t\t | |\n\t\t\t +-----+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,1 0,1 1,0 1,0 0))\",\n\t\t\tinput2: \"POLYGON((2 2,3 2,3 3,2 3,2 2))\",\n\t\t\tunion: \"MULTIPOLYGON(((0 0,1 0,1 1,0 1,0 0)),((2 2,3 2,3 3,2 3,2 2)))\",\n\t\t\tinter: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\tfwdDiff: \"POLYGON((0 0,1 0,1 1,0 1,0 0))\",\n\t\t\trevDiff: \"POLYGON((2 2,3 2,3 3,2 3,2 2))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((0 0,1 0,1 1,0 1,0 0)),((2 2,3 2,3 3,2 3,2 2)))\",\n\t\t\trelate: \"FF2FF1212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +-----------------+\n\t\t\t | |\n\t\t\t | |\n\t\t\t | +-----+ |\n\t\t\t | | | |\n\t\t\t | | | |\n\t\t\t | +-----+ |\n\t\t\t | |\n\t\t\t | |\n\t\t\t +-----------------+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,3 0,3 3,0 3,0 0))\",\n\t\t\tinput2: \"POLYGON((1 1,2 1,2 2,1 2,1 1))\",\n\t\t\tunion: \"POLYGON((0 0,3 0,3 3,0 3,0 0))\",\n\t\t\tinter: \"POLYGON((1 1,2 1,2 2,1 2,1 1))\",\n\t\t\tfwdDiff: \"POLYGON((0 0,3 0,3 3,0 3,0 0),(1 1,2 1,2 2,1 2,1 1))\",\n\t\t\trevDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\tsymDiff: \"POLYGON((0 0,0 3,3 3,3 0,0 0),(1 1,2 1,2 2,1 2,1 1))\",\n\t\t\trelate: \"212FF1FF2\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +-----+\n\t\t\t | A |\n\t\t\t | |\n\t\t\t +-----+\n\n\n\t\t\t +-----------+\n\t\t\t | A |\n\t\t\t | |\n\t\t\t | +-----+-----+\n\t\t\t | | A&B | |\n\t\t\t | | | |\n\t\t\t +-----+-----+ | +-----+\n\t\t\t | | | B |\n\t\t\t | B | | |\n\t\t\t o +-----------+ +-----+\n\t\t\t*/\n\t\t\tinput1: \"MULTIPOLYGON(((0 4,0 5,1 5,1 4,0 4)),((0 1,0 3,2 3,2 1,0 1)))\",\n\t\t\tinput2: \"MULTIPOLYGON(((4 0,4 1,5 1,5 0,4 0)),((1 0,1 2,3 2,3 0,1 0)))\",\n\t\t\tunion: \"MULTIPOLYGON(((0 4,0 5,1 5,1 4,0 4)),((0 1,0 3,2 3,2 2,3 2,3 0,1 0,1 1,0 1)),((4 0,4 1,5 1,5 0,4 0)))\",\n\t\t\tinter: \"POLYGON((2 2,2 1,1 1,1 2,2 2))\",\n\t\t\tfwdDiff: \"MULTIPOLYGON(((0 4,0 5,1 5,1 4,0 4)),((0 1,0 3,2 3,2 2,1 2,1 1,0 1)))\",\n\t\t\trevDiff: \"MULTIPOLYGON(((4 0,4 1,5 1,5 0,4 0)),((1 0,1 1,2 
1,2 2,3 2,3 0,1 0)))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((0 4,0 5,1 5,1 4,0 4)),((0 1,0 3,2 3,2 2,1 2,1 1,0 1)),((1 1,2 1,2 2,3 2,3 0,1 0,1 1)),((4 0,4 1,5 1,5 0,4 0)))\",\n\t\t\trelate: \"212101212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\n\t\t\t Two interlocking rings:\n\n\t\t\t +-------------------+\n\t\t\t | |\n\t\t\t | +-----------+ |\n\t\t\t | | | |\n\t\t\t | | +-------+---+-------+\n\t\t\t | | | | | |\n\t\t\t | | | +---+---+---+ |\n\t\t\t | | | | | | | |\n\t\t\t | +---+---+---+ | | |\n\t\t\t | | | | | |\n\t\t\t +-------+---+-------+ | |\n\t\t\t | | | |\n\t\t\t | +-----------+ |\n\t\t\t | |\n\t\t\t +-------------------+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 2,5 2,5 7,0 7,0 2),(1 3,4 3,4 6,1 6,1 3))\",\n\t\t\tinput2: \"POLYGON((2 0,7 0,7 5,2 5,2 0),(3 1,6 1,6 4,3 4,3 1))\",\n\t\t\tunion: \"POLYGON((2 2,0 2,0 7,5 7,5 5,7 5,7 0,2 0,2 2),(5 4,5 2,3 2,3 1,6 1,6 4,5 4),(1 3,2 3,2 5,4 5,4 6,1 6,1 3),(3 3,4 3,4 4,3 4,3 3))\",\n\t\t\tinter: \"MULTIPOLYGON(((3 2,2 2,2 3,3 3,3 2)),((5 5,5 4,4 4,4 5,5 5)))\",\n\t\t\tfwdDiff: \"MULTIPOLYGON(((2 2,0 2,0 7,5 7,5 5,4 5,4 6,1 6,1 3,2 3,2 2)),((5 4,5 2,3 2,3 3,4 3,4 4,5 4)))\",\n\t\t\trevDiff: \"MULTIPOLYGON(((5 5,7 5,7 0,2 0,2 2,3 2,3 1,6 1,6 4,5 4,5 5)),((2 3,2 5,4 5,4 4,3 4,3 3,2 3)))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((5 5,7 5,7 0,2 0,2 2,3 2,3 1,6 1,6 4,5 4,5 5)),((5 5,4 5,4 6,1 6,1 3,2 3,2 2,0 2,0 7,5 7,5 5)),((2 3,2 5,4 5,4 4,3 4,3 3,2 3)),((4 4,5 4,5 2,3 2,3 3,4 3,4 4)))\",\n\t\t\trelate: \"212101212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\n\t\t\t /\\ /\\\n\t\t\t / \\ / \\\n\t\t\t / A \\ / A \\\n\t\t\t / \\/ \\\n\t\t\t \\ /\\ /\\ /\\ /\n\t\t\t \\/AB\\/ \\/AB\\/\n\t\t\t /\\ /\\ /\\ /\\\n\t\t\t / \\/ \\/ \\/ \\\n\t\t\t \\ /\\ /\n\t\t\t \\ B / \\ B /\n\t\t\t \\ / \\ /\n\t\t\t \\/ \\/\n\n\t\t\t*/\n\t\t\tinput1: \"MULTIPOLYGON(((0 2,1 1,2 2,1 3,0 2)),((2 2,3 1,4 2,3 3,2 2)))\",\n\t\t\tinput2: \"MULTIPOLYGON(((0 1,1 2,2 1,1 0,0 1)),((2 1,3 0,4 1,3 2,2 1)))\",\n\t\t\tunion: \"MULTIPOLYGON(((0.5 1.5,0 2,1 3,2 2,1.5 1.5,2 1,1 0,0 1,0.5 1.5)),((2.5 1.5,2 2,3 3,4 2,3.5 1.5,4 1,3 0,2 1,2.5 1.5)))\",\n\t\t\tinter: \"MULTIPOLYGON(((1.5 1.5,1 1,0.5 1.5,1 2,1.5 1.5)),((3.5 1.5,3 1,2.5 1.5,3 2,3.5 1.5)))\",\n\t\t\tfwdDiff: \"MULTIPOLYGON(((0.5 1.5,0 2,1 3,2 2,1.5 1.5,1 2,0.5 1.5)),((2.5 1.5,2 2,3 3,4 2,3.5 1.5,3 2,2.5 1.5)))\",\n\t\t\trevDiff: \"MULTIPOLYGON(((1 0,0 1,0.5 1.5,1 1,1.5 1.5,2 1,1 0)),((3.5 1.5,4 1,3 0,2 1,2.5 1.5,3 1,3.5 1.5)))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((1 0,0 1,0.5 1.5,1 1,1.5 1.5,2 1,1 0)),((1.5 1.5,1 2,0.5 1.5,0 2,1 3,2 2,1.5 1.5)),((3.5 1.5,4 1,3 0,2 1,2.5 1.5,3 1,3.5 1.5)),((3.5 1.5,3 2,2.5 1.5,2 2,3 3,4 2,3.5 1.5)))\",\n\t\t\trelate: \"212101212\",\n\t\t},\n\n\t\t{\n\t\t\t/*\n\t\t\t +-----+-----+\n\t\t\t | B | A |\n\t\t\t | | |\n\t\t\t +-----+-----+\n\t\t\t | A | B |\n\t\t\t | | |\n\t\t\t +-----+-----+\n\t\t\t*/\n\t\t\tinput1: \"MULTIPOLYGON(((0 0,0 1,1 1,1 0,0 0)),((1 1,1 2,2 2,2 1,1 1)))\",\n\t\t\tinput2: \"MULTIPOLYGON(((0 1,0 2,1 2,1 1,0 1)),((1 0,1 1,2 1,2 0,1 0)))\",\n\t\t\tunion: \"POLYGON((0 0,0 1,0 2,1 2,2 2,2 1,2 0,1 0,0 0))\",\n\t\t\tinter: \"MULTILINESTRING((0 1,1 1),(1 1,1 0),(1 1,1 2),(2 1,1 1))\",\n\t\t\tfwdDiff: \"MULTIPOLYGON(((0 0,0 1,1 1,1 0,0 0)),((1 1,1 2,2 2,2 1,1 1)))\",\n\t\t\trevDiff: \"MULTIPOLYGON(((0 1,0 2,1 2,1 1,0 1)),((1 0,1 1,2 1,2 0,1 0)))\",\n\t\t\tsymDiff: \"POLYGON((0 0,0 1,0 2,1 2,2 2,2 1,2 0,1 0,0 0))\",\n\t\t\trelate: \"FF2F11212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +-----+-----+\n\t\t\t | A | B |\n\t\t\t | | |\n\t\t\t +-----+-----+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,0 1,1 1,1 0,0 0))\",\n\t\t\tinput2: 
\"POLYGON((1 0,1 1,2 1,2 0,1 0))\",\n\t\t\tunion: \"POLYGON((0 0,0 1,1 1,2 1,2 0,1 0,0 0))\",\n\t\t\tinter: \"LINESTRING(1 1,1 0)\",\n\t\t\tfwdDiff: \"POLYGON((0 0,0 1,1 1,1 0,0 0))\",\n\t\t\trevDiff: \"POLYGON((1 0,1 1,2 1,2 0,1 0))\",\n\t\t\tsymDiff: \"POLYGON((1 1,2 1,2 0,1 0,0 0,0 1,1 1))\",\n\t\t\trelate: \"FF2F11212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +-------+\n\t\t\t | A |\n\t\t\t | +-------+\n\t\t\t | | B |\n\t\t\t +-------+ |\n\t\t\t | |\n\t\t\t +-------+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0.5,0 1.5,1 1.5,1 0.5,0 0.5))\",\n\t\t\tinput2: \"POLYGON((1 0,1 1,2 1,2 0,1 0))\",\n\t\t\tunion: \"POLYGON((0 0.5,0 1.5,1 1.5,1 1,2 1,2 0,1 0,1 0.5,0 0.5))\",\n\t\t\tinter: \"LINESTRING(1 1,1 0.5)\",\n\t\t\tfwdDiff: \"POLYGON((0 0.5,0 1.5,1 1.5,1 1,1 0.5,0 0.5))\",\n\t\t\trevDiff: \"POLYGON((1 0,1 0.5,1 1,2 1,2 0,1 0))\",\n\t\t\tsymDiff: \"POLYGON((1 0,1 0.5,0 0.5,0 1.5,1 1.5,1 1,2 1,2 0,1 0))\",\n\t\t\trelate: \"FF2F11212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +-----+\n\t\t\t | A&B |\n\t\t\t | |\n\t\t\t +-----+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,0 1,1 1,1 0,0 0))\",\n\t\t\tinput2: \"POLYGON((0 0,0 1,1 1,1 0,0 0))\",\n\t\t\tunion: \"POLYGON((0 0,0 1,1 1,1 0,0 0))\",\n\t\t\tinter: \"POLYGON((0 0,0 1,1 1,1 0,0 0))\",\n\t\t\tfwdDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\trevDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\tsymDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\trelate: \"2FFF1FFF2\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t *-------*\n\t\t\t |\\ A&B /|\n\t\t\t | \\ / |\n\t\t\t | \\ / |\n\t\t\t * * *\n\t\t\t | A | B |\n\t\t\t | | |\n\t\t\t *---*---*\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,0 2,2 2,1 1,1 0,0 0))\",\n\t\t\tinput2: \"POLYGON((1 0,1 1,0 2,2 2,2 0,1 0))\",\n\t\t\tunion: \"POLYGON((0 0,0 2,2 2,2 0,1 0,0 0))\",\n\t\t\tinter: \"GEOMETRYCOLLECTION(LINESTRING(1 1,1 0),POLYGON((0 2,2 2,1 1,0 2)))\",\n\t\t\tfwdDiff: \"POLYGON((0 0,0 2,1 1,1 0,0 0))\",\n\t\t\trevDiff: \"POLYGON((1 0,1 1,2 2,2 0,1 0))\",\n\t\t\tsymDiff: \"POLYGON((0 2,1 1,2 2,2 0,1 0,0 0,0 2))\",\n\t\t\trelate: \"212111212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +---+\n\t\t\t | A |\n\t\t\t +---+---+\n\t\t\t | B |\n\t\t\t +---+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 1,1 1,1 2,0 2,0 1))\",\n\t\t\tinput2: \"POLYGON((1 0,2 0,2 1,1 1,1 0))\",\n\t\t\tunion: \"MULTIPOLYGON(((1 1,0 1,0 2,1 2,1 1)),((1 1,2 1,2 0,1 0,1 1)))\",\n\t\t\tinter: \"POINT(1 1)\",\n\t\t\tfwdDiff: \"POLYGON((1 1,0 1,0 2,1 2,1 1))\",\n\t\t\trevDiff: \"POLYGON((1 1,2 1,2 0,1 0,1 1))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((1 1,2 1,2 0,1 0,1 1)),((1 1,0 1,0 2,1 2,1 1)))\",\n\t\t\trelate: \"FF2F01212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +-----+-----+\n\t\t\t | / \\ |\n\t\t\t | +-+-+ |\n\t\t\t | A | B |\n\t\t\t +-----+-----+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,2 0,2 1,1 1,2 2,0 2,0 0))\",\n\t\t\tinput2: \"POLYGON((2 0,4 0,4 2,2 2,3 1,2 1,2 0))\",\n\t\t\tunion: \"POLYGON((2 0,0 0,0 2,2 2,4 2,4 0,2 0),(2 2,1 1,2 1,3 1,2 2))\",\n\t\t\tinter: \"GEOMETRYCOLLECTION(POINT(2 2),LINESTRING(2 0,2 1))\",\n\t\t\tfwdDiff: \"POLYGON((2 0,0 0,0 2,2 2,1 1,2 1,2 0))\",\n\t\t\trevDiff: \"POLYGON((2 2,4 2,4 0,2 0,2 1,3 1,2 2))\",\n\t\t\tsymDiff: \"POLYGON((2 2,4 2,4 0,2 0,0 0,0 2,2 2),(2 2,1 1,2 1,3 1,2 2))\",\n\t\t\trelate: \"FF2F11212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +---+\n\t\t\t | A |\n\t\t\t +---+---+\n\t\t\t | B |\n\t\t\t +---+ +---+\n\t\t\t |A&B|\n\t\t\t +---+\n\t\t\t*/\n\t\t\tinput1: \"MULTIPOLYGON(((1 1,1 0,0 0,0 1,1 1)),((1 2,2 2,2 3,1 3,1 2)))\",\n\t\t\tinput2: \"MULTIPOLYGON(((1 1,1 0,0 0,0 1,1 1)),((2 1,3 1,3 2,2 2,2 1)))\",\n\t\t\tunion: \"MULTIPOLYGON(((0 0,0 1,1 1,1 0,0 0)),((2 
2,1 2,1 3,2 3,2 2)),((2 2,3 2,3 1,2 1,2 2)))\",\n\t\t\tinter: \"GEOMETRYCOLLECTION(POINT(2 2),POLYGON((0 0,0 1,1 1,1 0,0 0)))\",\n\t\t\tfwdDiff: \"POLYGON((2 2,1 2,1 3,2 3,2 2))\",\n\t\t\trevDiff: \"POLYGON((2 2,3 2,3 1,2 1,2 2))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((2 2,3 2,3 1,2 1,2 2)),((2 2,1 2,1 3,2 3,2 2)))\",\n\t\t\trelate: \"2F2F11212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +-------+\n\t\t\t | |\n\t\t\t +---+---+ |\n\t\t\t | | | |\n\t\t\t | +---+ |\n\t\t\t | A | |\n\t\t\t | +---+ |\n\t\t\t | | | |\n\t\t\t +---+---+ |\n\t\t\t |A&B| B |\n\t\t\t +---+-------+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,1 0,1 4,0 4,0 0))\",\n\t\t\tinput2: \"POLYGON((0 0,3 0,3 5,1 5,1 4,2 4,2 3,1 3,1 2,2 2,2 1,0 1,0 0))\",\n\t\t\tunion: \"POLYGON((1 0,0 0,0 1,0 4,1 4,1 5,3 5,3 0,1 0),(1 4,1 3,2 3,2 4,1 4),(1 2,1 1,2 1,2 2,1 2))\",\n\t\t\tinter: \"GEOMETRYCOLLECTION(POINT(1 4),LINESTRING(1 2,1 3),POLYGON((1 0,0 0,0 1,1 1,1 0)))\",\n\t\t\tfwdDiff: \"POLYGON((1 2,1 1,0 1,0 4,1 4,1 3,1 2))\",\n\t\t\trevDiff: \"POLYGON((1 4,1 5,3 5,3 0,1 0,1 1,2 1,2 2,1 2,1 3,2 3,2 4,1 4))\",\n\t\t\tsymDiff: \"POLYGON((1 4,1 5,3 5,3 0,1 0,1 1,0 1,0 4,1 4),(1 1,2 1,2 2,1 2,1 1),(1 4,1 3,2 3,2 4,1 4))\",\n\t\t\trelate: \"212111212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +-------+-------+\n\t\t\t | A | B |\n\t\t\t | +---+---+ |\n\t\t\t | | | |\n\t\t\t | +---+---+ |\n\t\t\t | | |\n\t\t\t +-------+-------+\n\t\t\t*/\n\n\t\t\tinput1: \"POLYGON((0 0,2 0,2 1,1 1,1 2,2 2,2 3,0 3,0 0))\",\n\t\t\tinput2: \"POLYGON((2 0,4 0,4 3,2 3,2 2,3 2,3 1,2 1,2 0))\",\n\t\t\tunion: \"POLYGON((2 0,0 0,0 3,2 3,4 3,4 0,2 0),(2 2,1 2,1 1,2 1,3 1,3 2,2 2))\",\n\t\t\tinter: \"MULTILINESTRING((2 0,2 1),(2 2,2 3))\",\n\t\t\tfwdDiff: \"POLYGON((2 0,0 0,0 3,2 3,2 2,1 2,1 1,2 1,2 0))\",\n\t\t\trevDiff: \"POLYGON((2 3,4 3,4 0,2 0,2 1,3 1,3 2,2 2,2 3))\",\n\t\t\tsymDiff: \"POLYGON((2 3,4 3,4 0,2 0,0 0,0 3,2 3),(2 1,3 1,3 2,2 2,1 2,1 1,2 1))\",\n\t\t\trelate: \"FF2F11212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t *-------------+\n\t\t\t |\\`. B |\n\t\t\t | \\ `. |\n\t\t\t | \\ `. |\n\t\t\t | \\ `* |\n\t\t\t | * \\ |\n\t\t\t | `. \\ |\n\t\t\t | `. \\ |\n\t\t\t | A `. 
\\|\n\t\t\t +-----------`-*\n\t\t\t*/\n\n\t\t\tinput1: \"POLYGON((0 0,3 0,1 1,0 3,0 0))\",\n\t\t\tinput2: \"POLYGON((3 0,3 3,0 3,2 2,3 0))\",\n\t\t\tunion: \"MULTIPOLYGON(((3 0,0 0,0 3,1 1,3 0)),((0 3,3 3,3 0,2 2,0 3)))\",\n\t\t\tinter: \"MULTIPOINT(0 3,3 0)\",\n\t\t\tfwdDiff: \"POLYGON((3 0,0 0,0 3,1 1,3 0))\",\n\t\t\trevDiff: \"POLYGON((0 3,3 3,3 0,2 2,0 3))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((0 3,3 3,3 0,2 2,0 3)),((3 0,0 0,0 3,1 1,3 0)))\",\n\t\t\trelate: \"FF2F01212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +\n\t\t\t |A\n\t\t\t | B\n\t\t\t +----+\n\t\t\t*/\n\t\t\tinput1: \"LINESTRING(0 0,0 1)\",\n\t\t\tinput2: \"LINESTRING(0 0,1 0)\",\n\t\t\tunion: \"MULTILINESTRING((0 0,0 1),(0 0,1 0))\",\n\t\t\tinter: \"POINT(0 0)\",\n\t\t\tfwdDiff: \"LINESTRING(0 0,0 1)\",\n\t\t\trevDiff: \"LINESTRING(0 0,1 0)\",\n\t\t\tsymDiff: \"MULTILINESTRING((0 0,1 0),(0 0,0 1))\",\n\t\t\trelate: \"FF1F00102\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t + +\n\t\t\t | |\n\t\t\t A B\n\t\t\t | |\n\t\t\t +--A&B--+\n\t\t\t*/\n\t\t\tinput1: \"LINESTRING(0 1,0 0,1 0)\",\n\t\t\tinput2: \"LINESTRING(0 0,1 0,1 1)\",\n\t\t\tunion: \"MULTILINESTRING((0 1,0 0),(0 0,1 0),(1 0,1 1))\",\n\t\t\tinter: \"LINESTRING(0 0,1 0)\",\n\t\t\tfwdDiff: \"LINESTRING(0 1,0 0)\",\n\t\t\trevDiff: \"LINESTRING(1 0,1 1)\",\n\t\t\tsymDiff: \"MULTILINESTRING((1 0,1 1),(0 1,0 0))\",\n\t\t\trelate: \"1010F0102\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t \\ /\n\t\t\t \\ /\n\t\t\t B A\n\t\t\t \\/\n\t\t\t /\\\n\t\t\t A B\n\t\t\t / \\\n\t\t\t / \\\n\t\t\t*/\n\t\t\tinput1: \"LINESTRING(0 0,1 1)\",\n\t\t\tinput2: \"LINESTRING(0 1,1 0)\",\n\t\t\tunion: \"MULTILINESTRING((0 0,0.5 0.5),(0.5 0.5,1 1),(0 1,0.5 0.5),(0.5 0.5,1 0))\",\n\t\t\tinter: \"POINT(0.5 0.5)\",\n\t\t\tfwdDiff: \"MULTILINESTRING((0 0,0.5 0.5),(0.5 0.5,1 1))\",\n\t\t\trevDiff: \"MULTILINESTRING((0 1,0.5 0.5),(0.5 0.5,1 0))\",\n\t\t\tsymDiff: \"MULTILINESTRING((0 1,0.5 0.5),(0.5 0.5,1 0),(0 0,0.5 0.5),(0.5 0.5,1 1))\",\n\t\t\trelate: \"0F1FF0102\",\n\t\t},\n\t\t{\n\t\t\t// +---A---+\n\t\t\t// | |\n\t\t\t// B B\n\t\t\t// | |\n\t\t\t// +---A---+\n\t\t\t//\n\t\t\tinput1: \"MULTILINESTRING((0 0,1 0),(0 1,1 1))\",\n\t\t\tinput2: \"MULTILINESTRING((0 0,0 1),(1 0,1 1))\",\n\t\t\tunion: \"MULTILINESTRING((0 0,1 0),(0 1,1 1),(0 0,0 1),(1 0,1 1))\",\n\t\t\tinter: \"MULTIPOINT(0 0,0 1,1 0,1 1)\",\n\t\t\tfwdDiff: \"MULTILINESTRING((0 0,1 0),(0 1,1 1))\",\n\t\t\trevDiff: \"MULTILINESTRING((0 0,0 1),(1 0,1 1))\",\n\t\t\tsymDiff: \"MULTILINESTRING((0 0,0 1),(1 0,1 1),(0 0,1 0),(0 1,1 1))\",\n\t\t\trelate: \"FF1F0F1F2\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +--A&B--+---A---+\n\t\t\t | | |\n\t\t\t A&B B A\n\t\t\t | | |\n\t\t\t +---A---+---A---+\n\t\t\t | |\n\t\t\t B B\n\t\t\t | |\n\t\t\t +---B---+\n\t\t\t*/\n\t\t\tinput1: \"LINESTRING(0 2,2 2,2 1,0 1,0 2)\",\n\t\t\tinput2: \"LINESTRING(1 2,1 0,0 0,0 2,1 2)\",\n\t\t\tunion: \"MULTILINESTRING((0 2,1 2),(1 2,2 2,2 1,1 1),(1 1,0 1),(0 1,0 2),(1 2,1 1),(1 1,1 0,0 0,0 1))\",\n\t\t\tinter: \"GEOMETRYCOLLECTION(POINT(1 1),LINESTRING(0 2,1 2),LINESTRING(0 1,0 2))\",\n\t\t\tfwdDiff: \"MULTILINESTRING((1 2,2 2,2 1,1 1),(1 1,0 1))\",\n\t\t\trevDiff: \"MULTILINESTRING((1 2,1 1),(1 1,1 0,0 0,0 1))\",\n\t\t\tsymDiff: \"MULTILINESTRING((1 2,2 2,2 1,1 1),(1 1,0 1),(1 2,1 1),(1 1,1 0,0 0,0 1))\",\n\t\t\trelate: \"1F1FFF1F2\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +---------+\n\t\t\t `, ,` `,\n\t\t\t `, ,` `,\n\t\t\t ,`, ,`\n\t\t\t ,` `, ,`\n\t\t\t +` `+`\n\n\t\t\t*/\n\t\t\tinput1: \"LINESTRING(0 0,2 2,0 2,2 0)\",\n\t\t\tinput2: \"LINESTRING(2 0,3 1,2 2)\",\n\t\t\tunion: \"MULTILINESTRING((0 0,1 1),(1 1,2 
2),(2 2,0 2,1 1),(1 1,2 0),(2 0,3 1,2 2))\",\n\t\t\tinter: \"MULTIPOINT(2 0,2 2)\",\n\t\t\tfwdDiff: \"MULTILINESTRING((0 0,1 1),(1 1,2 2),(2 2,0 2,1 1),(1 1,2 0))\",\n\t\t\trevDiff: \"LINESTRING(2 0,3 1,2 2)\",\n\t\t\tsymDiff: \"MULTILINESTRING((0 0,1 1),(1 1,2 2),(2 2,0 2,1 1),(1 1,2 0),(2 0,3 1,2 2))\",\n\t\t\trelate: \"F01F001F2\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +\n\t\t\t |\n\t\t\t +---+---+\n\t\t\t | | |\n\t\t\t | + |\n\t\t\t | |\n\t\t\t +-------+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,0 2,2 2,2 0,0 0))\",\n\t\t\tinput2: \"LINESTRING(1 1,1 3)\",\n\t\t\tunion: \"GEOMETRYCOLLECTION(LINESTRING(1 2,1 3),POLYGON((0 0,0 2,1 2,2 2,2 0,0 0)))\",\n\t\t\tinter: \"LINESTRING(1 1,1 2)\",\n\t\t\tfwdDiff: \"POLYGON((0 0,0 2,1 2,2 2,2 0,0 0))\",\n\t\t\trevDiff: \"LINESTRING(1 2,1 3)\",\n\t\t\tsymDiff: \"GEOMETRYCOLLECTION(LINESTRING(1 2,1 3),POLYGON((0 0,0 2,1 2,2 2,2 0,0 0)))\",\n\t\t\trelate: \"1020F1102\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +--------+\n\t\t\t | , |\n\t\t\t | ,` |\n\t\t\t | ` |\n\t\t\t +--------+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,0 3,3 3,3 0,0 0))\",\n\t\t\tinput2: \"LINESTRING(1 1,2 2)\",\n\t\t\tunion: \"POLYGON((0 0,0 3,3 3,3 0,0 0))\",\n\t\t\tinter: \"LINESTRING(1 1,2 2)\",\n\t\t\tfwdDiff: \"POLYGON((0 0,0 3,3 3,3 0,0 0))\",\n\t\t\trevDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\tsymDiff: \"POLYGON((0 0,0 3,3 3,3 0,0 0))\",\n\t\t\trelate: \"102FF1FF2\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +---+---+---+\n\t\t\t | A |A&B|\n\t\t\t +---+---+---+\n\t\t\t |A&B| B |\n\t\t\t +---+---+---+\n\t\t\t | A |A&B|\n\t\t\t +---+---+---+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,3 0,3 1,1 1,1 2,3 2,3 3,0 3,0 0))\",\n\t\t\tinput2: \"POLYGON((0 1,0 2,2 2,2 3,3 3,3 0,2 0,2 1,0 1))\",\n\t\t\tunion: \"POLYGON((2 0,0 0,0 1,0 2,0 3,2 3,3 3,3 2,3 1,3 0,2 0))\",\n\t\t\tinter: \"GEOMETRYCOLLECTION(LINESTRING(2 1,1 1),LINESTRING(1 2,2 2),POLYGON((3 0,2 0,2 1,3 1,3 0)),POLYGON((1 2,1 1,0 1,0 2,1 2)),POLYGON((3 2,2 2,2 3,3 3,3 2)))\",\n\t\t\tfwdDiff: \"MULTIPOLYGON(((2 0,0 0,0 1,1 1,2 1,2 0)),((2 2,1 2,0 2,0 3,2 3,2 2)))\",\n\t\t\trevDiff: \"POLYGON((1 2,2 2,3 2,3 1,2 1,1 1,1 2))\",\n\t\t\tsymDiff: \"POLYGON((1 2,0 2,0 3,2 3,2 2,3 2,3 1,2 1,2 0,0 0,0 1,1 1,1 2))\",\n\t\t\trelate: \"212111212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t + + +\n\t\t\t A A&B B\n\t\t\t*/\n\t\t\tinput1: \"MULTIPOINT(0 0,1 1)\",\n\t\t\tinput2: \"MULTIPOINT(1 1,2 2)\",\n\t\t\tunion: \"MULTIPOINT(0 0,1 1,2 2)\",\n\t\t\tinter: \"POINT(1 1)\",\n\t\t\tfwdDiff: \"POINT(0 0)\",\n\t\t\trevDiff: \"POINT(2 2)\",\n\t\t\tsymDiff: \"MULTIPOINT(0 0,2 2)\",\n\t\t\trelate: \"0F0FFF0F2\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +-------+\n\t\t\t | |\n\t\t\t | + | +\n\t\t\t | |\n\t\t\t +-------+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,0 2,2 2,2 0,0 0))\",\n\t\t\tinput2: \"MULTIPOINT(1 1,3 1)\",\n\t\t\tunion: \"GEOMETRYCOLLECTION(POINT(3 1),POLYGON((0 0,0 2,2 2,2 1,2 0,0 0)))\",\n\t\t\tinter: \"POINT(1 1)\",\n\t\t\tfwdDiff: \"POLYGON((0 0,0 2,2 2,2 1,2 0,0 0))\",\n\t\t\trevDiff: \"POINT(3 1)\",\n\t\t\tsymDiff: \"GEOMETRYCOLLECTION(POINT(3 1),POLYGON((0 0,0 2,2 2,2 1,2 0,0 0)))\",\n\t\t\trelate: \"0F2FF10F2\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +\n\t\t\t |\\\n\t\t\t | \\\n\t\t\t | \\\n\t\t\t | \\\n\t\t\t | \\\n\t\t\t O-----+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,0 1,1 0,0 0))\",\n\t\t\tinput2: \"POINT(0 0)\",\n\t\t\tunion: \"POLYGON((0 0,0 1,1 0,0 0))\",\n\t\t\tinter: \"POINT(0 0)\",\n\t\t\tfwdDiff: \"POLYGON((0 0,0 1,1 0,0 0))\",\n\t\t\trevDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\tsymDiff: \"POLYGON((0 0,0 1,1 0,0 0))\",\n\t\t\trelate: 
\"FF20F1FF2\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +\n\t\t\t |\\\n\t\t\t | \\\n\t\t\t | O\n\t\t\t | \\\n\t\t\t | \\\n\t\t\t +-----+\n\t\t\t*/\n\t\t\tinput1: \"POLYGON((0 0,0 1,1 0,0 0))\",\n\t\t\tinput2: \"POINT(0.5 0.5)\",\n\t\t\tunion: \"POLYGON((0 0,0 1,0.5 0.5,1 0,0 0))\",\n\t\t\tinter: \"POINT(0.5 0.5)\",\n\t\t\tfwdDiff: \"POLYGON((0 0,0 1,0.5 0.5,1 0,0 0))\",\n\t\t\trevDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\tsymDiff: \"POLYGON((0 0,0 1,0.5 0.5,1 0,0 0))\",\n\t\t\trelate: \"FF20F1FF2\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +-------+\n\t\t\t | |\n\t\t\t | + |\n\t\t\t | |\n\t\t\t +-------+\n\t\t\t*/\n\t\t\tinput1: \"LINESTRING(0 0,0 1,1 1,1 0,0 0,0 1)\", // overlapping line segment\n\t\t\tinput2: \"POINT(0.5 0.5)\",\n\t\t\tunion: \"GEOMETRYCOLLECTION(LINESTRING(0 0,0 1),LINESTRING(0 1,1 1,1 0,0 0),POINT(0.5 0.5))\",\n\t\t\tinter: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\tfwdDiff: \"MULTILINESTRING((0 0,0 1),(0 1,1 1,1 0,0 0))\",\n\t\t\trevDiff: \"POINT(0.5 0.5)\",\n\t\t\tsymDiff: \"GEOMETRYCOLLECTION(LINESTRING(0 0,0 1),LINESTRING(0 1,1 1,1 0,0 0),POINT(0.5 0.5))\",\n\t\t\trelate: \"FF1FF00F2\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t +\n\t\t\t /\n\t\t\t *\n\t\t\t /\n\t\t\t +\n\t\t\t*/\n\t\t\tinput1: \"LINESTRING(0 0,1 1)\",\n\t\t\tinput2: \"POINT(0.35355339059327373 0.35355339059327373)\",\n\t\t\tunion: \"MULTILINESTRING((0 0,0.35355339059327373 0.35355339059327373),(0.35355339059327373 0.35355339059327373,1 1))\",\n\t\t\tinter: \"POINT(0.35355339059327373 0.35355339059327373)\",\n\t\t\tfwdDiff: \"MULTILINESTRING((0 0,0.35355339059327373 0.35355339059327373),(0.35355339059327373 0.35355339059327373,1 1))\",\n\t\t\trevDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\tsymDiff: \"MULTILINESTRING((0 0,0.35355339059327373 0.35355339059327373),(0.35355339059327373 0.35355339059327373,1 1))\",\n\t\t\trelate: \"0F1FF0FF2\",\n\t\t},\n\t\t{\n\t\t\t// LineString with a Point in the middle of it.\n\t\t\tinput1: \"POINT(5 5)\",\n\t\t\tinput2: \"LINESTRING(1 2,9 8)\",\n\t\t\tunion: \"MULTILINESTRING((1 2,5 5),(5 5,9 8))\",\n\t\t\tinter: \"POINT(5 5)\",\n\t\t\tfwdDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\trevDiff: \"MULTILINESTRING((1 2,5 5),(5 5,9 8))\",\n\t\t\tsymDiff: \"MULTILINESTRING((1 2,5 5),(5 5,9 8))\",\n\t\t\trelate: \"0FFFFF102\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t *\n\t\t\t + /\n\t\t\t \\/\n\t\t\t /\\\n\t\t\t * *\n\t\t\t*/\n\n\t\t\t// Tests a case where intersection between two segments is *not* commutative if done naively.\n\t\t\tinput1: \"LINESTRING(0 0,1 2)\",\n\t\t\tinput2: \"LINESTRING(0 1,1 0)\",\n\t\t\tunion: \"MULTILINESTRING((0 0,0.3333333333 0.6666666667),(0.3333333333 0.6666666667,1 2),(0 1,0.3333333333 0.6666666667),(0.3333333333 0.6666666667,1 0))\",\n\t\t\tinter: \"POINT(0.3333333333 0.6666666667)\",\n\t\t\tfwdDiff: \"MULTILINESTRING((0 0,0.3333333333 0.6666666667),(0.3333333333 0.6666666667,1 2))\",\n\t\t\trevDiff: \"MULTILINESTRING((0 1,0.3333333333 0.6666666667),(0.3333333333 0.6666666667,1 0))\",\n\t\t\tsymDiff: \"MULTILINESTRING((0 1,0.3333333333 0.6666666667),(0.3333333333 0.6666666667,1 0),(0 0,0.3333333333 0.6666666667),(0.3333333333 0.6666666667,1 2))\",\n\t\t\trelate: \"0F1FF0102\",\n\t\t},\n\t\t{\n\t\t\t// Similar case for when line segment non-commutative operations are\n\t\t\t// done, but this time with a line segment doubling back on itself.\n\t\t\tinput1: \"LINESTRING(0 0,1 2,0 0)\",\n\t\t\tinput2: \"LINESTRING(0 1,1 0)\",\n\t\t\tunion: \"MULTILINESTRING((0 0,0.3333333333 0.6666666667),(0.3333333333 0.6666666667,1 2),(0 1,0.3333333333 0.6666666667),(0.3333333333 0.6666666667,1 
0))\",\n\t\t\tinter: \"POINT(0.3333333333 0.6666666667)\",\n\t\t\tfwdDiff: \"MULTILINESTRING((0 0,0.3333333333 0.6666666667),(0.3333333333 0.6666666667,1 2))\",\n\t\t\trevDiff: \"MULTILINESTRING((0 1,0.3333333333 0.6666666667),(0.3333333333 0.6666666667,1 0))\",\n\t\t\tsymDiff: \"MULTILINESTRING((0 1,0.3333333333 0.6666666667),(0.3333333333 0.6666666667,1 0),(0 0,0.3333333333 0.6666666667),(0.3333333333 0.6666666667,1 2))\",\n\t\t\trelate: \"0F1FFF102\",\n\t\t},\n\n\t\t// In the following test cases, lines from the first input intersect\n\t\t// *almost* exactly with one of the vertices in the second input.\n\t\t{\n\t\t\tinput1: \"LINESTRING(-1 1,1 -1)\",\n\t\t\tinput2: \"POLYGON((-1 0,-0.070710678118655 0.070710678118655,0 1,-1 0))\",\n\t\t\tunion: \"GEOMETRYCOLLECTION(LINESTRING(-1 1,-0.5 0.5),LINESTRING(-0.070710678118655 0.070710678118655,1 -1),POLYGON((-1 0,-0.5 0.5,0 1,-0.070710678118655 0.070710678118655,-1 0)))\",\n\t\t\tinter: \"LINESTRING(-0.5 0.5,-0.070710678118655 0.070710678118655)\",\n\t\t\tfwdDiff: \"MULTILINESTRING((-1 1,-0.5 0.5),(-0.070710678118655 0.070710678118655,1 -1))\",\n\t\t\trevDiff: \"POLYGON((-1 0,-0.5 0.5,0 1,-0.070710678118655 0.070710678118655,-1 0))\",\n\t\t\tsymDiff: \"GEOMETRYCOLLECTION(LINESTRING(-1 1,-0.5 0.5),LINESTRING(-0.070710678118655 0.070710678118655,1 -1),POLYGON((-1 0,-0.5 0.5,0 1,-0.070710678118655 0.070710678118655,-1 0)))\",\n\t\t\trelate: \"101FF0212\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"LINESTRING(0 0,1 1)\",\n\t\t\tinput2: \"LINESTRING(1 0,0.5000000000000001 0.5,0 1)\",\n\t\t\tunion: \"MULTILINESTRING((0 0,0.5 0.5),(0.5 0.5,1 1),(1 0,0.5 0.5),(0.5 0.5,0 1))\",\n\t\t\tinter: \"POINT(0.5 0.5)\",\n\t\t\tfwdDiff: \"MULTILINESTRING((0 0,0.5 0.5),(0.5 0.5,1 1))\",\n\t\t\trevDiff: \"MULTILINESTRING((1 0,0.5 0.5),(0.5 0.5,0 1))\",\n\t\t\tsymDiff: \"MULTILINESTRING((1 0,0.5 0.5),(0.5 0.5,0 1),(0 0,0.5 0.5),(0.5 0.5,1 1))\",\n\t\t\trelate: \"0F1FF0102\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t + +\n\t\t\t |\\ |\\\n\t\t\t | \\ | \\\n\t\t\t +--+--+--+ -> +--+ +--+\n\t\t\t | \\ | \\\n\t\t\t | \\ | \\\n\t\t\t +-----+ +-----+\n\t\t\t*/\n\t\t\tinput1: \"GEOMETRYCOLLECTION(POLYGON((1 0,3 2,1 2,1 0)))\",\n\t\t\tinput2: \"GEOMETRYCOLLECTION(LINESTRING(0 1,3 1))\",\n\t\t\tunion: \"GEOMETRYCOLLECTION(POLYGON((1 0,2 1,3 2,1 2,1 1,1 0)),LINESTRING(0 1,1 1),LINESTRING(2 1,3 1))\",\n\t\t\tinter: \"LINESTRING(1 1,2 1)\",\n\t\t\tfwdDiff: \"POLYGON((1 0,2 1,3 2,1 2,1 1,1 0))\",\n\t\t\trevDiff: \"MULTILINESTRING((0 1,1 1),(2 1,3 1))\",\n\t\t\tsymDiff: \"GEOMETRYCOLLECTION(POLYGON((1 0,2 1,3 2,1 2,1 1,1 0)),LINESTRING(0 1,1 1),LINESTRING(2 1,3 1))\",\n\t\t\trelate: \"1F20F1102\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t Reproduces a bug with set ops between self-intersecting GeometryCollections.\n\t\t\t + +\n\t\t\t |\\ |\n\t\t\t | \\|\n\t\t\t + | +\n\t\t\t |\\ | |\\\n\t\t\t | \\| | \\\n\t\t\t | + | \\\n\t\t\t | |\\ | \\\n\t\t\t | | \\| \\\n\t\t\t +--+--+--+-----+--+1B\n\t\t\t | | |\\ \\\n\t\t\t | | | \\ 2A \\\n\t\t\t | +--+--+-----+\n\t\t\t | | \\\n\t\t\t | 1A | \\\n\t\t\t +-----+-----+\n\t\t\t |\n\t\t\t |2B\n\t\t\t +\n\t\t\t*/\n\t\t\tinput1: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((1 1,5 5,1 5,1 1)),\n\t\t\t\tLINESTRING(0 3,6 3))`,\n\t\t\tinput2: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((2 0,6 4,2 4,2 0)),\n\t\t\t\tLINESTRING(3 0,3 6))`,\n\t\t\tunion: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((2 2,2 0,3 1,5 3,6 4,4 4,5 5,3 5,1 5,1 3,1 1,2 2)),\n\t\t\t\tLINESTRING(0 3,1 3),\n\t\t\t\tLINESTRING(5 3,6 3),\n\t\t\t\tLINESTRING(3 0,3 1),\n\t\t\t\tLINESTRING(3 5,3 6))`,\n\t\t\tinter: 
`GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((2 2,3 3,4 4,3 4,2 4,2 3,2 2)),\n\t\t\t\tLINESTRING(3 3,5 3),\n\t\t\t\tLINESTRING(3 4,3 5))`,\n\t\t\tfwdDiff: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((1 1,2 2,2 3,2 4,3 4,4 4,5 5,3 5,1 5,1 3,1 1)),\n\t\t\t\tLINESTRING(0 3,1 3),\n\t\t\t\tLINESTRING(5 3,6 3))`,\n\t\t\trevDiff: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((3 1,5 3,6 4,4 4,3 3,2 2,2 0,3 1)),\n\t\t\t\tLINESTRING(3 0,3 1),\n\t\t\t\tLINESTRING(3 5,3 6))`,\n\t\t\tsymDiff: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((1 1,2 2,2 3,2 4,3 4,4 4,5 5,3 5,1 5,1 3,1 1)),\n\t\t\t\tPOLYGON((3 1,5 3,6 4,4 4,3 3,2 2,2 0,3 1)),\n\t\t\t\tLINESTRING(0 3,1 3),\n\t\t\t\tLINESTRING(5 3,6 3),\n\t\t\t\tLINESTRING(3 0,3 1),\n\t\t\t\tLINESTRING(3 5,3 6))`,\n\t\t\trelate: `212101212`,\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t Reproduces a bug with set ops between self-intersecting GeometryCollections.\n\t\t\t Similar to the previous case, but none of the crossing points are coincident.\n\t\t\t + +\n\t\t\t |\\ |\n\t\t\t | \\|\n\t\t\t + | +\n\t\t\t |\\ | |\\\n\t\t\t | \\| | \\\n\t\t\t | + | \\\n\t\t\t | |\\ | \\\n\t\t\t | | \\| \\\n\t\t\t | | + \\\n\t\t\t | | |\\ \\\n\t\t\t | | | \\ \\\n\t\t\t +--+--+--+--+--+--+--+1B\n\t\t\t | | | \\ \\\n\t\t\t | | | \\ 2A \\\n\t\t\t | +--+-----+-----+\n\t\t\t | | \\\n\t\t\t | 1A | \\\n\t\t\t +-----+--------+\n\t\t\t |\n\t\t\t |2B\n\t\t\t +\n\t\t\t*/\n\t\t\tinput1: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((1 1,6 6,1 6,1 1)),\n\t\t\t\tLINESTRING(0 4,7 4))`,\n\t\t\tinput2: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((2 0,7 5,2 5,2 0)),\n\t\t\t\tLINESTRING(3 0,3 7))`,\n\t\t\tunion: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((2 2,2 0,3 1,6 4,7 5,5 5,6 6,3 6,1 6,1 4,1 1,2 2)),\n\t\t\t\tLINESTRING(0 4,1 4),\n\t\t\t\tLINESTRING(6 4,7 4),\n\t\t\t\tLINESTRING(3 0,3 1),\n\t\t\t\tLINESTRING(3 6,3 7))`,\n\t\t\tinter: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((2 2,3 3,4 4,5 5,3 5,2 5,2 4,2 2)),\n\t\t\t\tLINESTRING(4 4,6 4),\n\t\t\t\tLINESTRING(3 5,3 6))`,\n\t\t\tfwdDiff: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((5 5,6 6,3 6,1 6,1 4,1 1,2 2,2 4,2 5,3 5,5 5)),\n\t\t\t\tLINESTRING(0 4,1 4),\n\t\t\t\tLINESTRING(6 4,7 4))`,\n\t\t\trevDiff: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((2 0,3 1,6 4,7 5,5 5,4 4,3 3,2 2,2 0)),\n\t\t\t\tLINESTRING(3 0,3 1),\n\t\t\t\tLINESTRING(3 6,3 7))`,\n\t\t\tsymDiff: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((3 6,1 6,1 4,1 1,2 2,2 4,2 5,3 5,5 5,6 6,3 6)),\n\t\t\t\tPOLYGON((3 3,2 2,2 0,3 1,6 4,7 5,5 5,4 4,3 3)),\n\t\t\t\tLINESTRING(0 4,1 4),\n\t\t\t\tLINESTRING(6 4,7 4),\n\t\t\t\tLINESTRING(3 0,3 1),\n\t\t\t\tLINESTRING(3 6,3 7))`,\n\t\t\trelate: `212101212`,\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t\t+-----+--+ +-----+--+\n\t\t\t\t| 1A |2 | | |\n\t\t\t\t| +--+--+ | +\n\t\t\t\t| | | | -> | |\n\t\t\t\t+--+--+ | +--+ |\n\t\t\t\t | 1B | | |\n\t\t\t\t +--+--+ +--+--+\n\t\t\t*/\n\t\t\tinput1: \"GEOMETRYCOLLECTION(POLYGON((0 0,2 0,2 2,0 2,0 0)),POLYGON((1 1,3 1,3 3,1 3,1 1)))\",\n\t\t\tinput2: \"POLYGON((2 0,3 0,3 1,2 1,2 0))\",\n\t\t\tunion: \"POLYGON((2 0,3 0,3 1,3 3,1 3,1 2,0 2,0 0,2 0))\",\n\t\t\tinter: \"MULTILINESTRING((2 1,3 1),(2 0,2 1))\",\n\t\t\tfwdDiff: \"POLYGON((1 2,0 2,0 0,2 0,2 1,3 1,3 3,1 3,1 2))\",\n\t\t\trevDiff: \"POLYGON((2 0,3 0,3 1,2 1,2 0))\",\n\t\t\tsymDiff: \"POLYGON((0 0,2 0,3 0,3 1,3 3,1 3,1 2,0 2,0 0))\",\n\t\t\trelate: \"FF2F11212\",\n\t\t},\n\t\t{\n\t\t\t/*\n\t\t\t\t +--------+ +--------+\n\t\t\t\t | | | |\n\t\t\t\t | 1A | | |\n\t\t\t\t | | | |\n\t\t\t\t+-----+--+ +--+-----+ +-----+ +-----+\n\t\t\t\t| | | | | | | |\n\t\t\t\t| +--+--+--+ | | +--+ |\n\t\t\t\t| 2A | | 2B | -> | | | |\n\t\t\t\t| +--+--+--+ | | +--+ 
|\n\t\t\t\t| | | | | | | |\n\t\t\t\t+-----+--+ +--+-----+ +-----+ +-----+\n\t\t\t\t | | | |\n\t\t\t\t | 1B | | |\n\t\t\t\t | | | |\n\t\t\t\t +--------+ +--------+\n\t\t\t*/\n\t\t\tinput1: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((2 0,5 0,5 3,2 3,2 0)),\n\t\t\t\tPOLYGON((2 4,5 4,5 7,2 7,2 4)))`,\n\t\t\tinput2: `GEOMETRYCOLLECTION(\n\t\t\t\tPOLYGON((0 2,3 2,3 5,0 5,0 2)),\n\t\t\t\tPOLYGON((4 2,7 2,7 5,4 5,4 2)))`,\n\t\t\tunion: `POLYGON(\n\t\t\t\t(0 2,2 2,2 0,5 0,5 2,7 2,7 5,5 5,5 7,2 7,2 5,0 5,0 2),\n\t\t\t\t(3 3,3 4,4 4,4 3,3 3))`,\n\t\t\tinter: `MULTIPOLYGON(\n\t\t\t\t((2 2,3 2,3 3,2 3,2 2)),\n\t\t\t\t((2 4,3 4,3 5,2 5,2 4)),\n\t\t\t\t((4 2,5 2,5 3,4 3,4 2)),\n\t\t\t\t((4 4,5 4,5 5,4 5,4 4)))`,\n\t\t\tfwdDiff: `MULTIPOLYGON(\n\t\t\t\t((2 0,5 0,5 2,4 2,4 3,3 3,3 2,2 2,2 0)),\n\t\t\t\t((3 4,4 4,4 5,5 5,5 7,2 7,2 5,3 5,3 4)))`,\n\t\t\trevDiff: `MULTIPOLYGON(\n\t\t\t\t((0 2,2 2,2 3,3 3,3 4,2 4,2 5,0 5,0 2)),\n\t\t\t\t((5 2,7 2,7 5,5 5,5 4,4 4,4 3,5 3,5 2)))`,\n\t\t\tsymDiff: `MULTIPOLYGON(\n\t\t\t\t((2 0,5 0,5 2,4 2,4 3,3 3,3 2,2 2,2 0)),\n\t\t\t\t((2 2,2 3,3 3,3 4,2 4,2 5,0 5,0 2,2 2)),\n\t\t\t\t((3 4,4 4,4 5,5 5,5 7,2 7,2 5,3 5,3 4)),\n\t\t\t\t((4 3,5 3,5 2,7 2,7 5,5 5,5 4,4 4,4 3)))`,\n\t\t\trelate: \"212101212\",\n\t\t},\n\n\t\t// Empty cases for relate.\n\t\t{input1: \"POINT EMPTY\", input2: \"POINT(0 0)\", relate: \"FFFFFF0F2\"},\n\t\t{input1: \"POINT EMPTY\", input2: \"LINESTRING(0 0,1 1)\", relate: \"FFFFFF102\"},\n\t\t{input1: \"POINT EMPTY\", input2: \"LINESTRING(0 0,0 1,1 0,0 0)\", relate: \"FFFFFF1F2\"},\n\t\t{input1: \"POINT EMPTY\", input2: \"POLYGON((0 0,0 1,1 0,0 0))\", relate: \"FFFFFF212\"},\n\n\t\t// Cases involving geometry collections where polygons from one of the\n\t\t// inputs interact with each other.\n\t\t{\n\t\t\tinput1: `GEOMETRYCOLLECTION(\n\t\t\t\t\t\tPOLYGON((0 0,1 0,0 1,0 0)),\n\t\t\t\t\t\tPOLYGON((0 0,1 1,0 1,0 0)))`,\n\t\t\tinput2: \"LINESTRING(0 0,1 1)\",\n\t\t\tunion: \"POLYGON((0 0,1 0,0.5 0.5,1 1,0 1,0 0))\",\n\t\t\tinter: \"MULTILINESTRING((0 0,0.5 0.5),(0.5 0.5,1 1))\",\n\t\t\tfwdDiff: \"POLYGON((0 0,1 0,0.5 0.5,1 1,0 1,0 0))\",\n\t\t\trevDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\tsymDiff: \"POLYGON((0 0,1 0,0.5 0.5,1 1,0 1,0 0))\",\n\t\t\trelate: \"1F2101FF2\",\n\t\t},\n\t\t{\n\t\t\tinput1: `GEOMETRYCOLLECTION(\n\t\t\t\t\t\tPOLYGON((0 0,1 0,0 1,0 0)),\n\t\t\t\t\t\tPOLYGON((1 1,0 1,1 0,1 1)))`,\n\t\t\tinput2: \"POLYGON((0 0,2 0,2 2,0 2,0 0))\",\n\t\t\tunion: \"POLYGON((0 0,1 0,2 0,2 2,0 2,0 1,0 0))\",\n\t\t\tinter: \"POLYGON((0 0,1 0,1 1,0 1,0 0))\",\n\t\t\tfwdDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\trevDiff: \"POLYGON((1 0,2 0,2 2,0 2,0 1,1 1,1 0))\",\n\t\t\tsymDiff: \"POLYGON((1 0,2 0,2 2,0 2,0 1,1 1,1 0))\",\n\t\t\trelate: \"2FF11F212\",\n\t\t},\n\t\t{\n\t\t\tinput1: `GEOMETRYCOLLECTION(\n\t\t\t\t\t\tPOLYGON((0 0,2 0,2 1,0 1,0 0)),\n\t\t\t\t\t\tPOLYGON((0 0,1 0,1 2,0 2,0 0)))`,\n\t\t\tinput2: \"POLYGON((1 0,2 1,1 2,0 1,1 0))\",\n\t\t\tunion: \"POLYGON((0 0,1 0,2 0,2 1,1 2,0 2,0 1,0 0))\",\n\t\t\tinter: \"POLYGON((1 0,2 1,1 1,1 2,0 1,1 0))\",\n\t\t\tfwdDiff: \"MULTIPOLYGON(((0 0,1 0,0 1,0 0)),((1 0,2 0,2 1,1 0)),((0 1,1 2,0 2,0 1)))\",\n\t\t\trevDiff: \"POLYGON((1 1,2 1,1 2,1 1))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((0 0,1 0,0 1,0 0)),((1 0,2 0,2 1,1 0)),((0 1,1 2,0 2,0 1)),((1 1,2 1,1 2,1 1)))\",\n\t\t\trelate: \"212101212\",\n\t\t},\n\n\t\t// Bug reproductions:\n\t\t{\n\t\t\tinput1: \"LINESTRING(-1 1,1 -1)\",\n\t\t\tinput2: \"MULTILINESTRING((1 0,0 1),(0 1,1 2),(2 0,3 1),(3 1,2 2))\",\n\t\t\tunion: \"MULTILINESTRING((-1 1,1 -1),(1 0,0 1),(0 1,1 2),(2 
0,3 1),(3 1,2 2))\",\n\t\t\tinter: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\tfwdDiff: \"LINESTRING(-1 1,1 -1)\",\n\t\t\trevDiff: \"MULTILINESTRING((1 0,0 1),(0 1,1 2),(2 0,3 1),(3 1,2 2))\",\n\t\t\tsymDiff: \"MULTILINESTRING((1 0,0 1),(0 1,1 2),(2 0,3 1),(3 1,2 2),(-1 1,1 -1))\",\n\t\t\trelate: \"FF1FF0102\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"LINESTRING(0 1,1 0)\",\n\t\t\tinput2: \"MULTIPOLYGON(((0 0,0 1,1 1,1 0,0 0)),((2 0,2 1,3 1,3 0,2 0)))\",\n\t\t\tunion: \"MULTIPOLYGON(((0 0,0 1,1 1,1 0.5,1 0,0 0)),((2 0,2 1,3 1,3 0,2 0)))\",\n\t\t\tinter: \"LINESTRING(0 1,1 0)\",\n\t\t\tfwdDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\trevDiff: \"MULTIPOLYGON(((0 0,0 1,1 1,1 0.5,1 0,0 0)),((2 0,2 1,3 1,3 0,2 0)))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((0 0,0 1,1 1,1 0.5,1 0,0 0)),((2 0,2 1,3 1,3 0,2 0)))\",\n\t\t\trelate: \"1FFF0F212\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"POLYGON((1 0,0 1,1 1,1 0))\",\n\t\t\tinput2: \"POLYGON((2 0,2 1,3 1,3 0,2 0))\",\n\t\t\tunion: \"MULTIPOLYGON(((1 0,0 1,1 1,1 0)),((2 0,2 1,3 1,3 0,2 0)))\",\n\t\t\tinter: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\tfwdDiff: \"POLYGON((1 0,0 1,1 1,1 0))\",\n\t\t\trevDiff: \"POLYGON((2 0,2 1,3 1,3 0,2 0))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((2 0,2 1,3 1,3 0,2 0)),((1 0,0 1,1 1,1 0)))\",\n\t\t\trelate: \"FF2FF1212\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"POLYGON((0 0,1 1,1 0,0 0))\",\n\t\t\tinput2: \"POLYGON((2 2,3 2,3 1,2 1,2 2))\",\n\t\t\tunion: \"MULTIPOLYGON(((0 0,1 0,1 1,0 0)),((2 1,2 2,3 2,3 1,2 1)))\",\n\t\t\tinter: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\tfwdDiff: \"POLYGON((0 0,1 1,1 0,0 0))\",\n\t\t\trevDiff: \"POLYGON((2 1,2 2,3 2,3 1,2 1))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((2 1,2 2,3 2,3 1,2 1)),((0 0,1 0,1 1,0 0)))\",\n\t\t\trelate: \"FF2FF1212\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"LINESTRING(0 1,1 0)\",\n\t\t\tinput2: \"MULTIPOLYGON(((1 1,1 0,0 0,0 1,1 1)),((2 1,2 2,3 2,3 1,2 1)))\",\n\t\t\tunion: \"MULTIPOLYGON(((1 1,1 0,0 0,0 1,1 1)),((2 1,2 2,3 2,3 1,2 1)))\",\n\t\t\tinter: \"LINESTRING(0 1,1 0)\",\n\t\t\tfwdDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\trevDiff: \"MULTIPOLYGON(((0 0,0 1,1 1,1 0,0 0)),((2 1,2 2,3 2,3 1,2 1)))\",\n\t\t\tsymDiff: \"MULTIPOLYGON(((0 0,0 1,1 1,1 0,0 0)),((2 1,2 2,3 2,3 1,2 1)))\",\n\t\t\trelate: \"1FFF0F212\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"POINT(5 5)\",\n\t\t\tinput2: \"LINESTRING(5 3,4 8,1 2,9 8)\",\n\t\t\tfwdDiff: \"GEOMETRYCOLLECTION EMPTY\",\n\t\t\trelate: \"0FFFFF102\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"LINESTRING(1 1,2 2,3 3,0 0)\",\n\t\t\tinput2: \"LINESTRING(1 2,2 0)\",\n\t\t\tinter: \"POINT(1.3333333333 1.3333333333)\",\n\t\t\trelate: \"0F1FF0102\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"MULTILINESTRING((0 0,1 1),(0 1,1 0))\",\n\t\t\tinput2: \"LINESTRING(0 1,0.3333333333 0.6666666667,1 0)\",\n\t\t\tunion: \"MULTILINESTRING((0 0,0.5 0.5),(0.5 0.5,1 1),(0 1,0.3333333333 0.6666666667,0.5 0.5),(0.5 0.5,1 0))\",\n\t\t\trelate: \"1F1F00FF2\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"POLYGON((-1 0,0 0,0 1,-1 0))\",\n\t\t\tinput2: \"POLYGON((1 0,-0.9 -0.2,-1 -0.0000000000000032310891488651735,-0.9 0.2,1 0))\",\n\t\t\tunion: \"POLYGON((-1 0,-0.9 0.2,-0.80952380952381 0.19047619047619,0 1,0 0.105263157894737,1 0,-0.9 -0.2,-1 0))\",\n\t\t\trelate: \"212101212\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"LINESTRING(1 2.1,2.1 1)\",\n\t\t\tinput2: \"POLYGON((0 0,0 10,10 10,10 0,0 0),(1.5 1.5,8.5 1.5,8.5 8.5,1.5 8.5,1.5 1.5))\",\n\t\t\tinter: \"MULTILINESTRING((1 2.1,1.5 1.6),(1.6 1.5,2.1 1))\",\n\t\t\trelate: \"1010FF212\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"LINESTRING(1 2,2 3)\",\n\t\t\tinput2: \"MULTIPOLYGON(((1 1,1 0,0 0,0 1,1 1)),((1 2,2 2,2 3,1 3,1 
2)))\",\n\t\t\tunion: \"MULTIPOLYGON(((1 1,1 0,0 0,0 1,1 1)),((1 2,2 2,2 3,1 3,1 2)))\",\n\t\t\trelate: \"1FFF0F212\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"LINESTRING(0 1,0 0,1 0)\",\n\t\t\tinput2: \"POLYGON((0 0,1 0,1 1,0 1,0 0.5,0 0))\",\n\t\t\tunion: \"POLYGON((0 0,1 0,1 1,0 1,0 0.5,0 0))\",\n\t\t\trelate: \"F1FF0F212\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"LINESTRING(2 2,3 3,4 4,5 5,0 0)\",\n\t\t\tinput2: \"LINESTRING(0 0,1 1)\",\n\t\t\tfwdDiff: \"MULTILINESTRING((2 2,3 3,4 4,5 5),(1 1,2 2))\",\n\t\t\trelate: \"101F00FF2\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"LINESTRING(0 0,0 0,0 1,1 0,0 0)\",\n\t\t\tinput2: \"MULTILINESTRING((0 0,0.5 0.5),(0.5 0.5,1 1),(0 1,0.3333333333 0.6666666667,0.5 0.5),(0.5 0.5,1 0))\",\n\t\t\tfwdDiff: \"MULTILINESTRING((0 0,0 1),(1 0,0 0))\",\n\t\t\trelate: \"101FFF102\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"LINESTRING(1 0,0.5000000000000001 0.5,0 1)\",\n\t\t\tinput2: \"MULTIPOLYGON(((0 0,2 0,2 2,0 2,0 0),(0.5 0.5,1 0.5,1 1.5,0.5 1.5,0.5 0.5)))\",\n\t\t\tunion: \"POLYGON((0 0,1 0,2 0,2 2,0 2,0 1,0 0),(0.5000000000000001 0.5,1 0.5,1 1.5,0.5 1.5,0.5000000000000001 0.5))\",\n\t\t\trelate: \"10FF0F212\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"LINESTRING(1 1,3 1,1 1,3 1)\",\n\t\t\tinput2: \"POLYGON((0 0,0 2,2 2,2 0,0 0))\",\n\t\t\trelate: \"1010F0212\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"LINESTRING(-1 1,1 -1)\",\n\t\t\tinput2: \"MULTILINESTRING((0 0,0 1),(0 0,1 0))\",\n\t\t\trelate: \"0F1FF0102\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"MULTILINESTRING((2 0,2 1),(2 2,2 3))\",\n\t\t\tinput2: \"POLYGON((0 0,0 10,10 10,10 0,0 0),(1.5 1.5,8.5 1.5,8.5 8.5,1.5 8.5,1.5 1.5))\",\n\t\t\tunion: \"GEOMETRYCOLLECTION(POLYGON((2 0,10 0,10 10,0 10,0 0,2 0),(1.5 1.5,1.5 8.5,8.5 8.5,8.5 1.5,1.5 1.5)),LINESTRING(2 2,2 3))\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"POINT(0 0)\",\n\t\t\tinput2: \"POINT(0 0)\",\n\t\t\trelate: \"0FFFFFFF2\",\n\t\t\tunion: \"POINT(0 0)\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"GEOMETRYCOLLECTION(POINT(0 0))\",\n\t\t\tinput2: \"GEOMETRYCOLLECTION(LINESTRING(2 0,2 1))\",\n\t\t\tunion: \"GEOMETRYCOLLECTION(POINT(0 0),LINESTRING(2 0,2 1))\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"GEOMETRYCOLLECTION(POLYGON((0 0,1 0,0 1,0 0)),POLYGON((0 0,1 1,0 1,0 0)))\",\n\t\t\tinput2: \"POINT(0 0)\",\n\t\t\tunion: \"POLYGON((0 0,1 0,0.5 0.5,1 1,0 1,0 0))\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"GEOMETRYCOLLECTION(POLYGON((0 0,0 1,1 0,0 0)),POLYGON((0 1,1 1,1 0,0 1)))\",\n\t\t\tinput2: \"POLYGON EMPTY\",\n\t\t\tunion: \"POLYGON((0 0,0 1,1 1,1 0,0 0))\",\n\t\t},\n\t\t{\n\t\t\tinput1: \"LINESTRING(0 0,0 0,0 1,1 0,0 0)\",\n\t\t\tinput2: \"LINESTRING(0.1 0.1,0.5 0.5)\",\n\t\t\tinter: \"POINT(0.5 0.5)\",\n\t\t},\n\t} {\n\t\tt.Run(strconv.Itoa(i), func(t *testing.T) {\n\t\t\tg1 := geomFromWKT(t, geomCase.input1)\n\t\t\tg2 := geomFromWKT(t, geomCase.input2)\n\t\t\tt.Logf(\"input1: %s\", geomCase.input1)\n\t\t\tt.Logf(\"input2: %s\", geomCase.input2)\n\t\t\tfor _, opCase := range []struct {\n\t\t\t\topName string\n\t\t\t\top func(geom.Geometry, geom.Geometry) (geom.Geometry, error)\n\t\t\t\twant string\n\t\t\t}{\n\t\t\t\t{\"union\", geom.Union, geomCase.union},\n\t\t\t\t{\"inter\", geom.Intersection, geomCase.inter},\n\t\t\t\t{\"fwd_diff\", geom.Difference, geomCase.fwdDiff},\n\t\t\t\t{\"rev_diff\", func(a, b geom.Geometry) (geom.Geometry, error) { return geom.Difference(b, a) }, geomCase.revDiff},\n\t\t\t\t{\"sym_diff\", geom.SymmetricDifference, geomCase.symDiff},\n\t\t\t} {\n\t\t\t\tt.Run(opCase.opName, func(t *testing.T) {\n\t\t\t\t\tif opCase.want == \"\" {\n\t\t\t\t\t\tt.Skip(\"Skipping test because it's not specified or is commented 
out\")\n\t\t\t\t\t}\n\t\t\t\t\twant := geomFromWKT(t, opCase.want)\n\t\t\t\t\tgot, err := opCase.op(g1, g2)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tt.Fatalf(\"could not perform op: %v\", err)\n\t\t\t\t\t}\n\t\t\t\t\texpectGeomEq(t, got, want, geom.IgnoreOrder, geom.ToleranceXY(1e-7))\n\t\t\t\t})\n\t\t\t}\n\t\t\tt.Run(\"relate\", func(t *testing.T) {\n\t\t\t\tif geomCase.relate == \"\" {\n\t\t\t\t\tt.Skip(\"Skipping test because it's not specified or is commented out\")\n\t\t\t\t}\n\t\t\t\tfor _, swap := range []struct {\n\t\t\t\t\tdescription string\n\t\t\t\t\treverse bool\n\t\t\t\t}{\n\t\t\t\t\t{\"fwd\", false},\n\t\t\t\t\t{\"rev\", true},\n\t\t\t\t} {\n\t\t\t\t\tt.Run(swap.description, func(t *testing.T) {\n\t\t\t\t\t\tvar (\n\t\t\t\t\t\t\tgot string\n\t\t\t\t\t\t\terr error\n\t\t\t\t\t\t)\n\t\t\t\t\t\tif swap.reverse {\n\t\t\t\t\t\t\tgot, err = geom.Relate(g2, g1)\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tgot, err = geom.Relate(g1, g2)\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\tt.Fatal(\"could not perform relate op\")\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\twant := geomCase.relate\n\t\t\t\t\t\tif swap.reverse {\n\t\t\t\t\t\t\twant = \"\"\n\t\t\t\t\t\t\tfor j := 0; j < 9; j++ {\n\t\t\t\t\t\t\t\tk := 3*(j%3) + j/3\n\t\t\t\t\t\t\t\twant += geomCase.relate[k : k+1]\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif got != want {\n\t\t\t\t\t\t\tt.Errorf(\"\\nwant: %v\\ngot: %v\\n\", want, got)\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t})\n\t\t})\n\t}\n}",
"func (ms *mergeSort) Primitive() engine.Primitive {\n\treturn ms.input.Primitive()\n}",
"func regroup(toGroup *vector.Vector) (*vector.Vector, int) {\n\tfor x:=0; x < toGroup.Len(); x++ {\n\t\t//Get the current subvector\n\t\tvar subv *vector.Vector = toGroup.At(x).(*vector.Vector)\n\t\t//If the vector is of len 1, it's a token, so pull out the token\n\t\t//If not, it'll be \"\", so no problem\n\t\tvar argu string\n\t\tif subv.Len() == 1 { \n\t\t\targu = subv.At(0).(string)\n\t\t\t//Set up temp vars for the inner loop, \n\t\t\t//they'll be needed after the loop though\n\t\t\tvar vec vector.Vector\n\t\t\tvar y int = 0\n\t\t\tif argu == \"(\" {\n\t\t\t\t//parentheses count\n\t\t\t\tvar pc = 1\n\t\t\t\tvar moreParen bool = false\n\t\t\t\tfor y=x+1; pc > 0; y++ {\n\t\t\t\t\t//Again, see if you're working with a\n\t\t\t\t\t//token this time, reset argu in case \n\t\t\t\t\t//it's still a ( from before\n\t\t\t\t\tvar vx *vector.Vector = \n\t\t\t\t\ttoGroup.At(y).(*vector.Vector)\n\t\t\t\t\tif vx.Len() == 1 {\n\t\t\t\t\t\targu = vx.At(0).(string) \n\t\t\t\t\t} else { \n\t\t\t\t\t\targu = \"\" \n\t\t\t\t\t}\n\t\t\t\t\t//Augment the counts if necessary\n\t\t\t\t\tif argu == \"(\" {\n\t\t\t\t\t\tpc++\n\t\t\t\t\t\tmoreParen = true\n\t\t\t\t\t} else if argu == \")\" {\n\t\t\t\t\t\tpc--\n\t\t\t\t\t\tif pc == 0 {\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\t//Push the subvector into another\n\t\t\t\t\t//subvector\n\t\t\t\t\tvar tempV *vector.Vector =\n\t\t\t\t\ttoGroup.At(y).(*vector.Vector)\n\t\t\t\t\tvec.Push(tempV)\n\t\t\t\t}\n\t\t\t\tvar vNew *vector.Vector = &vec\n\t\t\t\tif moreParen {\n\t\t\t\t\tvNew, _ = regroup(&vec)\n\t\t\t\t}\n\t\t\t\t//Delete all subvectors from x-y, then put the\n\t\t\t\t//new subvector in its place\n\t\t\t\tfor temp:=x; temp <= y; temp++{\n\t\t\t\t\ttoGroup.Delete(x)\n\t\t\t\t}\n\t\t\t\ttoGroup.Insert(x, vNew)\n\t\t\t}\n\t\t} else {\n\t\t\ttoGroup.Delete(x)\n\t\t\tsubv, _ = regroup(subv)\n\t\t\ttoGroup.Insert(x, subv)\n\t\t}\n\t}\n\treturn toGroup, 0\n}",
"func (l *Line) GetIntersectionPoints(other Shape) []IntersectionPoint {\n\n\tintersections := []IntersectionPoint{}\n\n\tswitch b := other.(type) {\n\n\tcase *Line:\n\n\t\tdet := (l.X2-l.X)*(b.Y2-b.Y) - (b.X2-b.X)*(l.Y2-l.Y)\n\n\t\tif det != 0 {\n\n\t\t\t// MAGIC MATH; the extra + 1 here makes it so that corner cases (literally aiming the line through the corners of the\n\t\t\t// hollow square in world5) works!\n\n\t\t\tlambda := (((l.Y - b.Y) * (b.X2 - b.X)) - ((l.X - b.X) * (b.Y2 - b.Y)) + 1) / det\n\n\t\t\tgamma := (((l.Y - b.Y) * (l.X2 - l.X)) - ((l.X - b.X) * (l.Y2 - l.Y)) + 1) / det\n\n\t\t\tif (0 < lambda && lambda < 1) && (0 < gamma && gamma < 1) {\n\t\t\t\tdx, dy := l.GetDelta()\n\t\t\t\tintersections = append(intersections, IntersectionPoint{l.X + lambda*dx, l.Y + lambda*dy, other})\n\t\t\t}\n\n\t\t}\n\tcase *Rectangle:\n\t\tside := NewLine(b.X, b.Y, b.X, b.Y+b.H)\n\t\tintersections = append(intersections, l.GetIntersectionPoints(side)...)\n\n\t\tside.Y = b.Y + b.H\n\t\tside.X2 = b.X + b.W\n\t\tside.Y2 = b.Y + b.H\n\t\tintersections = append(intersections, l.GetIntersectionPoints(side)...)\n\n\t\tside.X = b.X + b.W\n\t\tside.Y2 = b.Y\n\t\tintersections = append(intersections, l.GetIntersectionPoints(side)...)\n\n\t\tside.Y = b.Y\n\t\tside.X2 = b.X\n\t\tside.Y2 = b.Y\n\t\tintersections = append(intersections, l.GetIntersectionPoints(side)...)\n\tcase *Space:\n\t\tfor _, shape := range *b {\n\t\t\tintersections = append(intersections, l.GetIntersectionPoints(shape)...)\n\t\t}\n\tcase *Circle:\n\t\t// \tTO-DO: Add this later, because this is kinda hard and would necessitate some complex vector math that, for whatever\n\t\t// reason, is not even readily available in a Golang library as far as I can tell???\n\t\tbreak\n\t}\n\n\t// fmt.Println(\"WARNING! Object \", other, \" isn't a valid shape for collision testing against Line \", l, \"!\")\n\n\tsort.Slice(intersections, func(i, j int) bool {\n\t\treturn Distance(l.X, l.Y, intersections[i].X, intersections[i].Y) < Distance(l.X, l.Y, intersections[j].X, intersections[j].Y)\n\t})\n\n\treturn intersections\n\n}",
"func asSegments(g geom.Geometry) (segs []geom.Line, err error) {\n\tswitch g := g.(type) {\n\tcase geom.LineString:\n\t\treturn g.AsSegments()\n\tcase geom.MultiLineString:\n\t\ts, err := g.AsSegments()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfor i := range s {\n\t\t\tsegs = append(segs, s[i]...)\n\t\t}\n\t\treturn segs, nil\n\tcase geom.Polygon:\n\t\ts, err := g.AsSegments()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfor i := range s {\n\t\t\tsegs = append(segs, s[i]...)\n\t\t}\n\t\treturn segs, nil\n\tcase geom.MultiPolygon:\n\t\ts, err := g.AsSegments()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfor i := range s {\n\t\t\tfor j := range s[i] {\n\t\t\t\tsegs = append(segs, s[i][j]...)\n\t\t\t}\n\t\t}\n\t\treturn segs, nil\n\t}\n\treturn nil, errors.New(\"Unsupported\")\n}",
"func (o *OctahedronGeometry) JSObject() *js.Object { return o.p }",
"func (obj *Device) DrawPrimitive(\n\ttyp PRIMITIVETYPE,\n\tstartVertex uint,\n\tprimitiveCount uint,\n) Error {\n\tret, _, _ := syscall.Syscall6(\n\t\tobj.vtbl.DrawPrimitive,\n\t\t4,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(typ),\n\t\tuintptr(startVertex),\n\t\tuintptr(primitiveCount),\n\t\t0,\n\t\t0,\n\t)\n\treturn toErr(ret)\n}",
"func (s *subsumer) vertices(x, y *adt.Vertex) bool {\n\tif x == y {\n\t\treturn true\n\t}\n\n\tif s.Defaults {\n\t\ty = y.Default()\n\t}\n\n\tif b, _ := y.BaseValue.(*adt.Bottom); b != nil {\n\t\t// If the value is incomplete, the error is not final. So either check\n\t\t// structural equivalence or return an error.\n\t\treturn !b.IsIncomplete()\n\t}\n\n\tctx := s.ctx\n\n\tfinal := y.IsData() || s.Final\n\n\tswitch v := x.BaseValue.(type) {\n\tcase *adt.Bottom:\n\t\treturn false\n\n\tcase *adt.ListMarker:\n\t\tif !y.IsList() {\n\t\t\ts.errf(\"list does not subsume %s (type %s)\", y, y.Kind())\n\t\t\treturn false\n\t\t}\n\t\tif !s.listVertices(x, y) {\n\t\t\treturn false\n\t\t}\n\t\t// TODO: allow other arcs alongside list arc.\n\t\treturn true\n\n\tcase *adt.StructMarker:\n\t\t_, ok := y.BaseValue.(*adt.StructMarker)\n\t\tif !ok {\n\t\t\treturn false\n\t\t}\n\n\tcase adt.Value:\n\t\tif !s.values(v, y.Value()) {\n\t\t\treturn false\n\t\t}\n\n\t\t// Embedded scalars could still have arcs.\n\t\tif final {\n\t\t\treturn true\n\t\t}\n\n\tdefault:\n\t\tpanic(fmt.Sprintf(\"unexpected type %T\", v))\n\t}\n\n\txClosed := x.IsClosedStruct() && !s.IgnoreClosedness\n\t// TODO: this should not close for taking defaults. Do a more principled\n\t// makeover of this package before making it public, though.\n\tyClosed := s.Final || s.Defaults ||\n\t\t(y.IsClosedStruct() && !s.IgnoreClosedness)\n\n\tif xClosed && !yClosed && !final {\n\t\treturn false\n\t}\n\n\ttypes := x.OptionalTypes()\n\tif !final && !s.IgnoreOptional && types&(adt.HasPattern|adt.HasAdditional) != 0 {\n\t\t// TODO: there are many cases where pattern constraints can be checked.\n\t\ts.inexact = true\n\t\treturn false\n\t}\n\n\t// All arcs in x must exist in y and its values must subsume.\n\txFeatures := export.VertexFeatures(s.ctx, x)\n\tfor _, f := range xFeatures {\n\t\tif s.Final && !f.IsRegular() {\n\t\t\tcontinue\n\t\t}\n\n\t\ta := x.Lookup(f)\n\t\taOpt := false\n\t\tif a == nil {\n\t\t\t// x.f is optional\n\t\t\tif s.IgnoreOptional {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\ta = &adt.Vertex{Label: f}\n\t\t\tx.MatchAndInsert(ctx, a)\n\t\t\ta.Finalize(ctx)\n\n\t\t\t// If field a is optional and has value top, neither the\n\t\t\t// omission of the field nor the field defined with any value\n\t\t\t// may cause unification to fail.\n\t\t\tif a.Kind() == adt.TopKind {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\taOpt = true\n\t\t}\n\n\t\tb := y.Lookup(f)\n\t\tif b == nil {\n\t\t\t// y.f is optional\n\t\t\tif !aOpt {\n\t\t\t\ts.errf(\"required field is optional in subsumed value: %s\", f)\n\t\t\t\treturn false\n\t\t\t}\n\n\t\t\t// If f is undefined for y and if y is closed, the field is\n\t\t\t// implicitly defined as _|_ and thus subsumed. 
Technically, this is\n\t\t\t// even true if a is not optional, but in that case it means that y\n\t\t\t// is invalid, so return false regardless\n\t\t\tif !y.Accept(ctx, f) || y.IsData() || s.Final {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tb = &adt.Vertex{Label: f}\n\t\t\ty.MatchAndInsert(ctx, b)\n\t\t\tb.Finalize(ctx)\n\t\t}\n\n\t\tif s.values(a, b) {\n\t\t\tcontinue\n\t\t}\n\n\t\ts.missing = f\n\t\ts.gt = a\n\t\ts.lt = y\n\n\t\ts.errf(\"field %s not present in %s\", f, y)\n\t\treturn false\n\t}\n\n\tif xClosed && !yClosed && !s.Final {\n\t\ts.errf(\"closed struct does not subsume open struct\")\n\t\treturn false\n\t}\n\n\tyFeatures := export.VertexFeatures(s.ctx, y)\nouter:\n\tfor _, f := range yFeatures {\n\t\tif s.Final && !f.IsRegular() {\n\t\t\tcontinue\n\t\t}\n\n\t\tfor _, g := range xFeatures {\n\t\t\tif g == f {\n\t\t\t\t// already validated\n\t\t\t\tcontinue outer\n\t\t\t}\n\t\t}\n\n\t\tb := y.Lookup(f)\n\t\tif b == nil {\n\t\t\tif s.IgnoreOptional || s.Final {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tb = &adt.Vertex{Label: f}\n\t\t\ty.MatchAndInsert(ctx, b)\n\t\t}\n\n\t\tif !x.Accept(ctx, f) {\n\t\t\tif s.Profile.IgnoreClosedness {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\ts.errf(\"field not allowed in closed struct: %s\", f)\n\t\t\treturn false\n\t\t}\n\n\t\ta := &adt.Vertex{Label: f}\n\t\tx.MatchAndInsert(ctx, a)\n\t\tif len(a.Conjuncts) == 0 {\n\t\t\t// It is accepted and has no further constraints, so all good.\n\t\t\tcontinue\n\t\t}\n\n\t\ta.Finalize(ctx)\n\t\tb.Finalize(ctx)\n\n\t\tif !s.vertices(a, b) {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}",
"func (tet *Tetrahedron) ContainsPoint(point [3]float64)(bool) {\n\n\tfor idx, v := range tet.Vertices {\n\t\t//contains the three points of the face that does not include v\n\t\tvar opposite []*Vertex3d\n\t\tfor idx2, v2 := range tet.Vertices {\n\t\t\tif idx != idx2 {\n\t\t\t\topposite = append(opposite, v2)\n\t\t\t}\n\t\t}\n\n\t\t//get 3d coordinates of opposite face points\n\t\tfacePt0, facePt1, facePt2 := opposite[0].Vec, opposite[1].Vec, opposite[2].Vec\n\n\t\t//get two vectors contained in the opposite face\n\t\tvec0 := []float64{facePt1[0] - facePt0[0], facePt1[1] - facePt0[1], facePt1[2] - facePt0[2]}\n\n\t\tvec1 := []float64{facePt2[0] - facePt0[0], facePt2[1] - facePt0[1], facePt2[2] - facePt0[2]}\n\n\t\t\n\t\t//get normal of opposite face\n\t\tnormal := []float64{vec0[1] * vec1[2] - vec0[2] * vec1[1], vec0[2] * vec1[0] - vec0[0] * vec1[2],\n\t\t\tvec0[0] * vec1[1] - vec0[1] * vec1[0]}\n\n\n\t\tv4Diff := []float64{v.Vec[0] - facePt0[0], v.Vec[1] - facePt0[1], v.Vec[2] - facePt0[2]}\n\t\tnewPointDiff := []float64{point[0] - facePt0[0], point[1] - facePt0[1], point[2] - facePt0[2]}\n\n\t\tv4Dot := normal[0] * v4Diff[0] + normal[1] * v4Diff[1] + normal[2] * v4Diff[2]\n\t\tnewPointDot := normal[0] * newPointDiff[0] + normal[1] * newPointDiff[1] + normal[2] * newPointDiff[2]\n\n\t\t//log.Println(\"asdf\")\n\t\t//log.Println(v4Dot)\n\t\t//log.Println(newPointDot)\n\n\t\tif v4Dot * newPointDot < 0.0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}",
"func makeVao(points []float32) uint32 {\n\tvar vbo uint32\n\tvar vao uint32\n\tvar stride int32\n\n\t//points only 9\n\t//points and colors 18\n\tstride = int32(4 * len(points) / 3)\n\tprintln(\"stride: \", stride)\n\n\tgl.GenVertexArrays(1, &vao)\n\tgl.GenBuffers(1, &vbo)\n\tgl.BindVertexArray(vao)\n\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(gl.ARRAY_BUFFER, 4*len(points), gl.Ptr(points), gl.STATIC_DRAW)\n\n\tgl.EnableVertexAttribArray(0)\n\tgl.VertexAttribPointer(0, 3, gl.FLOAT, false, stride, gl.PtrOffset(0))\n\tprintln(\"triangle length: \", len(points))\n\tif len(points) >= 18 {\n\t\tlog.Println(\"In if\")\n\t\tgl.EnableVertexAttribArray(1)\n\t\tgl.VertexAttribPointer(1, 3, gl.FLOAT, false, stride, gl.PtrOffset(3*4))\n\t}\n\treturn vao\n}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n C.glowMultiDrawElementsBaseVertex(gpMultiDrawElementsBaseVertex, (C.GLenum)(mode), (*C.GLsizei)(unsafe.Pointer(count)), (C.GLenum)(xtype), indices, (C.GLsizei)(drawcount), (*C.GLint)(unsafe.Pointer(basevertex)))\n}",
"func (p *Polygon) smoothVertices() {\n\tdone := false\n\tfor done == false {\n\t\tdone = true\n\t\tfor i := range p.vlist {\n\t\t\tif p.smoothVertex(i) {\n\t\t\t\tdone = false\n\t\t\t}\n\t\t}\n\t}\n}",
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tsyscall.Syscall(gpVertexArrayElementBuffer, 2, uintptr(vaobj), uintptr(buffer), 0)\n}",
"func VertexArrayBindingDivisor(vaobj uint32, bindingindex uint32, divisor uint32) {\n\tsyscall.Syscall(gpVertexArrayBindingDivisor, 3, uintptr(vaobj), uintptr(bindingindex), uintptr(divisor))\n}",
"func (a Vec4) Dehomogenized() Vec3 {\n\treturn Vec3{a.X / a.W, a.Y / a.W, a.Z / a.W}\n}",
"func cureLocalIntersections(start *node, triangles *[]int, dim int) *node {\n\tp := start\n\tfor {\n\t\ta := p.prev\n\t\tb := p.next.next\n\n\t\tif !equals(a, b) &&\n\t\t\tintersects(a, p, p.next, b) &&\n\t\t\tlocallyInside(a, b) &&\n\t\t\tlocallyInside(b, a) {\n\t\t\t*triangles = append(*triangles, a.i/dim, p.i/dim, b.i/dim)\n\n\t\t\t// remove two nodes involved\n\t\t\tremoveNode(p)\n\t\t\tremoveNode(p.next)\n\n\t\t\tp = b\n\t\t\tstart = b\n\t\t}\n\t\tp = p.next\n\t\tif p == start {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn p\n}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n\tC.glowMultiDrawElementsBaseVertex(gpMultiDrawElementsBaseVertex, (C.GLenum)(mode), (*C.GLsizei)(unsafe.Pointer(count)), (C.GLenum)(xtype), indices, (C.GLsizei)(drawcount), (*C.GLint)(unsafe.Pointer(basevertex)))\n}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n\tC.glowMultiDrawElementsBaseVertex(gpMultiDrawElementsBaseVertex, (C.GLenum)(mode), (*C.GLsizei)(unsafe.Pointer(count)), (C.GLenum)(xtype), indices, (C.GLsizei)(drawcount), (*C.GLint)(unsafe.Pointer(basevertex)))\n}",
"func SegmentizeGeom(\n\tgeometry geom.T,\n\tsegmentMaxAngleOrLength float64,\n\tsegmentizeCoords func(geom.Coord, geom.Coord, float64) []float64,\n) (geom.T, error) {\n\tif geometry.Empty() {\n\t\treturn geometry, nil\n\t}\n\tswitch geometry := geometry.(type) {\n\tcase *geom.Point, *geom.MultiPoint:\n\t\treturn geometry, nil\n\tcase *geom.LineString:\n\t\tvar allFlatCoordinates []float64\n\t\tfor pointIdx := 1; pointIdx < geometry.NumCoords(); pointIdx++ {\n\t\t\tallFlatCoordinates = append(\n\t\t\t\tallFlatCoordinates,\n\t\t\t\tsegmentizeCoords(geometry.Coord(pointIdx-1), geometry.Coord(pointIdx), segmentMaxAngleOrLength)...,\n\t\t\t)\n\t\t}\n\t\t// Appending end point as it wasn't included in the iteration of coordinates.\n\t\tallFlatCoordinates = append(allFlatCoordinates, geometry.Coord(geometry.NumCoords()-1)...)\n\t\treturn geom.NewLineStringFlat(geom.XY, allFlatCoordinates).SetSRID(geometry.SRID()), nil\n\tcase *geom.MultiLineString:\n\t\tsegMultiLine := geom.NewMultiLineString(geom.XY).SetSRID(geometry.SRID())\n\t\tfor lineIdx := 0; lineIdx < geometry.NumLineStrings(); lineIdx++ {\n\t\t\tl, err := SegmentizeGeom(geometry.LineString(lineIdx), segmentMaxAngleOrLength, segmentizeCoords)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\terr = segMultiLine.Push(l.(*geom.LineString))\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\treturn segMultiLine, nil\n\tcase *geom.LinearRing:\n\t\tvar allFlatCoordinates []float64\n\t\tfor pointIdx := 1; pointIdx < geometry.NumCoords(); pointIdx++ {\n\t\t\tallFlatCoordinates = append(\n\t\t\t\tallFlatCoordinates,\n\t\t\t\tsegmentizeCoords(geometry.Coord(pointIdx-1), geometry.Coord(pointIdx), segmentMaxAngleOrLength)...,\n\t\t\t)\n\t\t}\n\t\t// Appending end point as it wasn't included in the iteration of coordinates.\n\t\tallFlatCoordinates = append(allFlatCoordinates, geometry.Coord(geometry.NumCoords()-1)...)\n\t\treturn geom.NewLinearRingFlat(geom.XY, allFlatCoordinates).SetSRID(geometry.SRID()), nil\n\tcase *geom.Polygon:\n\t\tsegPolygon := geom.NewPolygon(geom.XY).SetSRID(geometry.SRID())\n\t\tfor loopIdx := 0; loopIdx < geometry.NumLinearRings(); loopIdx++ {\n\t\t\tl, err := SegmentizeGeom(geometry.LinearRing(loopIdx), segmentMaxAngleOrLength, segmentizeCoords)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\terr = segPolygon.Push(l.(*geom.LinearRing))\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\treturn segPolygon, nil\n\tcase *geom.MultiPolygon:\n\t\tsegMultiPolygon := geom.NewMultiPolygon(geom.XY).SetSRID(geometry.SRID())\n\t\tfor polygonIdx := 0; polygonIdx < geometry.NumPolygons(); polygonIdx++ {\n\t\t\tp, err := SegmentizeGeom(geometry.Polygon(polygonIdx), segmentMaxAngleOrLength, segmentizeCoords)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\terr = segMultiPolygon.Push(p.(*geom.Polygon))\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\treturn segMultiPolygon, nil\n\tcase *geom.GeometryCollection:\n\t\tsegGeomCollection := geom.NewGeometryCollection().SetSRID(geometry.SRID())\n\t\tfor geoIdx := 0; geoIdx < geometry.NumGeoms(); geoIdx++ {\n\t\t\tg, err := SegmentizeGeom(geometry.Geom(geoIdx), segmentMaxAngleOrLength, segmentizeCoords)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\terr = segGeomCollection.Push(g)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\treturn segGeomCollection, nil\n\t}\n\treturn nil, errors.Newf(\"unknown type: %T\", geometry)\n}",
"func (obj *Device) ProcessVertices(\n\tsrcStartIndex uint,\n\tdestIndex uint,\n\tvertexCount uint,\n\tdestBuffer *VertexBuffer,\n\tvertexDecl *VertexDeclaration,\n\tflags uint32,\n) Error {\n\tret, _, _ := syscall.Syscall9(\n\t\tobj.vtbl.ProcessVertices,\n\t\t7,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(srcStartIndex),\n\t\tuintptr(destIndex),\n\t\tuintptr(vertexCount),\n\t\tuintptr(unsafe.Pointer(destBuffer)),\n\t\tuintptr(unsafe.Pointer(vertexDecl)),\n\t\tuintptr(flags),\n\t\t0,\n\t\t0,\n\t)\n\treturn toErr(ret)\n}",
"func Structs() {\n\tfmt.Println(vertex{1, 2})\n}",
"func DrawRangeElementsBaseVertex(mode uint32, start uint32, end uint32, count int32, xtype uint32, indices unsafe.Pointer, basevertex int32) {\n\tsyscall.Syscall9(gpDrawRangeElementsBaseVertex, 7, uintptr(mode), uintptr(start), uintptr(end), uintptr(count), uintptr(xtype), uintptr(indices), uintptr(basevertex), 0, 0)\n}",
"func (s Shaper) Shape(text string, ppem uint16, direction Direction, script Script, language string, features string, variations string) []Glyph {\n\tglyphs := make([]Glyph, len([]rune(text)))\n\ti := 0\n\tvar prevIndex uint16\n\tfor cluster, r := range text {\n\t\tindex := s.sfnt.GlyphIndex(r)\n\t\tglyphs[i].Text = string(r)\n\t\tglyphs[i].ID = index\n\t\tglyphs[i].Cluster = uint32(cluster)\n\t\tglyphs[i].XAdvance = int32(s.sfnt.GlyphAdvance(index))\n\t\tif 0 < i {\n\t\t\tglyphs[i-1].XAdvance += int32(s.sfnt.Kerning(prevIndex, index))\n\t\t}\n\t\tprevIndex = index\n\t\ti++\n\t}\n\treturn glyphs\n}",
"func p(v *Vertex, depth int, showPointer bool) string {\n\tstr := \"\"\n\tlevels := \"\"\n\t// Calculate our depth string\n\t// levels := strings.Repeat(\" \", depth)\n\tfor i := depth; i >= 0; i-- {\n\t\tlevels = fmt.Sprintf(\"%s%s\", levels, \" \")\n\t}\n\tstr = fmt.Sprintf(\"%s\\n\", str)\n\tstr = fmt.Sprintf(\"%s%sDepth : %d\\n\", str, levels, depth)\n\tstr = fmt.Sprintf(\"%s%sName : %s\\n\", str, levels, v.Name)\n\tstr = fmt.Sprintf(\"%s%sValue : %d\\n\", str, levels, v.Value)\n\tif showPointer == true {\n\t\tstr = fmt.Sprintf(\"%s%sLocation : %p\\n\", str, levels, v)\n\t}\n\tstr = fmt.Sprintf(\"%s\\n\", str)\n\treturn str\n}",
"func (el *Fill) Polygon() {}",
"func pgParsePolygon(a []byte) (out [][2][]byte) {\n\ta = a[1 : len(a)-1] // drop the first `(` and last `)`\n\treturn pgParsePointList(a)\n}",
"func (n *Node) split() {\n\tquads := n.boundingBox.Quarter()\n\n\tn.children[0] = NewNode(n.level+1, quads[0])\n\tn.children[1] = NewNode(n.level+1, quads[1])\n\tn.children[2] = NewNode(n.level+1, quads[2])\n\tn.children[3] = NewNode(n.level+1, quads[3])\n\n\t// Make a copy of our values\n\tvar values []Boxer\n\tvalues = append(values, n.values...)\n\n\t// Clear out the current values\n\tn.values = nil\n\n\t// Reinsert our values\n\tfor i, _ := range values {\n\t\tn.Insert(values[i])\n\t}\n}",
"func (l loader) obj2Data(lines []string, odata *objData) (faces []face, err error) {\n\tfor _, line := range lines {\n\t\ttokens := strings.Split(line, \" \")\n\t\tvar f1, f2, f3 float32\n\t\tvar s1, s2, s3 string\n\t\tswitch tokens[0] {\n\t\tcase \"v\":\n\t\t\tif _, e := fmt.Sscanf(line, \"v %f %f %f\", &f1, &f2, &f3); e != nil {\n\t\t\t\tlog.Printf(\"Bad vertex: %s\\n\", line)\n\t\t\t\treturn faces, fmt.Errorf(\"could not parse vertex %s\", e)\n\t\t\t}\n\t\t\todata.v = append(odata.v, dataPoint{f1, f2, f3})\n\t\tcase \"vn\":\n\t\t\tif _, e := fmt.Sscanf(line, \"vn %f %f %f\", &f1, &f2, &f3); e != nil {\n\t\t\t\tlog.Printf(\"Bad normal: %s\\n\", line)\n\t\t\t\treturn faces, fmt.Errorf(\"could not parse normal %s\", e)\n\t\t\t}\n\t\t\todata.n = append(odata.n, dataPoint{f1, f2, f3})\n\t\tcase \"vt\":\n\t\t\tif _, e := fmt.Sscanf(line, \"vt %f %f\", &f1, &f2); e != nil {\n\t\t\t\tlog.Printf(\"Bad texture coord: %s\\n\", line)\n\t\t\t\treturn faces, fmt.Errorf(\"could not texture coordinate %s\", e)\n\t\t\t}\n\t\t\todata.t = append(odata.t, uvPoint{f1, 1 - f2})\n\t\tcase \"f\":\n\t\t\tif _, e := fmt.Sscanf(line, \"f %s %s %s\", &s1, &s2, &s3); e != nil {\n\t\t\t\tlog.Printf(\"Bad face: %s\\n\", line)\n\t\t\t\treturn faces, fmt.Errorf(\"could not parse face %s\", e)\n\t\t\t}\n\t\t\tfaces = append(faces, face{[]string{s1, s2, s3}})\n\t\tcase \"o\": // mesh name is processed before this method is called.\n\t\tcase \"s\": // smoothing group - ignored for now.\n\t\tcase \"mtllib\": // materials loaded separately and explicitly.\n\t\tcase \"usemtl\": // material name - ignored, see above.\n\t\t}\n\t}\n\treturn\n}",
"func parseVertex(line string) Vector3 {\n\tvar x, y, z float64\n\t_, err := fmt.Sscanf(line, \"v %f %f %f\", &x, &y, &z)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tfmt.Printf(\"Parsed vertex %v\\n\", Vector3{x, y, z})\n\treturn Vector3{x, y, z}\n}",
"func encodeGeometry(ctx context.Context, geometry geom.Geometry) (g []uint32, vtyp vectorTile.Tile_GeomType, err error) {\n\tif geometry == nil {\n\t\treturn nil, vectorTile.Tile_UNKNOWN, ErrNilGeometryType\n\t}\n\n\tc := NewCursor()\n\n\tswitch t := geometry.(type) {\n\tcase geom.Point:\n\t\tg = append(g, c.MoveTo(t)...)\n\t\treturn g, vectorTile.Tile_POINT, nil\n\n\tcase geom.MultiPoint:\n\t\tg = append(g, c.MoveTo(t.Points()...)...)\n\t\treturn g, vectorTile.Tile_POINT, nil\n\n\tcase geom.LineString:\n\t\tpoints := t.Verticies()\n\t\tg = append(g, c.MoveTo(points[0])...)\n\t\tg = append(g, c.LineTo(points[1:]...)...)\n\t\treturn g, vectorTile.Tile_LINESTRING, nil\n\n\tcase geom.MultiLineString:\n\t\tlines := t.LineStrings()\n\t\tfor _, l := range lines {\n\t\t\tpoints := geom.LineString(l).Verticies()\n\t\t\tg = append(g, c.MoveTo(points[0])...)\n\t\t\tg = append(g, c.LineTo(points[1:]...)...)\n\t\t}\n\t\treturn g, vectorTile.Tile_LINESTRING, nil\n\n\tcase geom.Polygon:\n\t\t// TODO: Right now c.ScaleGeo() never returns a Polygon, so this is dead code.\n\t\tlines := t.LinearRings()\n\t\tfor _, l := range lines {\n\t\t\tpoints := geom.LineString(l).Verticies()\n\t\t\tg = append(g, c.MoveTo(points[0])...)\n\t\t\tg = append(g, c.LineTo(points[1:]...)...)\n\t\t\tg = append(g, c.ClosePath())\n\t\t}\n\t\treturn g, vectorTile.Tile_POLYGON, nil\n\n\tcase geom.MultiPolygon:\n\t\tpolygons := t.Polygons()\n\t\tfor _, p := range polygons {\n\t\t\tlines := geom.Polygon(p).LinearRings()\n\t\t\tfor _, l := range lines {\n\t\t\t\tpoints := geom.LineString(l).Verticies()\n\t\t\t\tg = append(g, c.MoveTo(points[0])...)\n\t\t\t\tg = append(g, c.LineTo(points[1:]...)...)\n\t\t\t\tg = append(g, c.ClosePath())\n\t\t\t}\n\t\t}\n\t\treturn g, vectorTile.Tile_POLYGON, nil\n\n\tdefault:\n\t\treturn nil, vectorTile.Tile_UNKNOWN, ErrUnknownGeometryType\n\t}\n}",
"func (g *graph) prepareVertices(from, to Vertex) (Vertex, Vertex, error) {\n\tfromIsBad := !g.isMyVertex(from)\n\ttoIsBad := !g.isMyVertex(to)\n\n\tif fromIsBad || toIsBad {\n\t\treturn from, to, fmt.Errorf(\"%s : %v : %v, %v : %v\",\n\t\t\tvertexDoesNotBelongMsg,\n\t\t\tfrom.ID(),\n\t\t\tfromIsBad,\n\t\t\tto.ID(),\n\t\t\ttoIsBad,\n\t\t)\n\t}\n\n\tif !g.GraphType.SelfLoops && from.ID() == to.ID() {\n\t\treturn from, to, fmt.Errorf(\"%s\", selfLoopNotAllowedMsg)\n\t}\n\n\tif g.Directed() && to.ID() < from.ID() {\n\t\treturn from, to, nil\n\t}\n\treturn to, from, nil\n}",
"func VertexBindingDivisor(bindingindex uint32, divisor uint32) {\n\tsyscall.Syscall(gpVertexBindingDivisor, 2, uintptr(bindingindex), uintptr(divisor), 0)\n}",
"func (qh *QuickHull) createConvexHalfEdgeMesh() {\n\tvar visibleFaces []int\n\tvar horizontalEdges []int\n\n\ttype faceData struct {\n\t\tfaceIndex int\n\t\tenteredFromHalfEdge int // If the Face turns out not to be visible, this half edge will be marked as horizon edge\n\t}\n\n\tvar possiblyVisibleFaces []faceData\n\n\t// Compute base tetrahedron\n\tqh.mesh = qh.initialTetrahedron()\n\tassertTrue(len(qh.mesh.faces) == 4)\n\n\tvar faceList []int\n\tfor i := 0; i < 4; i++ {\n\t\tf := &qh.mesh.faces[i]\n\t\tif len(f.pointsOnPositiveSide) > 0 {\n\t\t\tfaceList = append(faceList, i)\n\t\t\tf.inFaceStack = true\n\t\t}\n\t}\n\n\t// Process Faces until the Face list is empty.\n\titer := 0\n\tfor len(faceList) > 0 {\n\t\titer++\n\t\tif iter == maxInt {\n\t\t\t// Visible Face traversal marks visited Faces with iteration counter (to mark that the Face has been visited on this iteration) and the max value represents unvisited Faces. At this point we have to reset iteration counter. This shouldn't be an\n\t\t\t// issue on 64 bit machines.\n\t\t\titer = 0\n\t\t}\n\n\t\tvar topFaceIndex int\n\t\ttopFaceIndex, faceList = faceList[0], faceList[1:]\n\n\t\ttf := &qh.mesh.faces[topFaceIndex]\n\t\ttf.inFaceStack = false\n\n\t\tassertTrue(tf.pointsOnPositiveSide == nil || len(tf.pointsOnPositiveSide) > 0)\n\t\tif tf.pointsOnPositiveSide == nil || tf.isDisabled() {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Pick the most distant point to this triangle plane as the point to which we extrude\n\t\tactivePoint := qh.vertexData[tf.mostDistantPoint]\n\t\tactivePointIndex := tf.mostDistantPoint\n\n\t\t// Clear outer vars\n\t\thorizontalEdges = horizontalEdges[:0]\n\t\tvisibleFaces = visibleFaces[:0]\n\t\tpossiblyVisibleFaces = possiblyVisibleFaces[:0]\n\n\t\t// Find out the Faces that have our active point on their positive side (these are the \"visible Faces\").\n\t\t// The Face on top of the stack of course is one of them. At the same time, we create a list of horizon edges.\n\t\tpossiblyVisibleFaces = append(possiblyVisibleFaces, faceData{faceIndex: topFaceIndex, enteredFromHalfEdge: maxInt})\n\t\tfor len(possiblyVisibleFaces) > 0 {\n\t\t\tfd := possiblyVisibleFaces[len(possiblyVisibleFaces)-1]\n\t\t\tpossiblyVisibleFaces = possiblyVisibleFaces[:len(possiblyVisibleFaces)-1]\n\t\t\tpvf := &qh.mesh.faces[fd.faceIndex]\n\t\t\tassertTrue(!pvf.isDisabled())\n\n\t\t\tif pvf.visibilityCheckedOnIteration == iter {\n\t\t\t\tif pvf.isVisibleFaceOnCurrentIteration {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tp := pvf.plane\n\t\t\t\tpvf.visibilityCheckedOnIteration = iter\n\t\t\t\td := p.n.Dot(activePoint) + p.d\n\t\t\t\tif d > 0 {\n\t\t\t\t\tpvf.isVisibleFaceOnCurrentIteration = true\n\t\t\t\t\tpvf.horizonEdgesOnCurrentIteration = 0\n\t\t\t\t\tvisibleFaces = append(visibleFaces, fd.faceIndex)\n\n\t\t\t\t\tfor _, heIndex := range qh.mesh.halfEdgeIndicesOfFace(*pvf) {\n\t\t\t\t\t\topp := qh.mesh.halfEdges[heIndex].Opp\n\t\t\t\t\t\tif opp != fd.enteredFromHalfEdge {\n\t\t\t\t\t\t\tpossiblyVisibleFaces = append(possiblyVisibleFaces, faceData{faceIndex: qh.mesh.halfEdges[opp].Face, enteredFromHalfEdge: heIndex})\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tassertTrue(fd.faceIndex != topFaceIndex)\n\t\t\t}\n\n\t\t\t// The Face is not visible. Therefore, the halfedge we came from is part of the horizon edge.\n\t\t\tpvf.isVisibleFaceOnCurrentIteration = false\n\t\t\thorizontalEdges = append(horizontalEdges, fd.enteredFromHalfEdge)\n\n\t\t\t// Store which half edge is the horizon edge. 
The other half edges of the Face will not be part of the final mesh so their data slots can by recycled.\n\t\t\thalfEdges := qh.mesh.halfEdgeIndicesOfFace(qh.mesh.faces[qh.mesh.halfEdges[fd.enteredFromHalfEdge].Face])\n\t\t\tvar ind byte\n\t\t\tif halfEdges[0] != fd.enteredFromHalfEdge {\n\t\t\t\tif halfEdges[1] == fd.enteredFromHalfEdge {\n\t\t\t\t\tind = 1\n\t\t\t\t} else {\n\t\t\t\t\tind = 2\n\t\t\t\t}\n\t\t\t}\n\t\t\tqh.mesh.faces[qh.mesh.halfEdges[fd.enteredFromHalfEdge].Face].horizonEdgesOnCurrentIteration |= 1 << ind\n\t\t}\n\n\t\tnHorizontalEdges := len(horizontalEdges)\n\n\t\t// Order horizon edges so that they form a loop. This may fail due to numerical instability in which case we give up trying to solve horizon edge for this point and accept a minor degeneration in the convex hull.\n\t\tif !qh.reorderHorizontalEdges(horizontalEdges) {\n\t\t\tqh.diagnostics.failedHorizonEdges++\n\t\t\tlog.Println(\"Failed to solve horizon edge\")\n\n\t\t\tfor i := range tf.pointsOnPositiveSide {\n\t\t\t\tif tf.pointsOnPositiveSide[i] == activePointIndex {\n\t\t\t\t\ttf.pointsOnPositiveSide = append(tf.pointsOnPositiveSide[:i], tf.pointsOnPositiveSide[i+1:]...)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t/*\n\t\t\t\tTODO: optimize\n\t\t\t\tif len(tf.pointsOnPositiveSide) == 0 {\n\t\t\t\t\treclaimToIndexVectorPool(tf.m_pointsOnPositiveSide);\n\t\t\t\t}\n\t\t\t*/\n\t\t\tcontinue\n\t\t}\n\n\t\t// Except for the horizon edges, all half edges of the visible Faces can be marked as disabled. Their data slots will be reused.\n\t\t// The Faces will be disabled as well, but we need to remember the points that were on the positive side of them - therefore\n\t\t// we save pointers to them.\n\t\tqh.newFaceIndices = qh.newFaceIndices[:0]\n\t\tqh.newHalfEdgeIndices = qh.newHalfEdgeIndices[:0]\n\t\tqh.disabledFacePointVectors = qh.disabledFacePointVectors[:0]\n\n\t\tvar nDisabled int\n\t\tfor _, faceIdx := range visibleFaces {\n\t\t\tdisabledFace := qh.mesh.faces[faceIdx]\n\t\t\thalfEdges := qh.mesh.halfEdgeIndicesOfFace(disabledFace)\n\t\t\tfor i := uint(0); i < 3; i++ {\n\t\t\t\tif disabledFace.horizonEdgesOnCurrentIteration&(1<<i) == 0 {\n\t\t\t\t\tif nDisabled < nHorizontalEdges*2 {\n\t\t\t\t\t\t// Use on this iteration\n\t\t\t\t\t\tqh.newHalfEdgeIndices = append(qh.newHalfEdgeIndices, halfEdges[i])\n\t\t\t\t\t\tnDisabled++\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// Mark for reusal on later iteration step\n\t\t\t\t\t\tqh.mesh.disableHalfEdge(halfEdges[i])\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Disable the Face, but retain pointer to the points that were on the positive side of it. 
We need to assign those points\n\t\t\t// to the new Faces we create shortly.\n\t\t\tt := qh.mesh.disableFace(faceIdx)\n\t\t\tif t != nil {\n\t\t\t\tassertTrue(len(t) > 0)\n\t\t\t\tqh.disabledFacePointVectors = append(qh.disabledFacePointVectors, t)\n\t\t\t}\n\t\t}\n\t\tif nDisabled < nHorizontalEdges*2 {\n\t\t\tnNewHalfEdgesNeeded := nHorizontalEdges*2 - nDisabled\n\t\t\tfor i := 0; i < nNewHalfEdgesNeeded; i++ {\n\t\t\t\tqh.newHalfEdgeIndices = append(qh.newHalfEdgeIndices, qh.mesh.addHalfEdge())\n\t\t\t}\n\t\t}\n\t\t// Create new Faces using the edgeloop\n\t\tfor i := 0; i < nHorizontalEdges; i++ {\n\t\t\tab := horizontalEdges[i]\n\n\t\t\thorizonEdgeVertexIndices := qh.mesh.vertexIndicesOfHalfEdge(qh.mesh.halfEdges[ab])\n\t\t\ta, b, c := horizonEdgeVertexIndices[0], horizonEdgeVertexIndices[1], activePointIndex\n\n\t\t\tnewFaceIdx := qh.mesh.addFace()\n\t\t\tqh.newFaceIndices = append(qh.newFaceIndices, newFaceIdx)\n\n\t\t\tca, bc := qh.newHalfEdgeIndices[2*i+0], qh.newHalfEdgeIndices[2*i+1]\n\n\t\t\tqh.mesh.halfEdges[ab].Next = bc\n\t\t\tqh.mesh.halfEdges[bc].Next = ca\n\t\t\tqh.mesh.halfEdges[ca].Next = ab\n\n\t\t\tqh.mesh.halfEdges[bc].Face = newFaceIdx\n\t\t\tqh.mesh.halfEdges[ca].Face = newFaceIdx\n\t\t\tqh.mesh.halfEdges[ab].Face = newFaceIdx\n\n\t\t\tqh.mesh.halfEdges[ca].EndVertex = a\n\t\t\tqh.mesh.halfEdges[bc].EndVertex = c\n\n\t\t\tnewFace := &qh.mesh.faces[newFaceIdx]\n\n\t\t\tplaneNormal := triangleNormal(qh.vertexData[a], qh.vertexData[b], activePoint)\n\t\t\tnewFace.plane = newPlane(planeNormal, activePoint)\n\t\t\tnewFace.halfEdgeIndex = ab\n\n\t\t\tvar idx int\n\t\t\tif i > 0 {\n\t\t\t\tidx = i*2 - 1\n\t\t\t} else {\n\t\t\t\tidx = 2*nHorizontalEdges - 1\n\t\t\t}\n\t\t\tqh.mesh.halfEdges[ca].Opp = qh.newHalfEdgeIndices[idx]\n\t\t\tqh.mesh.halfEdges[bc].Opp = qh.newHalfEdgeIndices[((i+1)*2)%(nHorizontalEdges*2)]\n\t\t}\n\n\t\tfor _, disabledPoints := range qh.disabledFacePointVectors {\n\t\t\tassertTrue(disabledPoints != nil)\n\t\t\tfor _, pointIdx := range disabledPoints {\n\t\t\t\tif pointIdx == activePointIndex {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tfor i := 0; i < nHorizontalEdges; i++ {\n\t\t\t\t\tif qh.addPointToFace(&qh.mesh.faces[qh.newFaceIndices[i]], pointIdx) {\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\t/* TODO: optimize\n\t\t\t// The points are no longer needed: we can move them to the vector pool for reuse.\n\t\t\treclaimToIndexVectorPool(disabledPoints);\n\t\t\t*/\n\t\t}\n\t\t// Increase Face stack size if needed\n\t\tfor _, newFaceIdx := range qh.newFaceIndices {\n\t\t\tnewFace := &qh.mesh.faces[newFaceIdx]\n\t\t\tif newFace.pointsOnPositiveSide != nil {\n\t\t\t\tassertTrue(len(newFace.pointsOnPositiveSide) > 0)\n\t\t\t\tif !newFace.inFaceStack {\n\t\t\t\t\tfaceList = append(faceList, newFaceIdx)\n\t\t\t\t\tnewFace.inFaceStack = true\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t/* TODO: optimize\n\t// Cleanup\n\tm_indexVectorPool.clear();\n\t*/\n}",
"func VRANGEPD(ops ...operand.Op) { ctx.VRANGEPD(ops...) }",
"func BitmapToVector(floor [][]bool) []Line {\n\tp_array := createPointArray(floor)\n\n\t// Converts Points Array into a Line Array. Possibly could be turned into stand alone function \"pointsToLines(points []Point) []Line\"\n\tl_array := pointsToLines(p_array[0:])\n\n\t// Adds a Slope to each Line object.\n\tfor i := 0; i < len(l_array); i++ {\n\t\tl_array[i].Slope, l_array[i].Vertical = l_array[i].getSlope()\n\t}\n\n\t// Joins up any lines that have Points In Common and Equal Slope.\n\tfor i := 0; i < len(l_array); i++ {\n\t\tfor j := i + 1; j < len(l_array); j++ {\n\t\t\tif Debug {\n\t\t\t\tfmt.Println(\"== Line Array ==\", i, j)\n\t\t\t\tPrintLineArray(l_array[0:])\n\t\t\t}\n\t\t\tif l_array[i].canJoin(l_array[j]) {\n\t\t\t\tl_array[i] = l_array[i].joinTo(l_array[j])\n\t\t\t\tl_array_temp := l_array[0:j]\n\t\t\t\tl_array = append(l_array_temp, l_array[j+1:]...)\n\t\t\t\ti, j = 0, 1\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\n\treturn l_array\n}",
"func makerectangle(i int, j int, g *Graph) error {\n\tnodes := i * j //number of vertices\n\n\tif g.numvert < nodes {\n\t\t_, err := g.addVertices(nodes - g.numvert)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t}\n\tif g.numvert > nodes {\n\t\tlog.Fatal(\"Too many vertices\")\n\t}\n\n\tfrom := 1\n\tto := 2\n\trun := false\n\tdone := false\n\tfor !done {\n\t\ta := Vertex{vert: from}\n\t\tb := Vertex{vert: to}\n\t\tg.addEdge(a, b)\n\t\tfrom++\n\t\tto++\n\t\tif from%j == 0 && !run {\n\t\t\tfrom++\n\t\t\tto++\n\t\t}\n\t\tif run && to > nodes {\n\t\t\tdone = true\n\t\t}\n\t\tif to > nodes {\n\t\t\tfrom = 1\n\t\t\tto = 1 + j\n\t\t\trun = true\n\t\t}\n\t}\n\treturn nil\n}",
"func VGETEXPPS(ops ...operand.Op) { ctx.VGETEXPPS(ops...) }",
"func GenerateShapes() {\r\n\t// Square\r\n\tShapes[0].vertices = []gl.GLfloat{-1, -1, 1, -1, -1, 1, 1, 1}\r\n\tShapes[0].elements = []gl.GLushort{0, 1, 2, 2, 3, 1}\r\n\r\n\t// ___|\r\n\tShapes[1].vertices = []gl.GLfloat{-2, 0, -2, -1, 2, -1, 2, 0, 2, 1, 1, 1, 1, 0}\r\n\tShapes[1].elements = []gl.GLushort{0, 1, 2, 2, 3, 0, 3, 4, 5, 5, 6, 3}\r\n\r\n\t// _|_\r\n\tShapes[2].vertices = []gl.GLfloat{-1.5, 0, -0.5, 0, -0.5, 1, 0.5, 1, 0.5, 0, 1.5, 0, 1.5, -1, -1.5, -1}\r\n\tShapes[2].elements = []gl.GLushort{1, 2, 3, 3, 4, 1, 0, 7, 6, 6, 0, 5}\r\n\r\n\t// Snake\r\n\tShapes[3].vertices = []gl.GLfloat{-1.5, -1, -1.5, 0, -0.5, 0, -0.5, 1, 1.5, 1, 1.5, 0, 0.5, 0, 0.5, -1}\r\n\tShapes[3].elements = []gl.GLushort{0, 1, 6, 6, 7, 0, 2, 3, 4, 4, 5, 2}\r\n\r\n\t// Now fill out the rest automatically.\r\n\t// FIXME why doesn't using _, shape in this loop work ?\r\n\tfor i := range Shapes {\r\n\t\tShapes[i].vao = gl.GenVertexArray()\r\n\t\tShapes[i].vao.Bind()\r\n\t\tShapes[i].vbo = gl.GenBuffer()\r\n\t\tShapes[i].vbo.Bind(gl.ARRAY_BUFFER)\r\n\t\tgl.BufferData(gl.ARRAY_BUFFER, len(Shapes[i].vertices)*4, Shapes[i].vertices, gl.STATIC_DRAW)\r\n\t\tShapes[i].elementBuffer = gl.GenBuffer()\r\n\t\tShapes[i].elementBuffer.Bind(gl.ELEMENT_ARRAY_BUFFER)\r\n\t\tgl.BufferData(gl.ELEMENT_ARRAY_BUFFER, len(Shapes[i].elements)*2, Shapes[i].elements, gl.STATIC_DRAW)\r\n\t\tShapes[i].numElements = len(Shapes[i].elements)\r\n\r\n\t\tvertexAttribArray := shaderProgram.GetAttribLocation(\"position\")\r\n\t\tvertexAttribArray.AttribPointer(2, gl.FLOAT, false, 0, uintptr(0))\r\n\t\tvertexAttribArray.EnableArray()\r\n\t}\r\n}",
"func (m *Mesh) FlattenBase(maxAngle float64) *Mesh {\n\tif maxAngle == 0 {\n\t\tmaxAngle = math.Pi / 4\n\t}\n\tminZ := m.Min().Z\n\tresult := NewMesh()\n\tm.Iterate(func(t *Triangle) {\n\t\tt1 := *t\n\t\tresult.Add(&t1)\n\t})\n\n\tangleZ := math.Cos(maxAngle)\n\tshouldFlatten := func(t *Triangle) bool {\n\t\tvar minCount int\n\t\tfor _, c := range t {\n\t\t\tif c.Z == minZ {\n\t\t\t\tminCount++\n\t\t\t}\n\t\t}\n\t\treturn minCount == 2 && -t.Normal().Z > angleZ\n\t}\n\n\tpending := map[*Triangle]bool{}\n\tresult.Iterate(func(t *Triangle) {\n\t\tif shouldFlatten(t) {\n\t\t\tpending[t] = true\n\t\t}\n\t})\n\n\tflattenCoord := func(c Coord3D) {\n\t\tnewC := c\n\t\tnewC.Z = minZ\n\t\tv2t := result.getVertexToFace()\n\t\tfor _, t2 := range v2t.Value(c) {\n\t\t\tfor i, c1 := range t2 {\n\t\t\t\tif c1 == c {\n\t\t\t\t\tt2[i] = newC\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t\tif shouldFlatten(t2) {\n\t\t\t\tpending[t2] = true\n\t\t\t} else {\n\t\t\t\tdelete(pending, t2)\n\t\t\t}\n\t\t}\n\t\tv2t.Store(newC, v2t.Value(c))\n\t\tv2t.Delete(c)\n\t}\n\n\tfor len(pending) > 0 {\n\t\toldPending := []*Triangle{}\n\t\tfor t := range pending {\n\t\t\toldPending = append(oldPending, t)\n\t\t}\n\t\tpending = map[*Triangle]bool{}\n\t\tfor _, t := range oldPending {\n\t\t\tfor _, c := range t {\n\t\t\t\tif c.Z != minZ {\n\t\t\t\t\tflattenCoord(c)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn result\n}",
"func DrawElementsInstancedBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, instancecount int32, basevertex int32) {\n\tsyscall.Syscall6(gpDrawElementsInstancedBaseVertex, 6, uintptr(mode), uintptr(count), uintptr(xtype), uintptr(indices), uintptr(instancecount), uintptr(basevertex))\n}",
"func ProvokingVertex(mode uint32) {\n C.glowProvokingVertex(gpProvokingVertex, (C.GLenum)(mode))\n}",
"func ProvokingVertex(mode uint32) {\n\tsyscall.Syscall(gpProvokingVertex, 1, uintptr(mode), 0, 0)\n}",
"func UnpackVertex(i map[string]interface{}) *gripql.Vertex {\n\to := &gripql.Vertex{}\n\to.Gid = i[\"gid\"].(string)\n\to.Label = i[\"label\"].(string)\n\tif p, ok := i[\"data\"]; ok {\n\t\to.Data, _ = structpb.NewStruct(p.(map[string]interface{}))\n\t}\n\treturn o\n}",
"func (tet *Tetrahedron) IsFlat()(bool){\n\tfor idx0, v0 := range tet.Vertices {\n\t\tfor idx1, v1 := range tet.Vertices {\n\t\t\tif idx0 != idx1 {\n\t\t\t\tif v0.equals(v1) {\n\t\t\t\t\t//log.Println(\"here\")\n\t\t\t\t\t//log.Println(tet.Id)\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tsingleX := true\n\tsingleY := true\n\tsingleZ := true\n\n\tfor i := 1; i < len(tet.Vertices); i++ {\n\t\tif tet.Vertices[i].Vec[0] != tet.Vertices[i-1].Vec[0] {\n\t\t\tsingleX = false\n\t\t}\n\t\tif tet.Vertices[i].Vec[1] != tet.Vertices[i-1].Vec[1] {\n\t\t\tsingleY = false\n\t\t}\n\t\tif tet.Vertices[i].Vec[2] != tet.Vertices[i-1].Vec[2] {\n\t\t\tsingleZ = false\n\t\t}\n\t}\n\treturn singleX || singleY || singleZ\n}",
"func (v Vec3) Homogeneous() Vec4 {\n\treturn Vec4{v[0], v[1], v[2], 1}\n}",
"func (p Polygon) Union(p2 Polygonal) Polygon {\n\treturn p.op(p2, polyclip.UNION)\n}",
"func VORPD(ops ...operand.Op) { ctx.VORPD(ops...) }",
"func (q *QuadTree) split() {\n\tsubWidth := q.bounds.maxX - q.bounds.minX\n\tsubHeight := q.bounds.maxY - q.bounds.minY\n\tx := q.bounds.position.X\n\ty := q.bounds.position.Y\n\tq.nodes[TOP_LEFT] = NewQuadTree(q.level+1, NewRectangle(x-subWidth, y-subHeight, subWidth, subHeight))\n\tq.nodes[TOP_RIGHT] = NewQuadTree(q.level+1, NewRectangle(x+subWidth, y-subHeight, subWidth, subHeight))\n\tq.nodes[BOTTOM_LEFT] = NewQuadTree(q.level+1, NewRectangle(x-subWidth, y+subHeight, subWidth, subHeight))\n\tq.nodes[BOTTOM_RIGHT] = NewQuadTree(q.level+1, NewRectangle(x+subWidth, y+subHeight, subWidth, subHeight))\n}",
"func vertexInterp(isolevel float64, p1, p2 gl.Vector, valp1, valp2 float64) gl.Vector {\n\tif math.Abs(isolevel-valp1) < epsilon {\n\t\treturn p1\n\t}\n\tif math.Abs(isolevel-valp2) < epsilon {\n\t\treturn p2\n\t}\n\tif math.Abs(valp1-valp2) < epsilon {\n\t\treturn p1\n\t}\n\tmu := (isolevel - valp1) / (valp2 - valp1)\n\treturn gl.Vector{\n\t\tX: p1.X + mu*(p2.X-p1.X),\n\t\tY: p1.Y + mu*(p2.Y-p1.Y),\n\t\tZ: p1.Z + mu*(p2.Z-p1.Z),\n\t}\n}",
"func NormalExtrude(p V3) V2 {\n\treturn V2{p.X, p.Y}\n}",
"func nextMulti(i *Iter) []byte {\n\tj := 0\n\td := i.multiSeg\n\t// skip first rune\n\tfor j = 1; j < len(d) && !utf8.RuneStart(d[j]); j++ {\n\t}\n\tfor j < len(d) {\n\t\tinfo := i.rb.f.info(input{bytes: d}, j)\n\t\tif info.BoundaryBefore() {\n\t\t\ti.multiSeg = d[j:]\n\t\t\treturn d[:j]\n\t\t}\n\t\tj += int(info.size)\n\t}\n\t// treat last segment as normal decomposition\n\ti.next = i.rb.f.nextMain\n\treturn i.next(i)\n}",
"func VPEXPANDW(ops ...operand.Op) { ctx.VPEXPANDW(ops...) }",
"func stripBare(re *syntax.Regexp) (retPart part) {\n\tswitch re.Op {\n\tcase syntax.OpNoMatch: // matches no strings\n\t\t// TODO(quis): Introduce a part type for this?\n\t\treturn word(\"__no_matches\")\n\tcase syntax.OpEmptyMatch: // matches empty string\n\t\treturn word(\"\")\n\tcase syntax.OpLiteral: // matches Runes sequence\n\t\treturn word(re.Rune)\n\tcase syntax.OpCharClass: // matches Runes interpreted as range pair list\n\t\trs := expandRanges(re.Rune)\n\t\tif len(rs) > 5 {\n\t\t\treturn separator{}\n\t\t}\n\t\tvar ret orPart\n\t\tfor _, r := range rs {\n\t\t\tret = append(ret, word(fmt.Sprintf(\"%c\", r)))\n\t\t}\n\t\treturn ret\n\tcase syntax.OpAnyCharNotNL: // matches any character except newline\n\t\treturn separator{}\n\tcase syntax.OpAnyChar: // matches any character\n\t\treturn separator{}\n\tcase syntax.OpBeginLine: // matches empty string at beginning of line\n\t\treturn separator{}\n\tcase syntax.OpEndLine: // matches empty string at end of line\n\t\treturn separator{}\n\tcase syntax.OpBeginText: // matches empty string at beginning of text\n\t\t// TODO(quis): Introduce a part type for this so we can generate SQL expressions with LIKEs that can be anchored at the start/end of a field.\n\t\treturn separator{}\n\tcase syntax.OpEndText: // matches empty string at end of text\n\t\treturn separator{}\n\tcase syntax.OpWordBoundary: // matches word boundary `\\b`\n\t\treturn word(\"\")\n\tcase syntax.OpNoWordBoundary: // matches word non-boundary `\\B`\n\t\treturn word(\"\")\n\tcase syntax.OpCapture: // capturing subexpression with index Cap, optional name Name\n\t\treturn stripBare(re.Sub[0])\n\tcase syntax.OpStar: // matches Sub[0] zero or more times\n\t\treturn separator{}\n\tcase syntax.OpPlus: // matches Sub[0] one or more times\n\t\treturn concatenation{stripBare(re.Sub[0]), separator{}}\n\tcase syntax.OpQuest: // matches Sub[0] zero or one times\n\t\treturn orPart{stripBare(re.Sub[0]), word(\"\")}\n\tcase syntax.OpRepeat: // matches Sub[0] at least Min times, at most Max (Max == -1 is no limit)\n\t\ts := stripBare(re.Sub[0])\n\t\t// If the difference is more than 5 we're generating too many different combinations. Just treat it as a separator rather than generating all possibilities.\n\t\tif re.Max == -1 || re.Max-re.Min > 5 {\n\t\t\tvar ret concatenation\n\t\t\tfor i := 0; re.Min > i; i++ {\n\t\t\t\tret = append(ret, s)\n\t\t\t}\n\t\t\tif re.Min != re.Max {\n\t\t\t\tret = append(ret, separator{})\n\t\t\t}\n\t\t\treturn ret\n\t\t} else {\n\t\t\tvar ret orPart\n\t\t\tfor j := re.Min; re.Max >= j; j++ {\n\t\t\t\tvar c concatenation\n\t\t\t\tfor i := 0; j > i; i++ {\n\t\t\t\t\tc = append(c, s)\n\t\t\t\t}\n\t\t\t\tret = append(ret, c)\n\t\t\t}\n\t\t\treturn ret\n\t\t}\n\tcase syntax.OpConcat: // matches concatenation of Subs\n\t\tvar ret concatenation\n\t\tfor _, s := range re.Sub {\n\t\t\tret = append(ret, stripBare(s))\n\t\t}\n\t\treturn ret\n\tcase syntax.OpAlternate: // matches alternation of Subs\n\t\tvar ret orPart\n\t\tfor _, s := range re.Sub {\n\t\t\tret = append(ret, stripBare(s))\n\t\t}\n\t\treturn ret\n\tdefault:\n\t\tpanic(fmt.Errorf(\"unknown opcode %d\", re.Op))\n\t}\n}",
"func Part2(ctx context.Context, input string) (interface{}, error) {\n\tg1 := new4DSpace(strings.Split(input, \"\\n\"), \".\")\n\tg2 := new4DSpace(strings.Split(input, \"\\n\"), \".\")\n\n\tvar curr, next map[coordinate4D]string\n\tfor i := 0; i < 6; i++ {\n\t\tif i%2 == 0 {\n\t\t\tcurr = g1\n\t\t\tnext = g2\n\t\t} else {\n\t\t\tcurr = g2\n\t\t\tnext = g1\n\t\t}\n\n\t\tminX, maxX, minY, maxY, minZ, maxZ, minW, maxW := 0, 0, 0, 0, 0, 0, 0, 0\n\t\tfor c := range curr {\n\t\t\tif c.X < minX {\n\t\t\t\tminX = c.X\n\t\t\t}\n\t\t\tif c.X > maxX {\n\t\t\t\tmaxX = c.X\n\t\t\t}\n\n\t\t\tif c.Y < minY {\n\t\t\t\tminY = c.Y\n\t\t\t}\n\t\t\tif c.Y > maxY {\n\t\t\t\tmaxY = c.Y\n\t\t\t}\n\n\t\t\tif c.Z < minZ {\n\t\t\t\tminZ = c.Z\n\t\t\t}\n\t\t\tif c.Z > maxZ {\n\t\t\t\tmaxZ = c.Z\n\t\t\t}\n\n\t\t\tif c.W < minW {\n\t\t\t\tminW = c.W\n\t\t\t}\n\t\t\tif c.W > maxW {\n\t\t\t\tmaxW = c.W\n\t\t\t}\n\t\t}\n\t\tmaxX++\n\t\tmaxY++\n\t\tmaxZ++\n\t\tmaxW++\n\n\t\tfor x := minX - 1; x <= maxX; x++ {\n\t\t\tfor y := minY - 1; y <= maxY; y++ {\n\t\t\t\tfor z := minZ - 1; z <= maxZ; z++ {\n\t\t\t\t\tfor w := minW - 1; w <= maxW; w++ {\n\t\t\t\t\t\tc := coordinate4D{X: x, Y: y, Z: z, W: w}\n\t\t\t\t\t\tv := curr[c]\n\t\t\t\t\t\tif v == \"\" {\n\t\t\t\t\t\t\tv = \".\"\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tactiveCount := 0\n\t\t\t\t\t\tfor _, s := range c.Surrounding() {\n\t\t\t\t\t\t\tswitch curr[s] {\n\t\t\t\t\t\t\tcase \"#\":\n\t\t\t\t\t\t\t\tactiveCount++\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif v == \"#\" && (activeCount != 2 && activeCount != 3) {\n\t\t\t\t\t\t\tnext[c] = \".\"\n\t\t\t\t\t\t} else if v == \".\" && activeCount == 3 {\n\t\t\t\t\t\t\tnext[c] = \"#\"\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tnext[c] = v\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Count\n\tc := 0\n\tfor _, v := range next {\n\t\tif v == \"#\" {\n\t\t\tc++\n\t\t}\n\t}\n\n\treturn c, nil\n}",
"func OfGeomPoints(points ...geom.Point) Winding { return Order{}.OfGeomPoints(points...) }",
"func (o BoundingPolyOutput) Vertices() VertexArrayOutput {\n\treturn o.ApplyT(func(v BoundingPoly) []Vertex { return v.Vertices }).(VertexArrayOutput)\n}",
"func DrawElementsBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, basevertex int32) {\n C.glowDrawElementsBaseVertex(gpDrawElementsBaseVertex, (C.GLenum)(mode), (C.GLsizei)(count), (C.GLenum)(xtype), indices, (C.GLint)(basevertex))\n}",
"func SplitCompoundProperty(key string, value Property) ([]KeyValue, error) {\n\tfields := strings.Fields(value.String())\n\tswitch key {\n\tcase \"margins\":\n\t\treturn feazeCompound4(\"margin\", \"\", fourDirs, fields)\n\tcase \"padding\":\n\t\treturn feazeCompound4(\"padding\", \"\", fourDirs, fields)\n\tcase \"border-color\":\n\t\treturn feazeCompound4(\"border\", \"color\", fourDirs, fields)\n\tcase \"border-width\":\n\t\treturn feazeCompound4(\"border\", \"width\", fourDirs, fields)\n\tcase \"border-style\":\n\t\treturn feazeCompound4(\"border\", \"style\", fourDirs, fields)\n\tcase \"border-radius\":\n\t\treturn feazeCompound4(\"border\", \"style\", fourCorners, fields)\n\t}\n\treturn nil, fmt.Errorf(\"Not recognized as compound property: %s\", key)\n}",
"func eliminateHoles(data []float64, holeIndices []int, outerNode *node, dim int) *node {\n\tqueue := []*node{}\n\tvar start, end int\n\tvar list *node\n\tl := len(holeIndices)\n\tfor i := 0; i < l; i++ {\n\t\tstart = holeIndices[i] * dim\n\t\tif i < l-1 {\n\t\t\tend = holeIndices[i+1] * dim\n\t\t} else {\n\t\t\tend = len(data)\n\t\t}\n\t\tlist = linkedList(data, start, end, dim, false)\n\t\tif list == list.next {\n\t\t\tlist.steiner = true\n\t\t}\n\t\tqueue = append(queue, getLeftmost(list))\n\t}\n\n\tsort.Sort(sortableQueue(queue))\n\n\t// process holes from left to right\n\tfor i := 0; i < len(queue); i++ {\n\t\teliminateHole(queue[i], outerNode)\n\t\touterNode = filterPoints(outerNode, outerNode.next)\n\t}\n\n\treturn outerNode\n}",
"func treeTRI( triArray[] string, arrayPos int) int{\n\n//Function check to see if the there is any shapes in the in the listArray\n//if not it changes the structures as it moves to the end\n\tif listArray[0] != \"TRI\" { \n\t\t\tlistArray[1]= \"<inst>\"\n\t\t\tlistArray[0] = \"TRI\"\n\t\tarrayPos++\n\t\t\n// Called the function so it can be processed with the valid format \t\n\t\ttreeTRI(triArray[0:],arrayPos)\t\n\t}else{ if listArray[1] == \"\" || listArray[1] == \"<inst>\"{ // after transforming it is place in a format that can be parsed \n\t\t\tif triArray[arrayPos] == \"TRI\"{ // Ensure we are not Validating a Shape\n\t\t\t\tarrayPos++\n\t\t\t}\n\t\t\t\n\t\t\t// Retrieve the Coordinated from the array\n\t\t\t// Proceeding to the next value\n\t\t\tvar curCoord string=triArray[arrayPos]\n\t\t\tarrayPos++\n\t\t\tvar secCoord string=triArray[arrayPos]\n\t\t\tarrayPos++\n\t\t\tvar triCoord string=triArray[arrayPos]\n\t\t\t\n\t\t\t// Using Slices we get each Values \n\t\t\tx:=curCoord[0:1]\n\t\t\ty:=curCoord[1:2]\t\t\t\n\t\t\tyy:=secCoord[1:2]\n\t\t\txx:=secCoord[0:1]\t\t\t\n\t\t\txxx:=triCoord[0:1]\n\t\t\tyyy:=triCoord[1:2]\n\t\t\t\n\t\t\t//The Printing format for the lower part of the tree\n\t\t\tfmt.Printf(\"\\n |\\n\")\n\t\t\tfmt.Printf(\"TRI\\n/ \\\\\\n\")\n\t\t\tfmt.Printf(\"<coord>,<coord>,<coord>\\n\")\n\t\t\tfmt.Printf(\"<x><y>,<x><y>,<x><y>\\n\"+x+\" \"+y+\" \"+xx+\" \"+yy+\" \"+xxx+\" \"+yyy)\n\t\t\tlistArray[0] = \"<inst>\"\n\t\t\tlistArray[1] = \"<inst_list>\"\n\t\t\t\n\t\t\ttempCount=tempCount-1\n\t\t\t\tif(tempCount >= 0){\t\t\t\t\n\t\t\t\t\tlistArray[tempCount]=\"\"\t\t\t\t\t\n\t\t\t\t}\n\t\t\t\n\t\t}\n\t\t }\n\t\n\treturn arrayPos\n}",
"func VertexArrayVertexBuffers(vaobj uint32, first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tsyscall.Syscall6(gpVertexArrayVertexBuffers, 6, uintptr(vaobj), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(strides)))\n}",
"func OctahedronGeometryFromJSObject(p *js.Object) *OctahedronGeometry {\n\treturn &OctahedronGeometry{p: p}\n}",
"func VPRORD(ops ...operand.Op) { ctx.VPRORD(ops...) }",
"func VBLENDPD(i, mxy, xy, xy1 operand.Op) { ctx.VBLENDPD(i, mxy, xy, xy1) }",
"func (obj *Device) DrawIndexedPrimitive(\n\ttyp PRIMITIVETYPE,\n\tbaseVertexIndex int,\n\tminIndex uint,\n\tnumVertices uint,\n\tstartIndex uint,\n\tprimitiveCount uint,\n) Error {\n\tret, _, _ := syscall.Syscall9(\n\t\tobj.vtbl.DrawIndexedPrimitive,\n\t\t7,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(typ),\n\t\tuintptr(baseVertexIndex),\n\t\tuintptr(minIndex),\n\t\tuintptr(numVertices),\n\t\tuintptr(startIndex),\n\t\tuintptr(primitiveCount),\n\t\t0,\n\t\t0,\n\t)\n\treturn toErr(ret)\n}",
"func qr_decoder_get_coderegion_vertexes(p _QrDecoderHandle) *_CvPoint {\n\tv := C.qr_decoder_get_coderegion_vertexes(C.QrDecoderHandle(p))\n\treturn (*_CvPoint)(v)\n}",
"func ProvokingVertex(mode uint32) {\n\tC.glowProvokingVertex(gpProvokingVertex, (C.GLenum)(mode))\n}",
"func ProvokingVertex(mode uint32) {\n\tC.glowProvokingVertex(gpProvokingVertex, (C.GLenum)(mode))\n}",
"func (w *Walker) MultiPolygon(ctx context.Context) (mplyg geom.MultiPolygon) {\n\tif w == nil {\n\t\tif debug {\n\t\t\tlog.Printf(\"walker is nil.\")\n\t\t}\n\t\treturn mplyg\n\t}\n\tif w.edgeMap == nil {\n\t\tw.edgeMap = edgeMapFromTriangles(w.Triangles...)\n\t}\n\tseen := make(map[int]bool, len(w.Triangles))\n\tfor i := range w.Triangles {\n\t\tif ctx.Err() != nil {\n\t\t\treturn nil\n\t\t}\n\t\tif seen[i] {\n\t\t\tcontinue\n\t\t}\n\t\tseen[i] = true\n\t\tplyg := w.PolygonForTriangle(ctx, i, seen)\n\t\tif debug {\n\t\t\tlog.Printf(\" %v : got the following plyg\\n%v\\n\", i, wkt.MustEncode(geom.Polygon(plyg)))\n\t\t}\n\t\tif len(plyg) > 0 {\n\t\t\tmplyg = append(mplyg, plyg)\n\t\t}\n\t}\n\treturn geom.MultiPolygon(mplyg)\n}",
"func pgParseVector(a []byte) (out [][]byte) {\n\tvar j int\n\tfor i := 0; i < len(a); i++ {\n\t\tif a[i] == ' ' {\n\t\t\tout = append(out, a[j:i])\n\t\t\tj = i + 1\n\t\t}\n\t}\n\tif len(a) > 0 {\n\t\tout = append(out, a[j:]) // last\n\t}\n\treturn out\n}",
"func (mesh *PolyMesh) init() error {\n\n\tfor _, t := range mesh.Transform.Elems {\n\t\tmesh.transformSRT = append(mesh.transformSRT, m.TransformDecompMatrix4(t))\n\t}\n\n\tmesh.initTransformBounds()\n\n\tif mesh.PolyCount != nil {\n\t\tbasei := uint32(0)\n\t\tfor k := range mesh.PolyCount {\n\t\t\ti := uint32(0)\n\t\t\tfor j := 0; j < int(mesh.PolyCount[k]-2); j++ {\n\t\t\t\ti++\n\t\t\t\tmesh.idxp = append(mesh.idxp, uint32(mesh.FaceIdx[basei]))\n\t\t\t\tmesh.idxp = append(mesh.idxp, uint32(mesh.FaceIdx[basei+i]))\n\t\t\t\tmesh.idxp = append(mesh.idxp, uint32(mesh.FaceIdx[basei+i+1]))\n\n\t\t\t\tV0 := mesh.Verts.Elems[mesh.FaceIdx[basei]]\n\t\t\t\tV1 := mesh.Verts.Elems[mesh.FaceIdx[basei+i]]\n\t\t\t\tV2 := mesh.Verts.Elems[mesh.FaceIdx[basei+i+1]]\n\n\t\t\t\tif V0 == V1 && V0 == V2 {\n\t\t\t\t\t//log.Printf(\"nil triangle: %v %v %v %v %v\\n\", mesh.NodeName, mesh.FaceIdx[basei], V0, V1, V2)\n\t\t\t\t}\n\n\t\t\t\tif mesh.UV.Elems != nil {\n\t\t\t\t\tif mesh.UVIdx != nil { // if UVIdx doesn't exist assume same as FaceIdx\n\t\t\t\t\t\tmesh.uvtriidx = append(mesh.uvtriidx, uint32(mesh.UVIdx[basei]))\n\t\t\t\t\t\tmesh.uvtriidx = append(mesh.uvtriidx, uint32(mesh.UVIdx[basei+i]))\n\t\t\t\t\t\tmesh.uvtriidx = append(mesh.uvtriidx, uint32(mesh.UVIdx[basei+i+1]))\n\t\t\t\t\t} else {\n\t\t\t\t\t\tmesh.uvtriidx = append(mesh.uvtriidx, uint32(mesh.FaceIdx[basei]))\n\t\t\t\t\t\tmesh.uvtriidx = append(mesh.uvtriidx, uint32(mesh.FaceIdx[basei+i]))\n\t\t\t\t\t\tmesh.uvtriidx = append(mesh.uvtriidx, uint32(mesh.FaceIdx[basei+i+1]))\n\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif mesh.Normals.Elems != nil {\n\t\t\t\t\tif mesh.NormalIdx != nil { // if NormalIdx doesn't exist assume same as FaceIdx\n\t\t\t\t\t\tmesh.normalidx = append(mesh.normalidx, uint32(mesh.NormalIdx[basei]))\n\t\t\t\t\t\tmesh.normalidx = append(mesh.normalidx, uint32(mesh.NormalIdx[basei+i]))\n\t\t\t\t\t\tmesh.normalidx = append(mesh.normalidx, uint32(mesh.NormalIdx[basei+i+1]))\n\t\t\t\t\t} else {\n\t\t\t\t\t\tmesh.normalidx = append(mesh.normalidx, uint32(mesh.FaceIdx[basei]))\n\t\t\t\t\t\tmesh.normalidx = append(mesh.normalidx, uint32(mesh.FaceIdx[basei+i]))\n\t\t\t\t\t\tmesh.normalidx = append(mesh.normalidx, uint32(mesh.FaceIdx[basei+i+1]))\n\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif mesh.ShaderIdx != nil {\n\t\t\t\t\tmesh.shaderidx = append(mesh.shaderidx, uint8(mesh.ShaderIdx[k]))\n\t\t\t\t}\n\t\t\t}\n\t\t\tbasei += uint32(mesh.PolyCount[k])\n\t\t}\n\n\t} else {\n\t\t// Assume already a triangle mesh\n\t\tif mesh.FaceIdx != nil {\n\t\t\tfor j := range mesh.FaceIdx {\n\t\t\t\tmesh.idxp = append(mesh.idxp, uint32(mesh.FaceIdx[j]))\n\n\t\t\t\tif mesh.UV.Elems != nil {\n\t\t\t\t\tif mesh.UVIdx != nil {\n\t\t\t\t\t\tmesh.uvtriidx = append(mesh.uvtriidx, uint32(mesh.UVIdx[j]))\n\t\t\t\t\t} else {\n\t\t\t\t\t\tmesh.uvtriidx = append(mesh.uvtriidx, uint32(mesh.FaceIdx[j]))\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif mesh.Normals.Elems != nil {\n\t\t\t\t\tif mesh.NormalIdx != nil {\n\t\t\t\t\t\tmesh.normalidx = append(mesh.normalidx, uint32(mesh.NormalIdx[j]))\n\t\t\t\t\t} else {\n\t\t\t\t\t\tmesh.normalidx = append(mesh.normalidx, uint32(mesh.FaceIdx[j]))\n\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t}\n\t\t} else {\n\t\t\t// No indexes so assume the vertex array is simply the triangle verts\n\t\t\tfor j := 0; j < mesh.Verts.ElemsPerKey; j++ {\n\t\t\t\tmesh.idxp = append(mesh.idxp, uint32(j))\n\n\t\t\t\tif mesh.UV.Elems != nil {\n\t\t\t\t\tif mesh.UVIdx != nil {\n\t\t\t\t\t\tmesh.uvtriidx = append(mesh.uvtriidx, uint32(mesh.UVIdx[j]))\n\t\t\t\t\t} else 
{\n\t\t\t\t\t\tmesh.uvtriidx = append(mesh.uvtriidx, uint32(j))\n\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif mesh.Normals.Elems != nil {\n\t\t\t\t\tif mesh.NormalIdx != nil {\n\t\t\t\t\t\tmesh.normalidx = append(mesh.normalidx, uint32(mesh.NormalIdx[j]))\n\t\t\t\t\t} else {\n\t\t\t\t\t\tmesh.normalidx = append(mesh.normalidx, uint32(j))\n\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t}\n\t\t}\n\n\t\tfor _, idx := range mesh.ShaderIdx {\n\t\t\tmesh.shaderidx = append(mesh.shaderidx, uint8(idx))\n\t\t}\n\n\t}\n\n\tmesh.FaceIdx = nil\n\tmesh.PolyCount = nil\n\tmesh.UVIdx = nil\n\tmesh.NormalIdx = nil\n\tmesh.ShaderIdx = nil\n\n\treturn nil\n}",
"func VPCMPESTRI(i, mx, x operand.Op) { ctx.VPCMPESTRI(i, mx, x) }",
"func newVerticesFromLump(lump *wad.Lump) ([]utils.Vec2, error) {\n\tvar verts []utils.Vec2\n\tswitch string(lump.Data[0:4]) {\n\tcase glMagicV5:\n\t\tverts = readGLVertsV5(lump.Data[4:])\n\tdefault:\n\t\tverts = readNormalVerts(lump.Data)\n\t}\n\n\treturn verts, nil\n}",
"func differentDiagPrim(queens []core.VarId, store *core.Store) {\n\tfor i := 0; i < len(queens)-1; i++ {\n\t\tremaining := queens[i:]\n\t\tfor offset := 1; offset < len(remaining); offset++ {\n\t\t\tcheckOffset(remaining, store, offset)\n\t\t\tcheckOffset(remaining, store, -offset)\n\t\t}\n\t}\n}",
"func adjList(lines []string) [][]int {\n\n\tV, err = strconv.Atoi(lines[0])\n\tcheck(err)\n\n\tvertices := make([][]int, V)\n\tfor i := range vertices {\n\t\tvertices[i] = make([]int, 0, V)\n\t}\n\n\tfor count := 2; count < len(lines); count++ {\n\t\ts := strings.Split(lines[count], \",\")\n\t\tu, _ := strconv.Atoi(s[0])\n\t\tv, _ := strconv.Atoi(s[1])\n\n\t\tvertices[u-1] = append(vertices[u-1], v)\n\t\tvertices[v-1] = append(vertices[v-1], u)\n\t}\n\n\treturn vertices\n\n}",
"func adjList(lines []string) [][]int {\n\n\tV, err = strconv.Atoi(lines[0])\n\tcheck(err)\n\n\tvertices := make([][]int, V)\n\tfor i := range vertices {\n\t\tvertices[i] = make([]int, 0, V)\n\t}\n\n\tfor count := 2; count < len(lines); count++ {\n\t\ts := strings.Split(lines[count], \",\")\n\t\tu, _ := strconv.Atoi(s[0])\n\t\tv, _ := strconv.Atoi(s[1])\n\n\t\tvertices[u-1] = append(vertices[u-1], v)\n\t\tvertices[v-1] = append(vertices[v-1], u)\n\t}\n\n\treturn vertices\n\n}",
"func VertexArrayBindingDivisor(vaobj uint32, bindingindex uint32, divisor uint32) {\n\tC.glowVertexArrayBindingDivisor(gpVertexArrayBindingDivisor, (C.GLuint)(vaobj), (C.GLuint)(bindingindex), (C.GLuint)(divisor))\n}",
"func VertexArrayBindingDivisor(vaobj uint32, bindingindex uint32, divisor uint32) {\n\tC.glowVertexArrayBindingDivisor(gpVertexArrayBindingDivisor, (C.GLuint)(vaobj), (C.GLuint)(bindingindex), (C.GLuint)(divisor))\n}",
"func reverse(curve *privCurve) {\n\tm := len(curve.segm)\n\tfor i, j := 0, m-1; i < j; i, j = i+1, j-1 {\n\t\tcurve.segm[i].vertex, curve.segm[j].vertex = curve.segm[j].vertex, curve.segm[i].vertex\n\t}\n}"
] | [
"0.5119342",
"0.5051116",
"0.49833047",
"0.4895917",
"0.48857006",
"0.48405302",
"0.48386937",
"0.48378637",
"0.4825112",
"0.47670814",
"0.47629687",
"0.47581664",
"0.47426316",
"0.47190487",
"0.46949178",
"0.46714163",
"0.4608143",
"0.45868334",
"0.45825988",
"0.4580345",
"0.45790306",
"0.45189846",
"0.45042878",
"0.4488546",
"0.44696677",
"0.44678754",
"0.44642594",
"0.44558752",
"0.44249168",
"0.4423674",
"0.4423427",
"0.44161797",
"0.440032",
"0.4388986",
"0.43783763",
"0.436614",
"0.43277818",
"0.43125686",
"0.43115097",
"0.43115097",
"0.4300676",
"0.4299685",
"0.42949733",
"0.42895225",
"0.42677376",
"0.42662454",
"0.42637253",
"0.42500037",
"0.42461255",
"0.42365408",
"0.42267457",
"0.42190915",
"0.42161742",
"0.42119125",
"0.4207819",
"0.42015544",
"0.41831118",
"0.41743648",
"0.41712174",
"0.4168913",
"0.41661224",
"0.4162711",
"0.41567725",
"0.41550857",
"0.41458434",
"0.41416115",
"0.41340894",
"0.4132772",
"0.41242322",
"0.41126695",
"0.41092092",
"0.41082853",
"0.41058773",
"0.4090277",
"0.4088471",
"0.4087808",
"0.40876642",
"0.40846026",
"0.40746328",
"0.40703434",
"0.40683582",
"0.4065708",
"0.4061677",
"0.40592632",
"0.40580696",
"0.4048274",
"0.4042584",
"0.4038232",
"0.40375996",
"0.40375996",
"0.40367654",
"0.40365177",
"0.40361148",
"0.40358573",
"0.40299737",
"0.4023574",
"0.40136698",
"0.40136698",
"0.40121773",
"0.40121773",
"0.40025753"
] | 0.0 | -1 |
delimit the boundaries of a query object | func BeginQuery(target uint32, id uint32) {
C.glowBeginQuery(gpBeginQuery, (C.GLenum)(target), (C.GLuint)(id))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func RangeQuery(in Query, r types.Request) (out Query) {\n\tout = in.Skip(r.Start)\n\tout = out.Limit(r.Length)\n\treturn\n}",
"func (g *GraphiteProvider) trimQuery(query string) string {\n\tspace := regexp.MustCompile(`\\s+`)\n\treturn space.ReplaceAllString(query, \" \")\n}",
"func (query *Query) CleanOffset() *Query {\n\treturn query.clean(OFFSET)\n}",
"func (query *Query) CleanLimit() *Query {\n\treturn query.clean(LIMIT)\n}",
"func (p *QL) QueryRange(w http.ResponseWriter, r *http.Request) {\n\n\ttoken := core.RetrieveToken(r)\n\tif len(token) == 0 {\n\t\trespondWithError(w, errors.New(\"Not authorized, please provide a READ token\"), http.StatusForbidden)\n\t\treturn\n\t}\n\n\tcontext := Context{}\n\tvar err error\n\n\tcontext.Start, err = core.ParsePromTime(r.FormValue(\"start\"))\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"error\": err.Error(),\n\t\t\t\"proto\": \"promql\",\n\t\t\t\"entity\": \"start\",\n\t\t\t\"context\": fmt.Sprintf(\"%+v\", context),\n\t\t}).Error(\"Unprocessable entity\")\n\t\trespondWithError(w, errors.New(\"Unprocessable Entity: start\"), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tcontext.End, err = core.ParsePromTime(r.FormValue(\"end\"))\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"error\": err.Error(),\n\t\t\t\"entity\": \"end\",\n\t\t\t\"proto\": \"promql\",\n\t\t\t\"context\": fmt.Sprintf(\"%+v\", context),\n\t\t}).Error(\"Unprocessable entity\")\n\t\trespondWithError(w, errors.New(\"Unprocessable Entity: start\"), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif context.End.Before(context.Start) {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"error\": errors.New(\"end is before start\"),\n\t\t\t\"context\": fmt.Sprintf(\"%+v\", context),\n\t\t\t\"proto\": \"promql\",\n\t\t\t\"entity\": \"start\",\n\t\t}).Error(\"Unprocessable entity\")\n\t\trespondWithError(w, errors.New(\"End is before start\"), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tcontext.Step, err = core.ParsePromDuration(r.FormValue(\"step\"))\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"error\": err.Error(),\n\t\t\t\"entity\": \"step\",\n\t\t\t\"proto\": \"promql\",\n\t\t\t\"context\": fmt.Sprintf(\"%+v\", context),\n\t\t}).Error(\"Unprocessable entity\")\n\t\trespondWithError(w, errors.New(\"Unprocessable Entity: step\"), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif context.Step == \"0 s\" {\n\t\trespondWithError(w, errors.New(\"zero or negative query resolution step widths are not accepted. 
Try a positive integer\"), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif context.Step == \"\" {\n\t\tcontext.Step = \"5 m\"\n\t}\n\n\tcontext.Query = r.FormValue(\"query\")\n\n\tlog.WithFields(log.Fields{\n\t\t\"query\": context.Query,\n\t\t\"proto\": \"promql\",\n\t\t\"context\": fmt.Sprintf(\"%+v\", context),\n\t}).Debug(\"Evaluating query\")\n\n\tcontext.Expr, err = promql.ParseExpr(context.Query)\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"query\": context.Query,\n\t\t\t\"proto\": \"promql\",\n\t\t\t\"context\": fmt.Sprintf(\"%+v\", context),\n\t\t\t\"err\": err,\n\t\t}).Debug(\"Bad query\")\n\t\trespondWithError(w, err, http.StatusUnprocessableEntity)\n\t\treturn\n\t}\n\n\tlog.WithFields(log.Fields{\n\t\t\"query\": context.Query,\n\t\t\"proto\": \"promql\",\n\t\t\"context\": fmt.Sprintf(\"%+v\", context),\n\t}).Debug(\"Query is OK\")\n\n\tevaluator := evaluator{}\n\ttree := evaluator.GenerateQueryTree(context)\n\n\tmc2 := tree.ToWarpScriptWithTime(token, context.Query, context.Step, context.Start, context.End)\n\tmc2 += \"\\n[ SWAP mapper.tostring 0 0 0 ] MAP\\n\"\n\n\tlog.WithFields(log.Fields{\n\t\t\"query\": context.Query,\n\t\t\"source\": r.RemoteAddr,\n\t\t\"proto\": \"promql\",\n\t\t\"method\": r.Method,\n\t\t\"path\": r.URL.String(),\n\t}).Debug(\"PromQL query\")\n\n\twarpServer := core.NewWarpServer(viper.GetString(\"warp_endpoint\"), \"prometheus-query-range\")\n\tresponse, err := warpServer.Query(mc2, w.Header().Get(middlewares.TxnHeader))\n\tif err != nil {\n\t\twErr := response.Header.Get(\"X-Warp10-Error-Message\")\n\t\tif wErr == \"\" {\n\t\t\tdump, err := httputil.DumpResponse(response, true)\n\t\t\tif err == nil {\n\t\t\t\twErr = string(dump)\n\t\t\t} else {\n\t\t\t\twErr = \"Unparsable error\"\n\t\t\t}\n\t\t}\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"error\": fmt.Errorf(wErr),\n\t\t\t\"proto\": \"promql\",\n\t\t}).Error(\"Bad response from Egress: \" + err.Error())\n\t\trespondWithError(w, fmt.Errorf(wErr), http.StatusServiceUnavailable)\n\t\treturn\n\t}\n\tbuffer, err := ioutil.ReadAll(response.Body)\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"error\": err.Error(),\n\t\t\t\"proto\": \"promql\",\n\t\t}).Error(\"can't fully read Egress response\")\n\t\trespondWithError(w, err, http.StatusServiceUnavailable)\n\t\treturn\n\t}\n\n\t// HACK : replace Infinity values from Warp to Inf\n\ts := strings.Replace(string(buffer), \"Infinity\", \"+Inf\", -1)\n\ts = strings.Replace(s, \"-+Inf\", \"-Inf\", -1)\n\tbuffer = []byte(s)\n\n\tresponses := [][]core.GeoTimeSeries{}\n\terr = json.Unmarshal(buffer, &responses)\n\tif err != nil {\n\t\twErr := response.Header.Get(\"X-Warp10-Error-Message\")\n\t\tif wErr == \"\" {\n\t\t\tdump, err := httputil.DumpResponse(response, true)\n\t\t\tif err == nil {\n\t\t\t\twErr = string(dump)\n\t\t\t} else {\n\t\t\t\twErr = \"Unparsable error\"\n\t\t\t}\n\t\t}\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"error\": fmt.Errorf(wErr),\n\t\t\t\"proto\": \"promql\",\n\t\t}).Error(\"Cannot unmarshal egress response: \" + err.Error())\n\t\trespondWithError(w, fmt.Errorf(wErr), http.StatusServiceUnavailable)\n\t\treturn\n\t}\n\t// Since it's a range_query, we can enforce the matrix resultType\n\tprometheusResponse, err := warpToPrometheusResponseRange(responses[0], model.ValMatrix.String())\n\tif err != nil {\n\t\tw.Write([]byte(err.Error()))\n\t\trespondWithError(w, err, http.StatusServiceUnavailable)\n\t}\n\trespond(w, prometheusResponse)\n}",
"func (db *DB) CompactRange(r Range) {\n\tif db.closed {\n\t\tpanic(ErrDBClosed)\n\t}\n\n\tvar start, limit *C.char\n\tif len(r.Start) != 0 {\n\t\tstart = (*C.char)(unsafe.Pointer(&r.Start[0]))\n\t}\n\tif len(r.Limit) != 0 {\n\t\tlimit = (*C.char)(unsafe.Pointer(&r.Limit[0]))\n\t}\n\tC.leveldb_compact_range(\n\t\tdb.Ldb, start, C.size_t(len(r.Start)), limit, C.size_t(len(r.Limit)))\n}",
"func rangeQuery(begin, end string) string {\n\tresult := allQuery\n\n\thasBegin := begin != \"\"\n\thasEnd := end != \"\"\n\n\tconst dbDate string = `strftime('%Y%m%d', entered, 'unixepoch', 'start of day')`\n\n\tif hasBegin || hasEnd {\n\t\tresult = result + `WHERE `\n\t\tif hasBegin {\n\t\t\tresult = result + dbDate + ` >= '` + begin + `' `\n\t\t\tif hasEnd {\n\t\t\t\tresult = result + `AND `\n\t\t\t}\n\t\t}\n\t\tif hasEnd {\n\t\t\tresult = result + dbDate + ` <= '` + end + `'`\n\t\t}\n\t}\n\treturn result\n}",
"func (q *ParsedRawQuery) FilterStartIndex() int { return 2 }",
"func processAndWrapQuery(jsonQueryMap map[string]interface{}) {\n\tfor jsonKey, jsonValue := range jsonQueryMap {\n\t\twraperField := processField(jsonKey)\n\t\tdelete(jsonQueryMap, jsonKey)\n\t\twraperValue := processValue(jsonValue)\n\t\tjsonQueryMap[wraperField] = wraperValue\n\t}\n}",
"func (w *Wrapper) buildQuery() {\n\tw.query += w.buildDuplicate()\n\tw.query += w.buildJoin()\n\tw.query += w.buildWhere(\"WHERE\")\n\tw.query += w.buildWhere(\"HAVING\")\n\tw.query += w.buildOrderBy()\n\tw.query += w.buildGroupBy()\n\tw.query += w.buildLimit()\n\n\t_, afterOptions := w.buildQueryOptions()\n\tw.query += afterOptions\n\tw.query = strings.TrimSpace(w.query)\n}",
"func combineRestrictions(fieldsMap map[string]string, rests string) (string, error) {\n\tif rests == \"\" {\n\t\treturn \"\", nil\n\t}\n\trestsArray := strings.Split(rests, \",\")\n\trestsBlock := \"\"\n\n\t// field\n\tfield := restsArray[0]\n\tif field != \"\" {\n\t\tf := fieldsMap[field]\n\t\tif f == \"\" {\n\t\t\treturn \"\", newError(\"Unexpected selection order field - \" + field)\n\t\t}\n\t\trestsBlock = \"order by q.\" + f + \" \"\n\t}\n\n\t// order\n\torder := restsArray[1]\n\tif order != \"\" {\n\t\tif order != \"asc\" && order != \"desc\" {\n\t\t\treturn \"\", newError(\"Unexpected selection order - \" + order)\n\t\t}\n\n\t\tif restsBlock == \"\" {\n\t\t\trestsBlock = \"order by q.ID \" + order\n\t\t} else {\n\t\t\trestsBlock = restsBlock + order\n\t\t}\n\t}\n\n\t// limit\n\tlimit := restsArray[2]\n\tif limit != \"\" {\n\t\t_, err := strconv.Atoi(limit)\n\t\tif err != nil {\n\t\t\treturn \"\", newError(\"Unexpected selection limit - \" + limit)\n\t\t}\n\n\t\tif restsBlock == \"\" {\n\t\t\trestsBlock = \"limit \" + limit\n\t\t} else {\n\t\t\trestsBlock = restsBlock + \" limit \" + limit\n\t\t}\n\t}\n\n\t// offset\n\toffset := restsArray[3]\n\tif offset != \"\" {\n\t\t_, err := strconv.Atoi(offset)\n\t\tif err != nil {\n\t\t\treturn \"\", newError(\"Unexpected selection offset - \" + offset)\n\t\t}\n\n\t\tif restsBlock == \"\" {\n\t\t\trestsBlock = \"offset \" + offset\n\t\t} else {\n\t\t\trestsBlock = restsBlock + \" offset \" + offset\n\t\t}\n\t}\n\n\treturn restsBlock, nil\n}",
"func (q *Query) Range(from, to time.Time) *Query {\n\tq.from, q.to = from, to\n\treturn q\n}",
"func (query *Query) CleanQuery() *Query {\n\tquery.content = make(map[string]interface{})\n\treturn query\n}",
"func setLimitAndOffset(params dragonfruit.QueryParams) (limit int,\n\toffset int) {\n\n\tlimit, offset = 10, 0\n\n\tl := params.QueryParams.Get(\"limit\")\n\n\tif l != \"\" {\n\t\tswitch l := l.(type) {\n\t\tcase int64:\n\t\t\tlimit = int(l)\n\t\tcase int:\n\t\t\tlimit = l\n\t\t}\n\n\t\tparams.QueryParams.Del(\"limit\")\n\t}\n\n\to := params.QueryParams.Get(\"offset\")\n\tif o != \"\" {\n\t\tswitch o := o.(type) {\n\t\tcase int64:\n\t\t\toffset = int(o)\n\t\tcase int:\n\t\t\toffset = o\n\t\t}\n\t\tparams.QueryParams.Del(\"offset\")\n\t}\n\n\treturn\n}",
"func queryEnc(lowerBound uint32, upperBound uint32) {\n\tqueryRangeEnc(lowerBound, true)\n\tqueryRangeEnc(upperBound, false)\n}",
"func (q *Query) parseLimit(result []int) []int {\n\tif q.limit[1] > 0 {\n\t\tlength := len(result)\n\t\tif length <= q.limit[0] {\n\t\t\treturn nil\n\t\t}\n\t\tif length <= q.limit[1] {\n\t\t\tq.limit[1] = length\n\t\t}\n\t\treturn result[q.limit[0]:q.limit[1]]\n\t}\n\treturn result\n}",
"func adjustQueryTimes(from time.Time, to time.Time, interval Interval) (time.Time, time.Time, error) {\n\tfromRounded := roundupTimeForInterval(from, interval)\n\ttoRounded := roundupTimeForInterval(to, interval)\n\n\t// Data is available from the OPEN API for 90 days\n\tninetyDaysAgo := roundupTimeForInterval(time.Now().Add(-NINETY_DAYS), interval)\n\n\t// Is the 'to' (end) time before data is available? If so, that's an error.\n\tif timeBeforeOldestData(toRounded, ninetyDaysAgo) {\n\t\terr := errors.New(\"Time range is before available data\")\n\t\tlog.DefaultLogger.Info(\"adjustQueryTimes\", \"err\", err)\n\t\treturn fromRounded, toRounded, err\n\t}\n\n\t// Limit the 'from' (start) time to when the oldest data is available.\n\tfromLimited := limitTimeToOldestData(fromRounded, ninetyDaysAgo)\n\n\t// Returned the fixed 'to' and 'from' times.\n\treturn fromLimited, toRounded, nil\n}",
"func (rawQuery *SearchRawQuery) ToSQLQuery(namespace string) *SearchSQLQuery {\n\tvar q string\n\tvar args []interface{}\n\n\tswitch namespace {\n\tcase SearchNamespaceAccounts:\n\t\tq = \"SELECT id, balance, data FROM current_balances\"\n\tcase SearchNamespaceTransactions:\n\t\tq = `SELECT id, timestamp, data,\n\t\t\t\t\tarray_to_json(ARRAY(\n\t\t\t\t\t\tSELECT lines.account_id FROM lines\n\t\t\t\t\t\t\tWHERE transaction_id=transactions.id\n\t\t\t\t\t\t\tORDER BY lines.account_id\n\t\t\t\t\t)) AS account_array,\n\t\t\t\t\tarray_to_json(ARRAY(\n\t\t\t\t\t\tSELECT lines.delta FROM lines\n\t\t\t\t\t\t\tWHERE transaction_id=transactions.id\n\t\t\t\t\t\t\tORDER BY lines.account_id\n\t\t\t\t\t)) AS delta_array\n\t\t\tFROM transactions`\n\tdefault:\n\t\treturn nil\n\t}\n\n\t// Process must queries\n\tvar mustWhere []string\n\tmustClause := rawQuery.Query.MustClause\n\tfieldsWhere, fieldsArgs := convertFieldsToSQL(mustClause.Fields)\n\tmustWhere = append(mustWhere, fieldsWhere...)\n\targs = append(args, fieldsArgs...)\n\n\ttermsWhere, termsArgs := convertTermsToSQL(mustClause.Terms)\n\tmustWhere = append(mustWhere, termsWhere...)\n\targs = append(args, termsArgs...)\n\n\trangesWhere, rangesArgs := convertRangesToSQL(mustClause.RangeItems)\n\tmustWhere = append(mustWhere, rangesWhere...)\n\targs = append(args, rangesArgs...)\n\n\t// Process should queries\n\tvar shouldWhere []string\n\tshouldClause := rawQuery.Query.ShouldClause\n\tfieldsWhere, fieldsArgs = convertFieldsToSQL(shouldClause.Fields)\n\tshouldWhere = append(shouldWhere, fieldsWhere...)\n\targs = append(args, fieldsArgs...)\n\n\ttermsWhere, termsArgs = convertTermsToSQL(shouldClause.Terms)\n\tshouldWhere = append(shouldWhere, termsWhere...)\n\targs = append(args, termsArgs...)\n\n\trangesWhere, rangesArgs = convertRangesToSQL(shouldClause.RangeItems)\n\tshouldWhere = append(shouldWhere, rangesWhere...)\n\targs = append(args, rangesArgs...)\n\n\tvar offset = rawQuery.Offset\n\tvar limit = rawQuery.Limit\n\n\tif len(mustWhere) == 0 && len(shouldWhere) == 0 {\n\t\treturn &SearchSQLQuery{sql: q, args: args}\n\t}\n\n\tq += \" WHERE \"\n\tif len(mustWhere) != 0 {\n\t\tq += \"(\" + strings.Join(mustWhere, \" AND \") + \")\"\n\t\tif len(shouldWhere) != 0 {\n\t\t\tq += \" AND \"\n\t\t}\n\t}\n\n\tif len(shouldWhere) != 0 {\n\t\tq += \"(\" + strings.Join(shouldWhere, \" OR \") + \")\"\n\t}\n\n\tif namespace == SearchNamespaceTransactions {\n\t\tif rawQuery.SortTime == SortDescByTime {\n\t\t\tq += \" ORDER BY timestamp DESC\"\n\t\t} else {\n\t\t\tq += \" ORDER BY timestamp\"\n\t\t}\n\t}\n\n\tif offset > 0 {\n\t\tq += \" OFFSET \" + strconv.Itoa(offset) + \" \"\n\t}\n\tif limit > 0 {\n\t\tq += \" LIMIT \" + strconv.Itoa(limit)\n\t}\n\n\tq = enumerateSQLPlacholder(q)\n\treturn &SearchSQLQuery{sql: q, args: args}\n}",
"func (oupq *OrgUnitPositionQuery) Limit(limit int) *OrgUnitPositionQuery {\n\toupq.limit = &limit\n\treturn oupq\n}",
"func (s *BaseQueryListener) ExitBracketQueryCriterias(ctx *BracketQueryCriteriasContext) {}",
"func (t *FenwickTreeSimple) QueryRange(i, j int) int {\n\treturn t.Query(j) - t.Query(i-1)\n}",
"func CleanQuery(query string) string {\n\tret := strings.Replace(query, \"\\n\", \"\", -1)\n\tret = reRemoveExtraSpace.ReplaceAllString(ret, \" \")\n\treturn ret\n}",
"func (w *Wrapper) buildLimit() (query string) {\n\tswitch len(w.limit) {\n\tcase 0:\n\t\treturn\n\tcase 1:\n\t\tquery = fmt.Sprintf(\"LIMIT %d \", w.limit[0])\n\tcase 2:\n\t\tquery = fmt.Sprintf(\"LIMIT %d, %d \", w.limit[0], w.limit[1])\n\t}\n\treturn\n}",
"func (q *Query) Range(indexName string, start, end interface{}) *Query {\n\t// For an index range search,\n\t// it is non-sensical to pass two nils\n\t// Set the error and return the query unchanged\n\tif start == nil && end == nil {\n\t\tq.err = errors.New(ErrNilInputsRangeIndexQuery)\n\t\treturn q\n\t}\n\tq.start = start\n\tq.end = end\n\tq.isIndexQuery = true\n\tq.indexName = []byte(indexName)\n\treturn q\n}",
"func (s *BasePlSqlParserListener) ExitBounds_clause(ctx *Bounds_clauseContext) {}",
"func queryRangeEnc(bound uint32, isLower bool) {\n\tvar (\n\t\tres QueryRangeCipher\n\t\toperator string\n\t\tprefix int64\n\t)\n\n\t// get the operator\n\tif isLower == true { // if bound is the lower bound\n\t\toperator = \">\"\n\t} else { // if bound is the upper bound\n\t\toperator = \"<\"\n\t}\n\n\tboundStr := strconv.FormatInt(int64(bound), 2) // calculate the binary value\n\tboundStr = fmt.Sprintf(\"%032s\", boundStr) //pad into 32 bits\n\n\tfor i := 0; i < 32/blockSize; i++ {\n\t\tblock, _ := strconv.ParseInt(boundStr[i*blockSize:i*blockSize+blockSize], 2, 0) // the block contains blockSize bits\n\t\tif i == 0 { // the first block (no prefix)\n\t\t\tprefix = -1\n\t\t} else { // other (has prefix)\n\t\t\tprefix, _ = strconv.ParseInt(boundStr[0:i*blockSize], 2, 0)\n\t\t}\n\t\tblockStr := strconv.FormatInt(block, 10) + operator\n\t\tres.blockCipher[i] = queryBlockEnc(blockStr, prefix, i)\n\t}\n\n\tif isLower == true { // if bound is the lower bound\n\t\tqueryCipher.lower = res\n\t} else { // if bound is the upper bound\n\t\tqueryCipher.upper = res\n\t}\n}",
"func splitRange(db *client.DB, key proto.Key) error {\n\treturn db.AdminSplit(key)\n}",
"func (t *serial) Query(from, to int) []Interval {\n\tresult := make([]Interval, 0, 10)\n\tfor _, intrvl := range t.base {\n\t\tif !intrvl.Segment.Disjoint(from, to) {\n\t\t\tresult = append(result, intrvl)\n\t\t}\n\t}\n\treturn result\n}",
"func (rrq *ReserveRoomQuery) Limit(limit int) *ReserveRoomQuery {\n\trrq.limit = &limit\n\treturn rrq\n}",
"func (statement *Statement) Limit(limit int, start ...int) *Statement {\n\tstatement.LimitN = &limit\n\tif len(start) > 0 {\n\t\tstatement.Start = start[0]\n\t}\n\treturn statement\n}",
"func (q *Query) Offset(offset, limit int) *Query {\n\tif limit < 1 || offset < 0 {\n\t\tlogrus.Warn(\"illegal offset or limit: \", offset, \", \", limit)\n\t\treturn q\n\t}\n\tq.offset = offset\n\tq.limit = limit\n\treturn q\n}",
"func (s *BasePlSqlParserListener) EnterBounds_clause(ctx *Bounds_clauseContext) {}",
"func (w *Wrapper) buildQueryOptions() (before string, after string) {\n\tfor _, v := range w.queryOptions {\n\t\tswitch v {\n\t\tcase \"ALL\", \"DISTINCT\", \"SQL_CACHE\", \"SQL_NO_CACHE\", \"DISTINCTROW\", \"HIGH_PRIORITY\", \"STRAIGHT_JOIN\", \"SQL_SMALL_RESULT\", \"SQL_BIG_RESULT\", \"SQL_BUFFER_RESULT\", \"SQL_CALC_FOUND_ROWS\", \"LOW_PRIORITY\", \"QUICK\", \"IGNORE\", \"DELAYED\":\n\t\t\tbefore += fmt.Sprintf(\"%s, \", v)\n\t\tcase \"FOR UPDATE\", \"LOCK IN SHARE MODE\":\n\t\t\tafter += fmt.Sprintf(\"%s, \", v)\n\t\t}\n\t}\n\tif before != \"\" {\n\t\tbefore = fmt.Sprintf(\"%s \", trim(before))\n\t}\n\tif after != \"\" {\n\t\tafter = fmt.Sprintf(\"%s \", trim(after))\n\t}\n\treturn\n}",
"func DeSerializeQuery(bytes []byte) Query {\n if len(bytes) != 32 {\n fmt.Println(\"Error : bytes length is not 32. Its \", len(bytes))\n }\n\n return Query {\n action : bytes[0],\n empty : 0,\n replyIp : binary.BigEndian.Uint32(bytes[2:6]),\n replyPort : binary.BigEndian.Uint16(bytes[6:8]),\n key : binary.BigEndian.Uint64(bytes[8:16]),\n value : binary.BigEndian.Uint64(bytes[16:24]),\n timeToLive: binary.BigEndian.Uint32(bytes[24:28]),\n requestId : binary.BigEndian.Uint32(bytes[28:32]),\n }\n}",
"func (x *fastReflection_QueryParamsRequest) Range(f func(protoreflect.FieldDescriptor, protoreflect.Value) bool) {\n}",
"func (p *Patch) Filter(q Query) (as []Address) {\n\tfor a := Address(1); a <= MaxAddress; a++ {\n\t\tif p.matchesAddress(q, a) {\n\t\t\tas = append(as, a)\n\t\t}\n\t}\n\treturn\n}",
"func queryBuilder(jsonq *gojsonq.JSONQ, query, op, value string) {\n\tjsonq.Where(query, typeToOp(\"string\", op), value)\n\tnewOp := typeToOp(\"notString\", op)\n\tif v, err := strconv.ParseInt(value, 10, 64); err == nil {\n\t\tjsonq.OrWhere(query, newOp, v)\n\t}\n\tif v, err := strconv.ParseFloat(value, 64); err == nil {\n\t\tjsonq.OrWhere(query, newOp, v)\n\t}\n\tif v, err := strconv.ParseBool(value); err == nil {\n\t\tjsonq.OrWhere(query, newOp, v)\n\t}\n\tjsonq.More()\n}",
"func (q *Query) buildQuery() (qry string) {\n\tvar queryBuilder string\n\n\tqueryBuilder = strings.TrimLeft(q.Qry, \" \")\n\tqueryBuilder = strings.TrimRight(queryBuilder, \" \")\n\n\tarray := strings.Split(queryBuilder, \" \")\n\n\tfor i, value := range array {\n\t\tif i == 0 {\n\t\t\tqueryBuilder = value\n\t\t} else {\n\t\t\tqueryBuilder += \" & \" + value\n\t\t}\n\t}\n\n\treturn queryBuilder\n}",
"func LimitQueryset(db *gorm.DB, p paginator.Paginator) *gorm.DB {\n\tlimit, offset := p.GetLimitOffset()\n\treturn db.Limit(limit).Offset(offset)\n}",
"func (lq *LocationQuery) Limit(limit int) *LocationQuery {\n\tlq.limit = &limit\n\treturn lq\n}",
"func trimToQuery(steps []Step, runQuery string, targets []Target) ([]Step, []TargetStatus) {\n\trunQueryParts := strings.Split(runQuery, \"::\")\n\tif len(runQueryParts) != 2 {\n\t\terr := fmt.Errorf(errorRunQueryArgument)\n\t\treturn nil, makeTargetStatuses(err, targets)\n\t}\n\n\tvar stepName, queryName string = runQueryParts[0], runQueryParts[1]\n\tif stepName == \"\" || queryName == \"\" {\n\t\terr := fmt.Errorf(errorRunQueryArgument)\n\t\treturn nil, makeTargetStatuses(err, targets)\n\t}\n\n\tsteps, trimErr := trimSteps(steps, stepName, targets)\n\tif trimErr != nil {\n\t\treturn nil, trimErr\n\t}\n\n\tstep := steps[0] // safe\n\tqueries := []Query{}\n\tfor _, query := range step.Queries {\n\t\tif query.Name == queryName {\n\t\t\tqueries = append(queries, query)\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif len(queries) == 0 {\n\t\terr := fmt.Errorf(\"%s: '%s'\", errorRunQueryNotFound, queryName)\n\t\treturn nil, makeTargetStatuses(err, targets)\n\t}\n\tstep.Queries = queries\n\n\treturn []Step{step}, nil\n}",
"func (wq *WordQuery) Limit(limit int) *WordQuery {\n\twq.limit = &limit\n\treturn wq\n}",
"func (this *DtNavMeshQuery) queryPolygonsInTile(tile *DtMeshTile, qmin, qmax []float32,\n\tfilter *DtQueryFilter, query DtPolyQuery) {\n\tDtAssert(this.m_nav != nil)\n\tconst batchSize int = 32\n\tvar polyRefs [batchSize]DtPolyRef\n\tvar polys [batchSize]*DtPoly\n\tn := 0\n\n\tif tile.BvTree != nil {\n\t\tnodeIndex := 0\n\t\tendIndex := int(tile.Header.BvNodeCount)\n\t\ttbmin := tile.Header.Bmin[:]\n\t\ttbmax := tile.Header.Bmax[:]\n\t\tqfac := tile.Header.BvQuantFactor\n\n\t\t// Calculate quantized box\n\t\tvar bmin, bmax [3]uint16\n\t\t// dtClamp query box to world box.\n\t\tminx := DtClampFloat32(qmin[0], tbmin[0], tbmax[0]) - tbmin[0]\n\t\tminy := DtClampFloat32(qmin[1], tbmin[1], tbmax[1]) - tbmin[1]\n\t\tminz := DtClampFloat32(qmin[2], tbmin[2], tbmax[2]) - tbmin[2]\n\t\tmaxx := DtClampFloat32(qmax[0], tbmin[0], tbmax[0]) - tbmin[0]\n\t\tmaxy := DtClampFloat32(qmax[1], tbmin[1], tbmax[1]) - tbmin[1]\n\t\tmaxz := DtClampFloat32(qmax[2], tbmin[2], tbmax[2]) - tbmin[2]\n\t\t// Quantize\n\t\tbmin[0] = (uint16)(qfac*minx) & 0xfffe\n\t\tbmin[1] = (uint16)(qfac*miny) & 0xfffe\n\t\tbmin[2] = (uint16)(qfac*minz) & 0xfffe\n\t\tbmax[0] = (uint16)(qfac*maxx+1) | 1\n\t\tbmax[1] = (uint16)(qfac*maxy+1) | 1\n\t\tbmax[2] = (uint16)(qfac*maxz+1) | 1\n\n\t\t// Traverse tree\n\t\tbase := this.m_nav.GetPolyRefBase(tile)\n\t\tfor nodeIndex < endIndex {\n\t\t\tnode := &tile.BvTree[nodeIndex]\n\t\t\toverlap := DtOverlapQuantBounds(bmin[:], bmax[:], node.Bmin[:], node.Bmax[:])\n\t\t\tisLeafNode := (node.I >= 0)\n\n\t\t\tif isLeafNode && overlap {\n\t\t\t\tref := base | (DtPolyRef)(node.I)\n\t\t\t\tif filter.PassFilter(ref, tile, &tile.Polys[node.I]) {\n\t\t\t\t\tpolyRefs[n] = ref\n\t\t\t\t\tpolys[n] = &tile.Polys[node.I]\n\n\t\t\t\t\tif n == batchSize-1 {\n\t\t\t\t\t\tquery.Process(tile, polys[:], polyRefs[:], batchSize)\n\t\t\t\t\t\tn = 0\n\t\t\t\t\t} else {\n\t\t\t\t\t\tn++\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif overlap || isLeafNode {\n\t\t\t\tnodeIndex++\n\t\t\t} else {\n\t\t\t\tescapeIndex := int(-node.I)\n\t\t\t\tnodeIndex += escapeIndex\n\t\t\t}\n\t\t}\n\t} else {\n\t\tvar bmin, bmax [3]float32\n\t\tbase := this.m_nav.GetPolyRefBase(tile)\n\t\tfor i := 0; i < int(tile.Header.PolyCount); i++ {\n\t\t\tp := &tile.Polys[i]\n\t\t\t// Do not return off-mesh connection polygons.\n\t\t\tif p.GetType() == DT_POLYTYPE_OFFMESH_CONNECTION {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\t// Must pass filter\n\t\t\tref := base | (DtPolyRef)(i)\n\t\t\tif !filter.PassFilter(ref, tile, p) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\t// Calc polygon bounds.\n\t\t\tv := tile.Verts[p.Verts[0]*3:]\n\t\t\tDtVcopy(bmin[:], v)\n\t\t\tDtVcopy(bmax[:], v)\n\t\t\tfor j := 1; j < int(p.VertCount); j++ {\n\t\t\t\tv = tile.Verts[p.Verts[j]*3:]\n\t\t\t\tDtVmin(bmin[:], v)\n\t\t\t\tDtVmax(bmax[:], v)\n\t\t\t}\n\t\t\tif DtOverlapBounds(qmin, qmax, bmin[:], bmax[:]) {\n\t\t\t\tpolyRefs[n] = ref\n\t\t\t\tpolys[n] = p\n\n\t\t\t\tif n == batchSize-1 {\n\t\t\t\t\tquery.Process(tile, polys[:], polyRefs[:], batchSize)\n\t\t\t\t\tn = 0\n\t\t\t\t} else {\n\t\t\t\t\tn++\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Process the last polygons that didn't make a full batch.\n\tif n > 0 {\n\t\tquery.Process(tile, polys[:], polyRefs[:], n)\n\t}\n}",
"func applyAdditionalQueryOptions(queryString string, queryLimit, querySkip int) (string, error) {\n\tconst jsonQueryFields = \"fields\"\n\tconst jsonQueryLimit = \"limit\"\n\tconst jsonQuerySkip = \"skip\"\n\t//create a generic map for the query json\n\tjsonQueryMap := make(map[string]interface{})\n\t//unmarshal the selector json into the generic map\n\tdecoder := json.NewDecoder(bytes.NewBuffer([]byte(queryString)))\n\tdecoder.UseNumber()\n\terr := decoder.Decode(&jsonQueryMap)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tif fieldsJSONArray, ok := jsonQueryMap[jsonQueryFields]; ok {\n\t\tswitch fieldsJSONArray.(type) {\n\t\tcase []interface{}:\n\t\t\t//Add the \"_id\" field, these are needed by default\n\t\t\tjsonQueryMap[jsonQueryFields] = append(fieldsJSONArray.([]interface{}),\n\t\t\t\tidField)\n\t\tdefault:\n\t\t\treturn \"\", fmt.Errorf(\"fields definition must be an array\")\n\t\t}\n\t}\n\t// Add limit\n\t// This will override any limit passed in the query.\n\t// Explicit paging not yet supported.\n\tjsonQueryMap[jsonQueryLimit] = queryLimit\n\t// Add skip\n\t// This will override any skip passed in the query.\n\t// Explicit paging not yet supported.\n\tjsonQueryMap[jsonQuerySkip] = querySkip\n\t//Marshal the updated json query\n\teditedQuery, err := json.Marshal(jsonQueryMap)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tlogger.Debugf(\"Rewritten query: %s\", editedQuery)\n\treturn string(editedQuery), nil\n}",
"func parseRangeQuery(p *parser) parserStateFn {\n\topTok := p.next() // Already checked to be the range operator token.\n\tvalTok := p.next()\n\tswitch valTok.typ {\n\tcase tokTypeError:\n\t\tp.backup(valTok)\n\t\treturn parseErrorTok\n\tcase tokTypeUnquotedLiteral, tokTypeQuotedLiteral:\n\t\tvar trm term\n\t\tif valTok.typ == tokTypeUnquotedLiteral {\n\t\t\ttrm = newTerm(valTok.val)\n\t\t} else {\n\t\t\ttrm = newQuotedTerm(valTok.val)\n\t\t}\n\t\tif trm.Wildcard {\n\t\t\treturn p.errorfAt(valTok.pos, \"cannot have a wildcard in range query token\")\n\t\t}\n\t\tvar q rpnStep\n\t\tswitch opTok.typ {\n\t\tcase tokTypeGt:\n\t\t\tq = &rpnGtRangeQuery{\n\t\t\t\tfield: p.field.val,\n\t\t\t\tterm: trm,\n\t\t\t\tlogLevelLess: p.logLevelLess,\n\t\t\t}\n\t\tcase tokTypeGte:\n\t\t\tq = &rpnGteRangeQuery{\n\t\t\t\tfield: p.field.val,\n\t\t\t\tterm: trm,\n\t\t\t\tlogLevelLess: p.logLevelLess,\n\t\t\t}\n\t\tcase tokTypeLt:\n\t\t\tq = &rpnLtRangeQuery{\n\t\t\t\tfield: p.field.val,\n\t\t\t\tterm: trm,\n\t\t\t\tlogLevelLess: p.logLevelLess,\n\t\t\t}\n\t\tcase tokTypeLte:\n\t\t\tq = &rpnLteRangeQuery{\n\t\t\t\tfield: p.field.val,\n\t\t\t\tterm: trm,\n\t\t\t\tlogLevelLess: p.logLevelLess,\n\t\t\t}\n\t\tdefault:\n\t\t\tlg.Fatalf(\"invalid opTok.typ=%v while parsing range query\", opTok.typ)\n\t\t}\n\t\tp.filter.addStep(q)\n\t\tp.field = nil\n\t\treturn parseAfterQuery\n\tdefault:\n\t\treturn p.errorfAt(valTok.pos, \"expected a literal after '%s'; got %s\",\n\t\t\topTok.val, valTok.typ)\n\t}\n}",
"func (q *Query) release() {\n\tq.sets.reset()\n\tq.sort = nil\n\tq.offset = 0\n\tq.around = 0\n\tq.limit = 50\n\tq.desc = false\n\tq.db.queries <- q\n}",
"func (w *Wrapper) cleanAfter() {\n\tw.queryOptions = []string{}\n\tw.tableName = []string{}\n\tw.params = []interface{}{}\n\tw.onDuplicateColumns = []string{}\n\tw.groupBy = []string{}\n\tw.joins = map[string]*join{}\n\tw.orders = []order{}\n\tw.conditions = []condition{}\n\tw.havingConditions = []condition{}\n\tw.limit = []int{}\n\tw.destination = nil\n}",
"func (c *Context) BindQuery(obj interface{}) error {\n\treturn validate(mapArgs(obj, c.QueryArgs()), obj)\n}",
"func (l instanceList) SplitQuery(\n\tquery *influxql.Query, now time.Time) (\n\tsplitQueries []*influxql.Query, err error) {\n\tif len(l) == 0 {\n\t\treturn\n\t}\n\tresult := make([]*influxql.Query, len(l))\n\tfor i := range result {\n\t\tresult[i], err = qlutils.QuerySetTimeRange(\n\t\t\tquery, l.minTime(i, now), l.maxTime(i, now))\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\treturn result, nil\n}",
"func (vq *VehicleQuery) Limit(limit int) *VehicleQuery {\n\tvq.limit = &limit\n\treturn vq\n}",
"func (rmq *RangeMexQuery) AddQuery(start, end int) {\r\n\trmq.query = append(rmq.query, [2]int{start, end})\r\n}",
"func (p *Parse) Boundary(finalState *TableState) *Pos {\n\tif finalState == nil {\n\t\treturn nil\n\t}\n\tstart := p.columns[finalState.Start+1].token.StartByte\n\tend := p.columns[finalState.End].token.EndByte\n\tif end < start { //TODO\n\t\tend = start\n\t}\n\treturn &Pos{start, end}\n}",
"func (m *ItemItemsItemWorkbookTablesWorkbookTableItemRequestBuilder) RangeEscaped()(*ItemItemsItemWorkbookTablesItemRangeRequestBuilder) {\n return NewItemItemsItemWorkbookTablesItemRangeRequestBuilderInternal(m.BaseRequestBuilder.PathParameters, m.BaseRequestBuilder.RequestAdapter)\n}",
"func (ouq *OrgUnitQuery) Limit(limit int) *OrgUnitQuery {\n\touq.limit = &limit\n\treturn ouq\n}",
"func (pq *PersonQuery) Limit(limit int) *PersonQuery {\n\tpq.limit = &limit\n\treturn pq\n}",
"func getPagedBunQuery(\n\tctx context.Context, query *bun.SelectQuery, offset, limit int,\n) (*apiv1.Pagination, *bun.SelectQuery, error) {\n\t// Count number of items without any limits or offsets.\n\ttotal, err := query.Count(ctx)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\t// Calculate end and start indexes.\n\tstartIndex := offset\n\tif offset > total || offset < -total {\n\t\tstartIndex = total\n\t} else if offset < 0 {\n\t\tstartIndex = total + offset\n\t}\n\n\tendIndex := startIndex + limit\n\tswitch {\n\tcase limit == -2:\n\t\tendIndex = startIndex\n\tcase limit == -1:\n\t\tendIndex = total\n\tcase limit == 0:\n\t\tendIndex = 100 + startIndex\n\t\tif total < endIndex {\n\t\t\tendIndex = total\n\t\t}\n\tcase startIndex+limit > total:\n\t\tendIndex = total\n\t}\n\n\t// Add start and end index to query.\n\tquery.Offset(startIndex)\n\tquery.Limit(endIndex - startIndex)\n\n\treturn &apiv1.Pagination{\n\t\tOffset: int32(offset),\n\t\tLimit: int32(limit),\n\t\tTotal: int32(total),\n\t\tStartIndex: int32(startIndex),\n\t\tEndIndex: int32(endIndex),\n\t}, query, nil\n}",
"func (s *BaseQueryListener) ExitAndQueryCriterias(ctx *AndQueryCriteriasContext) {}",
"func (q *queryImpl) Limit(limit uint64) Query {\n\tq.limit = strconv.FormatUint(limit, 10)\n\treturn q\n}",
"func (query *TestStringIdEntityQuery) Limit(limit uint64) *TestStringIdEntityQuery {\n\tquery.Query.Limit(limit)\n\treturn query\n}",
"func parseUrlEncodedQueryParams(rawQuery string) (bson.M, map[string]interface{}) {\n\t\n\tqueryMap, _ := url.ParseQuery(rawQuery)\n\tvar query map[string]interface{}\n\tfor key, value := range queryMap {\n\t\tswitch key {\n\t\tdefault:\n\t\t\treturn bson.M{}, nil\n\t\tcase \"where\":\n\t\t\tif len(value) == 1 {\n\t\t\t\t\n\t\t\t\terr := json.Unmarshal([]byte(value[0]), &query)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn bson.M{}, map[string]interface{}{\"code\": helpers.INVALID_JSON, \"error\": \"invalid JSON\"}\n\t\t\t\t}\n\n\t\t\t} \n\t\tcase \"order\":\n\t\tcase \"limit\":\n\t\tcase \"skip\":\n\t\tcase \"keys\":\n\t\tcase \"include\":\n\t\t}\n\t}\n\t// fmt.Println(findObjectWithKey(query, \"$select\"))\n\t_ = parseWhereQuery(query)\n\terrMap := formatObjectQuery(query)\n\t// map can be used as bson.M for return\n\treturn query, errMap\n}",
"func (hq *HarborQuery) Limit(limit int) *HarborQuery {\n\thq.limit = &limit\n\treturn hq\n}",
"func (query *EventQuery) Limit(limit uint64) *EventQuery {\n\tquery.Query.Limit(limit)\n\treturn query\n}",
"func parseBeforeQuery(p *parser) parserStateFn {\n\ttok := p.next()\n\tswitch tok.typ {\n\tcase tokTypeError:\n\t\tp.backup(tok)\n\t\treturn parseErrorTok\n\tcase tokTypeEOF:\n\t\tp.backup(tok)\n\t\treturn parseEOFTok\n\tcase tokTypeOpenParen:\n\t\t// Push the '(' onto the ops stack. It will be the marker at which to\n\t\t// stop when the ')' token is parsed.\n\t\tp.stagedOps.Push(tok)\n\t\treturn parseBeforeQuery\n\tcase tokTypeNot:\n\t\tp.stageBoolOp(tok)\n\t\tp.incompleteBoolOp = true\n\t\treturn parseBeforeQuery\n\tcase tokTypeUnquotedLiteral, tokTypeQuotedLiteral:\n\t\tp.incompleteBoolOp = false\n\t\tswitch tok2 := p.peek(); tok2.typ {\n\t\tcase tokTypeError:\n\t\t\treturn parseErrorTok\n\t\tcase tokTypeGt, tokTypeGte, tokTypeLt, tokTypeLte:\n\t\t\t// E.g.: `a.field >= 100`, `some.date.field < \"2021-02\"`\n\t\t\tif tok.typ == tokTypeQuotedLiteral {\n\t\t\t\treturn p.errorfAt(tok.pos, \"a *quoted* field for a range query is not yet supported\")\n\t\t\t}\n\t\t\tp.field = &tok\n\t\t\treturn parseRangeQuery\n\t\tcase tokTypeColon:\n\t\t\t// E.g.: `foo:value1 value2`, `foo:(a or b)`, `foo:(a and b and c)`,\n\t\t\t// `foo:*`\n\t\t\tif tok.typ == tokTypeQuotedLiteral {\n\t\t\t\treturn p.errorfAt(tok.pos, \"a *quoted* field for a term query is not yet supported\")\n\t\t\t}\n\t\t\tp.field = &tok\n\t\t\treturn parseTermsQuery\n\t\tdefault:\n\t\t\t// E.g.: `foo bar baz`\n\t\t\t// No range operator and no colon means this is a query without\n\t\t\t// a field name. In Kibana, this matches against \"default fields\".\n\t\t\ttermTok := tok\n\t\t\tvar terms []term\n\t\t\tfor {\n\t\t\t\tif termTok.typ == tokTypeUnquotedLiteral {\n\t\t\t\t\tterms = append(terms, newTerm(termTok.val))\n\t\t\t\t} else if termTok.typ == tokTypeQuotedLiteral {\n\t\t\t\t\tterms = append(terms, newQuotedTerm(termTok.val))\n\t\t\t\t} else {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\ttermTok = p.next()\n\t\t\t}\n\t\t\tp.backup(termTok)\n\t\t\tp.filter.addStep(&rpnDefaultFieldsTermsQuery{terms: terms})\n\t\t\treturn parseAfterQuery\n\t\t}\n\tdefault:\n\t\treturn p.errorfAt(tok.pos,\n\t\t\t\"expecting a literal, 'not', or '('; got %s\", tok.typ)\n\t}\n}",
"func (q *Query) NoLimit() *Query {\n\tq.offset = 0\n\tq.limit = 0\n\treturn q\n}",
"func (rq *RentQuery) Limit(limit int) *RentQuery {\n\trq.limit = &limit\n\treturn rq\n}",
"func (self *TStatement) Limit(limit int64, offset ...int64) *TStatement {\r\n\tself.LimitClause = limit\r\n\tif len(offset) > 0 {\r\n\t\tself.OffsetClause = offset[0]\r\n\t}\r\n\treturn self\r\n}",
"func (s *BaseQueryListener) EnterBracketQueryCriterias(ctx *BracketQueryCriteriasContext) {}",
"func FullQuery(s *vcloud.Session, o *Options) (interface{}, error) {\n\topts := *o\n\t//TODO: Process options\n\n\tqr, err := Query(s, opts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif qr.Total == 0 {\n\t\treturn nil, nil\n\t}\n\n\t//TODO: Error checking here, qr should be a slice\n\tdst := reflect.ValueOf(qr.Records)\n\tps := opts.PageSize\n\n\tvar i int // Number of records (pages * pageSize)\n\n\tfor i = ps; opts.Limit == 0 || i < opts.Limit; i += ps {\n\t\topts.Href = qr.Links.HrefOf(\"nextPage\")\n\n\t\tif opts.Href == \"\" {\n\t\t\tbreak\n\t\t}\n\n\t\t//TODO: Error checking here, qr should be a slice\n\t\tqr, err = Query(s, opts)\n\t\tif err != nil {\n\t\t\tpanic(err) //TODO\n\t\t}\n\n\t\tdst = reflect.AppendSlice(dst, reflect.ValueOf(qr.Records))\n\t}\n\n\tif opts.Limit != 0 && i > opts.Limit && opts.Limit < qr.Total {\n\t\tdst = dst.Slice(0, opts.Limit)\n\t}\n\n\treturn dst.Interface(), nil\n}",
"func (r *Elasticsearch) Query(collections []string, term string, bbox []float64, timeVal []time.Time, from int, size int, sr *search.Results) error {\n\tvar mr metadata.Record\n\t//\tvar query elastic.Query\n\tctx := context.Background()\n\n\tquery := elastic.NewBoolQuery()\n\n\tif term == \"\" {\n\t\tquery = query.Must(elastic.NewMatchAllQuery())\n\t} else {\n\t\tquery = query.Must(elastic.NewQueryStringQuery(term))\n\t}\n\tif len(timeVal) > 0 {\n\t\tif len(timeVal) == 1 { // exact match\n\t\t\tquery = query.Must(elastic.NewTermQuery(\"properties.product_info.acquisition_date\", timeVal[0]))\n\t\t} else if len(timeVal) == 2 { // range\n\t\t\trangeQuery := elastic.NewRangeQuery(\"properties.product_info.acquisition_date\").\n\t\t\t\tFrom(timeVal[0]).\n\t\t\t\tTo(timeVal[1])\n\t\t\tquery = query.Must(rangeQuery)\n\t\t}\n\t}\n\tif len(bbox) == 4 {\n\t\t// workaround for issuing a RawStringQuery until\n\t\t// GeoShape queries are supported (https://github.com/olivere/elastic/pull/276)\n\t\tvar tpl bytes.Buffer\n\t\tvars := map[string]interface{}{\n\t\t\t\"bbox\": bbox,\n\t\t\t\"field\": \"geometry\",\n\t\t}\n\t\trawStringQueryTemplate, _ := template.New(\"geo_shape_query\").Parse(`{ \n \"geo_shape\": {\n \"{{ .field }}\": {\n \"shape\": {\n \"type\": \"envelope\",\n \"coordinates\": [\n [ \n {{ index .bbox 0 }}, \n {{ index .bbox 1 }}\n ], \n [ \n {{ index .bbox 2 }}, \n {{ index .bbox 3 }}\n ] \n ]\n },\n \"relation\": \"within\"\n } \n } \n }`)\n\t\trawStringQueryTemplate.Execute(&tpl, vars)\n\n\t\tquery = query.Must(elastic.NewRawStringQuery(tpl.String()))\n\t}\n\tif len(collections) > 0 {\n\t\tc := make([]interface{}, len(collections))\n\t\tfor i, s := range collections {\n\t\t\tc[i] = s\n\t\t}\n\t\tquery = query.Must(elastic.NewTermsQuery(\"properties.product_info.collection\", c...))\n\t}\n\n\t//src, err := query.Source()\n\t//data, err := json.Marshal(src)\n\t//fmt.Println(string(data))\n\n\tsearchResult, err := r.Index.Search().\n\t\tIndex(r.IndexName).\n\t\tType(r.TypeName).\n\t\tFrom(from).\n\t\tSize(size).\n\t\tQuery(query).Do(ctx)\n\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\tfmt.Println(\"JJJJ\")\n\t\treturn err\n\t}\n\n\tsr.ElapsedTime = int(searchResult.TookInMillis)\n\tsr.Matches = int(searchResult.TotalHits())\n\tsr.Returned = size\n\tsr.NextRecord = size + 1\n\n\tif sr.Matches < size {\n\t\tsr.Returned = sr.Matches\n\t\tsr.NextRecord = 0\n\t}\n\n\tfor _, item := range searchResult.Each(reflect.TypeOf(mr)) {\n\t\tif t, ok := item.(metadata.Record); ok {\n\t\t\tsr.Records = append(sr.Records, t)\n\t\t}\n\t}\n\n\treturn nil\n}",
"func (goaq *GroupOfAgeQuery) Limit(limit int) *GroupOfAgeQuery {\n\tgoaq.limit = &limit\n\treturn goaq\n}",
"func (serv MetricsService) QueryRange(w http.ResponseWriter, r *http.Request) {\n\tsetAccessControlHeaders(w)\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\tparams := httputils.GetQueryParams(r)\n\texpr := params.Get(\"expr\")\n\n\tduration, err := parseDuration(params.Get(\"range\"))\n\tif err != nil {\n\t\thttpJSONError(w, fmt.Errorf(\"invalid query range: %s\", err), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tstep, err := parseDuration(params.Get(\"step\"))\n\tif err != nil {\n\t\thttpJSONError(w, fmt.Errorf(\"invalid query resolution: %s\", err), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tend, err := parseTimestampOrNow(params.Get(\"end\"), serv.Now())\n\tif err != nil {\n\t\thttpJSONError(w, fmt.Errorf(\"invalid query timestamp: %s\", err), http.StatusBadRequest)\n\t\treturn\n\t}\n\t// TODO(julius): Remove this special-case handling a while after PromDash and\n\t// other API consumers have been changed to no longer set \"end=0\" for setting\n\t// the current time as the end time. Instead, the \"end\" parameter should\n\t// simply be omitted or set to an empty string for that case.\n\tif end == 0 {\n\t\tend = serv.Now()\n\t}\n\n\texprNode, err := rules.LoadExprFromString(expr)\n\tif err != nil {\n\t\tfmt.Fprint(w, ast.ErrorToJSON(err))\n\t\treturn\n\t}\n\tif exprNode.Type() != ast.VectorType {\n\t\tfmt.Fprint(w, ast.ErrorToJSON(errors.New(\"expression does not evaluate to vector type\")))\n\t\treturn\n\t}\n\n\t// For safety, limit the number of returned points per timeseries.\n\t// This is sufficient for 60s resolution for a week or 1h resolution for a year.\n\tif duration/step > 11000 {\n\t\tfmt.Fprint(w, ast.ErrorToJSON(errors.New(\"exceeded maximum resolution of 11,000 points per timeseries. Try decreasing the query resolution (?step=XX)\")))\n\t\treturn\n\t}\n\n\t// Align the start to step \"tick\" boundary.\n\tend = end.Add(-time.Duration(end.UnixNano() % int64(step)))\n\n\tqueryStats := stats.NewTimerGroup()\n\n\tmatrix, err := ast.EvalVectorRange(\n\t\texprNode.(ast.VectorNode),\n\t\tend.Add(-duration),\n\t\tend,\n\t\tstep,\n\t\tserv.Storage,\n\t\tqueryStats)\n\tif err != nil {\n\t\tfmt.Fprint(w, ast.ErrorToJSON(err))\n\t\treturn\n\t}\n\n\tsortTimer := queryStats.GetTimer(stats.ResultSortTime).Start()\n\tsort.Sort(matrix)\n\tsortTimer.Stop()\n\n\tjsonTimer := queryStats.GetTimer(stats.JSONEncodeTime).Start()\n\tresult := ast.TypedValueToJSON(matrix, \"matrix\")\n\tjsonTimer.Stop()\n\n\tglog.V(1).Infof(\"Range query: %s\\nQuery stats:\\n%s\\n\", expr, queryStats)\n\tfmt.Fprint(w, result)\n}",
"func Trim(p projection) *trimFunc {\n\treturn &trimFunc{\n\t\tsubject: p.(element),\n\t\tsel: p.from(),\n\t\tlocation: TRIM_BOTH,\n\t}\n}",
"func (ecpq *EntityContactPointQuery) Limit(limit int) *EntityContactPointQuery {\n\tecpq.limit = &limit\n\treturn ecpq\n}",
"func (liq *LineItemQuery) Limit(limit int) *LineItemQuery {\n\tliq.limit = &limit\n\treturn liq\n}",
"func (s *SmartContract) queryUpTo(APIstub shim.ChaincodeStubInterface, args []string) sc.Response {\n\tstart := \"1\" //inclusive\n\tend := string(args[0]) //exclusive\n\n\tresultsIter, err := APIstub.GetStateByRange(start, end)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\tdefer resultsIter.Close()\n\n\tvar buff bytes.Buffer\n\tbuff.WriteString(\"[\")\n\n\twrittenArrMember := false\n\tfor resultsIter.HasNext() {\n\t\tqueryResp, err := resultsIter.Next()\n\t\tif err != nil {\n\t\t\treturn shim.Error(err.Error())\n\t\t}\n\n\t\tif writtenArrMember == true {\n\t\t\tbuff.WriteString(\",\")\n\t\t}\n\t\tbuff.WriteString(\"{\\\"Key\\\":\")\n\t\tbuff.WriteString(\"\\\"\")\n\t\tbuff.WriteString(queryResp.Key)\n\t\tbuff.WriteString(\"\\\"\")\n\t\tbuff.WriteString(\", \\\"Record\\\":\")\n\t\tbuff.WriteString(string(queryResp.Value))\n\t\tbuff.WriteString(\"}\")\n\t\twrittenArrMember = true\n\t}\n\tbuff.WriteString(\"]\")\n\n\tfmt.Printf(buff.String())\n\n\treturn shim.Success(buff.Bytes())\n}",
"func (s *BaseMySqlParserListener) EnterTrimFunctionCall(ctx *TrimFunctionCallContext) {}",
"func (ttrq *TradeTimeRangeQuery) Limit(limit int) *TradeTimeRangeQuery {\n\tttrq.limit = &limit\n\treturn ttrq\n}",
"func (query *EntityQuery) Limit(limit uint64) *EntityQuery {\n\tquery.Query.Limit(limit)\n\treturn query\n}",
"func (c *Client) all() (inside []boundary, err error) {\n\n\tresults, err := c.rdb.ZRangeByScoreWithScores(IPRangesKey, redis.ZRangeBy{\n\t\tMin: \"-inf\",\n\t\tMax: \"+inf\",\n\t}).Result()\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor _, result := range results {\n\t\tbnd := newBoundary(result.Score, \"\", false, false)\n\t\tinside = append(inside, bnd)\n\t}\n\n\ttx := c.rdb.TxPipeline()\n\n\tcmds := make([]*redis.SliceCmd, 0, len(inside))\n\tfor _, bnd := range inside {\n\t\tcmd := bnd.Get(tx)\n\t\tcmds = append(cmds, cmd)\n\t}\n\n\t_, err = tx.Exec()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor idx, cmd := range cmds {\n\t\tresult, err := cmd.Result()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif len(result) != 3 {\n\t\t\tpanic(\"database inconsistent\")\n\t\t}\n\n\t\tlow := false\n\t\tswitch t := result[0].(type) {\n\t\tcase string:\n\t\t\tlow = t == \"1\"\n\t\tdefault:\n\t\t\tlow = false\n\t\t}\n\n\t\thigh := false\n\t\tswitch t := result[1].(type) {\n\t\tcase string:\n\t\t\thigh = t == \"1\"\n\t\tdefault:\n\t\t\thigh = false\n\t\t}\n\n\t\treason := \"\"\n\t\tswitch t := result[2].(type) {\n\t\tcase string:\n\t\t\treason = t\n\t\tdefault:\n\t\t\treason = \"\"\n\t\t}\n\n\t\tinside[idx].LowerBound = low\n\t\tinside[idx].UpperBound = high\n\t\tinside[idx].Reason = reason\n\t}\n\n\tsort.Sort(byIP(inside))\n\treturn inside, nil\n}",
"func (daq *DrugAllergyQuery) Limit(limit int) *DrugAllergyQuery {\n\tdaq.limit = &limit\n\treturn daq\n}",
"func Offset(offset int) QueryOptions {\n\treturn func(query *Query) (*Query, error) {\n\t\tif offset < 0 {\n\t\t\treturn nil, errors.New(\"offset can't be negative\")\n\t\t}\n\t\tresMap, err := mergeQueryMaps(query.content,\n\t\t\tmap[string]interface{}{operations[OFFSET]: offset})\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tquery.content = resMap.(map[string]interface{})\n\t\treturn query, nil\n\t}\n}",
"func getRangeFilter(v []interface{}) (elastic.RangeFilter, error) {\n\tvar ret elastic.RangeFilter\n\n\tif size := len(v); size < 3 {\n\t\treturn ret, errors.New(\"not enough values for rangefilter\")\n\t}\n\n\t//get name of the field\n\tif name, ok := v[0].(string); ok {\n\t\tret = elastic.NewRangeFilter(name)\n\t} else {\n\t\treturn ret, errors.New(\"Range filter : first parameter must be a string\")\n\t}\n\t//get methods to apply\n\tmethods := v[1:]\n\tif len(methods)%2 != 0 {\n\t\treturn ret, errors.New(\"Range filter : Wrong number of parameters\")\n\t}\n\n\tfor len(methods) > 0 {\n\t\tmethod, ok := methods[0].(string)\n\t\tif !ok {\n\t\t\treturn ret, errors.New(\"Range filter : parameter must be a string\")\n\t\t}\n\t\tval := stringToNb(methods[1])\n\t\tswitch method {\n\t\tcase \"gt\":\n\t\t\tret = ret.Gt(val)\n\t\tcase \"gte\":\n\t\t\tret = ret.Gte(val)\n\t\tcase \"lt\":\n\t\t\tret = ret.Lt(val)\n\t\tcase \"lte\":\n\t\t\tret = ret.Lte(val)\n\t\tdefault:\n\t\t\treturn ret, errors.New(\"method not (yet) supported, only: gt, gte, lt, lte\")\n\t\t}\n\t\tmethods = methods[2:]\n\t}\n\treturn ret, nil\n}",
"func cutOff(array []string, first, last bool) []string {\n\tif len(array) > 0 && first {\n\t\tarray = array[1:]\n\t}\n\tif len(array) > 0 && last {\n\t\tarray = array[:len(array)-1]\n\t}\n\treturn array\n}",
"func (this *NamedParameterQuery) setQuery(queryText string) {\n\n\tvar revisedBuilder bytes.Buffer\n\tvar parameterBuilder bytes.Buffer\n\tvar position []int\n\tvar character rune\n\tvar parameterName string\n\tvar width int\n\tvar positionIndex int\n\n\tthis.originalQuery = queryText\n\tpositionIndex = 0\n\n\tfor i := 0; i < len(queryText); {\n\n\t\tcharacter, width = utf8.DecodeRuneInString(queryText[i:])\n\t\ti += width\n\n\t\t// if it's a colon, do not write to builder, but grab name\n\t\tif(character == ':') {\n\n\t\t\tfor ;; {\n\n\t\t\t\tcharacter, width = utf8.DecodeRuneInString(queryText[i:])\n\t\t\t\ti += width\n\n\t\t\t\tif unicode.IsLetter(character) || unicode.IsDigit(character) {\n\t\t\t\t\tparameterBuilder.WriteString(string(character))\n\t\t\t\t} else {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// add to positions\n\t\t\tparameterName = parameterBuilder.String()\n\t\t\tposition = this.positions[parameterName]\n\t\t\tthis.positions[parameterName] = append(position, positionIndex)\n\t\t\tpositionIndex++\n\n\t\t\trevisedBuilder.WriteString(\"?\")\n\t\t\tparameterBuilder.Reset()\n\n\t\t\tif(width <= 0) {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\t// otherwise write.\n\t\trevisedBuilder.WriteString(string(character))\n\n\t\t// if it's a quote, continue writing to builder, but do not search for parameters.\n\t\tif(character == '\\'') {\n\n\t\t\tfor ;; {\n\n\t\t\t\tcharacter, width = utf8.DecodeRuneInString(queryText[i:])\n\t\t\t\ti += width\n\t\t\t\trevisedBuilder.WriteString(string(character))\n\n\t\t\t\tif(character == '\\'') {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tthis.revisedQuery = revisedBuilder.String()\n\tthis.parameters = make([]interface{}, positionIndex)\n}",
"func TestParsing05(t *testing.T) {\n\tvar q = \"%a-b-c%d\"\n\tvar r = &RangeExpr{Buffer: q}\n\tr.Init()\n\tr.Expression.Init(q)\n\terr := r.Parse()\n\tif err == nil {\n\t\tt.Errorf(\"Expected Error, (Query: %s) should NOT BE parsed [contains %%, where not expected]\", q)\n\t}\n}",
"func (c *CircleCriteria) ToQueryToken(fieldName string, addQueryParameter func(interface{}) string) queryToken {\n\treturn c.SpatialCriteriaCommon.toQueryTokenCommon(c, fieldName, addQueryParameter)\n}",
"func (ba *FilterBitArray) UnsetRange(begin uint, end uint) {\n\tif begin > ba.Capacity() || begin == end {\n\t\treturn\n\t}\n\n\tstartByteIndex := ba.byteIndex(begin)\n\tendByteIndex := ba.byteIndex(end)\n\n\tfirstByteMask := byteMask << (begin % byteSize)\n\tlastByteMask := byteMask >> ((byteSize - end - 1) % byteSize)\n\n\tif startByteIndex == endByteIndex {\n\t\t(*ba)[startByteIndex] &= ^(firstByteMask & lastByteMask)\n\t} else {\n\t\t(*ba)[startByteIndex] &= ^firstByteMask\n\t\tfor i := startByteIndex + 1; i < endByteIndex; i++ {\n\t\t\t(*ba)[i] = 0\n\t\t}\n\t\t(*ba)[endByteIndex] &= ^lastByteMask\n\t}\n}",
"func (self *Query) Limit(l int) *Query {\n\tself.limit = l\n\treturn self\n}",
"func CreateBlankQueryArray() []Query {\n\treturn make([]Query, 0)\n}",
"func nearQueryKeys(pt s2.Point, d float64) ([]string, *QueryData, error) {\n\tif d <= 0 {\n\t\treturn nil, nil, x.Errorf(\"Invalid max distance specified for a near query\")\n\t}\n\ta := EarthAngle(d)\n\tc := s2.CapFromCenterAngle(pt, a)\n\tcu := indexCellsForCap(c)\n\t// A near query is similar to within, where we are looking for points within the cap. So we need\n\t// all objects whose parents match the cover of the cap.\n\treturn toTokens(cu, parentPrefix), &QueryData{cap: &c, qtype: QueryTypeNear}, nil\n}",
"func Limit(limit int) QueryOptions {\n\treturn func(query *Query) (*Query, error) {\n\t\tif limit < 0 {\n\t\t\treturn nil, errors.New(\"limit can't be negative\")\n\t\t}\n\t\tresMap, err := mergeQueryMaps(query.content,\n\t\t\tmap[string]interface{}{operations[LIMIT]: limit})\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tquery.content = resMap.(map[string]interface{})\n\t\treturn query, nil\n\t}\n}",
"func (fn *TruncateFn) SplitRestriction(_ []byte, rest offsetrange.Restriction) []offsetrange.Restriction {\n\treturn rest.EvenSplits(2)\n}",
"func OptimizeQuery(input Query) (Query, map[string]string) {\n\toptimizedQuery, optimizedLabel := removeSuperfluousGoupBy(input)\n\treturn optimizedQuery, optimizedLabel\n}",
"func (epdq *EquipmentPortDefinitionQuery) Limit(limit int) *EquipmentPortDefinitionQuery {\n\tepdq.limit = &limit\n\treturn epdq\n}",
"func TestParsing04(t *testing.T) {\n\tvar q = \"%aa1-b-c-d1\"\n\tvar r = &RangeExpr{Buffer: q}\n\tr.Init()\n\tr.Expression.Init(q)\n\terr := r.Parse()\n\tif err != nil {\n\t\tt.Errorf(\"Expected NO Error, (Query: %s) should BE parsed [is of %%[a-z][a-z0-9-_]*\", q)\n\t}\n}",
"func (_p *ArticlePage) buildIdRestrict(direction string) (idStr string, idParams []interface{}) {\n\tswitch direction {\n\tcase \"previous\":\n\t\tif strings.ToLower(_p.Order[\"id\"]) == \"desc\" {\n\t\t\tidStr += \"id > ? \"\n\t\t\tidParams = append(idParams, _p.FirstId)\n\t\t} else {\n\t\t\tidStr += \"id < ? \"\n\t\t\tidParams = append(idParams, _p.FirstId)\n\t\t}\n\tcase \"current\":\n\t\t// trick to make Where function work\n\t\tif _p.PageNum == 0 && _p.FirstId == 0 && _p.LastId == 0 {\n\t\t\tidStr += \"id > ? \"\n\t\t\tidParams = append(idParams, 0)\n\t\t} else {\n\t\t\tif strings.ToLower(_p.Order[\"id\"]) == \"desc\" {\n\t\t\t\tidStr += \"id <= ? AND id >= ? \"\n\t\t\t\tidParams = append(idParams, _p.FirstId, _p.LastId)\n\t\t\t} else {\n\t\t\t\tidStr += \"id >= ? AND id <= ? \"\n\t\t\t\tidParams = append(idParams, _p.FirstId, _p.LastId)\n\t\t\t}\n\t\t}\n\tcase \"next\":\n\t\tif strings.ToLower(_p.Order[\"id\"]) == \"desc\" {\n\t\t\tidStr += \"id < ? \"\n\t\t\tidParams = append(idParams, _p.LastId)\n\t\t} else {\n\t\t\tidStr += \"id > ? \"\n\t\t\tidParams = append(idParams, _p.LastId)\n\t\t}\n\t}\n\tif _p.WhereString != \"\" {\n\t\tidStr = \" AND \" + idStr\n\t}\n\treturn\n}",
"func (gq *GoodsQuery) Limit(limit int) *GoodsQuery {\n\tgq.limit = &limit\n\treturn gq\n}",
"func (deq *DentalExpenseQuery) Limit(limit int) *DentalExpenseQuery {\n\tdeq.limit = &limit\n\treturn deq\n}",
"func (q *PersonQuery) Offset(n uint64) *PersonQuery {\n\tq.BaseQuery.Offset(n)\n\treturn q\n}",
"func parseBodyQueryParams(body []byte) (bson.M, map[string]interface{}) {\n\t// return an empty bson hash map if the body is empty\n\tif string(body) == \"\" {\n\t\t\n\t\treturn bson.M{}, nil\n\t} else {\n\t\tvar queryMap map[string]interface{}\n\n\t\terr := json.Unmarshal(body, &queryMap)\n\t\tif err != nil {\n\t\t\treturn bson.M{}, map[string]interface{}{\"code\": helpers.INVALID_JSON, \"error\": \"invalid JSON\"}\n\t\t}\n\t\tvar query map[string]interface{}\n\t\tfor key, value := range queryMap {\n\t\t\tswitch key {\n\t\t\tdefault:\n\t\t\t\treturn bson.M{}, nil\n\t\t\tcase \"where\":\n\t\t\t\tquery = value.(map[string]interface{})\n\t\t\tcase \"order\":\n\t\t\tcase \"limit\":\n\t\t\tcase \"skip\":\n\t\t\tcase \"keys\":\n\t\t\tcase \"include\":\n\t\t\t}\n\t\t}\n\n\t\t// findObjectWithKey(query, \"$select\").(map[string]interface{})[\"helloworld\"] = \"helloworld\"\n\t\t// fmt.Println(query)\n\t\tparseWhereQuery(query)\n\t\terrMap := formatObjectQuery(query)\n\t\t\n\t\treturn query, errMap\n\t}\n\t\n}",
"func (q *queryImpl) Offset(offset uint64) Query {\n\tq.offset = strconv.FormatUint(offset, 10)\n\treturn q\n}"
] | [
"0.5581075",
"0.5370196",
"0.51416427",
"0.4843106",
"0.48349684",
"0.48158392",
"0.48061302",
"0.47843358",
"0.47429067",
"0.4741085",
"0.4722207",
"0.4709483",
"0.470353",
"0.4635659",
"0.46195775",
"0.46092957",
"0.45898736",
"0.45854548",
"0.4559825",
"0.45567867",
"0.45275775",
"0.4522047",
"0.4518165",
"0.45126593",
"0.45018208",
"0.4488262",
"0.4475624",
"0.4473933",
"0.4470682",
"0.4460843",
"0.44575322",
"0.44537714",
"0.44469512",
"0.44455767",
"0.44355682",
"0.44326025",
"0.44296333",
"0.44244093",
"0.44229528",
"0.44219786",
"0.43973398",
"0.4389464",
"0.43772185",
"0.4373043",
"0.4368197",
"0.43637475",
"0.433553",
"0.43114215",
"0.4309465",
"0.43038046",
"0.43017536",
"0.42909172",
"0.42836672",
"0.42742902",
"0.42716402",
"0.42425084",
"0.42421722",
"0.42380744",
"0.42375493",
"0.42371038",
"0.42344922",
"0.42313516",
"0.4228981",
"0.42263705",
"0.4226353",
"0.42226425",
"0.42213356",
"0.42210266",
"0.4213434",
"0.41963902",
"0.41901085",
"0.41893962",
"0.4186542",
"0.41829482",
"0.41750234",
"0.4169552",
"0.41669455",
"0.41598147",
"0.4153067",
"0.41493887",
"0.41493714",
"0.41479897",
"0.41459176",
"0.41339925",
"0.41335925",
"0.41321516",
"0.41287425",
"0.4126651",
"0.41242608",
"0.41240698",
"0.41197127",
"0.4116838",
"0.41060928",
"0.410422",
"0.40964574",
"0.40919915",
"0.40894753",
"0.40882742",
"0.40811187",
"0.40808856",
"0.4076619"
] | 0.0 | -1 |
start transform feedback operation | func BeginTransformFeedback(primitiveMode uint32) {
C.glowBeginTransformFeedback(gpBeginTransformFeedback, (C.GLenum)(primitiveMode))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (p *literalProcessor) start() { go p.run() }",
"func (w *Walker) startProcessing() {\n\tdoStart := false\n\tw.pipe.RLock()\n\tif w.pipe.filters == nil { // no processing up to now => start with initial node\n\t\tw.pipe.pushSync(w.initial, 0) // input is buffered, will return immediately\n\t\tdoStart = true // yes, we will have to start the pipeline\n\t}\n\tw.pipe.RUnlock()\n\tif doStart { // ok to be outside mutex as other goroutines will check pipe.empty()\n\t\tw.pipe.startProcessing() // must be outside of mutex lock\n\t}\n}",
"func Transform(ctx context.Context, input <-chan CrawlResult, worker TransformFunc, parallelism int) <-chan TransformResult {\n\tt := &transformer{\n\t\tinput: input,\n\t\toutput: make(chan TransformResult, 1000),\n\t\tworkerBody: worker,\n\t\tparallelism: parallelism,\n\t}\n\tgo t.runWorkersToCompletion(ctx)\n\treturn t.output\n}",
"func TransformFeedbackBufferBase(xfb uint32, index uint32, buffer uint32) {\n\tsyscall.Syscall(gpTransformFeedbackBufferBase, 3, uintptr(xfb), uintptr(index), uintptr(buffer))\n}",
"func (s *BaseSyslParserListener) EnterTransform(ctx *TransformContext) {}",
"func (p *Pipe) start() {\n\tp.cancel = make(chan struct{})\n\terrcList := make([]<-chan error, 0, 1+len(p.processors)+len(p.sinks))\n\t// start pump\n\tout, errc := p.pump.run(p.cancel, p.ID(), p.provide, p.consume, p.sampleRate, p.metric)\n\terrcList = append(errcList, errc)\n\n\t// start chained processesing\n\tfor _, proc := range p.processors {\n\t\tout, errc = proc.run(p.cancel, p.ID(), out, p.sampleRate, p.metric)\n\t\terrcList = append(errcList, errc)\n\t}\n\n\tsinkErrcList := p.broadcastToSinks(out)\n\terrcList = append(errcList, sinkErrcList...)\n\tp.errc = mergeErrors(errcList...)\n}",
"func (f *FakeOutput) Start(_ operator.Persister) error { return nil }",
"func TransformFeedbackBufferBase(xfb uint32, index uint32, buffer uint32) {\n\tC.glowTransformFeedbackBufferBase(gpTransformFeedbackBufferBase, (C.GLuint)(xfb), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func TransformFeedbackBufferBase(xfb uint32, index uint32, buffer uint32) {\n\tC.glowTransformFeedbackBufferBase(gpTransformFeedbackBufferBase, (C.GLuint)(xfb), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func (s *streamStrategy) Start() {\n\tgo func() {\n\t\tfor msg := range s.inputChan {\n\t\t\tif msg.Origin != nil {\n\t\t\t\tmsg.Origin.LogSource.LatencyStats.Add(msg.GetLatency())\n\t\t\t}\n\t\t\ts.outputChan <- &message.Payload{Messages: []*message.Message{msg}, Encoded: msg.Content, UnencodedSize: len(msg.Content)}\n\t\t}\n\t\ts.done <- struct{}{}\n\t}()\n}",
"func (transmuxer *Transmuxer) Run() {\n\tif transmuxer.closed {\n\t\treturn\n\t}\n\n\tif transmuxer.running {\n\t\treturn\n\t}\n\n\ttransmuxer.running = true\n\n\tfor {\n\t\tvar sample float64\n\n\t\tfor _, streamer := range transmuxer.Streamers {\n\t\t\tnewSample, err := streamer.ReadSample()\n\t\t\tif err != nil {\n\t\t\t\tstreamer.setError(err)\n\t\t\t\tstreamer.Close()\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tsample += newSample * streamer.Volume\n\t\t}\n\n\t\tsample = sample * transmuxer.MasterVolume\n\n\t\tif transmuxer.FinalStream != nil {\n\t\t\terr := transmuxer.FinalStream.WriteSample(sample)\n\t\t\tif err != nil {\n\t\t\t\ttransmuxer.setError(err)\n\t\t\t\ttransmuxer.Close()\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\tif transmuxer.buffer != nil {\n\t\t\ttransmuxer.buffer = append(transmuxer.buffer, sample)\n\t\t}\n\t}\n}",
"func main() {\n\tSample1()\n\tSample2()\n\tSample3()\n\tRFlatDontWorkWithTransform()\n\tRefine()\n}",
"func (track *AudioTrack) Transform(fns ...audio.TransformFunc) {\n\tsrc := track.Broadcaster.Source()\n\ttrack.Broadcaster.ReplaceSource(audio.Merge(fns...)(src))\n}",
"func (f *filtererProcessor) Start(ctx context.Context) {\n\tctx = f.StartInternal(ctx, filtererProcName)\n\tf.input.Start(ctx)\n}",
"func BeginTransformFeedback(primitiveMode uint32) {\n C.glowBeginTransformFeedback(gpBeginTransformFeedback, (C.GLenum)(primitiveMode))\n}",
"func PauseTransformFeedback() {\n C.glowPauseTransformFeedback(gpPauseTransformFeedback)\n}",
"func Transform(ctx context.Context, parallelism int, bufferSize int, in chan OutResult,\n\ttransformer func(interface{}) (interface{}, error), errhandler func(error),\n) chan InOutResult {\n\t// TODO: can we have a channel factory to do this?\n\toutChan := make(chan InOutResult, bufferSize)\n\tvar wg sync.WaitGroup\n\tif parallelism < 1 {\n\t\tparallelism = 1\n\t}\n\twg.Add(parallelism)\n\ti := func() {\n\t\tdefer wg.Done()\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\tout := simpleInOut{\n\t\t\t\t\tsimpleOut: simpleOut{err: ctx.Err()},\n\t\t\t\t}\n\t\t\t\tselect {\n\t\t\t\tcase outChan <- out:\n\t\t\t\tdefault:\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tcase sr, ok := <-in:\n\t\t\t\t// do stuff, write to out maybe\n\t\t\t\tif !ok {\n\t\t\t\t\t// channel is closed, time to exit\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tif sr.Err() != nil {\n\t\t\t\t\tif errhandler != nil {\n\t\t\t\t\t\terrhandler(sr.Err())\n\t\t\t\t\t}\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tres, err := transformer(sr.Output())\n\t\t\t\tif err == ErrSkip {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tout := simpleInOut{\n\t\t\t\t\tsimpleOut: simpleOut{err: err, out: res},\n\t\t\t\t\tin: sr.Output(),\n\t\t\t\t}\n\t\t\t\t// TODO: this section will never cancel if this write blocks. Problem?\n\t\t\t\toutChan <- out\n\t\t\t}\n\t\t}\n\t}\n\tfor x := 0; x < parallelism; x++ {\n\t\tgo i()\n\t}\n\tgo func() {\n\t\twg.Wait()\n\t\tclose(outChan)\n\t}()\n\treturn outChan\n}",
"func TransformFeedbackBufferRange(xfb uint32, index uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpTransformFeedbackBufferRange, 5, uintptr(xfb), uintptr(index), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func (m *Muxer) Start(ctx context.Context, t astiencoder.CreateTaskFunc) {\n\tm.BaseNode.Start(ctx, t, func(t *astikit.Task) {\n\t\t// Make sure to write header once\n\t\tvar ret int\n\t\tm.o.Do(func() { ret = m.ctxFormat.AvformatWriteHeader(nil) })\n\t\tif ret < 0 {\n\t\t\temitAvError(m, m.eh, ret, \"m.ctxFormat.AvformatWriteHeader on %s failed\", m.ctxFormat.Filename())\n\t\t\treturn\n\t\t}\n\n\t\t// Write trailer once everything is done\n\t\tm.cl.Add(func() error {\n\t\t\tif ret := m.ctxFormat.AvWriteTrailer(); ret < 0 {\n\t\t\t\treturn fmt.Errorf(\"m.ctxFormat.AvWriteTrailer on %s failed: %w\", m.ctxFormat.Filename(), NewAvError(ret))\n\t\t\t}\n\t\t\treturn nil\n\t\t})\n\n\t\t// Make sure to stop the chan properly\n\t\tdefer m.c.Stop()\n\n\t\t// Start chan\n\t\tm.c.Start(m.Context())\n\t})\n}",
"func runProcessor() {\n\t// process callback is invoked for each message delivered from\n\t// \"example-stream\" topic.\n\tcb := func(ctx goka.Context, msg interface{}) {\n\t\tvar counter int64\n\t\t// ctx.Value() gets from the group table the value that is stored for\n\t\t// the message's key.\n\t\tif val := ctx.Value(); val != nil {\n\t\t\tcounter = val.(int64)\n\t\t}\n\t\tcounter++\n\t\t// SetValue stores the incremented counter in the group table for in\n\t\t// the message's key.\n\t\tctx.SetValue(counter)\n\t\tlog.Printf(\"key = %s, counter = %v, msg = %v\", ctx.Key(), counter, msg)\n\t}\n\n\t// Define a new processor group. The group defines all inputs, outputs, and\n\t// serialization formats. The group-table topic is \"example-group-table\".\n\tg := goka.DefineGroup(group,\n\t\tgoka.Input(topic, new(codec.String), cb),\n\t\tgoka.Persist(new(codec.Int64)),\n\t)\n\n\tp, err := goka.NewProcessor(brokers,\n\t\tg,\n\t\tgoka.WithTopicManagerBuilder(goka.TopicManagerBuilderWithTopicManagerConfig(tmc)),\n\t\tgoka.WithConsumerGroupBuilder(goka.DefaultConsumerGroupBuilder),\n\t)\n\tif err != nil {\n\t\tlog.Fatalf(\"error creating processor: %v\", err)\n\t}\n\tctx, cancel := context.WithCancel(context.Background())\n\tdone := make(chan struct{})\n\tgo func() {\n\t\tdefer close(done)\n\t\tif err = p.Run(ctx); err != nil {\n\t\t\tlog.Printf(\"error running processor: %v\", err)\n\t\t}\n\t}()\n\n\tsigs := make(chan os.Signal)\n\tgo func() {\n\t\tsignal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM, syscall.SIGKILL)\n\t}()\n\n\tselect {\n\tcase <-sigs:\n\tcase <-done:\n\t}\n\tcancel()\n\t<-done\n}",
"func runTransformations() error {\n\t_, tool := filepath.Split(flag.Args()[0])\n\tif runtime.GOOS == \"windows\" {\n\t\ttool = strings.TrimSuffix(tool, \".exe\")\n\t}\n\ttransform, ok := transformFuncs[tool]\n\tif !ok {\n\t\treturn fmt.Errorf(\"unknown tool: %q\", tool)\n\t}\n\ttransformed := flag.Args()[1:]\n\t//log.Println(tool, transformed)\n\tif transform != nil {\n\t\tvar err error\n\t\tif transformed, err = transform(transformed); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tdefer func() {\n\t\tfor _, fn := range deferred {\n\t\t\tif err := fn(); err != nil {\n\t\t\t\tfmt.Fprintln(os.Stderr, err)\n\t\t\t}\n\t\t}\n\t}()\n\tcmd := exec.Command(flag.Args()[0], transformed...)\n\tcmd.Stdout = os.Stdout\n\tcmd.Stderr = os.Stderr\n\tif err := cmd.Run(); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (track *VideoTrack) Transform(fns ...video.TransformFunc) {\n\tsrc := track.Broadcaster.Source()\n\ttrack.Broadcaster.ReplaceSource(video.Merge(fns...)(src))\n}",
"func (v *Vectorizer) FitTranform() *FeatureMatrix {\n\n}",
"func main() {\n\tfmt.Println(\"Start Test....!\")\n\tinputDB := setupDB(\"mysql\", \"root:root123@tcp(127.0.0.1:13306)/srcDB\")\n\textractDP := processors.NewSQLReader(inputDB, mypkg.Query(5))\n\n\ttransformDP := mypkg.NewMyTransformer()\n\tfmt.Println(transformDP)\n\n\toutputDB := setupDB(\"mysql\", \"root:root123@tcp(127.0.0.1:13306)/dstDB\")\n\toutputTable := \"krew_info\"\n\tloadDP := processors.NewSQLWriter(outputDB, outputTable)\n\n\tpipeline := ratchet.NewPipeline(extractDP, transformDP, loadDP)\n\tpipeline.Name = \"My Pipeline\"\n\n\terr := <-pipeline.Run()\n\tif err != nil {\n\t\tlogger.ErrorWithoutTrace(pipeline.Name, \":\", err)\n\t\tlogger.ErrorWithoutTrace(pipeline.Stats())\n\t} else {\n\t\tlogger.Info(pipeline.Name, \": Completed successfully.\")\n\t}\n}",
"func (c *Curl) transform() {\n\tvar tmp [StateSize]int8\n\ttransform(&tmp, &c.state, uint(c.rounds))\n\t// for odd number of rounds we need to copy the buffer into the state\n\tif c.rounds%2 != 0 {\n\t\tcopy(c.state[:], tmp[:])\n\t}\n}",
"func main() {\n\tadapter.RunStage(split, chunk, join)\n}",
"func (s *Surface) Transform(a, b, c, d, e, f float64) {\n\ts.Ctx.Call(\"transform\", a, b, c, d, e, f)\n}",
"func (r *sinkRunner) run(pipeID, componentID string, cancel chan struct{}, in <-chan message, meter *meter) <-chan error {\n\terrc := make(chan error, 1)\n\tgo func() {\n\t\tdefer close(errc)\n\t\tcall(r.reset, pipeID, errc) // reset hook\n\t\tvar m message\n\t\tvar ok bool\n\t\tfor {\n\t\t\t// receive new message\n\t\t\tselect {\n\t\t\tcase m, ok = <-in:\n\t\t\t\tif !ok {\n\t\t\t\t\tcall(r.flush, pipeID, errc) // flush hook\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tcase <-cancel:\n\t\t\t\tcall(r.interrupt, pipeID, errc) // interrupt hook\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tm.params.applyTo(componentID) // apply params\n\t\t\terr := r.fn(m.Buffer) // sink a buffer\n\t\t\tif err != nil {\n\t\t\t\terrc <- err\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tmeter = meter.sample(int64(m.Buffer.Size())).message()\n\n\t\t\tm.feedback.applyTo(componentID) // apply feedback\n\t\t}\n\t}()\n\n\treturn errc\n}",
"func (p *Pipeline) Run(ctx context.Context) {\n\tp.runMutex.Lock()\n\tdefer p.runMutex.Unlock()\n\tif p.status == STATUS_RUN {\n\t\treturn\n\t}\n\t//logrus.Debug(\"mysql position\", p.Input.Options.Position)\n\tmyCtx, cancel := context.WithCancel(ctx)\n\tp.ctx = myCtx\n\tgo func() {\n\t\tvar err error\n\t\tdefer func() {\n\t\t\tif r := recover(); r != nil {\n\t\t\t\tlogrus.Errorln(\"pipeline run panic, \", r)\n\t\t\t}\n\t\t\tcancel()\n\t\t}()\n\t\tif err = p.Input.Run(myCtx); err != nil {\n\t\t\tevent.Event(event2.NewErrorPipeline(p.Options.Pipeline.Name, \"Start error: \"+err.Error()))\n\t\t\treturn\n\t\t}\n\t\tif err = p.Filter.Run(myCtx); err != nil {\n\t\t\tevent.Event(event2.NewErrorPipeline(p.Options.Pipeline.Name, \"Start error: \"+err.Error()))\n\t\t\treturn\n\t\t}\n\t\tif err = p.Output.Run(myCtx); err != nil {\n\t\t\tevent.Event(event2.NewErrorPipeline(p.Options.Pipeline.Name, \"Start error: \"+err.Error()))\n\t\t\treturn\n\t\t}\n\t\tevent.Event(event2.NewInfoPipeline(p.Options.Pipeline.Name, \"Start succeeded\"))\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\t{\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tcase <-p.Input.Context().Done():\n\t\t\t\t{\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tcase <-p.Filter.Context().Done():\n\t\t\t\t{\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tcase <-p.Output.Context().Done():\n\t\t\t\t{\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}()\n}",
"func runActivation(db *deep6.Deep6DB, crdtm *crdt.CRDTManager, gqlm *n3gql.GQLManager) error {\n\n\t// create context to manage pipeline\n\tctx, cancelFunc := context.WithCancel(context.Background())\n\tdefer cancelFunc()\n\n\t// start the stream listener for this context\n\titerator, err := crdtm.StartReceiver()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// error channels to monitor pipeline\n\tvar errcList []<-chan error\n\n\t// create a splitter for the stream\n\tstreamIterator1, streamIterator2, errc, err := streamSplitter(ctx, iterator)\n\tif err != nil {\n\t\treturn err\n\t}\n\terrcList = append(errcList, errc)\n\n\t// create the db sink stage\n\terrc, err = connectDB(ctx, db, streamIterator1)\n\tif err != nil {\n\t\treturn err\n\t}\n\terrcList = append(errcList, errc)\n\n\t// create the gql sink stage\n\terrc, err = connectGQL(ctx, gqlm, streamIterator2)\n\tif err != nil {\n\t\treturn err\n\t}\n\terrcList = append(errcList, errc)\n\n\treturn WaitForPipeline(errcList...)\n\n}",
"func (r *processRunner) run(pipeID, componentID string, cancel chan struct{}, in <-chan message, meter *meter) (<-chan message, <-chan error) {\n\terrc := make(chan error, 1)\n\tr.in = in\n\tr.out = make(chan message)\n\tgo func() {\n\t\tdefer close(r.out)\n\t\tdefer close(errc)\n\t\tcall(r.reset, pipeID, errc) // reset hook\n\t\tvar err error\n\t\tvar m message\n\t\tvar ok bool\n\t\tfor {\n\t\t\t// retrieve new message\n\t\t\tselect {\n\t\t\tcase m, ok = <-in:\n\t\t\t\tif !ok {\n\t\t\t\t\tcall(r.flush, pipeID, errc) // flush hook\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tcase <-cancel:\n\t\t\t\tcall(r.interrupt, pipeID, errc) // interrupt hook\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tm.applyTo(componentID) // apply params\n\t\t\tm.Buffer, err = r.fn(m.Buffer) // process new buffer\n\t\t\tif err != nil {\n\t\t\t\terrc <- err\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tmeter = meter.sample(int64(m.Buffer.Size())).message()\n\n\t\t\tm.feedback.applyTo(componentID) // apply feedback\n\n\t\t\t// send message further\n\t\t\tselect {\n\t\t\tcase r.out <- m:\n\t\t\tcase <-cancel:\n\t\t\t\tcall(r.interrupt, pipeID, errc) // interrupt hook\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n\treturn r.out, errc\n}",
"func TransformFeedbackBufferRange(xfb uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowTransformFeedbackBufferRange(gpTransformFeedbackBufferRange, (C.GLuint)(xfb), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func TransformFeedbackBufferRange(xfb uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowTransformFeedbackBufferRange(gpTransformFeedbackBufferRange, (C.GLuint)(xfb), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func Main(timeStampChannel chan<- []int64, tcpDataStreamCh <-chan tcphttpclient.Segment) {\n\tfor {\n\t\tvar timestampTuple = make([]int64, 0, 2)\n\t\tvar effect float64\n\t\t//var average float64\n\t\tvar threshold float64\n\t\tvar MEAChannel int64\n\t\tvar temp int8\n\n\t\t//\taverage = 0\n\t\teffect = 0.1\n\t\tthreshold = 5000000\n\t\tTimeStamp = 0\n\n\t\tvar wasActive = make([]int8, 60, 60)\n\t\tvar averages = make([]float64, 60, 60)\n\n\t\tfor {\n\t\t\trecord := <-tcpDataStreamCh\n\t\t\tatomic.AddInt64(&TimeStamp, 100)\n\t\t\tfor j := range record {\n\t\t\t\tval := -record[j]\n\t\t\t\t// UPDATE FILTER\n\t\t\t\taverages[j] = (1 - effect) * averages[j] + effect * float64(val) // UPDATES FILTER\n\t\t\t\tdiff := float64(val) - averages[j]\n\n\t\t\t\t// SEND TIMESTAMP\n\t\t\t\ttemp = 0\n\t\t\t\tif diff > threshold && wasActive[j] == 0 {\n\t\t\t\t\twasActive[j] = 1\n\t\t\t\t\ttemp = 1\n\t\t\t\t\ttimestampTuple = make([]int64, 0, 2)\n\t\t\t\t\tMEAChannel = int64(j)\n\t\t\t\t\ttimestampTuple = append(timestampTuple, atomic.LoadInt64(&TimeStamp))\n\t\t\t\t\ttimestampTuple = append(timestampTuple, MEAChannel)\n\t\t\t\t\ttimeStampChannel <- timestampTuple\n\t\t\t\t}\n\t\t\t\tif wasActive[j] == 1 && temp == 0 {\n\t\t\t\t\twasActive[j] = 0\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}",
"func doTextProcessor(p proc.TextProcessor, label string, c *Chunk, msg string) *Chunk {\n\tres := p.Run(c.Data)\n\n\tfor _, match := range res.Matches {\n\t\tformattedMsg := fmt.Sprintf(msg)\n\t\tc.Matches = append(c.Matches, NewMatch(match.Match, label, match.Indices, formattedMsg))\n\t\tc.Score += 1\n\t}\n\n\treturn c\n}",
"func (p *DefaultPipeline) Do() error {\n\t// prepare pipeline\n\tsReader, err := p.tarStage()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif p.samplesStage != nil {\n\t\tsReader = p.samplesStage(sReader)\n\t}\n\n\texReader := p.sample2ExampleStage(sReader)\n\n\tif p.tfExamplesStage != nil {\n\t\texReader = p.tfExamplesStage(exReader)\n\t}\n\n\t// The whole pipeline is ready, start doing the job\n\treturn p.tfRecordStage(exReader)\n}",
"func ResumeTransformFeedback() {\n C.glowResumeTransformFeedback(gpResumeTransformFeedback)\n}",
"func fnPTransform(ctx Context, doc *JDoc, params []string) interface{} {\n\t// note: calling ptransform in sync or debug mode does not make sense - should we raise an error in such a scenario?\n\tstats := ctx.Value(EelTotalStats).(*ServiceStats)\n\tif params == nil || len(params) == 0 || len(params) > 1 {\n\t\tctx.Log().Error(\"error_type\", \"func_ptransform\", \"op\", \"etransform\", \"cause\", \"wrong_number_of_parameters\", \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"wrong number of parameters in call to ptransform function\"), \"etransform\", params})\n\t\treturn nil\n\t}\n\t// prepare event\n\trawEvent := extractStringParam(params[0])\n\tevent, err := NewJDocFromString(rawEvent)\n\tif err != nil {\n\t\tctx.Log().Error(\"error_type\", \"func_ptransform\", \"op\", \"etransform\", \"cause\", \"invalid_json\", \"params\", params, \"error\", err.Error())\n\t\tstats.IncErrors()\n\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"non json parameters in call to ptransform function\"), \"etransform\", params})\n\t\treturn nil\n\t}\n\t// apply debug logs\n\tlogParams := GetConfig(ctx).LogParams\n\tif logParams != nil {\n\t\tfor k, v := range logParams {\n\t\t\tev := event.ParseExpression(ctx, v)\n\t\t\tctx.AddLogValue(k, ev)\n\t\t}\n\t}\n\t// handle event and execute publisher(s)\n\t// both sync=true or debug=true would not make sense here\n\thandleEvent(ctx, stats, event, rawEvent, false, false)\n\treturn nil\n}",
"func runProcessor() {\n\t// process callback is invoked for each message delivered from\n\t// \"example-stream\" topic.\n\tcb := func(ctx goka.Context, msg interface{}) {\n\n\t\t// during the second run, this should break (as value should already be in context)\n\t\tif val := ctx.Value(); val != nil {\n\t\t\tpanic(fmt.Sprintf(\"dealing with a value already in context %v\", ctx.Value()))\n\t\t}\n\n\t\t// store received value in context (first run)\n\t\tctx.SetValue(msg.(string))\n\t\tlog.Printf(\"stored to ctx key = %s, msg = %v\", ctx.Key(), msg)\n\t}\n\n\t// Define a new processor group. The group defines all inputs, outputs, and\n\t// serialization formats. The group-table topic is \"example-group-table\".\n\tg := goka.DefineGroup(group,\n\t\tgoka.Input(topic, new(codec.String), cb),\n\t\tgoka.Persist(new(codec.String)),\n\t)\n\n\tp, err := goka.NewProcessor(brokers, g)\n\tif err != nil {\n\t\tlog.Fatalf(\"error creating processor: %v\", err)\n\t}\n\tif err = p.Run(context.Background()); err != nil {\n\t\tlog.Fatalf(\"error running processor: %v\", err)\n\t}\n}",
"func (b *batch) trigger() {\n\tb.start.Do(b.run)\n}",
"func (b *batch) trigger() {\n\tb.start.Do(b.run)\n}",
"func feedback(t *tufCommander, payload []byte) error {\n\t// We only get here when everything goes well, since the flag \"quiet\" was\n\t// provided, we output nothing but just return.\n\tif t.quiet {\n\t\treturn nil\n\t}\n\n\t// Flag \"quiet\" was not \"true\", that's why we get here.\n\tif t.output != \"\" {\n\t\treturn ioutil.WriteFile(t.output, payload, 0644)\n\t}\n\n\tos.Stdout.Write(payload)\n\treturn nil\n}",
"func runIntermediateStage(listener net.Listener, functionList []types.AnyFunc, myID string, position int,\r\n\tregisterType interface{}, masterAddress string) {\r\n\r\n\tinputQueue := makeQueue()\r\n\toutputQueue := makeQueue()\r\n\tgo executeAndSend(functionList, position, myID, inputQueue, outputQueue)\r\n\tsetUpSignalHandler(inputQueue, outputQueue, masterAddress)\r\n\tfor {\r\n\t\tlogPrint(\"Waiting for connection from whoever\")\r\n\t\tlistenerConnection, err := listener.Accept()\r\n\t\tif err != nil {\r\n\t\t\tpanic(err)\r\n\t\t}\r\n\t\tgo handleConnection(listenerConnection, registerType, inputQueue)\r\n\t}\r\n}",
"func TestTransformer_default(t *testing.T) {\n\tsrcTopic := getTopic(t, \"source-topic\")\n\tdstTopic := srcTopic + \"-passthrough\"\n\n\tconfig := kafka.Config{\n\t\tSourceTopic: srcTopic,\n\t\tConsumerConfig: getConsumerConfig(t, \"integration-test-group\"),\n\t\tProducerConfig: getProducerConfig(),\n\t}\n\n\ttransformer, err := kafka.NewKafkaTransformer(config)\n\tif err != nil {\n\t\tt.Fatalf(\"unexpected error: %v\", err)\n\t}\n\tdefer transformer.Stop()\n\n\tgo func() {\n\t\terr = transformer.Run()\n\t}()\n\tif err != nil {\n\t\tt.Fatalf(\"unexpected error: %v\", err)\n\t}\n\n\tmessages := messages(srcTopic, 5)\n\tproduceMessages(t, messages)\n\tassertMessagesinTopic(t, dstTopic, messages)\n}",
"func (r *reducer) start() {\n\tfor _, m := range r.mappers {\n\t\tm.start()\n\t}\n\tgo r.run()\n}",
"func (t *Transport) start(msg Message, stream *Stream, out []byte) (n int) {\n\tatomic.AddUint64(&t.nTxstart, 1)\n\tn = tag2cbor(tagCborPrefix, out) // prefix\n\tn += arrayStart(out[n:]) // 0x9f (start stream as cbor array)\n\tn += t.framepkt(msg, stream, out[n:]) // packet\n\treturn n\n}",
"func (h *pardo) PrepareTransform(tid string, t *pipepb.PTransform, comps *pipepb.Components) (*pipepb.Components, []string) {\n\n\t// ParDos are a pain in the butt.\n\t// Combines, by comparison, are dramatically simpler.\n\t// This is because for ParDos, how they are handled, and what kinds of transforms are in\n\t// and around the ParDo, the actual shape of the graph will change.\n\t// At their simplest, it's something a DoFn will handle on their own.\n\t// At their most complex, they require intimate interaction with the subgraph\n\t// bundling process, the data layer, state layers, and control layers.\n\t// But unlike combines, which have a clear urn for composite + special payload,\n\t// ParDos have the standard URN for composites with the standard payload.\n\t// So always, we need to first unmarshal the payload.\n\n\tpardoPayload := t.GetSpec().GetPayload()\n\tpdo := &pipepb.ParDoPayload{}\n\tif err := (proto.UnmarshalOptions{}).Unmarshal(pardoPayload, pdo); err != nil {\n\t\tpanic(fmt.Sprintf(\"unable to decode ParDoPayload for transform[%v]\", t.GetUniqueName()))\n\t}\n\n\t// Lets check for and remove anything that makes things less simple.\n\tif pdo.OnWindowExpirationTimerFamilySpec == \"\" &&\n\t\t!pdo.RequestsFinalization &&\n\t\t!pdo.RequiresStableInput &&\n\t\t!pdo.RequiresTimeSortedInput &&\n\t\tlen(pdo.StateSpecs) == 0 &&\n\t\tlen(pdo.TimerFamilySpecs) == 0 &&\n\t\tpdo.RestrictionCoderId == \"\" {\n\t\t// Which inputs are Side inputs don't change the graph further,\n\t\t// so they're not included here. Any nearly any ParDo can have them.\n\n\t\t// At their simplest, we don't need to do anything special at pre-processing time, and simply pass through as normal.\n\t\treturn &pipepb.Components{\n\t\t\tTransforms: map[string]*pipepb.PTransform{\n\t\t\t\ttid: t,\n\t\t\t},\n\t\t}, nil\n\t}\n\n\t// Side inputs add to topology and make fusion harder to deal with\n\t// (side input producers can't be in the same stage as their consumers)\n\t// But we don't have fusion yet, so no worries.\n\n\t// State, Timers, Stable Input, Time Sorted Input, and some parts of SDF\n\t// Are easier to deal including a fusion break. But We can do that with a\n\t// runner specific transform for stable input, and another for timesorted\n\t// input.\n\n\t// SplittableDoFns have 3 required phases and a 4th optional phase.\n\t//\n\t// PAIR_WITH_RESTRICTION which pairs elements with their restrictions\n\t// Input: element; := INPUT\n\t// Output: KV(element, restriction) := PWR\n\t//\n\t// SPLIT_AND_SIZE_RESTRICTIONS splits the pairs into sub element ranges\n\t// and a relative size for each, in a float64 format.\n\t// Input: KV(element, restriction) := PWR\n\t// Output: KV(KV(element, restriction), float64) := SPLITnSIZED\n\t//\n\t// PROCESS_SIZED_ELEMENTS_AND_RESTRICTIONS actually processes the\n\t// elements. This is also where splits need to be handled.\n\t// In particular, primary and residual splits have the same format as the input.\n\t// Input: KV(KV(element, restriction), size) := SPLITnSIZED\n\t// Output: DoFn's output. := OUTPUT\n\t//\n\t// TRUNCATE_SIZED_RESTRICTION is how the runner has an SDK turn an\n\t// unbounded transform into a bound one. Not needed until the pipeline\n\t// is told to drain.\n\t// Input: KV(KV(element, restriction), float64) := synthetic split results from above\n\t// Output: KV(KV(element, restriction), float64). 
:= synthetic, truncated results sent as Split n Sized\n\t//\n\t// So with that, we can figure out the coders we need.\n\t//\n\t// cE - Element Coder (same as input coder)\n\t// cR - Restriction Coder\n\t// cS - Size Coder (float64)\n\t// ckvER - KV<Element, Restriction>\n\t// ckvERS - KV<KV<Element, Restriction>, Size>\n\t//\n\t// There could be a few output coders, but the outputs can be copied from\n\t// the original transform directly.\n\n\t// First lets get the parallel input coder ID.\n\tvar pcolInID, inputLocalID string\n\tfor localID, globalID := range t.GetInputs() {\n\t\t// The parallel input is the one that isn't a side input.\n\t\tif _, ok := pdo.SideInputs[localID]; !ok {\n\t\t\tinputLocalID = localID\n\t\t\tpcolInID = globalID\n\t\t\tbreak\n\t\t}\n\t}\n\tinputPCol := comps.GetPcollections()[pcolInID]\n\tcEID := inputPCol.GetCoderId()\n\tcRID := pdo.RestrictionCoderId\n\tcSID := \"c\" + tid + \"size\"\n\tckvERID := \"c\" + tid + \"kv_ele_rest\"\n\tckvERSID := ckvERID + \"_size\"\n\n\tcoder := func(urn string, componentIDs ...string) *pipepb.Coder {\n\t\treturn &pipepb.Coder{\n\t\t\tSpec: &pipepb.FunctionSpec{\n\t\t\t\tUrn: urn,\n\t\t\t},\n\t\t\tComponentCoderIds: componentIDs,\n\t\t}\n\t}\n\n\tcoders := map[string]*pipepb.Coder{\n\t\tckvERID: coder(urns.CoderKV, cEID, cRID),\n\t\tcSID: coder(urns.CoderDouble),\n\t\tckvERSID: coder(urns.CoderKV, ckvERID, cSID),\n\t}\n\n\t// PCollections only have two new ones.\n\t// INPUT -> same as ordinary DoFn\n\t// PWR, uses ckvER\n\t// SPLITnSIZED, uses ckvERS\n\t// OUTPUT -> same as ordinary outputs\n\n\tnPWRID := \"n\" + tid + \"_pwr\"\n\tnSPLITnSIZEDID := \"n\" + tid + \"_splitnsized\"\n\n\tpcol := func(name, coderID string) *pipepb.PCollection {\n\t\treturn &pipepb.PCollection{\n\t\t\tUniqueName: name,\n\t\t\tCoderId: coderID,\n\t\t\tIsBounded: inputPCol.GetIsBounded(),\n\t\t\tWindowingStrategyId: inputPCol.GetWindowingStrategyId(),\n\t\t}\n\t}\n\n\tpcols := map[string]*pipepb.PCollection{\n\t\tnPWRID: pcol(nPWRID, ckvERID),\n\t\tnSPLITnSIZEDID: pcol(nSPLITnSIZEDID, ckvERSID),\n\t}\n\n\t// PTransforms have 3 new ones, with process sized elements and restrictions\n\t// taking the brunt of the complexity, consuming the inputs\n\n\tePWRID := \"e\" + tid + \"_pwr\"\n\teSPLITnSIZEDID := \"e\" + tid + \"_splitnsize\"\n\teProcessID := \"e\" + tid + \"_processandsplit\"\n\n\ttform := func(name, urn, in, out string) *pipepb.PTransform {\n\t\treturn &pipepb.PTransform{\n\t\t\tUniqueName: name,\n\t\t\tSpec: &pipepb.FunctionSpec{\n\t\t\t\tUrn: urn,\n\t\t\t\tPayload: pardoPayload,\n\t\t\t},\n\t\t\tInputs: map[string]string{\n\t\t\t\tinputLocalID: in,\n\t\t\t},\n\t\t\tOutputs: map[string]string{\n\t\t\t\t\"i0\": out,\n\t\t\t},\n\t\t\tEnvironmentId: t.GetEnvironmentId(),\n\t\t}\n\t}\n\n\tnewInputs := maps.Clone(t.GetInputs())\n\tnewInputs[inputLocalID] = nSPLITnSIZEDID\n\n\ttforms := map[string]*pipepb.PTransform{\n\t\tePWRID: tform(ePWRID, urns.TransformPairWithRestriction, pcolInID, nPWRID),\n\t\teSPLITnSIZEDID: tform(eSPLITnSIZEDID, urns.TransformSplitAndSize, nPWRID, nSPLITnSIZEDID),\n\t\teProcessID: {\n\t\t\tUniqueName: eProcessID,\n\t\t\tSpec: &pipepb.FunctionSpec{\n\t\t\t\tUrn: urns.TransformProcessSizedElements,\n\t\t\t\tPayload: pardoPayload,\n\t\t\t},\n\t\t\tInputs: newInputs,\n\t\t\tOutputs: t.GetOutputs(),\n\t\t\tEnvironmentId: t.GetEnvironmentId(),\n\t\t},\n\t}\n\n\treturn &pipepb.Components{\n\t\tCoders: coders,\n\t\tPcollections: pcols,\n\t\tTransforms: tforms,\n\t}, t.GetSubtransforms()\n}",
"func (s *Stream) Transform(op api.UnOperation) *Stream {\n\toperator := unary.New(s.ctx)\n\toperator.SetOperation(op)\n\ts.ops = append(s.ops, operator)\n\treturn s\n}",
"func (f *YFastBilateral) Perform() {\n\tf.minmaxOnce.Do(f.minmax)\n\tf.downsampling()\n\tf.convolution()\n\tf.normalize()\n}",
"func Pass1(ctx context.Context, fi FileInfo, cropArg string, ch chan<- progress.Report) {\n\tdefer close(ch)\n\tch <- progress.Report{Completed: 0.0}\n\targs := []string{\n\t\t\"-i\", fi.Filename,\n\t\t// Process all streams.\n\t\t\"-map\", \"0\",\n\t\t// Copy streams by default, eg subtitles.\n\t\t\"-c\", \"copy\",\n\t\t// Increase buffer.\n\t\t\"-max_muxing_queue_size\", \"400\",\n\t}\n\tvideoQualityArgs(&args, &fi, cropArg, 1)\n\tfor _, s := range fi.Streams {\n\t\tif s.ShouldSkip() {\n\t\t\targs = append(args, \"-map\", \"-0:\"+s.Id)\n\t\t}\n\t}\n\tif *darFlag != \"\" {\n\t\targs = append(args, \"-vf\", \"setdar=dar=\"+*darFlag)\n\t}\n\targs = append(args, \"-passlogfile\", fi.passlogfile(), \"-pass\", \"1\", \"-f\", \"matroska\", \"-y\", \"/dev/null\")\n\tif *showCmdFlag {\n\t\tfmt.Printf(\"$ ffmpeg '%s'\\n\", strings.Join(args, \"' '\"))\n\t}\n\tcmd := exec.CommandContext(ctx, \"ffmpeg\", args...)\n\toutput, err := start(cmd)\n\tif err != nil {\n\t\tch <- progress.Report{Err: err}\n\t\treturn\n\t}\n\treadConversionProgress(output, fi, ch)\n\tif err = cmd.Wait(); err != nil {\n\t\tch <- progress.Report{Err: err}\n\t}\n}",
"func (sl *StagesLatency) calculate() {\n\t// log.Println( \"- t.results set from t.current\" )\n\tsl.Results = make(Rows, len(sl.last))\n\tcopy(sl.Results, sl.last)\n\tif sl.WantRelativeStats() {\n\t\tsl.Results.subtract(sl.first)\n\t}\n\tsl.Totals = totals(sl.Results)\n}",
"func (e *Engine) callback(in []float32, out [][]float32) {\n\tfor k := 0; k < e.chunks; k++ {\n\t\tif msg := e.messages.Receive(); msg != nil {\n\t\t\te.handle(msg)\n\t\t}\n\n\t\tvar (\n\t\t\tframeSize = e.frameSize\n\t\t\toffset = frameSize * k\n\t\t\tinput = e.graph.in\n\t\t\tleftOut = e.graph.leftOut\n\t\t\trightOut = e.graph.rightOut\n\t\t\tgain = e.gain\n\t\t)\n\t\tfor i := 0; i < frameSize; i++ {\n\t\t\tinput[i] = float64(in[offset+i])\n\t\t}\n\t\tfor _, p := range e.graph.Processors() {\n\t\t\tp.ProcessFrame(frameSize)\n\t\t}\n\t\tfor i := range out {\n\t\t\tfor j := 0; j < frameSize; j++ {\n\t\t\t\tif i%2 == 0 {\n\t\t\t\t\tout[i][offset+j] = float32(leftOut[j]) * gain\n\t\t\t\t} else {\n\t\t\t\t\tout[i][offset+j] = float32(rightOut[j]) * gain\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}",
"func (s *BaseSyslParserListener) EnterTransform_arg(ctx *Transform_argContext) {}",
"func (graphMinion *graphMinion) start() {\n\tgo func() {\n\t\tdefer graphMinion.wg.Done()\n\t\tfor {\n\n\t\t\t// pull reads from queue until done\n\t\t\tmappingData, ok := <-graphMinion.inputChannel\n\t\t\tif !ok {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif mappingData == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// increment the nodes contained in the mapping window\n\t\t\tmisc.ErrorCheck(graphMinion.graph.IncrementSubPath(mappingData.ContainedNodes, mappingData.Freq))\n\t\t}\n\t}()\n}",
"func (r fifo) Run(ctx context.Context, params StageParams) {\n\tfor {\n\t\tselect {\n\t\tcase <-ctx.Done():\n\t\t\treturn\n\t\tcase payloadIn, ok := <-params.Input():\n\t\t\tif !ok {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tpayloadOut, err := r.proc.Process(ctx, payloadIn)\n\t\t\tif err != nil {\n\t\t\t\twrappedErr := xerrors.Errorf(\"pipeline stage %d : %w \", params.StageIndex(), err)\n\t\t\t\tmaybeEmitError(wrappedErr, params.Error())\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif payloadOut == nil {\n\t\t\t\tpayloadIn.MarkAsProcessed()\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tselect {\n\t\t\tcase params.Output() <- payloadOut:\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\n}",
"func (t *Track) compile() {\n\t/*\n\t\tsort.Sort(TempoSorted(t.Tempi))\n\t\tsort.Sort(EventsSorted(t.Events))\n\t*/\n\n\tif len(t.Events) == 0 {\n\t\tt.compiled = true\n\t\treturn\n\t}\n\n\ttempoChanges := map[Measure]Tempo{}\n\n\tfor _, ev := range t.Events {\n\t\t// fmt.Printf(\"event type: %s\\n\", ev.type_)\n\t\tif ev.type_ == \"TEMPO_CHANGE\" {\n\t\t\tbpm := ev.Params.Params()[\"bpm\"]\n\t\t\ttempAt := tempoAt{AbsPos: ev.absPosition, Tempo: BPM(bpm)}\n\t\t\t// fmt.Printf(\"set tempo to: %v at %v\\n\", BPM(bpm), tempAt.AbsPos)\n\t\t\tt.tempi = append(t.tempi, tempAt)\n\t\t\t// t.setTempo(ev.absPosition, BPM(bpm))\n\t\t}\n\t}\n\n\tfor _, tm := range t.tempi {\n\t\ttempoChanges[tm.AbsPos] = tm.Tempo\n\t}\n\n\tevents := map[Measure][]*Event{}\n\n\tfor _, ev := range t.Events {\n\t\tif ev.type_ != \"TEMPO_CHANGE\" {\n\t\t\t//fmt.Println(\"AbsPosition\", ev.AbsPosition)\n\t\t\tevents[ev.absPosition] = append(events[ev.absPosition], ev)\n\t\t}\n\t}\n\n\t//\tprevTempoNum := 0\n\tvar millisecs float64 = 0\n\tcurrentTempo := tempoChanges[Measure(0)]\n\t// fmt.Printf(\"start tempo: %v\\n\", currentTempo.MilliSecs(Measure(1)))\n\n\t// lastEventPos := t.Events[len(t.Events)-1].AbsPosition\n\n\t//fmt.Println(\"lastEventPos\", int(lastEventPos))\n\n\t//for i := 0; i < int(lastEventPos)+1; i++ {\n\ti := 0\n\tfor {\n\t\tif len(events) == 0 {\n\t\t\tbreak\n\t\t}\n\t\ttm, hasT := tempoChanges[Measure(i)]\n\t\tif hasT {\n\t\t\t// fmt.Printf(\"has tempo changes at position %v\\n\", Measure(i))\n\t\t\tcurrentTempo = tm\n\t\t}\n\n\t\t// fmt.Println(\"currentTempo\", currentTempo)\n\n\t\tevts, hasE := events[Measure(i)]\n\t\tif hasE {\n\t\t\t// fmt.Println(\"millisecs\", millisecs)\n\t\t\t//fmt.Printf(\"has events at position %v (%v), millisecs: %v\\n\", i, Measure(i), millisecs)\n\t\t\tfor _, ev := range evts {\n\t\t\t\tev.tick = uint(millisecs) //currentTempo.MilliSecs(ev.AbsPosition)\n\t\t\t}\n\n\t\t\tdelete(events, Measure(i))\n\t\t}\n\t\t// fmt.Printf(\"adding %d\\n\", int(currentTempo.MilliSecs(Measure(1))))\n\t\t//millisecs += int(currentTempo.MilliSecs(Measure(1)))\n\t\t//fmt.Printf(\"adding %d (%0f)\\n\", int(RoundFloat(currentTempo.MilliSecs(Measure(1)), 0)), currentTempo.MilliSecs(Measure(1)))\n\t\t// fmt.Printf(\"adding %d\\n\", int(currentTempo.MilliSecs(Measure(1))))\n\t\t//millisecs += int(RoundFloat(currentTempo.MilliSecs(Measure(1)), 0))\n\t\t//millisecs += int(currentTempo.MilliSecs(Measure(1)))\n\t\tmillisecs += currentTempo.MilliSecs(Measure(1))\n\t\ti++\n\t}\n\tt.compiled = true\n\tdebug.FreeOSMemory()\n}",
"func (self *ComponentScaleMinMax) TransformCallbackContext() interface{}{\n return self.Object.Get(\"transformCallbackContext\")\n}",
"func (s *MetalLBSpeaker) run(ctx context.Context) {\n\tl := log.WithFields(\n\t\tlogrus.Fields{\n\t\t\t\"component\": \"MetalLBSpeaker.run\",\n\t\t},\n\t)\n\tfor {\n\t\t// only check ctx here, we'll allow any in-flight\n\t\t// events to be processed completely.\n\t\tif ctx.Err() != nil {\n\t\t\treturn\n\t\t}\n\t\t// previous to this iteration, we processed an event\n\t\t// which indicates the speaker should yield. shut\n\t\t// it down.\n\t\tif s.shutdown.Load() {\n\t\t\tl.Info(\"speaker shutting down.\")\n\t\t\treturn\n\t\t}\n\t\tkey, quit := s.queue.Get()\n\t\tif quit {\n\t\t\treturn\n\t\t}\n\t\tl.Info(\"processing new event.\")\n\t\tst := s.do(key)\n\t\tswitch st {\n\t\tcase types.SyncStateError:\n\t\t\ts.queue.Add(key)\n\t\t\t// done must be called to requeue event after add.\n\t\tcase types.SyncStateSuccess, types.SyncStateReprocessAll:\n\t\t\t// SyncStateReprocessAll is returned in MetalLB when the\n\t\t\t// configuration changes. However, we are not watching for\n\t\t\t// configuration changes because our configuration is static and\n\t\t\t// loaded once at Cilium start time.\n\t\t}\n\t\t// if queue.Add(key) is called previous to this invocation the event\n\t\t// is requeued, else it is discarded from the queue.\n\t\ts.queue.Done(key)\n\t}\n}",
"func (m *mapper) start() {\n\tm.itr = m.executor.db.CreateIterator(m.seriesID, m.fieldID, m.typ,\n\t\tm.executor.min, m.executor.max, m.executor.interval)\n\tgo m.run()\n}",
"func (t *readFramebuffer) Transform(ctx context.Context, id api.CmdID, cmd api.Cmd, out transform.Writer) error {\n\ts := out.State()\n\tst := GetState(s)\n\tif cmd, ok := cmd.(*InsertionCommand); ok {\n\t\tidx_string := keyFromIndex(cmd.idx)\n\t\tif r, ok := t.injections[idx_string]; ok {\n\t\t\t// If this command is FOR an EOF command, we want to mutate it, so that\n\t\t\t// we have the presentation info available.\n\t\t\tif cmd.callee != nil && cmd.callee.CmdFlags(ctx, id, s).IsEndOfFrame() {\n\t\t\t\tcmd.callee.Mutate(ctx, id, out.State(), nil, nil)\n\t\t\t}\n\t\t\tfor _, injection := range r {\n\t\t\t\tif err := injection.fn(ctx, cmd, injection.res, out); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn nil\n\t\t}\n\t}\n\tif err := out.MutateAndWrite(ctx, id, cmd); err != nil {\n\t\treturn err\n\t}\n\t// If we have no deferred submissions left, then we can terminate\n\tif len(t.pendingReads) > 0 && len(st.deferredSubmissions) == 0 {\n\t\tif id != api.CmdNoID {\n\t\t\treturn t.FlushPending(ctx, out)\n\t\t}\n\t}\n\treturn nil\n}",
"func (er *BufferedExchangeReporter) Start() {\n\n}",
"func (apu *APU) Step() {\n\tvar sampleLeft float32\n\tvar sampleRight float32\n\tapu.sampleT += stepDuration\n\n\tif !apu.active {\n\t\tapu.sampleLeft = 0\n\t\tapu.sampleRight = 0\n\n\t\tif apu.sampleT >= sampleDuration {\n\t\t\tapu.sampleT -= sampleDuration\n\t\t\tapu.m.Lock()\n\t\t\tapu.soundBuffer = append(apu.soundBuffer, sampleLeft, sampleRight)\n\t\t\tsampleCount := len(apu.soundBuffer)\n\t\t\tapu.m.Unlock()\n\t\t\tif sampleCount > sampleBufferLength*channelCount*2 {\n\t\t\t\tsleepTime := sampleDuration * sampleBufferLength\n\t\t\t\ttime.Sleep(sleepTime)\n\t\t\t}\n\t\t}\n\t\treturn\n\t}\n\n\tstep := apu.fs.step()\n\n\tfor i, sc := range apu.generators {\n\t\tsc.Step(step)\n\t\tsample := sc.CurrentSample()\n\t\tsampleLeft += (sample * apu.getVolume(left, i))\n\t\tsampleRight += (sample * apu.getVolume(right, i))\n\t}\n\n\tgenCount := float32(len(apu.generators))\n\tapu.sampleLeft = mix(apu.sampleLeft, (sampleLeft / genCount))\n\tapu.sampleRight = mix(apu.sampleRight, (sampleLeft / genCount))\n\n\tif apu.sampleT >= sampleDuration {\n\t\tapu.sampleT -= sampleDuration\n\n\t\tsampleLeft = apu.sampleLeft * apu.masterVolume\n\t\tsampleRight = apu.sampleRight * apu.masterVolume\n\t\tapu.sampleLeft = 0\n\t\tapu.sampleRight = 0\n\n\t\tapu.m.Lock()\n\t\tapu.soundBuffer = append(apu.soundBuffer, sampleLeft, sampleRight)\n\t\tsampleCount := len(apu.soundBuffer)\n\t\tapu.m.Unlock()\n\n\t\tif (sampleCount > sampleBufferLength*channelCount*2) && !apu.TestMode {\n\t\t\tsleepTime := sampleDuration * sampleBufferLength\n\t\t\ttime.Sleep(sleepTime)\n\t\t}\n\t}\n}",
"func PauseTransformFeedback() {\n\tsyscall.Syscall(gpPauseTransformFeedback, 0, 0, 0, 0)\n}",
"func worker(threads int, doneWorker chan<- bool, imageTasks <-chan *imagetask.ImageTask, imageResults chan<- *imagetask.ImageTask) {\n\n\t// Initial placing image chunks in to a channel for filter to consume.\n\tchunkStreamGenerator := func(done <- chan interface{}, imageChunks []*imagetask.ImageTask) chan *imagetask.ImageTask {\n\t\tchunkStream := make(chan *imagetask.ImageTask)\n\t\tgo func() {\n\t\t\tdefer close(chunkStream)\n\t\t\tfor _, chunk := range imageChunks {\n\t\t\t\tselect {\n\t\t\t\tcase <-done:\n\t\t\t\t\treturn\n\t\t\t\tcase chunkStream <- chunk:\n\t\t\t\t}\n\t\t\t}\n\t\t}()\n\t\treturn chunkStream\n\t}\n\n\t// Filter applies a filter in a pipeline fashion. \n\t// A goroutine is spawned for each chunk that needs to be filtered (which is numOfThreads chunks for each filter effect)\n\tfilter := func(threads int, effect string, effectNum int, done <- chan interface{}, chunkStream chan *imagetask.ImageTask) chan *imagetask.ImageTask {\n\t\tfilterStream := make(chan *imagetask.ImageTask, threads) // Only numOfThreads image chunks should be in the local filter channel.\n\t\tdonefilterChunk := make(chan bool)\n\t\tfor chunk := range chunkStream { // For each image chunk ...\n\t\t\tif effectNum > 0 {\n\t\t\t\tchunk.Img.UpdateInImg() // Replace inImg with outImg if not the first effect to compund effects.\n\t\t\t}\t\n\t\t\tgo func(chunk *imagetask.ImageTask) { // Spawn a goroutine for each chunk, which is equal to the numOfThreads. Each goroutine works on a portion of the image.\n\t\t\t\tselect {\n\t\t\t\tcase <-done:\n\t\t\t\t\tdonefilterChunk <- true\n\t\t\t\t\treturn\n\t\t\t\tcase filterStream <- chunk:\n\t\t\t\t\tif effect != \"G\" {\n\t\t\t\t\t\tchunk.Img.ApplyConvolution(effect) // Can wait to apply effect until after chunk is in the channel because has to wait for all goroutines to finish before it can move on to the next filter for a given image.\n\t\t\t\t\t} else {\n\t\t\t\t\t\tchunk.Img.Grayscale()\n\t\t\t\t\t}\n\t\t\t\t\tdonefilterChunk <- true // Indicate that the filtering is done for the given chunk.\n\t\t\t\t}\n\t\t\t}(chunk)\n\t\t}\n\t\tfor i := 0; i < threads; i ++ { // Wait for all portions to be put through one filter because of image dependencies with convolution.\n\t\t\t<-donefilterChunk\n\t\t}\n\t\treturn filterStream\n\t}\n\n\t// While there are more image tasks to grab ...\n\tfor true {\n\t\t// Grab image task from image task channel.\t\n\t\timgTask, more := <-imageTasks\n\n\t\t// If you get an image task, split up the image in to even chunks by y-pixels.\n\t\tif more {\n\t\t\timageChunks := imgTask.SplitImage(threads)\n\t\t\t\n\t\t\t// Iterate through filters on image chunks.\n\t\t\t// Will spawn a goroutine for each chunk in each filter (n goroutines per filter)\n\t\t\tdone := make(chan interface{})\n\t\t\tdefer close(done)\n\t\t\tchunkStream := chunkStreamGenerator(done, imageChunks)\n\t\t\tfor i := 0; i < len(imgTask.Effects); i++ {\n\t\t\t\teffect := imgTask.Effects[i]\n\t\t\t\tchunkStream = filter(threads, effect, i, done, chunkStream)\n\t\t\t\tclose(chunkStream)\n\t\t\t}\n\n\t\t\t// Put the image back together.\n\t\t\treconstructedImage, _ := imgTask.Img.NewImage()\n\t\t\tfor imgChunk := range chunkStream {\n\t\t\t\treconstructedImage.ReAddChunk(imgChunk.Img, imgChunk.YPixelStart, imgChunk.ChunkPart)\n\t\t\t}\n\t\t\timgTask.Img = reconstructedImage\n\t\t\timageResults <- imgTask // Send image to results channel to be saved.\n\n\t\t} else { // Otherwise, if there are no more image tasks, then goroutine worker exits.\n\t\t\tdoneWorker <- true // 
Indicate that the worker is done.\n\t\t\treturn\n\t\t}\n\t}\n}",
"func transformationFeature(transformer Transformer) Feature {\n\ttransformationType := NewFeature(TransformationTypeFeature, -1)\n\tswitch transformer.(type) {\n\tcase logicalOperatorReplacement:\n\t\ttransformationType.Score = LogicalOperatorTransformation\n\tcase *adjacencyRange:\n\t\ttransformationType.Score = AdjacencyRangeTransformation\n\tcase meshExplosion:\n\t\ttransformationType.Score = MeshExplosionTransformation\n\tcase fieldRestrictions:\n\t\ttransformationType.Score = FieldRestrictionsTransformation\n\tcase adjacencyReplacement:\n\t\ttransformationType.Score = AdjacencyReplacementTransformation\n\tcase clauseRemoval:\n\t\ttransformationType.Score = ClauseRemovalTransformation\n\tcase cui2vecExpansion:\n\t\ttransformationType.Score = Cui2vecExpansionTransformation\n\tcase meshParent:\n\t\ttransformationType.Score = MeshParentTransformation\n\t}\n\treturn transformationType\n}",
"func processFile(file *File) {\n\tapplyTransformers(file)\n\tanalyzeFile(file)\n}",
"func (inNode *InputNode) Start() {\n}",
"func XLAT() { ctx.XLAT() }",
"func main() {\n\tPipeline1()\n\t// Pipeline2()\n\t// RunDirectionalChannel()\n\tfmt.Println(\"YYY\")\n}",
"func noTransformation(xs []string) (transform.Transformation, error) {\n\tif len(xs) != 0 {\n\t\treturn nil, ErrBadTransformation\n\t}\n\treturn transform.NoTransformation(), nil\n}",
"func (c *Collector) Start() {\n\tgo c.Source.Start()\n\tc.collect()\n}",
"func (l *lexer) run() {\n\tfor state := lexStart; state != nil; {\n\t\tstate = state(l)\n\t}\n}",
"func (wc *watchChan) transform(e *event) (res *watch.Event) {\n\tcurObj, oldObj, err := wc.prepareObjs(e)\n\tif err != nil {\n\t\tlogrus.Errorf(\"failed to prepare current and previous objects: %v\", err)\n\t\twc.sendError(err)\n\t\treturn nil\n\t}\n\tswitch {\n\tcase e.isProgressNotify:\n\t\tobj := wc.watcher.newFunc()\n\t\t// todo: update object version\n\t\tres = &watch.Event{\n\t\t\tType: watch.Bookmark,\n\t\t\tObject: obj,\n\t\t}\n\tcase e.isDeleted:\n\t\tres = &watch.Event{\n\t\t\tType: watch.Deleted,\n\t\t\tObject: oldObj,\n\t\t}\n\tcase e.isCreated:\n\t\tres = &watch.Event{\n\t\t\tType: watch.Added,\n\t\t\tObject: curObj,\n\t\t}\n\tdefault:\n\t\t// TODO: emit ADDED if the modified object causes it to actually pass the filter but the previous one did not\n\t\tres = &watch.Event{\n\t\t\tType: watch.Modified,\n\t\t\tObject: curObj,\n\t\t}\n\t}\n\treturn res\n}",
"func Transform(imgFile io.Reader, extension string, numberOfShapes int, mode PrimitiveMode) (io.Reader, error) {\n\n\t// Create temp input file.\n\tinputName := \"tempinput\" + extension\n\ti, err := os.Create(inputName)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"primitive.Transform: cannot create temp input - %v\", err)\n\t}\n\n\t// Remember defer is a stack so close needs to happen before remove.\n\tdefer os.Remove(inputName)\n\tdefer i.Close()\n\n\t// Write imgFile into a tempfile.\n\t_, err = io.Copy(i, imgFile)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"primitive.Transform: cannot populate input file - %v\", err)\n\t}\n\n\toutputName := \"tempoutput\" + extension\n\n\trunOutput, err := primCLI(inputName, outputName, numberOfShapes, mode)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"primitive.Transform: error running primitive command - %v\\nOutput: %s\", err, runOutput)\n\t}\n\n\t// Open output file.\n\to, err := os.Open(outputName)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"primitive.Transform: cannot open output file - %v\", err)\n\t}\n\n\t// Read all of outputName to a buffer, create a reader and send it out.\n\tb, err := ioutil.ReadAll(o)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"primitive.Transform: cannot read output file - %v\", err)\n\t}\n\tdefer os.Remove(outputName)\n\tdefer o.Close()\n\n\t// Otherwise the files will not be deleted.\n\tr := bytes.NewReader(b)\n\n\treturn r, nil\n}",
"func (l *lexer) run() {\n\tfor l.state = lexText; l.state != nil; {\n\t\tl.state = l.state(l)\n\t}\n}",
"func (l *lexer) run() {\n\tfor l.state = lexText; l.state != nil; {\n\t\tl.state = l.state(l)\n\t}\n}",
"func main() {\n\t// Full speed ahead\n\truntime.GOMAXPROCS(runtime.NumCPU())\n\n\tsync := make(chan string)\n\tfor _, filename := range flag.Args() {\n\t\tgo convert(filename, sync)\n\t}\n\tfor i, _ := range flag.Args() {\n\t\ts := <- sync\n\t\tfmt.Printf(\"Processed #%d: %s\\n\", i, s)\n\t}\n}",
"func (p *Pipeline) Start() {\n\tC.gstreamer_start_pipeline(p.Pipeline, C.CString(p.id))\n}",
"func (a *Actor) start(wg *sync.WaitGroup) (err error) {\n\tvar stdin io.WriteCloser\n\tvar stdout io.ReadCloser\n\n\t// notify the wait group when we're done\n\tif wg != nil {\n\t\tdefer wg.Done()\n\t}\n\n\t// create a pipe for STDIN\n\tif stdin, err = a.cmd.StdinPipe(); err != nil {\n\t\ta.errLog(err)\n\t\treturn\n\t}\n\n\t// create a pipe for STDOUT\n\tif stdout, err = a.cmd.StdoutPipe(); err != nil {\n\t\ta.errLog(err)\n\t\treturn\n\t}\n\n\t// redirect STDERR on our own one\n\ta.cmd.Stderr = os.Stderr\n\n\t// close STDIN if we're not writable\n\tif !a.writable {\n\t\tstdin.Close()\n\t}\n\n\t// close STDOUT if we're not readable\n\tif !a.readable {\n\t\tstdout.Close()\n\t}\n\n\t// start the underlying command\n\tif err = a.cmd.Start(); err != nil {\n\t\ta.errLog(err)\n\t\treturn\n\t}\n\n\t// bufferize our STDOUT pipe to be able to use higher level reading\n\t// methods\n\tstdoutReader := bufio.NewReader(stdout)\n\n\tvar buf []byte\n\tvar msg string\n\n\t// main loop\n\tfor {\n\t\t// 1. read on our input channel\n\t\tmsg = <-a.input\n\n\t\t// if it's the special \"stop\" message, break the loop\n\t\tif msg == stop {\n\t\t\tbreak\n\t\t}\n\n\t\t// 2. if we're writable, send the input on the command's STDIN\n\t\tif a.writable {\n\t\t\tif _, err = io.WriteString(stdin, msg); err != nil {\n\t\t\t\ta.errLog(err)\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\t// 3. if we're readable...\n\t\tif a.readable {\n\t\t\t// 3.1 read one line on the command's STDOUT...\n\t\t\tif buf, err = stdoutReader.ReadSlice('\\n'); err != nil {\n\t\t\t\ta.errLog(err)\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\t// 3.2 ...and send it on our output channel\n\t\t\ta.output <- string(buf)\n\t\t}\n\t}\n\n\t// close STDIN before waiting for the command to end\n\tif a.writable {\n\t\tstdin.Close()\n\t}\n\n\t// wait for the command to end\n\ta.cmd.Wait()\n\n\t// close our input/output channels. This means we can't start an actor\n\t// twice because the second time its channels will be closed, but we don't\n\t// do that anyway.\n\tclose(a.input)\n\tclose(a.output)\n\n\treturn\n}",
"func (l *Learner) Start() {\n\tgo func() {\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase lrn := <-l.learnIn:\n\t\t\t\tval, out := l.handleLearn(lrn)\n\t\t\t\tif out {\n\t\t\t\t\tl.valOut <- val\n\t\t\t\t\tl.learned = map[int]Learn{}\n\t\t\t\t}\n\t\t\tcase <-l.stop:\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n}",
"func transformSend(n *ir.SendStmt) {\n\tn.Value = assignconvfn(n.Value, n.Chan.Type().Elem())\n}",
"func fnTransform(ctx Context, doc *JDoc, params []string) interface{} {\n\tstats := ctx.Value(EelTotalStats).(*ServiceStats)\n\tif params == nil || len(params) == 0 || len(params) > 4 {\n\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"wrong_number_of_parameters\", \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"wrong number of parameters in call to transform function\"), \"transform\", params})\n\t\treturn nil\n\t}\n\th := GetCurrentHandlerConfig(ctx)\n\tif h == nil {\n\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"no_handler\", \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, RuntimeError{fmt.Sprintf(\"current handler not found in call to transform function\"), \"transform\", params})\n\t\treturn nil\n\t}\n\tif h.Transformations == nil {\n\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"no_named_transformations\", \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, RuntimeError{fmt.Sprintf(\"no named transformations found in call to transform function\"), \"transform\", params})\n\t\treturn nil\n\t}\n\tt := h.Transformations[extractStringParam(params[0])]\n\tif t == nil {\n\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"unknown_transformation\", \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, RuntimeError{fmt.Sprintf(\"no named transformation %s found in call to transform function\", extractStringParam(params[0])), \"transform\", params})\n\t\treturn nil\n\t}\n\tvar section interface{}\n\tsection = doc.GetOriginalObject()\n\tif len(params) >= 2 {\n\t\terr := json.Unmarshal([]byte(extractStringParam(params[1])), §ion)\n\t\tif err != nil {\n\t\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"invalid_json\", \"params\", params, \"error\", err.Error())\n\t\t\tstats.IncErrors()\n\t\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"non json parameters in call to transform function\"), \"transform\", params})\n\t\t\treturn nil\n\t\t}\n\t}\n\tvar pattern *JDoc\n\tif len(params) >= 3 && extractStringParam(params[2]) != \"\" {\n\t\tvar err error\n\t\tpattern, err = NewJDocFromString(extractStringParam(params[2]))\n\t\tif err != nil {\n\t\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"non_json_parameter\", \"params\", params, \"error\", err.Error())\n\t\t\tstats.IncErrors()\n\t\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"non json parameters in call to transform function\"), \"transform\", params})\n\t\t\treturn nil\n\t\t}\n\t}\n\tvar join *JDoc\n\tif len(params) == 4 && extractStringParam(params[3]) != \"\" {\n\t\tvar err error\n\t\tjoin, err = NewJDocFromString(extractStringParam(params[3]))\n\t\tif err != nil {\n\t\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"non_json_parameter\", \"params\", params, \"error\", err.Error())\n\t\t\tstats.IncErrors()\n\t\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"non json parameters in call to transform function\"), \"transform\", params})\n\t\t\treturn nil\n\t\t}\n\t}\n\tif pattern != nil {\n\t\tc, _ := doc.contains(section, pattern.GetOriginalObject(), 0)\n\t\tif !c {\n\t\t\treturn section\n\t\t}\n\t}\n\tif join != nil {\n\t\tsection = doc.merge(join.GetOriginalObject(), section)\n\t}\n\tlittleDoc, err := NewJDocFromInterface(section)\n\tif err != nil {\n\t\tctx.Log().Error(\"error_type\", 
\"func_transform\", \"cause\", \"json_parse_error\", \"op\", \"transform\", \"error\", err.Error(), \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, RuntimeError{fmt.Sprintf(\"transformation error in call to transform function\"), \"transform\", params})\n\t\treturn nil\n\t}\n\tvar littleRes *JDoc\n\tif t.IsTransformationByExample {\n\t\tlittleRes = littleDoc.ApplyTransformationByExample(ctx, t.t)\n\t} else {\n\t\tlittleRes = littleDoc.ApplyTransformation(ctx, t.t)\n\t}\n\treturn littleRes.GetOriginalObject()\n}",
"func main() {\n\tsig := make(chan os.Signal, 1)\n\tsignal.Notify(sig, os.Interrupt, os.Kill)\n\n\te := events.NewStream(1000, 10)\n\tSource.Load(e)\n\n\tSource.Start()\n\n\tdefer Source.Stop()\n\n\t<-sig\n}",
"func (p *Pipeline) Start() {\n\tfmt.Printf(\"\\nIn start\")\n\tC.gstreamer_receive_start_pipeline(p.Pipeline)\n}",
"func (coll *FeatureCollection) Transform(t Transformer) {\n\tfor _, feat := range *coll {\n\t\tfeat.Transform(t)\n\t}\n}",
"func (e *ElkTimeseriesForwarder) start() {\n\n\tlog.L.Infof(\"Starting event forwarder for %v\", e.index())\n\tticker := time.NewTicker(e.interval)\n\n\tfor {\n\t\tselect {\n\t\tcase <-ticker.C:\n\t\t\t//send it off\n\t\t\tlog.L.Debugf(\"Sending bulk ELK update for %v\", e.index())\n\n\t\t\tgo forward(e.index(), e.url, e.buffer)\n\t\t\te.buffer = []ElkBulkUpdateItem{}\n\n\t\tcase event := <-e.incomingChannel:\n\t\t\te.bufferevent(event)\n\t\t}\n\t}\n}",
"func PauseTransformFeedback() {\n\tC.glowPauseTransformFeedback(gpPauseTransformFeedback)\n}",
"func PauseTransformFeedback() {\n\tC.glowPauseTransformFeedback(gpPauseTransformFeedback)\n}",
"func (w *Writer) Begin() {\n\tw.batch.Reset()\n}",
"func (n *Node) Start(ctx context.Context) error {\n\tif err := n.checkStart(); err != nil {\n\t\treturn err\n\t}\n\tinChan := n.input.Receive()\n\toutChan := make(chan *bytes.Buffer)\n\tgo n.output.Broadcast(outChan)\n\tdefer safeCloseChan(outChan)\n\tinCtx := newNodeContext(ctx, n.close)\n\targs := ProcessArgs{Input: inChan, Output: outChan}\n\terr := n.w.Process(inCtx, args)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"%s : %w\", n.name, err)\n\t}\n\treturn nil\n}",
"func (t *Tailer) Start(offset int64, whence int) error {\n\terr := t.setup(offset, whence)\n\tif err != nil {\n\t\tt.source.Status.Error(err)\n\t\treturn err\n\t}\n\tt.source.Status.Success()\n\tt.source.AddInput(t.path)\n\n\tgo t.forwardMessages()\n\tt.decoder.Start()\n\tgo t.readForever()\n\n\treturn nil\n}",
"func classifyWorker(results chan<-DistanceRecord, distancer base.Distancer,\n trainingTuples []base.NumericTuple, classifyTuple base.NumericTuple,\n startIndex int, numberOfTuples int) {\n for trainingIndex := startIndex; (trainingIndex - startIndex) < numberOfTuples && trainingIndex < len(trainingTuples); trainingIndex++ {\n results <- DistanceRecord{distancer.Distance(trainingTuples[trainingIndex], classifyTuple), trainingIndex};\n }\n}",
"func ConvertRaw(r io.Reader, w io.Writer, apikey string) {\n\tstart := time.Now()\n\tfmt.Fprintln(os.Stderr, \"starting.\")\n\n\tg := FullyProcessRaw(r, apikey)\n\n\tfmt.Fprintln(os.Stderr, \"writing fully processed graph to output Writer.\")\n\tw.Write([]byte(g.String()))\n\n\tfmt.Fprintf(os.Stderr, \"done. took %s.\\n\", time.Since(start))\n}",
"func (s *BaseSyslParserListener) ExitTransform(ctx *TransformContext) {}",
"func (inst *Instance) Run(input map[string]interface{}) (output map[string]interface{}, err error) {\n\n\t// Get the Scope of the CML pipeline.\n\t// Scope is the collection of the data in the CML\n\tscope, err := NewPipelineScope(input, inst.def.labels)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Log the time\n\tstart := time.Now()\n\n\t//Check the type of the input of the pipeline.\n\tfor key, _ := range inst.def.input {\n\n\t\ttemp, ok := inst.def.input[key].(PipelineInput)\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\n\t\terr = types.ValidateType(temp.Type, input[key])\n\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t}\n\n\t//Run the tasks.\n\tfor key, task := range inst.def.tasks {\n\t\ttask.Position()\n\t\tscope, err = task.Eval(scope, inst.logger)\n\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"Error %s in task \\\"%s-%v\\\" \", err.Error(), task.Name(), key)\n\t\t}\n\n\t}\n\n\t// Set the output.\n\n\tif inst.def.output.Data != nil {\n\t\tmf := GetMapperFactory()\n\t\tmappings := make(map[string]interface{})\n\n\t\t// Type Switch\n\t\tswitch t := inst.def.output.Data.(type) {\n\t\tcase map[string]interface{}:\n\t\t\tfor key, val := range t {\n\t\t\t\tmappings[key] = val\n\t\t\t}\n\t\tdefault:\n\t\t\tmappings[\"data\"] = inst.def.output.Data\n\t\t}\n\n\t\t// Get the data from output expression\n\t\toutMapper, err := mf.NewMapper(mappings)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\toutput, err = outMapper.Apply(scope)\n\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tvar definedType data.Type\n\n\t\t// Check if the output is defined as dataframe or map.\n\t\tif inst.def.output.Type == \"dataframe\" || inst.def.output.Type == \"map\" {\n\t\t\tdefinedType, err = data.ToTypeEnum(\"object\")\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tgivenType, err := data.GetType(output)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tif definedType != givenType {\n\t\t\t\treturn nil, fmt.Errorf(\"Type mismatch in output. Defined type [%s] passed type [%s]\", definedType, givenType)\n\t\t\t}\n\n\t\t\tinst.logger.Infof(\"The output took %v to calculate\", time.Since(start))\n\n\t\t\treturn output, nil\n\t\t}\n\n\t\tdefinedType, _ = data.ToTypeEnum(inst.def.output.Type)\n\n\t\tfor key, _ := range output {\n\n\t\t\tgivenType, err := data.GetType(output[key])\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tif definedType != givenType {\n\t\t\t\treturn nil, fmt.Errorf(\"Type mismatch in output. Defined type [%s] passed type [%s]\", definedType, givenType)\n\t\t\t}\n\t\t}\n\n\t}\n\tinst.logger.Infof(\"The output took %v to calculate\", time.Since(start))\n\n\treturn output, nil\n\n}",
"func main() {\n\tctx := pipeliner.FirstError()\n\tsourceCh := source(ctx, 0, 9000)\n\tbatches := batchInts(ctx, 320, sourceCh)\n\tfor batch := range batches {\n\t\tfmt.Printf(\"received batch of length: %d\\n\", len(batch))\n\t}\n}",
"func (ap *app) conduct(ctx context.Context) (err error) {\n\t// Start the audition. This initializes the epoch, and thus needs to\n\t// happen before the collector and the auditors start.\n\tap.openDoors(ctx)\n\n\t// Initialize all the actors.\n\tif err := ap.runCleanup(ctx); err != nil {\n\t\treturn err\n\t}\n\n\t// Ensure the cleanup actions are run at the end\n\t// even during early return.\n\tdefer func() {\n\t\tctx = logtags.AddTag(ctx, \"atend\", nil)\n\t\tif cleanupErr := ap.runCleanup(ctx); cleanupErr != nil {\n\t\t\t// Error during cleanup. runCleanup already\n\t\t\t// printed out the details via log.Errorf.\n\t\t\terr = combineErrors(err, cleanupErr)\n\t\t}\n\t}()\n\n\t// Ensure the data gets to the log by the end of the play.\n\tdefer func() { log.Flush() }()\n\n\t// Prepare the theater.\n\tth := ap.makeTheater(ctx)\n\n\t// Start the audition.\n\tvar wgau sync.WaitGroup\n\tauDone := th.au.startAudition(ctx, &wgau)\n\n\t// Start the collector.\n\tvar wgcol sync.WaitGroup\n\tcolDone := th.col.startCollector(ctx, &wgcol)\n\tdefer func() {\n\t\tif !errors.Is(err, errAuditViolation) {\n\t\t\t// This happens in the common case when a play is left to\n\t\t\t// terminate without early failure on audit errors: in that case,\n\t\t\t// the collector's context is canceled, the cancel error overtakes\n\t\t\t// the audit failure, and then dismissed (we're not reporting\n\t\t\t// context cancellation as a process failure).\n\t\t\t// In that case, we still want to verify whether there\n\t\t\t// are failures remaining.\n\t\t\terr = combineErrors(err, th.col.checkAuditViolations(ctx))\n\t\t}\n\t}()\n\n\t// Start the spotlights.\n\tvar wgspot sync.WaitGroup\n\tallSpotsDone := th.spm.startSpotlights(ctx, &wgspot)\n\n\t// Start the prompter.\n\tvar wgPrompt sync.WaitGroup\n\tpromptDone := th.pr.startPrompter(ctx, &wgPrompt)\n\n\t// The shutdown sequence without cancellation/stopper is:\n\t// - prompter exits, this closes spotTermCh\n\t// - spotlights detect closed spotTermCh, terminate, then close auChan.\n\t// - auditors detect closed auChan, exit, this closes collectorChan.\n\t// - collectors detects closed collectorChan and exits.\n\t// However it's possible for things to terminate out of order:\n\t// - spotlights can detect a command error.\n\t// - auditors can encounter an audit failure.\n\t// - collector can encounter a file failure.\n\t// So at each of the shutdown stages below, we detect if a stage\n\t// later has completed and cancel the stages before.\n\n\t// TODO: this code can probably factored into a loop, not yet found\n\t// out how.\n\n\tvar finalErr error\n\tvar interrupt bool\n\t// First stage of shutdown: wait for the prompter to finish.\n\tselect {\n\tcase err := <-th.prErrCh:\n\t\tfinalErr = combineErrors(err, finalErr)\n\t\t// ok\n\tcase err := <-th.spotErrCh:\n\t\tfinalErr = combineErrors(err, finalErr)\n\t\tinterrupt = true\n\tcase err := <-th.auErrCh:\n\t\tfinalErr = combineErrors(err, finalErr)\n\t\tinterrupt = true\n\tcase err := <-th.colErrCh:\n\t\tfinalErr = combineErrors(err, finalErr)\n\t\tinterrupt = true\n\t}\n\tif interrupt {\n\t\tlog.Info(ctx, \"something went wrong other than prompter, cancelling everything\")\n\t\tpromptDone()\n\t\tfinalErr = combineErrors(ignCancel(<-th.prErrCh), finalErr)\n\t\tallSpotsDone()\n\t\tfinalErr = combineErrors(ignCancel(<-th.spotErrCh), finalErr)\n\t\tauDone()\n\t\tfinalErr = combineErrors(ignCancel(<-th.auErrCh), finalErr)\n\t\tcolDone()\n\t\tfinalErr = combineErrors(ignCancel(<-th.colErrCh), finalErr)\n\t\tinterrupt = 
false\n\t}\n\twgPrompt.Wait()\n\tpromptDone() // in case not called before.\n\n\t// Second stage: wait for the spotlights to finish.\n\tselect {\n\tcase err := <-th.spotErrCh:\n\t\tfinalErr = combineErrors(err, finalErr)\n\t\t// ok\n\tcase err := <-th.auErrCh:\n\t\tfinalErr = combineErrors(err, finalErr)\n\t\tinterrupt = true\n\tcase err := <-th.colErrCh:\n\t\tfinalErr = combineErrors(err, finalErr)\n\t\tinterrupt = true\n\t}\n\tif interrupt {\n\t\tlog.Info(ctx, \"something went wrong after prompter terminated: cancelling spotlights, audience and collector\")\n\t\tallSpotsDone()\n\t\tfinalErr = combineErrors(ignCancel(<-th.spotErrCh), finalErr)\n\t\tauDone()\n\t\tfinalErr = combineErrors(ignCancel(<-th.auErrCh), finalErr)\n\t\tcolDone()\n\t\tfinalErr = combineErrors(ignCancel(<-th.colErrCh), finalErr)\n\t\tinterrupt = false\n\t}\n\twgspot.Wait()\n\tallSpotsDone() // in case not called before.\n\n\t// Third stage: wait for the auditors to finish.\n\tselect {\n\tcase err := <-th.auErrCh:\n\t\tfinalErr = combineErrors(err, finalErr)\n\t\t// ok\n\tcase err := <-th.colErrCh:\n\t\tfinalErr = combineErrors(err, finalErr)\n\t\tinterrupt = true\n\t}\n\tif interrupt {\n\t\tlog.Info(ctx, \"something went wrong after spotlights terminated, cancelling audience and collector\")\n\t\tauDone()\n\t\tfinalErr = combineErrors(ignCancel(<-th.auErrCh), finalErr)\n\t\tcolDone()\n\t\tfinalErr = combineErrors(ignCancel(<-th.colErrCh), finalErr)\n\t\tinterrupt = false\n\t}\n\twgau.Wait()\n\tauDone() // in case not called before.\n\n\t// Fourth stage: wait for the collector to finish.\n\tfinalErr = combineErrors(ignCancel(<-th.colErrCh), finalErr)\n\twgcol.Wait()\n\tcolDone() // in case not called before.\n\n\treturn finalErr\n}",
"func (a *ApplyTask) Start(taskChannel chan taskrunner.TaskResult) {\n\tgo func() {\n\t\ta.ApplyOptions.SetObjects(a.Objects)\n\t\terr := a.ApplyOptions.Run()\n\t\ttaskChannel <- taskrunner.TaskResult{\n\t\t\tErr: err,\n\t\t}\n\t}()\n}",
"func sampler(done <-chan struct{}, policies []GamePolicy, position <-chan job, outcome chan<- job) {\n\n\tfor task := range position {\n\t\tnode, decision := task.node, task.decision\n\n\t\tif node == nil {\n\t\t\tcontinue\n\t\t}\n\n\t\tnode.Lock()\n\t\tstate := node.state.Clone()\n\t\tnode.Unlock()\n\n\t\tswitch node.Status() {\n\t\tcase walked:\n\t\t\tnode.SetStatus(simulating)\n\t\t\t//log.Printf(\"sampler: %v node %p\\n\", node.Status(), node)\n\t\tdefault:\n\t\t\t//log.Printf(\"sampler: discarding already %v node %p\\n\", node.Status(), node)\n\t\t\tcontinue\n\t\t}\n\n\t\tsampled := decision.Join(state.Sample(done, policies[0]))\n\n\t\tselect {\n\t\tcase <-done:\n\t\t\treturn\n\t\tcase outcome <- job{node, sampled}:\n\t\t\tnode.SetStatus(simulated)\n\t\t}\n\t}\n}"
] | [
"0.5562858",
"0.5539677",
"0.55123633",
"0.5366656",
"0.5331807",
"0.52465326",
"0.5208377",
"0.5200585",
"0.5200585",
"0.5196328",
"0.519006",
"0.5174271",
"0.51725996",
"0.5159708",
"0.5113168",
"0.508091",
"0.5076553",
"0.5070337",
"0.50625813",
"0.50278485",
"0.5009344",
"0.4977987",
"0.49666947",
"0.49540582",
"0.49450517",
"0.49180079",
"0.4904785",
"0.48984167",
"0.48800418",
"0.48752105",
"0.48621044",
"0.48330545",
"0.48330545",
"0.4822027",
"0.48134968",
"0.47963178",
"0.47948095",
"0.47686386",
"0.47491297",
"0.47481728",
"0.47481728",
"0.47406173",
"0.47385454",
"0.47328922",
"0.47210163",
"0.47189596",
"0.4715502",
"0.47080487",
"0.4700663",
"0.46832323",
"0.4666738",
"0.4663425",
"0.4617637",
"0.46146268",
"0.46114936",
"0.46059483",
"0.46053588",
"0.46043923",
"0.460231",
"0.4593905",
"0.45904845",
"0.4588595",
"0.45791057",
"0.45710087",
"0.45671743",
"0.45459598",
"0.45035416",
"0.44909737",
"0.448569",
"0.44814396",
"0.4476559",
"0.44746113",
"0.44488338",
"0.44457883",
"0.44442856",
"0.44442856",
"0.44400132",
"0.44371286",
"0.4434898",
"0.44344488",
"0.44343358",
"0.44324964",
"0.44287524",
"0.442556",
"0.44132784",
"0.4412767",
"0.44095647",
"0.44095647",
"0.4406668",
"0.4405323",
"0.44018966",
"0.4399482",
"0.43990773",
"0.43961832",
"0.43929225",
"0.4391806",
"0.4386151",
"0.43850344",
"0.4381332"
] | 0.47064185 | 49 |
Associates a generic vertex attribute index with a named attribute variable | func BindAttribLocation(program uint32, index uint32, name *uint8) {
C.glowBindAttribLocation(gpBindAttribLocation, (C.GLuint)(program), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (e *rawData) AddAttribute(key string, val string) {}",
"func GetVertexAttribIuiv(index uint32, pname uint32, params *uint32) {\n C.glowGetVertexAttribIuiv(gpGetVertexAttribIuiv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLuint)(unsafe.Pointer(params)))\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n C.glowVertexAttribBinding(gpVertexAttribBinding, (C.GLuint)(attribindex), (C.GLuint)(bindingindex))\n}",
"func (g *GLTF) addAttributeToVBO(vbo *gls.VBO, attribName string, byteOffset uint32) {\n\n\taType, ok := AttributeName[attribName]\n\tif !ok {\n\t\tlog.Warn(fmt.Sprintf(\"Attribute %v is not supported!\", attribName))\n\t\treturn\n\t}\n\tvbo.AddAttribOffset(aType, byteOffset)\n}",
"func GetVertexAttribIiv(index uint32, pname uint32, params *int32) {\n C.glowGetVertexAttribIiv(gpGetVertexAttribIiv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func GetVertexAttribiv(index uint32, pname uint32, params *int32) {\n C.glowGetVertexAttribiv(gpGetVertexAttribiv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n\tsyscall.Syscall(gpVertexAttribBinding, 2, uintptr(attribindex), uintptr(bindingindex), 0)\n}",
"func BindAttribLocation(program uint32, index uint32, name *int8) {\n C.glowBindAttribLocation(gpBindAttribLocation, (C.GLuint)(program), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func GetVertexAttribfv(index uint32, pname uint32, params *float32) {\n C.glowGetVertexAttribfv(gpGetVertexAttribfv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLfloat)(unsafe.Pointer(params)))\n}",
"func EnableVertexAttribArray(index uint32) {\n C.glowEnableVertexAttribArray(gpEnableVertexAttribArray, (C.GLuint)(index))\n}",
"func VertexAttribFormat(attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n C.glowVertexAttribFormat(gpVertexAttribFormat, (C.GLuint)(attribindex), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLuint)(relativeoffset))\n}",
"func GetVertexAttribIuiv(index uint32, pname uint32, params *uint32) {\n\tC.glowGetVertexAttribIuiv(gpGetVertexAttribIuiv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLuint)(unsafe.Pointer(params)))\n}",
"func GetVertexAttribIuiv(index uint32, pname uint32, params *uint32) {\n\tC.glowGetVertexAttribIuiv(gpGetVertexAttribIuiv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLuint)(unsafe.Pointer(params)))\n}",
"func EnableVertexArrayAttrib(vaobj uint32, index uint32) {\n\tsyscall.Syscall(gpEnableVertexArrayAttrib, 2, uintptr(vaobj), uintptr(index), 0)\n}",
"func VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, pointer unsafe.Pointer) {\n C.glowVertexAttribPointer(gpVertexAttribPointer, (C.GLuint)(index), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLsizei)(stride), pointer)\n}",
"func GetVertexAttribIiv(index uint32, pname uint32, params *int32) {\n\tC.glowGetVertexAttribIiv(gpGetVertexAttribIiv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func GetVertexAttribIiv(index uint32, pname uint32, params *int32) {\n\tC.glowGetVertexAttribIiv(gpGetVertexAttribIiv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n\tC.glowVertexAttribBinding(gpVertexAttribBinding, (C.GLuint)(attribindex), (C.GLuint)(bindingindex))\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n\tC.glowVertexAttribBinding(gpVertexAttribBinding, (C.GLuint)(attribindex), (C.GLuint)(bindingindex))\n}",
"func GetVertexAttribi(src Attrib, pname Enum) int32 {\n\tvar result int32\n\tgl.GetVertexAttribiv(uint32(src.Value), uint32(pname), &result)\n\treturn result\n}",
"func (s *BasePlSqlParserListener) EnterIndex_attributes(ctx *Index_attributesContext) {}",
"func EnableVertexArrayAttrib(vaobj uint32, index uint32) {\n\tC.glowEnableVertexArrayAttrib(gpEnableVertexArrayAttrib, (C.GLuint)(vaobj), (C.GLuint)(index))\n}",
"func EnableVertexArrayAttrib(vaobj uint32, index uint32) {\n\tC.glowEnableVertexArrayAttrib(gpEnableVertexArrayAttrib, (C.GLuint)(vaobj), (C.GLuint)(index))\n}",
"func GetVertexAttribiv(index uint32, pname uint32, params *int32) {\n\tC.glowGetVertexAttribiv(gpGetVertexAttribiv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func GetVertexAttribiv(index uint32, pname uint32, params *int32) {\n\tC.glowGetVertexAttribiv(gpGetVertexAttribiv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func GetVertexAttribdv(index uint32, pname uint32, params *float64) {\n C.glowGetVertexAttribdv(gpGetVertexAttribdv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLdouble)(unsafe.Pointer(params)))\n}",
"func VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, pointer unsafe.Pointer) {\n\tsyscall.Syscall6(gpVertexAttribPointer, 6, uintptr(index), uintptr(size), uintptr(xtype), boolToUintptr(normalized), uintptr(stride), uintptr(pointer))\n}",
"func VertexAttribFormat(attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tsyscall.Syscall6(gpVertexAttribFormat, 5, uintptr(attribindex), uintptr(size), uintptr(xtype), boolToUintptr(normalized), uintptr(relativeoffset), 0)\n}",
"func (g *GLTF) loadAttributes(geom *geometry.Geometry, attributes map[string]int, indices math32.ArrayU32) error {\n\n\t// Indices of buffer views\n\tinterleavedVBOs := make(map[int]*gls.VBO, 0)\n\n\t// Load primitive attributes\n\tfor name, aci := range attributes {\n\t\taccessor := g.Accessors[aci]\n\n\t\t// Validate that accessor is compatible with attribute\n\t\terr := g.validateAccessorAttribute(accessor, name)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// Load data and add it to geometry's VBO\n\t\tif g.isInterleaved(accessor) {\n\t\t\tbvIdx := *accessor.BufferView\n\t\t\t// Check if we already loaded this buffer view\n\t\t\tvbo, ok := interleavedVBOs[bvIdx]\n\t\t\tif ok {\n\t\t\t\t// Already created VBO for this buffer view\n\t\t\t\t// Add attribute with correct byteOffset\n\t\t\t\tg.addAttributeToVBO(vbo, name, uint32(*accessor.ByteOffset))\n\t\t\t} else {\n\t\t\t\t// Load data and create vbo\n\t\t\t\tbuf, err := g.loadBufferView(bvIdx)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\t//\n\t\t\t\t// TODO: BUG HERE\n\t\t\t\t// If buffer view has accessors with different component type then this will have a read alignment problem!\n\t\t\t\t//\n\t\t\t\tdata, err := g.bytesToArrayF32(buf, accessor.ComponentType, accessor.Count*TypeSizes[accessor.Type])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tvbo := gls.NewVBO(data)\n\t\t\t\tg.addAttributeToVBO(vbo, name, 0)\n\t\t\t\t// Save reference to VBO keyed by index of the buffer view\n\t\t\t\tinterleavedVBOs[bvIdx] = vbo\n\t\t\t\t// Add VBO to geometry\n\t\t\t\tgeom.AddVBO(vbo)\n\t\t\t}\n\t\t} else {\n\t\t\tbuf, err := g.loadAccessorBytes(accessor)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdata, err := g.bytesToArrayF32(buf, accessor.ComponentType, accessor.Count*TypeSizes[accessor.Type])\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tvbo := gls.NewVBO(data)\n\t\t\tg.addAttributeToVBO(vbo, name, 0)\n\t\t\t// Add VBO to geometry\n\t\t\tgeom.AddVBO(vbo)\n\t\t}\n\t}\n\n\t// Set indices\n\tif len(indices) > 0 {\n\t\tgeom.SetIndices(indices)\n\t}\n\n\treturn nil\n}",
"func (s *BasePlSqlParserListener) EnterAttribute_name(ctx *Attribute_nameContext) {}",
"func GetVertexAttribPointerv(index uint32, pname uint32, pointer *unsafe.Pointer) {\n C.glowGetVertexAttribPointerv(gpGetVertexAttribPointerv, (C.GLuint)(index), (C.GLenum)(pname), pointer)\n}",
"func GetVertexAttribfv(index uint32, pname uint32, params *float32) {\n\tC.glowGetVertexAttribfv(gpGetVertexAttribfv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLfloat)(unsafe.Pointer(params)))\n}",
"func GetVertexAttribfv(index uint32, pname uint32, params *float32) {\n\tC.glowGetVertexAttribfv(gpGetVertexAttribfv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLfloat)(unsafe.Pointer(params)))\n}",
"func (s *BasevhdlListener) EnterAttribute_name_part(ctx *Attribute_name_partContext) {}",
"func BindAttribLocation(program uint32, index uint32, name *uint8) {\n\tsyscall.Syscall(gpBindAttribLocation, 3, uintptr(program), uintptr(index), uintptr(unsafe.Pointer(name)))\n}",
"func VertexAttribFormat(attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tC.glowVertexAttribFormat(gpVertexAttribFormat, (C.GLuint)(attribindex), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLuint)(relativeoffset))\n}",
"func VertexAttribFormat(attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tC.glowVertexAttribFormat(gpVertexAttribFormat, (C.GLuint)(attribindex), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLuint)(relativeoffset))\n}",
"func GetVertexAttribfv(index uint32, pname uint32, params *float32) {\n\tsyscall.Syscall(gpGetVertexAttribfv, 3, uintptr(index), uintptr(pname), uintptr(unsafe.Pointer(params)))\n}",
"func GetVertexAttribiv(index uint32, pname uint32, params *int32) {\n\tsyscall.Syscall(gpGetVertexAttribiv, 3, uintptr(index), uintptr(pname), uintptr(unsafe.Pointer(params)))\n}",
"func (s *BaseCGListener) EnterAttribute(ctx *AttributeContext) {}",
"func (i *Index) Attr(name string) (starlark.Value, error) {\n\tswitch name {\n\tcase \"name\":\n\t\treturn starlark.String(i.name), nil\n\tcase \"str\":\n\t\treturn &stringMethods{subject: i}, nil\n\t}\n\treturn nil, starlark.NoSuchAttrError(name)\n}",
"func selectAttribute(exprPos int, expr types.DataGetExpression,\n\tmapIndex_relId map[int]uuid.UUID, inSelect *[]string, nestingLevel int) error {\n\n\trelCode := getRelationCode(expr.Index, nestingLevel)\n\n\tatr, exists := cache.AttributeIdMap[expr.AttributeId.Bytes]\n\tif !exists {\n\t\treturn errors.New(\"attribute does not exist\")\n\t}\n\n\tcodeSelect := getExpressionCodeSelect(exprPos)\n\n\tif !expr.OutsideIn {\n\t\t// attribute is from index relation\n\t\tcode, err := getAttributeCode(expr.AttributeId.Bytes, relCode)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// prepare distinct paramenter, not useful for min/max/record\n\t\tvar distinct = \"\"\n\t\tif expr.Distincted {\n\t\t\tdistinct = \"DISTINCT \"\n\t\t}\n\n\t\t// apply aggregator if desired\n\t\tswitch expr.Aggregator.String {\n\t\tcase \"array\":\n\t\t\t*inSelect = append(*inSelect, fmt.Sprintf(\"JSON_AGG(%s%s) AS %s\", distinct, code, codeSelect))\n\t\tcase \"avg\":\n\t\t\t*inSelect = append(*inSelect, fmt.Sprintf(\"AVG(%s%s)::NUMERIC(20,2) AS %s\", distinct, code, codeSelect))\n\t\tcase \"count\":\n\t\t\t*inSelect = append(*inSelect, fmt.Sprintf(\"COUNT(%s%s) AS %s\", distinct, code, codeSelect))\n\t\tcase \"list\":\n\t\t\t*inSelect = append(*inSelect, fmt.Sprintf(\"STRING_AGG(%s%s::TEXT, ', ') AS %s\", distinct, code, codeSelect))\n\t\tcase \"max\":\n\t\t\t*inSelect = append(*inSelect, fmt.Sprintf(\"MAX(%s) AS %s\", code, codeSelect))\n\t\tcase \"min\":\n\t\t\t*inSelect = append(*inSelect, fmt.Sprintf(\"MIN(%s) AS %s\", code, codeSelect))\n\t\tcase \"sum\":\n\t\t\t*inSelect = append(*inSelect, fmt.Sprintf(\"SUM(%s%s) AS %s\", distinct, code, codeSelect))\n\t\tcase \"record\":\n\t\t\t// groups record IDs for attribute relation (via index)\n\t\t\t// allows access to individual record IDs and attribute values while other aggregations are active\n\t\t\t*inSelect = append(*inSelect, fmt.Sprintf(\"FIRST(%s) AS %s\", code, codeSelect))\n\t\tdefault:\n\t\t\t*inSelect = append(*inSelect, fmt.Sprintf(\"%s%s AS %s\", distinct, code, codeSelect))\n\t\t}\n\t\treturn nil\n\t}\n\n\t// attribute comes via relationship from other relation (or self reference from same relation)\n\tshipRel, exists := cache.RelationIdMap[atr.RelationId]\n\tif !exists {\n\t\treturn errors.New(\"relation does not exist\")\n\t}\n\n\tshipMod, exists := cache.ModuleIdMap[shipRel.ModuleId]\n\tif !exists {\n\t\treturn errors.New(\"module does not exist\")\n\t}\n\n\t// get tupel IDs from other relation\n\tif expr.AttributeIdNm.Status != pgtype.Present {\n\n\t\t// from other relation, collect tupel IDs in relationship with given index tupel\n\t\t*inSelect = append(*inSelect, fmt.Sprintf(`(\n\t\t\tSELECT JSON_AGG(id)\n\t\t\tFROM \"%s\".\"%s\"\n\t\t\tWHERE \"%s\".\"%s\" = \"%s\".\"%s\"\n\t\t) AS %s`,\n\t\t\tshipMod.Name, shipRel.Name,\n\t\t\tshipRel.Name, atr.Name, relCode, lookups.PkName,\n\t\t\tcodeSelect))\n\n\t} else {\n\t\tshipAtrNm, exists := cache.AttributeIdMap[expr.AttributeIdNm.Bytes]\n\t\tif !exists {\n\t\t\treturn errors.New(\"attribute does not exist\")\n\t\t}\n\n\t\t// from other relation, collect tupel IDs from n:m relationship attribute\n\t\t*inSelect = append(*inSelect, fmt.Sprintf(`(\n\t\t\tSELECT JSON_AGG(%s)\n\t\t\tFROM \"%s\".\"%s\"\n\t\t\tWHERE \"%s\".\"%s\" = \"%s\".\"%s\"\n\t\t) AS %s`,\n\t\t\tshipAtrNm.Name,\n\t\t\tshipMod.Name, shipRel.Name,\n\t\t\tshipRel.Name, atr.Name, relCode, lookups.PkName,\n\t\t\tcodeSelect))\n\t}\n\treturn nil\n}",
"func BindAttribLocation(program Program, index uint32, name string) {\n\tgl.BindAttribLocation(uint32(program), index, gl.Str(name+\"\\x00\"))\n}",
"func VertexArrayAttribFormat(vaobj uint32, attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tsyscall.Syscall6(gpVertexArrayAttribFormat, 6, uintptr(vaobj), uintptr(attribindex), uintptr(size), uintptr(xtype), boolToUintptr(normalized), uintptr(relativeoffset))\n}",
"func (s *BasevhdlListener) EnterAttribute_declaration(ctx *Attribute_declarationContext) {}",
"func setVertexTypedProperty(theType string, vertex *graphson.Vertex, key string, value interface{}) {\n\tgv := graphson.GenericValue{Type: \"string\", Value: key}\n\tpv := graphson.VertexPropertyValue{\n\t\tID: gv,\n\t\tLabel: key,\n\t\tValue: value,\n\t}\n\tvertexProperty := graphson.VertexProperty{Type: theType, Value: pv}\n\tvertexProperties := []graphson.VertexProperty{vertexProperty}\n\tvertex.Value.Properties[key] = vertexProperties\n}",
"func GetVertexAttribLdv(index uint32, pname uint32, params *float64) {\n C.glowGetVertexAttribLdv(gpGetVertexAttribLdv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLdouble)(unsafe.Pointer(params)))\n}",
"func setVertexIntProperty(vertex *graphson.Vertex, key string, value int) {\n\tsetVertexTypedProperty(\"int\", vertex, key, value)\n}",
"func GetVertexAttribdv(index uint32, pname uint32, params *float64) {\n\tC.glowGetVertexAttribdv(gpGetVertexAttribdv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLdouble)(unsafe.Pointer(params)))\n}",
"func GetVertexAttribdv(index uint32, pname uint32, params *float64) {\n\tC.glowGetVertexAttribdv(gpGetVertexAttribdv, (C.GLuint)(index), (C.GLenum)(pname), (*C.GLdouble)(unsafe.Pointer(params)))\n}",
"func (vao VertexArrayObject) VertexAttribPointer(attrIndex int, attrType Type, normalized bool, byteStride int, byteOffset int) {\n\tglx := vao.glx\n\tbufferType, bufferItemsPerVertex, err := attrType.asAttribute()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"converting attribute type %s to attribute: %w\", attrType, err))\n\t}\n\tglx.constants.VertexAttribPointer(\n\t\tglx.factory.Number(float64(attrIndex)),\n\t\tglx.factory.Number(float64(bufferItemsPerVertex)),\n\t\tglx.typeConverter.ToJs(bufferType),\n\t\tglx.factory.Boolean(normalized),\n\t\tglx.factory.Number(float64(byteStride)),\n\t\tglx.factory.Number(float64(byteOffset)),\n\t)\n}",
"func GetVertexAttribiv(index Uint, pname Enum, params []Int) {\n\tcindex, _ := (C.GLuint)(index), cgoAllocsUnknown\n\tcpname, _ := (C.GLenum)(pname), cgoAllocsUnknown\n\tcparams, _ := (*C.GLint)(unsafe.Pointer((*sliceHeader)(unsafe.Pointer(¶ms)).Data)), cgoAllocsUnknown\n\tC.glGetVertexAttribiv(cindex, cpname, cparams)\n}",
"func VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, pointer unsafe.Pointer) {\n\tC.glowVertexAttribPointer(gpVertexAttribPointer, (C.GLuint)(index), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLsizei)(stride), pointer)\n}",
"func VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, pointer unsafe.Pointer) {\n\tC.glowVertexAttribPointer(gpVertexAttribPointer, (C.GLuint)(index), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLsizei)(stride), pointer)\n}",
"func VertexArrayAttribFormat(vaobj uint32, attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tC.glowVertexArrayAttribFormat(gpVertexArrayAttribFormat, (C.GLuint)(vaobj), (C.GLuint)(attribindex), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLuint)(relativeoffset))\n}",
"func VertexArrayAttribFormat(vaobj uint32, attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tC.glowVertexArrayAttribFormat(gpVertexArrayAttribFormat, (C.GLuint)(vaobj), (C.GLuint)(attribindex), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLuint)(relativeoffset))\n}",
"func (r *Relationship) Attribute(key string, value string) *Relationship {\n\tr.Attr[key] = value\n\treturn r\n}",
"func (s *Basegff3Listener) EnterAttribute(ctx *AttributeContext) {}",
"func (graph *Graph) SetAttr(x, y int, attr byte) {\n\tgraph.Tiles[y][x].Attr = attr\n}",
"func (native *OpenGL) VertexAttribOffset(index uint32, size int32, attribType uint32, normalized bool, stride int32, offset int) {\n\tgl.VertexAttribPointer(index, size, attribType, normalized, stride, gl.PtrOffset(offset))\n}",
"func TPUReplicatedInputIndex(value int64) TPUReplicatedInputAttr {\n\treturn func(m optionalAttr) {\n\t\tm[\"index\"] = value\n\t}\n}",
"func (native *OpenGL) BindAttribLocation(program uint32, index uint32, name string) {\n\tgl.BindAttribLocation(program, index, gl.Str(name+\"\\x00\"))\n}",
"func GetVertexAttribdv(index uint32, pname uint32, params *float64) {\n\tsyscall.Syscall(gpGetVertexAttribdv, 3, uintptr(index), uintptr(pname), uintptr(unsafe.Pointer(params)))\n}",
"func (fs *FS) encryptXattrName(attr string) (cAttr string) {\n\t// xattr names are encrypted like file names, but with a fixed IV.\n\tcAttr = xattrStorePrefix + fs.nameTransform.EncryptName(attr, xattrNameIV)\n\treturn cAttr\n}",
"func assetID(t int, name string) aid { return aid(t) + aid(stringHash(name))<<32 }",
"func VertexAttrib1fv(index uint32, value []float32) {\n\tgl.VertexAttrib1fv(index, &value[0])\n}",
"func (v Vertex) GetIndex() int {\n return v.index\n}",
"func (e *explainer) attr(nodeName, fieldName, attr string) {\n\te.entries = append(e.entries, explainEntry{\n\t\tlevel: e.level - 1,\n\t\tfield: fieldName,\n\t\tfieldVal: attr,\n\t})\n}",
"func EnableVertexAttribArray(index uint32) {\n\tsyscall.Syscall(gpEnableVertexAttribArray, 1, uintptr(index), 0, 0)\n}",
"func Attribute(key string, value interface{}) Markup {\n\treturn markupFunc(func(h *HTML) {\n\t\tif h.attributes == nil {\n\t\t\th.attributes = make(map[string]interface{})\n\t\t}\n\t\th.attributes[key] = value\n\t})\n}",
"func VertexAttribDivisor(index uint32, divisor uint32) {\n C.glowVertexAttribDivisor(gpVertexAttribDivisor, (C.GLuint)(index), (C.GLuint)(divisor))\n}",
"func (a *PositionalAttribute) Key() string {\n\treturn AttrPositionalIndex + strconv.Itoa(a.Index)\n}",
"func EnableVertexAttribArray(index uint32) {\n\tC.glowEnableVertexAttribArray(gpEnableVertexAttribArray, (C.GLuint)(index))\n}",
"func EnableVertexAttribArray(index uint32) {\n\tC.glowEnableVertexAttribArray(gpEnableVertexAttribArray, (C.GLuint)(index))\n}",
"func (self *SinglePad) SetIndexA(member int) {\n self.Object.Set(\"index\", member)\n}",
"func (e *tag) AddAttribute(key string, val string) {\n\te.attributes = append(e.attributes, keyVal{Key: key, Value: val})\n}",
"func EnableVertexAttribArray(index Uint) {\n\tcindex, _ := (C.GLuint)(index), cgoAllocsUnknown\n\tC.glEnableVertexAttribArray(cindex)\n}",
"func PushAttrib(mask uint32) {\n\tsyscall.Syscall(gpPushAttrib, 1, uintptr(mask), 0, 0)\n}",
"func PushAttrib(mask uint32) {\n C.glowPushAttrib(gpPushAttrib, (C.GLbitfield)(mask))\n}",
"func PushAttrib(mask uint32) {\n\tC.glowPushAttrib(gpPushAttrib, (C.GLbitfield)(mask))\n}",
"func (self *Graphics) SetKeyA(member interface{}) {\n self.Object.Set(\"key\", member)\n}",
"func (a Attributes) Set(key interface{}, val interface{}) {\n\ta[key] = val\n}",
"func (s *BaseCGListener) EnterAttributeset(ctx *AttributesetContext) {}",
"func NewNamedAttribute(key string, value interface{}) (*Attribute, error) {\n\t// value = Reduce(value, strings.TrimSpace)\n\tkey = strings.TrimSpace(key)\n\tif key == AttrOpts { // Handle the alias\n\t\tkey = AttrOptions\n\t}\n\treturn &Attribute{\n\t\tKey: key,\n\t\tValue: value,\n\t}, nil\n}",
"func GetVertexAttribPointerv(index uint32, pname uint32, pointer *unsafe.Pointer) {\n\tC.glowGetVertexAttribPointerv(gpGetVertexAttribPointerv, (C.GLuint)(index), (C.GLenum)(pname), pointer)\n}",
"func GetVertexAttribPointerv(index uint32, pname uint32, pointer *unsafe.Pointer) {\n\tC.glowGetVertexAttribPointerv(gpGetVertexAttribPointerv, (C.GLuint)(index), (C.GLenum)(pname), pointer)\n}",
"func addListAttribute(item map[string]*dynamodb.AttributeValue, key string, value []*dynamodb.AttributeValue) {\n\titem[key] = &dynamodb.AttributeValue{L: value}\n}",
"func (debugging *debuggingOpenGL) VertexAttribOffset(index uint32, size int32, attribType uint32, normalized bool, stride int32, offset int) {\n\tdebugging.recordEntry(\"VertexAttribOffset\", index, size, attribType, normalized, stride, offset)\n\tdebugging.gl.VertexAttribOffset(index, size, attribType, normalized, stride, offset)\n\tdebugging.recordExit(\"VertexAttribOffset\")\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *int8) {\n C.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func VertexAttrib3fv(index uint32, value []float32) {\n\tgl.VertexAttrib3fv(index, &value[0])\n}",
"func (v *Vec3i) SetByName(name string, value int32) {\n\tswitch name {\n\tcase \"x\", \"X\":\n\t\tv.X = value\n\tcase \"y\", \"Y\":\n\t\tv.Y = value\n\tcase \"z\", \"Z\":\n\t\tv.Z = value\n\tdefault:\n\t\tpanic(\"Invalid Vec3i component name: \" + name)\n\t}\n}",
"func (f *FieldType) PutAttribute(key, value string) {\n\terr := f.checkIfFrozen()\n\tif err != nil {\n\t\treturn\n\t}\n\n\tf.attributes[key] = value\n}",
"func generateAttrib() int {\n\treturn random(8, 18)\n}",
"func GetVertexAttribPointerv(index uint32, pname uint32, pointer *unsafe.Pointer) {\n\tsyscall.Syscall(gpGetVertexAttribPointerv, 3, uintptr(index), uintptr(pname), uintptr(unsafe.Pointer(pointer)))\n}",
"func GetVertexAttribfv(index Uint, pname Enum, params []Float) {\n\tcindex, _ := (C.GLuint)(index), cgoAllocsUnknown\n\tcpname, _ := (C.GLenum)(pname), cgoAllocsUnknown\n\tcparams, _ := (*C.GLfloat)(unsafe.Pointer((*sliceHeader)(unsafe.Pointer(¶ms)).Data)), cgoAllocsUnknown\n\tC.glGetVertexAttribfv(cindex, cpname, cparams)\n}",
"func (g *Generator) declareIndexAndNameVar(run []Value, typeName string) {\n\tindex, name := g.createIndexAndNameDecl(run, typeName, \"\")\n\tg.Printf(\"const %s\\n\", name)\n\tg.Printf(\"var %s\\n\", index)\n}",
"func (s *Shader) setUniform(name string, value int32) {\n location:=gl.GetUniformLocation(s.idPrograma, gl.Str(name + \"\\x00\"))\n if location != -1 { // Si existe ese nombre de variable\n gl.Uniform1i(location, value)\n }\n}",
"func (s *ImageSpec) AttributeInt(name string, defaultVal ...int) int {\n\tvar defVal int\n\tif len(defaultVal) > 0 {\n\t\tdefVal = defaultVal[0]\n\t}\n\tc_str := C.CString(name)\n\tdefer C.free(unsafe.Pointer(c_str))\n\tret := int(C.ImageSpec_get_int_attribute(s.ptr, c_str, C.int(defVal)))\n\truntime.KeepAlive(s)\n\treturn ret\n}",
"func BindAttribLocation(p Program, a Attrib, name string) {\n\tgl.BindAttribLocation(p.Value, uint32(a.Value), gl.Str(name+\"\\x00\"))\n}"
] | [
"0.6013094",
"0.5846795",
"0.5826425",
"0.5823346",
"0.5788625",
"0.57018775",
"0.56844145",
"0.5656396",
"0.5579057",
"0.5578576",
"0.5543737",
"0.5539951",
"0.5539951",
"0.55333734",
"0.5447235",
"0.5420258",
"0.5420258",
"0.5413475",
"0.5413475",
"0.53973305",
"0.53862524",
"0.5382523",
"0.5382523",
"0.53781325",
"0.53781325",
"0.53705263",
"0.53416336",
"0.5314448",
"0.5298322",
"0.52732277",
"0.5257088",
"0.52542794",
"0.52542794",
"0.52533984",
"0.52439016",
"0.52303493",
"0.52303493",
"0.5204933",
"0.5203359",
"0.51764077",
"0.51612145",
"0.51601905",
"0.5117153",
"0.51033646",
"0.5076756",
"0.50675756",
"0.5066308",
"0.5047494",
"0.503816",
"0.503816",
"0.502874",
"0.5025436",
"0.5018698",
"0.5018698",
"0.50138074",
"0.50138074",
"0.5009046",
"0.50090146",
"0.4998356",
"0.4991372",
"0.49627015",
"0.49428922",
"0.49387372",
"0.49307638",
"0.49204606",
"0.4918631",
"0.49044088",
"0.49017817",
"0.489816",
"0.48970667",
"0.4883548",
"0.48807305",
"0.48749682",
"0.48749682",
"0.48684916",
"0.48591208",
"0.48513356",
"0.4850058",
"0.48370117",
"0.48351514",
"0.4823009",
"0.48192167",
"0.481765",
"0.48159522",
"0.48156297",
"0.48156297",
"0.48041722",
"0.4800098",
"0.47958377",
"0.4794166",
"0.47923547",
"0.47913572",
"0.47771433",
"0.4776355",
"0.4773533",
"0.47714296",
"0.47673956",
"0.4764772",
"0.47638828"
] | 0.5192346 | 40 |
bind a named buffer object | func BindBuffer(target uint32, buffer uint32) {
C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (buffer Buffer) Bind(target gl.Enum) {\n\tgl.BindBuffer(gl.Enum(target), gl.Uint(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBuffer, 2, uintptr(target), uintptr(buffer), 0)\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBufferBase, 3, uintptr(target), uintptr(index), uintptr(buffer))\n}",
"func BindBuffer(target Enum, b Buffer) {\n\tgl.BindBuffer(uint32(target), b.Value)\n}",
"func (self *RawHandlerDefault) Bind(name string, ctx *gozmq.Context) error {\n\t//this only needs to be done once for a particular name, even if you call\n\t//Shutdown() and Bind() again.\n\tif self.Identity == \"\" {\n\t\taddress, err := GetHandlerSpec(name)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tself.PullSpec = address.PullSpec\n\t\tself.PubSpec = address.PubSpec\n\t\tself.Identity = address.Identity\n\t}\n\n\tif self.InSocket == nil {\n\t\terr := self.InitZMQ(ctx)\n\t\tif err != nil {\n\t\t\treturn errors.New(\"0mq init:\" + err.Error())\n\t\t}\n\t}\n\treturn nil\n}",
"func BindBuffer(target uint32, buffer uint32) {\n C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func NamedBufferData(buffer uint32, size int, data unsafe.Pointer, usage uint32) {\n\tsyscall.Syscall6(gpNamedBufferData, 4, uintptr(buffer), uintptr(size), uintptr(data), uintptr(usage), 0, 0)\n}",
"func (buffer Buffer) BindBufferBase(target gl.Enum, index uint) {\n\tgl.BindBufferBase(gl.Enum(target), gl.Uint(index), gl.Uint(buffer))\n}",
"func BindBuffer(target Enum, buffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcbuffer, _ := (C.GLuint)(buffer), cgoAllocsUnknown\n\tC.glBindBuffer(ctarget, cbuffer)\n}",
"func (gl *WebGL) BindBuffer(target GLEnum, buffer WebGLBuffer) {\n\tgl.context.Call(\"bindBuffer\", target, buffer)\n}",
"func (native *OpenGL) BindBuffer(target uint32, buffer uint32) {\n\tgl.BindBuffer(target, buffer)\n}",
"func NamedBufferStorage(buffer uint32, size int, data unsafe.Pointer, flags uint32) {\n\tsyscall.Syscall6(gpNamedBufferStorage, 4, uintptr(buffer), uintptr(size), uintptr(data), uintptr(flags), 0, 0)\n}",
"func (b *Binding) Set(buf uint32) {\n\tgl.BindBufferBase(gl.SHADER_STORAGE_BUFFER, b.uint32, buf)\n}",
"func (db *DB) BindNamed(query string, arg interface{}) (string, []interface{}, error) {\n return bindNamedMapper(BindType(db.driverName), query, arg, db.Mapper)\n}",
"func NamedFramebufferDrawBuffer(framebuffer uint32, buf uint32) {\n\tsyscall.Syscall(gpNamedFramebufferDrawBuffer, 2, uintptr(framebuffer), uintptr(buf), 0)\n}",
"func (s *Service) Bind(ctx context.Context, address string) error {\n\ts.mutex.Lock()\n\tif s.running {\n\t\ts.mutex.Unlock()\n\t\treturn fmt.Errorf(\"Init(): already running\")\n\t}\n\ts.mutex.Unlock()\n\n\ts.parseAddress(address)\n\n\terr := s.setListener(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func (debugging *debuggingOpenGL) BindBuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindBuffer\", target, buffer)\n\tdebugging.gl.BindBuffer(target, buffer)\n\tdebugging.recordExit(\"BindBuffer\")\n}",
"func (c *Client) Bind(proto uint16, ch ReadHandler) {\n\tc.events[proto] = append(c.events[proto], ch)\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tsyscall.Syscall6(gpBindBuffersBase, 4, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), 0, 0)\n}",
"func (s *DefaultSubscriber) QueueBind(name, key string) error {\n\terr := s.channel.QueueBind(name, key, s.exchange, false, nil)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Failed to bind queue %s with key %s on exchange '%s' (%s)\", name, key, s.exchange, err)\n\t}\n\treturn nil\n}",
"func (db *DB) BindNamed(query string, arg interface{}) (string, interface{}, error) {\n\treturn sqlx.BindNamed(sqlx.BindType(db.driver), query, arg)\n}",
"func NamedFramebufferReadBuffer(framebuffer uint32, src uint32) {\n\tsyscall.Syscall(gpNamedFramebufferReadBuffer, 2, uintptr(framebuffer), uintptr(src), 0)\n}",
"func NamedBufferData(buffer uint32, size int, data unsafe.Pointer, usage uint32) {\n\tC.glowNamedBufferData(gpNamedBufferData, (C.GLuint)(buffer), (C.GLsizeiptr)(size), data, (C.GLenum)(usage))\n}",
"func NamedBufferData(buffer uint32, size int, data unsafe.Pointer, usage uint32) {\n\tC.glowNamedBufferData(gpNamedBufferData, (C.GLuint)(buffer), (C.GLsizeiptr)(size), data, (C.GLenum)(usage))\n}",
"func (b *Buffer) Attach(buffer []byte) {\n b.AttachBytes(buffer, 0, len(buffer))\n}",
"func (e *connectionedEndpoint) Bind(addr Address) *syserr.Error {\n\te.Lock()\n\tdefer e.Unlock()\n\tif e.isBound() || e.ListeningLocked() {\n\t\treturn syserr.ErrAlreadyBound\n\t}\n\tif addr.Addr == \"\" {\n\t\t// The empty string is not permitted.\n\t\treturn syserr.ErrBadLocalAddress\n\t}\n\n\t// Save the bound address.\n\te.path = addr.Addr\n\treturn nil\n}",
"func (eb *Bus) Bind(fn Bindable, name string, callerID int) {\n\n\tbOpt := BindingOption{}\n\tbOpt.Event = Event{\n\t\tName: name,\n\t\tCallerID: callerID,\n\t}\n\n\tdlog.Verb(\"Binding \", callerID, \" with name \", name)\n\n\teb.BindPriority(fn, bOpt)\n}",
"func MapNamedBuffer(buffer uint32, access uint32) unsafe.Pointer {\n\tret, _, _ := syscall.Syscall(gpMapNamedBuffer, 2, uintptr(buffer), uintptr(access), 0)\n\treturn (unsafe.Pointer)(ret)\n}",
"func (c *Connection) queueBind(queue string, routingKey string, exchange string, opts *QueueBindOpts) error {\n\terr := c.Channel.QueueBind(\n\t\tqueue,\n\t\troutingKey,\n\t\texchange,\n\t\topts.NoWait,\n\t\topts.Args,\n\t)\n\n\treturn err\n}",
"func (c *Client) QueueBind(\n\texchange, queue, key string,\n\topts *QueueBindOpts,\n\tconnOpts *ConnectOpts) error {\n\n\tdefaultOpts := DefaultQueueBindOpts()\n\n\tif opts != nil {\n\t\tdefaultOpts = opts\n\t}\n\n\tdefaultConnOpts := DefaultConnectOpts()\n\tif connOpts != nil {\n\t\tdefaultConnOpts = connOpts\n\t}\n\n\tconn, err := c.connect(defaultConnOpts)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer conn.Close()\n\n\tch, err := conn.Channel()\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer ch.Close()\n\n\terr = ch.QueueBind(\n\t\tqueue,\n\t\tkey,\n\t\texchange,\n\t\tdefaultOpts.NoWait,\n\t\tdefaultOpts.Args,\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func bind(s socket, addr *unix.SockaddrALG) (*conn, error) {\n\tif err := s.Bind(addr); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &conn{\n\t\ts: s,\n\t\taddr: addr,\n\t}, nil\n}",
"func Bind(msg string, c Compo) *Binding {\n\treturn bind(msg, c)\n}",
"func NamedFramebufferDrawBuffer(framebuffer uint32, buf uint32) {\n\tC.glowNamedFramebufferDrawBuffer(gpNamedFramebufferDrawBuffer, (C.GLuint)(framebuffer), (C.GLenum)(buf))\n}",
"func NamedFramebufferDrawBuffer(framebuffer uint32, buf uint32) {\n\tC.glowNamedFramebufferDrawBuffer(gpNamedFramebufferDrawBuffer, (C.GLuint)(framebuffer), (C.GLenum)(buf))\n}",
"func (*broker) Bind(context context.Context, instanceID, bindingID string, details brokerapi.BindDetails) (brokerapi.Binding, error) {\n\treturn brokerapi.Binding{\n\t\tCredentials: \"random-credentials\",\n\t\tSyslogDrainURL: \"random-syslog-url\",\n\t\tRouteServiceURL: \"random-route-service-url\",\n\t\tVolumeMounts: []brokerapi.VolumeMount{},\n\t}, nil\n}",
"func (ex *Exchange) Bind(cq, routingKey string) {\n\n}",
"func NewAttachedBuffer(buffer *Buffer) *Buffer {\n result := NewEmptyBuffer()\n result.AttachBuffer(buffer)\n return result\n}",
"func NewAttached(buffer []byte) *Buffer {\n result := NewEmptyBuffer()\n result.Attach(buffer)\n return result\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpBindBufferRange, 5, uintptr(target), uintptr(index), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func (s *Server) handleBind(req *SocksRequest) error {\n\t// TODO: Support bind\n\tconn := req.Conn\n\tif err := sendReply(conn, commandNotSupported, nil); err != nil {\n\t\treturn fmt.Errorf(\"Failed to send reply: %v\", err)\n\t}\n\treturn nil\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n C.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n C.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n NewEnumsModel(buffer),\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyEnumsFunc(func(model *EnumsModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func NamedBufferStorage(buffer uint32, size int, data unsafe.Pointer, flags uint32) {\n\tC.glowNamedBufferStorage(gpNamedBufferStorage, (C.GLuint)(buffer), (C.GLsizeiptr)(size), data, (C.GLbitfield)(flags))\n}",
"func NamedBufferStorage(buffer uint32, size int, data unsafe.Pointer, flags uint32) {\n\tC.glowNamedBufferStorage(gpNamedBufferStorage, (C.GLuint)(buffer), (C.GLsizeiptr)(size), data, (C.GLbitfield)(flags))\n}",
"func (c *Context) Bind(name string, val interface{}) error {\n\tv, err := NewValue(val)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tn := C.CString(name)\n\tdefer C.free(unsafe.Pointer(n))\n\n\tif _, err = C.context_bind(c.context, n, v.Ptr()); err != nil {\n\t\tv.Destroy()\n\t\treturn fmt.Errorf(\"Binding value '%v' to context failed\", val)\n\t}\n\n\tc.values[name] = v\n\n\treturn nil\n}",
"func (server Server) Bind(name string, cp clientproxy.ClientProxy) {\n\tparams := make([]interface{}, 2)\n\tparams[0] = name\n\tparams[1] = cp\n\trequest := utils.Request{Op: \"Bind\", Params: params}\n\tinvocation := utils.Invocation{Host: server.IP, Port: server.Port, Request: request}\n\treqtor := requestor.Requestor{}\n\t// getting the result\n\treply := reqtor.Invoke(invocation).([]interface{})\n\tif reply[0] != nil {\n\t\terr := reply[0].(error)\n\t\tutils.PrintError(err, \"unable to bind on naming proxy\")\n\t}\n}",
"func NamedFramebufferReadBuffer(framebuffer uint32, src uint32) {\n\tC.glowNamedFramebufferReadBuffer(gpNamedFramebufferReadBuffer, (C.GLuint)(framebuffer), (C.GLenum)(src))\n}",
"func NamedFramebufferReadBuffer(framebuffer uint32, src uint32) {\n\tC.glowNamedFramebufferReadBuffer(gpNamedFramebufferReadBuffer, (C.GLuint)(framebuffer), (C.GLenum)(src))\n}",
"func NewNamedBufferedChannel(ctx Context, name string, size int) Channel {\n\treturn &channelImpl{name: name, size: size}\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n NewOrderModel(buffer),\n NewBalanceModel(buffer),\n NewAccountModel(buffer),\n nil,\n nil,\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyOrderFunc(func(model *OrderModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyBalanceFunc(func(model *BalanceModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyAccountFunc(func(model *AccountModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func Bind(uri interface{}, ichan interface{}, echan chan error) error {\n\treturn DefaultBinder.Bind(uri, ichan, echan)\n}",
"func (*TrackLocalRTP_RTPSenderPassthrough) Bind(ctx context.Context, track TrackLocal, sender RTPSender) error {\n\trtpTrack, ok := track.(TrackLocalRTP)\n\tif !ok {\n\t\treturn ErrIncompatible\n\t}\n\tptSender, ok := sender.(RTPSenderPassthrough)\n\tif !ok {\n\t\treturn ErrIncompatible\n\t}\n\teg, ctx2 := errgroup.WithContext(ctx)\n\t// RTP packets written via TrackLocalRTP.WriteRTP() will be\n\t// read by TrackLocalRTP.pipeReader.ReadRTP().\n\teg.Go(func() error {\n\t\treturn rtpengine.Copy(ctx2, ptSender, rtpTrack.pipeReader())\n\t})\n\teg.Go(func() error {\n\t\treturn rtpengine.CopyFeedback(ctx2, rtpTrack.pipeReader(), ptSender)\n\t})\n\treturn eg.Wait()\n}",
"func (w *Wrapper) Bind(destination interface{}) *Wrapper {\n\tw.destination = destination\n\treturn w\n}",
"func (buffer Buffer) BindBufferRange(target gl.Enum, index uint, offset int, size uint) {\n\tgl.BindBufferRange(gl.Enum(target), gl.Uint(index), gl.Uint(buffer), gl.Intptr(offset), gl.Sizeiptr(size))\n}",
"func Bind(obj interface{}) error {\n\treturn bind(obj, false)\n\n}",
"func (s *DbRecorder) Bind(tableName string, ar Record) Recorder {\n\n\t// \"To be is to be the value of a bound variable.\" - W. O. Quine\n\n\t// Get the table name\n\ts.table = tableName\n\n\t// Get the fields\n\ts.scanFields(ar)\n\n\ts.record = ar\n\n\treturn Recorder(s)\n}",
"func NewBind(key string, value interface{}) *Bind {\n\tbind := new(Bind)\n\tbind.Key = key\n\tbind.Data = value.([]byte)\n\treturn bind\n}",
"func QueueBind(ch *amqp.Channel, qName, rKey, exchange string, noWait bool) error {\n\terr := ch.QueueBind(\n\t\tqName, // queue name\n\t\trKey, // routing key\n\t\texchange, // exchange\n\t\tnoWait,\n\t\tnil,\n\t)\n\treturn err\n}",
"func (du *DescriptorSet) AddBuffer(dstBinding int, dtype vk.DescriptorType, b *Buffer, offset int) {\n\tvar descriptorBufferInfo = vk.DescriptorBufferInfo{}\n\tdescriptorBufferInfo.Buffer = b.VKBuffer\n\tdescriptorBufferInfo.Offset = vk.DeviceSize(offset)\n\tdescriptorBufferInfo.Range = vk.DeviceSize(b.Size)\n\n\tvar writeDescriptorSet = vk.WriteDescriptorSet{}\n\twriteDescriptorSet.SType = vk.StructureTypeWriteDescriptorSet\n\twriteDescriptorSet.DstBinding = uint32(dstBinding) // write to the first, and only binding.\n\twriteDescriptorSet.DescriptorCount = 1 // update a single descriptor.\n\twriteDescriptorSet.DescriptorType = dtype\n\twriteDescriptorSet.PBufferInfo = []vk.DescriptorBufferInfo{descriptorBufferInfo}\n\n\tif du.VKWriteDiscriptorSet == nil {\n\t\tdu.VKWriteDiscriptorSet = make([]vk.WriteDescriptorSet, 0)\n\t}\n\tdu.VKWriteDiscriptorSet = append(du.VKWriteDiscriptorSet, writeDescriptorSet)\n}",
"func newBuffer(e []byte) *Buffer {\n\tp := buffer_pool.Get().(*Buffer)\n\tp.buf = e\n\treturn p\n}",
"func (q *Queue) Bind(UserID int) error {\n\terr := q.channel.QueueBind(\n\t\tq.queue.Name,\n\t\t\"\",\n\t\tq.exchangeName,\n\t\ttrue,\n\t\tq.getBindTable(UserID),\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (naming *NamingService) Bind(name string, proxy clientproxy.ClientProxy) error {\n\t_, present := naming.Repository[name]\n\tif present {\n\t\treturn errors.New(\"Unable to bind \" + name + \". Name already exists.\")\n\t}\n\tnaming.Repository[name] = proxy\n\treturn nil\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tsyscall.Syscall6(gpBindVertexBuffer, 4, uintptr(bindingindex), uintptr(buffer), uintptr(offset), uintptr(stride), 0, 0)\n}",
"func (bs *Bindings) Bind(ctx *Ctx, x interface{}) interface{} {\r\n\treturn bs.replaceBindings(ctx, x)\r\n}",
"func (n *NatsSubscriber) Bind(mq *MessageQueue) {\n\tn.mq = mq\n\tn.nc.Opts.DisconnectedCB = func(_ *nats.Conn) {\n\t\tlog.Printf(\"Got disconnected! Queued %d messagse\\n\", n.mq.Len())\n\t}\n}",
"func (p *Protocol) Bind(node *noise.Node) error {\n\tp.node = node\n\tp.table = NewTable(p.node.ID())\n\n\tif p.logger == nil {\n\t\tp.logger = p.node.Logger()\n\t}\n\n\tnode.RegisterMessage(Ping{}, UnmarshalPing)\n\tnode.RegisterMessage(Pong{}, UnmarshalPong)\n\tnode.RegisterMessage(FindNodeRequest{}, UnmarshalFindNodeRequest)\n\tnode.RegisterMessage(FindNodeResponse{}, UnmarshalFindNodeResponse)\n\n\tnode.Handle(p.Handle)\n\n\treturn nil\n}",
"func MapNamedBuffer(buffer uint32, access uint32) unsafe.Pointer {\n\tret := C.glowMapNamedBuffer(gpMapNamedBuffer, (C.GLuint)(buffer), (C.GLenum)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func MapNamedBuffer(buffer uint32, access uint32) unsafe.Pointer {\n\tret := C.glowMapNamedBuffer(gpMapNamedBuffer, (C.GLuint)(buffer), (C.GLenum)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func (q *Queue) Bind(afFamily int) error {\n\tif q.cH == nil {\n\t\treturn ErrNotInitialized\n\t}\n\n\t/* Errors in nfq_bind_pf are non-fatal ...\n\t * This function just tells the kernel that nfnetlink_queue is\n\t * the chosen module to queue packets to userspace.\n\t */\n\t_ = C.nfq_bind_pf(q.cH, C.u_int16_t(afFamily))\n\n\treturn nil\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func (e *endpoint) Bind(addr tcpip.FullAddress, commit func() *tcpip.Error) *tcpip.Error {\n\te.mu.Lock()\n\tdefer e.mu.Unlock()\n\n\terr := e.bindLocked(addr, commit)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\te.bindNICID = addr.NIC\n\te.bindAddr = addr.Addr\n\n\treturn nil\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tsyscall.Syscall6(gpBindBuffersRange, 6, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(sizes)))\n}",
"func (s *Serializer) Bind(val interface{}) *BoundVariable {\n\tb, ok := val.(*BoundVariable)\n\tif !ok {\n\t\tb = Bind(val)\n\t}\n\n\tif _, ok := s.vpos[b]; !ok {\n\t\ts.vals = append(s.vals, b)\n\t\ts.vpos[b] = len(s.vals)\n\t}\n\n\treturn b\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n C.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func (s *stmt) bindBlob(pstmt unsafe.Pointer, idx1 int, value []byte) (err error) {\n\tp, err := s.malloc(len(value))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ts.allocs = append(s.allocs, p)\n\tcrt.CopyBytes(p, value, false)\n\tif rc := bin.Xsqlite3_bind_blob(s.tls, pstmt, int32(idx1), p, int32(len(value)), nil); rc != bin.XSQLITE_OK {\n\t\treturn s.errstr(rc)\n\t}\n\n\treturn nil\n}",
"func (r *RouteMatch) Bind(name string, param BindParam) {\n\tif r.router.bindParamMap == nil {\n\t\tr.router.bindParamMap = make(map[string]BindParam)\n\t}\n\tr.router.bindParamMap[name] = param\n}",
"func MBind(addr unsafe.Pointer, length, mode, flags int, nodemask Bitmask) error {\n\treturn syscall.ENOSYS\n}",
"func (k *KIT) Bind(id string) bool {\n\tvar kid *C.char\n\tif id != \"\" {\n\t\tkid = C.CString(id)\n\t} else {\n\t\tkid = C.CString(KIT_DEFAULT_ID)\n\t}\n\tret := C.kit_bind(kid, &k._instance)\n\tC.free(unsafe.Pointer(kid))\n\treturn ret != 0\n}",
"func (f *Pub) Bind(rx Publisher, cl bool) {\n\tf.branches.Add(rx)\n\trx.UseRoot(f)\n\n\tif cl {\n\t\tf.enders.Add(rx)\n\t}\n}",
"func (b *VBO) Bind(m *Mesh) {\n\tif !b.genBound {\n\t\tpanic(\"A VBO buffer ID has not been generated. Call GenBuffer first.\")\n\t}\n\n\tgl.BindBuffer(gl.ARRAY_BUFFER, b.vboID)\n\tfloatSize := int(unsafe.Sizeof(float32(0)))\n\tgl.BufferData(gl.ARRAY_BUFFER, len(m.Vertices)*floatSize, gl.Ptr(m.Vertices), gl.STATIC_DRAW)\n}",
"func Bind(r *http.Request, v Binder) error {\n\tif err := Decode(r, v); err != nil {\n\t\treturn err\n\t}\n\treturn binder(r, v)\n}",
"func Bind(keystring string, cb KeybinderHandler, data unsafe.Pointer) bool {\n\n\tkeybinderHandlerRegistry.Lock()\n\tid := keybinderHandlerRegistry.next\n\tkeybinderHandlerRegistry.next++\n\tkeybinderHandlerRegistry.m[id] =\n\t\tkeybinderHandlerData{fn: cb, keystring: keystring, data: data}\n\tkeybinderHandlerRegistry.Unlock()\n\n\tcstr := C.CString(keystring)\n\tdefer C.free(unsafe.Pointer(cstr))\n\tc := C._keybinder_bind(cstr, unsafe.Pointer(uintptr(id)))\n\n\treturn gobool(c)\n}",
"func (client *ClientWrapper) Bind(key string, value interface{}) (int, os.Error) {\n\tbind := registry.NewBind(key, value)\n\tvar handle int\n\terr := client.Client.Call(\"Registry.Bind\", bind, &handle)\n\treturn handle, err\n}",
"func bindTrebuchet(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {\n\tparsed, err := abi.JSON(strings.NewReader(TrebuchetABI))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil\n}",
"func bind(endpoint string) (*socket, error) {\n\tzmqSocket, err := zmq.NewSocket(zmq.ROUTER)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\ts := socket{\n\t\tzmqSocket: zmqSocket,\n\t\tChannels: make([]*channel, 0),\n\t}\n\n\tif err := s.zmqSocket.Bind(endpoint); err != nil {\n\t\treturn nil, err\n\t}\n\n\tlog.Printf(\"ZeroRPC socket bound to %s\", endpoint)\n\n\tgo s.listen()\n\n\treturn &s, nil\n}",
"func BindStruct(key string, dst any) error { return dc.BindStruct(key, dst) }",
"func NewBindTransmitter(sequence uint32, body *BindBody) *BindTransmitter {\n\treturn &BindTransmitter{\n\t\tHeader: &Header{\n\t\t\tCommandLength: 0,\n\t\t\tCommandId: protocol.BindTransmitter,\n\t\t\tCommandStatus: protocol.EsmeRok,\n\t\t\tSequenceNumber: sequence,\n\t\t},\n\t\tBody: body,\n\t}\n}",
"func (client *serviceManagerClient) Bind(binding *types.ServiceBinding, q *Parameters) (*types.ServiceBinding, string, error) {\n\tvar newBinding *types.ServiceBinding\n\tlocation, err := client.register(binding, web.ServiceBindingsURL, q, &newBinding)\n\tif err != nil {\n\t\treturn nil, \"\", err\n\t}\n\treturn newBinding, location, nil\n}",
"func NewWlBufferWithID(c *wire.Conn, id wire.ID) *WlBuffer {\n\to := &WlBuffer{Base: Base{c, id}}\n\tc.RegisterObject(o)\n\treturn o\n}",
"func NamedFramebufferDrawBuffers(framebuffer uint32, n int32, bufs *uint32) {\n\tsyscall.Syscall(gpNamedFramebufferDrawBuffers, 3, uintptr(framebuffer), uintptr(n), uintptr(unsafe.Pointer(bufs)))\n}",
"func (a *AbstractSocketNamespace) Bind(name string, ep transport.BoundEndpoint, rc refs.RefCounter) error {\n\ta.mu.Lock()\n\tdefer a.mu.Unlock()\n\n\tif ep, ok := a.endpoints[name]; ok {\n\t\tif rc := ep.wr.Get(); rc != nil {\n\t\t\trc.DecRef()\n\t\t\treturn syscall.EADDRINUSE\n\t\t}\n\t}\n\n\tae := abstractEndpoint{ep: ep, name: name, ns: a}\n\tae.wr = refs.NewWeakRef(rc, &ae)\n\ta.endpoints[name] = ae\n\treturn nil\n}",
"func bindListener(g *G.Gilmour) {\n\tg.ReplyTo(\"test.handler.one\", fetchReply(g), nil)\n}",
"func bindTokenStorage(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {\n\tparsed, err := abi.JSON(strings.NewReader(TokenStorageABI))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil\n}",
"func (r *Route) Bind(b Binder) {\n\tr.Binder = b\n}"
] | [
"0.65895575",
"0.6543274",
"0.64173114",
"0.63751084",
"0.63027453",
"0.61116695",
"0.603623",
"0.60335517",
"0.60216767",
"0.60066944",
"0.60041535",
"0.59543073",
"0.5899972",
"0.58810574",
"0.58643997",
"0.585782",
"0.5836036",
"0.5794475",
"0.5794475",
"0.578019",
"0.57713205",
"0.5752086",
"0.57084715",
"0.5708054",
"0.5707671",
"0.5681674",
"0.5681674",
"0.567947",
"0.5677805",
"0.56701374",
"0.565385",
"0.56267107",
"0.5618465",
"0.5602354",
"0.5602129",
"0.5579831",
"0.5579831",
"0.55447495",
"0.55335975",
"0.551991",
"0.5512259",
"0.55111516",
"0.55098504",
"0.5506439",
"0.54989076",
"0.5496529",
"0.54742104",
"0.54742104",
"0.5445208",
"0.5440509",
"0.5439796",
"0.5439796",
"0.54389375",
"0.5432465",
"0.5418635",
"0.540854",
"0.54026175",
"0.5402272",
"0.5399705",
"0.5388625",
"0.5375446",
"0.536614",
"0.5362179",
"0.53466123",
"0.5321361",
"0.53181505",
"0.5315853",
"0.5311356",
"0.53015375",
"0.5298647",
"0.5285225",
"0.5285225",
"0.5255866",
"0.525515",
"0.525515",
"0.52504885",
"0.524665",
"0.5246445",
"0.5235136",
"0.52300394",
"0.5219364",
"0.52074367",
"0.5199801",
"0.5194179",
"0.5189672",
"0.51878095",
"0.5186121",
"0.5182636",
"0.5181693",
"0.5174517",
"0.51720893",
"0.5171347",
"0.5167738",
"0.5166444",
"0.5163083",
"0.5161578",
"0.51611257",
"0.5156312",
"0.5154576"
] | 0.5710418 | 23 |
bind a buffer object to an indexed buffer target | func BindBufferBase(target uint32, index uint32, buffer uint32) {
C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBufferBase, 3, uintptr(target), uintptr(index), uintptr(buffer))\n}",
"func (buffer Buffer) BindBufferBase(target gl.Enum, index uint) {\n\tgl.BindBufferBase(gl.Enum(target), gl.Uint(index), gl.Uint(buffer))\n}",
"func (buffer Buffer) Bind(target gl.Enum) {\n\tgl.BindBuffer(gl.Enum(target), gl.Uint(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBuffer, 2, uintptr(target), uintptr(buffer), 0)\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n C.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBuffer(target Enum, b Buffer) {\n\tgl.BindBuffer(uint32(target), b.Value)\n}",
"func (native *OpenGL) BindBuffer(target uint32, buffer uint32) {\n\tgl.BindBuffer(target, buffer)\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpBindBufferRange, 5, uintptr(target), uintptr(index), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func (buffer Buffer) BindBufferRange(target gl.Enum, index uint, offset int, size uint) {\n\tgl.BindBufferRange(gl.Enum(target), gl.Uint(index), gl.Uint(buffer), gl.Intptr(offset), gl.Sizeiptr(size))\n}",
"func (debugging *debuggingOpenGL) BindBuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindBuffer\", target, buffer)\n\tdebugging.gl.BindBuffer(target, buffer)\n\tdebugging.recordExit(\"BindBuffer\")\n}",
"func BindBuffer(target Enum, buffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcbuffer, _ := (C.GLuint)(buffer), cgoAllocsUnknown\n\tC.glBindBuffer(ctarget, cbuffer)\n}",
"func (gl *WebGL) BindBuffer(target GLEnum, buffer WebGLBuffer) {\n\tgl.context.Call(\"bindBuffer\", target, buffer)\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n C.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n C.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tsyscall.Syscall6(gpBindBuffersBase, 4, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), 0, 0)\n}",
"func (b *Binding) Set(buf uint32) {\n\tgl.BindBufferBase(gl.SHADER_STORAGE_BUFFER, b.uint32, buf)\n}",
"func (b *defaultByteBuffer) AppendBuffer(buf ByteBuffer) (n int, err error) {\n\tsubBuf := buf.(*defaultByteBuffer)\n\tn = subBuf.writeIdx\n\tb.ensureWritable(n)\n\tcopy(b.buff[b.writeIdx:b.writeIdx+n], subBuf.buff)\n\tb.writeIdx += n\n\tbuf.Release(nil)\n\treturn\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n C.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func (self Source) SetBuffer(buffer Buffer) {\n\tself.Seti(AlBuffer, int32(buffer))\n}",
"func (o *GetFetchParams) bindIndex(rawData []string, hasKey bool, formats strfmt.Registry) error {\n\tvar raw string\n\tif len(rawData) > 0 {\n\t\traw = rawData[len(rawData)-1]\n\t}\n\n\t// Required: false\n\t// AllowEmptyValue: false\n\tif raw == \"\" { // empty values pass all other validations\n\t\treturn nil\n\t}\n\n\tvalue, err := swag.ConvertInt64(raw)\n\tif err != nil {\n\t\treturn errors.InvalidType(\"index\", \"query\", \"int64\", raw)\n\t}\n\to.Index = &value\n\n\tif err := o.validateIndex(formats); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tsyscall.Syscall6(gpBindBuffersRange, 6, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(sizes)))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func (p *Buffer) Rewind() {\n\tp.index = 0\n}",
"func (b *buffer) index(i uint32) *unsafe.Pointer {\n\treturn risky.Index(unsafe.Pointer(&b.data), ptrSize, uintptr(i))\n}",
"func (geom Geometry) Buffer(distance float64, segments int) Geometry {\n\tnewGeom := C.OGR_G_Buffer(geom.cval, C.double(distance), C.int(segments))\n\treturn Geometry{newGeom}\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n C.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func (bw *BufferedWriterMongo) writeBuffer() (err error) {\n\n\tif len(bw.buffer) == 0 {\n\t\treturn nil\n\t}\n\n\tcoll := bw.client.Database(bw.db).Collection(bw.collection)\n\t_, err = coll.InsertMany(bw.ctx, bw.buffer)\n\treturn err\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func (src *Source) SetBuffer(buf []byte) {\n\tsrc.buf = buf\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *int8) {\n C.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tsyscall.Syscall6(gpBindVertexBuffer, 4, uintptr(bindingindex), uintptr(buffer), uintptr(offset), uintptr(stride), 0, 0)\n}",
"func (b *Buffer) Attach(buffer []byte) {\n b.AttachBytes(buffer, 0, len(buffer))\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n\tC.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n\tC.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func (g *Gaffer) AddBuffer(u *Update) {\n\n\tfor _, v := range u.entities {\n\t\tg.AddEntity(v)\n\t}\n\n\tfor _, v := range u.edges {\n\t\tg.AddEdge(v)\n\t}\n\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tC.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tC.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func (self Source) QueueBuffer(buffer Buffer) {\n\tC.walSourceQueueBuffer(C.ALuint(self), C.ALuint(buffer))\n}",
"func BufferInit(target Enum, size int, usage Enum) {\n\tgl.BufferData(uint32(target), size, nil, uint32(usage))\n}",
"func (du *DescriptorSet) AddBuffer(dstBinding int, dtype vk.DescriptorType, b *Buffer, offset int) {\n\tvar descriptorBufferInfo = vk.DescriptorBufferInfo{}\n\tdescriptorBufferInfo.Buffer = b.VKBuffer\n\tdescriptorBufferInfo.Offset = vk.DeviceSize(offset)\n\tdescriptorBufferInfo.Range = vk.DeviceSize(b.Size)\n\n\tvar writeDescriptorSet = vk.WriteDescriptorSet{}\n\twriteDescriptorSet.SType = vk.StructureTypeWriteDescriptorSet\n\twriteDescriptorSet.DstBinding = uint32(dstBinding) // write to the first, and only binding.\n\twriteDescriptorSet.DescriptorCount = 1 // update a single descriptor.\n\twriteDescriptorSet.DescriptorType = dtype\n\twriteDescriptorSet.PBufferInfo = []vk.DescriptorBufferInfo{descriptorBufferInfo}\n\n\tif du.VKWriteDiscriptorSet == nil {\n\t\tdu.VKWriteDiscriptorSet = make([]vk.WriteDescriptorSet, 0)\n\t}\n\tdu.VKWriteDiscriptorSet = append(du.VKWriteDiscriptorSet, writeDescriptorSet)\n}",
"func (buf *ListBuffer) Set(idx BufferIndex, item Item) (*error.Error) {\n\tinRange, initialized := buf.legalIndex(idx)\n\tif !inRange {\n\t\tdesc := fmt.Sprintf(\n\t\t\t\"idx, %d, is out of range for IndexBuffer of length %d.\",\n\t\t\tidx, len(buf.Buffer),\n\t\t)\n\t\treturn error.New(error.Value, desc)\n\t} else if !initialized {\n\t\tdesc := fmt.Sprintf(\n\t\t\t\"Item at idx, %d, has the Type value Uninitialized.\", idx,\n\t\t)\n\t\treturn error.New(error.Value, desc)\n\t}\n\n\tbuf.Buffer[idx].Item = item\n\treturn nil\n}",
"func bindAccessIndexor(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {\n\tparsed, err := ParsedABI(K_AccessIndexor)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn bind.NewBoundContract(address, *parsed, caller, transactor, filterer), nil\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n NewEnumsModel(buffer),\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyEnumsFunc(func(model *EnumsModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n C.glowBindRenderbuffer(gpBindRenderbuffer, (C.GLenum)(target), (C.GLuint)(renderbuffer))\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tsyscall.Syscall6(gpBindFragDataLocationIndexed, 4, uintptr(program), uintptr(colorNumber), uintptr(index), uintptr(unsafe.Pointer(name)), 0, 0)\n}",
"func (Operators) Buffer(notifier Observable) OperatorFunc {\n\treturn func(source Observable) Observable {\n\t\top := bufferOperator{notifier}\n\t\treturn source.Lift(op.Call)\n\t}\n}",
"func MapBuffer(target uint32, access uint32) unsafe.Pointer {\n ret := C.glowMapBuffer(gpMapBuffer, (C.GLenum)(target), (C.GLenum)(access))\n return (unsafe.Pointer)(ret)\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n NewOrderModel(buffer),\n NewBalanceModel(buffer),\n NewAccountModel(buffer),\n nil,\n nil,\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyOrderFunc(func(model *OrderModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyBalanceFunc(func(model *BalanceModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyAccountFunc(func(model *AccountModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func (c *Client) Bind(proto uint16, ch ReadHandler) {\n\tc.events[proto] = append(c.events[proto], ch)\n}",
"func NewBuffer(aSlice interface{}) *Buffer {\n return &Buffer{buffer: sliceValue(aSlice, false), handler: valueHandler{}}\n}",
"func OptBuffer(buffer int) func(*Beam) {\n\treturn func(b *Beam) { b.buffer = buffer }\n}",
"func BufferData(target Enum, src []byte, usage Enum) {\n\tgl.BufferData(uint32(target), int(len(src)), gl.Ptr(&src[0]), uint32(usage))\n}",
"func MapBuffer(target uint32, access uint32) unsafe.Pointer {\n\tret, _, _ := syscall.Syscall(gpMapBuffer, 2, uintptr(target), uintptr(access), 0)\n\treturn (unsafe.Pointer)(ret)\n}",
"func (al *AudioListener) setBuffer(size int) {\n\tal.Lock()\n\tdefer al.Unlock()\n\n\tal.buffer = make([]gumble.AudioPacket, 0, size)\n}",
"func (p *Buffer) saveIndex(ptr unsafe.Pointer, idx uint) {\n\tif p.array_indexes == nil {\n\t\t// the 1st time we need to allocate\n\t\tp.array_indexes = make(map[unsafe.Pointer]uint)\n\t}\n\tp.array_indexes[ptr] = idx\n}",
"func (w *Wrapper) Bind(destination interface{}) *Wrapper {\n\tw.destination = destination\n\treturn w\n}",
"func (it iterator) index(b *ringBuf) uint64 {\n\treturn b.buf[it].Index\n}",
"func (w *Writer) SetBuffer(raw []byte) {\n\tif w.err != nil {\n\t\treturn\n\t}\n\tw.b = w.b[:0]\n\tw.b = append(w.b, raw...)\n}",
"func MapBuffer(target gl.Enum, access gl.Enum) {\n\tgl.MapBuffer(gl.Enum(target), gl.Enum(access))\n}",
"func newBuffer(e []byte) *Buffer {\n\tp := buffer_pool.Get().(*Buffer)\n\tp.buf = e\n\treturn p\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n C.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func (b *VBO) Bind(m *Mesh) {\n\tif !b.genBound {\n\t\tpanic(\"A VBO buffer ID has not been generated. Call GenBuffer first.\")\n\t}\n\n\tgl.BindBuffer(gl.ARRAY_BUFFER, b.vboID)\n\tfloatSize := int(unsafe.Sizeof(float32(0)))\n\tgl.BufferData(gl.ARRAY_BUFFER, len(m.Vertices)*floatSize, gl.Ptr(m.Vertices), gl.STATIC_DRAW)\n}",
"func (vao *VAO) AddIndexBuffer(ibo *ibo.IBO) {\n\tvao.indexBuffer = ibo\n}",
"func new_buffer(conn *websocket.Conn, ctrl chan struct{}, txqueuelen int) *Buffer {\n\tbuf := Buffer{conn: conn}\n\tbuf.pending = make(chan []byte, txqueuelen)\n\tbuf.ctrl = ctrl\n\tbuf.cache = make([]byte, packet.PACKET_LIMIT+2)\n\treturn &buf\n}",
"func (w *windowImpl) bindBackBuffer() {\n\t// w.mu.Lock()\n\t// size := w.Sz\n\t// w.mu.Unlock()\n\t//\n\tw.backBufferBound = true\n\t// gl.BindFramebuffer(gl.FRAMEBUFFER, 0)\n\t// gl.Viewport(0, 0, int32(size.X), int32(size.Y))\n}",
"func poolSetIndex(a interface{}, i int) {\n\ta.(*freeClientPoolEntry).index = i\n}",
"func (native *OpenGL) BindFramebuffer(target, buffer uint32) {\n\tgl.BindFramebuffer(target, buffer)\n}",
"func (p *InfluxDBProxy) BufferSeries(series *influxdb.Series) {\n\t// add series to buffer map\n\tp.buflock.Lock()\n\tnormalizedIdx, exists := p.bufIdx[series.Name]\n\tif !exists {\n\t\tp.bufIdx[series.Name] = len(p.buf)\n\t\tp.buf = append(p.buf, series)\n\t} else {\n\t\tnormalizedSeries := p.buf[normalizedIdx]\n\t\tmergeSeries(normalizedSeries, series)\n\t}\n\tp.buflock.Unlock()\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n C.glowVertexAttribBinding(gpVertexAttribBinding, (C.GLuint)(attribindex), (C.GLuint)(bindingindex))\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n\tsyscall.Syscall(gpBindRenderbuffer, 2, uintptr(target), uintptr(renderbuffer), 0)\n}",
"func NewAttachedBuffer(buffer *Buffer) *Buffer {\n result := NewEmptyBuffer()\n result.AttachBuffer(buffer)\n return result\n}",
"func (c *Client) ExchangeBuffer(inbuf []byte, a string, outbuf []byte) (n int, err error) {\n\tw := new(reply)\n\tw.client = c\n\tw.addr = a\n\tif c.Hijacked == nil {\n\t\tif err = w.Dial(); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t\tdefer w.Close()\n\t}\n\tif c.Hijacked != nil {\n\t\tw.conn = c.Hijacked\n\t}\n\tif n, err = w.writeClient(inbuf); err != nil {\n\t\treturn 0, err\n\t}\n\t//Why cant we set the buf here?? TODO(MG)\n\tif n, err = w.readClient(outbuf); err != nil {\n\t\treturn n, err\n\t}\n\treturn n, nil\n}",
"func advanceBuffer(buff *bytes.Buffer, num int) {\n\tbuff.Next(num)\n\t// move buffer from num offset to 0\n\tbytearr := buff.Bytes()\n\tbuff.Reset()\n\tbuff.Write(bytearr)\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n proto.NewProxyWithBuffer(buffer),\n NewStructSimpleModel(buffer),\n NewStructOptionalModel(buffer),\n NewStructNestedModel(buffer),\n NewStructBytesModel(buffer),\n NewStructArrayModel(buffer),\n NewStructVectorModel(buffer),\n NewStructListModel(buffer),\n NewStructSetModel(buffer),\n NewStructMapModel(buffer),\n NewStructHashModel(buffer),\n NewStructHashExModel(buffer),\n NewStructEmptyModel(buffer),\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyStructSimpleFunc(func(model *StructSimpleModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructOptionalFunc(func(model *StructOptionalModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructNestedFunc(func(model *StructNestedModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructBytesFunc(func(model *StructBytesModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructArrayFunc(func(model *StructArrayModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructVectorFunc(func(model *StructVectorModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructListFunc(func(model *StructListModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructSetFunc(func(model *StructSetModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructMapFunc(func(model *StructMapModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructHashFunc(func(model *StructHashModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructHashExFunc(func(model *StructHashExModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructEmptyFunc(func(model *StructEmptyModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func newBuffer(r io.Reader, offset int64) *buffer {\n\treturn &buffer{\n\t\tr: r,\n\t\toffset: offset,\n\t\tbuf: make([]byte, 0, 4096),\n\t\tallowObjptr: true,\n\t\tallowStream: true,\n\t}\n}",
"func NewBuffer(capacity int) Buffer {\n\treturn Buffer{\n\t\tcapacity: capacity,\n\t\tcurrentSize: 0,\n\t\tcontents: map[entity.Key]inventoryapi.PostDeltaBody{},\n\t}\n}",
"func BindRenderbuffer(target GLEnum, renderbuffer Renderbuffer) {\n\tgl.BindRenderbuffer(uint32(target), uint32(renderbuffer))\n}",
"func (desc BindDescriptor) Index() int {\n\tindex := int(desc>>bindIndexShift) - 1\n\t// debugf(\"BindDescriptor=%v, class=%v, index=%v\", desc, desc.Class(), index)\n\treturn index\n}",
"func newBuffer(b []byte) *buffer {\n\treturn &buffer{proto.NewBuffer(b), 0}\n}",
"func NewWlBufferWithID(c *wire.Conn, id wire.ID) *WlBuffer {\n\to := &WlBuffer{Base: Base{c, id}}\n\tc.RegisterObject(o)\n\treturn o\n}",
"func (g *GLTF) loadBuffer(bufIdx int) ([]byte, error) {\n\n\t// Check if provided buffer index is valid\n\tif bufIdx < 0 || bufIdx >= len(g.Buffers) {\n\t\treturn nil, fmt.Errorf(\"invalid buffer index\")\n\t}\n\tbufData := &g.Buffers[bufIdx]\n\t// Return cached if available\n\tif bufData.cache != nil {\n\t\tlog.Debug(\"Fetching Buffer %d (cached)\", bufIdx)\n\t\treturn bufData.cache, nil\n\t}\n\tlog.Debug(\"Loading Buffer %d\", bufIdx)\n\n\t// If buffer URI use the chunk data field\n\tif bufData.Uri == \"\" {\n\t\treturn g.data, nil\n\t}\n\n\t// Checks if buffer URI is a data URI\n\tvar data []byte\n\tvar err error\n\tif isDataURL(bufData.Uri) {\n\t\tdata, err = loadDataURL(bufData.Uri)\n\t} else {\n\t\t// Try to load buffer from file\n\t\tdata, err = g.loadFileBytes(bufData.Uri)\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Checks data length\n\tif len(data) != bufData.ByteLength {\n\t\treturn nil, fmt.Errorf(\"buffer:%d read data length:%d expected:%d\", bufIdx, len(data), bufData.ByteLength)\n\t}\n\t// Cache buffer data\n\tg.Buffers[bufIdx].cache = data\n\tlog.Debug(\"cache data:%v\", len(bufData.cache))\n\treturn data, nil\n}",
"func (ex *Exchange) Bind(cq, routingKey string) {\n\n}",
"func (v *View) OpenBuffer(buf *Buffer) {\n\tv.buf = buf\n\tv.topline = 0\n\t// Put the cursor at the first spot\n\tv.cursor = Cursor{\n\t\tx: 0,\n\t\ty: 0,\n\t\tv: v,\n\t}\n\tv.cursor.ResetSelection()\n\n\tv.eh = NewEventHandler(v)\n\n\tv.matches = Match(v)\n\n\t// Set mouseReleased to true because we assume the mouse is not being pressed when\n\t// the editor is opened\n\tv.mouseReleased = true\n\tv.lastClickTime = time.Time{}\n}",
"func (c webgl) BufferDataX(target Enum, d interface{}, usage Enum) {\n\tc.ctx.Call(\"bufferData\", target, conv(d), usage)\n}",
"func paramBinding(index, paramIndex int, typ reflect.Type) *binding {\n\treturn &binding{\n\t\tDependency: &Dependency{Handle: paramDependencyHandler(paramIndex), DestType: typ, Source: getSource()},\n\t\tInput: newInput(typ, index, nil),\n\t}\n}",
"func (debugging *debuggingOpenGL) BufferData(target uint32, size int, data interface{}, usage uint32) {\n\tdebugging.recordEntry(\"BufferData\", target, size, data, usage)\n\tdebugging.gl.BufferData(target, size, data, usage)\n\tdebugging.recordExit(\"BufferData\")\n}",
"func BindRenderbuffer(target Enum, renderbuffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcrenderbuffer, _ := (C.GLuint)(renderbuffer), cgoAllocsUnknown\n\tC.glBindRenderbuffer(ctarget, crenderbuffer)\n}",
"func TextureBuffer(texture uint32, internalformat uint32, buffer uint32) {\n\tsyscall.Syscall(gpTextureBuffer, 3, uintptr(texture), uintptr(internalformat), uintptr(buffer))\n}",
"func (b *BufferManager) SetBuffer(peer *PeerSession) {\n\tb.lock.Lock()\n\tdefer b.lock.Unlock()\n\toffset, ok := b.freeIndex.TryDequeue()\n\tif ok {\n\t\tpeer.bufferOffst = offset.(int64)\n\t\tpeer.buffers = b.buffers[peer.bufferOffst : peer.bufferOffst+int64(b.bufferSize)]\n\t} else {\n\t\tif b.totalBytes-int64(b.bufferSize) < b.currentIndex {\n\t\t\tpeer.buffers = make([]byte, b.bufferSize)\n\t\t\tpeer.bufferOffst = -1\n\t\t\t//The buffer pool is empty.\n\t\t\t//return false\n\t\t} else {\n\t\t\tpeer.bufferOffst = b.currentIndex\n\t\t\tpeer.buffers = b.buffers[peer.bufferOffst : peer.bufferOffst+int64(b.bufferSize)]\n\t\t\tb.currentIndex += int64(b.bufferSize)\n\t\t}\n\t}\n\t//return true\n}"
] | [
"0.67690474",
"0.6754688",
"0.6504956",
"0.6449982",
"0.621203",
"0.62079",
"0.6192015",
"0.61312145",
"0.6001993",
"0.5949003",
"0.5904131",
"0.58839715",
"0.58665264",
"0.5838533",
"0.58151007",
"0.5801717",
"0.5793947",
"0.5793947",
"0.57462114",
"0.57462114",
"0.5730476",
"0.55920064",
"0.55402195",
"0.5526103",
"0.55176324",
"0.54693395",
"0.5468977",
"0.5464268",
"0.5464268",
"0.5397043",
"0.5377533",
"0.53124857",
"0.5282292",
"0.52406067",
"0.5227775",
"0.5227775",
"0.5224073",
"0.5206469",
"0.5188086",
"0.5188086",
"0.5185989",
"0.5149407",
"0.51109326",
"0.51109326",
"0.5099161",
"0.50988156",
"0.50988156",
"0.5098714",
"0.5094839",
"0.5091553",
"0.508695",
"0.5084112",
"0.5083292",
"0.50813115",
"0.50769776",
"0.5053909",
"0.5049765",
"0.50489664",
"0.50445116",
"0.50379676",
"0.5030379",
"0.5025532",
"0.49953553",
"0.4987513",
"0.498484",
"0.49793565",
"0.49728608",
"0.49724492",
"0.49720043",
"0.49621192",
"0.49578437",
"0.4955031",
"0.49511907",
"0.49481735",
"0.49412012",
"0.49186882",
"0.49017736",
"0.4898862",
"0.489539",
"0.48949954",
"0.48937926",
"0.4893609",
"0.48777163",
"0.48708495",
"0.48620555",
"0.48619446",
"0.485763",
"0.4856549",
"0.48445928",
"0.4840721",
"0.48392364",
"0.48392355",
"0.4838161",
"0.4832884",
"0.48242274",
"0.4813583",
"0.48129526",
"0.47909266",
"0.4776066"
] | 0.6422142 | 5 |
bind a range within a buffer object to an indexed buffer target | func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {
C.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpBindBufferRange, 5, uintptr(target), uintptr(index), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n C.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func (buffer Buffer) BindBufferRange(target gl.Enum, index uint, offset int, size uint) {\n\tgl.BindBufferRange(gl.Enum(target), gl.Uint(index), gl.Uint(buffer), gl.Intptr(offset), gl.Sizeiptr(size))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tsyscall.Syscall6(gpBindBuffersRange, 6, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(sizes)))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n C.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBufferBase, 3, uintptr(target), uintptr(index), uintptr(buffer))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func TexBufferRange(target uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpTexBufferRange, 5, uintptr(target), uintptr(internalformat), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func MapBufferRange(target uint32, offset int, length int, access uint32) unsafe.Pointer {\n ret := C.glowMapBufferRange(gpMapBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length), (C.GLbitfield)(access))\n return (unsafe.Pointer)(ret)\n}",
"func TexBufferRange(target uint32, internalformat uint32, buffer uint32, offset int, size int) {\n C.glowTexBufferRange(gpTexBufferRange, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func MapBufferRange(target uint32, offset int, length int, access uint32) unsafe.Pointer {\n\tret, _, _ := syscall.Syscall6(gpMapBufferRange, 4, uintptr(target), uintptr(offset), uintptr(length), uintptr(access), 0, 0)\n\treturn (unsafe.Pointer)(ret)\n}",
"func (buffer Buffer) Bind(target gl.Enum) {\n\tgl.BindBuffer(gl.Enum(target), gl.Uint(buffer))\n}",
"func (buffer Buffer) BindBufferBase(target gl.Enum, index uint) {\n\tgl.BindBufferBase(gl.Enum(target), gl.Uint(index), gl.Uint(buffer))\n}",
"func TexBufferRange(target uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tC.glowTexBufferRange(gpTexBufferRange, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func TexBufferRange(target uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tC.glowTexBufferRange(gpTexBufferRange, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func TextureBufferRange(texture uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpTextureBufferRange, 5, uintptr(texture), uintptr(internalformat), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func MapBufferRange(target uint32, offset int, length int, access uint32) unsafe.Pointer {\n\tret := C.glowMapBufferRange(gpMapBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length), (C.GLbitfield)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func MapBufferRange(target uint32, offset int, length int, access uint32) unsafe.Pointer {\n\tret := C.glowMapBufferRange(gpMapBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length), (C.GLbitfield)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func MapNamedBufferRange(buffer uint32, offset int, length int, access uint32) unsafe.Pointer {\n\tret, _, _ := syscall.Syscall6(gpMapNamedBufferRange, 4, uintptr(buffer), uintptr(offset), uintptr(length), uintptr(access), 0, 0)\n\treturn (unsafe.Pointer)(ret)\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBuffer, 2, uintptr(target), uintptr(buffer), 0)\n}",
"func BindBuffer(target uint32, buffer uint32) {\n C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBuffer(target Enum, b Buffer) {\n\tgl.BindBuffer(uint32(target), b.Value)\n}",
"func MapNamedBufferRange(buffer uint32, offset int, length int, access uint32) unsafe.Pointer {\n\tret := C.glowMapNamedBufferRange(gpMapNamedBufferRange, (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(length), (C.GLbitfield)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func MapNamedBufferRange(buffer uint32, offset int, length int, access uint32) unsafe.Pointer {\n\tret := C.glowMapNamedBufferRange(gpMapNamedBufferRange, (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(length), (C.GLbitfield)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func TextureBufferRange(texture uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tC.glowTextureBufferRange(gpTextureBufferRange, (C.GLuint)(texture), (C.GLenum)(internalformat), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func TextureBufferRange(texture uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tC.glowTextureBufferRange(gpTextureBufferRange, (C.GLuint)(texture), (C.GLenum)(internalformat), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tsyscall.Syscall6(gpBindBuffersBase, 4, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), 0, 0)\n}",
"func (o *GetFetchParams) bindIndex(rawData []string, hasKey bool, formats strfmt.Registry) error {\n\tvar raw string\n\tif len(rawData) > 0 {\n\t\traw = rawData[len(rawData)-1]\n\t}\n\n\t// Required: false\n\t// AllowEmptyValue: false\n\tif raw == \"\" { // empty values pass all other validations\n\t\treturn nil\n\t}\n\n\tvalue, err := swag.ConvertInt64(raw)\n\tif err != nil {\n\t\treturn errors.InvalidType(\"index\", \"query\", \"int64\", raw)\n\t}\n\to.Index = &value\n\n\tif err := o.validateIndex(formats); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tsyscall.Syscall6(gpBindFragDataLocationIndexed, 4, uintptr(program), uintptr(colorNumber), uintptr(index), uintptr(unsafe.Pointer(name)), 0, 0)\n}",
"func (b *ChangeBuffer) Range() <-chan *list.Element {\n\tch := make(chan *list.Element)\n\tgo func() {\n\t\tdefer close(ch)\n\t\tfor c := b.Front(); c != nil; c = c.Next() {\n\t\t\tch <- c\n\t\t}\n\t}()\n\treturn ch\n}",
"func (ba *FilterBitArray) SetRange(begin uint, end uint) {\n\t// Location of i in the array index is floor(i/byte_size) + 1. If it exceeds the\n\t// current byte array, we'll make a new one large enough to include the\n\t// specified bit-index\n\tstartByteIndex := ba.byteIndex(begin)\n\tendByteIndex := ba.byteIndex(end)\n\n\tif end >= ba.Capacity() {\n\t\tba.expand(endByteIndex + 1)\n\t}\n\n\tfirstByteMask := byteMask << (begin % byteSize)\n\tlastByteMask := byteMask >> ((byteSize - end - 1) % byteSize)\n\n\tif startByteIndex == endByteIndex {\n\t\t(*ba)[startByteIndex] |= (firstByteMask & lastByteMask)\n\t} else {\n\t\t(*ba)[startByteIndex] |= firstByteMask\n\t\tfor i := startByteIndex + 1; i < endByteIndex; i++ {\n\t\t\t(*ba)[i] = byteMask\n\t\t}\n\t\t(*ba)[endByteIndex] |= lastByteMask\n\t}\n}",
"func (q *Query) Range(indexName string, start, end interface{}) *Query {\n\t// For an index range search,\n\t// it is non-sensical to pass two nils\n\t// Set the error and return the query unchanged\n\tif start == nil && end == nil {\n\t\tq.err = errors.New(ErrNilInputsRangeIndexQuery)\n\t\treturn q\n\t}\n\tq.start = start\n\tq.end = end\n\tq.isIndexQuery = true\n\tq.indexName = []byte(indexName)\n\treturn q\n}",
"func BindBuffer(target Enum, buffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcbuffer, _ := (C.GLuint)(buffer), cgoAllocsUnknown\n\tC.glBindBuffer(ctarget, cbuffer)\n}",
"func FlushMappedBufferRange(target uint32, offset int, length int) {\n C.glowFlushMappedBufferRange(gpFlushMappedBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func (b *Binding) Set(buf uint32) {\n\tgl.BindBufferBase(gl.SHADER_STORAGE_BUFFER, b.uint32, buf)\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *int8) {\n C.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n C.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n C.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func bindAccessIndexor(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {\n\tparsed, err := ParsedABI(K_AccessIndexor)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn bind.NewBoundContract(address, *parsed, caller, transactor, filterer), nil\n}",
"func (geom Geometry) Buffer(distance float64, segments int) Geometry {\n\tnewGeom := C.OGR_G_Buffer(geom.cval, C.double(distance), C.int(segments))\n\treturn Geometry{newGeom}\n}",
"func (b *buffer) index(i uint32) *unsafe.Pointer {\n\treturn risky.Index(unsafe.Pointer(&b.data), ptrSize, uintptr(i))\n}",
"func FlushMappedBufferRange(target uint32, offset int, length int) {\n\tsyscall.Syscall(gpFlushMappedBufferRange, 3, uintptr(target), uintptr(offset), uintptr(length))\n}",
"func (debugging *debuggingOpenGL) BindBuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindBuffer\", target, buffer)\n\tdebugging.gl.BindBuffer(target, buffer)\n\tdebugging.recordExit(\"BindBuffer\")\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tC.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tC.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func (native *OpenGL) BindBuffer(target uint32, buffer uint32) {\n\tgl.BindBuffer(target, buffer)\n}",
"func (b *logEventBuffer) normalRange(start, end int) (int, int) {\n\tif end < start || end == 0 {\n\t\t// invalid range\n\t\treturn -1, -1\n\t}\n\tsize := b.bufferSize()\n\tif start == 0 {\n\t\t// we reduce start by 1 to make it easier to calculate the index,\n\t\t// but we need to ensure we don't go below 0.\n\t\tstart++\n\t}\n\tif start == end {\n\t\t// ensure we have at least one block in range\n\t\tend++\n\t}\n\tif end-start > size {\n\t\t// ensure we don't have more than the buffer size\n\t\tstart = (end - size) + 1\n\t}\n\tstart = (start - 1) % size\n\tend = end % size\n\n\treturn start, end\n}",
"func TransformFeedbackBufferRange(xfb uint32, index uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpTransformFeedbackBufferRange, 5, uintptr(xfb), uintptr(index), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func (l *Listener) EnterBound(ctx *parser.BoundContext) {\n\tl.lineStart(ctx)\n\t// if l.target.Close {\n\t// \tpanic(fmt.Sprintf(\n\t// \t\t\"%d:%d short lookup does not make a sense on bounded areas\",\n\t// \t\tctx.GetStart().GetLine(), ctx.GetStart().GetColumn()))\n\t// }\n\tlower, _ := strconv.Atoi(ctx.IntLit(0).GetText())\n\tupper, _ := strconv.Atoi(ctx.IntLit(1).GetText())\n\tif lower == 0 {\n\t\ttoken := ctx.IntLit(0).GetSymbol()\n\t\tpanic(fmt.Sprintf(\"%d:%d offset value must be greater than 0\", token.GetLine(), token.GetColumn()+1))\n\t}\n\tif upper < lower {\n\t\ttoken := ctx.IntLit(1).GetSymbol()\n\t\tpanic(fmt.Sprintf(\n\t\t\t\"%d:%d upper bound must be greater than lower\",\n\t\t\ttoken.GetLine(),\n\t\t\ttoken.GetColumn()+1,\n\t\t))\n\t}\n\tl.target.SetBound(lower, upper)\n}",
"func (gl *WebGL) BindBuffer(target GLEnum, buffer WebGLBuffer) {\n\tgl.context.Call(\"bindBuffer\", target, buffer)\n}",
"func (idx *Index) FitToBounds(bounds *dvid.OptionalBounds) error {\n\tif bounds == nil {\n\t\treturn nil\n\t}\n\tfor zyx := range idx.Blocks {\n\t\tx, y, z := DecodeBlockIndex(zyx)\n\t\tblockPt := dvid.ChunkPoint3d{x, y, z}\n\t\tif bounds.BeyondZ(blockPt) {\n\t\t\tbreak\n\t\t}\n\t\tif bounds.Outside(blockPt) {\n\t\t\tcontinue\n\t\t}\n\t\tdelete(idx.Blocks, zyx)\n\t}\n\treturn nil\n}",
"func (r sUintRangeIterator) Range(handlers ...UintHandler) error {\n\treturn UintRange(r.iter, handlers...)\n}",
"func (s *BasevhdlListener) EnterExplicit_range(ctx *Explicit_rangeContext) {}",
"func BufferData(target Enum, src []byte, usage Enum) {\n\tgl.BufferData(uint32(target), int(len(src)), gl.Ptr(&src[0]), uint32(usage))\n}",
"func (s *BasejossListener) EnterRange_(ctx *Range_Context) {}",
"func (s *Statement) Bind(start_column int, values... interface{}) (e error, index int) {\n\tcolumn := QueryParameter(start_column)\n\tfor i, v := range values {\n\t\tcolumn++\n\t\tif e = column.Bind(s, v); e != nil {\n\t\t\tindex = i\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}",
"func FlushMappedNamedBufferRange(buffer uint32, offset int, length int) {\n\tsyscall.Syscall(gpFlushMappedNamedBufferRange, 3, uintptr(buffer), uintptr(offset), uintptr(length))\n}",
"func FlushMappedBufferRange(target uint32, offset int, length int) {\n\tC.glowFlushMappedBufferRange(gpFlushMappedBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func FlushMappedBufferRange(target uint32, offset int, length int) {\n\tC.glowFlushMappedBufferRange(gpFlushMappedBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func (self Source) SetBuffer(buffer Buffer) {\n\tself.Seti(AlBuffer, int32(buffer))\n}",
"func (buf *ListBuffer) Set(idx BufferIndex, item Item) (*error.Error) {\n\tinRange, initialized := buf.legalIndex(idx)\n\tif !inRange {\n\t\tdesc := fmt.Sprintf(\n\t\t\t\"idx, %d, is out of range for IndexBuffer of length %d.\",\n\t\t\tidx, len(buf.Buffer),\n\t\t)\n\t\treturn error.New(error.Value, desc)\n\t} else if !initialized {\n\t\tdesc := fmt.Sprintf(\n\t\t\t\"Item at idx, %d, has the Type value Uninitialized.\", idx,\n\t\t)\n\t\treturn error.New(error.Value, desc)\n\t}\n\n\tbuf.Buffer[idx].Item = item\n\treturn nil\n}",
"func (r *baseNsRange) Set(start, end int) { r.start, r.end = start, end }",
"func (i *inflight) CommitRange(minIndex, maxIndex uint64) {\n\ti.Lock()\n\tdefer i.Unlock()\n\n\t// Update the minimum index\n\tminIndex = max(i.minCommit, minIndex)\n\n\t// Commit each index\n\tfor idx := minIndex; idx <= maxIndex; idx++ {\n\t\ti.commit(idx)\n\t}\n}",
"func OptBuffer(buffer int) func(*Beam) {\n\treturn func(b *Beam) { b.buffer = buffer }\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func (it iterator) index(b *ringBuf) uint64 {\n\treturn b.buf[it].Index\n}",
"func (desc BindDescriptor) Index() int {\n\tindex := int(desc>>bindIndexShift) - 1\n\t// debugf(\"BindDescriptor=%v, class=%v, index=%v\", desc, desc.Class(), index)\n\treturn index\n}",
"func (p *Buffer) Rewind() {\n\tp.index = 0\n}",
"func poolSetIndex(a interface{}, i int) {\n\ta.(*freeClientPoolEntry).index = i\n}",
"func (r Range) iterate(fn func(*buffer.View)) {\n\tr.pk.buf.SubApply(r.offset, r.length, fn)\n}",
"func BufferSubData(target Enum, offset int, data []byte) {\n\tgl.BufferSubData(uint32(target), offset, int(len(data)), gl.Ptr(&data[0]))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tsyscall.Syscall6(gpBindVertexBuffer, 4, uintptr(bindingindex), uintptr(buffer), uintptr(offset), uintptr(stride), 0, 0)\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tsyscall.Syscall(gpBindSampler, 2, uintptr(unit), uintptr(sampler), 0)\n}",
"func TransformFeedbackBufferRange(xfb uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowTransformFeedbackBufferRange(gpTransformFeedbackBufferRange, (C.GLuint)(xfb), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func TransformFeedbackBufferRange(xfb uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowTransformFeedbackBufferRange(gpTransformFeedbackBufferRange, (C.GLuint)(xfb), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func (ex *Exchange) Bind(cq, routingKey string) {\n\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n C.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func (s *Serializer) Bind(val interface{}) *BoundVariable {\n\tb, ok := val.(*BoundVariable)\n\tif !ok {\n\t\tb = Bind(val)\n\t}\n\n\tif _, ok := s.vpos[b]; !ok {\n\t\ts.vals = append(s.vals, b)\n\t\ts.vpos[b] = len(s.vals)\n\t}\n\n\treturn b\n}",
"func (x *Index) Lookup(s []byte, n int) (result []int) {}",
"func (w *Wrapper) Bind(destination interface{}) *Wrapper {\n\tw.destination = destination\n\treturn w\n}",
"func (ns *EsIndexer) IndexBlocksInRange(fromBlockHeight uint64, toBlockHeight uint64) {\n\tctx := context.Background()\n\tchannel := make(chan EsType, 1000)\n\tdone := make(chan struct{})\n\ttxChannel := make(chan EsType, 20000)\n\tnameChannel := make(chan EsType, 5000)\n\tgenerator := func() error {\n\t\tdefer close(channel)\n\t\tdefer close(done)\n\t\tns.log.Info().Msg(fmt.Sprintf(\"Indexing %d missing blocks [%d..%d]\", (1 + toBlockHeight - fromBlockHeight), fromBlockHeight, toBlockHeight))\n\t\tfor blockHeight := fromBlockHeight; blockHeight <= toBlockHeight; blockHeight++ {\n\t\t\tblockQuery := make([]byte, 8)\n\t\t\tbinary.LittleEndian.PutUint64(blockQuery, uint64(blockHeight))\n\t\t\tblock, err := ns.grpcClient.GetBlock(context.Background(), &types.SingleBytes{Value: blockQuery})\n\t\t\tif err != nil {\n\t\t\t\tns.log.Warn().Uint64(\"blockHeight\", blockHeight).Err(err).Msg(\"Failed to get block\")\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif len(block.Body.Txs) > 0 {\n\t\t\t\tns.IndexTxs(block, block.Body.Txs, txChannel, nameChannel)\n\t\t\t}\n\t\t\td := ConvBlock(block)\n\t\t\tselect {\n\t\t\tcase channel <- d:\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn ctx.Err()\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}\n\n\twaitForTx := func() error {\n\t\tdefer close(txChannel)\n\t\t<-done\n\t\treturn nil\n\t}\n\tgo BulkIndexer(ctx, ns.log, ns.client, txChannel, waitForTx, ns.indexNamePrefix+\"tx\", \"tx\", 10000, false)\n\n\twaitForNames := func() error {\n\t\tdefer close(nameChannel)\n\t\t<-done\n\t\treturn nil\n\t}\n\tgo BulkIndexer(ctx, ns.log, ns.client, nameChannel, waitForNames, ns.indexNamePrefix+\"name\", \"name\", 2500, true)\n\n\tBulkIndexer(ctx, ns.log, ns.client, channel, generator, ns.indexNamePrefix+\"block\", \"block\", 500, false)\n\n\tns.OnSyncComplete()\n}",
"func (buf *ListBuffer) legalIndex(idx BufferIndex) (inRange, initialized bool) {\n\tinRange = idx >= 0 && idx < BufferIndex(len(buf.Buffer))\n\tif inRange {\n\t\tinitialized = buf.Buffer[idx].Item.Type != Uninitialized\n\t} else {\n\t\tinitialized = true\n\t}\n\treturn inRange, initialized\n}",
"func MapBuffer(target uint32, access uint32) unsafe.Pointer {\n\tret, _, _ := syscall.Syscall(gpMapBuffer, 2, uintptr(target), uintptr(access), 0)\n\treturn (unsafe.Pointer)(ret)\n}",
"func DrawRangeElementsBaseVertex(mode uint32, start uint32, end uint32, count int32, xtype uint32, indices unsafe.Pointer, basevertex int32) {\n C.glowDrawRangeElementsBaseVertex(gpDrawRangeElementsBaseVertex, (C.GLenum)(mode), (C.GLuint)(start), (C.GLuint)(end), (C.GLsizei)(count), (C.GLenum)(xtype), indices, (C.GLint)(basevertex))\n}",
"func MapBuffer(target gl.Enum, access gl.Enum) {\n\tgl.MapBuffer(gl.Enum(target), gl.Enum(access))\n}",
"func (o *OGN) Range(ctx context.Context, f func(Data)) error {\n\tfor ctx.Err() == nil {\n\t\tvalue, ok := o.next()\n\t\tif !ok {\n\t\t\treturn nil\n\t\t}\n\t\tif value != nil && ctx.Err() == nil {\n\t\t\tf(*value)\n\t\t}\n\t}\n\treturn ctx.Err()\n}",
"func (c *Computer) MapRange(from, to int, rfn data.SoftRead, wfn data.SoftWrite) {\n\tfor addr := from; addr < to; addr++ {\n\t\tc.smap.SetRead(addr, rfn)\n\t\tc.smap.SetWrite(addr, wfn)\n\t}\n}",
"func (r *ShardReader) seek(requestedRange biopb.CoordRange) {\n\t// For each field (except coord; more on that below), find the subset of index\n\t// blocks that contain requestedRange.\n\tcoordRange := requestedRange\n\tfor f := int(gbam.FieldCoord + 1); f < gbam.NumFields; f++ {\n\t\tfr := r.fieldReaders[f]\n\t\tif fr == nil {\n\t\t\tcontinue\n\t\t}\n\t\tfor _, b := range fr.index.Blocks {\n\t\t\tif blockIntersectsRange(b.StartAddr, b.EndAddr, requestedRange) {\n\t\t\t\tfr.blocks = append(fr.blocks, b)\n\t\t\t}\n\t\t}\n\t\tif len(fr.blocks) == 0 {\n\t\t\t// There's no record to be read in the range. We'll report EOF when\n\t\t\t// reading later. Usually, if fr.blocks is empty for one field, it's\n\t\t\t// empty for any other field too.\n\t\t\treturn\n\t\t}\n\t\tcoordRange.Start = coordRange.Start.Min(fr.blocks[0].StartAddr)\n\t}\n\n\t// We need to advance the read pointer of each field to the first record at or\n\t// after requestedRange.Start. We do the following:\n\t//\n\t// 1. Assume that (say) FieldSeq has three recordio blocks {b0, b1, b2}, that\n\t// intersect with requestedRange.\n\t//\n\t// 2. Read the recordio blocks for FieldCoord so that they cover (b0,b1,b2).\n\t// Then sequentially scan these blocks and find b0.StartAddr.\n\t//\n\t// 3. Sequentially scan both FieldCoord and FieldSeq simultaneously, until the\n\t// the read pointer for FieldCoord is at requestedRange.Start.\n\t//\n\t// The below code does this for all the fields in parallel.\n\t// Read FieldCoord so that it covers all the recordioblocks read by other\n\t// fields.\n\tfr := r.fieldReaders[gbam.FieldCoord]\n\tfor _, b := range fr.index.Blocks {\n\t\tif blockIntersectsRange(b.StartAddr, b.EndAddr, coordRange) {\n\t\t\tfr.blocks = append(fr.blocks, b)\n\t\t}\n\t}\n\tif len(fr.blocks) == 0 {\n\t\t// This shouldn't happen, unless is the file is corrupt\n\t\terr := errors.Errorf(\"%v: Cannot find blocks for coords in range %+v, index: %+v\", fr.label, coordRange, fr.index)\n\t\tvlog.Error(err)\n\t\tr.err.Set(err)\n\t\treturn\n\t}\n\n\t// readingField is for eliding calls to addr.GE() below in the fast path.\n\tvar readingField [gbam.NumFields]bool\n\n\t// getReader() returns a fieldReader for the given reader, or nil if the field\n\t// is dropped by the user, or all its data blocks are after \"addr\"\n\tgetReader := func(f gbam.FieldType, addr biopb.Coord) *fieldReader {\n\t\tfr = r.fieldReaders[f]\n\t\tif fr != nil {\n\t\t\tif !fr.maybeReadNextBlock() {\n\t\t\t\tvlog.Fatalf(\"%v: EOF while reading %+v\", fr.label, addr)\n\t\t\t}\n\t\t\tif readingField[f] {\n\t\t\t\treturn fr\n\t\t\t}\n\t\t\tif addr.GE(fr.fb.index.StartAddr) {\n\t\t\t\treadingField[f] = true\n\t\t\t\treturn fr\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}\n\n\t// Seek the field pointers to requestedRange.Start\n\tfor {\n\t\tif !r.maybeReadNextCoordBlock() {\n\t\t\t// No data to read\n\t\t\tvlog.VI(1).Infof(\"Reached end of data, %+v\", r.addrGenerator)\n\t\t\treturn\n\t\t}\n\t\tfr := r.fieldReaders[gbam.FieldCoord]\n\t\trefID, pos := fr.fb.peekCoordField()\n\t\tsave := r.addrGenerator\n\t\taddr := r.addrGenerator.Generate(refID, pos)\n\t\tif addr.GE(requestedRange.Start) {\n\t\t\tr.addrGenerator = save\n\t\t\treturn\n\t\t}\n\t\tfr.fb.readCoordField()\n\t\tif fr := getReader(gbam.FieldFlags, addr); fr != nil {\n\t\t\tfr.fb.readFlagsField()\n\t\t}\n\t\tif fr := getReader(gbam.FieldMapq, addr); fr != nil {\n\t\t\tfr.fb.readMapqField()\n\t\t}\n\t\tif fr := getReader(gbam.FieldMateRefID, addr); fr != nil {\n\t\t\tfr.fb.readVarintDeltaField()\n\t\t}\n\t\tif fr := 
getReader(gbam.FieldMatePos, addr); fr != nil {\n\t\t\tfr.fb.readVarintDeltaField()\n\t\t}\n\t\tif fr := getReader(gbam.FieldTempLen, addr); fr != nil {\n\t\t\tfr.fb.readVarintField()\n\t\t}\n\t\tif fr := getReader(gbam.FieldCigar, addr); fr != nil {\n\t\t\tfr.fb.skipCigarField()\n\t\t}\n\t\tif fr := getReader(gbam.FieldName, addr); fr != nil {\n\t\t\tfr.fb.skipStringDeltaField()\n\t\t}\n\t\tif fr := getReader(gbam.FieldSeq, addr); fr != nil {\n\t\t\tfr.fb.skipSeqField()\n\t\t}\n\t\tif fr := getReader(gbam.FieldQual, addr); fr != nil {\n\t\t\tfr.fb.skipQualField()\n\t\t}\n\t\tif fr := getReader(gbam.FieldAux, addr); fr != nil {\n\t\t\tfr.fb.skipAuxField()\n\t\t}\n\t}\n}",
"func FlushMappedNamedBufferRange(buffer uint32, offset int, length int) {\n\tC.glowFlushMappedNamedBufferRange(gpFlushMappedNamedBufferRange, (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func FlushMappedNamedBufferRange(buffer uint32, offset int, length int) {\n\tC.glowFlushMappedNamedBufferRange(gpFlushMappedNamedBufferRange, (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func NewRange(meta RangeMetadata, engine Engine, allocator *allocator, gossip *gossip.Gossip) *Range {\n\tr := &Range{\n\t\tMeta: meta,\n\t\tengine: engine,\n\t\tallocator: allocator,\n\t\tgossip: gossip,\n\t\tpending: make(chan *LogEntry, 100 /* TODO(spencer): what's correct value? */),\n\t\tcloser: make(chan struct{}),\n\t}\n\treturn r\n}",
"func (uni *Uniform1fv) TransferIdx(gl *GLS, pos, count int) {\n\n\tgl.Uniform1fv(uni.LocationIdx(gl, pos), 1, uni.v[pos:])\n}"
] | [
"0.728777",
"0.7266032",
"0.7006068",
"0.6596149",
"0.6426957",
"0.62520826",
"0.6189875",
"0.6189875",
"0.61769366",
"0.5988925",
"0.59874284",
"0.5968938",
"0.59425527",
"0.5915116",
"0.58076537",
"0.57425576",
"0.57425576",
"0.5718545",
"0.56829077",
"0.56829077",
"0.567064",
"0.567064",
"0.56485206",
"0.5631993",
"0.54507595",
"0.54350233",
"0.5398574",
"0.5398574",
"0.52581",
"0.52581",
"0.5218289",
"0.5193478",
"0.51687974",
"0.5156708",
"0.5132237",
"0.5116173",
"0.51078105",
"0.51041955",
"0.5078439",
"0.5078439",
"0.50670373",
"0.50644225",
"0.50612533",
"0.5058464",
"0.5044655",
"0.50184363",
"0.50100106",
"0.5005755",
"0.49717763",
"0.49413237",
"0.49413237",
"0.49201763",
"0.48878416",
"0.4859477",
"0.4857651",
"0.4849976",
"0.4801293",
"0.47905806",
"0.478474",
"0.47692364",
"0.4763296",
"0.47620204",
"0.47459736",
"0.47436723",
"0.47436723",
"0.4739635",
"0.473616",
"0.47350815",
"0.47208914",
"0.47127184",
"0.4709458",
"0.4709458",
"0.4703856",
"0.46983188",
"0.46700934",
"0.46571165",
"0.46344268",
"0.46214497",
"0.461001",
"0.46051952",
"0.45887592",
"0.45887592",
"0.45835465",
"0.458072",
"0.45797008",
"0.45746958",
"0.45731676",
"0.45623094",
"0.4554075",
"0.45540217",
"0.45478",
"0.45394328",
"0.45105174",
"0.45103133",
"0.44988668",
"0.44965833",
"0.44965833",
"0.44920865",
"0.44854608"
] | 0.6876788 | 4 |
bind one or more buffer objects to a sequence of indexed buffer targets | func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {
C.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n C.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tsyscall.Syscall6(gpBindBuffersRange, 6, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(sizes)))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n C.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tsyscall.Syscall6(gpBindBuffersBase, 4, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), 0, 0)\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n C.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpBindBufferRange, 5, uintptr(target), uintptr(index), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBufferBase, 3, uintptr(target), uintptr(index), uintptr(buffer))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func (buffer Buffer) BindBufferRange(target gl.Enum, index uint, offset int, size uint) {\n\tgl.BindBufferRange(gl.Enum(target), gl.Uint(index), gl.Uint(buffer), gl.Intptr(offset), gl.Sizeiptr(size))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func (s *Stream) bindOps() {\n\ts.log.Print(\"binding operators\")\n\tif s.ops == nil {\n\t\treturn\n\t}\n\tfor i, op := range s.ops {\n\t\tif i == 0 { // link 1st to source\n\t\t\top.SetInput(s.source.GetOutput())\n\t\t} else {\n\t\t\top.SetInput(s.ops[i-1].GetOutput())\n\t\t}\n\t}\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n C.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n C.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (buffer Buffer) BindBufferBase(target gl.Enum, index uint) {\n\tgl.BindBufferBase(gl.Enum(target), gl.Uint(index), gl.Uint(buffer))\n}",
"func (buffer Buffer) Bind(target gl.Enum) {\n\tgl.BindBuffer(gl.Enum(target), gl.Uint(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBuffer, 2, uintptr(target), uintptr(buffer), 0)\n}",
"func BindBuffer(target uint32, buffer uint32) {\n C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tsyscall.Syscall6(gpBindVertexBuffers, 5, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(strides)), 0)\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindBuffer(target Enum, b Buffer) {\n\tgl.BindBuffer(uint32(target), b.Value)\n}",
"func BindBuffer(target Enum, buffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcbuffer, _ := (C.GLuint)(buffer), cgoAllocsUnknown\n\tC.glBindBuffer(ctarget, cbuffer)\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n C.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindImageTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tsyscall.Syscall(gpBindSamplers, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(samplers)))\n}",
"func (g *Gaffer) AddBuffer(u *Update) {\n\n\tfor _, v := range u.entities {\n\t\tg.AddEntity(v)\n\t}\n\n\tfor _, v := range u.edges {\n\t\tg.AddEdge(v)\n\t}\n\n}",
"func bind(vm *VM, block block, factory bindFactory) {\n\tfor i := block.first(); i != 0; i = i.next(vm) {\n\t\tptr, ok := i.ptr(vm)\n\t\tif ok {\n\t\t\tobj := value(vm.read(ptr))\n\t\t\tkind := obj.kind()\n\t\t\tvm.bindFunc[kind](vm, ptr, factory)\n\t\t}\n\t}\n}",
"func Bindables(cmdr command.Commander, driver gxui.Driver, theme *basic.Theme) []bind.Bindable {\n\tvar b []bind.Bindable\n\tb = append(b, project.Bindables(driver, theme)...)\n\tb = append(b,\n\t\tNewFileOpener(driver, theme),\n\t\tQuit{},\n\t\tFullscreen{},\n\t\t&caret.Mover{},\n\t\t&scroll.Scroller{},\n\t\tfocus.NewLocation(driver),\n\t\tFileHook{Theme: theme},\n\t\tEditHook{Theme: theme, Driver: driver},\n\t\tViewHook{},\n\t\tNavHook{Commander: cmdr},\n\t)\n\tb = append(b, history.Bindables(cmdr, driver, theme)...)\n\treturn b\n}",
"func (j *JSONSerializer) Bind(events ...Event) {\n\tfor _, event := range events {\n\t\teventType, t := EventType(event)\n\t\tj.eventTypes[eventType] = t\n\t}\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n C.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n C.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func AssignBuf(dst, src any, buf AccumulativeBuffer) (ok bool) {\n\tfor _, fn := range assignFnRegistry {\n\t\tif ok = fn(dst, src, buf); ok {\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tC.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tC.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func NewBuffer(aSlice interface{}) *Buffer {\n return &Buffer{buffer: sliceValue(aSlice, false), handler: valueHandler{}}\n}",
"func (native *OpenGL) BindBuffer(target uint32, buffer uint32) {\n\tgl.BindBuffer(target, buffer)\n}",
"func (s *Statement) BindAll(values... interface{}) (e error, index int) {\n\treturn s.Bind(0, values...)\n}",
"func (du *DescriptorSet) AddBuffer(dstBinding int, dtype vk.DescriptorType, b *Buffer, offset int) {\n\tvar descriptorBufferInfo = vk.DescriptorBufferInfo{}\n\tdescriptorBufferInfo.Buffer = b.VKBuffer\n\tdescriptorBufferInfo.Offset = vk.DeviceSize(offset)\n\tdescriptorBufferInfo.Range = vk.DeviceSize(b.Size)\n\n\tvar writeDescriptorSet = vk.WriteDescriptorSet{}\n\twriteDescriptorSet.SType = vk.StructureTypeWriteDescriptorSet\n\twriteDescriptorSet.DstBinding = uint32(dstBinding) // write to the first, and only binding.\n\twriteDescriptorSet.DescriptorCount = 1 // update a single descriptor.\n\twriteDescriptorSet.DescriptorType = dtype\n\twriteDescriptorSet.PBufferInfo = []vk.DescriptorBufferInfo{descriptorBufferInfo}\n\n\tif du.VKWriteDiscriptorSet == nil {\n\t\tdu.VKWriteDiscriptorSet = make([]vk.WriteDescriptorSet, 0)\n\t}\n\tdu.VKWriteDiscriptorSet = append(du.VKWriteDiscriptorSet, writeDescriptorSet)\n}",
"func (Operators) Buffer(notifier Observable) OperatorFunc {\n\treturn func(source Observable) Observable {\n\t\top := bufferOperator{notifier}\n\t\treturn source.Lift(op.Call)\n\t}\n}",
"func (sr *Stackers) Bind(r Publisher, cl bool) {\n\tvar lr Connector\n\tvar err error\n\n\tif lr, err = sr.Last(); err != nil {\n\t\tsr.Publisher.Bind(r, cl)\n\t\tsr.ro.Lock()\n\t\t{\n\t\t\tsr.stacks = append(sr.stacks, r)\n\t\t}\n\t\tsr.ro.Unlock()\n\t\treturn\n\t}\n\n\tlr.Bind(r, cl)\n\tsr.ro.Lock()\n\t{\n\t\tsr.stacks = append(sr.stacks, r)\n\t}\n\tsr.ro.Unlock()\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (ab *Buffer) AddMany(ctx context.Context, objs ...interface{}) {\n\tif ab.Tracer != nil {\n\t\tfinisher := ab.Tracer.StartAddMany(ctx)\n\t\tdefer finisher.Finish(nil)\n\t}\n\tvar bufferLength int\n\tif ab.Stats != nil {\n\t\tab.maybeStatCount(ctx, MetricAddMany, 1)\n\t\tab.maybeStatCount(ctx, MetricAddManyItemCount, len(objs))\n\t\tstart := time.Now().UTC()\n\t\tdefer func() {\n\t\t\tab.maybeStatGauge(ctx, MetricBufferLength, float64(bufferLength))\n\t\t\tab.maybeStatElapsed(ctx, MetricAddManyElapsed, start)\n\t\t}()\n\t}\n\n\tvar flushes [][]interface{}\n\tab.contentsMu.Lock()\n\tbufferLength = ab.contents.Len()\n\tfor _, obj := range objs {\n\t\tab.contents.Enqueue(obj)\n\t\tif ab.contents.Len() >= ab.MaxLen {\n\t\t\tflushes = append(flushes, ab.contents.Drain())\n\t\t}\n\t}\n\tab.contentsMu.Unlock()\n\tfor _, flush := range flushes {\n\t\tab.unsafeFlushAsync(ctx, flush)\n\t}\n}",
"func (debugging *debuggingOpenGL) BindBuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindBuffer\", target, buffer)\n\tdebugging.gl.BindBuffer(target, buffer)\n\tdebugging.recordExit(\"BindBuffer\")\n}",
"func (p *InfluxDBProxy) BufferSeries(series *influxdb.Series) {\n\t// add series to buffer map\n\tp.buflock.Lock()\n\tnormalizedIdx, exists := p.bufIdx[series.Name]\n\tif !exists {\n\t\tp.bufIdx[series.Name] = len(p.buf)\n\t\tp.buf = append(p.buf, series)\n\t} else {\n\t\tnormalizedSeries := p.buf[normalizedIdx]\n\t\tmergeSeries(normalizedSeries, series)\n\t}\n\tp.buflock.Unlock()\n}",
"func (c *Client) Bind(proto uint16, ch ReadHandler) {\n\tc.events[proto] = append(c.events[proto], ch)\n}",
"func (b *Buffer) Merge(bs ...Buffer) {\n\tfor _, buf := range bs {\n\t\tfor p, v := range buf.CellMap {\n\t\t\tb.Set(p.X, p.Y, v)\n\t\t}\n\t\tb.SetArea(b.Area.Union(buf.Area))\n\t}\n}",
"func (gl *WebGL) BindBuffer(target GLEnum, buffer WebGLBuffer) {\n\tgl.context.Call(\"bindBuffer\", target, buffer)\n}",
"func (c *HAProxyController) handleBinds() (err error) {\n\tvar errors utils.Errors\n\tfrontends := make(map[string]int64, 2)\n\tprotos := make(map[string]string, 2)\n\tif !c.osArgs.DisableHTTP {\n\t\tfrontends[c.Cfg.FrontHTTP] = c.osArgs.HTTPBindPort\n\t}\n\tif !c.osArgs.DisableHTTPS {\n\t\tfrontends[c.Cfg.FrontHTTPS] = c.osArgs.HTTPSBindPort\n\t}\n\tif !c.osArgs.DisableIPV4 {\n\t\tprotos[\"v4\"] = c.osArgs.IPV4BindAddr\n\t}\n\tif !c.osArgs.DisableIPV6 {\n\t\tprotos[\"v6\"] = c.osArgs.IPV6BindAddr\n\n\t\t// IPv6 not disabled, so add v6 listening to stats frontend\n\t\terrors.Add(c.Client.FrontendBindCreate(\"stats\",\n\t\t\tmodels.Bind{\n\t\t\t\tName: \"v6\",\n\t\t\t\tAddress: \":::1024\",\n\t\t\t\tV4v6: false,\n\t\t\t}))\n\t}\n\tfor ftName, ftPort := range frontends {\n\t\tfor proto, addr := range protos {\n\t\t\tbind := models.Bind{\n\t\t\t\tName: proto,\n\t\t\t\tAddress: addr,\n\t\t\t\tPort: utils.PtrInt64(ftPort),\n\t\t\t}\n\t\t\tif err = c.Client.FrontendBindEdit(ftName, bind); err != nil {\n\t\t\t\terrors.Add(c.Client.FrontendBindCreate(ftName, bind))\n\t\t\t}\n\t\t}\n\t}\n\treturn errors.Result()\n}",
"func (s *Statement) Bind(start_column int, values... interface{}) (e error, index int) {\n\tcolumn := QueryParameter(start_column)\n\tfor i, v := range values {\n\t\tcolumn++\n\t\tif e = column.Bind(s, v); e != nil {\n\t\t\tindex = i\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}",
"func BufferInit(target Enum, size int, usage Enum) {\n\tgl.BufferData(uint32(target), size, nil, uint32(usage))\n}",
"func (b *Buffer) Attach(buffer []byte) {\n b.AttachBytes(buffer, 0, len(buffer))\n}",
"func WithBuffer(n int) JSONSourceOption {\n\treturn func(j *JSONSource) {\n\t\tif n > -1 {\n\t\t\tj.records = make(chan record, n)\n\t\t}\n\t}\n}",
"func addBindings(a, b bindingFrame) bindingFrame {\n\tresult := make(bindingFrame, len(a))\n\n\tfor k, v := range a {\n\t\tresult[k] = v\n\t}\n\n\tfor k, v := range b {\n\t\tresult[k] = v\n\t}\n\n\treturn result\n}",
"func (b *Binding) Set(buf uint32) {\n\tgl.BindBufferBase(gl.SHADER_STORAGE_BUFFER, b.uint32, buf)\n}",
"func BufferPool(pool httputil.BufferPool) optSetter {\n\treturn func(f *Forwarder) error {\n\t\tf.bufferPool = pool\n\t\treturn nil\n\t}\n}",
"func BufferData(target Enum, src []byte, usage Enum) {\n\tgl.BufferData(uint32(target), int(len(src)), gl.Ptr(&src[0]), uint32(usage))\n}",
"func (tb *TransactionBuffer) SetNewTransactionBuffer(txs []TxPublish) {\n\ttb.Clear()\n\tfor _, t := range txs {\n\t\ttb.AddTx(t)\n\t}\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n proto.NewProxyWithBuffer(buffer),\n NewStructSimpleModel(buffer),\n NewStructOptionalModel(buffer),\n NewStructNestedModel(buffer),\n NewStructBytesModel(buffer),\n NewStructArrayModel(buffer),\n NewStructVectorModel(buffer),\n NewStructListModel(buffer),\n NewStructSetModel(buffer),\n NewStructMapModel(buffer),\n NewStructHashModel(buffer),\n NewStructHashExModel(buffer),\n NewStructEmptyModel(buffer),\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyStructSimpleFunc(func(model *StructSimpleModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructOptionalFunc(func(model *StructOptionalModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructNestedFunc(func(model *StructNestedModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructBytesFunc(func(model *StructBytesModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructArrayFunc(func(model *StructArrayModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructVectorFunc(func(model *StructVectorModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructListFunc(func(model *StructListModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructSetFunc(func(model *StructSetModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructMapFunc(func(model *StructMapModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructHashFunc(func(model *StructHashModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructHashExFunc(func(model *StructHashExModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructEmptyFunc(func(model *StructEmptyModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func TexBufferRange(target uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpTexBufferRange, 5, uintptr(target), uintptr(internalformat), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func (c Container) bind(resolver interface{}, singleton bool) error {\n\treflectedResolver := reflect.TypeOf(resolver)\n\tif reflectedResolver.Kind() != reflect.Func {\n\t\treturn errors.New(\"container: the resolver must be a function\")\n\t}\n\n\tfor i := 0; i < reflectedResolver.NumOut(); i++ {\n\t\tif singleton {\n\t\t\tinstance, err := c.invoke(resolver)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tc[reflectedResolver.Out(i)] = binding{resolver: resolver, instance: instance}\n\t\t} else {\n\t\t\tc[reflectedResolver.Out(i)] = binding{resolver: resolver}\n\t\t}\n\t}\n\n\treturn nil\n}",
"func (bw *BufferedWriterMongo) writeBuffer() (err error) {\n\n\tif len(bw.buffer) == 0 {\n\t\treturn nil\n\t}\n\n\tcoll := bw.client.Database(bw.db).Collection(bw.collection)\n\t_, err = coll.InsertMany(bw.ctx, bw.buffer)\n\treturn err\n}",
"func GenBuffers(buffers []Buffer) {\n\tgl.GenBuffers(gl.Sizei(len(buffers)), (*gl.Uint)(&buffers[0]))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tsyscall.Syscall6(gpBindVertexBuffer, 4, uintptr(bindingindex), uintptr(buffer), uintptr(offset), uintptr(stride), 0, 0)\n}",
"func annotateBuffer(text *string) []Annotation {\n\ttextLength := primitives.Cursor(len(*text))\n\treturn []Annotation{\n\t\t{\n\t\t\tTag: BUFFER,\n\t\t\tRegion: primitives.Region{\n\t\t\t\tLeft: primitives.Span{A: 0, B: 0},\n\t\t\t\tRight: primitives.Span{A: textLength, B: textLength},\n\t\t\t},\n\t\t},\n\t}\n}",
"func (b *defaultByteBuffer) AppendBuffer(buf ByteBuffer) (n int, err error) {\n\tsubBuf := buf.(*defaultByteBuffer)\n\tn = subBuf.writeIdx\n\tb.ensureWritable(n)\n\tcopy(b.buff[b.writeIdx:b.writeIdx+n], subBuf.buff)\n\tb.writeIdx += n\n\tbuf.Release(nil)\n\treturn\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n NewOrderModel(buffer),\n NewBalanceModel(buffer),\n NewAccountModel(buffer),\n nil,\n nil,\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyOrderFunc(func(model *OrderModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyBalanceFunc(func(model *BalanceModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyAccountFunc(func(model *AccountModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func (c *HTTPCollector) createBuffer() []*zipkincore.Span {\n\treturn c.batchPool.Get().([]*zipkincore.Span)\n}",
"func (ab *AutoflushBuffer) AddMany(objs ...interface{}) {\n\tab.Lock()\n\tdefer ab.Unlock()\n\n\tfor _, obj := range objs {\n\t\tab.Contents.Enqueue(obj)\n\t\tif ab.Contents.Len() >= ab.MaxLen {\n\t\t\tab.flushUnsafeAsync(ab.Background(), ab.Contents.Drain())\n\t\t}\n\t}\n}",
"func (geom Geometry) Buffer(distance float64, segments int) Geometry {\n\tnewGeom := C.OGR_G_Buffer(geom.cval, C.double(distance), C.int(segments))\n\treturn Geometry{newGeom}\n}",
"func (al *AudioListener) setBuffer(size int) {\n\tal.Lock()\n\tdefer al.Unlock()\n\n\tal.buffer = make([]gumble.AudioPacket, 0, size)\n}",
"func (c webgl) BufferDataX(target Enum, d interface{}, usage Enum) {\n\tc.ctx.Call(\"bufferData\", target, conv(d), usage)\n}",
"func bindBindings(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {\n\tparsed, err := abi.JSON(strings.NewReader(BindingsABI))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil\n}",
"func TextureBufferRange(texture uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpTextureBufferRange, 5, uintptr(texture), uintptr(internalformat), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func (c *Client) QueueBind(\n\texchange, queue, key string,\n\topts *QueueBindOpts,\n\tconnOpts *ConnectOpts) error {\n\n\tdefaultOpts := DefaultQueueBindOpts()\n\n\tif opts != nil {\n\t\tdefaultOpts = opts\n\t}\n\n\tdefaultConnOpts := DefaultConnectOpts()\n\tif connOpts != nil {\n\t\tdefaultConnOpts = connOpts\n\t}\n\n\tconn, err := c.connect(defaultConnOpts)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer conn.Close()\n\n\tch, err := conn.Channel()\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer ch.Close()\n\n\terr = ch.QueueBind(\n\t\tqueue,\n\t\tkey,\n\t\texchange,\n\t\tdefaultOpts.NoWait,\n\t\tdefaultOpts.Args,\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func BindAttribLocation(program Uint, index Uint, name string) {\n\tcprogram, _ := (C.GLuint)(program), cgoAllocsUnknown\n\tcindex, _ := (C.GLuint)(index), cgoAllocsUnknown\n\tcname, _ := unpackPCharString(name)\n\tC.glBindAttribLocation(cprogram, cindex, cname)\n}",
"func BindVertexArray(array uint32) {\n C.glowBindVertexArray(gpBindVertexArray, (C.GLuint)(array))\n}",
"func MapBufferRange(target uint32, offset int, length int, access uint32) unsafe.Pointer {\n ret := C.glowMapBufferRange(gpMapBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length), (C.GLbitfield)(access))\n return (unsafe.Pointer)(ret)\n}",
"func TexBufferRange(target uint32, internalformat uint32, buffer uint32, offset int, size int) {\n C.glowTexBufferRange(gpTexBufferRange, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func (self Source) SetBuffer(buffer Buffer) {\n\tself.Seti(AlBuffer, int32(buffer))\n}",
"func (b *endpointBuffers) flushBuffers(\n\tconn net.Conn, pkt *transport.TransportPacket) error {\n\n\tvbs := make([]*c.VbKeyVersions, 0, len(b.vbs))\n\tfor _, vb := range b.vbs {\n\t\tvbs = append(vbs, vb)\n\t}\n\tb.vbs = make(map[string]*c.VbKeyVersions)\n\n\tif err := pkt.Send(conn, vbs); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (b binder) BindFromAll() HTTPError {\n\tif err := b.setFromDefaults(b.reflector.Underlying()); err != nil {\n\t\treturn err\n\t}\n\tif err := b.setFromHeaders(); err != nil {\n\t\treturn err\n\t}\n\tif err := b.setFromJSONBody(); err != nil {\n\t\treturn err\n\t}\n\tif err := b.setFromForm(); err != nil {\n\t\treturn err\n\t}\n\tif err := b.setFromQueryParams(); err != nil {\n\t\treturn err\n\t}\n\tif err := b.setFromPathParams(); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (s *Stmt) Bind(args []driver.Value) error {\n\t// Binds all arguments on the query.\n\tif err := s.si.Bind(args); err != nil {\n\t\treturn err\n\t}\n\t// Parses the statement to manage it as expected by Google Adwords.\n\tstmts, err := parser.NewParser(strings.NewReader(s.si.SrcQuery)).Parse()\n\tif err != nil {\n\t\treturn err\n\t}\n\tif len(stmts) > 1 {\n\t\treturn ErrMultipleQueries\n\t}\n\ts.p = stmts[0]\n\n\treturn nil\n}",
"func (n *NatsSubscriber) Bind(mq *MessageQueue) {\n\tn.mq = mq\n\tn.nc.Opts.DisconnectedCB = func(_ *nats.Conn) {\n\t\tlog.Printf(\"Got disconnected! Queued %d messagse\\n\", n.mq.Len())\n\t}\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n C.glowVertexAttribBinding(gpVertexAttribBinding, (C.GLuint)(attribindex), (C.GLuint)(bindingindex))\n}",
"func NamedFramebufferDrawBuffers(framebuffer uint32, n int32, bufs *uint32) {\n\tsyscall.Syscall(gpNamedFramebufferDrawBuffers, 3, uintptr(framebuffer), uintptr(n), uintptr(unsafe.Pointer(bufs)))\n}",
"func (o StreamOptimizer) batch(ctx context.Context, chq <-chan *tree.SyncChange) <-chan *ChangeBuffer {\n\tcbQ := make(chan *ChangeBuffer, 1)\n\n\tvar nid string\n\tvar change *tree.SyncChange\n\tbuf := newBuffer()\n\n\tgo func() {\n\t\tdefer func() { close(cbQ) }()\n\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn\n\t\t\tcase change = <-chq:\n\t\t\t\t// TODO why do we sometimes receive nil?\n\t\t\t\tif change == nil {\n\t\t\t\t\tif !buf.isEmpty() {\n\t\t\t\t\t\t// transmit the last buffer before returning\n\t\t\t\t\t\tcbQ <- buf\n\t\t\t\t\t}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tif nid != change.NodeId {\n\t\t\t\t\tif buf.isEmpty() {\n\t\t\t\t\t\t// Drop it on the floor: it happens on the very first iteration\n\t\t\t\t\t} else {\n\t\t\t\t\t\tcbQ <- buf\n\t\t\t\t\t}\n\t\t\t\t\tbuf = newBuffer()\n\t\t\t\t\tnid = change.NodeId\n\t\t\t\t}\n\t\t\t\tbuf.Append(change)\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn cbQ\n}"
] | [
"0.64197224",
"0.6338076",
"0.623192",
"0.6152027",
"0.6152027",
"0.60921204",
"0.6068137",
"0.5997602",
"0.59825665",
"0.59725565",
"0.57663065",
"0.57108283",
"0.5709153",
"0.5709153",
"0.57091343",
"0.57068443",
"0.57066387",
"0.57066387",
"0.5680168",
"0.56305957",
"0.5609726",
"0.5431791",
"0.54091674",
"0.53862166",
"0.5384525",
"0.5384525",
"0.5377182",
"0.5377182",
"0.5328919",
"0.53218716",
"0.52927303",
"0.52069116",
"0.5203129",
"0.5195619",
"0.511253",
"0.51096374",
"0.51070315",
"0.5075128",
"0.50632614",
"0.49542016",
"0.49542016",
"0.4939179",
"0.49281496",
"0.49281496",
"0.4917322",
"0.4890124",
"0.48774412",
"0.4871185",
"0.48473608",
"0.4817467",
"0.48088926",
"0.48088926",
"0.47664067",
"0.4746623",
"0.471176",
"0.46983767",
"0.4690697",
"0.46898603",
"0.46655503",
"0.4637861",
"0.4625504",
"0.46104968",
"0.46101722",
"0.46090692",
"0.459445",
"0.45935228",
"0.4575911",
"0.45757183",
"0.45697135",
"0.456481",
"0.4563468",
"0.4562471",
"0.45359403",
"0.4529812",
"0.45279956",
"0.45199907",
"0.45078057",
"0.45054767",
"0.45054767",
"0.44946852",
"0.4485509",
"0.44836175",
"0.44812784",
"0.44798183",
"0.4476375",
"0.4469654",
"0.44688678",
"0.44595543",
"0.44576186",
"0.4451258",
"0.44453442",
"0.4440483",
"0.443981",
"0.44386208",
"0.44326213",
"0.44310188",
"0.44270915",
"0.44218636",
"0.4421018"
] | 0.58832186 | 11 |
bind ranges of one or more buffer objects to a sequence of indexed buffer targets | func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {
C.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tsyscall.Syscall6(gpBindBuffersRange, 6, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(sizes)))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n C.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpBindBufferRange, 5, uintptr(target), uintptr(index), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n C.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func (buffer Buffer) BindBufferRange(target gl.Enum, index uint, offset int, size uint) {\n\tgl.BindBufferRange(gl.Enum(target), gl.Uint(index), gl.Uint(buffer), gl.Intptr(offset), gl.Sizeiptr(size))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tsyscall.Syscall6(gpBindBuffersBase, 4, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), 0, 0)\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n C.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBufferBase, 3, uintptr(target), uintptr(index), uintptr(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func TexBufferRange(target uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpTexBufferRange, 5, uintptr(target), uintptr(internalformat), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func TextureBufferRange(texture uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpTextureBufferRange, 5, uintptr(texture), uintptr(internalformat), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func (s *Stream) bindOps() {\n\ts.log.Print(\"binding operators\")\n\tif s.ops == nil {\n\t\treturn\n\t}\n\tfor i, op := range s.ops {\n\t\tif i == 0 { // link 1st to source\n\t\t\top.SetInput(s.source.GetOutput())\n\t\t} else {\n\t\t\top.SetInput(s.ops[i-1].GetOutput())\n\t\t}\n\t}\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n C.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func TexBufferRange(target uint32, internalformat uint32, buffer uint32, offset int, size int) {\n C.glowTexBufferRange(gpTexBufferRange, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func (buffer Buffer) BindBufferBase(target gl.Enum, index uint) {\n\tgl.BindBufferBase(gl.Enum(target), gl.Uint(index), gl.Uint(buffer))\n}",
"func MapBufferRange(target uint32, offset int, length int, access uint32) unsafe.Pointer {\n ret := C.glowMapBufferRange(gpMapBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length), (C.GLbitfield)(access))\n return (unsafe.Pointer)(ret)\n}",
"func (buffer Buffer) Bind(target gl.Enum) {\n\tgl.BindBuffer(gl.Enum(target), gl.Uint(buffer))\n}",
"func MapBufferRange(target uint32, offset int, length int, access uint32) unsafe.Pointer {\n\tret, _, _ := syscall.Syscall6(gpMapBufferRange, 4, uintptr(target), uintptr(offset), uintptr(length), uintptr(access), 0, 0)\n\treturn (unsafe.Pointer)(ret)\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n C.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBuffer, 2, uintptr(target), uintptr(buffer), 0)\n}",
"func TexBufferRange(target uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tC.glowTexBufferRange(gpTexBufferRange, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func TexBufferRange(target uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tC.glowTexBufferRange(gpTexBufferRange, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tsyscall.Syscall(gpBindSamplers, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(samplers)))\n}",
"func MapNamedBufferRange(buffer uint32, offset int, length int, access uint32) unsafe.Pointer {\n\tret, _, _ := syscall.Syscall6(gpMapNamedBufferRange, 4, uintptr(buffer), uintptr(offset), uintptr(length), uintptr(access), 0, 0)\n\treturn (unsafe.Pointer)(ret)\n}",
"func MapBufferRange(target uint32, offset int, length int, access uint32) unsafe.Pointer {\n\tret := C.glowMapBufferRange(gpMapBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length), (C.GLbitfield)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func MapBufferRange(target uint32, offset int, length int, access uint32) unsafe.Pointer {\n\tret := C.glowMapBufferRange(gpMapBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length), (C.GLbitfield)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func (b *Buffer) Merge(bs ...Buffer) {\n\tfor _, buf := range bs {\n\t\tfor p, v := range buf.CellMap {\n\t\t\tb.Set(p.X, p.Y, v)\n\t\t}\n\t\tb.SetArea(b.Area.Union(buf.Area))\n\t}\n}",
"func BindBuffer(target uint32, buffer uint32) {\n C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBuffer(target Enum, b Buffer) {\n\tgl.BindBuffer(uint32(target), b.Value)\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (b *ChangeBuffer) Range() <-chan *list.Element {\n\tch := make(chan *list.Element)\n\tgo func() {\n\t\tdefer close(ch)\n\t\tfor c := b.Front(); c != nil; c = c.Next() {\n\t\t\tch <- c\n\t\t}\n\t}()\n\treturn ch\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n C.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func TextureBufferRange(texture uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tC.glowTextureBufferRange(gpTextureBufferRange, (C.GLuint)(texture), (C.GLenum)(internalformat), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func TextureBufferRange(texture uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tC.glowTextureBufferRange(gpTextureBufferRange, (C.GLuint)(texture), (C.GLenum)(internalformat), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindImageTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func MapNamedBufferRange(buffer uint32, offset int, length int, access uint32) unsafe.Pointer {\n\tret := C.glowMapNamedBufferRange(gpMapNamedBufferRange, (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(length), (C.GLbitfield)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func MapNamedBufferRange(buffer uint32, offset int, length int, access uint32) unsafe.Pointer {\n\tret := C.glowMapNamedBufferRange(gpMapNamedBufferRange, (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(length), (C.GLbitfield)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func BindBuffer(target Enum, buffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcbuffer, _ := (C.GLuint)(buffer), cgoAllocsUnknown\n\tC.glBindBuffer(ctarget, cbuffer)\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tsyscall.Syscall6(gpBindVertexBuffers, 5, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(strides)), 0)\n}",
"func DrawRangeElements(mode uint32, start uint32, end uint32, count int32, xtype uint32, indices unsafe.Pointer) {\n C.glowDrawRangeElements(gpDrawRangeElements, (C.GLenum)(mode), (C.GLuint)(start), (C.GLuint)(end), (C.GLsizei)(count), (C.GLenum)(xtype), indices)\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n C.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func TransformFeedbackBufferRange(xfb uint32, index uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpTransformFeedbackBufferRange, 5, uintptr(xfb), uintptr(index), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func bind(vm *VM, block block, factory bindFactory) {\n\tfor i := block.first(); i != 0; i = i.next(vm) {\n\t\tptr, ok := i.ptr(vm)\n\t\tif ok {\n\t\t\tobj := value(vm.read(ptr))\n\t\t\tkind := obj.kind()\n\t\t\tvm.bindFunc[kind](vm, ptr, factory)\n\t\t}\n\t}\n}",
"func (g *Gaffer) AddBuffer(u *Update) {\n\n\tfor _, v := range u.entities {\n\t\tg.AddEntity(v)\n\t}\n\n\tfor _, v := range u.edges {\n\t\tg.AddEdge(v)\n\t}\n\n}",
"func (j *JSONSerializer) Bind(events ...Event) {\n\tfor _, event := range events {\n\t\teventType, t := EventType(event)\n\t\tj.eventTypes[eventType] = t\n\t}\n}",
"func (s *Statement) Bind(start_column int, values... interface{}) (e error, index int) {\n\tcolumn := QueryParameter(start_column)\n\tfor i, v := range values {\n\t\tcolumn++\n\t\tif e = column.Bind(s, v); e != nil {\n\t\t\tindex = i\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}",
"func Bindables(cmdr command.Commander, driver gxui.Driver, theme *basic.Theme) []bind.Bindable {\n\tvar b []bind.Bindable\n\tb = append(b, project.Bindables(driver, theme)...)\n\tb = append(b,\n\t\tNewFileOpener(driver, theme),\n\t\tQuit{},\n\t\tFullscreen{},\n\t\t&caret.Mover{},\n\t\t&scroll.Scroller{},\n\t\tfocus.NewLocation(driver),\n\t\tFileHook{Theme: theme},\n\t\tEditHook{Theme: theme, Driver: driver},\n\t\tViewHook{},\n\t\tNavHook{Commander: cmdr},\n\t)\n\tb = append(b, history.Bindables(cmdr, driver, theme)...)\n\treturn b\n}",
"func annotateBuffer(text *string) []Annotation {\n\ttextLength := primitives.Cursor(len(*text))\n\treturn []Annotation{\n\t\t{\n\t\t\tTag: BUFFER,\n\t\t\tRegion: primitives.Region{\n\t\t\t\tLeft: primitives.Span{A: 0, B: 0},\n\t\t\t\tRight: primitives.Span{A: textLength, B: textLength},\n\t\t\t},\n\t\t},\n\t}\n}",
"func AssignBuf(dst, src any, buf AccumulativeBuffer) (ok bool) {\n\tfor _, fn := range assignFnRegistry {\n\t\tif ok = fn(dst, src, buf); ok {\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tC.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tC.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n C.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func FlushMappedBufferRange(target uint32, offset int, length int) {\n C.glowFlushMappedBufferRange(gpFlushMappedBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func (s *Statement) BindAll(values... interface{}) (e error, index int) {\n\treturn s.Bind(0, values...)\n}",
"func Sink(zipkinSpans <-chan proxy.Span) {\n\tfor span := range zipkinSpans {\n\t\tsinkSpan(span)\n\t}\n}",
"func FlushMappedBufferRange(target uint32, offset int, length int) {\n\tsyscall.Syscall(gpFlushMappedBufferRange, 3, uintptr(target), uintptr(offset), uintptr(length))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func modifyRanges(b []byte, ranges [][]int, f modifier) []byte {\n\tidx := 0\n\tresult := make([]byte, 0, len(b)) // Heuristic\n\tfor _, interval := range ranges {\n\t\tlow, high := interval[0], interval[1]\n\t\tresult = append(result, b[idx:low]...)\n\t\tresult = append(result, f(b[low:high])...)\n\t\tidx = high\n\t}\n\treturn append(result, b[idx:]...)\n}",
"func (r *PortAllocator) ForEach(fn func(int)) {\n\tr.alloc.ForEach(func(offset int) {\n\t\tfn(r.portRange.Base + offset)\n\t})\n}",
"func (Operators) Buffer(notifier Observable) OperatorFunc {\n\treturn func(source Observable) Observable {\n\t\top := bufferOperator{notifier}\n\t\treturn source.Lift(op.Call)\n\t}\n}",
"func (geom Geometry) Buffer(distance float64, segments int) Geometry {\n\tnewGeom := C.OGR_G_Buffer(geom.cval, C.double(distance), C.int(segments))\n\treturn Geometry{newGeom}\n}",
"func TransformFeedbackBufferRange(xfb uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowTransformFeedbackBufferRange(gpTransformFeedbackBufferRange, (C.GLuint)(xfb), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func TransformFeedbackBufferRange(xfb uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowTransformFeedbackBufferRange(gpTransformFeedbackBufferRange, (C.GLuint)(xfb), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func (d *diff) Ranges(ctx context.Context, ranges []Range, resBuf []RangeResult) (results []RangeResult, err error) {\n\td.mu.RLock()\n\tdefer d.mu.RUnlock()\n\n\tresults = resBuf[:0]\n\tfor _, r := range ranges {\n\t\tresults = append(results, d.getRange(r))\n\t}\n\treturn\n}",
"func BufferData(target Enum, src []byte, usage Enum) {\n\tgl.BufferData(uint32(target), int(len(src)), gl.Ptr(&src[0]), uint32(usage))\n}",
"func (s *Shard) addReferencesBatch(ctx context.Context,\n\trefs kinds.BatchReferences) map[int]error {\n\tmaxPerTransaction := 30\n\n\tm := &sync.Mutex{}\n\terrs := map[int]error{} // int represents original index\n\n\twg := &sync.WaitGroup{}\n\tfor i := 0; i < len(refs); i += maxPerTransaction {\n\t\tend := i + maxPerTransaction\n\t\tif end > len(refs) {\n\t\t\tend = len(refs)\n\t\t}\n\n\t\tbatch := refs[i:end]\n\t\twg.Add(1)\n\t\tgo func(i int, batch kinds.BatchReferences) {\n\t\t\tdefer wg.Done()\n\t\t\tvar affectedIndices []int\n\t\t\tif err := s.db.Batch(func(tx *bolt.Tx) error {\n\t\t\t\tfor j := range batch {\n\t\t\t\t\t// so we can reference potential errors\n\t\t\t\t\taffectedIndices = append(affectedIndices, i+j)\n\t\t\t\t}\n\n\t\t\t\tfor _, ref := range batch {\n\t\t\t\t\tuuidParsed, err := uuid.Parse(ref.From.TargetID.String())\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn errors.Wrap(err, \"invalid id\")\n\t\t\t\t\t}\n\n\t\t\t\t\tidBytes, err := uuidParsed.MarshalBinary()\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\n\t\t\t\t\tmergeDoc := mergeDocFromBatchReference(ref)\n\t\t\t\t\t_, err = s.mergeObjectInTx(tx, mergeDoc, idBytes)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn nil\n\t\t\t}); err != nil {\n\t\t\t\tm.Lock()\n\t\t\t\terr = errors.Wrap(err, \"bolt batch tx\")\n\t\t\t\tfor _, affected := range affectedIndices {\n\t\t\t\t\terrs[affected] = err\n\t\t\t\t}\n\t\t\t\tm.Unlock()\n\t\t\t}\n\t\t}(i, batch)\n\t}\n\twg.Wait()\n\n\t// adding references can not alter the vector position, so no need to alter\n\t// the vector index\n\n\treturn errs\n}",
"func NewBuffer(aSlice interface{}) *Buffer {\n return &Buffer{buffer: sliceValue(aSlice, false), handler: valueHandler{}}\n}",
"func TestMultiRangeBoundedBatchScan(t *testing.T) {\n\tdefer leaktest.AfterTest(t)()\n\ts, _, _ := serverutils.StartServer(t, base.TestServerArgs{})\n\tdefer s.Stopper().Stop()\n\n\tdb := setupMultipleRanges(t, s, \"a\", \"b\", \"c\", \"d\", \"e\", \"f\")\n\tfor _, key := range []string{\"a1\", \"a2\", \"a3\", \"b1\", \"b2\", \"c1\", \"c2\", \"d1\", \"f1\", \"f2\", \"f3\"} {\n\t\tif err := db.Put(key, \"value\"); err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}\n\n\t// These are the expected results if there is no bound.\n\texpResults := [][]string{\n\t\t{\"a1\", \"a2\", \"a3\", \"b1\", \"b2\"},\n\t\t{\"b1\", \"b2\", \"c1\"},\n\t\t{\"c1\", \"c2\", \"d1\", \"f1\", \"f2\", \"f3\"},\n\t\t{\"f2\"},\n\t}\n\tmaxExpCount := 0\n\tfor _, res := range expResults {\n\t\tmaxExpCount += len(res)\n\t}\n\n\tfor bound := 1; bound <= 20; bound++ {\n\t\tb := &client.Batch{}\n\t\tb.Header.MaxSpanRequestKeys = int64(bound)\n\n\t\tspans := [][]string{{\"a\", \"c\"}, {\"b\", \"c2\"}, {\"c\", \"g\"}, {\"f1a\", \"f2a\"}}\n\t\tfor _, span := range spans {\n\t\t\tb.Scan(span[0], span[1])\n\t\t}\n\t\tif err := db.Run(b); err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\n\t\texpCount := maxExpCount\n\t\tif bound < maxExpCount {\n\t\t\texpCount = bound\n\t\t}\n\t\tcheckScanResults(t, spans, b.Results, expResults, expCount)\n\n\t\t// Re-query using the resume spans that were returned; check that all\n\t\t// spans are read properly.\n\t\tif bound < maxExpCount {\n\t\t\tnewB := &client.Batch{}\n\t\t\tfor _, res := range b.Results {\n\t\t\t\tif res.ResumeSpan.Key != nil {\n\t\t\t\t\tnewB.Scan(res.ResumeSpan.Key, res.ResumeSpan.EndKey)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := db.Run(newB); err != nil {\n\t\t\t\tt.Fatal(err)\n\t\t\t}\n\t\t\t// Add the results to the previous results.\n\t\t\tj := 0\n\t\t\tfor i, res := range b.Results {\n\t\t\t\tif res.ResumeSpan.Key != nil {\n\t\t\t\t\tb.Results[i].Rows = append(b.Results[i].Rows, newB.Results[j].Rows...)\n\t\t\t\t\tb.Results[i].ResumeSpan = newB.Results[j].ResumeSpan\n\t\t\t\t\tj++\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Check that the Scan results contain all the expected results.\n\t\t\tcheckScanResults(t, spans, b.Results, expResults, maxExpCount)\n\t\t}\n\t}\n}",
"func (sr *Stackers) Bind(r Publisher, cl bool) {\n\tvar lr Connector\n\tvar err error\n\n\tif lr, err = sr.Last(); err != nil {\n\t\tsr.Publisher.Bind(r, cl)\n\t\tsr.ro.Lock()\n\t\t{\n\t\t\tsr.stacks = append(sr.stacks, r)\n\t\t}\n\t\tsr.ro.Unlock()\n\t\treturn\n\t}\n\n\tlr.Bind(r, cl)\n\tsr.ro.Lock()\n\t{\n\t\tsr.stacks = append(sr.stacks, r)\n\t}\n\tsr.ro.Unlock()\n}",
"func DrawRangeElementsBaseVertex(mode uint32, start uint32, end uint32, count int32, xtype uint32, indices unsafe.Pointer, basevertex int32) {\n C.glowDrawRangeElementsBaseVertex(gpDrawRangeElementsBaseVertex, (C.GLenum)(mode), (C.GLuint)(start), (C.GLuint)(end), (C.GLsizei)(count), (C.GLenum)(xtype), indices, (C.GLint)(basevertex))\n}",
"func FlushMappedNamedBufferRange(buffer uint32, offset int, length int) {\n\tsyscall.Syscall(gpFlushMappedNamedBufferRange, 3, uintptr(buffer), uintptr(offset), uintptr(length))\n}",
"func BufferPool(pool httputil.BufferPool) optSetter {\n\treturn func(f *Forwarder) error {\n\t\tf.bufferPool = pool\n\t\treturn nil\n\t}\n}",
"func TestMultiRangeBoundedBatchReverseScan(t *testing.T) {\n\tdefer leaktest.AfterTest(t)()\n\ts, _, _ := serverutils.StartServer(t, base.TestServerArgs{})\n\tdefer s.Stopper().Stop()\n\n\tdb := setupMultipleRanges(t, s, \"a\", \"b\", \"c\", \"d\", \"e\", \"f\")\n\tfor _, key := range []string{\"a1\", \"a2\", \"a3\", \"b1\", \"b2\", \"c1\", \"c2\", \"d1\", \"f1\", \"f2\", \"f3\"} {\n\t\tif err := db.Put(key, \"value\"); err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}\n\n\t// These are the expected results if there is no bound\n\texpResults := [][]string{\n\t\t{\"b2\", \"b1\", \"a3\", \"a2\", \"a1\"},\n\t\t{\"c1\", \"b2\", \"b1\"},\n\t\t{\"f3\", \"f2\", \"f1\", \"d1\", \"c2\", \"c1\"},\n\t\t{\"f2\"},\n\t}\n\tmaxExpCount := 0\n\tfor _, res := range expResults {\n\t\tmaxExpCount += len(res)\n\t}\n\n\tfor bound := 1; bound <= 20; bound++ {\n\t\tb := &client.Batch{}\n\t\tb.Header.MaxSpanRequestKeys = int64(bound)\n\n\t\tspans := [][]string{{\"a\", \"c\"}, {\"b\", \"c2\"}, {\"c\", \"g\"}, {\"f1a\", \"f2a\"}}\n\t\tfor _, span := range spans {\n\t\t\tb.ReverseScan(span[0], span[1])\n\t\t}\n\t\tif err := db.Run(b); err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\n\t\texpCount := maxExpCount\n\t\tif bound < maxExpCount {\n\t\t\texpCount = bound\n\t\t}\n\t\tcheckReverseScanResults(t, spans, b.Results, expResults, expCount)\n\n\t\t// Re-query using the resume spans that were returned; check that all\n\t\t// spans are read properly.\n\t\tif bound < maxExpCount {\n\t\t\tnewB := &client.Batch{}\n\t\t\tfor _, res := range b.Results {\n\t\t\t\tif res.ResumeSpan.Key != nil {\n\t\t\t\t\tnewB.ReverseScan(res.ResumeSpan.Key, res.ResumeSpan.EndKey)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := db.Run(newB); err != nil {\n\t\t\t\tt.Fatal(err)\n\t\t\t}\n\t\t\t// Add the results to the previous results.\n\t\t\tj := 0\n\t\t\tfor i, res := range b.Results {\n\t\t\t\tif res.ResumeSpan.Key != nil {\n\t\t\t\t\tb.Results[i].Rows = append(b.Results[i].Rows, newB.Results[j].Rows...)\n\t\t\t\t\tb.Results[i].ResumeSpan = newB.Results[j].ResumeSpan\n\t\t\t\t\tj++\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Check that the ReverseScan results contain all the expected\n\t\t\t// results.\n\t\t\tcheckReverseScanResults(t, spans, b.Results, expResults, maxExpCount)\n\t\t}\n\t}\n}",
"func addBindings(a, b bindingFrame) bindingFrame {\n\tresult := make(bindingFrame, len(a))\n\n\tfor k, v := range a {\n\t\tresult[k] = v\n\t}\n\n\tfor k, v := range b {\n\t\tresult[k] = v\n\t}\n\n\treturn result\n}",
"func DrawRangeElements(mode uint32, start uint32, end uint32, count int32, xtype uint32, indices unsafe.Pointer) {\n\tsyscall.Syscall6(gpDrawRangeElements, 6, uintptr(mode), uintptr(start), uintptr(end), uintptr(count), uintptr(xtype), uintptr(indices))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (p *InfluxDBProxy) BufferSeries(series *influxdb.Series) {\n\t// add series to buffer map\n\tp.buflock.Lock()\n\tnormalizedIdx, exists := p.bufIdx[series.Name]\n\tif !exists {\n\t\tp.bufIdx[series.Name] = len(p.buf)\n\t\tp.buf = append(p.buf, series)\n\t} else {\n\t\tnormalizedSeries := p.buf[normalizedIdx]\n\t\tmergeSeries(normalizedSeries, series)\n\t}\n\tp.buflock.Unlock()\n}",
"func (o StreamOptimizer) batch(ctx context.Context, chq <-chan *tree.SyncChange) <-chan *ChangeBuffer {\n\tcbQ := make(chan *ChangeBuffer, 1)\n\n\tvar nid string\n\tvar change *tree.SyncChange\n\tbuf := newBuffer()\n\n\tgo func() {\n\t\tdefer func() { close(cbQ) }()\n\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn\n\t\t\tcase change = <-chq:\n\t\t\t\t// TODO why do we sometimes receive nil?\n\t\t\t\tif change == nil {\n\t\t\t\t\tif !buf.isEmpty() {\n\t\t\t\t\t\t// transmit the last buffer before returning\n\t\t\t\t\t\tcbQ <- buf\n\t\t\t\t\t}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tif nid != change.NodeId {\n\t\t\t\t\tif buf.isEmpty() {\n\t\t\t\t\t\t// Drop it on the floor: it happens on the very first iteration\n\t\t\t\t\t} else {\n\t\t\t\t\t\tcbQ <- buf\n\t\t\t\t\t}\n\t\t\t\t\tbuf = newBuffer()\n\t\t\t\t\tnid = change.NodeId\n\t\t\t\t}\n\t\t\t\tbuf.Append(change)\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn cbQ\n}",
"func FlushMappedBufferRange(target uint32, offset int, length int) {\n\tC.glowFlushMappedBufferRange(gpFlushMappedBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func FlushMappedBufferRange(target uint32, offset int, length int) {\n\tC.glowFlushMappedBufferRange(gpFlushMappedBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func (o *Mesh) setBryTagMaps(cellBryMap *map[int]BoundaryDataSet, vertBryMap *map[int]VertexSet, cell *Cell, tagList []int, locVerts [][]int) {\n\n\t// loop over each tag attached to a side of the cell\n\tfor localID, tag := range tagList {\n\n\t\t// there is a tag (i.e. it's nonzero)\n\t\tif tag != 0 {\n\n\t\t\t// set edgeTag => cells map\n\t\t\t(*cellBryMap)[tag] = append((*cellBryMap)[tag], &BoundaryData{localID, cell})\n\n\t\t\t// loop over local edges of cell\n\t\t\tfor _, locVid := range locVerts[localID] {\n\n\t\t\t\t// find vertex\n\t\t\t\tvid := cell.V[locVid] // local vertex id => global vertex id (vid)\n\t\t\t\tvert := o.Verts[vid] // pointer to vertex\n\n\t\t\t\t// find whether this edgeTag is present in the map or not\n\t\t\t\tif vertsOnEdge, ok := (*vertBryMap)[tag]; ok {\n\n\t\t\t\t\t// find whether this vertex is in the slice attached to edgeTag or not\n\t\t\t\t\tfound := false\n\t\t\t\t\tfor _, v := range vertsOnEdge {\n\t\t\t\t\t\tif vert.ID == v.ID {\n\t\t\t\t\t\t\tfound = true\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\t// add vertex to (unique) slice attached to edgeTag\n\t\t\t\t\tif !found {\n\t\t\t\t\t\t(*vertBryMap)[tag] = append(vertsOnEdge, vert)\n\t\t\t\t\t}\n\n\t\t\t\t\t// edgeTag is not in the map => create new slice with the first vertex in it\n\t\t\t\t} else {\n\t\t\t\t\t(*vertBryMap)[tag] = []*Vertex{vert}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tsyscall.Syscall6(gpBindFragDataLocationIndexed, 4, uintptr(program), uintptr(colorNumber), uintptr(index), uintptr(unsafe.Pointer(name)), 0, 0)\n}",
"func DrawRangeElements(mode uint32, start uint32, end uint32, count int32, xtype uint32, indices unsafe.Pointer) {\n\tC.glowDrawRangeElements(gpDrawRangeElements, (C.GLenum)(mode), (C.GLuint)(start), (C.GLuint)(end), (C.GLsizei)(count), (C.GLenum)(xtype), indices)\n}",
"func DrawRangeElements(mode uint32, start uint32, end uint32, count int32, xtype uint32, indices unsafe.Pointer) {\n\tC.glowDrawRangeElements(gpDrawRangeElements, (C.GLenum)(mode), (C.GLuint)(start), (C.GLuint)(end), (C.GLsizei)(count), (C.GLenum)(xtype), indices)\n}",
"func (pk PacketBufferPtr) AsSlices() [][]byte {\n\tvar views [][]byte\n\toffset := pk.headerOffset()\n\tpk.buf.SubApply(offset, int(pk.buf.Size())-offset, func(v *buffer.View) {\n\t\tviews = append(views, v.AsSlice())\n\t})\n\treturn views\n}",
"func (r sUintRangeIterator) Range(handlers ...UintHandler) error {\n\treturn UintRange(r.iter, handlers...)\n}",
"func bindBindings(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {\n\tparsed, err := abi.JSON(strings.NewReader(BindingsABI))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil\n}"
] | [
"0.70568985",
"0.7008023",
"0.68640035",
"0.6768027",
"0.64906543",
"0.64906543",
"0.6451645",
"0.5825164",
"0.5819655",
"0.5744454",
"0.5607061",
"0.557628",
"0.54991025",
"0.54833347",
"0.54833347",
"0.5462898",
"0.5458053",
"0.5366444",
"0.5366444",
"0.5327488",
"0.5318707",
"0.5307695",
"0.5284783",
"0.5274743",
"0.5273028",
"0.5251303",
"0.518547",
"0.51661974",
"0.51661974",
"0.5090453",
"0.5079287",
"0.5075611",
"0.5075611",
"0.50527537",
"0.50465626",
"0.50337094",
"0.50139886",
"0.50139886",
"0.5009071",
"0.49855894",
"0.49658814",
"0.49658814",
"0.49560258",
"0.49441117",
"0.49441117",
"0.49320865",
"0.49223006",
"0.49083748",
"0.48738497",
"0.48596355",
"0.48596355",
"0.48564985",
"0.48536322",
"0.48228982",
"0.47454736",
"0.4732889",
"0.47322676",
"0.4717945",
"0.47165298",
"0.47066408",
"0.47066408",
"0.46870878",
"0.46740624",
"0.46690372",
"0.46624273",
"0.46549785",
"0.4643647",
"0.4643647",
"0.46421158",
"0.4598112",
"0.45956796",
"0.4577708",
"0.4577222",
"0.4577222",
"0.4566897",
"0.45668215",
"0.4561504",
"0.4555727",
"0.45507842",
"0.45499712",
"0.45498297",
"0.45343778",
"0.45215106",
"0.451697",
"0.44915786",
"0.44914508",
"0.44884256",
"0.44884256",
"0.44710308",
"0.44533187",
"0.44476703",
"0.44476703",
"0.4442344",
"0.4440704",
"0.4429257",
"0.4429257",
"0.4428379",
"0.4422278",
"0.44218624"
] | 0.6809925 | 4 |
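A minimal usage sketch for the `BindBuffersRange` wrapper paired with the query in the row above, assuming a go-gl style binding with the same signature shown in the row, an already-current GL 4.4+ context, and an illustrative import path; the helper name `bindUniformRanges` and its parameters are hypothetical, not part of the dataset's own code.

```go
package glutil

import "github.com/go-gl/gl/v4.5-core/gl" // import path is illustrative; any go-gl profile exposing GL 4.4+ works

// bindUniformRanges binds a sub-range of each buffer in buffers to the
// consecutive GL_UNIFORM_BUFFER indexed binding points first, first+1, ...
// offsets[i]/sizes[i] select the byte slice of buffers[i] that becomes
// visible at binding point first+i. A current OpenGL context is assumed,
// and offsets must respect GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT.
func bindUniformRanges(first uint32, buffers []uint32, offsets, sizes []int) {
	if len(buffers) == 0 || len(offsets) != len(buffers) || len(sizes) != len(buffers) {
		return // nothing to bind, or mismatched slices
	}
	gl.BindBuffersRange(
		gl.UNIFORM_BUFFER,   // indexed target
		first,               // first binding point
		int32(len(buffers)), // number of bindings
		&buffers[0],         // buffer object names
		&offsets[0],         // byte offset into each buffer
		&sizes[0],           // byte length of each range
	)
}
```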
bind a user-defined varying out variable to a fragment shader color number | func BindFragDataLocation(program uint32, color uint32, name *uint8) {
C.glowBindFragDataLocation(gpBindFragDataLocation, (C.GLuint)(program), (C.GLuint)(color), (*C.GLchar)(unsafe.Pointer(name)))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindFragDataLocation(program uint32, color uint32, name *int8) {\n C.glowBindFragDataLocation(gpBindFragDataLocation, (C.GLuint)(program), (C.GLuint)(color), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func (s *Shader) setUniform(name string, value int32) {\n location:=gl.GetUniformLocation(s.idPrograma, gl.Str(name + \"\\x00\"))\n if location != -1 { // Si existe ese nombre de variable\n gl.Uniform1i(location, value)\n }\n}",
"func ProgramUniform2fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform2fv(gpProgramUniform2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *int8) {\n C.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func Uniform2fv(location int32, count int32, value *float32) {\n C.glowUniform2fv(gpUniform2fv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func (u *Uniform) Bind() {\n\tif u.id < 0 {\n\t\tu.id = gl.GetUniformLocation(u.program, glString(u.name))\n\t\tif u.id < 0 {\n\t\t\tpanic(fmt.Errorf(\"glGetUniformLocation for \\\"%s\\\" (program %d) returned -1, GL error: %d\", u.name, u.program, gl.GetError()))\n\t\t}\n\t}\n\tswitch value := u.value.(type) {\n\tcase uint32:\n\t\tgl.Uniform1ui(u.id, value)\n\tcase []uint32:\n\t\tswitch len(value) {\n\t\tcase 1:\n\t\t\tgl.Uniform1uiv(u.id, 1, &value[0])\n\t\tcase 2:\n\t\t\tgl.Uniform2uiv(u.id, 2, &value[0])\n\t\tcase 3:\n\t\t\tgl.Uniform3uiv(u.id, 3, &value[0])\n\t\tcase 4:\n\t\t\tgl.Uniform4uiv(u.id, 4, &value[0])\n\t\tdefault:\n\t\t\tpanic(ErrUniformInvalidType)\n\t\t}\n\tcase int32:\n\t\tgl.Uniform1i(u.id, value)\n\tcase []int32:\n\t\tswitch len(value) {\n\t\tcase 1:\n\t\t\tgl.Uniform1iv(u.id, 1, &value[0])\n\t\tcase 2:\n\t\t\tgl.Uniform2iv(u.id, 2, &value[0])\n\t\tcase 3:\n\t\t\tgl.Uniform3iv(u.id, 3, &value[0])\n\t\tcase 4:\n\t\t\tgl.Uniform4iv(u.id, 4, &value[0])\n\t\tdefault:\n\t\t\tpanic(ErrUniformInvalidType)\n\t\t}\n\tcase float32:\n\t\tgl.Uniform1f(u.id, value)\n\tcase []float32:\n\t\tswitch len(value) {\n\t\tcase 1:\n\t\t\tgl.Uniform1fv(u.id, 1, &value[0])\n\t\tcase 2:\n\t\t\tgl.Uniform2fv(u.id, 2, &value[0])\n\t\tcase 3:\n\t\t\tgl.Uniform3fv(u.id, 3, &value[0])\n\t\tcase 4:\n\t\t\tgl.Uniform4fv(u.id, 4, &value[0])\n\t\tdefault:\n\t\t\tpanic(ErrUniformInvalidType)\n\t\t}\n\tcase float64:\n\t\tgl.Uniform1d(u.id, value)\n\tcase []float64:\n\t\tswitch len(value) {\n\t\tcase 1:\n\t\t\tgl.Uniform1dv(u.id, 1, &value[0])\n\t\tcase 2:\n\t\t\tgl.Uniform2dv(u.id, 2, &value[0])\n\t\tcase 3:\n\t\t\tgl.Uniform3dv(u.id, 3, &value[0])\n\t\tcase 4:\n\t\t\tgl.Uniform4dv(u.id, 4, &value[0])\n\t\tdefault:\n\t\t\tpanic(ErrUniformInvalidType)\n\t\t}\n\tcase mgl32.Mat2:\n\t\tgl.UniformMatrix2fv(u.id, 1, false, &value[0])\n\tcase mgl32.Mat3:\n\t\tgl.UniformMatrix3fv(u.id, 1, false, &value[0])\n\tcase mgl32.Mat4:\n\t\tgl.UniformMatrix4fv(u.id, 1, false, &value[0])\n\tcase *Texture:\n\t\tvalue.Bind(0) //TODO support multiple textures per-shader\n\t\terr := value.SetUniform(u.id)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\tcase color.Color:\n\t\tr, g, b, a := toGLColor(value)\n\t\tgl.Uniform4f(u.id, r, g, b, a)\n\tdefault:\n\t\tpanic(ErrUniformInvalidType)\n\t}\n}",
"func ProgramUniform3fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform3fv(gpProgramUniform3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform4fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform4fv(gpProgramUniform4fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2f(program uint32, location int32, v0 float32, v1 float32) {\n C.glowProgramUniform2f(gpProgramUniform2f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1))\n}",
"func Uniform2f(location int32, v0 float32, v1 float32) {\n C.glowUniform2f(gpUniform2f, (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1))\n}",
"func ProgramUniformMatrix2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix2fv(gpProgramUniformMatrix2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2fv(program uint32, location int32, count int32, value *float32) {\n\tC.glowProgramUniform2fv(gpProgramUniform2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2fv(program uint32, location int32, count int32, value *float32) {\n\tC.glowProgramUniform2fv(gpProgramUniform2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2iv(program uint32, location int32, count int32, value *int32) {\n C.glowProgramUniform2iv(gpProgramUniform2iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform1fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform1fv(gpProgramUniform1fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2uiv(program uint32, location int32, count int32, value *uint32) {\n C.glowProgramUniform2uiv(gpProgramUniform2uiv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tC.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tC.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func Uniform3fv(location int32, count int32, value *float32) {\n C.glowUniform3fv(gpUniform3fv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniformMatrix4x2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix4x2fv(gpProgramUniformMatrix4x2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform2uiv(location int32, count int32, value *uint32) {\n C.glowUniform2uiv(gpUniform2uiv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func Uniform4fv(location int32, count int32, value *float32) {\n C.glowUniform4fv(gpUniform4fv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func (native *OpenGL) BindFragDataLocation(program uint32, color uint32, name string) {\n\tgl.BindFragDataLocation(program, color, gl.Str(name+\"\\x00\"))\n}",
"func GetUniformfv(program uint32, location int32, params *float32) {\n C.glowGetUniformfv(gpGetUniformfv, (C.GLuint)(program), (C.GLint)(location), (*C.GLfloat)(unsafe.Pointer(params)))\n}",
"func PassThrough(token float32) {\n C.glowPassThrough(gpPassThrough, (C.GLfloat)(token))\n}",
"func Uniform1f(location int32, v0 float32) {\n C.glowUniform1f(gpUniform1f, (C.GLint)(location), (C.GLfloat)(v0))\n}",
"func ProgramUniform4f(program uint32, location int32, v0 float32, v1 float32, v2 float32, v3 float32) {\n C.glowProgramUniform4f(gpProgramUniform4f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1), (C.GLfloat)(v2), (C.GLfloat)(v3))\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tsyscall.Syscall6(gpBindFragDataLocationIndexed, 4, uintptr(program), uintptr(colorNumber), uintptr(index), uintptr(unsafe.Pointer(name)), 0, 0)\n}",
"func Uniform2iv(location int32, count int32, value *int32) {\n C.glowUniform2iv(gpUniform2iv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func Uniform1fv(location int32, count int32, value *float32) {\n C.glowUniform1fv(gpUniform1fv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform2ui(location int32, v0 uint32, v1 uint32) {\n C.glowUniform2ui(gpUniform2ui, (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1))\n}",
"func (s *Shader) setUniformMatrix(name string, value *mgl32.Mat4) {\n location:=gl.GetUniformLocation(s.idPrograma, gl.Str(name + \"\\x00\"))\n if location != -1 { // Si existe ese nombre de variable\n\n bb := new([16]float32) // Creamos un buffer de floats\n for i:=0; i<4; i++{\n for j:=0; j<4; j++ {\n bb[j+i*4] = float32(value.At(i,j))\n }\n }\n gl.UniformMatrix4fv(location, 1, false, &bb[0]) // Enviar a shader matriz PROJECTION * SCALE\n }\n}",
"func Uniform2fv(location int32, count int32, value *float32) {\n\tC.glowUniform2fv(gpUniform2fv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform2fv(location int32, count int32, value *float32) {\n\tC.glowUniform2fv(gpUniform2fv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func GetActiveUniformsiv(program uint32, uniformCount int32, uniformIndices *uint32, pname uint32, params *int32) {\n C.glowGetActiveUniformsiv(gpGetActiveUniformsiv, (C.GLuint)(program), (C.GLsizei)(uniformCount), (*C.GLuint)(unsafe.Pointer(uniformIndices)), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func ProgramUniformMatrix4x3fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix4x3fv(gpProgramUniformMatrix4x3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func (native *OpenGL) GLBindFragDataLocation(program uint32, color uint32, name *uint8) {\n\tgl.BindFragDataLocation(program, color, name)\n}",
"func ProgramUniformMatrix3x2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix3x2fv(gpProgramUniformMatrix3x2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform3f(location int32, v0 float32, v1 float32, v2 float32) {\n C.glowUniform3f(gpUniform3f, (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1), (C.GLfloat)(v2))\n}",
"func Uniform4f(location int32, v0 float32, v1 float32, v2 float32, v3 float32) {\n C.glowUniform4f(gpUniform4f, (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1), (C.GLfloat)(v2), (C.GLfloat)(v3))\n}",
"func ShaderProgramFill(r, g, b, a byte) *shaderir.Program {\n\tir, err := graphics.CompileShader([]byte(fmt.Sprintf(`//kage:unit pixels\n\npackage main\n\nfunc Fragment(position vec4, texCoord vec2, color vec4) vec4 {\n\treturn vec4(%0.9f, %0.9f, %0.9f, %0.9f)\n}\n`, float64(r)/0xff, float64(g)/0xff, float64(b)/0xff, float64(a)/0xff)))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn ir\n}",
"func ProgramUniformMatrix2x4fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix2x4fv(gpProgramUniformMatrix2x4fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform3f(program uint32, location int32, v0 float32, v1 float32, v2 float32) {\n C.glowProgramUniform3f(gpProgramUniform3f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1), (C.GLfloat)(v2))\n}",
"func ProgramUniformMatrix2x3fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix2x3fv(gpProgramUniformMatrix2x3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform3uiv(program uint32, location int32, count int32, value *uint32) {\n C.glowProgramUniform3uiv(gpProgramUniform3uiv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func UniformMatrix2fv(location int32, count int32, transpose bool, value *float32) {\n C.glowUniformMatrix2fv(gpUniformMatrix2fv, (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func AttachShader(program uint32, shader uint32) {\n C.glowAttachShader(gpAttachShader, (C.GLuint)(program), (C.GLuint)(shader))\n}",
"func ProgramUniform3iv(program uint32, location int32, count int32, value *int32) {\n C.glowProgramUniform3iv(gpProgramUniform3iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func BindAttribLocation(program uint32, index uint32, name *int8) {\n C.glowBindAttribLocation(gpBindAttribLocation, (C.GLuint)(program), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func ProgramUniform4uiv(program uint32, location int32, count int32, value *uint32) {\n C.glowProgramUniform4uiv(gpProgramUniform4uiv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2fv(program uint32, location int32, count int32, value *float32) {\n\tsyscall.Syscall6(gpProgramUniform2fv, 4, uintptr(program), uintptr(location), uintptr(count), uintptr(unsafe.Pointer(value)), 0, 0)\n}",
"func ProgramUniform2i(program uint32, location int32, v0 int32, v1 int32) {\n C.glowProgramUniform2i(gpProgramUniform2i, (C.GLuint)(program), (C.GLint)(location), (C.GLint)(v0), (C.GLint)(v1))\n}",
"func ProgramUniform2ui(program uint32, location int32, v0 uint32, v1 uint32) {\n C.glowProgramUniform2ui(gpProgramUniform2ui, (C.GLuint)(program), (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1))\n}",
"func ProgramUniform4iv(program uint32, location int32, count int32, value *int32) {\n C.glowProgramUniform4iv(gpProgramUniform4iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func BlendColor(red float32, green float32, blue float32, alpha float32) {\n C.glowBlendColor(gpBlendColor, (C.GLfloat)(red), (C.GLfloat)(green), (C.GLfloat)(blue), (C.GLfloat)(alpha))\n}",
"func Uniform3ui(location int32, v0 uint32, v1 uint32, v2 uint32) {\n C.glowUniform3ui(gpUniform3ui, (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1), (C.GLuint)(v2))\n}",
"func Uniform3uiv(location int32, count int32, value *uint32) {\n C.glowUniform3uiv(gpUniform3uiv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2uiv(program uint32, location int32, count int32, value *uint32) {\n\tC.glowProgramUniform2uiv(gpProgramUniform2uiv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2uiv(program uint32, location int32, count int32, value *uint32) {\n\tC.glowProgramUniform2uiv(gpProgramUniform2uiv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func ProgramUniformMatrix3x4fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix3x4fv(gpProgramUniformMatrix3x4fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform4ui(location int32, v0 uint32, v1 uint32, v2 uint32, v3 uint32) {\n C.glowUniform4ui(gpUniform4ui, (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1), (C.GLuint)(v2), (C.GLuint)(v3))\n}",
"func Uniform2i(location int32, v0 int32, v1 int32) {\n C.glowUniform2i(gpUniform2i, (C.GLint)(location), (C.GLint)(v0), (C.GLint)(v1))\n}",
"func Uniform2uiv(location int32, count int32, value *uint32) {\n\tC.glowUniform2uiv(gpUniform2uiv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func Uniform2uiv(location int32, count int32, value *uint32) {\n\tC.glowUniform2uiv(gpUniform2uiv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func ProgramUniformMatrix3fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix3fv(gpProgramUniformMatrix3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform3iv(location int32, count int32, value *int32) {\n C.glowUniform3iv(gpUniform3iv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform1f(program uint32, location int32, v0 float32) {\n C.glowProgramUniform1f(gpProgramUniform1f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0))\n}",
"func ProgramUniform1uiv(program uint32, location int32, count int32, value *uint32) {\n C.glowProgramUniform1uiv(gpProgramUniform1uiv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func (uni *Uniform3fv) SetColor(idx int, color *math32.Color) {\n\n\tpos := idx * 3\n\tuni.v[pos] = color.R\n\tuni.v[pos+1] = color.G\n\tuni.v[pos+2] = color.B\n}",
"func ProgramUniform2iv(program uint32, location int32, count int32, value *int32) {\n\tC.glowProgramUniform2iv(gpProgramUniform2iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2iv(program uint32, location int32, count int32, value *int32) {\n\tC.glowProgramUniform2iv(gpProgramUniform2iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func Uniform4uiv(location int32, count int32, value *uint32) {\n C.glowUniform4uiv(gpUniform4uiv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func Uniform1i(location int32, v0 int32) {\n C.glowUniform1i(gpUniform1i, (C.GLint)(location), (C.GLint)(v0))\n}",
"func ProgramUniformMatrix4fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix4fv(gpProgramUniformMatrix4fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform1ui(location int32, v0 uint32) {\n C.glowUniform1ui(gpUniform1ui, (C.GLint)(location), (C.GLuint)(v0))\n}",
"func Uniform3i(location int32, v0 int32, v1 int32, v2 int32) {\n C.glowUniform3i(gpUniform3i, (C.GLint)(location), (C.GLint)(v0), (C.GLint)(v1), (C.GLint)(v2))\n}",
"func ProgramUniform3fv(program uint32, location int32, count int32, value *float32) {\n\tC.glowProgramUniform3fv(gpProgramUniform3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform3fv(program uint32, location int32, count int32, value *float32) {\n\tC.glowProgramUniform3fv(gpProgramUniform3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func GetUniformfv(program uint32, location int32, params *float32) {\n\tC.glowGetUniformfv(gpGetUniformfv, (C.GLuint)(program), (C.GLint)(location), (*C.GLfloat)(unsafe.Pointer(params)))\n}",
"func GetUniformfv(program uint32, location int32, params *float32) {\n\tC.glowGetUniformfv(gpGetUniformfv, (C.GLuint)(program), (C.GLint)(location), (*C.GLfloat)(unsafe.Pointer(params)))\n}",
"func GetActiveUniformsiv(program uint32, uniformCount int32, uniformIndices *uint32, pname uint32, params *int32) {\n\tC.glowGetActiveUniformsiv(gpGetActiveUniformsiv, (C.GLuint)(program), (C.GLsizei)(uniformCount), (*C.GLuint)(unsafe.Pointer(uniformIndices)), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func GetActiveUniformsiv(program uint32, uniformCount int32, uniformIndices *uint32, pname uint32, params *int32) {\n\tC.glowGetActiveUniformsiv(gpGetActiveUniformsiv, (C.GLuint)(program), (C.GLsizei)(uniformCount), (*C.GLuint)(unsafe.Pointer(uniformIndices)), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func (uni *Uniform1fv) SetColor(pos int, color *math32.Color) {\n\n\tuni.v[pos] = color.R\n\tuni.v[pos+1] = color.G\n\tuni.v[pos+2] = color.B\n}",
"func ProgramUniformMatrix2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n\tC.glowProgramUniformMatrix2fv(gpProgramUniformMatrix2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniformMatrix2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n\tC.glowProgramUniformMatrix2fv(gpProgramUniformMatrix2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform1iv(program uint32, location int32, count int32, value *int32) {\n C.glowProgramUniform1iv(gpProgramUniform1iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func Uniform4i(location int32, v0 int32, v1 int32, v2 int32, v3 int32) {\n C.glowUniform4i(gpUniform4i, (C.GLint)(location), (C.GLint)(v0), (C.GLint)(v1), (C.GLint)(v2), (C.GLint)(v3))\n}",
"func ProgramUniform3ui(program uint32, location int32, v0 uint32, v1 uint32, v2 uint32) {\n C.glowProgramUniform3ui(gpProgramUniform3ui, (C.GLuint)(program), (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1), (C.GLuint)(v2))\n}",
"func ProgramUniform4ui(program uint32, location int32, v0 uint32, v1 uint32, v2 uint32, v3 uint32) {\n C.glowProgramUniform4ui(gpProgramUniform4ui, (C.GLuint)(program), (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1), (C.GLuint)(v2), (C.GLuint)(v3))\n}",
"func VPBLENDVB(xy, mxy, xy1, xy2 operand.Op) { ctx.VPBLENDVB(xy, mxy, xy1, xy2) }",
"func VBLENDPD(i, mxy, xy, xy1 operand.Op) { ctx.VBLENDPD(i, mxy, xy, xy1) }",
"func Uniform4iv(location int32, count int32, value *int32) {\n C.glowUniform4iv(gpUniform4iv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func Uniform2fv(location int32, count int32, value *float32) {\n\tsyscall.Syscall(gpUniform2fv, 3, uintptr(location), uintptr(count), uintptr(unsafe.Pointer(value)))\n}",
"func ProgramUniform4i(program uint32, location int32, v0 int32, v1 int32, v2 int32, v3 int32) {\n C.glowProgramUniform4i(gpProgramUniform4i, (C.GLuint)(program), (C.GLint)(location), (C.GLint)(v0), (C.GLint)(v1), (C.GLint)(v2), (C.GLint)(v3))\n}",
"func ProgramUniform2uiv(program uint32, location int32, count int32, value *uint32) {\n\tsyscall.Syscall6(gpProgramUniform2uiv, 4, uintptr(program), uintptr(location), uintptr(count), uintptr(unsafe.Pointer(value)), 0, 0)\n}",
"func Uniform2ui(location int32, v0 uint32, v1 uint32) {\n\tC.glowUniform2ui(gpUniform2ui, (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1))\n}",
"func Uniform2ui(location int32, v0 uint32, v1 uint32) {\n\tC.glowUniform2ui(gpUniform2ui, (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1))\n}",
"func (self *TileSprite) SetShaderA(member *AbstractFilter) {\n self.Object.Set(\"shader\", member)\n}",
"func UniformMatrix4x2fv(location int32, count int32, transpose bool, value *float32) {\n C.glowUniformMatrix4x2fv(gpUniformMatrix4x2fv, (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}"
] | [
"0.5981919",
"0.5931926",
"0.5892054",
"0.5875066",
"0.5700229",
"0.55121195",
"0.54862076",
"0.54636985",
"0.5428842",
"0.5414932",
"0.53912",
"0.5385941",
"0.5385941",
"0.53845733",
"0.535564",
"0.53540623",
"0.5346713",
"0.5346713",
"0.5333384",
"0.5319861",
"0.5264814",
"0.5258429",
"0.5230074",
"0.52268445",
"0.5202362",
"0.5195115",
"0.5193753",
"0.51926017",
"0.51859313",
"0.51821554",
"0.51814324",
"0.5180512",
"0.5176397",
"0.5176397",
"0.51709014",
"0.51675314",
"0.5161747",
"0.51454186",
"0.5143153",
"0.51412815",
"0.5139799",
"0.51387393",
"0.5128003",
"0.51205766",
"0.51198715",
"0.5115409",
"0.5111234",
"0.511105",
"0.510581",
"0.50903845",
"0.5082028",
"0.50745517",
"0.50734353",
"0.5068621",
"0.50676113",
"0.5065191",
"0.5053862",
"0.5053725",
"0.5053725",
"0.5050902",
"0.50472933",
"0.50331885",
"0.5016642",
"0.5016642",
"0.50161755",
"0.5014282",
"0.5011886",
"0.49750045",
"0.49737933",
"0.49717554",
"0.49717554",
"0.49641392",
"0.49636847",
"0.4948793",
"0.49340263",
"0.49290526",
"0.49241143",
"0.49241143",
"0.49232805",
"0.49232805",
"0.49208194",
"0.49208194",
"0.4919429",
"0.49155128",
"0.49155128",
"0.49032545",
"0.48940736",
"0.48935792",
"0.48924527",
"0.4879198",
"0.4874797",
"0.4869381",
"0.48661268",
"0.48647702",
"0.48591262",
"0.48538622",
"0.48538622",
"0.48385972",
"0.48357353"
] | 0.5486758 | 7 |
bind a user-defined varying out variable to a fragment shader color number and index | func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {
C.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *int8) {\n C.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindFragDataLocation(program uint32, color uint32, name *int8) {\n C.glowBindFragDataLocation(gpBindFragDataLocation, (C.GLuint)(program), (C.GLuint)(color), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func (s *Shader) setUniform(name string, value int32) {\n location:=gl.GetUniformLocation(s.idPrograma, gl.Str(name + \"\\x00\"))\n if location != -1 { // Si existe ese nombre de variable\n gl.Uniform1i(location, value)\n }\n}",
"func ProgramUniform2fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform2fv(gpProgramUniform2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tsyscall.Syscall6(gpBindFragDataLocationIndexed, 4, uintptr(program), uintptr(colorNumber), uintptr(index), uintptr(unsafe.Pointer(name)), 0, 0)\n}",
"func Uniform2fv(location int32, count int32, value *float32) {\n C.glowUniform2fv(gpUniform2fv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2iv(program uint32, location int32, count int32, value *int32) {\n C.glowProgramUniform2iv(gpProgramUniform2iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func BindFragDataLocation(program uint32, color uint32, name *uint8) {\n\tC.glowBindFragDataLocation(gpBindFragDataLocation, (C.GLuint)(program), (C.GLuint)(color), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindFragDataLocation(program uint32, color uint32, name *uint8) {\n\tC.glowBindFragDataLocation(gpBindFragDataLocation, (C.GLuint)(program), (C.GLuint)(color), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func ProgramUniform3fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform3fv(gpProgramUniform3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniformMatrix2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix2fv(gpProgramUniformMatrix2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func (u *Uniform) Bind() {\n\tif u.id < 0 {\n\t\tu.id = gl.GetUniformLocation(u.program, glString(u.name))\n\t\tif u.id < 0 {\n\t\t\tpanic(fmt.Errorf(\"glGetUniformLocation for \\\"%s\\\" (program %d) returned -1, GL error: %d\", u.name, u.program, gl.GetError()))\n\t\t}\n\t}\n\tswitch value := u.value.(type) {\n\tcase uint32:\n\t\tgl.Uniform1ui(u.id, value)\n\tcase []uint32:\n\t\tswitch len(value) {\n\t\tcase 1:\n\t\t\tgl.Uniform1uiv(u.id, 1, &value[0])\n\t\tcase 2:\n\t\t\tgl.Uniform2uiv(u.id, 2, &value[0])\n\t\tcase 3:\n\t\t\tgl.Uniform3uiv(u.id, 3, &value[0])\n\t\tcase 4:\n\t\t\tgl.Uniform4uiv(u.id, 4, &value[0])\n\t\tdefault:\n\t\t\tpanic(ErrUniformInvalidType)\n\t\t}\n\tcase int32:\n\t\tgl.Uniform1i(u.id, value)\n\tcase []int32:\n\t\tswitch len(value) {\n\t\tcase 1:\n\t\t\tgl.Uniform1iv(u.id, 1, &value[0])\n\t\tcase 2:\n\t\t\tgl.Uniform2iv(u.id, 2, &value[0])\n\t\tcase 3:\n\t\t\tgl.Uniform3iv(u.id, 3, &value[0])\n\t\tcase 4:\n\t\t\tgl.Uniform4iv(u.id, 4, &value[0])\n\t\tdefault:\n\t\t\tpanic(ErrUniformInvalidType)\n\t\t}\n\tcase float32:\n\t\tgl.Uniform1f(u.id, value)\n\tcase []float32:\n\t\tswitch len(value) {\n\t\tcase 1:\n\t\t\tgl.Uniform1fv(u.id, 1, &value[0])\n\t\tcase 2:\n\t\t\tgl.Uniform2fv(u.id, 2, &value[0])\n\t\tcase 3:\n\t\t\tgl.Uniform3fv(u.id, 3, &value[0])\n\t\tcase 4:\n\t\t\tgl.Uniform4fv(u.id, 4, &value[0])\n\t\tdefault:\n\t\t\tpanic(ErrUniformInvalidType)\n\t\t}\n\tcase float64:\n\t\tgl.Uniform1d(u.id, value)\n\tcase []float64:\n\t\tswitch len(value) {\n\t\tcase 1:\n\t\t\tgl.Uniform1dv(u.id, 1, &value[0])\n\t\tcase 2:\n\t\t\tgl.Uniform2dv(u.id, 2, &value[0])\n\t\tcase 3:\n\t\t\tgl.Uniform3dv(u.id, 3, &value[0])\n\t\tcase 4:\n\t\t\tgl.Uniform4dv(u.id, 4, &value[0])\n\t\tdefault:\n\t\t\tpanic(ErrUniformInvalidType)\n\t\t}\n\tcase mgl32.Mat2:\n\t\tgl.UniformMatrix2fv(u.id, 1, false, &value[0])\n\tcase mgl32.Mat3:\n\t\tgl.UniformMatrix3fv(u.id, 1, false, &value[0])\n\tcase mgl32.Mat4:\n\t\tgl.UniformMatrix4fv(u.id, 1, false, &value[0])\n\tcase *Texture:\n\t\tvalue.Bind(0) //TODO support multiple textures per-shader\n\t\terr := value.SetUniform(u.id)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\tcase color.Color:\n\t\tr, g, b, a := toGLColor(value)\n\t\tgl.Uniform4f(u.id, r, g, b, a)\n\tdefault:\n\t\tpanic(ErrUniformInvalidType)\n\t}\n}",
"func ProgramUniform2uiv(program uint32, location int32, count int32, value *uint32) {\n C.glowProgramUniform2uiv(gpProgramUniform2uiv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func BindAttribLocation(program uint32, index uint32, name *int8) {\n C.glowBindAttribLocation(gpBindAttribLocation, (C.GLuint)(program), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func ProgramUniform4fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform4fv(gpProgramUniform4fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2f(program uint32, location int32, v0 float32, v1 float32) {\n C.glowProgramUniform2f(gpProgramUniform2f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1))\n}",
"func ProgramUniform1fv(program uint32, location int32, count int32, value *float32) {\n C.glowProgramUniform1fv(gpProgramUniform1fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func GetActiveUniformsiv(program uint32, uniformCount int32, uniformIndices *uint32, pname uint32, params *int32) {\n C.glowGetActiveUniformsiv(gpGetActiveUniformsiv, (C.GLuint)(program), (C.GLsizei)(uniformCount), (*C.GLuint)(unsafe.Pointer(uniformIndices)), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func ProgramUniform2fv(program uint32, location int32, count int32, value *float32) {\n\tC.glowProgramUniform2fv(gpProgramUniform2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2fv(program uint32, location int32, count int32, value *float32) {\n\tC.glowProgramUniform2fv(gpProgramUniform2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniformMatrix4x2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix4x2fv(gpProgramUniformMatrix4x2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func (native *OpenGL) BindFragDataLocation(program uint32, color uint32, name string) {\n\tgl.BindFragDataLocation(program, color, gl.Str(name+\"\\x00\"))\n}",
"func Uniform2uiv(location int32, count int32, value *uint32) {\n C.glowUniform2uiv(gpUniform2uiv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2i(program uint32, location int32, v0 int32, v1 int32) {\n C.glowProgramUniform2i(gpProgramUniform2i, (C.GLuint)(program), (C.GLint)(location), (C.GLint)(v0), (C.GLint)(v1))\n}",
"func Uniform2iv(location int32, count int32, value *int32) {\n C.glowUniform2iv(gpUniform2iv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func (s *Shader) setUniformMatrix(name string, value *mgl32.Mat4) {\n location:=gl.GetUniformLocation(s.idPrograma, gl.Str(name + \"\\x00\"))\n if location != -1 { // Si existe ese nombre de variable\n\n bb := new([16]float32) // Creamos un buffer de floats\n for i:=0; i<4; i++{\n for j:=0; j<4; j++ {\n bb[j+i*4] = float32(value.At(i,j))\n }\n }\n gl.UniformMatrix4fv(location, 1, false, &bb[0]) // Enviar a shader matriz PROJECTION * SCALE\n }\n}",
"func ProgramUniform3uiv(program uint32, location int32, count int32, value *uint32) {\n C.glowProgramUniform3uiv(gpProgramUniform3uiv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform3iv(program uint32, location int32, count int32, value *int32) {\n C.glowProgramUniform3iv(gpProgramUniform3iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func (native *OpenGL) GLBindFragDataLocation(program uint32, color uint32, name *uint8) {\n\tgl.BindFragDataLocation(program, color, name)\n}",
"func Uniform3fv(location int32, count int32, value *float32) {\n C.glowUniform3fv(gpUniform3fv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniformMatrix4x3fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix4x3fv(gpProgramUniformMatrix4x3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniformMatrix3x2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix3x2fv(gpProgramUniformMatrix3x2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniformMatrix2x3fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix2x3fv(gpProgramUniformMatrix2x3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func GetUniformfv(program uint32, location int32, params *float32) {\n C.glowGetUniformfv(gpGetUniformfv, (C.GLuint)(program), (C.GLint)(location), (*C.GLfloat)(unsafe.Pointer(params)))\n}",
"func ShaderProgramFill(r, g, b, a byte) *shaderir.Program {\n\tir, err := graphics.CompileShader([]byte(fmt.Sprintf(`//kage:unit pixels\n\npackage main\n\nfunc Fragment(position vec4, texCoord vec2, color vec4) vec4 {\n\treturn vec4(%0.9f, %0.9f, %0.9f, %0.9f)\n}\n`, float64(r)/0xff, float64(g)/0xff, float64(b)/0xff, float64(a)/0xff)))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn ir\n}",
"func ProgramUniformMatrix2x4fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix2x4fv(gpProgramUniformMatrix2x4fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform2f(location int32, v0 float32, v1 float32) {\n C.glowUniform2f(gpUniform2f, (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1))\n}",
"func ProgramUniform4uiv(program uint32, location int32, count int32, value *uint32) {\n C.glowProgramUniform4uiv(gpProgramUniform4uiv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform4iv(program uint32, location int32, count int32, value *int32) {\n C.glowProgramUniform4iv(gpProgramUniform4iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2uiv(program uint32, location int32, count int32, value *uint32) {\n\tC.glowProgramUniform2uiv(gpProgramUniform2uiv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2uiv(program uint32, location int32, count int32, value *uint32) {\n\tC.glowProgramUniform2uiv(gpProgramUniform2uiv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform4f(program uint32, location int32, v0 float32, v1 float32, v2 float32, v3 float32) {\n C.glowProgramUniform4f(gpProgramUniform4f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1), (C.GLfloat)(v2), (C.GLfloat)(v3))\n}",
"func UniformMatrix2fv(location int32, count int32, transpose bool, value *float32) {\n C.glowUniformMatrix2fv(gpUniformMatrix2fv, (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2iv(program uint32, location int32, count int32, value *int32) {\n\tC.glowProgramUniform2iv(gpProgramUniform2iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2iv(program uint32, location int32, count int32, value *int32) {\n\tC.glowProgramUniform2iv(gpProgramUniform2iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform2ui(program uint32, location int32, v0 uint32, v1 uint32) {\n C.glowProgramUniform2ui(gpProgramUniform2ui, (C.GLuint)(program), (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1))\n}",
"func ProgramUniform1uiv(program uint32, location int32, count int32, value *uint32) {\n C.glowProgramUniform1uiv(gpProgramUniform1uiv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func Uniform2ui(location int32, v0 uint32, v1 uint32) {\n C.glowUniform2ui(gpUniform2ui, (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1))\n}",
"func ProgramUniform2fv(program uint32, location int32, count int32, value *float32) {\n\tsyscall.Syscall6(gpProgramUniform2fv, 4, uintptr(program), uintptr(location), uintptr(count), uintptr(unsafe.Pointer(value)), 0, 0)\n}",
"func ProgramUniformMatrix3x4fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix3x4fv(gpProgramUniformMatrix3x4fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform4fv(location int32, count int32, value *float32) {\n C.glowUniform4fv(gpUniform4fv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform2i(location int32, v0 int32, v1 int32) {\n C.glowUniform2i(gpUniform2i, (C.GLint)(location), (C.GLint)(v0), (C.GLint)(v1))\n}",
"func Uniform3uiv(location int32, count int32, value *uint32) {\n C.glowUniform3uiv(gpUniform3uiv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform3f(program uint32, location int32, v0 float32, v1 float32, v2 float32) {\n C.glowProgramUniform3f(gpProgramUniform3f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1), (C.GLfloat)(v2))\n}",
"func BindAttribLocation(program uint32, index uint32, name *uint8) {\n\tC.glowBindAttribLocation(gpBindAttribLocation, (C.GLuint)(program), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindAttribLocation(program uint32, index uint32, name *uint8) {\n\tC.glowBindAttribLocation(gpBindAttribLocation, (C.GLuint)(program), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func GetActiveUniformsiv(program uint32, uniformCount int32, uniformIndices *uint32, pname uint32, params *int32) {\n\tC.glowGetActiveUniformsiv(gpGetActiveUniformsiv, (C.GLuint)(program), (C.GLsizei)(uniformCount), (*C.GLuint)(unsafe.Pointer(uniformIndices)), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func GetActiveUniformsiv(program uint32, uniformCount int32, uniformIndices *uint32, pname uint32, params *int32) {\n\tC.glowGetActiveUniformsiv(gpGetActiveUniformsiv, (C.GLuint)(program), (C.GLsizei)(uniformCount), (*C.GLuint)(unsafe.Pointer(uniformIndices)), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func ProgramUniformMatrix3fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix3fv(gpProgramUniformMatrix3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform1fv(location int32, count int32, value *float32) {\n C.glowUniform1fv(gpUniform1fv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform3iv(location int32, count int32, value *int32) {\n C.glowUniform3iv(gpUniform3iv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func ProgramUniform1iv(program uint32, location int32, count int32, value *int32) {\n C.glowProgramUniform1iv(gpProgramUniform1iv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLint)(unsafe.Pointer(value)))\n}",
"func Uniform3ui(location int32, v0 uint32, v1 uint32, v2 uint32) {\n C.glowUniform3ui(gpUniform3ui, (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1), (C.GLuint)(v2))\n}",
"func Uniform2fv(location int32, count int32, value *float32) {\n\tC.glowUniform2fv(gpUniform2fv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform2fv(location int32, count int32, value *float32) {\n\tC.glowUniform2fv(gpUniform2fv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform2uiv(location int32, count int32, value *uint32) {\n\tC.glowUniform2uiv(gpUniform2uiv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func Uniform2uiv(location int32, count int32, value *uint32) {\n\tC.glowUniform2uiv(gpUniform2uiv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func Uniform1i(location int32, v0 int32) {\n C.glowUniform1i(gpUniform1i, (C.GLint)(location), (C.GLint)(v0))\n}",
"func Uniform3i(location int32, v0 int32, v1 int32, v2 int32) {\n C.glowUniform3i(gpUniform3i, (C.GLint)(location), (C.GLint)(v0), (C.GLint)(v1), (C.GLint)(v2))\n}",
"func BindVertexArray(array uint32) {\n C.glowBindVertexArray(gpBindVertexArray, (C.GLuint)(array))\n}",
"func Uniform1f(location int32, v0 float32) {\n C.glowUniform1f(gpUniform1f, (C.GLint)(location), (C.GLfloat)(v0))\n}",
"func ProgramUniform4i(program uint32, location int32, v0 int32, v1 int32, v2 int32, v3 int32) {\n C.glowProgramUniform4i(gpProgramUniform4i, (C.GLuint)(program), (C.GLint)(location), (C.GLint)(v0), (C.GLint)(v1), (C.GLint)(v2), (C.GLint)(v3))\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n C.glowVertexAttribBinding(gpVertexAttribBinding, (C.GLuint)(attribindex), (C.GLuint)(bindingindex))\n}",
"func (uni *Uniform3fv) SetColor(idx int, color *math32.Color) {\n\n\tpos := idx * 3\n\tuni.v[pos] = color.R\n\tuni.v[pos+1] = color.G\n\tuni.v[pos+2] = color.B\n}",
"func ProgramUniform3i(program uint32, location int32, v0 int32, v1 int32, v2 int32) {\n C.glowProgramUniform3i(gpProgramUniform3i, (C.GLuint)(program), (C.GLint)(location), (C.GLint)(v0), (C.GLint)(v1), (C.GLint)(v2))\n}",
"func Uniform4uiv(location int32, count int32, value *uint32) {\n C.glowUniform4uiv(gpUniform4uiv, (C.GLint)(location), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(value)))\n}",
"func ProgramUniformMatrix4fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n C.glowProgramUniformMatrix4fv(gpProgramUniformMatrix4fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func Uniform3f(location int32, v0 float32, v1 float32, v2 float32) {\n C.glowUniform3f(gpUniform3f, (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1), (C.GLfloat)(v2))\n}",
"func ProgramUniform3ui(program uint32, location int32, v0 uint32, v1 uint32, v2 uint32) {\n C.glowProgramUniform3ui(gpProgramUniform3ui, (C.GLuint)(program), (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1), (C.GLuint)(v2))\n}",
"func ProgramUniform1f(program uint32, location int32, v0 float32) {\n C.glowProgramUniform1f(gpProgramUniform1f, (C.GLuint)(program), (C.GLint)(location), (C.GLfloat)(v0))\n}",
"func VBLENDPD(i, mxy, xy, xy1 operand.Op) { ctx.VBLENDPD(i, mxy, xy, xy1) }",
"func ProgramUniform1i(program uint32, location int32, v0 int32) {\n C.glowProgramUniform1i(gpProgramUniform1i, (C.GLuint)(program), (C.GLint)(location), (C.GLint)(v0))\n}",
"func AttachShader(program uint32, shader uint32) {\n C.glowAttachShader(gpAttachShader, (C.GLuint)(program), (C.GLuint)(shader))\n}",
"func Uniform4ui(location int32, v0 uint32, v1 uint32, v2 uint32, v3 uint32) {\n C.glowUniform4ui(gpUniform4ui, (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1), (C.GLuint)(v2), (C.GLuint)(v3))\n}",
"func BindAttribLocation(program Program, index uint32, name string) {\n\tgl.BindAttribLocation(uint32(program), index, gl.Str(name+\"\\x00\"))\n}",
"func ProgramUniform2uiv(program uint32, location int32, count int32, value *uint32) {\n\tsyscall.Syscall6(gpProgramUniform2uiv, 4, uintptr(program), uintptr(location), uintptr(count), uintptr(unsafe.Pointer(value)), 0, 0)\n}",
"func Uniform4f(location int32, v0 float32, v1 float32, v2 float32, v3 float32) {\n C.glowUniform4f(gpUniform4f, (C.GLint)(location), (C.GLfloat)(v0), (C.GLfloat)(v1), (C.GLfloat)(v2), (C.GLfloat)(v3))\n}",
"func ProgramUniformMatrix2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n\tC.glowProgramUniformMatrix2fv(gpProgramUniformMatrix2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniformMatrix2fv(program uint32, location int32, count int32, transpose bool, value *float32) {\n\tC.glowProgramUniformMatrix2fv(gpProgramUniformMatrix2fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (C.GLboolean)(boolToInt(transpose)), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func PassThrough(token float32) {\n C.glowPassThrough(gpPassThrough, (C.GLfloat)(token))\n}",
"func GetShaderiv(shader uint32, pname uint32, params *int32) {\n C.glowGetShaderiv(gpGetShaderiv, (C.GLuint)(shader), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func TexParameterfv(target, pname Enum, params []float32) {\n\tgl.TexParameterfv(uint32(target), uint32(pname), ¶ms[0])\n}",
"func GetUniformiv(program uint32, location int32, params *int32) {\n C.glowGetUniformiv(gpGetUniformiv, (C.GLuint)(program), (C.GLint)(location), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func Uniform4i(location int32, v0 int32, v1 int32, v2 int32, v3 int32) {\n C.glowUniform4i(gpUniform4i, (C.GLint)(location), (C.GLint)(v0), (C.GLint)(v1), (C.GLint)(v2), (C.GLint)(v3))\n}",
"func ProgramUniform4ui(program uint32, location int32, v0 uint32, v1 uint32, v2 uint32, v3 uint32) {\n C.glowProgramUniform4ui(gpProgramUniform4ui, (C.GLuint)(program), (C.GLint)(location), (C.GLuint)(v0), (C.GLuint)(v1), (C.GLuint)(v2), (C.GLuint)(v3))\n}",
"func GetActiveUniformsiv(program uint32, uniformCount int32, uniformIndices *uint32, pname uint32, params *int32) {\n\tsyscall.Syscall6(gpGetActiveUniformsiv, 5, uintptr(program), uintptr(uniformCount), uintptr(unsafe.Pointer(uniformIndices)), uintptr(pname), uintptr(unsafe.Pointer(params)), 0)\n}",
"func ProgramUniform3fv(program uint32, location int32, count int32, value *float32) {\n\tC.glowProgramUniform3fv(gpProgramUniform3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func ProgramUniform3fv(program uint32, location int32, count int32, value *float32) {\n\tC.glowProgramUniform3fv(gpProgramUniform3fv, (C.GLuint)(program), (C.GLint)(location), (C.GLsizei)(count), (*C.GLfloat)(unsafe.Pointer(value)))\n}",
"func VPBLENDVB(xy, mxy, xy1, xy2 operand.Op) { ctx.VPBLENDVB(xy, mxy, xy1, xy2) }"
] | [
"0.6564883",
"0.6111448",
"0.5995493",
"0.59532267",
"0.59309816",
"0.56320804",
"0.56115055",
"0.5607158",
"0.5607158",
"0.5533097",
"0.5522427",
"0.55083036",
"0.5505781",
"0.5493252",
"0.54778075",
"0.5424576",
"0.54136854",
"0.5412841",
"0.54081506",
"0.54081506",
"0.5405872",
"0.53600186",
"0.5339069",
"0.5330449",
"0.53103495",
"0.52853715",
"0.5284444",
"0.52802366",
"0.52707034",
"0.52634996",
"0.52564806",
"0.5256085",
"0.5249842",
"0.52497494",
"0.52441746",
"0.524235",
"0.52398694",
"0.52318555",
"0.5207568",
"0.51715195",
"0.51715195",
"0.5167137",
"0.5162593",
"0.51581424",
"0.51581424",
"0.5158098",
"0.515739",
"0.5151043",
"0.51505184",
"0.5148012",
"0.51476085",
"0.51447725",
"0.5141567",
"0.51351434",
"0.512875",
"0.512875",
"0.5127322",
"0.5127322",
"0.51200867",
"0.51185757",
"0.5097607",
"0.5086248",
"0.50703",
"0.5068876",
"0.5068876",
"0.5061439",
"0.5061439",
"0.5046528",
"0.5045693",
"0.5043295",
"0.50314903",
"0.5029781",
"0.50294703",
"0.50228906",
"0.50192285",
"0.50159776",
"0.50151753",
"0.5012167",
"0.50072074",
"0.5006123",
"0.49935645",
"0.49927232",
"0.49899182",
"0.49883407",
"0.4987328",
"0.49754617",
"0.4975034",
"0.49745277",
"0.49745277",
"0.49724028",
"0.4967859",
"0.4962221",
"0.49580166",
"0.49553287",
"0.49503133",
"0.4944864",
"0.4937621",
"0.4937621",
"0.49300465"
] | 0.60473156 | 3 |
bind a framebuffer to a framebuffer target | func BindFramebuffer(target uint32, framebuffer uint32) {
C.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindFramebuffer(target uint32, framebuffer uint32) {\n C.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n\tsyscall.Syscall(gpBindFramebuffer, 2, uintptr(target), uintptr(framebuffer), 0)\n}",
"func (debugging *debuggingOpenGL) BindFramebuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindFramebuffer\", target, buffer)\n\tdebugging.gl.BindFramebuffer(target, buffer)\n\tdebugging.recordExit(\"BindFramebuffer\")\n}",
"func (native *OpenGL) BindFramebuffer(target, buffer uint32) {\n\tgl.BindFramebuffer(target, buffer)\n}",
"func BindFramebuffer(target Enum, fb Framebuffer) {\n\tgl.BindFramebuffer(uint32(target), fb.Value)\n}",
"func BindFramebuffer(target Enum, framebuffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcframebuffer, _ := (C.GLuint)(framebuffer), cgoAllocsUnknown\n\tC.glBindFramebuffer(ctarget, cframebuffer)\n}",
"func (native *OpenGL) BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tgl.BlitFramebuffer(srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter)\n}",
"func BindBuffer(target uint32, buffer uint32) {\n C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n C.glowBindRenderbuffer(gpBindRenderbuffer, (C.GLenum)(target), (C.GLuint)(renderbuffer))\n}",
"func BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tsyscall.Syscall12(gpBlitFramebuffer, 10, uintptr(srcX0), uintptr(srcY0), uintptr(srcX1), uintptr(srcY1), uintptr(dstX0), uintptr(dstY0), uintptr(dstX1), uintptr(dstY1), uintptr(mask), uintptr(filter), 0, 0)\n}",
"func (w *windowImpl) bindBackBuffer() {\n\t// w.mu.Lock()\n\t// size := w.Sz\n\t// w.mu.Unlock()\n\t//\n\tw.backBufferBound = true\n\t// gl.BindFramebuffer(gl.FRAMEBUFFER, 0)\n\t// gl.Viewport(0, 0, int32(size.X), int32(size.Y))\n}",
"func BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n C.glowBlitFramebuffer(gpBlitFramebuffer, (C.GLint)(srcX0), (C.GLint)(srcY0), (C.GLint)(srcX1), (C.GLint)(srcY1), (C.GLint)(dstX0), (C.GLint)(dstY0), (C.GLint)(dstX1), (C.GLint)(dstY1), (C.GLbitfield)(mask), (C.GLenum)(filter))\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n\tsyscall.Syscall(gpBindRenderbuffer, 2, uintptr(target), uintptr(renderbuffer), 0)\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBuffer, 2, uintptr(target), uintptr(buffer), 0)\n}",
"func BindBuffer(target Enum, b Buffer) {\n\tgl.BindBuffer(uint32(target), b.Value)\n}",
"func (debugging *debuggingOpenGL) BindBuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindBuffer\", target, buffer)\n\tdebugging.gl.BindBuffer(target, buffer)\n\tdebugging.recordExit(\"BindBuffer\")\n}",
"func (buffer Buffer) Bind(target gl.Enum) {\n\tgl.BindBuffer(gl.Enum(target), gl.Uint(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func (gl *WebGL) BindBuffer(target GLEnum, buffer WebGLBuffer) {\n\tgl.context.Call(\"bindBuffer\", target, buffer)\n}",
"func BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tC.glowBlitFramebuffer(gpBlitFramebuffer, (C.GLint)(srcX0), (C.GLint)(srcY0), (C.GLint)(srcX1), (C.GLint)(srcY1), (C.GLint)(dstX0), (C.GLint)(dstY0), (C.GLint)(dstX1), (C.GLint)(dstY1), (C.GLbitfield)(mask), (C.GLenum)(filter))\n}",
"func BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tC.glowBlitFramebuffer(gpBlitFramebuffer, (C.GLint)(srcX0), (C.GLint)(srcY0), (C.GLint)(srcX1), (C.GLint)(srcY1), (C.GLint)(dstX0), (C.GLint)(dstY0), (C.GLint)(dstX1), (C.GLint)(dstY1), (C.GLbitfield)(mask), (C.GLenum)(filter))\n}",
"func (native *OpenGL) BindBuffer(target uint32, buffer uint32) {\n\tgl.BindBuffer(target, buffer)\n}",
"func initFramebuffer(width, height int) {\n\tlog.Printf(\"[Video]: Initializing HW render (%v x %v).\\n\", width, height)\n\n\tgl.GenFramebuffers(1, &fboID)\n\tgl.BindFramebuffer(gl.FRAMEBUFFER, fboID)\n\n\t//gl.GenTextures(1, &video.texID)\n\tgl.BindTexture(gl.TEXTURE_2D, texID)\n\tgl.TexStorage2D(gl.TEXTURE_2D, 1, gl.RGBA8, int32(width), int32(height))\n\n\tgl.FramebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texID, 0)\n\n\thw := state.Global.Core.HWRenderCallback\n\n\tgl.BindRenderbuffer(gl.RENDERBUFFER, 0)\n\n\tif gl.CheckFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE {\n\t\tlog.Fatalln(\"[Video] Framebuffer is not complete.\")\n\t}\n\n\tgl.ClearColor(0, 0, 0, 1)\n\tif hw.Depth && hw.Stencil {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT | gl.STENCIL_BUFFER_BIT)\n\t} else if hw.Depth {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)\n\t} else {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT)\n\t}\n\n\tgl.BindFramebuffer(gl.FRAMEBUFFER, 0)\n}",
"func BindRenderbuffer(target GLEnum, renderbuffer Renderbuffer) {\n\tgl.BindRenderbuffer(uint32(target), uint32(renderbuffer))\n}",
"func BindRenderbuffer(target Enum, rb Renderbuffer) {\n\tgl.BindRenderbuffer(uint32(target), rb.Value)\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBufferBase, 3, uintptr(target), uintptr(index), uintptr(buffer))\n}",
"func FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n C.glowFramebufferRenderbuffer(gpFramebufferRenderbuffer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLenum)(renderbuffertarget), (C.GLuint)(renderbuffer))\n}",
"func NewFramebuffer(w, h int) *Framebuffer {\n\treturn &Framebuffer{\n\t\tfb: image.NewRGBA(image.Rectangle{image.Point{0, 0}, image.Point{w, h}}),\n\t}\n}",
"func NamedFramebufferDrawBuffer(framebuffer uint32, buf uint32) {\n\tsyscall.Syscall(gpNamedFramebufferDrawBuffer, 2, uintptr(framebuffer), uintptr(buf), 0)\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n C.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n\tC.glowBindRenderbuffer(gpBindRenderbuffer, (C.GLenum)(target), (C.GLuint)(renderbuffer))\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n\tC.glowBindRenderbuffer(gpBindRenderbuffer, (C.GLenum)(target), (C.GLuint)(renderbuffer))\n}",
"func BlitNamedFramebuffer(readFramebuffer uint32, drawFramebuffer uint32, srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tsyscall.Syscall12(gpBlitNamedFramebuffer, 12, uintptr(readFramebuffer), uintptr(drawFramebuffer), uintptr(srcX0), uintptr(srcY0), uintptr(srcX1), uintptr(srcY1), uintptr(dstX0), uintptr(dstY0), uintptr(dstX1), uintptr(dstY1), uintptr(mask), uintptr(filter))\n}",
"func newFramebufferFromTexture(context *context, texture textureNative, width, height int) (*framebuffer, error) {\n\tnative, err := context.newFramebuffer(texture)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &framebuffer{\n\t\tnative: native,\n\t\twidth: width,\n\t\theight: height,\n\t}, nil\n}",
"func newScreenFramebuffer(context *context, width, height int) *framebuffer {\n\treturn &framebuffer{\n\t\tnative: context.getScreenFramebuffer(),\n\t\twidth: width,\n\t\theight: height,\n\t}\n}",
"func (debugging *debuggingOpenGL) BindRenderbuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindRenderbuffer\", target, buffer)\n\tdebugging.gl.BindRenderbuffer(target, buffer)\n\tdebugging.recordExit(\"BindRenderbuffer\")\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n C.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func BlitNamedFramebuffer(readFramebuffer uint32, drawFramebuffer uint32, srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tC.glowBlitNamedFramebuffer(gpBlitNamedFramebuffer, (C.GLuint)(readFramebuffer), (C.GLuint)(drawFramebuffer), (C.GLint)(srcX0), (C.GLint)(srcY0), (C.GLint)(srcX1), (C.GLint)(srcY1), (C.GLint)(dstX0), (C.GLint)(dstY0), (C.GLint)(dstX1), (C.GLint)(dstY1), (C.GLbitfield)(mask), (C.GLenum)(filter))\n}",
"func BlitNamedFramebuffer(readFramebuffer uint32, drawFramebuffer uint32, srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tC.glowBlitNamedFramebuffer(gpBlitNamedFramebuffer, (C.GLuint)(readFramebuffer), (C.GLuint)(drawFramebuffer), (C.GLint)(srcX0), (C.GLint)(srcY0), (C.GLint)(srcX1), (C.GLint)(srcY1), (C.GLint)(dstX0), (C.GLint)(dstY0), (C.GLint)(dstX1), (C.GLint)(dstY1), (C.GLbitfield)(mask), (C.GLenum)(filter))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n C.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func NamedFramebufferDrawBuffer(framebuffer uint32, buf uint32) {\n\tC.glowNamedFramebufferDrawBuffer(gpNamedFramebufferDrawBuffer, (C.GLuint)(framebuffer), (C.GLenum)(buf))\n}",
"func NamedFramebufferDrawBuffer(framebuffer uint32, buf uint32) {\n\tC.glowNamedFramebufferDrawBuffer(gpNamedFramebufferDrawBuffer, (C.GLuint)(framebuffer), (C.GLenum)(buf))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpBindBufferRange, 5, uintptr(target), uintptr(index), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func BindRenderbuffer(target Enum, renderbuffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcrenderbuffer, _ := (C.GLuint)(renderbuffer), cgoAllocsUnknown\n\tC.glBindRenderbuffer(ctarget, crenderbuffer)\n}",
"func (native *OpenGL) BindRenderbuffer(target uint32, renderbuffer uint32) {\n\tgl.BindRenderbuffer(target, renderbuffer)\n}",
"func (f *Framebuffer) Renderbuffer(attachment gfx.FramebufferAttachment, buf gfx.Renderbuffer) {\n\tf.useState()\n\tf.ctx.O.Call(\n\t\t\"framebufferTexture2D\",\n\t\tf.ctx.FRAMEBUFFER,\n\t\tf.ctx.Enums[int(attachment)],\n\t\tf.ctx.RENDERBUFFER,\n\t\tbuf.Object().(*js.Object),\n\t\t0,\n\t)\n}",
"func createFB(tex wasm.WebGLTexture) wasm.WebGLFramebuffer {\n\tfb := gl.CreateFramebuffer()\n\tgl.BindFramebuffer(wasm.FRAMEBUFFER, fb)\n\tgl.FramebufferTexture2D(wasm.FRAMEBUFFER, wasm.COLOR_ATTACHMENT0, wasm.TEXTURE_2D, tex, 0)\n\treturn fb\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tsyscall.Syscall6(gpBindBuffersRange, 6, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(sizes)))\n}",
"func BindTexture(target uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTexture, 2, uintptr(target), uintptr(texture), 0)\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tsyscall.Syscall6(gpBindBuffersBase, 4, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), 0, 0)\n}",
"func FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n\tsyscall.Syscall6(gpFramebufferRenderbuffer, 4, uintptr(target), uintptr(attachment), uintptr(renderbuffertarget), uintptr(renderbuffer), 0, 0)\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func FramebufferTexture(target uint32, attachment uint32, texture uint32, level int32) {\n C.glowFramebufferTexture(gpFramebufferTexture, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level))\n}",
"func BindTexture(target uint32, texture uint32) {\n C.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func GetBoundFramebuffer() Framebuffer {\n\tvar b int32\n\tgl.GetIntegerv(FRAMEBUFFER_BINDING, &b)\n\treturn Framebuffer{Value: uint32(b)}\n}",
"func (buffer Buffer) BindBufferRange(target gl.Enum, index uint, offset int, size uint) {\n\tgl.BindBufferRange(gl.Enum(target), gl.Uint(index), gl.Uint(buffer), gl.Intptr(offset), gl.Sizeiptr(size))\n}",
"func BindBuffer(target Enum, buffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcbuffer, _ := (C.GLuint)(buffer), cgoAllocsUnknown\n\tC.glBindBuffer(ctarget, cbuffer)\n}",
"func IsFramebuffer(framebuffer uint32) bool {\n ret := C.glowIsFramebuffer(gpIsFramebuffer, (C.GLuint)(framebuffer))\n return ret == TRUE\n}",
"func (debugging *debuggingOpenGL) FramebufferTexture(target uint32, attachment uint32, texture uint32, level int32) {\n\tdebugging.recordEntry(\"FramebufferTexture\", target, attachment, texture, level)\n\tdebugging.gl.FramebufferTexture(target, attachment, texture, level)\n\tdebugging.recordExit(\"FramebufferTexture\")\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func (buffer Buffer) BindBufferBase(target gl.Enum, index uint) {\n\tgl.BindBufferBase(gl.Enum(target), gl.Uint(index), gl.Uint(buffer))\n}",
"func (debugging *debuggingOpenGL) FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n\tdebugging.recordEntry(\"FramebufferRenderbuffer\", target, attachment, renderbuffertarget, renderbuffer)\n\tdebugging.gl.FramebufferRenderbuffer(target, attachment, renderbuffertarget, renderbuffer)\n\tdebugging.recordExit(\"FramebufferRenderbuffer\")\n}",
"func (debugging *debuggingOpenGL) BindTexture(target uint32, texture uint32) {\n\tdebugging.recordEntry(\"BindTexture\", target, texture)\n\tdebugging.gl.BindTexture(target, texture)\n\tdebugging.recordExit(\"BindTexture\")\n}",
"func (native *OpenGL) FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n\tgl.FramebufferRenderbuffer(target, attachment, renderbuffertarget, renderbuffer)\n}",
"func InvalidateFramebuffer(target uint32, numAttachments int32, attachments *uint32) {\n C.glowInvalidateFramebuffer(gpInvalidateFramebuffer, (C.GLenum)(target), (C.GLsizei)(numAttachments), (*C.GLenum)(unsafe.Pointer(attachments)))\n}",
"func (gl *WebGL) BindTexture(target GLEnum, texture WebGLTexture) {\n\tgl.context.Call(\"bindTexture\", target, texture)\n}",
"func FramebufferRenderbuffer(target, attachment, rbTarget Enum, rb Renderbuffer) {\n\tgl.FramebufferRenderbuffer(uint32(target), uint32(attachment), uint32(rbTarget), rb.Value)\n}",
"func (b *Binding) Set(buf uint32) {\n\tgl.BindBufferBase(gl.SHADER_STORAGE_BUFFER, b.uint32, buf)\n}",
"func FramebufferTextureLayer(target uint32, attachment uint32, texture uint32, level int32, layer int32) {\n C.glowFramebufferTextureLayer(gpFramebufferTextureLayer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(layer))\n}",
"func FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n\tC.glowFramebufferRenderbuffer(gpFramebufferRenderbuffer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLenum)(renderbuffertarget), (C.GLuint)(renderbuffer))\n}",
"func FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n\tC.glowFramebufferRenderbuffer(gpFramebufferRenderbuffer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLenum)(renderbuffertarget), (C.GLuint)(renderbuffer))\n}",
"func BlitContextFramebufferAMD(dstCtx unsafe.Pointer, srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) unsafe.Pointer {\n\tret, _, _ := syscall.Syscall12(gpBlitContextFramebufferAMD, 11, uintptr(dstCtx), uintptr(srcX0), uintptr(srcY0), uintptr(srcX1), uintptr(srcY1), uintptr(dstX0), uintptr(dstY0), uintptr(dstX1), uintptr(dstY1), uintptr(mask), uintptr(filter), 0)\n\treturn (unsafe.Pointer)(ret)\n}",
"func FrameBuffer(m rv.RenderModel) {\n\tframebuffer(m)\n}",
"func (t *Texture2D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_2D, t.ID)\n}",
"func NewFrameBuffer(width int, height int, c color.Color) FrameBuffer {\n\timg, _ := ebiten.NewImage(width, height, ebiten.FilterDefault)\n\tfb := FrameBuffer{width, height, img}\n\treturn fb\n}",
"func InvalidateFramebuffer(target uint32, numAttachments int32, attachments *uint32) {\n\tsyscall.Syscall(gpInvalidateFramebuffer, 3, uintptr(target), uintptr(numAttachments), uintptr(unsafe.Pointer(attachments)))\n}",
"func BindTexture(target GLEnum, texture Texture) {\n\tgl.BindTexture(uint32(target), uint32(texture))\n}",
"func NamedFramebufferDrawBuffers(framebuffer uint32, n int32, bufs *uint32) {\n\tsyscall.Syscall(gpNamedFramebufferDrawBuffers, 3, uintptr(framebuffer), uintptr(n), uintptr(unsafe.Pointer(bufs)))\n}",
"func NamedFramebufferReadBuffer(framebuffer uint32, src uint32) {\n\tsyscall.Syscall(gpNamedFramebufferReadBuffer, 2, uintptr(framebuffer), uintptr(src), 0)\n}",
"func IsFramebuffer(framebuffer uint32) bool {\n\tret := C.glowIsFramebuffer(gpIsFramebuffer, (C.GLuint)(framebuffer))\n\treturn ret == TRUE\n}",
"func IsFramebuffer(framebuffer uint32) bool {\n\tret := C.glowIsFramebuffer(gpIsFramebuffer, (C.GLuint)(framebuffer))\n\treturn ret == TRUE\n}",
"func (native *OpenGL) BindTexture(target uint32, texture uint32) {\n\tgl.BindTexture(target, texture)\n}",
"func FramebufferTextureLayer(target uint32, attachment uint32, texture uint32, level int32, layer int32) {\n\tsyscall.Syscall6(gpFramebufferTextureLayer, 5, uintptr(target), uintptr(attachment), uintptr(texture), uintptr(level), uintptr(layer), 0)\n}",
"func BindTexture(target Enum, t Texture) {\n\tgl.BindTexture(uint32(target), t.Value)\n}",
"func FramebufferTexture(target uint32, attachment uint32, texture uint32, level int32) {\n\tC.glowFramebufferTexture(gpFramebufferTexture, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level))\n}",
"func FramebufferTexture(target uint32, attachment uint32, texture uint32, level int32) {\n\tC.glowFramebufferTexture(gpFramebufferTexture, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level))\n}",
"func NamedFramebufferDrawBuffers(framebuffer uint32, n int32, bufs *uint32) {\n\tC.glowNamedFramebufferDrawBuffers(gpNamedFramebufferDrawBuffers, (C.GLuint)(framebuffer), (C.GLsizei)(n), (*C.GLenum)(unsafe.Pointer(bufs)))\n}",
"func NamedFramebufferDrawBuffers(framebuffer uint32, n int32, bufs *uint32) {\n\tC.glowNamedFramebufferDrawBuffers(gpNamedFramebufferDrawBuffers, (C.GLuint)(framebuffer), (C.GLsizei)(n), (*C.GLenum)(unsafe.Pointer(bufs)))\n}",
"func FramebufferTexture2D(target uint32, attachment uint32, textarget uint32, texture uint32, level int32) {\n\tsyscall.Syscall6(gpFramebufferTexture2D, 5, uintptr(target), uintptr(attachment), uintptr(textarget), uintptr(texture), uintptr(level), 0)\n}",
"func InvalidateSubFramebuffer(target uint32, numAttachments int32, attachments *uint32, x int32, y int32, width int32, height int32) {\n C.glowInvalidateSubFramebuffer(gpInvalidateSubFramebuffer, (C.GLenum)(target), (C.GLsizei)(numAttachments), (*C.GLenum)(unsafe.Pointer(attachments)), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func IsFramebuffer(framebuffer uint32) bool {\n\tret, _, _ := syscall.Syscall(gpIsFramebuffer, 1, uintptr(framebuffer), 0, 0)\n\treturn ret != 0\n}"
] | [
"0.83441293",
"0.80777943",
"0.78251433",
"0.7756177",
"0.7617672",
"0.7587925",
"0.67025834",
"0.66922104",
"0.6677471",
"0.6654382",
"0.6506856",
"0.64703983",
"0.6437566",
"0.64246845",
"0.6412704",
"0.6367643",
"0.635242",
"0.63509125",
"0.63509125",
"0.6326969",
"0.63087565",
"0.6295861",
"0.6295861",
"0.6282172",
"0.62528044",
"0.62354475",
"0.6166203",
"0.61294055",
"0.61290556",
"0.6117561",
"0.6108479",
"0.609775",
"0.609775",
"0.6094487",
"0.6092373",
"0.6092373",
"0.60917157",
"0.6084253",
"0.59916514",
"0.59902334",
"0.5976297",
"0.5960117",
"0.5960117",
"0.5937684",
"0.5937684",
"0.59212416",
"0.5896087",
"0.5896087",
"0.5876962",
"0.58619386",
"0.5854933",
"0.5853674",
"0.5843373",
"0.58279973",
"0.58065337",
"0.5793833",
"0.5781992",
"0.57760733",
"0.57760733",
"0.57702845",
"0.57658786",
"0.57448626",
"0.57329583",
"0.57280105",
"0.5711334",
"0.5710093",
"0.5679382",
"0.5679382",
"0.5666335",
"0.5648807",
"0.5627482",
"0.56230277",
"0.56143767",
"0.55866987",
"0.5586572",
"0.55828017",
"0.55651397",
"0.55293816",
"0.55293816",
"0.55267364",
"0.55229926",
"0.54974085",
"0.5477855",
"0.5464344",
"0.5461694",
"0.546063",
"0.54503363",
"0.5422933",
"0.5422933",
"0.5413622",
"0.5395694",
"0.5382533",
"0.5376821",
"0.5376821",
"0.53763515",
"0.53763515",
"0.537495",
"0.5356795",
"0.5355656"
] | 0.7746551 | 5 |
bind a level of a texture to an image unit | func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {
C.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n C.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTextureUnit, 2, uintptr(unit), uintptr(texture), 0)\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tsyscall.Syscall9(gpBindImageTexture, 7, uintptr(unit), uintptr(texture), uintptr(level), boolToUintptr(layered), uintptr(layer), uintptr(access), uintptr(format), 0, 0)\n}",
"func BindTexture(target uint32, texture uint32) {\n C.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func BindTexture(target Enum, t Texture) {\n\tgl.BindTexture(uint32(target), t.Value)\n}",
"func BindSampler(unit uint32, sampler uint32) {\n C.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n C.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (t *Texture2D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_2D, t.ID)\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n C.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func (self *TileSprite) SetTextureA(member *Texture) {\n self.Object.Set(\"texture\", member)\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n C.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func TexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage2D(gpTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func BindTexture(target uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTexture, 2, uintptr(target), uintptr(texture), 0)\n}",
"func FramebufferTexture(target uint32, attachment uint32, texture uint32, level int32) {\n C.glowFramebufferTexture(gpFramebufferTexture, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level))\n}",
"func TexImage2D(target uint32, level int32, internalformat int32, width int32, height int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexImage2D(gpTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func (self *TileSprite) SetTexture(texture *Texture) {\n self.Object.Call(\"setTexture\", texture)\n}",
"func FramebufferTextureLayer(target uint32, attachment uint32, texture uint32, level int32, layer int32) {\n C.glowFramebufferTextureLayer(gpFramebufferTextureLayer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(layer))\n}",
"func BindTexture(target uint32, texture uint32) {\n\tC.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func BindTexture(target uint32, texture uint32) {\n\tC.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func (bm Blendmap) Texture() *gl.Texture {\n\treturn bm.Map.id\n}",
"func (debugging *debuggingOpenGL) BindTexture(target uint32, texture uint32) {\n\tdebugging.recordEntry(\"BindTexture\", target, texture)\n\tdebugging.gl.BindTexture(target, texture)\n\tdebugging.recordExit(\"BindTexture\")\n}",
"func TexSubImage1D(target uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage1D(gpTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func (self *TileSprite) SetTextureI(args ...interface{}) {\n self.Object.Call(\"setTexture\", args)\n}",
"func BindTexture(target GLEnum, texture Texture) {\n\tgl.BindTexture(uint32(target), uint32(texture))\n}",
"func BindTexture(target Enum, texture Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tctexture, _ := (C.GLuint)(texture), cgoAllocsUnknown\n\tC.glBindTexture(ctarget, ctexture)\n}",
"func (gl *WebGL) BindTexture(target GLEnum, texture WebGLTexture) {\n\tgl.context.Call(\"bindTexture\", target, texture)\n}",
"func (native *OpenGL) BindTexture(target uint32, texture uint32) {\n\tgl.BindTexture(target, texture)\n}",
"func (adapter *LevelAdapter) LevelTextureID(index int) (id int) {\n\tids := adapter.LevelTextureIDs()\n\tif index < len(ids) {\n\t\tid = ids[index]\n\t} else {\n\t\tid = -1\n\t}\n\n\treturn\n}",
"func TexImage2D(target Enum, level int, width, height int, format Enum, ty Enum, data []byte) {\n\tp := unsafe.Pointer(nil)\n\tif len(data) > 0 {\n\t\tp = gl.Ptr(&data[0])\n\t}\n\tgl.TexImage2D(uint32(target), int32(level), int32(format), int32(width), int32(height), 0, uint32(format), uint32(ty), p)\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tC.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tC.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func (tex Texture) Sub(ctx gl.Context, lvl int, width int, height int, data []byte) {\n\tctx.TexSubImage2D(gl.TEXTURE_2D, lvl, 0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, data)\n\tif lvl > 0 {\n\t\tctx.GenerateMipmap(gl.TEXTURE_2D)\n\t}\n}",
"func (t Texture3D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_3D, t.id)\n}",
"func TexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage3D(gpTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func CopyTexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, x int32, y int32, width int32, height int32) {\n C.glowCopyTexSubImage2D(gpCopyTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func TextureSubImage2D(texture uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall9(gpTextureSubImage2D, 9, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(width), uintptr(height), uintptr(format), uintptr(xtype), uintptr(pixels))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func (self *TileSprite) SetTexture1O(texture *Texture, destroy bool) {\n self.Object.Call(\"setTexture\", texture, destroy)\n}",
"func TextureSubImage2D(texture uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage2D(gpTextureSubImage2D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TextureSubImage2D(texture uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage2D(gpTextureSubImage2D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func CopyTexImage2D(target uint32, level int32, internalformat uint32, x int32, y int32, width int32, height int32, border int32) {\n C.glowCopyTexImage2D(gpCopyTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border))\n}",
"func TexStorage2D(target uint32, levels int32, internalformat uint32, width int32, height int32) {\n C.glowTexStorage2D(gpTexStorage2D, (C.GLenum)(target), (C.GLsizei)(levels), (C.GLenum)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func (self *TileSprite) LoadTexture(key interface{}) {\n self.Object.Call(\"loadTexture\", key)\n}",
"func TexImage2D(target uint32, level int32, internalformat int32, width int32, height int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexImage2D(gpTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexImage2D(target uint32, level int32, internalformat int32, width int32, height int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexImage2D(gpTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindImageTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func ActiveTexture(texture uint32) {\n C.glowActiveTexture(gpActiveTexture, (C.GLenum)(texture))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func GetTextureSubImage(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, bufSize int32, pixels unsafe.Pointer) {\n\tC.glowGetTextureSubImage(gpGetTextureSubImage, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), (C.GLsizei)(bufSize), pixels)\n}",
"func GetTextureSubImage(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, bufSize int32, pixels unsafe.Pointer) {\n\tC.glowGetTextureSubImage(gpGetTextureSubImage, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), (C.GLsizei)(bufSize), pixels)\n}",
"func TextureSubImage1D(texture uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall9(gpTextureSubImage1D, 7, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(width), uintptr(format), uintptr(xtype), uintptr(pixels), 0, 0)\n}",
"func (self *TileSprite) SetTilingTextureA(member *PIXITexture) {\n self.Object.Set(\"tilingTexture\", member)\n}",
"func CopyTexSubImage1D(target uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n C.glowCopyTexSubImage1D(gpCopyTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func TexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexSubImage2D(gpTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexSubImage2D(gpTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexImage1D(target uint32, level int32, internalformat int32, width int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexImage1D(gpTexImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func (self *TileSprite) OnTextureUpdate(event interface{}) {\n self.Object.Call(\"onTextureUpdate\", event)\n}",
"func (self *GameObjectCreator) RenderTexture1O(width int) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", width)}\n}",
"func (c *Context) BindTexture(texture *Texture) {\n\tif texture == nil {\n\t\treturn\n\t}\n\tif c.currentTexture == nil || texture.id != c.currentTexture.id {\n\t\tgl.BindTexture(gl.TEXTURE_2D, texture.id)\n\t\tc.currentTexture = texture\n\t}\n}",
"func TextureSubImage1D(texture uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage1D(gpTextureSubImage1D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TextureSubImage1D(texture uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage1D(gpTextureSubImage1D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func (level *Level) SetTextures(newIds []int) {\n\tblockStore := level.store.Get(res.ResourceID(4000 + level.id*100 + 7))\n\tvar ids [54]uint16\n\ttoCopy := len(ids)\n\n\tif len(newIds) < toCopy {\n\t\ttoCopy = len(newIds)\n\t}\n\tfor index := 0; index < len(ids); index++ {\n\t\tids[index] = uint16(newIds[index])\n\t}\n\n\tbuffer := bytes.NewBuffer(nil)\n\tbinary.Write(buffer, binary.LittleEndian, &ids)\n\tblockStore.SetBlockData(0, buffer.Bytes())\n}",
"func InvalidateTexImage(texture uint32, level int32) {\n C.glowInvalidateTexImage(gpInvalidateTexImage, (C.GLuint)(texture), (C.GLint)(level))\n}",
"func (self *TileSprite) OnTextureUpdateI(args ...interface{}) {\n self.Object.Call(\"onTextureUpdate\", args)\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n\tC.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n\tC.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func TextureSubImage3D(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage3D(gpTextureSubImage3D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TextureSubImage3D(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage3D(gpTextureSubImage3D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TextureSubImage3D(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall12(gpTextureSubImage3D, 11, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(zoffset), uintptr(width), uintptr(height), uintptr(depth), uintptr(format), uintptr(xtype), uintptr(pixels), 0)\n}",
"func CopyTextureSubImage1D(texture uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tC.glowCopyTextureSubImage1D(gpCopyTextureSubImage1D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func CopyTextureSubImage1D(texture uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tC.glowCopyTextureSubImage1D(gpCopyTextureSubImage1D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func CopyTextureSubImage2D(texture uint32, level int32, xoffset int32, yoffset int32, x int32, y int32, width int32, height int32) {\n\tC.glowCopyTextureSubImage2D(gpCopyTextureSubImage2D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func CopyTextureSubImage2D(texture uint32, level int32, xoffset int32, yoffset int32, x int32, y int32, width int32, height int32) {\n\tC.glowCopyTextureSubImage2D(gpCopyTextureSubImage2D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func (self *TileSprite) Texture() *Texture{\n return &Texture{self.Object.Get(\"texture\")}\n}",
"func updateTextureVbo(data []float32, vbo uint32) {\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferSubData(gl.ARRAY_BUFFER, 0, len(data)*4, gl.Ptr(data))\n\tgl.BindBuffer(gl.ARRAY_BUFFER, 0)\n}",
"func (adapter *LevelAdapter) OnLevelTexturesChanged(callback func()) {\n\tadapter.levelTextures.addObserver(callback)\n}",
"func CopyTextureSubImage3D(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, x int32, y int32, width int32, height int32) {\n\tC.glowCopyTextureSubImage3D(gpCopyTextureSubImage3D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func CopyTextureSubImage3D(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, x int32, y int32, width int32, height int32) {\n\tC.glowCopyTextureSubImage3D(gpCopyTextureSubImage3D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func (self *GameObjectCreator) RenderTexture2O(width int, height int) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", width, height)}\n}",
"func (debugging *debuggingOpenGL) TexImage2D(target uint32, level int32, internalFormat uint32, width int32, height int32,\n\tborder int32, format uint32, xtype uint32, pixels interface{}) {\n\tdebugging.recordEntry(\"TexImage2D\", target, level, internalFormat, width, height, border, format, xtype, pixels)\n\tdebugging.gl.TexImage2D(target, level, internalFormat, width, height, border, format, xtype, pixels)\n\tdebugging.recordExit(\"TexImage2D\")\n}",
"func GetTextureSubImage(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, bufSize int32, pixels unsafe.Pointer) {\n\tsyscall.Syscall12(gpGetTextureSubImage, 12, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(zoffset), uintptr(width), uintptr(height), uintptr(depth), uintptr(format), uintptr(xtype), uintptr(bufSize), uintptr(pixels))\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n\tsyscall.Syscall9(gpTextureView, 8, uintptr(texture), uintptr(target), uintptr(origtexture), uintptr(internalformat), uintptr(minlevel), uintptr(numlevels), uintptr(minlayer), uintptr(numlayers), 0)\n}",
"func TexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexSubImage3D(gpTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexSubImage3D(gpTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexImage2D(target GLEnum, level int32, internalformat GLEnum, width, height, border int32, format, xtype GLEnum, pixels []float32) {\n\tgl.TexImage2D(uint32(target), level, int32(internalformat), width, height, border, uint32(format), uint32(xtype), unsafe.Pointer(&pixels[0]))\n}",
"func TexImage2D(target uint32, level int32, internalformat int32, width int32, height int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall9(gpTexImage2D, 9, uintptr(target), uintptr(level), uintptr(internalformat), uintptr(width), uintptr(height), uintptr(border), uintptr(format), uintptr(xtype), uintptr(pixels))\n}",
"func TexImage3D(target uint32, level int32, internalformat int32, width int32, height int32, depth int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexImage3D(gpTexImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func (self *TileSprite) LoadTexture1O(key interface{}, frame interface{}) {\n self.Object.Call(\"loadTexture\", key, frame)\n}",
"func CopyTexImage1D(target uint32, level int32, internalformat uint32, x int32, y int32, width int32, border int32) {\n C.glowCopyTexImage1D(gpCopyTexImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLint)(border))\n}",
"func TexSubImage2D(target Enum, level int, x, y, width, height int, format, ty Enum, data []byte) {\n\tgl.TexSubImage2D(uint32(target), int32(level), int32(x), int32(y), int32(width), int32(height), uint32(format), uint32(ty), gl.Ptr(&data[0]))\n}",
"func CopyTexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, x int32, y int32, width int32, height int32) {\n C.glowCopyTexSubImage3D(gpCopyTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func (self *TileSprite) SetTintedTextureA(member *Canvas) {\n self.Object.Set(\"tintedTexture\", member)\n}",
"func (self *TileSprite) LoadTextureI(args ...interface{}) {\n self.Object.Call(\"loadTexture\", args)\n}",
"func CompressedTexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, imageSize int32, data unsafe.Pointer) {\n C.glowCompressedTexSubImage2D(gpCompressedTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(format), (C.GLsizei)(imageSize), data)\n}"
] | [
"0.77538586",
"0.72047174",
"0.72047174",
"0.6774037",
"0.6741786",
"0.66661984",
"0.6403448",
"0.6268379",
"0.6223522",
"0.6147719",
"0.61377084",
"0.6133744",
"0.6116373",
"0.60852635",
"0.6049272",
"0.60422444",
"0.60406405",
"0.6007053",
"0.6000937",
"0.59634435",
"0.59634435",
"0.59466213",
"0.59409845",
"0.59171224",
"0.59027624",
"0.5891615",
"0.5823998",
"0.5822855",
"0.5802199",
"0.57991695",
"0.57854867",
"0.5778224",
"0.5778224",
"0.5771572",
"0.57640475",
"0.57587844",
"0.5742206",
"0.57089984",
"0.5687862",
"0.5687862",
"0.5681317",
"0.56775814",
"0.5672627",
"0.5672627",
"0.56616724",
"0.5660655",
"0.5638005",
"0.56317383",
"0.56317383",
"0.5631157",
"0.56284684",
"0.5624484",
"0.5624484",
"0.5614699",
"0.5614699",
"0.5612945",
"0.56102616",
"0.5593209",
"0.5593084",
"0.5593084",
"0.5586089",
"0.55748504",
"0.5546489",
"0.554473",
"0.5540993",
"0.5540993",
"0.5540658",
"0.55374783",
"0.5516752",
"0.5488588",
"0.5488588",
"0.5483708",
"0.5483708",
"0.54763156",
"0.5454605",
"0.5454605",
"0.54489857",
"0.54489857",
"0.54455566",
"0.5436168",
"0.54352176",
"0.5434782",
"0.5434782",
"0.54313654",
"0.54304075",
"0.5429589",
"0.5415",
"0.5413983",
"0.5413983",
"0.5396161",
"0.5386226",
"0.5384452",
"0.53817356",
"0.53801143",
"0.537824",
"0.53771234",
"0.53735524",
"0.53556484",
"0.53556377"
] | 0.6746263 | 5 |
bind one or more named texture images to a sequence of consecutive image units | func BindImageTextures(first uint32, count int32, textures *uint32) {
C.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n C.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n C.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindImageTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func LoadTextures(eng sprite.Engine) map[string]sprite.SubTex {\n\tallTexs := make(map[string]sprite.SubTex)\n\tboundedImgs := []string{\"Clubs-2.png\", \"Clubs-3.png\", \"Clubs-4.png\", \"Clubs-5.png\", \"Clubs-6.png\", \"Clubs-7.png\", \"Clubs-8.png\",\n\t\t\"Clubs-9.png\", \"Clubs-10.png\", \"Clubs-Jack.png\", \"Clubs-Queen.png\", \"Clubs-King.png\", \"Clubs-Ace.png\",\n\t\t\"Diamonds-2.png\", \"Diamonds-3.png\", \"Diamonds-4.png\", \"Diamonds-5.png\", \"Diamonds-6.png\", \"Diamonds-7.png\", \"Diamonds-8.png\",\n\t\t\"Diamonds-9.png\", \"Diamonds-10.png\", \"Diamonds-Jack.png\", \"Diamonds-Queen.png\", \"Diamonds-King.png\", \"Diamonds-Ace.png\",\n\t\t\"Spades-2.png\", \"Spades-3.png\", \"Spades-4.png\", \"Spades-5.png\", \"Spades-6.png\", \"Spades-7.png\", \"Spades-8.png\",\n\t\t\"Spades-9.png\", \"Spades-10.png\", \"Spades-Jack.png\", \"Spades-Queen.png\", \"Spades-King.png\", \"Spades-Ace.png\",\n\t\t\"Hearts-2.png\", \"Hearts-3.png\", \"Hearts-4.png\", \"Hearts-5.png\", \"Hearts-6.png\", \"Hearts-7.png\", \"Hearts-8.png\",\n\t\t\"Hearts-9.png\", \"Hearts-10.png\", \"Hearts-Jack.png\", \"Hearts-Queen.png\", \"Hearts-King.png\", \"Hearts-Ace.png\", \"BakuSquare.png\",\n\t}\n\tunboundedImgs := []string{\"Club.png\", \"Diamond.png\", \"Spade.png\", \"Heart.png\", \"gray.jpeg\", \"blue.png\", \"trickDrop.png\",\n\t\t\"trickDropBlue.png\", \"player0.jpeg\", \"player1.jpeg\", \"player2.jpeg\", \"player3.jpeg\", \"laptopIcon.png\", \"watchIcon.png\",\n\t\t\"phoneIcon.png\", \"tabletIcon.png\", \"A-Upper.png\", \"B-Upper.png\", \"C-Upper.png\", \"D-Upper.png\", \"E-Upper.png\", \"F-Upper.png\",\n\t\t\"G-Upper.png\", \"H-Upper.png\", \"I-Upper.png\", \"J-Upper.png\", \"K-Upper.png\", \"L-Upper.png\", \"M-Upper.png\", \"N-Upper.png\",\n\t\t\"O-Upper.png\", \"P-Upper.png\", \"Q-Upper.png\", \"R-Upper.png\", \"S-Upper.png\", \"T-Upper.png\", \"U-Upper.png\", \"V-Upper.png\",\n\t\t\"W-Upper.png\", \"X-Upper.png\", \"Y-Upper.png\", \"Z-Upper.png\", \"A-Lower.png\", \"B-Lower.png\", \"C-Lower.png\", \"D-Lower.png\",\n\t\t\"E-Lower.png\", \"F-Lower.png\", \"G-Lower.png\", \"H-Lower.png\", \"I-Lower.png\", \"J-Lower.png\", \"K-Lower.png\", \"L-Lower.png\",\n\t\t\"M-Lower.png\", \"N-Lower.png\", \"O-Lower.png\", \"P-Lower.png\", \"Q-Lower.png\", \"R-Lower.png\", \"S-Lower.png\", \"T-Lower.png\",\n\t\t\"U-Lower.png\", \"V-Lower.png\", \"W-Lower.png\", \"X-Lower.png\", \"Y-Lower.png\", \"Z-Lower.png\", \"Space.png\", \"Colon.png\", \"Bang.png\",\n\t\t\"Apostrophe.png\", \"1.png\", \"2.png\", \"3.png\", \"4.png\", \"5.png\", \"6.png\", \"7.png\", \"8.png\", \"9.png\", \"0.png\", \"1-Red.png\",\n\t\t\"2-Red.png\", \"3-Red.png\", \"4-Red.png\", \"5-Red.png\", \"6-Red.png\", \"7-Red.png\", \"8-Red.png\", \"9-Red.png\", \"0-Red.png\",\n\t\t\"1-DBlue.png\", \"2-DBlue.png\", \"3-DBlue.png\", \"4-DBlue.png\", \"5-DBlue.png\", \"6-DBlue.png\", \"7-DBlue.png\", \"8-DBlue.png\",\n\t\t\"9-DBlue.png\", \"0-DBlue.png\", \"A-Upper-DBlue.png\", \"B-Upper-DBlue.png\",\n\t\t\"C-Upper-DBlue.png\", \"D-Upper-DBlue.png\", \"E-Upper-DBlue.png\", \"F-Upper-DBlue.png\", \"G-Upper-DBlue.png\", \"H-Upper-DBlue.png\",\n\t\t\"I-Upper-DBlue.png\", \"J-Upper-DBlue.png\", \"K-Upper-DBlue.png\", \"L-Upper-DBlue.png\", \"M-Upper-DBlue.png\", \"N-Upper-DBlue.png\",\n\t\t\"O-Upper-DBlue.png\", \"P-Upper-DBlue.png\", \"Q-Upper-DBlue.png\", \"R-Upper-DBlue.png\", \"S-Upper-DBlue.png\", \"T-Upper-DBlue.png\",\n\t\t\"U-Upper-DBlue.png\", \"V-Upper-DBlue.png\", \"W-Upper-DBlue.png\", \"X-Upper-DBlue.png\", \"Y-Upper-DBlue.png\", 
\"Z-Upper-DBlue.png\",\n\t\t\"A-Lower-DBlue.png\", \"B-Lower-DBlue.png\", \"C-Lower-DBlue.png\", \"D-Lower-DBlue.png\", \"E-Lower-DBlue.png\", \"F-Lower-DBlue.png\",\n\t\t\"G-Lower-DBlue.png\", \"H-Lower-DBlue.png\", \"I-Lower-DBlue.png\", \"J-Lower-DBlue.png\", \"K-Lower-DBlue.png\", \"L-Lower-DBlue.png\",\n\t\t\"M-Lower-DBlue.png\", \"N-Lower-DBlue.png\", \"O-Lower-DBlue.png\", \"P-Lower-DBlue.png\", \"Q-Lower-DBlue.png\", \"R-Lower-DBlue.png\",\n\t\t\"S-Lower-DBlue.png\", \"T-Lower-DBlue.png\", \"U-Lower-DBlue.png\", \"V-Lower-DBlue.png\", \"W-Lower-DBlue.png\", \"X-Lower-DBlue.png\",\n\t\t\"Y-Lower-DBlue.png\", \"Z-Lower-DBlue.png\", \"Apostrophe-DBlue.png\", \"Space-DBlue.png\", \"A-Upper-LBlue.png\", \"B-Upper-LBlue.png\",\n\t\t\"C-Upper-LBlue.png\", \"D-Upper-LBlue.png\", \"E-Upper-LBlue.png\", \"F-Upper-LBlue.png\", \"G-Upper-LBlue.png\", \"H-Upper-LBlue.png\",\n\t\t\"I-Upper-LBlue.png\", \"J-Upper-LBlue.png\", \"K-Upper-LBlue.png\", \"L-Upper-LBlue.png\", \"M-Upper-LBlue.png\", \"N-Upper-LBlue.png\",\n\t\t\"O-Upper-LBlue.png\", \"P-Upper-LBlue.png\", \"Q-Upper-LBlue.png\", \"R-Upper-LBlue.png\", \"S-Upper-LBlue.png\", \"T-Upper-LBlue.png\",\n\t\t\"U-Upper-LBlue.png\", \"V-Upper-LBlue.png\", \"W-Upper-LBlue.png\", \"X-Upper-LBlue.png\", \"Y-Upper-LBlue.png\", \"Z-Upper-LBlue.png\",\n\t\t\"A-Lower-LBlue.png\", \"B-Lower-LBlue.png\", \"C-Lower-LBlue.png\", \"D-Lower-LBlue.png\", \"E-Lower-LBlue.png\", \"F-Lower-LBlue.png\",\n\t\t\"G-Lower-LBlue.png\", \"H-Lower-LBlue.png\", \"I-Lower-LBlue.png\", \"J-Lower-LBlue.png\", \"K-Lower-LBlue.png\", \"L-Lower-LBlue.png\",\n\t\t\"M-Lower-LBlue.png\", \"N-Lower-LBlue.png\", \"O-Lower-LBlue.png\", \"P-Lower-LBlue.png\", \"Q-Lower-LBlue.png\", \"R-Lower-LBlue.png\",\n\t\t\"S-Lower-LBlue.png\", \"T-Lower-LBlue.png\", \"U-Lower-LBlue.png\", \"V-Lower-LBlue.png\", \"W-Lower-LBlue.png\", \"X-Lower-LBlue.png\",\n\t\t\"Y-Lower-LBlue.png\", \"Z-Lower-LBlue.png\", \"A-Upper-Gray.png\", \"B-Upper-Gray.png\", \"C-Upper-Gray.png\", \"D-Upper-Gray.png\",\n\t\t\"E-Upper-Gray.png\", \"F-Upper-Gray.png\", \"G-Upper-Gray.png\", \"H-Upper-Gray.png\", \"I-Upper-Gray.png\", \"J-Upper-Gray.png\",\n\t\t\"K-Upper-Gray.png\", \"L-Upper-Gray.png\", \"M-Upper-Gray.png\", \"N-Upper-Gray.png\", \"O-Upper-Gray.png\", \"P-Upper-Gray.png\",\n\t\t\"Q-Upper-Gray.png\", \"R-Upper-Gray.png\", \"S-Upper-Gray.png\", \"T-Upper-Gray.png\", \"U-Upper-Gray.png\", \"V-Upper-Gray.png\",\n\t\t\"W-Upper-Gray.png\", \"X-Upper-Gray.png\", \"Y-Upper-Gray.png\", \"Z-Upper-Gray.png\", \"A-Lower-Gray.png\", \"B-Lower-Gray.png\",\n\t\t\"C-Lower-Gray.png\", \"D-Lower-Gray.png\", \"E-Lower-Gray.png\", \"F-Lower-Gray.png\", \"G-Lower-Gray.png\", \"H-Lower-Gray.png\",\n\t\t\"I-Lower-Gray.png\", \"J-Lower-Gray.png\", \"K-Lower-Gray.png\", \"L-Lower-Gray.png\", \"M-Lower-Gray.png\", \"N-Lower-Gray.png\",\n\t\t\"O-Lower-Gray.png\", \"P-Lower-Gray.png\", \"Q-Lower-Gray.png\", \"R-Lower-Gray.png\", \"S-Lower-Gray.png\", \"T-Lower-Gray.png\",\n\t\t\"U-Lower-Gray.png\", \"V-Lower-Gray.png\", \"W-Lower-Gray.png\", \"X-Lower-Gray.png\", \"Y-Lower-Gray.png\", \"Z-Lower-Gray.png\",\n\t\t\"Space-Gray.png\", \"RoundedRectangle-DBlue.png\", \"RoundedRectangle-LBlue.png\", \"RoundedRectangle-Gray.png\", \"Rectangle-LBlue.png\",\n\t\t\"Rectangle-DBlue.png\", \"HorizontalPullTab.png\", \"VerticalPullTab.png\", \"NewGamePressed.png\", \"NewGameUnpressed.png\",\n\t\t\"NewRoundPressed.png\", \"NewRoundUnpressed.png\", \"JoinGamePressed.png\", \"JoinGameUnpressed.png\", \"Period.png\",\n\t\t\"SitSpotPressed.png\", 
\"SitSpotUnpressed.png\", \"WatchSpotPressed.png\", \"WatchSpotUnpressed.png\", \"StartBlue.png\", \"StartGray.png\",\n\t\t\"StartBluePressed.png\", \"Restart.png\", \"Visibility.png\", \"VisibilityOff.png\", \"QuitPressed.png\", \"QuitUnpressed.png\",\n\t\t\"PassPressed.png\", \"PassUnpressed.png\", \"RightArrowBlue.png\", \"LeftArrowBlue.png\", \"AcrossArrowBlue.png\", \"RightArrowGray.png\",\n\t\t\"LeftArrowGray.png\", \"AcrossArrowGray.png\", \"TakeTrickTableUnpressed.png\", \"TakeTrickTablePressed.png\", \"TakeTrickHandPressed.png\",\n\t\t\"TakeTrickHandUnpressed.png\", \"android.png\", \"cat.png\", \"man.png\", \"woman.png\", \"TakeUnpressed.png\", \"TakePressed.png\",\n\t\t\"UnplayedBorder1.png\", \"UnplayedBorder2.png\", \"RejoinPressed.png\", \"RejoinUnpressed.png\",\n\t}\n\tfor _, f := range boundedImgs {\n\t\ta, err := asset.Open(f)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\timg, _, err := image.Decode(a)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tt, err := eng.LoadTexture(img)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\timgWidth, imgHeight := t.Bounds()\n\t\tallTexs[f] = sprite.SubTex{t, image.Rect(0, 0, imgWidth, imgHeight)}\n\t\ta.Close()\n\t}\n\tfor _, f := range unboundedImgs {\n\t\ta, err := asset.Open(f)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\timg, _, err := image.Decode(a)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tt, err := eng.LoadTexture(img)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\timgWidth, imgHeight := t.Bounds()\n\t\tallTexs[f] = sprite.SubTex{t, image.Rect(1, 1, imgWidth-1, imgHeight-1)}\n\t\ta.Close()\n\t}\n\treturn allTexs\n}",
"func loadTextures() {\n\tfor i := 0; i < 7; i++ {\n\n\t\ttextures[i], _, _ = ebutil.NewImageFromFile(\"assets/image/\"+colors[i]+\".png\", eb.FilterDefault)\n\t}\n\ttextures[7], _, _ = ebutil.NewImageFromFile(\"assets/image/tetris_backgraund.png\", eb.FilterDefault)\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n C.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func (level *Level) SetTextures(newIds []int) {\n\tblockStore := level.store.Get(res.ResourceID(4000 + level.id*100 + 7))\n\tvar ids [54]uint16\n\ttoCopy := len(ids)\n\n\tif len(newIds) < toCopy {\n\t\ttoCopy = len(newIds)\n\t}\n\tfor index := 0; index < len(ids); index++ {\n\t\tids[index] = uint16(newIds[index])\n\t}\n\n\tbuffer := bytes.NewBuffer(nil)\n\tbinary.Write(buffer, binary.LittleEndian, &ids)\n\tblockStore.SetBlockData(0, buffer.Bytes())\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tsyscall.Syscall9(gpBindImageTexture, 7, uintptr(unit), uintptr(texture), uintptr(level), boolToUintptr(layered), uintptr(layer), uintptr(access), uintptr(format), 0, 0)\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTextureUnit, 2, uintptr(unit), uintptr(texture), 0)\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tC.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tC.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func SetupImages(i Images) {\n\timages = i\n}",
"func TextureSubImage1D(texture uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall9(gpTextureSubImage1D, 7, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(width), uintptr(format), uintptr(xtype), uintptr(pixels), 0, 0)\n}",
"func (am *Manager) LoadTextures(names ...string) ([]*Texture, error) {\n\tvar (\n\t\ttextures = make([]*Texture, len(names))\n\t\terr error\n\t)\n\n\tfor i, name := range names {\n\t\ttextures[i], err = am.LoadTexture(name)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn textures, nil\n}",
"func getTextureIds(texInfos []TexInfo) map[string]int {\n\ttextureIds := make(map[string]int)\n\tnextId := 0\n\tfor i := 0; i < len(texInfos); i++ {\n\t\ttexInfo := texInfos[i]\n\n\t\t// convert filename byte array to string\n\t\tfilename := \"\"\n\t\tfor j := 0; j < len(texInfo.TextureName); j++ {\n\t\t\t// end of string\n\t\t\tif texInfo.TextureName[j] == 0 {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tfilename += string(texInfo.TextureName[j])\n\t\t}\n\n\t\t// generate new id for texture if necessary\n\t\t_, exists := textureIds[filename]\n\t\tif !exists {\n\t\t\ttextureIds[filename] = nextId\n\t\t\tnextId++\n\t\t}\n\t}\n\treturn textureIds\n}",
"func Prepare(paths []string) []*bimg.Image {\n\tvar imgs []*bimg.Image\n\tfor _, path := range paths {\n\t\timgs = append(imgs, NewImage(path))\n\t}\n\treturn imgs\n}",
"func (self *TileSprite) SetTextureI(args ...interface{}) {\n self.Object.Call(\"setTexture\", args)\n}",
"func BindTexture(target uint32, texture uint32) {\n C.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func BindTexture(target Enum, t Texture) {\n\tgl.BindTexture(uint32(target), t.Value)\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n C.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func loadAllReferenceTextures(compMesh *component.Mesh) {\n\tfor _, texFile := range compMesh.Material.Textures {\n\t\tdoLoadTexture(texFile)\n\t}\n\tif len(compMesh.Material.DiffuseTexture) > 0 {\n\t\tdoLoadTexture(compMesh.Material.DiffuseTexture)\n\t}\n\tif len(compMesh.Material.NormalsTexture) > 0 {\n\t\tdoLoadTexture(compMesh.Material.NormalsTexture)\n\t}\n\tif len(compMesh.Material.SpecularTexture) > 0 {\n\t\tdoLoadTexture(compMesh.Material.SpecularTexture)\n\t}\n}",
"func (t Texture3D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_3D, t.id)\n}",
"func (t *Texture2D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_2D, t.ID)\n}",
"func TagImages(sources []string, target func(string) string) error {\n\n\tfor _, source := range sources {\n\t\terr := tagImage(source, target)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}",
"func BindTexture(target uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTexture, 2, uintptr(target), uintptr(texture), 0)\n}",
"func BindTexture(target Enum, texture Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tctexture, _ := (C.GLuint)(texture), cgoAllocsUnknown\n\tC.glBindTexture(ctarget, ctexture)\n}",
"func InjectImages(values map[string]interface{}, v imagevector.ImageVector, names []string, opts ...imagevector.FindOptionFunc) (map[string]interface{}, error) {\n\timages, err := imagevector.FindImages(v, names, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvalues = utils.ShallowCopyMapStringInterface(values)\n\tvalues[\"images\"] = imagevector.ImageMapToValues(images)\n\treturn values, nil\n}",
"func TextureSubImage1D(texture uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage1D(gpTextureSubImage1D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TextureSubImage1D(texture uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage1D(gpTextureSubImage1D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func BindSampler(unit uint32, sampler uint32) {\n C.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func ShaderProgramImages(numImages int) *shaderir.Program {\n\tif numImages <= 0 {\n\t\tpanic(\"testing: numImages must be >= 1\")\n\t}\n\n\tvar exprs []string\n\tfor i := 0; i < numImages; i++ {\n\t\texprs = append(exprs, fmt.Sprintf(\"imageSrc%dUnsafeAt(texCoord)\", i))\n\t}\n\n\tir, err := graphics.CompileShader([]byte(fmt.Sprintf(`//kage:unit pixels\n\npackage main\n\nfunc Fragment(position vec4, texCoord vec2, color vec4) vec4 {\n\treturn %s\n}\n`, strings.Join(exprs, \" + \"))))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn ir\n}",
"func EGLImageTargetTextureStorageEXT(texture uint32, image unsafe.Pointer, attrib_list *int32) {\n\tsyscall.Syscall(gpEGLImageTargetTextureStorageEXT, 3, uintptr(texture), uintptr(image), uintptr(unsafe.Pointer(attrib_list)))\n}",
"func getStringImgs(input, color string, texs map[string]sprite.SubTex) []sprite.SubTex {\n\timgs := make([]sprite.SubTex, 0)\n\tfor _, char := range input {\n\t\tkey := \"\"\n\t\tif char == 32 {\n\t\t\tkey += \"Space\"\n\t\t} else if char == 33 {\n\t\t\tkey += \"Bang\"\n\t\t} else if char == 39 {\n\t\t\tkey += \"Apostrophe\"\n\t\t} else if char == 46 {\n\t\t\tkey += \"Period\"\n\t\t} else if char == 58 {\n\t\t\tkey += \"Colon\"\n\t\t} else if char >= 48 && char <= 57 {\n\t\t\t// if char is a number\n\t\t\tkey += string(char)\n\t\t} else {\n\t\t\t// if char is a letter\n\t\t\tkey += strings.ToUpper(string(char))\n\t\t\tif char > 90 {\n\t\t\t\tkey += \"-Lower\"\n\t\t\t} else {\n\t\t\t\tkey += \"-Upper\"\n\t\t\t}\n\t\t}\n\t\tif color != \"\" {\n\t\t\tkey += \"-\" + color\n\t\t}\n\t\tkey += \".png\"\n\t\timg := texs[key]\n\t\timgs = append(imgs, img)\n\t}\n\treturn imgs\n}",
"func (script Script) RenderGlyphImages() {\n\tfor _, g := range script.Glyphs {\n\t\tg.Render()\n\t}\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tC.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tC.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func EGLImageTargetTextureStorageEXT(texture uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTextureStorageEXT(gpEGLImageTargetTextureStorageEXT, (C.GLuint)(texture), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func EGLImageTargetTextureStorageEXT(texture uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTextureStorageEXT(gpEGLImageTargetTextureStorageEXT, (C.GLuint)(texture), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func TextureSubImage2D(texture uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall9(gpTextureSubImage2D, 9, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(width), uintptr(height), uintptr(format), uintptr(xtype), uintptr(pixels))\n}",
"func TexSubImage1D(target uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage1D(gpTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n C.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func (self *TileSprite) LoadTextureI(args ...interface{}) {\n self.Object.Call(\"loadTexture\", args)\n}",
"func (self *Graphics) GenerateTextureI(args ...interface{}) *Texture{\n return &Texture{self.Object.Call(\"generateTexture\", args)}\n}",
"func PrioritizeTextures(n int32, textures *uint32, priorities *float32) {\n C.glowPrioritizeTextures(gpPrioritizeTextures, (C.GLsizei)(n), (*C.GLuint)(unsafe.Pointer(textures)), (*C.GLfloat)(unsafe.Pointer(priorities)))\n}",
"func Run(ctx context.Context, params *Params) error {\n\tif ctx == nil {\n\t\treturn errors.New(\"Context must not be nil\")\n\t}\n\tif params == nil {\n\t\treturn errors.New(\"Params must not be nil\")\n\t}\n\tif !params.Format.IsValid() {\n\t\treturn errors.New(\"Invalid 'Format' parameter\")\n\t}\n\n\tctx, cancelCtx := context.WithCancel(ctx)\n\tdefer cancelCtx()\n\n\t// Validate the parameters\n\tif err := params.validateRequiredParameters(); err != nil {\n\t\treturn err\n\t}\n\tparams.applySensibleDefaults()\n\n\t// Read the images from the input directory\n\tsprites, err := readAssetStream(ctx, params.Input, params.Padding)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// TODO allow sorting algorithm to be specified\n\tsort.Sort(packing.ByArea(sprites))\n\n\ttotalNumberOfSprites := len(sprites)\n\ttotalNumberOfAtlases := 0\n\tcompletedSprites := make([]packing.Block, 0, totalNumberOfSprites)\n\tincompleteSprites := make([]packing.Block, 0, totalNumberOfSprites)\n\twg := &sync.WaitGroup{}\n\terrc := make(chan error)\n\tfor {\n\t\t// Return error if maxAtlases param exceeded\n\t\tif params.MaxAtlases > 0 && totalNumberOfAtlases == params.MaxAtlases {\n\t\t\treturn fmt.Errorf(\"Maximum number of atlases (%d) exceeded\", params.MaxAtlases)\n\t\t}\n\n\t\t// Arrange the images into the atlas space\n\t\tcompletedSprites = completedSprites[:0]\n\t\tincompleteSprites = incompleteSprites[:0]\n\t\tpacker := packing.NewBinPacker(params.Width, params.Height)\n\t\tfor _, sprite := range sprites {\n\t\t\tswitch packer.Pack(sprite) {\n\t\t\tcase packing.ErrInputTooLarge:\n\t\t\t\treturn packing.ErrInputTooLarge\n\t\t\tcase packing.ErrOutOfRoom:\n\t\t\t\tincompleteSprites = append(incompleteSprites, sprite)\n\t\t\tdefault:\n\t\t\t\tcompletedSprites = append(completedSprites, sprite)\n\t\t\t}\n\t\t}\n\n\t\ttotalNumberOfAtlases++\n\t\tatlasName := fmt.Sprintf(\"%s-%d\", params.Name, totalNumberOfAtlases)\n\t\tatlas := &atlas{\n\t\t\tName: atlasName,\n\t\t\tSprites: completedSprites,\n\t\t\tDescFilename: fmt.Sprintf(\"%s.%s\", atlasName, params.Format.Ext),\n\t\t\t// TODO add image type parameter\n\t\t\tImageFilename: fmt.Sprintf(\"%s.%s\", atlasName, \"png\"),\n\t\t\tWidth: params.Width,\n\t\t\tHeight: params.Height,\n\t\t}\n\t\twg.Add(1)\n\n\t\tgo func(ctx context.Context, errc chan<- error, wg *sync.WaitGroup) {\n\t\t\tselect {\n\t\t\tcase errc <- atlas.Output(params.Output, params.Format.Template):\n\t\t\tcase <-ctx.Done():\n\t\t\t}\n\t\t\twg.Done()\n\t\t}(ctx, errc, wg)\n\n\t\ttotalNumberOfIncompletedSprites := len(incompleteSprites)\n\t\t// If there are no more sprites that are incomplete, we are done!\n\t\tif totalNumberOfIncompletedSprites == 0 {\n\t\t\tbreak\n\t\t}\n\t\t// If we don't make any progress, then we've failed\n\t\tif totalNumberOfIncompletedSprites == totalNumberOfSprites {\n\t\t\treturn packing.ErrOutOfRoom\n\t\t}\n\t\t// Otherwise continue\n\t\tsprites = incompleteSprites\n\t}\n\n\tgo func() {\n\t\twg.Wait()\n\t\tclose(errc)\n\t}()\n\n\tfor err := range errc {\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}",
"func (native *OpenGL) GenTextures(n int32) []uint32 {\n\tids := make([]uint32, n)\n\tgl.GenTextures(n, &ids[0])\n\treturn ids\n}",
"func TextureSubImage3D(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall12(gpTextureSubImage3D, 11, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(zoffset), uintptr(width), uintptr(height), uintptr(depth), uintptr(format), uintptr(xtype), uintptr(pixels), 0)\n}",
"func GenTextures(n Sizei, textures []Uint) {\n\tcn, _ := (C.GLsizei)(n), cgoAllocsUnknown\n\tctextures, _ := (*C.GLuint)(unsafe.Pointer((*sliceHeader)(unsafe.Pointer(&textures)).Data)), cgoAllocsUnknown\n\tC.glGenTextures(cn, ctextures)\n}",
"func TexImage2D(target Enum, level int, width, height int, format Enum, ty Enum, data []byte) {\n\tp := unsafe.Pointer(nil)\n\tif len(data) > 0 {\n\t\tp = gl.Ptr(&data[0])\n\t}\n\tgl.TexImage2D(uint32(target), int32(level), int32(format), int32(width), int32(height), 0, uint32(format), uint32(ty), p)\n}",
"func (level *Level) TextureAnimations() (result []model.TextureAnimation) {\n\tlevel.mutex.Lock()\n\tdefer level.mutex.Unlock()\n\tvar rawEntries [4]data.TextureAnimationEntry\n\n\tresult = make([]model.TextureAnimation, len(rawEntries))\n\tlevel.readTable(42, &rawEntries)\n\tfor index := 0; index < len(rawEntries); index++ {\n\t\tresultEntry := &result[index]\n\t\trawEntry := &rawEntries[index]\n\n\t\tresultEntry.FrameCount = intAsPointer(int(rawEntry.FrameCount))\n\t\tresultEntry.FrameTime = intAsPointer(int(rawEntry.FrameTime))\n\t\tresultEntry.LoopType = intAsPointer(int(rawEntry.LoopType))\n\t}\n\treturn\n}",
"func assignImages() {\n\tvar iconsSize = 18\n\tSetPict(obj.ButtonExit, \"\")\n\tSetPict(obj.ButtonProceed, \"\")\n\tSetPict(obj.EditButtonClose, \"\")\n\tSetPict(obj.EditSpinAudioDelay, \"\")\n\tSetPict(obj.EditSpinAudioTrack, \"\")\n\tSetPict(obj.EditSpinCutSec, \"\")\n\tSetPict(obj.EditSpinCutSecDuration, \"\")\n\tSetPict(obj.EditSpinSplit, \"\")\n\tSetPict(obj.EditSpinTextTrack, \"\")\n\tSetPict(obj.EditWindow, \"\")\n\tSetPict(obj.InfosButtonClose, \"\")\n\tSetPict(obj.InfosButtonShowFilesList, \"\")\n\tSetPict(obj.MainToolButtonClear, \"\")\n\tSetPict(obj.MainToolButtonEdit, \"\")\n\tSetPict(obj.MainToolButtonInvertChecked, \"\")\n\tSetPict(obj.MainToolButtonUnckeckAll, \"\")\n\tSetPict(obj.MainWindow, movieIcon, iconsSize)\n\tSetPict(obj.WindowInfos, movieIcon, iconsSize)\n}",
"func TexImage1D(target uint32, level int32, internalformat int32, width int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall9(gpTexImage1D, 8, uintptr(target), uintptr(level), uintptr(internalformat), uintptr(width), uintptr(border), uintptr(format), uintptr(xtype), uintptr(pixels), 0)\n}",
"func (self *TileSprite) SetTextureA(member *Texture) {\n self.Object.Set(\"texture\", member)\n}",
"func WrapImages(images []model.Image, cfg *config.Giffy) []Image {\n\toutput := make([]Image, len(images))\n\tfor x := 0; x < len(images); x++ {\n\t\toutput[x] = NewImage(images[x], cfg)\n\t}\n\treturn output\n}",
"func (self *TileSprite) SetTexture1O(texture *Texture, destroy bool) {\n self.Object.Call(\"setTexture\", texture, destroy)\n}",
"func TextureStorage2DMultisample(texture uint32, samples int32, internalformat uint32, width int32, height int32, fixedsamplelocations bool) {\n\tsyscall.Syscall6(gpTextureStorage2DMultisample, 6, uintptr(texture), uintptr(samples), uintptr(internalformat), uintptr(width), uintptr(height), boolToUintptr(fixedsamplelocations))\n}",
"func BookletFromImages(ctx *model.Context, fileNames []string, nup *model.NUp, pagesDict types.Dict, pagesIndRef *types.IndirectRef) error {\n\t// The order of images in fileNames corresponds to a desired booklet page sequence.\n\tselectedPages := types.IntSet{}\n\tfor i := 1; i <= len(fileNames); i++ {\n\t\tselectedPages[i] = true\n\t}\n\n\tif nup.PageGrid {\n\t\tnup.PageDim.Width *= nup.Grid.Width\n\t\tnup.PageDim.Height *= nup.Grid.Height\n\t}\n\n\txRefTable := ctx.XRefTable\n\tformsResDict := types.NewDict()\n\tvar buf bytes.Buffer\n\trr := nup.RectsForGrid()\n\n\tfor i, bp := range sortSelectedPagesForBooklet(selectedPages, nup) {\n\n\t\tif i > 0 && i%len(rr) == 0 {\n\n\t\t\t// Wrap complete page.\n\t\t\tif err := wrapUpPage(ctx, nup, formsResDict, buf, pagesDict, pagesIndRef); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tbuf.Reset()\n\t\t\tformsResDict = types.NewDict()\n\t\t}\n\n\t\trDest := rr[i%len(rr)]\n\n\t\tif bp.number == 0 {\n\t\t\t// This is an empty page at the end of a booklet.\n\t\t\tif nup.BgColor != nil {\n\t\t\t\tdraw.FillRectNoBorder(&buf, rDest, *nup.BgColor)\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\n\t\tf, err := os.Open(fileNames[bp.number-1])\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\timgIndRef, w, h, err := model.CreateImageResource(xRefTable, f, false, false)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif err := f.Close(); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tformIndRef, err := createNUpFormForImage(xRefTable, imgIndRef, w, h, i)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tformResID := fmt.Sprintf(\"Fm%d\", i)\n\t\tformsResDict.Insert(formResID, *formIndRef)\n\n\t\t// Append to content stream of booklet page i.\n\t\tenforceOrientation := false\n\t\tmodel.NUpTilePDFBytes(&buf, types.RectForDim(float64(w), float64(h)), rr[i%len(rr)], formResID, nup, bp.rotate, enforceOrientation)\n\t}\n\n\t// Wrap incomplete booklet page.\n\treturn wrapUpPage(ctx, nup, formsResDict, buf, pagesDict, pagesIndRef)\n}",
"func BindTexture(target GLEnum, texture Texture) {\n\tgl.BindTexture(uint32(target), uint32(texture))\n}",
"func (animation *AnimationSet) AddTexture(texture *Texture) {\n\tsimlog.FuncIn()\n\tanimation.textures = append(animation.textures, texture)\n\tsimlog.FuncOut()\n}",
"func (self *TileSprite) LoadTexture1O(key interface{}, frame interface{}) {\n self.Object.Call(\"loadTexture\", key, frame)\n}",
"func (addon Addon) Images(clusterVersion *version.Version, imageTag string) []string {\n\timages := []string{}\n\tfor _, cb := range addon.getImageCallbacks {\n\t\timage := cb(clusterVersion, imageTag)\n\t\tif image != \"\" {\n\t\t\timages = append(images, image)\n\t\t}\n\t}\n\treturn images\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tsyscall.Syscall(gpBindSamplers, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(samplers)))\n}",
"func CopyTextureSubImage1D(texture uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tsyscall.Syscall6(gpCopyTextureSubImage1D, 6, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(x), uintptr(y), uintptr(width))\n}",
"func (self *TileSprite) SetTexture(texture *Texture) {\n self.Object.Call(\"setTexture\", texture)\n}",
"func (self *Graphics) GenerateTexture1O(resolution int) *Texture{\n return &Texture{self.Object.Call(\"generateTexture\", resolution)}\n}",
"func MultiWrite(tagToImage map[name.Tag]v1.Image, w io.Writer, opts ...WriteOption) error {\n\trefToImage := make(map[name.Reference]v1.Image, len(tagToImage))\n\tfor i, d := range tagToImage {\n\t\trefToImage[i] = d\n\t}\n\treturn MultiRefWrite(refToImage, w, opts...)\n}",
"func TexImage2D(target GLEnum, level int32, internalformat GLEnum, width, height, border int32, format, xtype GLEnum, pixels []float32) {\n\tgl.TexImage2D(uint32(target), level, int32(internalformat), width, height, border, uint32(format), uint32(xtype), unsafe.Pointer(&pixels[0]))\n}",
"func (self *GameObjectCreator) RenderTexture1O(width int) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", width)}\n}",
"func (w *Worley) GenerateTexture(tex *texture.Texture) {\n\tgl.BindImageTexture(0, tex.GetHandle(), 0, false, 0, gl.READ_WRITE, gl.RGBA32F)\n\tgl.BindImageTexture(1, w.noisetexture.GetHandle(), 0, false, 0, gl.READ_ONLY, gl.RGBA32F)\n\n\tw.computeshader.Use()\n\tw.computeshader.UpdateInt32(\"uWidth\", w.width)\n\tw.computeshader.UpdateInt32(\"uHeight\", w.height)\n\tw.computeshader.UpdateInt32(\"uResolution\", w.resolution)\n\tw.computeshader.UpdateInt32(\"uOctaves\", w.octaves)\n\tw.computeshader.UpdateFloat32(\"uRadius\", w.radius)\n\tw.computeshader.UpdateFloat32(\"uRadiusScale\", w.radiusscale)\n\tw.computeshader.UpdateFloat32(\"uBrightness\", w.brightness)\n\tw.computeshader.UpdateFloat32(\"uContrast\", w.contrast)\n\tw.computeshader.UpdateFloat32(\"uScale\", w.scale)\n\tw.computeshader.UpdateFloat32(\"uPersistance\", w.persistance)\n\tw.computeshader.Compute(uint32(w.width), uint32(w.height), 1)\n\tw.computeshader.Compute(1024, 1024, 1)\n\tw.computeshader.Release()\n\n\tgl.MemoryBarrier(gl.ALL_BARRIER_BITS)\n\n\tgl.BindImageTexture(0, 0, 0, false, 0, gl.WRITE_ONLY, gl.RGBA32F)\n\tgl.BindImageTexture(1, 0, 0, false, 0, gl.READ_ONLY, gl.RGBA32F)\n}",
"func BindTexture(target uint32, texture uint32) {\n\tC.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func BindTexture(target uint32, texture uint32) {\n\tC.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func TexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage2D(gpTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func CopyTextureSubImage1D(texture uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tC.glowCopyTextureSubImage1D(gpCopyTextureSubImage1D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func CopyTextureSubImage1D(texture uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tC.glowCopyTextureSubImage1D(gpCopyTextureSubImage1D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func TexImage2D(target uint32, level int32, internalformat int32, width int32, height int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexImage2D(gpTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TextureSubImage2D(texture uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage2D(gpTextureSubImage2D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TextureSubImage2D(texture uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage2D(gpTextureSubImage2D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TextureStorage1D(texture uint32, levels int32, internalformat uint32, width int32) {\n\tsyscall.Syscall6(gpTextureStorage1D, 4, uintptr(texture), uintptr(levels), uintptr(internalformat), uintptr(width), 0, 0)\n}",
"func TexImage1D(target uint32, level int32, internalformat int32, width int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexImage1D(gpTexImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexImage2D(target uint32, level int32, internalformat int32, width int32, height int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall9(gpTexImage2D, 9, uintptr(target), uintptr(level), uintptr(internalformat), uintptr(width), uintptr(height), uintptr(border), uintptr(format), uintptr(xtype), uintptr(pixels))\n}",
"func (debugging *debuggingOpenGL) GenTextures(n int32) []uint32 {\n\tdebugging.recordEntry(\"GenTextures\", n)\n\tresult := debugging.gl.GenTextures(n)\n\tdebugging.recordExit(\"GenTextures\", result)\n\treturn result\n}",
"func TexSubImage1D(target uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexSubImage1D(gpTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexSubImage1D(target uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexSubImage1D(gpTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func LoadImageAsTexture(name string, path string) error {\n\tif Textures == nil {\n\t\tlog.Print(\"Initialize resource manager\")\n\t\tTextures = make(map[string]*ebiten.Image)\n\t}\n\timg, err := LoadImage(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tTextures[name] = img\n\treturn nil\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func TextureSubImage3D(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage3D(gpTextureSubImage3D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TextureSubImage3D(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage3D(gpTextureSubImage3D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func EGLImageTargetTexStorageEXT(target uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTexStorageEXT(gpEGLImageTargetTexStorageEXT, (C.GLenum)(target), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func EGLImageTargetTexStorageEXT(target uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTexStorageEXT(gpEGLImageTargetTexStorageEXT, (C.GLenum)(target), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tC.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tC.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func TexImage1D(target uint32, level int32, internalformat int32, width int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexImage1D(gpTexImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexImage1D(target uint32, level int32, internalformat int32, width int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexImage1D(gpTexImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}"
] | [
"0.7422068",
"0.7230596",
"0.7105752",
"0.7088535",
"0.6817541",
"0.6817541",
"0.63797003",
"0.6125447",
"0.6102882",
"0.5765335",
"0.5724267",
"0.56164443",
"0.56164443",
"0.5536132",
"0.5475197",
"0.5475197",
"0.54274994",
"0.53896785",
"0.53550667",
"0.5339524",
"0.5264366",
"0.5249974",
"0.5245183",
"0.5233986",
"0.5216165",
"0.5150757",
"0.5146245",
"0.51354235",
"0.5134238",
"0.51181835",
"0.511814",
"0.5084747",
"0.50817466",
"0.50817466",
"0.5073931",
"0.5030674",
"0.502715",
"0.5013086",
"0.5007265",
"0.4989806",
"0.4989806",
"0.49741575",
"0.49741575",
"0.4965323",
"0.4953127",
"0.4946943",
"0.4933173",
"0.49247763",
"0.49217406",
"0.49212518",
"0.4913446",
"0.48988324",
"0.48926648",
"0.48870227",
"0.48696408",
"0.48560914",
"0.4848609",
"0.48417884",
"0.48339587",
"0.48119068",
"0.48035038",
"0.48007253",
"0.4789258",
"0.4780689",
"0.47666913",
"0.4764956",
"0.47622132",
"0.47610104",
"0.47525868",
"0.47480905",
"0.47428876",
"0.4741765",
"0.47407603",
"0.4716971",
"0.47116128",
"0.47116128",
"0.47114286",
"0.47077653",
"0.47077653",
"0.4704787",
"0.47040135",
"0.47040135",
"0.47036752",
"0.47020438",
"0.4693647",
"0.4693434",
"0.4690612",
"0.4690612",
"0.4689111",
"0.46821207",
"0.46821207",
"0.46811268",
"0.46811268",
"0.46799156",
"0.46799156",
"0.46709397",
"0.46709397",
"0.46674222",
"0.46674222"
] | 0.67685163 | 7 |
bind a program pipeline to the current context | func BindProgramPipeline(pipeline uint32) {
C.glowBindProgramPipeline(gpBindProgramPipeline, (C.GLuint)(pipeline))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindProgramPipeline(pipeline uint32) {\n\tsyscall.Syscall(gpBindProgramPipeline, 1, uintptr(pipeline), 0, 0)\n}",
"func BindProgramPipeline(pipeline uint32) {\n C.glowBindProgramPipeline(gpBindProgramPipeline, (C.GLuint)(pipeline))\n}",
"func (s *BaselimboListener) EnterProgram(ctx *ProgramContext) {}",
"func (s *BaseednListener) EnterProgram(ctx *ProgramContext) {}",
"func (s *BaseAspidaListener) EnterProgram(ctx *ProgramContext) {}",
"func UseProgramStages(pipeline uint32, stages uint32, program uint32) {\n\tsyscall.Syscall(gpUseProgramStages, 3, uintptr(pipeline), uintptr(stages), uintptr(program))\n}",
"func UseProgramStages(pipeline uint32, stages uint32, program uint32) {\n C.glowUseProgramStages(gpUseProgramStages, (C.GLuint)(pipeline), (C.GLbitfield)(stages), (C.GLuint)(program))\n}",
"func main() {\n\tPipeline1()\n\t// Pipeline2()\n\t// RunDirectionalChannel()\n\tfmt.Println(\"YYY\")\n}",
"func (s *BaseBrainfuckListener) EnterProgram(ctx *ProgramContext) {}",
"func (ap *ActivePipelines) Append(p gaia.Pipeline) {\n\tap.Lock()\n\tdefer ap.Unlock()\n\n\tap.Pipelines = append(ap.Pipelines, p)\n}",
"func (s *BasemumpsListener) EnterProgram(ctx *ProgramContext) {}",
"func (r *Runtime) Pipe(rxHandler func(rxstream Stream) Stream) {\n\tfac := NewFactory()\n\t// create a RxStream from raw bytes channel.\n\trxstream := fac.FromChannel(context.Background(), r.rawBytesChan)\n\n\t// run RxHandler and get a new RxStream.\n\tr.stream = rxHandler(rxstream)\n}",
"func Pipeline(\n\tctx context.Context,\n\tin chan interface{},\n\n\t// newAccumulator will be used to produce a single accumulator object for\n\t// each Go routine withine the pipeline.\n\tnewAccumulator func() Accumulator,\n) Accumulator {\n\t//uid := fmt.Sprintf(\"%8X\", rand.Uint32())\n\n\t// Keep track of GOMAXPROCS accumulators using a slice.\n\tgomaxprocs := runtime.GOMAXPROCS(-1)\n\taccumulators := make([]Accumulator, gomaxprocs)\n\n\t// Start GOMAXPROCS Go routines to read values from the input channel; we'll\n\t// track their execution using a sync.WaitGroup instance.\n\tvar waitGroup sync.WaitGroup\n\twaitGroup.Add(gomaxprocs)\n\tfor i := 0; i < gomaxprocs; i++ {\n\n\t\t// Use the provided newAccumulator function to produce an accumulator\n\t\t// value for this Go routine and add it to a new context.Context instance.\n\t\taccumulator := newAccumulator()\n\t\taccumulators[i] = accumulator\n\n\t\t// Start a new Go routine passing the index value for logging.\n\t\tgo func(i int) {\n\n\t\t\t// Process values until the input channel is closed, then signal this go\n\t\t\t// routine has finished processing.\n\t\t\tfor object := range in {\n\t\t\t\taccumulator.Accumulate(ctx, object)\n\t\t\t}\n\t\t\twaitGroup.Done()\n\t\t}(i)\n\t}\n\n\t// Wait for the processors to exit.\n\twaitGroup.Wait()\n\n\t// Combine the accumulators.\n\taccumulator := accumulators[0]\n\tfor i := 1; i < len(accumulators); i++ {\n\t\taccumulator.Combine(accumulators[i])\n\t}\n\n\t// Return a single accumulator.\n\treturn accumulator\n}",
"func main() {\n\n\tapp := &cli.App{\n\t\tName: \"pipeline\",\n\t\tUsage: \"Scans a directory path for image files and injects them into a pipeline for processing.\",\n\t\tCommands: []*cli.Command{\n\t\t\t&cmd.Start,\n\t\t},\n\t\tFlags: []cli.Flag{\n\t\t\t&cli.BoolFlag{\n\t\t\t\tName: \"debug\",\n\t\t\t\tUsage: \"enables debug logging\",\n\t\t\t},\n\t\t},\n\t}\n\n\terr := app.Run(os.Args)\n\tif IsError(err, func(err error) {\n\t\tfmt.Printf(\"The application encountered an error: %v\", err)\n\t\tos.Exit(kApplicationFailure)\n\t}) {\n\t\tfmt.Println(\"The application has completed successfully.\")\n\t\tos.Exit(kApplicationSuccess)\n\t}\n}",
"func (c *MyConn) Pipeline() *MyPipeline {\n\treturn NewMyPipeline(c)\n}",
"func Pipeline(g *graph.Graph, id string, factory *Factory, top Values) executor.Pipeline {\n\tp := pipelineGen{Graph: g, RenderingPlant: factory, Top: top, ID: id}\n\treturn executor.NewPipeline().\n\t\tAndThen(p.maybeTransformRoot).\n\t\tAndThen(p.prepareNode).\n\t\tAndThen(p.wrapTask)\n}",
"func (t *cliTransHandler) SetPipeline(p *remote.TransPipeline) {\n\tt.transPipe = p\n}",
"func UseProgramStages(pipeline uint32, stages uint32, program uint32) {\n\tC.glowUseProgramStages(gpUseProgramStages, (C.GLuint)(pipeline), (C.GLbitfield)(stages), (C.GLuint)(program))\n}",
"func UseProgramStages(pipeline uint32, stages uint32, program uint32) {\n\tC.glowUseProgramStages(gpUseProgramStages, (C.GLuint)(pipeline), (C.GLbitfield)(stages), (C.GLuint)(program))\n}",
"func connectPipeline(commandList []exec.Cmd) error {\n\tvar err error\n\n\tfor i := range commandList {\n\t\tif i == len(commandList)-1 {\n\t\t\tbreak\n\t\t}\n\t\tif commandList[i+1].Stdin != nil || commandList[i].Stdout != nil {\n\t\t\treturn errors.New(\"Ambiguous input for file redirection and pipe\")\n\t\t}\n\t\tcommandList[i+1].Stdin, err = commandList[i].StdoutPipe()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif commandList[0].Stdin == nil {\n\t\tcommandList[0].Stdin = os.Stdin\n\t}\n\tif commandList[len(commandList)-1].Stdout == nil {\n\t\tcommandList[len(commandList)-1].Stdout = os.Stdout\n\t}\n\treturn nil\n}",
"func main() {\n\tadapter.RunStage(split, chunk, join)\n}",
"func (pool *Pool) Pipeline() *Pipeline {\n\treturn BlankPipeline(int64(pool.DB))\n}",
"func (pl *Pipeline) Exec() error {\n link, linkerr := pl.linkPipes()\n\n if linkerr != nil {\n return linkerr\n }\n\n starterr := pl.start()\n if starterr != nil {\n return starterr\n }\n\n setuperr := pl.copyPipes(link)\n if setuperr != nil {\n return setuperr\n }\n\n waiterr := pl.wait()\n if waiterr != nil {\n return waiterr\n }\n\n return nil\n}",
"func (f *Pub) Bind(rx Publisher, cl bool) {\n\tf.branches.Add(rx)\n\trx.UseRoot(f)\n\n\tif cl {\n\t\tf.enders.Add(rx)\n\t}\n}",
"func ProgramIsolatedTransformer(args ...string) IsolatedTransformer {\n\treturn func(ctx context.Context, dir string) error {\n\t\tlogging.Infof(ctx, \"Invoking transform_program: %q\", args)\n\t\ttProg := exec.CommandContext(ctx, args[0], args[1:]...)\n\t\ttProg.Stdout = os.Stderr\n\t\ttProg.Stderr = os.Stderr\n\t\ttProg.Dir = dir\n\t\treturn errors.Annotate(tProg.Run(), \"running transform_program\").Err()\n\t}\n}",
"func NewPipe(opt Opition) (*Pipe, error) {\n\tp := &Pipe{\n\t\tOpition: opt,\n\t\tApps: []*Context{},\n\n\t\tdata: map[string]interface{}{},\n\t}\n\n\tfor _, source := range p.Sources {\n\t\tif _, ok := sourceLoaders[source]; !ok {\n\t\t\treturn nil, fmt.Errorf(\"source %s not exist\", source)\n\t\t}\n\t}\n\n\tfor _, handler := range p.Handlers {\n\t\tif _, ok := sourceHandlers[handler]; !ok {\n\t\t\treturn nil, fmt.Errorf(\"handler %s not exist\", handler)\n\t\t}\n\t}\n\n\treturn p, nil\n}",
"func (sr *Stackers) Bind(r Publisher, cl bool) {\n\tvar lr Connector\n\tvar err error\n\n\tif lr, err = sr.Last(); err != nil {\n\t\tsr.Publisher.Bind(r, cl)\n\t\tsr.ro.Lock()\n\t\t{\n\t\t\tsr.stacks = append(sr.stacks, r)\n\t\t}\n\t\tsr.ro.Unlock()\n\t\treturn\n\t}\n\n\tlr.Bind(r, cl)\n\tsr.ro.Lock()\n\t{\n\t\tsr.stacks = append(sr.stacks, r)\n\t}\n\tsr.ro.Unlock()\n}",
"func (h *PipelineManager) Use(mids ...Middleware) {\n\th.mids = append(h.mids, mids...)\n}",
"func StartPipeline(service *app.Service, stages ...Stage) *Pipeline {\n\tstartPipelineMutex.Lock()\n\tdefer startPipelineMutex.Unlock()\n\n\tif pipeline := GetPipeline(PipelineID(service.ID())); pipeline != nil {\n\t\treturn pipeline\n\t}\n\n\tcheckArgs := func() {\n\t\tif service == nil {\n\t\t\tpanic(\"A pipeline requires a service to run\")\n\t\t}\n\t\tif !service.Alive() {\n\t\t\tpanic(app.ServiceNotAliveError(service.ID()))\n\t\t}\n\t\tif len(stages) == 0 {\n\t\t\tpanic(\"A pipeline must have at least 1 stage\")\n\t\t}\n\t\tfor _, stage := range stages {\n\t\t\tif stage.Command().run == nil {\n\t\t\t\tpanic(fmt.Sprintf(\"Stage Command run function was nil for : ServiceID(0x%x)\", service.ID()))\n\t\t\t}\n\t\t}\n\n\t\tserviceID := service.ID()\n\t\tfor _, metricID := range COUNTER_METRIC_IDS {\n\t\t\tif app.MetricRegistry.Counter(serviceID, metricID) == nil {\n\t\t\t\tpanic(fmt.Sprintf(\"Counter metric is missing : MetricID(0x%x)\", metricID))\n\t\t\t}\n\t\t}\n\t\tfor _, metricID := range COUNTER_VECTOR_METRIC_IDS {\n\t\t\tif app.MetricRegistry.CounterVector(serviceID, metricID) == nil {\n\t\t\t\tpanic(fmt.Sprintf(\"Counter vector metric is missing : MetricID(0x%x)\", metricID))\n\t\t\t}\n\t\t}\n\t\tfor _, metricID := range GAUGE_METRIC_IDS {\n\t\t\tif app.MetricRegistry.Gauge(serviceID, metricID) == nil {\n\t\t\t\tpanic(fmt.Sprintf(\"Gauge metric is missing : MetricID(0x%x)\", metricID))\n\t\t\t}\n\t\t}\n\t}\n\n\tcheckArgs()\n\n\tserviceID := service.ID()\n\n\tpipeline := &Pipeline{\n\t\tService: service,\n\t\tstartedOn: time.Now(),\n\t\tin: make(chan context.Context),\n\t\tout: make(chan context.Context),\n\t\tstages: stages,\n\n\t\trunCounter: app.MetricRegistry.Counter(serviceID, PIPELINE_RUN_COUNT),\n\t\tfailedCounter: app.MetricRegistry.Counter(serviceID, PIPELINE_FAILED_COUNT),\n\t\tcontextExpiredCounter: app.MetricRegistry.Counter(serviceID, PIPELINE_CONTEXT_EXPIRED_COUNT),\n\t\tprocessingTime: app.MetricRegistry.Counter(serviceID, PIPELINE_PROCESSING_TIME_SEC),\n\t\tprocessingFailedTime: app.MetricRegistry.Counter(serviceID, PIPELINE_PROCESSING_TIME_SEC_FAILED),\n\t\tchannelDeliveryTime: app.MetricRegistry.Counter(serviceID, PIPELINE_CHANNEL_DELIVERY_TIME_SEC),\n\n\t\tpingPongCounter: app.MetricRegistry.Counter(serviceID, PIPELINE_PING_PONG_COUNT),\n\t\tpingPongTime: app.MetricRegistry.Counter(serviceID, PIPELINE_PING_PONG_TIME_SEC),\n\t\tpingExpiredCounter: app.MetricRegistry.Counter(serviceID, PIPELINE_PING_EXPIRED_COUNT),\n\t\tpingExpiredTime: app.MetricRegistry.Counter(serviceID, PIPELINE_PING_EXPIRED_TIME_SEC),\n\n\t\tconsecutiveSuccessCounter: app.MetricRegistry.Gauge(serviceID, PIPELINE_CONSECUTIVE_SUCCESS_COUNT),\n\t\tconsecutiveFailureCounter: app.MetricRegistry.Gauge(serviceID, PIPELINE_CONSECUTIVE_FAILURE_COUNT),\n\t\tconsecutiveExpiredCounter: app.MetricRegistry.Gauge(serviceID, PIPELINE_CONSECUTIVE_EXPIRED_COUNT),\n\n\t\tlastSuccessTime: app.MetricRegistry.Gauge(serviceID, PIPELINE_LAST_SUCCESS_TIME),\n\t\tlastFailureTime: app.MetricRegistry.Gauge(serviceID, PIPELINE_LAST_FAILURE_TIME),\n\t\tlastExpiredTime: app.MetricRegistry.Gauge(serviceID, PIPELINE_LAST_EXPIRED_TIME),\n\t\tlastPingSuccessTime: app.MetricRegistry.Gauge(serviceID, PIPELINE_LAST_PING_SUCCESS_TIME),\n\t\tlastPingExpiredTime: app.MetricRegistry.Gauge(serviceID, PIPELINE_LAST_PING_EXPIRED_TIME),\n\t}\n\n\tfirstStageCommandID := pipeline.stages[0].cmd.id\n\tvar build func(stages []Stage, in, out chan context.Context)\n\tbuild = func(stages []Stage, in, out chan context.Context) 
{\n\t\tcreateStageWorkers := func(stage Stage, process func(ctx context.Context)) {\n\t\t\tfor i := 0; i < int(stage.PoolSize()); i++ {\n\t\t\t\tif stage.cmd.id == firstStageCommandID {\n\t\t\t\t\tservice.Go(func() error {\n\t\t\t\t\t\tfor {\n\t\t\t\t\t\t\tselect {\n\t\t\t\t\t\t\tcase <-service.Dying():\n\t\t\t\t\t\t\t\treturn nil\n\t\t\t\t\t\t\tcase ctx := <-in:\n\t\t\t\t\t\t\t\tselect {\n\t\t\t\t\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\t\t\t\t\tpipelineContextExpired(ctx, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\t\t\t\t\tdefault:\n\t\t\t\t\t\t\t\t\t// record the time when the context started the workflow, i.e., entered the first stage of the pipeline\n\t\t\t\t\t\t\t\t\tctx = startWorkflowTimer(ctx)\n\t\t\t\t\t\t\t\t\tpipeline.runCounter.Inc()\n\t\t\t\t\t\t\t\t\tprocess(ctx)\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t} else {\n\t\t\t\t\tservice.Go(func() error {\n\t\t\t\t\t\tfor {\n\t\t\t\t\t\t\tselect {\n\t\t\t\t\t\t\tcase <-service.Dying():\n\t\t\t\t\t\t\t\treturn nil\n\t\t\t\t\t\t\tcase ctx := <-in:\n\t\t\t\t\t\t\t\tselect {\n\t\t\t\t\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\t\t\t\t\tpipelineContextExpired(ctx, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\t\t\t\t\tdefault:\n\t\t\t\t\t\t\t\t\tprocess(ctx)\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t}\n\n\t\t\t}\n\t\t}\n\t\tstage := stages[0]\n\t\tif len(stages) == 1 {\n\t\t\tcreateStageWorkers(stage, func(ctx context.Context) {\n\t\t\t\tif IsPing(ctx) {\n\t\t\t\t\t// reply with pong\n\t\t\t\t\tctx = withPong(ctx)\n\t\t\t\t\tout, ok := OutputChannel(ctx)\n\t\t\t\t\tif !ok {\n\t\t\t\t\t\tout = pipeline.out\n\t\t\t\t\t}\n\n\t\t\t\t\tselect {\n\t\t\t\t\tcase <-service.Dying():\n\t\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\t\tpipelineContextExpired(ctx, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\t\tcase out <- ctx:\n\t\t\t\t\t\tpipeline.lastPingSuccessTime.Set(float64(time.Now().Unix()))\n\t\t\t\t\t}\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\tresult := stage.run(ctx)\n\t\t\t\tprocessedTime := time.Now()\n\t\t\t\tprocessingDuration := time.Now().Sub(WorkflowStartTime(ctx))\n\t\t\t\tworkflowTime := processingDuration.Seconds()\n\t\t\t\tpipeline.processingTime.Add(workflowTime)\n\t\t\t\tif err := Error(result); err != nil {\n\t\t\t\t\tcontextFailed(pipeline, ctx)\n\t\t\t\t\tpipeline.failedCounter.Inc()\n\t\t\t\t\tpipeline.processingFailedTime.Add(workflowTime)\n\t\t\t\t\tresult = WithError(result, stage.Command().id, err)\n\t\t\t\t\tpipeline.lastFailureTime.Set(float64(time.Now().Unix()))\n\t\t\t\t\tpipeline.consecutiveFailureCounter.Inc()\n\t\t\t\t\tpipeline.consecutiveSuccessCounter.Set(0)\n\t\t\t\t}\n\n\t\t\t\tout, ok := OutputChannel(result)\n\t\t\t\tif !ok {\n\t\t\t\t\tout = pipeline.out\n\t\t\t\t}\n\n\t\t\t\tselect {\n\t\t\t\tcase <-service.Dying():\n\t\t\t\t\treturn\n\t\t\t\tcase <-result.Done():\n\t\t\t\t\tpipelineContextExpired(result, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\tcase out <- result:\n\t\t\t\t\tdeliveryTime := time.Now().Sub(processedTime).Seconds()\n\t\t\t\t\tpipeline.channelDeliveryTime.Add(deliveryTime)\n\n\t\t\t\t\tif Error(result) == nil 
{\n\t\t\t\t\t\tpipeline.lastSuccessTime.Set(float64(time.Now().Unix()))\n\t\t\t\t\t\tpipeline.consecutiveSuccessCounter.Inc()\n\t\t\t\t\t\tpipeline.consecutiveFailureCounter.Set(0)\n\t\t\t\t\t\tpipeline.consecutiveExpiredCounter.Set(0)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn\n\t\t}\n\n\t\tcreateStageWorkers(stage, func(ctx context.Context) {\n\t\t\tif IsPing(ctx) {\n\t\t\t\t// send the context downstream, i.e., to the next stage\n\t\t\t\tselect {\n\t\t\t\tcase <-service.Dying():\n\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\tpipelineContextExpired(ctx, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\tcase out <- ctx:\n\t\t\t\t}\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tresult := stage.run(ctx)\n\t\t\tprocessedTime := time.Now()\n\t\t\tif err := Error(result); err != nil {\n\t\t\t\tcontextFailed(pipeline, ctx)\n\t\t\t\tpipeline.failedCounter.Inc()\n\t\t\t\tresult = WithError(result, stage.Command().id, err)\n\t\t\t\tpipeline.lastFailureTime.Set(float64(time.Now().Unix()))\n\t\t\t\tselect {\n\t\t\t\tcase <-service.Dying():\n\t\t\t\t\treturn\n\t\t\t\tcase <-result.Done():\n\t\t\t\t\tpipelineContextExpired(result, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\tcase pipeline.out <- result:\n\t\t\t\t\tdeliveryTime := time.Now().Sub(processedTime).Seconds()\n\t\t\t\t\tpipeline.channelDeliveryTime.Add(deliveryTime)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tselect {\n\t\t\t\tcase <-service.Dying():\n\t\t\t\t\treturn\n\t\t\t\tcase <-result.Done():\n\t\t\t\t\tpipelineContextExpired(result, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\tcase out <- result:\n\t\t\t\t\tdeliveryTime := time.Now().Sub(processedTime).Seconds()\n\t\t\t\t\tpipeline.channelDeliveryTime.Add(deliveryTime)\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\n\t\tbuild(stages[1:], out, make(chan context.Context))\n\t}\n\n\tbuild(stages, pipeline.in, make(chan context.Context))\n\n\tgo func() {\n\t\tdefer unregisterPipeline(pipeline.ID())\n\t\tselect {\n\t\tcase <-service.Dying():\n\t\tcase <-app.Dying():\n\t\t}\n\t}()\n\n\tregisterPipeline(pipeline)\n\tapp.SERVICE_STARTED.Log(service.Logger().Info()).Msg(\"Pipeline started\")\n\n\treturn pipeline\n}",
"func (s *BaseSyslParserListener) EnterApplication(ctx *ApplicationContext) {}",
"func PipelineFromContext(ctx *cli.Context) drone.Pipeline {\n\treturn drone.Pipeline{\n\t\tBuild: buildFromContext(ctx),\n\t\tRepo: repoFromContext(ctx),\n\t\tCommit: commitFromContext(ctx),\n\t\tStage: stageFromContext(ctx),\n\t\tStep: stepFromContext(ctx),\n\t\tSemVer: semVerFromContext(ctx),\n\t\tCalVer: calVerFromContext(ctx),\n\t\tSystem: systemFromContext(ctx),\n\t}\n}",
"func (c *Context) Use(h ...Handler) {\n if h != nil {\n for _, e := range h {\n c.pipeline = c.pipeline.Add(e)\n }\n }\n}",
"func (c *Client) Pipeline() runtime.Pipeline {\n\treturn c.pl\n}",
"func (j *Junction) AddPipeline(key interface{}, p *Pipeline) *Junction {\n\tj.router[key] = p\n\treturn j\n}",
"func GenProgramPipelines(n int32, pipelines *uint32) {\n\tC.glowGenProgramPipelines(gpGenProgramPipelines, (C.GLsizei)(n), (*C.GLuint)(unsafe.Pointer(pipelines)))\n}",
"func GenProgramPipelines(n int32, pipelines *uint32) {\n\tC.glowGenProgramPipelines(gpGenProgramPipelines, (C.GLsizei)(n), (*C.GLuint)(unsafe.Pointer(pipelines)))\n}",
"func (h SpanHook) BeforeProcessPipeline(ctx context.Context, cmds []redis.Cmder) (context.Context, error) {\n\treturn h.startChildSpan(ctx, \"pipeline\"), nil\n}",
"func (s *BaseLittleDuckListener) EnterPrograma(ctx *ProgramaContext) {}",
"func NewPipeline() Pipeline {\n\n\tp := &pipeline{}\n\tp.head = newHandlerContext(p, headHandler{}, nil, nil)\n\tp.tail = newHandlerContext(p, tailHandler{}, nil, nil)\n\n\tp.head.next = p.tail\n\tp.tail.prev = p.head\n\n\t// head + tail\n\tp.size = 2\n\treturn p\n}",
"func GenProgramPipelines(n int32, pipelines *uint32) {\n C.glowGenProgramPipelines(gpGenProgramPipelines, (C.GLsizei)(n), (*C.GLuint)(unsafe.Pointer(pipelines)))\n}",
"func newProgram(e *Env, ast *Ast, opts []ProgramOption) (Program, error) {\n\t// Build the dispatcher, interpreter, and default program value.\n\tdisp := interpreter.NewDispatcher()\n\n\t// Ensure the default attribute factory is set after the adapter and provider are\n\t// configured.\n\tp := &prog{\n\t\tEnv: e,\n\t\tdecorators: []interpreter.InterpretableDecorator{},\n\t\tdispatcher: disp,\n\t}\n\n\t// Configure the program via the ProgramOption values.\n\tvar err error\n\tfor _, opt := range opts {\n\t\tp, err = opt(p)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// Add the function bindings created via Function() options.\n\tfor _, fn := range e.functions {\n\t\tbindings, err := fn.bindings()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\terr = disp.Add(bindings...)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// Set the attribute factory after the options have been set.\n\tvar attrFactory interpreter.AttributeFactory\n\tif p.evalOpts&OptPartialEval == OptPartialEval {\n\t\tattrFactory = interpreter.NewPartialAttributeFactory(e.Container, e.adapter, e.provider)\n\t} else {\n\t\tattrFactory = interpreter.NewAttributeFactory(e.Container, e.adapter, e.provider)\n\t}\n\tinterp := interpreter.NewInterpreter(disp, e.Container, e.provider, e.adapter, attrFactory)\n\tp.interpreter = interp\n\n\t// Translate the EvalOption flags into InterpretableDecorator instances.\n\tdecorators := make([]interpreter.InterpretableDecorator, len(p.decorators))\n\tcopy(decorators, p.decorators)\n\n\t// Enable interrupt checking if there's a non-zero check frequency\n\tif p.interruptCheckFrequency > 0 {\n\t\tdecorators = append(decorators, interpreter.InterruptableEval())\n\t}\n\t// Enable constant folding first.\n\tif p.evalOpts&OptOptimize == OptOptimize {\n\t\tdecorators = append(decorators, interpreter.Optimize())\n\t\tp.regexOptimizations = append(p.regexOptimizations, interpreter.MatchesRegexOptimization)\n\t}\n\t// Enable regex compilation of constants immediately after folding constants.\n\tif len(p.regexOptimizations) > 0 {\n\t\tdecorators = append(decorators, interpreter.CompileRegexConstants(p.regexOptimizations...))\n\t}\n\t// Enable compile-time checking of syntax/cardinality for string.format calls.\n\tif p.evalOpts&OptCheckStringFormat == OptCheckStringFormat {\n\t\tvar isValidType func(id int64, validTypes ...*types.TypeValue) (bool, error)\n\t\tif ast.IsChecked() {\n\t\t\tisValidType = func(id int64, validTypes ...*types.TypeValue) (bool, error) {\n\t\t\t\tt, err := ExprTypeToType(ast.typeMap[id])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn false, err\n\t\t\t\t}\n\t\t\t\tif t.kind == DynKind {\n\t\t\t\t\treturn true, nil\n\t\t\t\t}\n\t\t\t\tfor _, vt := range validTypes {\n\t\t\t\t\tk, err := typeValueToKind(vt)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn false, err\n\t\t\t\t\t}\n\t\t\t\t\tif k == t.kind {\n\t\t\t\t\t\treturn true, nil\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn false, nil\n\t\t\t}\n\t\t} else {\n\t\t\t// if the AST isn't type-checked, short-circuit validation\n\t\t\tisValidType = func(id int64, validTypes ...*types.TypeValue) (bool, error) {\n\t\t\t\treturn true, nil\n\t\t\t}\n\t\t}\n\t\tdecorators = append(decorators, interpreter.InterpolateFormattedString(isValidType))\n\t}\n\n\t// Enable exhaustive eval, state tracking and cost tracking last since they require a factory.\n\tif p.evalOpts&(OptExhaustiveEval|OptTrackState|OptTrackCost) != 0 {\n\t\tfactory := func(state interpreter.EvalState, costTracker *interpreter.CostTracker) 
(Program, error) {\n\t\t\tcostTracker.Estimator = p.callCostEstimator\n\t\t\tcostTracker.Limit = p.costLimit\n\t\t\t// Limit capacity to guarantee a reallocation when calling 'append(decs, ...)' below. This\n\t\t\t// prevents the underlying memory from being shared between factory function calls causing\n\t\t\t// undesired mutations.\n\t\t\tdecs := decorators[:len(decorators):len(decorators)]\n\t\t\tvar observers []interpreter.EvalObserver\n\n\t\t\tif p.evalOpts&(OptExhaustiveEval|OptTrackState) != 0 {\n\t\t\t\t// EvalStateObserver is required for OptExhaustiveEval.\n\t\t\t\tobservers = append(observers, interpreter.EvalStateObserver(state))\n\t\t\t}\n\t\t\tif p.evalOpts&OptTrackCost == OptTrackCost {\n\t\t\t\tobservers = append(observers, interpreter.CostObserver(costTracker))\n\t\t\t}\n\n\t\t\t// Enable exhaustive eval over a basic observer since it offers a superset of features.\n\t\t\tif p.evalOpts&OptExhaustiveEval == OptExhaustiveEval {\n\t\t\t\tdecs = append(decs, interpreter.ExhaustiveEval(), interpreter.Observe(observers...))\n\t\t\t} else if len(observers) > 0 {\n\t\t\t\tdecs = append(decs, interpreter.Observe(observers...))\n\t\t\t}\n\n\t\t\treturn p.clone().initInterpretable(ast, decs)\n\t\t}\n\t\treturn newProgGen(factory)\n\t}\n\treturn p.initInterpretable(ast, decorators)\n}",
"func pipeline(handler httpHandler, middlewares ...Middleware) httpHandler {\n\tif len(middlewares) == 0 {\n\t\treturn handler\n\t}\n\tmiddleware := middlewares[len(middlewares)-1]\n\tmiddlewares = middlewares[:len(middlewares)-1]\n\tcomplexHandler := func(ctx *Context, args ...string) {\n\t\t//create a composite pipeline using middleware\n\t\tmiddleware.Invoke(ctx, func(arguments ...string) HTTPHandler {\n\t\t\treturn func(context *Context) {\n\t\t\t\thandler(context, arguments...)\n\t\t\t}\n\t\t}(args...))\n\t}\n\treturn pipeline(complexHandler, middlewares...)\n}",
"func GenProgramPipelines(n int32, pipelines *uint32) {\n\tsyscall.Syscall(gpGenProgramPipelines, 2, uintptr(n), uintptr(unsafe.Pointer(pipelines)), 0)\n}",
"func (s *state) evalPipeline(dot reflect.Value, pipe *parse.PipeNode) (value reflect.Value) {\n\tif pipe == nil {\n\t\treturn\n\t}\n\ts.at(pipe)\n\tfor _, cmd := range pipe.Cmds {\n\t\tvalue = s.evalCommand(dot, cmd, value) // previous value is this one's final arg.\n\t\t// If the object has type interface{}, dig down one level to the thing inside.\n\t\tif value.Kind() == reflect.Interface && value.Type().NumMethod() == 0 {\n\t\t\tvalue = reflect.ValueOf(value.Interface()) // lovely!\n\t\t}\n\t}\n\tfor _, variable := range pipe.Decl {\n\t\ts.push(variable.Ident[0], value)\n\t}\n\treturn value\n}",
"func (p *Pipeline) Run(ctx context.Context) {\n\tp.runMutex.Lock()\n\tdefer p.runMutex.Unlock()\n\tif p.status == STATUS_RUN {\n\t\treturn\n\t}\n\t//logrus.Debug(\"mysql position\", p.Input.Options.Position)\n\tmyCtx, cancel := context.WithCancel(ctx)\n\tp.ctx = myCtx\n\tgo func() {\n\t\tvar err error\n\t\tdefer func() {\n\t\t\tif r := recover(); r != nil {\n\t\t\t\tlogrus.Errorln(\"pipeline run panic, \", r)\n\t\t\t}\n\t\t\tcancel()\n\t\t}()\n\t\tif err = p.Input.Run(myCtx); err != nil {\n\t\t\tevent.Event(event2.NewErrorPipeline(p.Options.Pipeline.Name, \"Start error: \"+err.Error()))\n\t\t\treturn\n\t\t}\n\t\tif err = p.Filter.Run(myCtx); err != nil {\n\t\t\tevent.Event(event2.NewErrorPipeline(p.Options.Pipeline.Name, \"Start error: \"+err.Error()))\n\t\t\treturn\n\t\t}\n\t\tif err = p.Output.Run(myCtx); err != nil {\n\t\t\tevent.Event(event2.NewErrorPipeline(p.Options.Pipeline.Name, \"Start error: \"+err.Error()))\n\t\t\treturn\n\t\t}\n\t\tevent.Event(event2.NewInfoPipeline(p.Options.Pipeline.Name, \"Start succeeded\"))\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\t{\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tcase <-p.Input.Context().Done():\n\t\t\t\t{\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tcase <-p.Filter.Context().Done():\n\t\t\t\t{\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tcase <-p.Output.Context().Done():\n\t\t\t\t{\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}()\n}",
"func bindContext(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {\n\tparsed, err := abi.JSON(strings.NewReader(ContextABI))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil\n}",
"func bindContext(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {\n\tparsed, err := abi.JSON(strings.NewReader(ContextABI))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil\n}",
"func bindContext(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {\n\tparsed, err := abi.JSON(strings.NewReader(ContextABI))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil\n}",
"func bindContext(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {\n\tparsed, err := abi.JSON(strings.NewReader(ContextABI))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil\n}",
"func Program(p []syscall.BpfInsn) func(*NetworkTap) error {\n\treturn func(filterdev *NetworkTap) error {\n\t\terr := filterdev.SetFilter(p)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t}\n}",
"func (r *Reconciler) bind(\n\tlogger *log.Log,\n\tbm *ServiceBinder,\n\tsbrStatus *v1alpha1.ServiceBindingRequestStatus,\n) (\n\treconcile.Result,\n\terror,\n) {\n\tlogger = logger.WithName(\"bind\")\n\n\tlogger.Info(\"Binding applications with intermediary secret...\")\n\treturn bm.Bind()\n}",
"func main() {\n\tprocessor.RegisterProcessors(Process)\n}",
"func (bot *botContext) runPipeline(t interface{}, interactive bool, ptype pipelineType, command string, args ...string) {\n\ttask, plugin, job := getTask(t) // NOTE: later _ will be job; this is where notifies will be sent\n\tisPlugin := plugin != nil\n\tisJob := !isPlugin\n\tverbose := (isJob && job.Verbose) || ptype == runJob\n\tbot.pipeName = task.name\n\tbot.pipeDesc = task.Description\n\tbot.NameSpace = task.NameSpace\n\t// TODO: Replace the waitgroup, pluginsRunning, defer func(), etc.\n\trobot.Add(1)\n\trobot.Lock()\n\trobot.pluginsRunning++\n\thistory := robot.history\n\ttz := robot.timeZone\n\trobot.Unlock()\n\tdefer func() {\n\t\trobot.Lock()\n\t\trobot.pluginsRunning--\n\t\t// TODO: this check shouldn't be necessary; remove and test\n\t\tif robot.pluginsRunning >= 0 {\n\t\t\trobot.Done()\n\t\t}\n\t\trobot.Unlock()\n\t}()\n\tvar runIndex int\n\tif task.HistoryLogs > 0 || isJob {\n\t\tvar th taskHistory\n\t\trememberRuns := task.HistoryLogs\n\t\tif rememberRuns == 0 {\n\t\t\trememberRuns = 1\n\t\t}\n\t\tkey := histPrefix + bot.pipeName\n\t\ttok, _, ret := checkoutDatum(key, &th, true)\n\t\tif ret != Ok {\n\t\t\tLog(Error, fmt.Sprintf(\"Error checking out '%s', no history will be remembered for '%s'\", key, bot.pipeName))\n\t\t} else {\n\t\t\tvar start time.Time\n\t\t\tif tz != nil {\n\t\t\t\tstart = time.Now().In(tz)\n\t\t\t} else {\n\t\t\t\tstart = time.Now()\n\t\t\t}\n\t\t\trunIndex = th.NextIndex\n\t\t\thist := historyLog{\n\t\t\t\tLogIndex: runIndex,\n\t\t\t\tCreateTime: start.Format(\"Mon Jan 2 15:04:05 MST 2006\"),\n\t\t\t}\n\t\t\tth.NextIndex++\n\t\t\tth.Histories = append(th.Histories, hist)\n\t\t\tl := len(th.Histories)\n\t\t\tif l > rememberRuns {\n\t\t\t\tth.Histories = th.Histories[l-rememberRuns:]\n\t\t\t}\n\t\t\tret := updateDatum(key, tok, th)\n\t\t\tif ret != Ok {\n\t\t\t\tLog(Error, fmt.Sprintf(\"Error updating '%s', no history will be remembered for '%s'\", key, bot.pipeName))\n\t\t\t} else {\n\t\t\t\tif task.HistoryLogs > 0 {\n\t\t\t\t\tpipeHistory, err := history.NewHistory(bot.pipeName, hist.LogIndex, task.HistoryLogs)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tLog(Error, fmt.Sprintf(\"Error starting history for '%s', no history will be recorded: %v\", bot.pipeName, err))\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbot.logger = pipeHistory\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Set up the environment for the pipeline, in order of precedence high-low.\n\t// Done in reverse order with existence checking because the context may\n\t// already have dynamically provided environment vars, which are highest\n\t// precedence. Environment vars are retrievable as environment variables for\n\t// scripts, or using GetParameter(...) 
in Go plugins.\n\tif isJob {\n\t\tfor _, p := range job.Parameters {\n\t\t\t// Dynamically provided parameters take precedence over configured parameters\n\t\t\t_, exists := bot.environment[p.Name]\n\t\t\tif !exists {\n\t\t\t\tbot.environment[p.Name] = p.Value\n\t\t\t}\n\t\t}\n\t}\n\tstoredEnv := make(map[string]string)\n\t// Global environment for pipeline from first task\n\t_, exists, _ := checkoutDatum(paramPrefix+task.NameSpace, &storedEnv, false)\n\tif exists {\n\t\tfor key, value := range storedEnv {\n\t\t\t// Dynamically provided and configured parameters take precedence over stored parameters\n\t\t\t_, exists := bot.environment[key]\n\t\t\tif !exists {\n\t\t\t\tbot.environment[key] = value\n\t\t\t}\n\t\t}\n\t}\n\tbot.pipeStarting = true\n\tfor _, p := range envPassThrough {\n\t\t_, exists := bot.environment[p]\n\t\tif !exists {\n\t\t\t// Note that we even pass through empty vars - any harm?\n\t\t\tbot.environment[p] = os.Getenv(p)\n\t\t}\n\t}\n\n\t// Once Active, we need to use the Mutex for access to some fields; see\n\t// botcontext/type botContext\n\tbot.registerActive()\n\tr := bot.makeRobot()\n\tvar errString string\n\tvar ret TaskRetVal\n\tif verbose {\n\t\tr.Say(fmt.Sprintf(\"Starting job '%s', run %d\", task.name, runIndex))\n\t}\n\tfor {\n\t\t// NOTE: if RequireAdmin is true, the user can't access the plugin at all if not an admin\n\t\tif isPlugin && len(plugin.AdminCommands) > 0 {\n\t\t\tadminRequired := false\n\t\t\tfor _, i := range plugin.AdminCommands {\n\t\t\t\tif command == i {\n\t\t\t\t\tadminRequired = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t\tif adminRequired {\n\t\t\t\tif !r.CheckAdmin() {\n\t\t\t\t\tr.Say(\"Sorry, that command is only available to bot administrators\")\n\t\t\t\t\tret = Fail\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif !bot.bypassSecurityChecks {\n\t\t\tif bot.checkAuthorization(t, command, args...) 
!= Success {\n\t\t\t\tret = Fail\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tif !bot.elevated {\n\t\t\t\teret, required := bot.checkElevation(t, command)\n\t\t\t\tif eret != Success {\n\t\t\t\t\tret = Fail\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tif required {\n\t\t\t\t\tbot.elevated = true\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tswitch ptype {\n\t\tcase plugCommand:\n\t\t\temit(CommandTaskRan) // for testing, otherwise noop\n\t\tcase plugMessage:\n\t\t\temit(AmbientTaskRan)\n\t\tcase catchAll:\n\t\t\temit(CatchAllTaskRan)\n\t\tcase jobTrigger:\n\t\t\temit(TriggeredTaskRan)\n\t\tcase scheduled:\n\t\t\temit(ScheduledTaskRan)\n\t\tcase runJob:\n\t\t\temit(RunJobTaskRan)\n\t\t}\n\t\tbot.debug(fmt.Sprintf(\"Running task with command '%s' and arguments: %v\", command, args), false)\n\t\terrString, ret = bot.callTask(t, command, args...)\n\t\tbot.debug(fmt.Sprintf(\"Task finished with return value: %s\", ret), false)\n\n\t\tif ret != Normal {\n\t\t\tif interactive && errString != \"\" {\n\t\t\t\tr.Reply(errString)\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\t\tif len(bot.nextTasks) > 0 {\n\t\t\tvar ts taskSpec\n\t\t\tts, bot.nextTasks = bot.nextTasks[0], bot.nextTasks[1:]\n\t\t\t_, plugin, _ := getTask(ts.task)\n\t\t\tisPlugin = plugin != nil\n\t\t\tif isPlugin {\n\t\t\t\tcommand = ts.Command\n\t\t\t\targs = ts.Arguments\n\t\t\t} else {\n\t\t\t\tcommand = \"run\"\n\t\t\t\targs = []string{}\n\t\t\t}\n\t\t\tt = ts.task\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\tbot.deregister()\n\tif bot.logger != nil {\n\t\tbot.logger.Section(\"done\", \"pipeline has completed\")\n\t\tbot.logger.Close()\n\t}\n\tif ret == Normal && verbose {\n\t\tr.Say(fmt.Sprintf(\"Finished job '%s', run %d\", bot.pipeName, runIndex))\n\t}\n\tif ret != Normal && isJob {\n\t\ttask, _, _ := getTask(t)\n\t\tr.Reply(fmt.Sprintf(\"Job '%s', run number %d failed in task: '%s'\", bot.pipeName, runIndex, task.name))\n\t}\n}",
"func (p *Pipeline) Context() context.Context {\n\treturn p.ctx\n}",
"func (p *Pipeline) Context() context.Context {\n\treturn p.ctx\n}",
"func (p *Pipeline) Context() context.Context {\n\treturn p.ctx\n}",
"func Program(name string, env []string) Runner {\n\treturn &program{\n\t\tname: name,\n\t\tenv: append(os.Environ(), env...),\n\t}\n}",
"func ActiveShaderProgram(pipeline uint32, program uint32) {\n C.glowActiveShaderProgram(gpActiveShaderProgram, (C.GLuint)(pipeline), (C.GLuint)(program))\n}",
"func (bf *BuiltInFunc) Bind(instance *LoxInstance) Callable {\n\tbf.instance = instance\n\t// return itself.\n\treturn bf\n}",
"func Push(context *endly.Context, process *model.Process) {\n\tvar processes = processes(context)\n\tif process.Source != nil {\n\t\tcontext.Source = process.Source\n\t}\n\tprocesses.Push(process)\n}",
"func main() {\n\tcmd.CliHandling()\n}",
"func IsProgramPipeline(pipeline uint32) bool {\n ret := C.glowIsProgramPipeline(gpIsProgramPipeline, (C.GLuint)(pipeline))\n return ret == TRUE\n}",
"func pipeProcess() {\n\tcmd := findCommandPath(os.Args[0], false)\n\targs := []string{}\n\tif len(os.Args) > 1 {\n\t\targs = os.Args[1:]\n\t}\n\n\tp := exec.Command(cmd, args...)\n\tp.Stdout = os.Stdout\n\tp.Stdin = os.Stdin\n\tp.Stderr = os.Stderr\n\n\terr := p.Start()\n\tif err != nil {\n\t\texitWithError(\"unable to launch\", cmd, args, err)\n\t}\n\n\tp.Wait()\n}",
"func BindContext(hndl http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {\n\t\tprint(\"Binding context\\n\")\n\t\tctx := OpenCtx(req)\n\t\tprint(\"BindContext: \", ctx, \"\\n\")\n\n\t\tdefer closeCtx(req)\n\t\thndl.ServeHTTP(w, req)\n\t})\n}",
"func main() {\n\tfmt.Println(\"Start Test....!\")\n\tinputDB := setupDB(\"mysql\", \"root:root123@tcp(127.0.0.1:13306)/srcDB\")\n\textractDP := processors.NewSQLReader(inputDB, mypkg.Query(5))\n\n\ttransformDP := mypkg.NewMyTransformer()\n\tfmt.Println(transformDP)\n\n\toutputDB := setupDB(\"mysql\", \"root:root123@tcp(127.0.0.1:13306)/dstDB\")\n\toutputTable := \"krew_info\"\n\tloadDP := processors.NewSQLWriter(outputDB, outputTable)\n\n\tpipeline := ratchet.NewPipeline(extractDP, transformDP, loadDP)\n\tpipeline.Name = \"My Pipeline\"\n\n\terr := <-pipeline.Run()\n\tif err != nil {\n\t\tlogger.ErrorWithoutTrace(pipeline.Name, \":\", err)\n\t\tlogger.ErrorWithoutTrace(pipeline.Stats())\n\t} else {\n\t\tlogger.Info(pipeline.Name, \": Completed successfully.\")\n\t}\n}",
"func (p Pipe) Pipe(j ...Stage) Pipe {\n\tswitch len(j) {\n\tcase 0:\n\t\treturn p\n\tcase 1:\n\t\treturn j[0].pipe(p)\n\tdefault:\n\t\treturn j[0].pipe(p).Pipe(j[1:]...)\n\t}\n}",
"func newPipelineCommandHandler(repository eventstore.Repository) *pipelineCommandHandler {\n\treturn &pipelineCommandHandler{\n\t\trepository: repository,\n\t}\n}",
"func NewPipeline(ls ...interface{}) (*Pipe, error) {\n\tvar pipe []interface{}\n\n\tp := &Pipe{\n\t\tls: pipe,\n\t}\n\n\tfor _, f := range ls {\n\t\tif err := p.Add(f); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn p, nil\n}",
"func ActiveShaderProgram(pipeline uint32, program uint32) {\n\tsyscall.Syscall(gpActiveShaderProgram, 2, uintptr(pipeline), uintptr(program), 0)\n}",
"func New(stdin io.Reader, stdout io.Writer, stderr io.Writer) *Pipeline {\n pl := &Pipeline{}\n pl.input = stdin\n pl.output = stdout\n pl.err = stderr\n pl.tasks = []*exec.Cmd{}\n return pl\n}",
"func IsProgramPipeline(pipeline uint32) bool {\n\tret, _, _ := syscall.Syscall(gpIsProgramPipeline, 1, uintptr(pipeline), 0, 0)\n\treturn ret != 0\n}",
"func (ans *answer) setPipelineCaller(c *lockedConn, m capnp.Method, pcall capnp.PipelineCaller) {\n\tc.assertIs(ans.c)\n\n\tif !ans.flags.Contains(resultsReady) {\n\t\tans.pcall = pcall\n\t\tans.promise = capnp.NewPromise(m, pcall)\n\t}\n}",
"func main() {\n\terr := app.Execute()\n\tif err != nil {\n\t\tlog.WithError(err).Error(\"Error while Execute lookatch\")\n\t}\n}",
"func bindProcessToCPU(pid string, cpus ...int) error {\n\tvar procStr, sep string\n\tfor _, proc := range cpus {\n\t\tprocStr += sep + strconv.Itoa(proc)\n\t\tsep = \",\"\n\t}\n\tlog.Debugf(\"binding pid %s to processor %v\", pid, cpus)\n\tif err := exec.Command(\"taskset\", \"--all-tasks\", \"-cp\", procStr, pid).Run(); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}",
"func NewPipeline() *Pipeline {\n\treturn &Pipeline{\n\t\tmake(chan struct{}),\n\t\tsync.WaitGroup{},\n\t\tsync.Mutex{},\n\t\tnil,\n\t}\n}",
"func reusableBind(c echo.Context, form any) error {\n\tif c.Request().Body != nil {\n\t\tbody := c.Request().Body\n\t\tdefer func() { _ = body.Close() }()\n\t\tbuffer := bytes.Buffer{}\n\t\tc.Request().Body = io.NopCloser(io.TeeReader(body, &buffer))\n\t\terr := c.Bind(form)\n\t\tc.Request().Body = io.NopCloser(&buffer)\n\t\treturn err\n\t}\n\treturn c.Bind(form)\n}",
"func main() {\n\tpgs.Init().RegisterModule(&cMod{&pgs.ModuleBase{}}).Render()\n}",
"func main() {\n\tgollery.CliAccess()\n}",
"func (cfg *Config) bind() (*Build, error) {\n\tnamedTypes := cfg.buildNamedTypes()\n\n\tprog, err := cfg.loadProgram(namedTypes, true)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"loading failed\")\n\t}\n\n\timports := buildImports(namedTypes, cfg.Exec.Dir())\n\tcfg.bindTypes(imports, namedTypes, cfg.Exec.Dir(), prog)\n\n\tobjects, err := cfg.buildObjects(namedTypes, prog, imports)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tinputs, err := cfg.buildInputs(namedTypes, prog, imports)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tb := &Build{\n\t\tPackageName: cfg.Exec.Package,\n\t\tObjects: objects,\n\t\tInterfaces: cfg.buildInterfaces(namedTypes, prog),\n\t\tInputs: inputs,\n\t\tImports: imports.finalize(),\n\t}\n\n\tif qr, ok := cfg.schema.EntryPoints[\"query\"]; ok {\n\t\tb.QueryRoot = b.Objects.ByName(qr.TypeName())\n\t}\n\n\tif mr, ok := cfg.schema.EntryPoints[\"mutation\"]; ok {\n\t\tb.MutationRoot = b.Objects.ByName(mr.TypeName())\n\t}\n\n\tif sr, ok := cfg.schema.EntryPoints[\"subscription\"]; ok {\n\t\tb.SubscriptionRoot = b.Objects.ByName(sr.TypeName())\n\t}\n\n\tif b.QueryRoot == nil {\n\t\treturn b, fmt.Errorf(\"query entry point missing\")\n\t}\n\n\t// Poke a few magic methods into query\n\tq := b.Objects.ByName(b.QueryRoot.GQLType)\n\tq.Fields = append(q.Fields, Field{\n\t\tType: &Type{namedTypes[\"__Schema\"], []string{modPtr}, \"\"},\n\t\tGQLName: \"__schema\",\n\t\tNoErr: true,\n\t\tGoMethodName: \"ec.introspectSchema\",\n\t\tObject: q,\n\t})\n\tq.Fields = append(q.Fields, Field{\n\t\tType: &Type{namedTypes[\"__Type\"], []string{modPtr}, \"\"},\n\t\tGQLName: \"__type\",\n\t\tNoErr: true,\n\t\tGoMethodName: \"ec.introspectType\",\n\t\tArgs: []FieldArgument{\n\t\t\t{GQLName: \"name\", Type: &Type{namedTypes[\"String\"], []string{}, \"\"}, Object: &Object{}},\n\t\t},\n\t\tObject: q,\n\t})\n\n\treturn b, nil\n}",
"func (o *GetBeneficiariesParams) bindProgramID(rawData []string, hasKey bool, formats strfmt.Registry) error {\n\tif !hasKey {\n\t\treturn errors.Required(\"programId\", \"query\", rawData)\n\t}\n\tvar raw string\n\tif len(rawData) > 0 {\n\t\traw = rawData[len(rawData)-1]\n\t}\n\n\t// Required: true\n\t// AllowEmptyValue: false\n\tif err := validate.RequiredString(\"programId\", \"query\", raw); err != nil {\n\t\treturn err\n\t}\n\n\to.ProgramID = raw\n\n\treturn nil\n}",
"func (c *client) pipeline(commands []commandPair) (interface{}, error) {\n\treturn c.withRetry(func() (interface{}, error) { return c.doPipeline(commands) })\n}",
"func ValidateProgramPipeline(pipeline uint32) {\n C.glowValidateProgramPipeline(gpValidateProgramPipeline, (C.GLuint)(pipeline))\n}",
"func main() {\n\tprocess_command_line()\n}",
"func BindAuth(app *app.App) {\n\tapp.BindFilter(authFilter)\n\tapp.Bind(AuthCommand, authCmd)\n}",
"func main() {\n\tmodule.Create(\"init\", 0)\n\tmodule.Log()\n\tReadLine(\"D:/input.txt\", processLine)\n\tmodule.Show_pcb(\"r\")\n\tfmt.Println()\n\tmodule.List_all_process()\n\tfmt.Println()\n\tmodule.List_all_resource()\n}",
"func (m *Mainloop) Bind(sig os.Signal, f func()) (err error) {\n\tfor s, _ := range m.Bindings {\n\t\tif sig == s {\n\t\t\terr = SignalAlreadyBoundError\n\t\t\treturn\n\t\t}\n\t}\n\tm.Bindings[sig] = f\n\treturn nil\n}",
"func main() {\n\tlambda.Start(wflambda.Wrapper(handler))\n}",
"func main() {\n\tlambda.Start(wflambda.Wrapper(handler))\n}",
"func main() {\n\tlambda.Start(wflambda.Wrapper(handler))\n}",
"func NewPipeline(definitionPath, environmentPath string, environment types.StringMap, ignoredSteps types.StringSet, selectedSteps types.StringSet) (*Pipeline, error) {\n\tp := &Pipeline{}\n\tvar err error\n\t// Load environment\n\tp.Environment, err = NewPipelineEnvironment(environmentPath, environment, ignoredSteps, selectedSteps)\n\tif err != nil {\n\t\t// As environment files are optional, handle if non is accessible\n\t\tif e, ok := err.(*os.PathError); ok && e.Err == syscall.ENOENT {\n\t\t\tlog.Print(\"No environment file is used\")\n\t\t} else {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\t// Load definition\n\tp.Definition, err = NewPipelineDefinition(definitionPath, p.Environment)\n\tp.localRunner = NewLocalRunner(\"pipeline\", os.Stdout, os.Stderr)\n\tp.noopRunner = NewNoopRunner(false)\n\treturn p, err\n}",
"func (w *worker) registerActive(parent *worker) {\n\t// Only needed for bots not created by IncomingMessage\n\tif w.maps == nil {\n\t\tcurrentUCMaps.Lock()\n\t\tw.maps = currentUCMaps.ucmap\n\t\tcurrentUCMaps.Unlock()\n\t}\n\tif len(w.ProtocolUser) == 0 && len(w.User) > 0 {\n\t\tif idRegex.MatchString(w.User) {\n\t\t\tw.ProtocolUser = w.User\n\t\t} else if ui, ok := w.maps.user[w.User]; ok {\n\t\t\tw.ProtocolUser = bracket(ui.UserID)\n\t\t\tw.BotUser = ui.BotUser\n\t\t} else {\n\t\t\tw.ProtocolUser = w.User\n\t\t}\n\t}\n\tif len(w.ProtocolChannel) == 0 && len(w.Channel) > 0 {\n\t\tif idRegex.MatchString(w.Channel) {\n\t\t\tw.ProtocolChannel = w.Channel\n\t\t} else if ci, ok := w.maps.channel[w.Channel]; ok {\n\t\t\tw.ProtocolChannel = bracket(ci.ChannelID)\n\t\t} else {\n\t\t\tw.ProtocolChannel = w.Channel\n\t\t}\n\t}\n\n\tactivePipelines.Lock()\n\tif len(w.eid) == 0 {\n\t\tvar eid string\n\t\tfor {\n\t\t\t// 4 bytes of entropy per pipeline\n\t\t\tb := make([]byte, 4)\n\t\t\trand.Read(b)\n\t\t\teid = fmt.Sprintf(\"%02x%02x%02x%02x\", b[0], b[1], b[2], b[3])\n\t\t\tif _, ok := activePipelines.eids[eid]; !ok {\n\t\t\t\tactivePipelines.eids[eid] = struct{}{}\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tw.eid = eid\n\t}\n\tif parent != nil {\n\t\tparent._child = w\n\t\tw._parent = parent\n\t}\n\tactivePipelines.i[w.id] = w\n\tactivePipelines.Unlock()\n\tw.active = true\n}",
"func UseProgram(p Program) {\n\tgl.UseProgram(p.Value)\n}",
"func (r *Robot) pipeTask(pflavor pipeAddFlavor, ptype pipeAddType, name string, args ...string) RetVal {\n\tc := r.getContext()\n\tif c.stage != primaryTasks {\n\t\ttask, _, _ := getTask(c.currentTask)\n\t\tr.Log(Error, \"request to modify pipeline outside of initial pipeline in task '%s'\", task.name)\n\t\treturn InvalidStage\n\t}\n\tt := c.tasks.getTaskByName(name)\n\tif t == nil {\n\t\ttask, _, _ := getTask(c.currentTask)\n\t\tr.Log(Error, \"task '%s' not found updating pipeline from task '%s'\", name, task.name)\n\t\treturn TaskNotFound\n\t}\n\ttask, plugin, job := getTask(t)\n\tisPlugin := plugin != nil\n\tisJob := job != nil\n\tif task.Disabled {\n\t\tr.Log(Error, \"attempt to add disabled task '%s' to pipeline\", name)\n\t\treturn TaskDisabled\n\t}\n\tif ptype == typePlugin && !isPlugin {\n\t\tr.Log(Error, \"adding command to pipeline - not a plugin: %s\", name)\n\t\treturn InvalidTaskType\n\t}\n\tif ptype == typeJob && !isJob {\n\t\tr.Log(Error, \"adding job to pipeline - not a job: %s\", name)\n\t\treturn InvalidTaskType\n\t}\n\tif ptype == typeTask && (isJob || isPlugin) {\n\t\tr.Log(Error, \"adding task to pipeline - not a task: %s\", name)\n\t\treturn InvalidTaskType\n\t}\n\tvar command string\n\tvar cmdargs []string\n\tif isPlugin {\n\t\tif len(args) == 0 {\n\t\t\tr.Log(Error, \"added plugin '%s' to pipeline with no command\", name)\n\t\t\treturn MissingArguments\n\t\t}\n\t\tif len(args[0]) == 0 {\n\t\t\tr.Log(Error, \"added plugin '%s' to pipeline with no command\", name)\n\t\t\treturn MissingArguments\n\t\t}\n\t\tcmsg := args[0]\n\t\tc.debugT(t, fmt.Sprintf(\"Checking %d command matchers against pipe command: '%s'\", len(plugin.CommandMatchers), cmsg), false)\n\t\tmatched := false\n\t\tfor _, matcher := range plugin.CommandMatchers {\n\t\t\tLog(Trace, \"Checking '%s' against '%s'\", cmsg, matcher.Regex)\n\t\t\tmatches := matcher.re.FindAllStringSubmatch(cmsg, -1)\n\t\t\tif matches != nil {\n\t\t\t\tc.debugT(t, fmt.Sprintf(\"Matched command regex '%s', command: %s\", matcher.Regex, matcher.Command), false)\n\t\t\t\tmatched = true\n\t\t\t\tLog(Trace, \"pipeline command '%s' matches '%s'\", cmsg, matcher.Command)\n\t\t\t\tcommand = matcher.Command\n\t\t\t\tcmdargs = matches[0][1:]\n\t\t\t\tbreak\n\t\t\t} else {\n\t\t\t\tc.debugT(t, fmt.Sprintf(\"Not matched: %s\", matcher.Regex), false)\n\t\t\t}\n\t\t}\n\t\tif !matched {\n\t\t\tr.Log(Error, \"Command '%s' didn't match any CommandMatchers while adding plugin '%s' to pipeline\", cmsg, name)\n\t\t\treturn CommandNotMatched\n\t\t}\n\t} else {\n\t\tcommand = \"run\"\n\t\tcmdargs = args\n\t}\n\tts := TaskSpec{\n\t\tName: name,\n\t\tCommand: command,\n\t\tArguments: cmdargs,\n\t\ttask: t,\n\t}\n\targstr := strings.Join(args, \" \")\n\tr.Log(Debug, \"Adding pipeline task %s/%s: %s %s\", pflavor, ptype, name, argstr)\n\tswitch pflavor {\n\tcase flavorAdd:\n\t\tc.nextTasks = append(c.nextTasks, ts)\n\tcase flavorFinal:\n\t\t// Final tasks are FILO/LIFO (run in reverse order of being added)\n\t\tc.finalTasks = append([]TaskSpec{ts}, c.finalTasks...)\n\tcase flavorFail:\n\t\tc.failTasks = append(c.failTasks, ts)\n\tcase flavorSpawn:\n\t\tsb := c.clone()\n\t\tgo sb.startPipeline(nil, t, spawnedTask, command, args...)\n\t}\n\treturn Ok\n}",
"func (s *BaseAspidaListener) EnterMain(ctx *MainContext) {}",
"func ValidateProgramPipeline(pipeline uint32) {\n\tsyscall.Syscall(gpValidateProgramPipeline, 1, uintptr(pipeline), 0, 0)\n}",
"func newProcessRunner(pipeID string, p phono.Processor) (*processRunner, error) {\n\tfn, err := p.Process(pipeID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tr := processRunner{\n\t\tfn: fn,\n\t\tProcessor: p,\n\t\thooks: bindHooks(p),\n\t}\n\treturn &r, nil\n}",
"func (c *client) compileStages(p *yaml.Build, _pipeline *library.Pipeline, tmpls map[string]*yaml.Template, r *pipeline.RuleData) (*pipeline.Build, *library.Pipeline, error) {\n\tvar err error\n\n\t// check if the pipeline disabled the clone\n\tif p.Metadata.Clone == nil || *p.Metadata.Clone {\n\t\t// inject the clone stage\n\t\tp, err = c.CloneStage(p)\n\t\tif err != nil {\n\t\t\treturn nil, _pipeline, err\n\t\t}\n\t}\n\n\t// inject the init stage\n\tp, err = c.InitStage(p)\n\tif err != nil {\n\t\treturn nil, _pipeline, err\n\t}\n\n\t// inject the templates into the stages\n\tp, err = c.ExpandStages(p, tmpls, r)\n\tif err != nil {\n\t\treturn nil, _pipeline, err\n\t}\n\n\tif c.ModificationService.Endpoint != \"\" {\n\t\t// send config to external endpoint for modification\n\t\tp, err = c.modifyConfig(p, c.build, c.repo)\n\t\tif err != nil {\n\t\t\treturn nil, _pipeline, err\n\t\t}\n\t}\n\n\t// validate the yaml configuration\n\terr = c.Validate(p)\n\tif err != nil {\n\t\treturn nil, _pipeline, err\n\t}\n\n\t// Create some default global environment inject vars\n\t// these are used below to overwrite to an empty\n\t// map if they should not be injected into a container\n\tenvGlobalServices, envGlobalSecrets, envGlobalSteps := p.Environment, p.Environment, p.Environment\n\n\tif !p.Metadata.HasEnvironment(\"services\") {\n\t\tenvGlobalServices = make(raw.StringSliceMap)\n\t}\n\n\tif !p.Metadata.HasEnvironment(\"secrets\") {\n\t\tenvGlobalSecrets = make(raw.StringSliceMap)\n\t}\n\n\tif !p.Metadata.HasEnvironment(\"steps\") {\n\t\tenvGlobalSteps = make(raw.StringSliceMap)\n\t}\n\n\t// inject the environment variables into the services\n\tp.Services, err = c.EnvironmentServices(p.Services, envGlobalServices)\n\tif err != nil {\n\t\treturn nil, _pipeline, err\n\t}\n\n\t// inject the environment variables into the secrets\n\tp.Secrets, err = c.EnvironmentSecrets(p.Secrets, envGlobalSecrets)\n\tif err != nil {\n\t\treturn nil, _pipeline, err\n\t}\n\n\t// inject the environment variables into the stages\n\tp.Stages, err = c.EnvironmentStages(p.Stages, envGlobalSteps)\n\tif err != nil {\n\t\treturn nil, _pipeline, err\n\t}\n\n\t// inject the substituted environment variables into the stages\n\tp.Stages, err = c.SubstituteStages(p.Stages)\n\tif err != nil {\n\t\treturn nil, _pipeline, err\n\t}\n\n\t// inject the scripts into the stages\n\tp.Stages, err = c.ScriptStages(p.Stages)\n\tif err != nil {\n\t\treturn nil, _pipeline, err\n\t}\n\n\t// create executable representation\n\tbuild, err := c.TransformStages(r, p)\n\tif err != nil {\n\t\treturn nil, _pipeline, err\n\t}\n\n\treturn build, _pipeline, nil\n}",
"func createPipeline(params CRDCreationParameters) (*syntax.ParsedPipeline, error) {\n\tsteps, err := buildSteps(params)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"unable to create app extending pipeline steps\")\n\t}\n\n\tstage := syntax.Stage{\n\t\tName: appExtensionStageName,\n\t\tSteps: steps,\n\t\tAgent: &syntax.Agent{\n\t\t\tImage: determineDefaultStepImage(params.DefaultImage),\n\t\t},\n\t}\n\n\tparsedPipeline := &syntax.ParsedPipeline{\n\t\tStages: []syntax.Stage{stage},\n\t}\n\n\tenv := buildEnvParams(params)\n\tparsedPipeline.AddContainerEnvVarsToPipeline(env)\n\n\treturn parsedPipeline, nil\n}",
"func IsProgramPipeline(pipeline uint32) bool {\n\tret := C.glowIsProgramPipeline(gpIsProgramPipeline, (C.GLuint)(pipeline))\n\treturn ret == TRUE\n}"
] | [
"0.73413885",
"0.7065253",
"0.60420555",
"0.5810229",
"0.5754384",
"0.57189494",
"0.5652629",
"0.56116164",
"0.5575586",
"0.5401441",
"0.5378565",
"0.53634435",
"0.5341183",
"0.53165364",
"0.5250999",
"0.5221876",
"0.5196014",
"0.5189741",
"0.5189741",
"0.5172633",
"0.5168675",
"0.5155762",
"0.5152833",
"0.51259357",
"0.511084",
"0.50840235",
"0.5078312",
"0.50637543",
"0.50529975",
"0.50423753",
"0.50191164",
"0.49901655",
"0.49785164",
"0.49769643",
"0.4974815",
"0.4974815",
"0.497414",
"0.49698713",
"0.4964828",
"0.4960231",
"0.49596536",
"0.4956734",
"0.49565798",
"0.49414185",
"0.4928312",
"0.4918154",
"0.4918154",
"0.4918154",
"0.4918154",
"0.49095666",
"0.48953617",
"0.48814622",
"0.48798653",
"0.4865233",
"0.4865233",
"0.4865233",
"0.48513865",
"0.48508757",
"0.48415384",
"0.48401624",
"0.48216194",
"0.48208407",
"0.48150358",
"0.4811443",
"0.48112428",
"0.48006853",
"0.479882",
"0.479819",
"0.47952938",
"0.47872105",
"0.47788695",
"0.47545704",
"0.47355372",
"0.47311297",
"0.47290653",
"0.47278172",
"0.47247228",
"0.47168788",
"0.47119755",
"0.4708406",
"0.4705921",
"0.47035626",
"0.46970648",
"0.46906275",
"0.46867207",
"0.46759418",
"0.467405",
"0.467405",
"0.467405",
"0.4673657",
"0.4666476",
"0.4665009",
"0.46617305",
"0.46554217",
"0.46526232",
"0.46502158",
"0.4649928",
"0.4649347",
"0.46488503"
] | 0.67760444 | 3 |
bind a renderbuffer to a renderbuffer target | func BindRenderbuffer(target uint32, renderbuffer uint32) {
C.glowBindRenderbuffer(gpBindRenderbuffer, (C.GLenum)(target), (C.GLuint)(renderbuffer))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n C.glowBindRenderbuffer(gpBindRenderbuffer, (C.GLenum)(target), (C.GLuint)(renderbuffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n\tsyscall.Syscall(gpBindRenderbuffer, 2, uintptr(target), uintptr(renderbuffer), 0)\n}",
"func BindBuffer(target Enum, b Buffer) {\n\tgl.BindBuffer(uint32(target), b.Value)\n}",
"func (debugging *debuggingOpenGL) BindBuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindBuffer\", target, buffer)\n\tdebugging.gl.BindBuffer(target, buffer)\n\tdebugging.recordExit(\"BindBuffer\")\n}",
"func (native *OpenGL) BindBuffer(target uint32, buffer uint32) {\n\tgl.BindBuffer(target, buffer)\n}",
"func (gl *WebGL) BindBuffer(target GLEnum, buffer WebGLBuffer) {\n\tgl.context.Call(\"bindBuffer\", target, buffer)\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBuffer, 2, uintptr(target), uintptr(buffer), 0)\n}",
"func BindRenderbuffer(target GLEnum, renderbuffer Renderbuffer) {\n\tgl.BindRenderbuffer(uint32(target), uint32(renderbuffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func (buffer Buffer) Bind(target gl.Enum) {\n\tgl.BindBuffer(gl.Enum(target), gl.Uint(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBufferBase, 3, uintptr(target), uintptr(index), uintptr(buffer))\n}",
"func BindRenderbuffer(target Enum, rb Renderbuffer) {\n\tgl.BindRenderbuffer(uint32(target), rb.Value)\n}",
"func (debugging *debuggingOpenGL) BindRenderbuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindRenderbuffer\", target, buffer)\n\tdebugging.gl.BindRenderbuffer(target, buffer)\n\tdebugging.recordExit(\"BindRenderbuffer\")\n}",
"func (native *OpenGL) BindRenderbuffer(target uint32, renderbuffer uint32) {\n\tgl.BindRenderbuffer(target, renderbuffer)\n}",
"func BindRenderbuffer(target Enum, renderbuffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcrenderbuffer, _ := (C.GLuint)(renderbuffer), cgoAllocsUnknown\n\tC.glBindRenderbuffer(ctarget, crenderbuffer)\n}",
"func (buffer Buffer) BindBufferBase(target gl.Enum, index uint) {\n\tgl.BindBufferBase(gl.Enum(target), gl.Uint(index), gl.Uint(buffer))\n}",
"func (w *windowImpl) bindBackBuffer() {\n\t// w.mu.Lock()\n\t// size := w.Sz\n\t// w.mu.Unlock()\n\t//\n\tw.backBufferBound = true\n\t// gl.BindFramebuffer(gl.FRAMEBUFFER, 0)\n\t// gl.Viewport(0, 0, int32(size.X), int32(size.Y))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n C.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBuffer(target Enum, buffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcbuffer, _ := (C.GLuint)(buffer), cgoAllocsUnknown\n\tC.glBindBuffer(ctarget, cbuffer)\n}",
"func (f *Framebuffer) Renderbuffer(attachment gfx.FramebufferAttachment, buf gfx.Renderbuffer) {\n\tf.useState()\n\tf.ctx.O.Call(\n\t\t\"framebufferTexture2D\",\n\t\tf.ctx.FRAMEBUFFER,\n\t\tf.ctx.Enums[int(attachment)],\n\t\tf.ctx.RENDERBUFFER,\n\t\tbuf.Object().(*js.Object),\n\t\t0,\n\t)\n}",
"func (native *OpenGL) BindFramebuffer(target, buffer uint32) {\n\tgl.BindFramebuffer(target, buffer)\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n C.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func (debugging *debuggingOpenGL) BindFramebuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindFramebuffer\", target, buffer)\n\tdebugging.gl.BindFramebuffer(target, buffer)\n\tdebugging.recordExit(\"BindFramebuffer\")\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n C.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))\n}",
"func FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n C.glowFramebufferRenderbuffer(gpFramebufferRenderbuffer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLenum)(renderbuffertarget), (C.GLuint)(renderbuffer))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpBindBufferRange, 5, uintptr(target), uintptr(index), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func (buffer Buffer) BindBufferRange(target gl.Enum, index uint, offset int, size uint) {\n\tgl.BindBufferRange(gl.Enum(target), gl.Uint(index), gl.Uint(buffer), gl.Intptr(offset), gl.Sizeiptr(size))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tsyscall.Syscall6(gpBindBuffersBase, 4, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), 0, 0)\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n\tsyscall.Syscall6(gpFramebufferRenderbuffer, 4, uintptr(target), uintptr(attachment), uintptr(renderbuffertarget), uintptr(renderbuffer), 0, 0)\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n C.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n C.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n\tsyscall.Syscall(gpBindFramebuffer, 2, uintptr(target), uintptr(framebuffer), 0)\n}",
"func (native *OpenGL) FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n\tgl.FramebufferRenderbuffer(target, attachment, renderbuffertarget, renderbuffer)\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n\tC.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n\tC.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func BindFramebuffer(target Enum, fb Framebuffer) {\n\tgl.BindFramebuffer(uint32(target), fb.Value)\n}",
"func FramebufferRenderbuffer(target, attachment, rbTarget Enum, rb Renderbuffer) {\n\tgl.FramebufferRenderbuffer(uint32(target), uint32(attachment), uint32(rbTarget), rb.Value)\n}",
"func (b *Binding) Set(buf uint32) {\n\tgl.BindBufferBase(gl.SHADER_STORAGE_BUFFER, b.uint32, buf)\n}",
"func (t *Texture2D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_2D, t.ID)\n}",
"func TextureBuffer(texture uint32, internalformat uint32, buffer uint32) {\n\tsyscall.Syscall(gpTextureBuffer, 3, uintptr(texture), uintptr(internalformat), uintptr(buffer))\n}",
"func FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n\tC.glowFramebufferRenderbuffer(gpFramebufferRenderbuffer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLenum)(renderbuffertarget), (C.GLuint)(renderbuffer))\n}",
"func FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n\tC.glowFramebufferRenderbuffer(gpFramebufferRenderbuffer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLenum)(renderbuffertarget), (C.GLuint)(renderbuffer))\n}",
"func BindFramebuffer(target Enum, framebuffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcframebuffer, _ := (C.GLuint)(framebuffer), cgoAllocsUnknown\n\tC.glBindFramebuffer(ctarget, cframebuffer)\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n\tC.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n\tC.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n C.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func NamedFramebufferDrawBuffer(framebuffer uint32, buf uint32) {\n\tsyscall.Syscall(gpNamedFramebufferDrawBuffer, 2, uintptr(framebuffer), uintptr(buf), 0)\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tsyscall.Syscall6(gpBindBuffersRange, 6, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(sizes)))\n}",
"func (debugging *debuggingOpenGL) FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n\tdebugging.recordEntry(\"FramebufferRenderbuffer\", target, attachment, renderbuffertarget, renderbuffer)\n\tdebugging.gl.FramebufferRenderbuffer(target, attachment, renderbuffertarget, renderbuffer)\n\tdebugging.recordExit(\"FramebufferRenderbuffer\")\n}",
"func (gl *WebGL) BindTexture(target GLEnum, texture WebGLTexture) {\n\tgl.context.Call(\"bindTexture\", target, texture)\n}",
"func BindTexture(target uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTexture, 2, uintptr(target), uintptr(texture), 0)\n}",
"func (b *VBO) Bind(m *Mesh) {\n\tif !b.genBound {\n\t\tpanic(\"A VBO buffer ID has not been generated. Call GenBuffer first.\")\n\t}\n\n\tgl.BindBuffer(gl.ARRAY_BUFFER, b.vboID)\n\tfloatSize := int(unsafe.Sizeof(float32(0)))\n\tgl.BufferData(gl.ARRAY_BUFFER, len(m.Vertices)*floatSize, gl.Ptr(m.Vertices), gl.STATIC_DRAW)\n}",
"func (debugging *debuggingOpenGL) BindTexture(target uint32, texture uint32) {\n\tdebugging.recordEntry(\"BindTexture\", target, texture)\n\tdebugging.gl.BindTexture(target, texture)\n\tdebugging.recordExit(\"BindTexture\")\n}",
"func MapBuffer(target uint32, access uint32) unsafe.Pointer {\n ret := C.glowMapBuffer(gpMapBuffer, (C.GLenum)(target), (C.GLenum)(access))\n return (unsafe.Pointer)(ret)\n}",
"func FramebufferRenderbuffer(target Enum, attachment Enum, renderbuffertarget Enum, renderbuffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcattachment, _ := (C.GLenum)(attachment), cgoAllocsUnknown\n\tcrenderbuffertarget, _ := (C.GLenum)(renderbuffertarget), cgoAllocsUnknown\n\tcrenderbuffer, _ := (C.GLuint)(renderbuffer), cgoAllocsUnknown\n\tC.glFramebufferRenderbuffer(ctarget, cattachment, crenderbuffertarget, crenderbuffer)\n}",
"func BindTexture(target uint32, texture uint32) {\n C.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func MapBuffer(target gl.Enum, access gl.Enum) {\n\tgl.MapBuffer(gl.Enum(target), gl.Enum(access))\n}",
"func NamedFramebufferDrawBuffer(framebuffer uint32, buf uint32) {\n\tC.glowNamedFramebufferDrawBuffer(gpNamedFramebufferDrawBuffer, (C.GLuint)(framebuffer), (C.GLenum)(buf))\n}",
"func NamedFramebufferDrawBuffer(framebuffer uint32, buf uint32) {\n\tC.glowNamedFramebufferDrawBuffer(gpNamedFramebufferDrawBuffer, (C.GLuint)(framebuffer), (C.GLenum)(buf))\n}",
"func FrameBuffer(m rv.RenderModel) {\n\tframebuffer(m)\n}",
"func BindTransformFeedback(target uint32, id uint32) {\n C.glowBindTransformFeedback(gpBindTransformFeedback, (C.GLenum)(target), (C.GLuint)(id))\n}",
"func (native *OpenGL) BindTexture(target uint32, texture uint32) {\n\tgl.BindTexture(target, texture)\n}",
"func BufferInit(target Enum, size int, usage Enum) {\n\tgl.BufferData(uint32(target), size, nil, uint32(usage))\n}",
"func MapBuffer(target uint32, access uint32) unsafe.Pointer {\n\tret := C.glowMapBuffer(gpMapBuffer, (C.GLenum)(target), (C.GLenum)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func MapBuffer(target uint32, access uint32) unsafe.Pointer {\n\tret := C.glowMapBuffer(gpMapBuffer, (C.GLenum)(target), (C.GLenum)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func BindTexture(target GLEnum, texture Texture) {\n\tgl.BindTexture(uint32(target), uint32(texture))\n}",
"func TextureBuffer(texture uint32, internalformat uint32, buffer uint32) {\n\tC.glowTextureBuffer(gpTextureBuffer, (C.GLuint)(texture), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func TextureBuffer(texture uint32, internalformat uint32, buffer uint32) {\n\tC.glowTextureBuffer(gpTextureBuffer, (C.GLuint)(texture), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func (t Texture3D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_3D, t.id)\n}",
"func BindTexture(target Enum, t Texture) {\n\tgl.BindTexture(uint32(target), t.Value)\n}",
"func MapBuffer(target uint32, access uint32) unsafe.Pointer {\n\tret, _, _ := syscall.Syscall(gpMapBuffer, 2, uintptr(target), uintptr(access), 0)\n\treturn (unsafe.Pointer)(ret)\n}",
"func BindTexture(target Enum, texture Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tctexture, _ := (C.GLuint)(texture), cgoAllocsUnknown\n\tC.glBindTexture(ctarget, ctexture)\n}",
"func (c *Context) BindTexture(texture *Texture) {\n\tif texture == nil {\n\t\treturn\n\t}\n\tif c.currentTexture == nil || texture.id != c.currentTexture.id {\n\t\tgl.BindTexture(gl.TEXTURE_2D, texture.id)\n\t\tc.currentTexture = texture\n\t}\n}",
"func DrawBuffer(buf uint32) {\n\tsyscall.Syscall(gpDrawBuffer, 1, uintptr(buf), 0, 0)\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTextureUnit, 2, uintptr(unit), uintptr(texture), 0)\n}",
"func FramebufferTextureLayer(target uint32, attachment uint32, texture uint32, level int32, layer int32) {\n\tsyscall.Syscall6(gpFramebufferTextureLayer, 5, uintptr(target), uintptr(attachment), uintptr(texture), uintptr(level), uintptr(layer), 0)\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func DrawBuffer(mode uint32) {\n C.glowDrawBuffer(gpDrawBuffer, (C.GLenum)(mode))\n}",
"func (self Source) SetBuffer(buffer Buffer) {\n\tself.Seti(AlBuffer, int32(buffer))\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n C.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func (src *Source) SetBuffer(buf []byte) {\n\tsrc.buf = buf\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tsyscall.Syscall6(gpBindVertexBuffer, 4, uintptr(bindingindex), uintptr(buffer), uintptr(offset), uintptr(stride), 0, 0)\n}",
"func NewAttachedBuffer(buffer *Buffer) *Buffer {\n result := NewEmptyBuffer()\n result.AttachBuffer(buffer)\n return result\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n NewEnumsModel(buffer),\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyEnumsFunc(func(model *EnumsModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func FramebufferTextureLayer(target uint32, attachment uint32, texture uint32, level int32, layer int32) {\n C.glowFramebufferTextureLayer(gpFramebufferTextureLayer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(layer))\n}",
"func BindTexImageARB(hPbuffer unsafe.Pointer, iBuffer unsafe.Pointer) unsafe.Pointer {\n\tret, _, _ := syscall.Syscall(gpBindTexImageARB, 2, uintptr(hPbuffer), uintptr(iBuffer), 0)\n\treturn (unsafe.Pointer)(ret)\n}",
"func TexBufferRange(target uint32, internalformat uint32, buffer uint32, offset int, size int) {\n C.glowTexBufferRange(gpTexBufferRange, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}"
] | [
"0.7353657",
"0.7281036",
"0.7191413",
"0.7169069",
"0.71594304",
"0.7144179",
"0.7136114",
"0.7112289",
"0.7110479",
"0.7098034",
"0.6998474",
"0.6998474",
"0.6992969",
"0.6992969",
"0.69723177",
"0.69301564",
"0.6866204",
"0.6865999",
"0.6822502",
"0.6761374",
"0.6670392",
"0.66554624",
"0.6653076",
"0.6636351",
"0.65267754",
"0.64796376",
"0.6402505",
"0.6402505",
"0.63801163",
"0.6373238",
"0.63531446",
"0.6323311",
"0.6221761",
"0.62137055",
"0.6149921",
"0.6126912",
"0.6126912",
"0.6107412",
"0.6075072",
"0.6051607",
"0.60312223",
"0.59727764",
"0.5946186",
"0.5946186",
"0.5939061",
"0.5933168",
"0.5926269",
"0.59120744",
"0.59007746",
"0.58848876",
"0.58848876",
"0.5882069",
"0.5880312",
"0.5880312",
"0.58715636",
"0.58498025",
"0.58498025",
"0.58484036",
"0.58001524",
"0.57889926",
"0.57636225",
"0.57489264",
"0.57483757",
"0.5746197",
"0.57319987",
"0.5714548",
"0.569132",
"0.56904083",
"0.5677744",
"0.5677744",
"0.56421113",
"0.5601084",
"0.5600562",
"0.5575487",
"0.55738884",
"0.55738884",
"0.554742",
"0.55360514",
"0.55360514",
"0.5520251",
"0.55074835",
"0.5496564",
"0.5490649",
"0.54812473",
"0.54539055",
"0.5444916",
"0.54419583",
"0.5406436",
"0.5406436",
"0.54023516",
"0.5373107",
"0.5366657",
"0.5352091",
"0.5350569",
"0.53308475",
"0.53288823",
"0.53284335",
"0.5325952",
"0.5324592"
] | 0.6807672 | 20 |
bind a named sampler to a texturing target | func BindSampler(unit uint32, sampler uint32) {
C.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindSampler(unit uint32, sampler uint32) {\n C.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tsyscall.Syscall(gpBindSampler, 2, uintptr(unit), uintptr(sampler), 0)\n}",
"func (debugging *debuggingOpenGL) BindSampler(unit uint32, sampler uint32) {\n\tdebugging.recordEntry(\"BindSampler\", unit, sampler)\n\tdebugging.gl.BindSampler(unit, sampler)\n\tdebugging.recordExit(\"BindSampler\")\n}",
"func (native *OpenGL) BindSampler(unit uint32, sampler uint32) {\n\tgl.BindSampler(unit, sampler)\n}",
"func (g *GLTF) applySampler(samplerIdx int, tex *texture.Texture2D) error {\n\n\tlog.Debug(\"Applying Sampler %d\", samplerIdx)\n\t// Check if provided sampler index is valid\n\tif samplerIdx < 0 || samplerIdx >= len(g.Samplers) {\n\t\treturn fmt.Errorf(\"invalid sampler index\")\n\t}\n\tsampler := g.Samplers[samplerIdx]\n\n\t// Magnification filter\n\tmagFilter := gls.LINEAR\n\tif sampler.MagFilter != nil {\n\t\tmagFilter = *sampler.MagFilter\n\t}\n\ttex.SetMagFilter(uint32(magFilter))\n\n\t// Minification filter\n\tminFilter := gls.LINEAR_MIPMAP_LINEAR\n\tif sampler.MinFilter != nil {\n\t\tminFilter = *sampler.MinFilter\n\t}\n\ttex.SetMinFilter(uint32(minFilter))\n\n\t// S coordinate wrapping mode\n\twrapS := gls.REPEAT\n\tif sampler.WrapS != nil {\n\t\twrapS = *sampler.WrapS\n\t}\n\ttex.SetWrapS(uint32(wrapS))\n\n\t// T coordinate wrapping mode\n\twrapT := gls.REPEAT\n\tif sampler.WrapT != nil {\n\t\twrapT = *sampler.WrapT\n\t}\n\ttex.SetWrapT(uint32(wrapT))\n\n\treturn nil\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n C.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func BindTexture(target uint32, texture uint32) {\n C.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tsyscall.Syscall(gpBindSamplers, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(samplers)))\n}",
"func BindTexture(target Enum, t Texture) {\n\tgl.BindTexture(uint32(target), t.Value)\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tC.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tC.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func (gl *WebGL) BindTexture(target GLEnum, texture WebGLTexture) {\n\tgl.context.Call(\"bindTexture\", target, texture)\n}",
"func SetSampler(fixedTarget uint32, fallbackRate float64) {\n\tsegment.SetSampler(utils.NewSampler(fixedTarget, fallbackRate))\n}",
"func BindTexture(target uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTexture, 2, uintptr(target), uintptr(texture), 0)\n}",
"func BindTexture(target GLEnum, texture Texture) {\n\tgl.BindTexture(uint32(target), uint32(texture))\n}",
"func Sampler(sampler jaeger.Sampler) Option {\n\treturn func(c *Options) {\n\t\tc.sampler = sampler\n\t}\n}",
"func TraceSampler(o Options) trace.Sampler {\n\treturn trace.ProbabilitySampler(o.SamplingProbability)\n}",
"func BindTransformFeedback(target uint32, id uint32) {\n C.glowBindTransformFeedback(gpBindTransformFeedback, (C.GLenum)(target), (C.GLuint)(id))\n}",
"func (debugging *debuggingOpenGL) BindTexture(target uint32, texture uint32) {\n\tdebugging.recordEntry(\"BindTexture\", target, texture)\n\tdebugging.gl.BindTexture(target, texture)\n\tdebugging.recordExit(\"BindTexture\")\n}",
"func (t *Texture2D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_2D, t.ID)\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n C.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func (native *OpenGL) BindTexture(target uint32, texture uint32) {\n\tgl.BindTexture(target, texture)\n}",
"func bindListener(g *G.Gilmour) {\n\tg.ReplyTo(\"test.handler.one\", fetchReply(g), nil)\n}",
"func (b bindRequest) TargetName() (string, error) {\n\treturn b.Parameters.String(TargetNameKey)\n}",
"func BindBuffer(target uint32, buffer uint32) {\n C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindTexture(target uint32, texture uint32) {\n\tC.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func BindTexture(target uint32, texture uint32) {\n\tC.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func (s *fakeTracerProviderStore) RegisterSampler(sampler sdktrace.Sampler) {\n\ts.sampler = sampler\n}",
"func TexParameteri(target, pname GLEnum, param int32) {\n\tgl.TexParameteri(uint32(target), uint32(pname), param)\n}",
"func NewSampler(n int) Sampler {\n\treturn Sampler{\n\t\tn: n,\n\t}\n}",
"func BindTexture(target Enum, texture Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tctexture, _ := (C.GLuint)(texture), cgoAllocsUnknown\n\tC.glBindTexture(ctarget, ctexture)\n}",
"func NewAllSampler() RateSampler { return NewRateSampler(1) }",
"func (s Sampling) Sampler() trace.Sampler {\n\tif s == Disabled {\n\t\treturn trace.NeverSample()\n\t}\n\treturn trace.ProbabilitySampler(float64(s))\n}",
"func (s *opentelemetryTracerProviderStore) RegisterSampler(sampler sdktrace.Sampler) {\n\ts.sampler = sampler\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n C.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTextureUnit, 2, uintptr(unit), uintptr(texture), 0)\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n C.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n C.glowBindRenderbuffer(gpBindRenderbuffer, (C.GLenum)(target), (C.GLuint)(renderbuffer))\n}",
"func (t *Tracer) Sampler() SamplerV2 {\n\treturn t.sampler\n}",
"func TexParameteri(target, pname Enum, param int) {\n\tgl.TexParameteri(uint32(target), uint32(pname), int32(param))\n}",
"func newSampler(extraRate float64, maxTPS float64) *Sampler {\n\ts := &Sampler{\n\t\tBackend: NewMemoryBackend(defaultDecayPeriod, defaultDecayFactor),\n\t\textraRate: extraRate,\n\t\tmaxTPS: maxTPS,\n\t\trateThresholdTo1: defaultSamplingRateThresholdTo1,\n\t\tsignatureScoreOffset: atomic.NewFloat(0),\n\t\tsignatureScoreSlope: atomic.NewFloat(0),\n\t\tsignatureScoreFactor: atomic.NewFloat(0),\n\n\t\texit: make(chan struct{}),\n\t}\n\n\ts.SetSignatureCoefficients(initialSignatureScoreOffset, defaultSignatureScoreSlope)\n\n\treturn s\n}",
"func (t Texture3D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_3D, t.id)\n}",
"func (buffer Buffer) Bind(target gl.Enum) {\n\tgl.BindBuffer(gl.Enum(target), gl.Uint(buffer))\n}",
"func IsSampler(sampler uint32) bool {\n ret := C.glowIsSampler(gpIsSampler, (C.GLuint)(sampler))\n return ret == TRUE\n}",
"func BindTransformFeedback(target uint32, id uint32) {\n\tsyscall.Syscall(gpBindTransformFeedback, 2, uintptr(target), uintptr(id), 0)\n}",
"func releaseSampler(s *Sampler) {\n\tif s.clSampler != nil {\n\t\tC.clReleaseSampler(s.clSampler)\n\t\ts.clSampler = nil\n\t}\n}",
"func (q *Query) AddTarget(t string) *Query {\n\tq.targets = append(q.targets, t)\n\treturn q\n}",
"func (s *jsiiProxy_SqsEventSource) Bind(target awslambda.IFunction) {\n\t_jsii_.InvokeVoid(\n\t\ts,\n\t\t\"bind\",\n\t\t[]interface{}{target},\n\t)\n}",
"func (m *DomainDnsSrvRecord) SetNameTarget(value *string)() {\n err := m.GetBackingStore().Set(\"nameTarget\", value)\n if err != nil {\n panic(err)\n }\n}",
"func (s *Layer) Use(phase string, handler ...interface{}) {\n\ts.register(phase, Normal, handler...)\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTransformFeedback(target uint32, id uint32) {\n\tC.glowBindTransformFeedback(gpBindTransformFeedback, (C.GLenum)(target), (C.GLuint)(id))\n}",
"func BindTransformFeedback(target uint32, id uint32) {\n\tC.glowBindTransformFeedback(gpBindTransformFeedback, (C.GLenum)(target), (C.GLuint)(id))\n}",
"func TexParameterf(target, pname GLEnum, param float32) {\n\tgl.TexParameterf(uint32(target), uint32(pname), param)\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func (m *monitor) withTargetName(targetName string) *monitor {\n\tm.targetName = targetName\n\treturn m\n}",
"func TexStorage2DMultisample(target uint32, samples int32, internalformat uint32, width int32, height int32, fixedsamplelocations bool) {\n C.glowTexStorage2DMultisample(gpTexStorage2DMultisample, (C.GLenum)(target), (C.GLsizei)(samples), (C.GLenum)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLboolean)(boolToInt(fixedsamplelocations)))\n}",
"func (c *Config) Sample(dst io.Writer, path config.Path, ctx config.CtxMap) {\n\tconfig.WriteSample(dst, path, nil,\n\t\tconfig.StringSampler{\n\t\t\tText: fmt.Sprintf(loggingFileSample, ctx[config.ID]),\n\t\t\tName: \"file\",\n\t\t},\n\t\tconfig.StringSampler{\n\t\t\tText: loggingConsoleSample,\n\t\t\tName: \"console\",\n\t\t},\n\t)\n}",
"func (e *CachedTestExecutor) ListenTarget(ctx context.Context, fn func(ev interface{})) {\n\te.Called(ctx, fn)\n\n\t// Simulate a network event\n\tev := &network.EventResponseReceived{\n\t\tRequestID: \"testing\",\n\t\tType: network.ResourceTypeDocument,\n\t\tResponse: &network.Response{\n\t\t\tURL: URL,\n\t\t\tStatus: Status,\n\t\t\tStatusText: StatusText,\n\t\t\tTiming: &network.ResourceTiming{\n\t\t\t\tConnectStart: -1,\n\t\t\t\tReceiveHeadersEnd: ReceiveHeadersEnd,\n\t\t\t},\n\t\t},\n\t}\n\n\tfn(ev)\n}",
"func TexParam(wrap TextureWrap, filter TextureFilter) TextureParameters {\n\treturn TextureParameters{wrap, wrap, filter, filter}\n}",
"func Target(id, endpoint string) string {\n\treturn fmt.Sprintf(\"%s://%s/%s\", scheme, id, endpoint)\n}",
"func (gl *WebGL) BindBuffer(target GLEnum, buffer WebGLBuffer) {\n\tgl.context.Call(\"bindBuffer\", target, buffer)\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n C.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func TexParameterf(target, pname Enum, param float32) {\n\tgl.TexParameterf(uint32(target), uint32(pname), param)\n}",
"func TexImage2DMultisample(target uint32, samples int32, internalformat uint32, width int32, height int32, fixedsamplelocations bool) {\n C.glowTexImage2DMultisample(gpTexImage2DMultisample, (C.GLenum)(target), (C.GLsizei)(samples), (C.GLenum)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLboolean)(boolToInt(fixedsamplelocations)))\n}",
"func (c *Context) BindTexture(texture *Texture) {\n\tif texture == nil {\n\t\treturn\n\t}\n\tif c.currentTexture == nil || texture.id != c.currentTexture.id {\n\t\tgl.BindTexture(gl.TEXTURE_2D, texture.id)\n\t\tc.currentTexture = texture\n\t}\n}",
"func BindBuffer(target Enum, b Buffer) {\n\tgl.BindBuffer(uint32(target), b.Value)\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tsyscall.Syscall9(gpBindImageTexture, 7, uintptr(unit), uintptr(texture), uintptr(level), boolToUintptr(layered), uintptr(layer), uintptr(access), uintptr(format), 0, 0)\n}",
"func poolTarget(name, resource string) string {\n\treturn fmt.Sprintf(\"module.worker-%v.%v\", name, resource)\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n C.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (f *staticPodFallback) withTargetName(targetName string) *staticPodFallback {\n\tf.targetName = targetName\n\treturn f\n}",
"func (c *browserClient) targetListener(ev interface{}) {\n\tswitch ev := ev.(type) { //nolint:gocritic\n\tcase *network.EventRequestWillBeSent:\n\t\tif ev.Request.URL == `https://signin.aws.amazon.com/saml` {\n\t\t\t// parse and unescape the query string for the key value pair that contains SAMLResponse=xxxx\n\t\t\tqs, err := url.ParseQuery(ev.Request.PostData)\n\t\t\tif err != nil {\n\t\t\t\tc.Logger.Errorf(\"Error parsing SAMLResponse: %v\", err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tsaml := qs.Get(\"SAMLResponse\")\n\t\t\tsamlassert := credentials.SamlAssertion(saml)\n\t\t\tc.saml = &samlassert\n\t\t\tdone.Done()\n\t\t}\n\t}\n}",
"func (c *ConfigManager) AddTarget(n ...string) {\n\tfor _, v := range n {\n\t\tc.targets = append(c.targets, v)\n\t}\n}",
"func IsSampler(sampler uint32) bool {\n\tret := C.glowIsSampler(gpIsSampler, (C.GLuint)(sampler))\n\treturn ret == TRUE\n}",
"func IsSampler(sampler uint32) bool {\n\tret := C.glowIsSampler(gpIsSampler, (C.GLuint)(sampler))\n\treturn ret == TRUE\n}",
"func GenSamplers(count int32, samplers *uint32) {\n C.glowGenSamplers(gpGenSamplers, (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tC.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tC.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func (self *Tween) SetTargetA(member interface{}) {\n self.Object.Set(\"target\", member)\n}",
"func Bind(logger *zap.SugaredLogger) scribe.LoggerFactories {\n\treturn scribe.LoggerFactories{\n\t\tscribe.Trace: func(level scribe.Level, scene scribe.Scene) scribe.Logger {\n\t\t\treturn enrich(logger, scene).Debugf\n\t\t},\n\t\tscribe.Debug: func(level scribe.Level, scene scribe.Scene) scribe.Logger {\n\t\t\treturn enrich(logger, scene).Debugf\n\t\t},\n\t\tscribe.Info: func(level scribe.Level, scene scribe.Scene) scribe.Logger {\n\t\t\treturn enrich(logger, scene).Infof\n\t\t},\n\t\tscribe.Warn: func(level scribe.Level, scene scribe.Scene) scribe.Logger {\n\t\t\treturn enrich(logger, scene).Warnf\n\t\t},\n\t\tscribe.Error: func(level scribe.Level, scene scribe.Scene) scribe.Logger {\n\t\t\treturn enrich(logger, scene).Errorf\n\t\t},\n\t}\n}",
"func (obj *Device) SetSamplerState(\n\tsampler uint32,\n\ttyp SAMPLERSTATETYPE,\n\tvalue uint32,\n) Error {\n\tret, _, _ := syscall.Syscall6(\n\t\tobj.vtbl.SetSamplerState,\n\t\t4,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(sampler),\n\t\tuintptr(typ),\n\t\tuintptr(value),\n\t\t0,\n\t\t0,\n\t)\n\treturn toErr(ret)\n}",
"func (debugging *debuggingOpenGL) TexParameteri(target uint32, pname uint32, param int32) {\n\tdebugging.recordEntry(\"TexParameteri\", target, pname, param)\n\tdebugging.gl.TexParameteri(target, pname, param)\n\tdebugging.recordExit(\"TexParameteri\")\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n\tC.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n\tC.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func NamedFramebufferTextureLayer(framebuffer uint32, attachment uint32, texture uint32, level int32, layer int32) {\n\tC.glowNamedFramebufferTextureLayer(gpNamedFramebufferTextureLayer, (C.GLuint)(framebuffer), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(layer))\n}",
"func NamedFramebufferTextureLayer(framebuffer uint32, attachment uint32, texture uint32, level int32, layer int32) {\n\tC.glowNamedFramebufferTextureLayer(gpNamedFramebufferTextureLayer, (C.GLuint)(framebuffer), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(layer))\n}",
"func (gl *WebGL) TexParameteri(target GLEnum, param GLEnum, value int) {\n\tgl.context.Call(\"texParameteri\", target, param, value)\n}",
"func (gl *WebGL) UnbindTexture(target GLEnum) {\n\tgl.context.Call(\"bindTexture\", target, nil)\n}",
"func (debugging *debuggingOpenGL) BindBuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindBuffer\", target, buffer)\n\tdebugging.gl.BindBuffer(target, buffer)\n\tdebugging.recordExit(\"BindBuffer\")\n}",
"func TexImage3DMultisample(target uint32, samples int32, internalformat uint32, width int32, height int32, depth int32, fixedsamplelocations bool) {\n C.glowTexImage3DMultisample(gpTexImage3DMultisample, (C.GLenum)(target), (C.GLsizei)(samples), (C.GLenum)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLboolean)(boolToInt(fixedsamplelocations)))\n}",
"func (native *OpenGL) TexParameteri(target uint32, pname uint32, param int32) {\n\tgl.TexParameteri(target, pname, param)\n}"
] | [
"0.68192595",
"0.663926",
"0.63527334",
"0.634395",
"0.5842493",
"0.5665984",
"0.5508882",
"0.54997027",
"0.54741216",
"0.5448002",
"0.5448002",
"0.5300093",
"0.5271571",
"0.5180923",
"0.5178225",
"0.5158692",
"0.5154688",
"0.51445144",
"0.5083899",
"0.5067762",
"0.50090045",
"0.49756637",
"0.4974471",
"0.49702162",
"0.4957859",
"0.49486247",
"0.49486247",
"0.49388784",
"0.49388784",
"0.49210912",
"0.48994663",
"0.48874712",
"0.4877405",
"0.48158628",
"0.4815349",
"0.4803877",
"0.4794443",
"0.47878727",
"0.47842425",
"0.47726354",
"0.4747037",
"0.47418308",
"0.473681",
"0.47316158",
"0.47238126",
"0.47176358",
"0.46877128",
"0.46840543",
"0.46664208",
"0.46608832",
"0.46429637",
"0.46182463",
"0.4616427",
"0.46039486",
"0.4599139",
"0.4599139",
"0.458758",
"0.458758",
"0.45727578",
"0.45722333",
"0.45722333",
"0.45613748",
"0.45551708",
"0.45540836",
"0.454247",
"0.4527345",
"0.45216608",
"0.45200518",
"0.45081523",
"0.45043105",
"0.450114",
"0.4499409",
"0.44959995",
"0.44852456",
"0.44833332",
"0.4458628",
"0.4453002",
"0.4449436",
"0.4443738",
"0.4441246",
"0.4441246",
"0.44403204",
"0.44392592",
"0.44392592",
"0.44361794",
"0.44361794",
"0.44185078",
"0.4401006",
"0.4392968",
"0.43848458",
"0.43822804",
"0.43822804",
"0.43809626",
"0.43809626",
"0.4376342",
"0.43736285",
"0.43613532",
"0.43610936",
"0.4361081"
] | 0.6406075 | 3 |
bind one or more named sampler objects to a sequence of consecutive sampler units | func BindSamplers(first uint32, count int32, samplers *uint32) {
C.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n C.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tsyscall.Syscall(gpBindSamplers, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(samplers)))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n C.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tsyscall.Syscall(gpBindSampler, 2, uintptr(unit), uintptr(sampler), 0)\n}",
"func NewAllSampler() RateSampler { return NewRateSampler(1) }",
"func BindSampler(unit uint32, sampler uint32) {\n\tC.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tC.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n C.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (debugging *debuggingOpenGL) BindSampler(unit uint32, sampler uint32) {\n\tdebugging.recordEntry(\"BindSampler\", unit, sampler)\n\tdebugging.gl.BindSampler(unit, sampler)\n\tdebugging.recordExit(\"BindSampler\")\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func (native *OpenGL) BindSampler(unit uint32, sampler uint32) {\n\tgl.BindSampler(unit, sampler)\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n C.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func GenSamplers(count int32, samplers *uint32) {\n\tsyscall.Syscall(gpGenSamplers, 2, uintptr(count), uintptr(unsafe.Pointer(samplers)), 0)\n}",
"func GenSamplers(count int32, samplers *uint32) {\n C.glowGenSamplers(gpGenSamplers, (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func (ss servers) sampling(s *stats) {\n\tfor _, svr := range ss {\n\t\ts.serverSessNum += load(&svr.sessNum)\n\t\ts.serverTotalReq += load(&svr.totalReq)\n\t\ts.serverSuccReq += load(&svr.succReq)\n\t}\n}",
"func RegisterAgentAndScale(a *Agent, scale int) {\n\tfor i := 0; i < scale; i++ {\n\t\tagt := *a\n\t\tname := fmt.Sprintf(\"%v_%v\", a.Name, i)\n\t\tagt.Name = name\n\t\tGetAgentStoreInstance().Add(&agt)\n\t}\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindImageTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func (j *JSONSerializer) Bind(events ...Event) {\n\tfor _, event := range events {\n\t\teventType, t := EventType(event)\n\t\tj.eventTypes[eventType] = t\n\t}\n}",
"func (c *client) sampling(s *stats) {\n\ts.dialNum, s.dialTotalConn, s.clientWorkerNum = load(&c.d.dialNum), load(&c.d.totalConn), load(&c.workerNum)\n\tfor _, d := range (c.pool.Stats()).Destinations {\n\t\ts.clientTotalConn += d.Total\n\t\ts.clientIdleConn += d.Idle\n\t}\n\tfor _, addr := range c.s.addrs {\n\t\ts.clientTotalReq += load(&addr.totalReq)\n\t\ts.clientSuccReq += load(&addr.succReq)\n\t}\n}",
"func (s *Samples) loadSamples(dir string) error {\n\tlog.Printf(\"loading samples from dir %s\\n\", dir)\n\tglob := filepath.Join(dir, \"*.wav\")\n\tlog.Printf(\"loading samples with glob %s\\n\", glob)\n\tsamples, err := filepath.Glob(glob)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"listing files with wildcard pattern\")\n\t}\n\tfor i, sample := range samples {\n\t\tif i >= numSlots {\n\t\t\tbreak\n\t\t}\n\t\tif err := s.Add(i, sample); err != nil {\n\t\t\treturn errors.Wrap(err, \"adding sample\")\n\t\t}\n\t}\n\treturn nil\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (s *Storage) Append(smpl *model.Sample) error {\n\ts.mtx.RLock()\n\n\tvar snew model.Sample\n\tsnew = *smpl\n\tsnew.Metric = smpl.Metric.Clone()\n\n\tfor ln, lv := range s.externalLabels {\n\t\tif _, ok := smpl.Metric[ln]; !ok {\n\t\t\tsnew.Metric[ln] = lv\n\t\t}\n\t}\n\ts.mtx.RUnlock()\n\n\tfor _, q := range s.queues {\n\t\tq.Append(&snew)\n\t}\n\treturn nil\n}",
"func GenSamplers(count int32, samplers *uint32) {\n\tC.glowGenSamplers(gpGenSamplers, (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func GenSamplers(count int32, samplers *uint32) {\n\tC.glowGenSamplers(gpGenSamplers, (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func (sr *Stackers) Bind(r Publisher, cl bool) {\n\tvar lr Connector\n\tvar err error\n\n\tif lr, err = sr.Last(); err != nil {\n\t\tsr.Publisher.Bind(r, cl)\n\t\tsr.ro.Lock()\n\t\t{\n\t\t\tsr.stacks = append(sr.stacks, r)\n\t\t}\n\t\tsr.ro.Unlock()\n\t\treturn\n\t}\n\n\tlr.Bind(r, cl)\n\tsr.ro.Lock()\n\t{\n\t\tsr.stacks = append(sr.stacks, r)\n\t}\n\tsr.ro.Unlock()\n}",
"func NewSampler(n int) Sampler {\n\treturn Sampler{\n\t\tn: n,\n\t}\n}",
"func bind(vm *VM, block block, factory bindFactory) {\n\tfor i := block.first(); i != 0; i = i.next(vm) {\n\t\tptr, ok := i.ptr(vm)\n\t\tif ok {\n\t\t\tobj := value(vm.read(ptr))\n\t\t\tkind := obj.kind()\n\t\t\tvm.bindFunc[kind](vm, ptr, factory)\n\t\t}\n\t}\n}",
"func NamedRenderbufferStorageMultisample(renderbuffer uint32, samples int32, internalformat uint32, width int32, height int32) {\n\tsyscall.Syscall6(gpNamedRenderbufferStorageMultisample, 5, uintptr(renderbuffer), uintptr(samples), uintptr(internalformat), uintptr(width), uintptr(height), 0)\n}",
"func (tw *MultiTimingsWrapper) Add(names []string, elapsed time.Duration) {\n\tif tw.name == \"\" {\n\t\ttw.timings.Add(names, elapsed)\n\t\treturn\n\t}\n\tnewlabels := combineLabels(tw.name, names)\n\ttw.timings.Add(newlabels, elapsed)\n}",
"func (du *DescriptorSet) AddCombinedImageSampler(dstBinding int, layout vk.ImageLayout, imageView vk.ImageView, sampler vk.Sampler) {\n\n\tvar descriptorImageInfo = vk.DescriptorImageInfo{}\n\tdescriptorImageInfo.ImageView = imageView\n\tdescriptorImageInfo.ImageLayout = layout\n\tdescriptorImageInfo.Sampler = sampler\n\n\tvar writeDescriptorSet = vk.WriteDescriptorSet{}\n\twriteDescriptorSet.SType = vk.StructureTypeWriteDescriptorSet\n\twriteDescriptorSet.DstBinding = uint32(dstBinding) // write to the first, and only binding.\n\twriteDescriptorSet.DescriptorCount = 1 // update a single descriptor.\n\twriteDescriptorSet.DescriptorType = vk.DescriptorTypeCombinedImageSampler\n\twriteDescriptorSet.PImageInfo = []vk.DescriptorImageInfo{descriptorImageInfo}\n\n\tif du.VKWriteDiscriptorSet == nil {\n\t\tdu.VKWriteDiscriptorSet = make([]vk.WriteDescriptorSet, 0)\n\t}\n\tdu.VKWriteDiscriptorSet = append(du.VKWriteDiscriptorSet, writeDescriptorSet)\n\n}",
"func (s *fakeTracerProviderStore) RegisterSampler(sampler sdktrace.Sampler) {\n\ts.sampler = sampler\n}",
"func releaseSampler(s *Sampler) {\n\tif s.clSampler != nil {\n\t\tC.clReleaseSampler(s.clSampler)\n\t\ts.clSampler = nil\n\t}\n}",
"func NamedRenderbufferStorageMultisample(renderbuffer uint32, samples int32, internalformat uint32, width int32, height int32) {\n\tC.glowNamedRenderbufferStorageMultisample(gpNamedRenderbufferStorageMultisample, (C.GLuint)(renderbuffer), (C.GLsizei)(samples), (C.GLenum)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func NamedRenderbufferStorageMultisample(renderbuffer uint32, samples int32, internalformat uint32, width int32, height int32) {\n\tC.glowNamedRenderbufferStorageMultisample(gpNamedRenderbufferStorageMultisample, (C.GLuint)(renderbuffer), (C.GLsizei)(samples), (C.GLenum)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func (group *Group) Use(handler ...Handler) {\n\tfor _, h := range handler{\n\t\tgroup.handlers = append(group.handlers, h)\n\t}\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n C.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func NewSamples(dir, scsynthAddr string) (*Samples, error) {\n\tsamp, err := sampler.New(scsynthAddr)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ts := &Samples{\n\t\tSampler: samp,\n\t}\n\tif err := s.loadSamples(dir); err != nil {\n\t\treturn nil, errors.Wrap(err, \"loading samples\")\n\t}\n\tlog.Println(\"loaded samples\")\n\n\treturn s, nil\n}",
"func (s *opentelemetryTracerProviderStore) RegisterSampler(sampler sdktrace.Sampler) {\n\ts.sampler = sampler\n}",
"func (c *Collector) CollectSamples(resourceName string, sampler StateSampler) {\n\tsamples := make([]Sample, 0)\n\tfor {\n\t\tselect {\n\t\tdefault:\n\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\"ResourceName\": resourceName,\n\t\t\t}).Debug(\"Getting sample\")\n\t\t\tsample, err := sampler.Sample(resourceName)\n\t\t\tif err != nil {\n\t\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\t\"ResourceName\": resourceName,\n\t\t\t\t}).Error(\"Warning error reading sample\")\n\t\t\t} else {\n\t\t\t\tsamples = append(samples, sample)\n\t\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\t\"ResourceName\": resourceName,\n\t\t\t\t\t\"SampleCount\": len(samples),\n\t\t\t\t}).Debug(\"Retrieved sample\")\n\t\t\t\ttime.Sleep(time.Duration(c.SampleRate) * time.Second)\n\t\t\t}\n\t\tcase <-c.Collect:\n\t\t\tsampleCount := len(samples)\n\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\"ResourceName\": resourceName,\n\t\t\t\t\"SampleCount\": sampleCount,\n\t\t\t}).Debug(\"Received notification to collect samples\")\n\t\t\tc.SampleResults <- samples\n\t\t\t// Creating a new sample array with last sample as the 1st sample of the next collection\n\t\t\tif sampleCount > 0 {\n\t\t\t\tlastSample := samples[sampleCount-1]\n\t\t\t\tsamples = make([]Sample, 1)\n\t\t\t\tsamples[0] = lastSample\n\t\t\t} else {\n\t\t\t\t// no previous samples\n\t\t\t\tsamples = make([]Sample, 0)\n\t\t\t}\n\t\tcase <-c.Done:\n\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\"ResourceName\": resourceName,\n\t\t\t}).Debug(\"Received notification to stop collecting\")\n\t\t\treturn\n\t\t}\n\t}\n}",
"func (s *Service) Use(h ...Handler) {\n if h != nil {\n for _, e := range h {\n s.pipeline = s.pipeline.Add(e)\n }\n }\n}",
"func prepareBackends(base string, n int) []*core.Backend {\n\tbackends := make([]*core.Backend, n)\n\n\tfor i := 0; i < n; i++ {\n\t\tbackends[i] = &core.Backend{\n\t\t\tTarget: core.Target{\n\t\t\t\tHost: fmt.Sprintf(\"%s.%d\", base, i+1),\n\t\t\t\tPort: fmt.Sprintf(\"%d\", 1000+i),\n\t\t\t},\n\t\t}\n\t}\n\n\treturn backends\n}",
"func newSampler(extraRate float64, maxTPS float64) *Sampler {\n\ts := &Sampler{\n\t\tBackend: NewMemoryBackend(defaultDecayPeriod, defaultDecayFactor),\n\t\textraRate: extraRate,\n\t\tmaxTPS: maxTPS,\n\t\trateThresholdTo1: defaultSamplingRateThresholdTo1,\n\t\tsignatureScoreOffset: atomic.NewFloat(0),\n\t\tsignatureScoreSlope: atomic.NewFloat(0),\n\t\tsignatureScoreFactor: atomic.NewFloat(0),\n\n\t\texit: make(chan struct{}),\n\t}\n\n\ts.SetSignatureCoefficients(initialSignatureScoreOffset, defaultSignatureScoreSlope)\n\n\treturn s\n}",
"func (c *Collector) Collect(sampleContainers []stats.SampleContainer) {\n\tselect {\n\tcase <-c.stopSendingMetricsCh:\n\t\treturn\n\tdefault:\n\t}\n\n\tif c.referenceID == \"\" {\n\t\treturn\n\t}\n\n\tnewSamples := []*Sample{}\n\tnewHTTPTrails := []*httpext.Trail{}\n\n\tfor _, sampleContainer := range sampleContainers {\n\t\tswitch sc := sampleContainer.(type) {\n\t\tcase *httpext.Trail:\n\t\t\tsc = useCloudTags(sc)\n\t\t\t// Check if aggregation is enabled,\n\t\t\tif c.config.AggregationPeriod.Duration > 0 {\n\t\t\t\tnewHTTPTrails = append(newHTTPTrails, sc)\n\t\t\t} else {\n\t\t\t\tnewSamples = append(newSamples, NewSampleFromTrail(sc))\n\t\t\t}\n\t\tcase *netext.NetTrail:\n\t\t\t// TODO: aggregate?\n\t\t\tvalues := map[string]float64{\n\t\t\t\tmetrics.DataSent.Name: float64(sc.BytesWritten),\n\t\t\t\tmetrics.DataReceived.Name: float64(sc.BytesRead),\n\t\t\t}\n\n\t\t\tif sc.FullIteration {\n\t\t\t\tvalues[metrics.IterationDuration.Name] = stats.D(sc.EndTime.Sub(sc.StartTime))\n\t\t\t\tvalues[metrics.Iterations.Name] = 1\n\t\t\t}\n\n\t\t\tnewSamples = append(newSamples, &Sample{\n\t\t\t\tType: DataTypeMap,\n\t\t\t\tMetric: \"iter_li_all\",\n\t\t\t\tData: &SampleDataMap{\n\t\t\t\t\tTime: toMicroSecond(sc.GetTime()),\n\t\t\t\t\tTags: sc.GetTags(),\n\t\t\t\t\tValues: values,\n\t\t\t\t},\n\t\t\t})\n\t\tdefault:\n\t\t\tfor _, sample := range sampleContainer.GetSamples() {\n\t\t\t\tnewSamples = append(newSamples, &Sample{\n\t\t\t\t\tType: DataTypeSingle,\n\t\t\t\t\tMetric: sample.Metric.Name,\n\t\t\t\t\tData: &SampleDataSingle{\n\t\t\t\t\t\tType: sample.Metric.Type,\n\t\t\t\t\t\tTime: toMicroSecond(sample.Time),\n\t\t\t\t\t\tTags: sample.Tags,\n\t\t\t\t\t\tValue: sample.Value,\n\t\t\t\t\t},\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(newSamples) > 0 || len(newHTTPTrails) > 0 {\n\t\tc.bufferMutex.Lock()\n\t\tc.bufferSamples = append(c.bufferSamples, newSamples...)\n\t\tc.bufferHTTPTrails = append(c.bufferHTTPTrails, newHTTPTrails...)\n\t\tc.bufferMutex.Unlock()\n\t}\n}",
"func sampler(done <-chan struct{}, policies []GamePolicy, position <-chan job, outcome chan<- job) {\n\n\tfor task := range position {\n\t\tnode, decision := task.node, task.decision\n\n\t\tif node == nil {\n\t\t\tcontinue\n\t\t}\n\n\t\tnode.Lock()\n\t\tstate := node.state.Clone()\n\t\tnode.Unlock()\n\n\t\tswitch node.Status() {\n\t\tcase walked:\n\t\t\tnode.SetStatus(simulating)\n\t\t\t//log.Printf(\"sampler: %v node %p\\n\", node.Status(), node)\n\t\tdefault:\n\t\t\t//log.Printf(\"sampler: discarding already %v node %p\\n\", node.Status(), node)\n\t\t\tcontinue\n\t\t}\n\n\t\tsampled := decision.Join(state.Sample(done, policies[0]))\n\n\t\tselect {\n\t\tcase <-done:\n\t\t\treturn\n\t\tcase outcome <- job{node, sampled}:\n\t\t\tnode.SetStatus(simulated)\n\t\t}\n\t}\n}",
"func (g *GLTF) applySampler(samplerIdx int, tex *texture.Texture2D) error {\n\n\tlog.Debug(\"Applying Sampler %d\", samplerIdx)\n\t// Check if provided sampler index is valid\n\tif samplerIdx < 0 || samplerIdx >= len(g.Samplers) {\n\t\treturn fmt.Errorf(\"invalid sampler index\")\n\t}\n\tsampler := g.Samplers[samplerIdx]\n\n\t// Magnification filter\n\tmagFilter := gls.LINEAR\n\tif sampler.MagFilter != nil {\n\t\tmagFilter = *sampler.MagFilter\n\t}\n\ttex.SetMagFilter(uint32(magFilter))\n\n\t// Minification filter\n\tminFilter := gls.LINEAR_MIPMAP_LINEAR\n\tif sampler.MinFilter != nil {\n\t\tminFilter = *sampler.MinFilter\n\t}\n\ttex.SetMinFilter(uint32(minFilter))\n\n\t// S coordinate wrapping mode\n\twrapS := gls.REPEAT\n\tif sampler.WrapS != nil {\n\t\twrapS = *sampler.WrapS\n\t}\n\ttex.SetWrapS(uint32(wrapS))\n\n\t// T coordinate wrapping mode\n\twrapT := gls.REPEAT\n\tif sampler.WrapT != nil {\n\t\twrapT = *sampler.WrapT\n\t}\n\ttex.SetWrapT(uint32(wrapT))\n\n\treturn nil\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n C.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func Sink(zipkinSpans <-chan proxy.Span) {\n\tfor span := range zipkinSpans {\n\t\tsinkSpan(span)\n\t}\n}",
"func (p *Parameters) BindArguments(ctx *Context, args ...Value) {\n\tfor index, name := range p.names {\n\t\tvar arg Value\n\t\tif index < len(args) {\n\t\t\targ = args[index]\n\t\t}\n\t\tctx.AddSymbol(name, arg)\n\t}\n}",
"func (level *Level) SetTextures(newIds []int) {\n\tblockStore := level.store.Get(res.ResourceID(4000 + level.id*100 + 7))\n\tvar ids [54]uint16\n\ttoCopy := len(ids)\n\n\tif len(newIds) < toCopy {\n\t\ttoCopy = len(newIds)\n\t}\n\tfor index := 0; index < len(ids); index++ {\n\t\tids[index] = uint16(newIds[index])\n\t}\n\n\tbuffer := bytes.NewBuffer(nil)\n\tbinary.Write(buffer, binary.LittleEndian, &ids)\n\tblockStore.SetBlockData(0, buffer.Bytes())\n}",
"func newAudioSampler(clockRate uint32, latency time.Duration) samplerFunc {\n\tsamples := uint32(math.Round(float64(clockRate) * latency.Seconds()))\n\treturn samplerFunc(func() uint32 {\n\t\treturn samples\n\t})\n}",
"func (c *Collector) Collect(scs []stats.SampleContainer) {\n\tc.lock.Lock()\n\tfor _, sc := range scs {\n\t\tc.Samples = append(c.Samples, sc.GetSamples()...)\n\t}\n\tc.lock.Unlock()\n}",
"func WithSinks(sinks ...phono.Sink) Option {\n\tfor _, sink := range sinks {\n\t\tif sink.ID() == \"\" {\n\t\t\tpanic(ErrComponentNoID)\n\t\t}\n\t}\n\treturn func(p *Pipe) error {\n\t\tfor _, sink := range sinks {\n\t\t\tr, err := newSinkRunner(p.ID(), sink)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tp.sinks = append(p.sinks, r)\n\t\t}\n\t\treturn nil\n\t}\n}",
"func push(q *availableUnits, units ...*workUnit) {\n\tfor _, unit := range units {\n\t\tq.Add(unit)\n\t}\n}",
"func initExamples(obj exampler, attr *expr.AttributeExpr, r *expr.ExampleGenerator) {\n\texamples := attr.ExtractUserExamples()\n\tswitch {\n\tcase len(examples) > 1:\n\t\trefs := make(map[string]*ExampleRef, len(examples))\n\t\tfor _, ex := range examples {\n\t\t\texample := &Example{\n\t\t\t\tSummary: ex.Summary,\n\t\t\t\tDescription: ex.Description,\n\t\t\t\tValue: ex.Value,\n\t\t\t}\n\t\t\trefs[ex.Summary] = &ExampleRef{Value: example}\n\t\t}\n\t\tobj.setExamples(refs)\n\t\treturn\n\tcase len(examples) > 0:\n\t\tobj.setExample(examples[0].Value)\n\tdefault:\n\t\tobj.setExample(attr.Example(r))\n\t}\n}",
"func init() {\n\tf := GeneralMatrices\n\tfor level := 0; level < 2; level++ {\n\t\tmf := MutateFixtures(f, f)\n\t\tf = append(f, mf...)\n\t}\n\tTestFixtures = f\n\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTextureUnit, 2, uintptr(unit), uintptr(texture), 0)\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func (c *Context) Use(h ...Handler) {\n if h != nil {\n for _, e := range h {\n c.pipeline = c.pipeline.Add(e)\n }\n }\n}",
"func AllCandidateSampler(scope *Scope, true_classes tf.Output, num_true int64, num_sampled int64, unique bool, optional ...AllCandidateSamplerAttr) (sampled_candidates tf.Output, true_expected_count tf.Output, sampled_expected_count tf.Output) {\n\tif scope.Err() != nil {\n\t\treturn\n\t}\n\tattrs := map[string]interface{}{\"num_true\": num_true, \"num_sampled\": num_sampled, \"unique\": unique}\n\tfor _, a := range optional {\n\t\ta(attrs)\n\t}\n\topspec := tf.OpSpec{\n\t\tType: \"AllCandidateSampler\",\n\t\tInput: []tf.Input{\n\t\t\ttrue_classes,\n\t\t},\n\t\tAttrs: attrs,\n\t}\n\top := scope.AddOperation(opspec)\n\treturn op.Output(0), op.Output(1), op.Output(2)\n}",
"func (bs Bindings) AddInjection(i interface{}) Bindings {\n\tfor _, b := range bs {\n\t\tb.AddInjection(i)\n\t}\n\treturn bs\n}",
"func multiWriter(writers ...io.Writer) (*multiWriterHolder, error) {\n\tmw := &multiWriterHolder{}\n\tfor x := range writers {\n\t\terr := mw.add(writers[x])\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn mw, nil\n}",
"func multiWriter(writers ...io.Writer) (*multiWriterHolder, error) {\n\tmw := &multiWriterHolder{}\n\tfor x := range writers {\n\t\terr := mw.add(writers[x])\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn mw, nil\n}",
"func TraceSampler(o Options) trace.Sampler {\n\treturn trace.ProbabilitySampler(o.SamplingProbability)\n}",
"func RenderbufferStorageMultisample(target uint32, samples int32, internalformat uint32, width int32, height int32) {\n C.glowRenderbufferStorageMultisample(gpRenderbufferStorageMultisample, (C.GLenum)(target), (C.GLsizei)(samples), (C.GLenum)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func (r *Reflex) batch(out chan<- string, in <-chan string) {\n\tfor name := range in {\n\t\tr.backlog.Add(name)\n\t\ttimer := time.NewTimer(300 * time.Millisecond)\n\touter:\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase name := <-in:\n\t\t\t\tr.backlog.Add(name)\n\t\t\tcase <-timer.C:\n\t\t\t\tfor {\n\t\t\t\t\tselect {\n\t\t\t\t\tcase name := <-in:\n\t\t\t\t\t\tr.backlog.Add(name)\n\t\t\t\t\tcase out <- r.backlog.Next():\n\t\t\t\t\t\tif r.backlog.RemoveOne() {\n\t\t\t\t\t\t\tbreak outer\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}",
"func multiWriter(writers ...io.Writer) (*multiWriterHolder, error) {\n\tmw := &multiWriterHolder{}\n\tfor x := range writers {\n\t\terr := mw.Add(writers[x])\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn mw, nil\n}",
"func LoadTextures(eng sprite.Engine) map[string]sprite.SubTex {\n\tallTexs := make(map[string]sprite.SubTex)\n\tboundedImgs := []string{\"Clubs-2.png\", \"Clubs-3.png\", \"Clubs-4.png\", \"Clubs-5.png\", \"Clubs-6.png\", \"Clubs-7.png\", \"Clubs-8.png\",\n\t\t\"Clubs-9.png\", \"Clubs-10.png\", \"Clubs-Jack.png\", \"Clubs-Queen.png\", \"Clubs-King.png\", \"Clubs-Ace.png\",\n\t\t\"Diamonds-2.png\", \"Diamonds-3.png\", \"Diamonds-4.png\", \"Diamonds-5.png\", \"Diamonds-6.png\", \"Diamonds-7.png\", \"Diamonds-8.png\",\n\t\t\"Diamonds-9.png\", \"Diamonds-10.png\", \"Diamonds-Jack.png\", \"Diamonds-Queen.png\", \"Diamonds-King.png\", \"Diamonds-Ace.png\",\n\t\t\"Spades-2.png\", \"Spades-3.png\", \"Spades-4.png\", \"Spades-5.png\", \"Spades-6.png\", \"Spades-7.png\", \"Spades-8.png\",\n\t\t\"Spades-9.png\", \"Spades-10.png\", \"Spades-Jack.png\", \"Spades-Queen.png\", \"Spades-King.png\", \"Spades-Ace.png\",\n\t\t\"Hearts-2.png\", \"Hearts-3.png\", \"Hearts-4.png\", \"Hearts-5.png\", \"Hearts-6.png\", \"Hearts-7.png\", \"Hearts-8.png\",\n\t\t\"Hearts-9.png\", \"Hearts-10.png\", \"Hearts-Jack.png\", \"Hearts-Queen.png\", \"Hearts-King.png\", \"Hearts-Ace.png\", \"BakuSquare.png\",\n\t}\n\tunboundedImgs := []string{\"Club.png\", \"Diamond.png\", \"Spade.png\", \"Heart.png\", \"gray.jpeg\", \"blue.png\", \"trickDrop.png\",\n\t\t\"trickDropBlue.png\", \"player0.jpeg\", \"player1.jpeg\", \"player2.jpeg\", \"player3.jpeg\", \"laptopIcon.png\", \"watchIcon.png\",\n\t\t\"phoneIcon.png\", \"tabletIcon.png\", \"A-Upper.png\", \"B-Upper.png\", \"C-Upper.png\", \"D-Upper.png\", \"E-Upper.png\", \"F-Upper.png\",\n\t\t\"G-Upper.png\", \"H-Upper.png\", \"I-Upper.png\", \"J-Upper.png\", \"K-Upper.png\", \"L-Upper.png\", \"M-Upper.png\", \"N-Upper.png\",\n\t\t\"O-Upper.png\", \"P-Upper.png\", \"Q-Upper.png\", \"R-Upper.png\", \"S-Upper.png\", \"T-Upper.png\", \"U-Upper.png\", \"V-Upper.png\",\n\t\t\"W-Upper.png\", \"X-Upper.png\", \"Y-Upper.png\", \"Z-Upper.png\", \"A-Lower.png\", \"B-Lower.png\", \"C-Lower.png\", \"D-Lower.png\",\n\t\t\"E-Lower.png\", \"F-Lower.png\", \"G-Lower.png\", \"H-Lower.png\", \"I-Lower.png\", \"J-Lower.png\", \"K-Lower.png\", \"L-Lower.png\",\n\t\t\"M-Lower.png\", \"N-Lower.png\", \"O-Lower.png\", \"P-Lower.png\", \"Q-Lower.png\", \"R-Lower.png\", \"S-Lower.png\", \"T-Lower.png\",\n\t\t\"U-Lower.png\", \"V-Lower.png\", \"W-Lower.png\", \"X-Lower.png\", \"Y-Lower.png\", \"Z-Lower.png\", \"Space.png\", \"Colon.png\", \"Bang.png\",\n\t\t\"Apostrophe.png\", \"1.png\", \"2.png\", \"3.png\", \"4.png\", \"5.png\", \"6.png\", \"7.png\", \"8.png\", \"9.png\", \"0.png\", \"1-Red.png\",\n\t\t\"2-Red.png\", \"3-Red.png\", \"4-Red.png\", \"5-Red.png\", \"6-Red.png\", \"7-Red.png\", \"8-Red.png\", \"9-Red.png\", \"0-Red.png\",\n\t\t\"1-DBlue.png\", \"2-DBlue.png\", \"3-DBlue.png\", \"4-DBlue.png\", \"5-DBlue.png\", \"6-DBlue.png\", \"7-DBlue.png\", \"8-DBlue.png\",\n\t\t\"9-DBlue.png\", \"0-DBlue.png\", \"A-Upper-DBlue.png\", \"B-Upper-DBlue.png\",\n\t\t\"C-Upper-DBlue.png\", \"D-Upper-DBlue.png\", \"E-Upper-DBlue.png\", \"F-Upper-DBlue.png\", \"G-Upper-DBlue.png\", \"H-Upper-DBlue.png\",\n\t\t\"I-Upper-DBlue.png\", \"J-Upper-DBlue.png\", \"K-Upper-DBlue.png\", \"L-Upper-DBlue.png\", \"M-Upper-DBlue.png\", \"N-Upper-DBlue.png\",\n\t\t\"O-Upper-DBlue.png\", \"P-Upper-DBlue.png\", \"Q-Upper-DBlue.png\", \"R-Upper-DBlue.png\", \"S-Upper-DBlue.png\", \"T-Upper-DBlue.png\",\n\t\t\"U-Upper-DBlue.png\", \"V-Upper-DBlue.png\", \"W-Upper-DBlue.png\", \"X-Upper-DBlue.png\", \"Y-Upper-DBlue.png\", 
\"Z-Upper-DBlue.png\",\n\t\t\"A-Lower-DBlue.png\", \"B-Lower-DBlue.png\", \"C-Lower-DBlue.png\", \"D-Lower-DBlue.png\", \"E-Lower-DBlue.png\", \"F-Lower-DBlue.png\",\n\t\t\"G-Lower-DBlue.png\", \"H-Lower-DBlue.png\", \"I-Lower-DBlue.png\", \"J-Lower-DBlue.png\", \"K-Lower-DBlue.png\", \"L-Lower-DBlue.png\",\n\t\t\"M-Lower-DBlue.png\", \"N-Lower-DBlue.png\", \"O-Lower-DBlue.png\", \"P-Lower-DBlue.png\", \"Q-Lower-DBlue.png\", \"R-Lower-DBlue.png\",\n\t\t\"S-Lower-DBlue.png\", \"T-Lower-DBlue.png\", \"U-Lower-DBlue.png\", \"V-Lower-DBlue.png\", \"W-Lower-DBlue.png\", \"X-Lower-DBlue.png\",\n\t\t\"Y-Lower-DBlue.png\", \"Z-Lower-DBlue.png\", \"Apostrophe-DBlue.png\", \"Space-DBlue.png\", \"A-Upper-LBlue.png\", \"B-Upper-LBlue.png\",\n\t\t\"C-Upper-LBlue.png\", \"D-Upper-LBlue.png\", \"E-Upper-LBlue.png\", \"F-Upper-LBlue.png\", \"G-Upper-LBlue.png\", \"H-Upper-LBlue.png\",\n\t\t\"I-Upper-LBlue.png\", \"J-Upper-LBlue.png\", \"K-Upper-LBlue.png\", \"L-Upper-LBlue.png\", \"M-Upper-LBlue.png\", \"N-Upper-LBlue.png\",\n\t\t\"O-Upper-LBlue.png\", \"P-Upper-LBlue.png\", \"Q-Upper-LBlue.png\", \"R-Upper-LBlue.png\", \"S-Upper-LBlue.png\", \"T-Upper-LBlue.png\",\n\t\t\"U-Upper-LBlue.png\", \"V-Upper-LBlue.png\", \"W-Upper-LBlue.png\", \"X-Upper-LBlue.png\", \"Y-Upper-LBlue.png\", \"Z-Upper-LBlue.png\",\n\t\t\"A-Lower-LBlue.png\", \"B-Lower-LBlue.png\", \"C-Lower-LBlue.png\", \"D-Lower-LBlue.png\", \"E-Lower-LBlue.png\", \"F-Lower-LBlue.png\",\n\t\t\"G-Lower-LBlue.png\", \"H-Lower-LBlue.png\", \"I-Lower-LBlue.png\", \"J-Lower-LBlue.png\", \"K-Lower-LBlue.png\", \"L-Lower-LBlue.png\",\n\t\t\"M-Lower-LBlue.png\", \"N-Lower-LBlue.png\", \"O-Lower-LBlue.png\", \"P-Lower-LBlue.png\", \"Q-Lower-LBlue.png\", \"R-Lower-LBlue.png\",\n\t\t\"S-Lower-LBlue.png\", \"T-Lower-LBlue.png\", \"U-Lower-LBlue.png\", \"V-Lower-LBlue.png\", \"W-Lower-LBlue.png\", \"X-Lower-LBlue.png\",\n\t\t\"Y-Lower-LBlue.png\", \"Z-Lower-LBlue.png\", \"A-Upper-Gray.png\", \"B-Upper-Gray.png\", \"C-Upper-Gray.png\", \"D-Upper-Gray.png\",\n\t\t\"E-Upper-Gray.png\", \"F-Upper-Gray.png\", \"G-Upper-Gray.png\", \"H-Upper-Gray.png\", \"I-Upper-Gray.png\", \"J-Upper-Gray.png\",\n\t\t\"K-Upper-Gray.png\", \"L-Upper-Gray.png\", \"M-Upper-Gray.png\", \"N-Upper-Gray.png\", \"O-Upper-Gray.png\", \"P-Upper-Gray.png\",\n\t\t\"Q-Upper-Gray.png\", \"R-Upper-Gray.png\", \"S-Upper-Gray.png\", \"T-Upper-Gray.png\", \"U-Upper-Gray.png\", \"V-Upper-Gray.png\",\n\t\t\"W-Upper-Gray.png\", \"X-Upper-Gray.png\", \"Y-Upper-Gray.png\", \"Z-Upper-Gray.png\", \"A-Lower-Gray.png\", \"B-Lower-Gray.png\",\n\t\t\"C-Lower-Gray.png\", \"D-Lower-Gray.png\", \"E-Lower-Gray.png\", \"F-Lower-Gray.png\", \"G-Lower-Gray.png\", \"H-Lower-Gray.png\",\n\t\t\"I-Lower-Gray.png\", \"J-Lower-Gray.png\", \"K-Lower-Gray.png\", \"L-Lower-Gray.png\", \"M-Lower-Gray.png\", \"N-Lower-Gray.png\",\n\t\t\"O-Lower-Gray.png\", \"P-Lower-Gray.png\", \"Q-Lower-Gray.png\", \"R-Lower-Gray.png\", \"S-Lower-Gray.png\", \"T-Lower-Gray.png\",\n\t\t\"U-Lower-Gray.png\", \"V-Lower-Gray.png\", \"W-Lower-Gray.png\", \"X-Lower-Gray.png\", \"Y-Lower-Gray.png\", \"Z-Lower-Gray.png\",\n\t\t\"Space-Gray.png\", \"RoundedRectangle-DBlue.png\", \"RoundedRectangle-LBlue.png\", \"RoundedRectangle-Gray.png\", \"Rectangle-LBlue.png\",\n\t\t\"Rectangle-DBlue.png\", \"HorizontalPullTab.png\", \"VerticalPullTab.png\", \"NewGamePressed.png\", \"NewGameUnpressed.png\",\n\t\t\"NewRoundPressed.png\", \"NewRoundUnpressed.png\", \"JoinGamePressed.png\", \"JoinGameUnpressed.png\", \"Period.png\",\n\t\t\"SitSpotPressed.png\", 
\"SitSpotUnpressed.png\", \"WatchSpotPressed.png\", \"WatchSpotUnpressed.png\", \"StartBlue.png\", \"StartGray.png\",\n\t\t\"StartBluePressed.png\", \"Restart.png\", \"Visibility.png\", \"VisibilityOff.png\", \"QuitPressed.png\", \"QuitUnpressed.png\",\n\t\t\"PassPressed.png\", \"PassUnpressed.png\", \"RightArrowBlue.png\", \"LeftArrowBlue.png\", \"AcrossArrowBlue.png\", \"RightArrowGray.png\",\n\t\t\"LeftArrowGray.png\", \"AcrossArrowGray.png\", \"TakeTrickTableUnpressed.png\", \"TakeTrickTablePressed.png\", \"TakeTrickHandPressed.png\",\n\t\t\"TakeTrickHandUnpressed.png\", \"android.png\", \"cat.png\", \"man.png\", \"woman.png\", \"TakeUnpressed.png\", \"TakePressed.png\",\n\t\t\"UnplayedBorder1.png\", \"UnplayedBorder2.png\", \"RejoinPressed.png\", \"RejoinUnpressed.png\",\n\t}\n\tfor _, f := range boundedImgs {\n\t\ta, err := asset.Open(f)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\timg, _, err := image.Decode(a)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tt, err := eng.LoadTexture(img)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\timgWidth, imgHeight := t.Bounds()\n\t\tallTexs[f] = sprite.SubTex{t, image.Rect(0, 0, imgWidth, imgHeight)}\n\t\ta.Close()\n\t}\n\tfor _, f := range unboundedImgs {\n\t\ta, err := asset.Open(f)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\timg, _, err := image.Decode(a)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tt, err := eng.LoadTexture(img)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\timgWidth, imgHeight := t.Bounds()\n\t\tallTexs[f] = sprite.SubTex{t, image.Rect(1, 1, imgWidth-1, imgHeight-1)}\n\t\ta.Close()\n\t}\n\treturn allTexs\n}",
"func (i *Interpreter) learnTechniques(techniques []Technique) {\n\ti.techniquesLock <- empty{}\n\ti.Techniques = append(i.Techniques, techniques...)\n\t<-i.techniquesLock\n}",
"func DeleteSamplers(count int32, samplers *uint32) {\n C.glowDeleteSamplers(gpDeleteSamplers, (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func (v Var[V]) Bind(s *Spec) Var[V] {\n\ts.testingTB.Helper()\n\tfor _, s := range s.specsFromCurrent() {\n\t\tif s.vars.Knows(v.ID) {\n\t\t\treturn v\n\t\t}\n\t}\n\treturn v.Let(s, v.Init)\n}",
"func (h *PipelineManager) Use(mids ...Middleware) {\n\th.mids = append(h.mids, mids...)\n}",
"func (s *Stream) bindOps() {\n\ts.log.Print(\"binding operators\")\n\tif s.ops == nil {\n\t\treturn\n\t}\n\tfor i, op := range s.ops {\n\t\tif i == 0 { // link 1st to source\n\t\t\top.SetInput(s.source.GetOutput())\n\t\t} else {\n\t\t\top.SetInput(s.ops[i-1].GetOutput())\n\t\t}\n\t}\n}",
"func txs(cpu *CPU, step *runStep) {\n\tcpu.sp = cpu.x\n}",
"func (c *Context) Timing(stat string, value float64) {\n\tfor _, sink := range c.sinks {\n\t\tsink.Timing(c, stat, value)\n\t}\n}",
"func SetUnitNames(u1, u2, u3, u4, u5, u6, u7 string) {\n\tunitNames[0] = u1\n\tunitNames[1] = u2\n\tunitNames[2] = u3\n\tunitNames[3] = u4\n\tunitNames[4] = u5\n\tunitNames[5] = u6\n\tunitNames[6] = u7\n}",
"func (s *shards) sendSamplesWithBackoff(samples model.Samples) {\n\tbackoff := s.qm.cfg.MinBackoff\n\treq := ToWriteRequest(samples)\n\n\tfor { // Upstream now retries indefinitely\n\t\tbegin := time.Now()\n\t\terr := s.qm.client.Store(s.ctx, req)\n\n\t\tsentBatchDuration.WithLabelValues(s.qm.queueName).Observe(time.Since(begin).Seconds())\n\t\tif err == nil {\n\t\t\tsucceededSamplesTotal.WithLabelValues(s.qm.queueName).Add(float64(len(samples)))\n\t\t\treturn\n\t\t}\n\n\t\tlevel.Warn(s.qm.logger).Log(\"msg\", \"Error sending samples to remote storage\", \"count\", len(samples), \"err\", err)\n\t\tif _, ok := err.(recoverableError); !ok {\n\t\t\tbreak\n\t\t}\n\t\ttime.Sleep(time.Duration(backoff))\n\t\tbackoff = backoff * 2\n\t\tif backoff > s.qm.cfg.MaxBackoff {\n\t\t\tbackoff = s.qm.cfg.MaxBackoff\n\t\t}\n\t}\n\n\tfailedSamplesTotal.WithLabelValues(s.qm.queueName).Add(float64(len(samples)))\n}",
"func runAmplifiers(initMem interpreter.Program, settings *phaseSettings) (signal int) {\n\t// 0 -> Amp A -> Amp B -> Amp C -> Amp D -> Amp E -> (to thrusters)\n\t// 5 amps, 6 channels\n\tchs := [ampCount + 1]chan int{}\n\tfor i := range chs {\n\t\tchs[i] = make(chan int)\n\t}\n\n\tfor i := 0; i < ampCount; i++ {\n\t\tgo func(icpy int) {\n\t\t\tinterpreter.New(initMem, makeInputDevice(settings[icpy], chs[icpy]), makeOutputDevice(chs[icpy+1])).Run()\n\t\t}(i)\n\t}\n\n\tchs[0] <- initialInput\n\treturn <-chs[ampCount]\n}",
"func (m *MeterImpl) collect(ctx context.Context, labels []attribute.KeyValue, measurements []Measurement) {\n\tm.provider.addMeasurement(Batch{\n\t\tCtx: ctx,\n\t\tLabels: labels,\n\t\tMeasurements: measurements,\n\t\tLibrary: m.library,\n\t})\n}",
"func (tw *MultiTimingsWrapper) Record(names []string, startTime time.Time) {\n\tif tw.name == \"\" {\n\t\ttw.timings.Record(names, startTime)\n\t\treturn\n\t}\n\tnewlabels := combineLabels(tw.name, names)\n\ttw.timings.Record(newlabels, startTime)\n}",
"func SetSampler(fixedTarget uint32, fallbackRate float64) {\n\tsegment.SetSampler(utils.NewSampler(fixedTarget, fallbackRate))\n}",
"func addStrings(s string, count int) {\n\twg.Add(count) // we will create \"count\" new goroutines\n\tfor n := 0; n < count; n++ {\n\t\tgo addString(s)\n\t}\n}",
"func (ms Mutations) Append(source Mutations) Mutations {\n\tif ms == nil {\n\t\tms = make(map[Context][]MutatorFunc)\n\t}\n\tfor id, fns := range source {\n\t\tif _, ok := ms[id]; ok {\n\t\t\tms[id] = append(ms[id], fns...)\n\t\t} else {\n\t\t\tms[id] = fns\n\t\t}\n\t}\n\treturn ms\n}",
"func (gen *Generator) Add(infos ...Info) {\n\tfor _, info := range infos {\n\t\tgen.enqueue(info)\n\t}\n}",
"func Distribute(rs Publisher, ms ...Sender) Publisher {\n\treturn rs.React(func(r Publisher, err error, data interface{}) {\n\t\tfor _, mi := range ms {\n\t\t\tgo func(rec Sender) {\n\t\t\t\tif err != nil {\n\t\t\t\t\trec.SendError(err)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\trec.Send(data)\n\t\t\t}(mi)\n\t\t}\n\t}, true)\n}",
"func (a *Ability) storeSamples(text string, samples []int32, sampleRate, significantBits int) (id string, err error) {\n\t// Create id\n\tid = filepath.Join(time.Now().Format(\"2006-01-02\"), xid.New().String())\n\n\t// Store samples wav\n\tif err = a.storeSamplesWav(id, samples, sampleRate, significantBits); err != nil {\n\t\terr = errors.Wrap(err, \"astiunderstanding: storing samples wav failed\")\n\t\treturn\n\t}\n\n\t// Store samples txt\n\tif err = a.storeSamplesTxt(id, text); err != nil {\n\t\terr = errors.Wrap(err, \"astiunderstanding: storing samples txt failed\")\n\t\treturn\n\t}\n\treturn\n}",
"func (b *builder) buildMetricsFromSamples() {\n\tfor _, sample := range b.Samples {\n\t\t// Get or create the metric family.\n\t\tmetricName := sample.Labels.Get(model.MetricNameLabel)\n\t\tmf := b.getOrCreateMetricFamily(metricName)\n\n\t\t// Retrieve the *dto.Metric based on labels.\n\t\tm := getOrCreateMetric(mf, sample.Labels)\n\t\tif sample.PrintTimestamp {\n\t\t\tm.TimestampMs = pointer.Int64(sample.Timestamp)\n\t\t}\n\n\t\tswitch familyType(mf) {\n\t\tcase dto.MetricType_COUNTER:\n\t\t\tm.Counter = &dto.Counter{\n\t\t\t\tValue: pointer.Float64(sample.Value),\n\t\t\t}\n\n\t\tcase dto.MetricType_GAUGE:\n\t\t\tm.Gauge = &dto.Gauge{\n\t\t\t\tValue: pointer.Float64(sample.Value),\n\t\t\t}\n\n\t\tcase dto.MetricType_SUMMARY:\n\t\t\tif m.Summary == nil {\n\t\t\t\tm.Summary = &dto.Summary{}\n\t\t\t}\n\n\t\t\tswitch {\n\t\t\tcase metricName == mf.GetName()+\"_count\":\n\t\t\t\tval := uint64(sample.Value)\n\t\t\t\tm.Summary.SampleCount = &val\n\t\t\tcase metricName == mf.GetName()+\"_sum\":\n\t\t\t\tm.Summary.SampleSum = pointer.Float64(sample.Value)\n\t\t\tcase metricName == mf.GetName():\n\t\t\t\tquantile, err := strconv.ParseFloat(sample.Labels.Get(model.QuantileLabel), 64)\n\t\t\t\tif err != nil {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\n\t\t\t\tm.Summary.Quantile = append(m.Summary.Quantile, &dto.Quantile{\n\t\t\t\t\tQuantile: &quantile,\n\t\t\t\t\tValue: pointer.Float64(sample.Value),\n\t\t\t\t})\n\t\t\t}\n\n\t\tcase dto.MetricType_UNTYPED:\n\t\t\tm.Untyped = &dto.Untyped{\n\t\t\t\tValue: pointer.Float64(sample.Value),\n\t\t\t}\n\n\t\tcase dto.MetricType_HISTOGRAM:\n\t\t\tif m.Histogram == nil {\n\t\t\t\tm.Histogram = &dto.Histogram{}\n\t\t\t}\n\n\t\t\tswitch {\n\t\t\tcase metricName == mf.GetName()+\"_count\":\n\t\t\t\tval := uint64(sample.Value)\n\t\t\t\tm.Histogram.SampleCount = &val\n\t\t\tcase metricName == mf.GetName()+\"_sum\":\n\t\t\t\tm.Histogram.SampleSum = pointer.Float64(sample.Value)\n\t\t\tcase metricName == mf.GetName()+\"_bucket\":\n\t\t\t\tboundary, err := strconv.ParseFloat(sample.Labels.Get(model.BucketLabel), 64)\n\t\t\t\tif err != nil {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\n\t\t\t\tcount := uint64(sample.Value)\n\n\t\t\t\tm.Histogram.Bucket = append(m.Histogram.Bucket, &dto.Bucket{\n\t\t\t\t\tUpperBound: &boundary,\n\t\t\t\t\tCumulativeCount: &count,\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\t}\n}",
"func Adapt(next http.Handler, chains ...Middleware) http.Handler {\n\tfor _, item := range chains {\n\t\tnext = item(next)\n\t}\n\treturn next\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n C.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func (a *Ability) websocketListenerSamples(c *astiws.Client, eventName string, payload json.RawMessage) error {\n\t// Unmarshal payload\n\tvar p PayloadSamples\n\tif err := json.Unmarshal(payload, &p); err != nil {\n\t\tastilog.Error(errors.Wrapf(err, \"astiunderstanding: json unmarshaling %s into %#v failed\", payload, p))\n\t\treturn nil\n\t}\n\n\t// Dispatch\n\ta.ch <- p\n\treturn nil\n}",
"func (w *World) initializeMobs(mobs []*Mob) {\n\tw.mobs = make(map[int]*Mob)\n\n\tfor k, v := range mobs {\n\t\tcell := w.getWorldCell(int(v.SpawnX), int(v.SpawnY))\n\t\tif cell == nil {\n\t\t\tlog.Error(\"Invalid world cell\")\n\t\t\tcontinue\n\t\t}\n\n\t\tmob := mobs[k]\n\t\tmob.Id = k + 1\n\t\tmob.world = w\n\t\tmob.cell = cell\n\n\t\tif parentMob := w.manager.GetMob(mob.Species); parentMob != nil {\n\t\t\tmob.Merge(parentMob)\n\t\t}\n\n\t\tw.mobs[mob.Id] = mob\n\t\tmob.Initialize()\n\t\tcell.AddMob(mob)\n\t}\n}",
"func init() {\n\tgather.Register(sqsRegName, &sqsCreator{})\n}",
"func (self *Weights) addMultiple(w weight,multiples int) {\n\tfor x:=multiples; x > 0; x-- {\n\t\tself.add(w)\n\t}\n}",
"func (b *Baa) Use(m ...Middleware) {\n\tfor i := range m {\n\t\tif m[i] != nil {\n\t\t\tb.middleware = append(b.middleware, wrapMiddleware(m[i]))\n\t\t}\n\t}\n}"
] | [
"0.62764835",
"0.62232316",
"0.61485314",
"0.61434525",
"0.59006494",
"0.58029324",
"0.58029324",
"0.56394106",
"0.56152266",
"0.5580354",
"0.5500185",
"0.5363383",
"0.5363383",
"0.5175494",
"0.51684535",
"0.50905794",
"0.50654525",
"0.49745017",
"0.48865214",
"0.47808418",
"0.4703786",
"0.46844378",
"0.46671176",
"0.46671176",
"0.46661937",
"0.46592826",
"0.46592826",
"0.46562526",
"0.46439195",
"0.46193424",
"0.46170014",
"0.46067128",
"0.4600074",
"0.45956868",
"0.4594039",
"0.45592916",
"0.45592916",
"0.4535705",
"0.45256498",
"0.44749916",
"0.44719538",
"0.44706523",
"0.44657856",
"0.44632512",
"0.44532806",
"0.44515884",
"0.44361785",
"0.44270796",
"0.44241667",
"0.44241667",
"0.442182",
"0.44195244",
"0.44077206",
"0.44051054",
"0.43888718",
"0.43847",
"0.4382091",
"0.4360172",
"0.43533036",
"0.43137512",
"0.428535",
"0.42799422",
"0.42799422",
"0.4278561",
"0.42784846",
"0.42774126",
"0.4273712",
"0.4273712",
"0.4267705",
"0.42651558",
"0.42528802",
"0.42528304",
"0.42518145",
"0.42213777",
"0.42104268",
"0.42060453",
"0.4204293",
"0.41942164",
"0.41820553",
"0.4177655",
"0.4175702",
"0.41694093",
"0.41680783",
"0.41649088",
"0.4151336",
"0.41469258",
"0.41467026",
"0.41429973",
"0.4140714",
"0.41399541",
"0.41337186",
"0.41335547",
"0.4131624",
"0.4131031",
"0.41267648",
"0.41257706",
"0.4123902",
"0.41225913",
"0.41217119"
] | 0.6016027 | 5 |
bind a named texture to a texturing target | func BindTexture(target uint32, texture uint32) {
C.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindTexture(target uint32, texture uint32) {\n C.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func BindTexture(target Enum, t Texture) {\n\tgl.BindTexture(uint32(target), t.Value)\n}",
"func BindTexture(target uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTexture, 2, uintptr(target), uintptr(texture), 0)\n}",
"func BindTexture(target GLEnum, texture Texture) {\n\tgl.BindTexture(uint32(target), uint32(texture))\n}",
"func (gl *WebGL) BindTexture(target GLEnum, texture WebGLTexture) {\n\tgl.context.Call(\"bindTexture\", target, texture)\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n C.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func (debugging *debuggingOpenGL) BindTexture(target uint32, texture uint32) {\n\tdebugging.recordEntry(\"BindTexture\", target, texture)\n\tdebugging.gl.BindTexture(target, texture)\n\tdebugging.recordExit(\"BindTexture\")\n}",
"func (native *OpenGL) BindTexture(target uint32, texture uint32) {\n\tgl.BindTexture(target, texture)\n}",
"func BindTexture(target Enum, texture Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tctexture, _ := (C.GLuint)(texture), cgoAllocsUnknown\n\tC.glBindTexture(ctarget, ctexture)\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTextureUnit, 2, uintptr(unit), uintptr(texture), 0)\n}",
"func (t *Texture2D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_2D, t.ID)\n}",
"func (t Texture3D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_3D, t.id)\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tsyscall.Syscall9(gpBindImageTexture, 7, uintptr(unit), uintptr(texture), uintptr(level), boolToUintptr(layered), uintptr(layer), uintptr(access), uintptr(format), 0, 0)\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n C.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func (c *Context) BindTexture(texture *Texture) {\n\tif texture == nil {\n\t\treturn\n\t}\n\tif c.currentTexture == nil || texture.id != c.currentTexture.id {\n\t\tgl.BindTexture(gl.TEXTURE_2D, texture.id)\n\t\tc.currentTexture = texture\n\t}\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tC.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tC.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func FramebufferTexture(target uint32, attachment uint32, texture uint32, level int32) {\n C.glowFramebufferTexture(gpFramebufferTexture, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n C.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n C.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func (self *TileSprite) SetTexture(texture *Texture) {\n self.Object.Call(\"setTexture\", texture)\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func NamedFramebufferTextureLayer(framebuffer uint32, attachment uint32, texture uint32, level int32, layer int32) {\n\tsyscall.Syscall6(gpNamedFramebufferTextureLayer, 5, uintptr(framebuffer), uintptr(attachment), uintptr(texture), uintptr(level), uintptr(layer), 0)\n}",
"func (gl *WebGL) UnbindTexture(target GLEnum) {\n\tgl.context.Call(\"bindTexture\", target, nil)\n}",
"func (self *TileSprite) SetTextureA(member *Texture) {\n self.Object.Set(\"texture\", member)\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n C.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func NamedFramebufferTextureLayer(framebuffer uint32, attachment uint32, texture uint32, level int32, layer int32) {\n\tC.glowNamedFramebufferTextureLayer(gpNamedFramebufferTextureLayer, (C.GLuint)(framebuffer), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(layer))\n}",
"func NamedFramebufferTextureLayer(framebuffer uint32, attachment uint32, texture uint32, level int32, layer int32) {\n\tC.glowNamedFramebufferTextureLayer(gpNamedFramebufferTextureLayer, (C.GLuint)(framebuffer), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(layer))\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n C.glowBindRenderbuffer(gpBindRenderbuffer, (C.GLenum)(target), (C.GLuint)(renderbuffer))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n C.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n\tsyscall.Syscall9(gpTextureView, 8, uintptr(texture), uintptr(target), uintptr(origtexture), uintptr(internalformat), uintptr(minlevel), uintptr(numlevels), uintptr(minlayer), uintptr(numlayers), 0)\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n\tC.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n\tC.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func (gl *WebGL) ActiveTexture(target GLEnum) {\n\tgl.context.Call(\"activeTexture\", target)\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func TexImage2D(target uint32, level int32, internalformat int32, width int32, height int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexImage2D(gpTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func (am *Manager) AddTexture(t *Texture) error {\n\tif _, ok := am.Textures[t.Name]; ok {\n\t\treturn fmt.Errorf(\"asset.Manager.AddTexture error: texture %s already exists\", t.Name)\n\t}\n\n\tam.Textures[t.Name] = t\n\n\treturn nil\n}",
"func FramebufferTextureLayer(target uint32, attachment uint32, texture uint32, level int32, layer int32) {\n C.glowFramebufferTextureLayer(gpFramebufferTextureLayer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(layer))\n}",
"func EGLImageTargetTexStorageEXT(target uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTexStorageEXT(gpEGLImageTargetTexStorageEXT, (C.GLenum)(target), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func EGLImageTargetTexStorageEXT(target uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTexStorageEXT(gpEGLImageTargetTexStorageEXT, (C.GLenum)(target), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func ActiveTexture(texture uint32) {\n C.glowActiveTexture(gpActiveTexture, (C.GLenum)(texture))\n}",
"func (c *Button) loadTextureFromTTF() {\n\tvar err error\n\tc.font = engosdl.GetFontManager().CreateFont(c.GetName(), c.FontFile, c.FontSize)\n\tc.texture = c.font.GetTextureFromFont(c.Message, c.Color)\n\t_, _, c.width, c.height, err = c.texture.Query()\n\tif err != nil {\n\t\tengosdl.Logger.Error().Err(err).Msg(\"Query error\")\n\t\tpanic(err)\n\t}\n\tc.GetEntity().GetTransform().SetDim(engosdl.NewVector(float64(c.width), float64(c.height)))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tC.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tC.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func (obj *Device) SetTexture(sampler uint32, texture BaseTextureImpl) Error {\n\tvar base uintptr\n\tif texture != nil {\n\t\tbase = texture.baseTexturePointer()\n\t}\n\tret, _, _ := syscall.Syscall(\n\t\tobj.vtbl.SetTexture,\n\t\t3,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(sampler),\n\t\tbase,\n\t)\n\treturn toErr(ret)\n}",
"func (self *TileSprite) SetTexture1O(texture *Texture, destroy bool) {\n self.Object.Call(\"setTexture\", texture, destroy)\n}",
"func (bm Blendmap) Texture() *gl.Texture {\n\treturn bm.Map.id\n}",
"func EGLImageTargetTexStorageEXT(target uint32, image unsafe.Pointer, attrib_list *int32) {\n\tsyscall.Syscall(gpEGLImageTargetTexStorageEXT, 3, uintptr(target), uintptr(image), uintptr(unsafe.Pointer(attrib_list)))\n}",
"func (self *TileSprite) SetTintedTextureA(member *Canvas) {\n self.Object.Set(\"tintedTexture\", member)\n}",
"func TextureBuffer(texture uint32, internalformat uint32, buffer uint32) {\n\tsyscall.Syscall(gpTextureBuffer, 3, uintptr(texture), uintptr(internalformat), uintptr(buffer))\n}",
"func TexParameterf(target, pname GLEnum, param float32) {\n\tgl.TexParameterf(uint32(target), uint32(pname), param)\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n C.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))\n}",
"func TexParameteri(target, pname GLEnum, param int32) {\n\tgl.TexParameteri(uint32(target), uint32(pname), param)\n}",
"func (spriteBatch *SpriteBatch) SetTexture(newtexture ITexture) {\n\tspriteBatch.texture = newtexture\n}",
"func (debugging *debuggingOpenGL) FramebufferTexture(target uint32, attachment uint32, texture uint32, level int32) {\n\tdebugging.recordEntry(\"FramebufferTexture\", target, attachment, texture, level)\n\tdebugging.gl.FramebufferTexture(target, attachment, texture, level)\n\tdebugging.recordExit(\"FramebufferTexture\")\n}",
"func TexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage2D(gpTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexParameterfv(target, pname Enum, params []float32) {\n\tgl.TexParameterfv(uint32(target), uint32(pname), ¶ms[0])\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tsyscall.Syscall(gpBindSampler, 2, uintptr(unit), uintptr(sampler), 0)\n}",
"func (animation *AnimationSet) AddTexture(texture *Texture) {\n\tsimlog.FuncIn()\n\tanimation.textures = append(animation.textures, texture)\n\tsimlog.FuncOut()\n}",
"func EGLImageTargetTextureStorageEXT(texture uint32, image unsafe.Pointer, attrib_list *int32) {\n\tsyscall.Syscall(gpEGLImageTargetTextureStorageEXT, 3, uintptr(texture), uintptr(image), uintptr(unsafe.Pointer(attrib_list)))\n}",
"func TexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage3D(gpTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func ActiveTexture(texture Enum) {\n\tgl.ActiveTexture(uint32(texture))\n}",
"func TexParam(wrap TextureWrap, filter TextureFilter) TextureParameters {\n\treturn TextureParameters{wrap, wrap, filter, filter}\n}",
"func TexImage2D(target uint32, level int32, internalformat int32, width int32, height int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexImage2D(gpTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexImage2D(target uint32, level int32, internalformat int32, width int32, height int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexImage2D(gpTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindImageTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func (native *OpenGL) BindSampler(unit uint32, sampler uint32) {\n\tgl.BindSampler(unit, sampler)\n}",
"func (obj *Device) UpdateTexture(sourceTexture, destTexture *BaseTexture) Error {\n\tret, _, _ := syscall.Syscall(\n\t\tobj.vtbl.UpdateTexture,\n\t\t3,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(unsafe.Pointer(sourceTexture)),\n\t\tuintptr(unsafe.Pointer(destTexture)),\n\t)\n\treturn toErr(ret)\n}",
"func NewTexture(scene *Scene, element *Element) *Texture {\n\tt := &Texture{\n\t\tObject: *NewObject(scene, element),\n\t}\n\treturn t\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n\tC.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n\tC.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func (self *TileSprite) LoadTexture(key interface{}) {\n self.Object.Call(\"loadTexture\", key)\n}",
"func TexParameterf(target, pname Enum, param float32) {\n\tgl.TexParameterf(uint32(target), uint32(pname), param)\n}",
"func FramebufferTexture(target uint32, attachment uint32, texture uint32, level int32) {\n\tC.glowFramebufferTexture(gpFramebufferTexture, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level))\n}",
"func FramebufferTexture(target uint32, attachment uint32, texture uint32, level int32) {\n\tC.glowFramebufferTexture(gpFramebufferTexture, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level))\n}",
"func (debugging *debuggingOpenGL) BindSampler(unit uint32, sampler uint32) {\n\tdebugging.recordEntry(\"BindSampler\", unit, sampler)\n\tdebugging.gl.BindSampler(unit, sampler)\n\tdebugging.recordExit(\"BindSampler\")\n}",
"func (self *TileSprite) SetTextureI(args ...interface{}) {\n self.Object.Call(\"setTexture\", args)\n}",
"func BindTransformFeedback(target uint32, id uint32) {\n C.glowBindTransformFeedback(gpBindTransformFeedback, (C.GLenum)(target), (C.GLuint)(id))\n}",
"func (w *Worley) GenerateTexture(tex *texture.Texture) {\n\tgl.BindImageTexture(0, tex.GetHandle(), 0, false, 0, gl.READ_WRITE, gl.RGBA32F)\n\tgl.BindImageTexture(1, w.noisetexture.GetHandle(), 0, false, 0, gl.READ_ONLY, gl.RGBA32F)\n\n\tw.computeshader.Use()\n\tw.computeshader.UpdateInt32(\"uWidth\", w.width)\n\tw.computeshader.UpdateInt32(\"uHeight\", w.height)\n\tw.computeshader.UpdateInt32(\"uResolution\", w.resolution)\n\tw.computeshader.UpdateInt32(\"uOctaves\", w.octaves)\n\tw.computeshader.UpdateFloat32(\"uRadius\", w.radius)\n\tw.computeshader.UpdateFloat32(\"uRadiusScale\", w.radiusscale)\n\tw.computeshader.UpdateFloat32(\"uBrightness\", w.brightness)\n\tw.computeshader.UpdateFloat32(\"uContrast\", w.contrast)\n\tw.computeshader.UpdateFloat32(\"uScale\", w.scale)\n\tw.computeshader.UpdateFloat32(\"uPersistance\", w.persistance)\n\tw.computeshader.Compute(uint32(w.width), uint32(w.height), 1)\n\tw.computeshader.Compute(1024, 1024, 1)\n\tw.computeshader.Release()\n\n\tgl.MemoryBarrier(gl.ALL_BARRIER_BITS)\n\n\tgl.BindImageTexture(0, 0, 0, false, 0, gl.WRITE_ONLY, gl.RGBA32F)\n\tgl.BindImageTexture(1, 0, 0, false, 0, gl.READ_ONLY, gl.RGBA32F)\n}",
"func (t *Three) Texture() *Texture {\n\tp := t.ctx.Get(\"Texture\")\n\treturn TextureFromJSObject(p)\n}",
"func TexImage1D(target uint32, level int32, internalformat int32, width int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexImage1D(gpTexImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexSubImage1D(target uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage1D(gpTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func (self *TileSprite) SetTilingTextureA(member *PIXITexture) {\n self.Object.Set(\"tilingTexture\", member)\n}",
"func (am *AssetManager) LoadTexture(name, iname string) {\n\tif strings.Contains(name, \".png\") {\n\t\tpic, err := LoadPng(am.texturesDir + name)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tam.Textures[iname] = pic\n\t} else {\n\t\tlog.Fatal(\"unable to find texture \" + (am.modelsDir + name))\n\t}\n}",
"func TexImage2D(target Enum, level int, width, height int, format Enum, ty Enum, data []byte) {\n\tp := unsafe.Pointer(nil)\n\tif len(data) > 0 {\n\t\tp = gl.Ptr(&data[0])\n\t}\n\tgl.TexImage2D(uint32(target), int32(level), int32(format), int32(width), int32(height), 0, uint32(format), uint32(ty), p)\n}",
"func (app *controlsTestApplication) Texturize(bmp *graphics.Bitmap) *graphics.BitmapTexture {\n\treturn graphics.NewBitmapTexture(app.gl, bmp.Width, bmp.Height, bmp.Pixels)\n}",
"func (tx *TextureBase) NewTex() gpu.Texture2D {\n\tif tx.Tex != nil {\n\t\treturn tx.Tex\n\t}\n\ttx.Tex = gpu.TheGPU.NewTexture2D(tx.Nm)\n\ttx.Tex.SetBotZero(tx.Bot0)\n\treturn tx.Tex\n}",
"func TexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexSubImage3D(gpTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexSubImage3D(gpTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func ActiveTexture(texture uint32) {\n\tsyscall.Syscall(gpActiveTexture, 1, uintptr(texture), 0, 0)\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func EGLImageTargetTextureStorageEXT(texture uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTextureStorageEXT(gpEGLImageTargetTextureStorageEXT, (C.GLuint)(texture), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func EGLImageTargetTextureStorageEXT(texture uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTextureStorageEXT(gpEGLImageTargetTextureStorageEXT, (C.GLuint)(texture), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func TexImage3D(target uint32, level int32, internalformat int32, width int32, height int32, depth int32, border int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexImage3D(gpTexImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLint)(border), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func (f *Font) GetTexture() *Texture { return f.texture }"
] | [
"0.75365347",
"0.73968655",
"0.72056687",
"0.70569825",
"0.69771206",
"0.6951689",
"0.69334",
"0.67844176",
"0.66177547",
"0.66002494",
"0.66002494",
"0.64977026",
"0.6485806",
"0.63824624",
"0.634882",
"0.6293033",
"0.6278351",
"0.61021316",
"0.61021316",
"0.60868174",
"0.6084762",
"0.60693765",
"0.6038776",
"0.5992881",
"0.59776294",
"0.5965854",
"0.59000325",
"0.5872619",
"0.5871173",
"0.5871173",
"0.58221185",
"0.5821153",
"0.58028716",
"0.5780977",
"0.5780977",
"0.57741874",
"0.5766956",
"0.5766956",
"0.5713922",
"0.56918055",
"0.56614244",
"0.56386274",
"0.5638389",
"0.5638389",
"0.5634006",
"0.562014",
"0.5618289",
"0.5618289",
"0.5602372",
"0.55946386",
"0.55793315",
"0.55678177",
"0.5562965",
"0.5551435",
"0.55229",
"0.5514305",
"0.55119276",
"0.5510295",
"0.5506419",
"0.55003303",
"0.54941094",
"0.54873836",
"0.5484185",
"0.5475656",
"0.54728484",
"0.5458107",
"0.54531574",
"0.54480124",
"0.54480124",
"0.54462665",
"0.54455626",
"0.5434489",
"0.54342735",
"0.5431306",
"0.5431306",
"0.5428534",
"0.5422497",
"0.54216844",
"0.54216844",
"0.541534",
"0.5410518",
"0.5408418",
"0.5398642",
"0.5397622",
"0.53966624",
"0.5390723",
"0.5388322",
"0.5385341",
"0.53851324",
"0.5373339",
"0.53692657",
"0.53346926",
"0.53346926",
"0.53286767",
"0.5320246",
"0.5316825",
"0.5316825",
"0.5312745",
"0.5299344"
] | 0.6833865 | 8 |
bind an existing texture object to the specified texture unit | func BindTextureUnit(unit uint32, texture uint32) {
C.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindTextureUnit(unit uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTextureUnit, 2, uintptr(unit), uintptr(texture), 0)\n}",
"func BindTexture(target uint32, texture uint32) {\n C.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func BindTexture(target Enum, t Texture) {\n\tgl.BindTexture(uint32(target), t.Value)\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n C.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func BindTexture(target uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTexture, 2, uintptr(target), uintptr(texture), 0)\n}",
"func (debugging *debuggingOpenGL) BindTexture(target uint32, texture uint32) {\n\tdebugging.recordEntry(\"BindTexture\", target, texture)\n\tdebugging.gl.BindTexture(target, texture)\n\tdebugging.recordExit(\"BindTexture\")\n}",
"func (t *Texture2D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_2D, t.ID)\n}",
"func (gl *WebGL) BindTexture(target GLEnum, texture WebGLTexture) {\n\tgl.context.Call(\"bindTexture\", target, texture)\n}",
"func (t Texture3D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_3D, t.id)\n}",
"func BindTexture(target GLEnum, texture Texture) {\n\tgl.BindTexture(uint32(target), uint32(texture))\n}",
"func BindTexture(target Enum, texture Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tctexture, _ := (C.GLuint)(texture), cgoAllocsUnknown\n\tC.glBindTexture(ctarget, ctexture)\n}",
"func (native *OpenGL) BindTexture(target uint32, texture uint32) {\n\tgl.BindTexture(target, texture)\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tsyscall.Syscall9(gpBindImageTexture, 7, uintptr(unit), uintptr(texture), uintptr(level), boolToUintptr(layered), uintptr(layer), uintptr(access), uintptr(format), 0, 0)\n}",
"func (c *Context) BindTexture(texture *Texture) {\n\tif texture == nil {\n\t\treturn\n\t}\n\tif c.currentTexture == nil || texture.id != c.currentTexture.id {\n\t\tgl.BindTexture(gl.TEXTURE_2D, texture.id)\n\t\tc.currentTexture = texture\n\t}\n}",
"func BindTexture(target uint32, texture uint32) {\n\tC.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func BindTexture(target uint32, texture uint32) {\n\tC.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func (self *TileSprite) SetTextureA(member *Texture) {\n self.Object.Set(\"texture\", member)\n}",
"func (c *Context) AddTextureObject(cfg *TextureConfig) (*TextureObject, error) {\n\n\t// var pbo uint32\n\t// gl.GenBuffers(1, &pbo)\n\t// gl.BindBuffer(gl.PIXEL_UNPACK_BUFFER, pbo)\n\t// // Write PBO with nil to initialize the space\n\t// gl.BufferData(gl.PIXEL_UNPACK_BUFFER, len(cfg.Image.Pix), nil, gl.STREAM_DRAW)\n\n\tmode := int32(gl.LINEAR)\n\tif cfg.Mode != 0 {\n\t\tmode = cfg.Mode\n\t}\n\n\tvar texID uint32\n\tgl.GenTextures(1, &texID)\n\tgl.ActiveTexture(gl.TEXTURE0)\n\tgl.BindTexture(gl.TEXTURE_2D, texID)\n\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, mode)\n\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, mode)\n\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)\n\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)\n\n\t// write texture with nil pointer to initialize the space\n\tgl.TexImage2D(gl.TEXTURE_2D, 0, gl.RGBA,\n\t\tint32(cfg.Image.Rect.Size().X), int32(cfg.Image.Rect.Size().Y),\n\t\t0, gl.RGBA, gl.UNSIGNED_BYTE, gl.Ptr(cfg.Image.Pix))\n\n\ttexLoc := c.GetUniformLocation(cfg.UniformName)\n\tgl.Uniform1i(texLoc, 0)\n\n\ttex := &TextureObject{\n\t\ttexID: texID,\n\t\ttexLoc: texLoc,\n\t\t//pbo: pbo,\n\t\timage: cfg.Image,\n\t}\n\tc.textures = append(c.textures, tex)\n\treturn tex, nil\n}",
"func BindSampler(unit uint32, sampler uint32) {\n C.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func (self *TileSprite) SetTexture(texture *Texture) {\n self.Object.Call(\"setTexture\", texture)\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tC.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tC.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n C.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (self *TileSprite) SetTexture1O(texture *Texture, destroy bool) {\n self.Object.Call(\"setTexture\", texture, destroy)\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n C.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n C.glowBindRenderbuffer(gpBindRenderbuffer, (C.GLenum)(target), (C.GLuint)(renderbuffer))\n}",
"func updateTextureVbo(data []float32, vbo uint32) {\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferSubData(gl.ARRAY_BUFFER, 0, len(data)*4, gl.Ptr(data))\n\tgl.BindBuffer(gl.ARRAY_BUFFER, 0)\n}",
"func (obj *Device) UpdateTexture(sourceTexture, destTexture *BaseTexture) Error {\n\tret, _, _ := syscall.Syscall(\n\t\tobj.vtbl.UpdateTexture,\n\t\t3,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(unsafe.Pointer(sourceTexture)),\n\t\tuintptr(unsafe.Pointer(destTexture)),\n\t)\n\treturn toErr(ret)\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (obj *Device) SetTexture(sampler uint32, texture BaseTextureImpl) Error {\n\tvar base uintptr\n\tif texture != nil {\n\t\tbase = texture.baseTexturePointer()\n\t}\n\tret, _, _ := syscall.Syscall(\n\t\tobj.vtbl.SetTexture,\n\t\t3,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(sampler),\n\t\tbase,\n\t)\n\treturn toErr(ret)\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tC.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tC.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func (self *GameObjectCreator) RenderTexture3O(width int, height int, key string) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", width, height, key)}\n}",
"func (self *GameObjectCreator) RenderTexture2O(width int, height int) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", width, height)}\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n C.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (gl *WebGL) UnbindTexture(target GLEnum) {\n\tgl.context.Call(\"bindTexture\", target, nil)\n}",
"func (self *GameObjectCreator) RenderTexture() *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\")}\n}",
"func (self *TileSprite) SetTextureI(args ...interface{}) {\n self.Object.Call(\"setTexture\", args)\n}",
"func (debugging *debuggingOpenGL) BindSampler(unit uint32, sampler uint32) {\n\tdebugging.recordEntry(\"BindSampler\", unit, sampler)\n\tdebugging.gl.BindSampler(unit, sampler)\n\tdebugging.recordExit(\"BindSampler\")\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tsyscall.Syscall(gpBindSampler, 2, uintptr(unit), uintptr(sampler), 0)\n}",
"func BindRenderbuffer(target GLEnum, renderbuffer Renderbuffer) {\n\tgl.BindRenderbuffer(uint32(target), uint32(renderbuffer))\n}",
"func FramebufferTexture(target uint32, attachment uint32, texture uint32, level int32) {\n C.glowFramebufferTexture(gpFramebufferTexture, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level))\n}",
"func (self *GameObjectCreator) RenderTexture1O(width int) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", width)}\n}",
"func (t Texture3D) Unbind() {\n\tgl.BindTexture(gl.TEXTURE_3D, 0)\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n\tC.glowBindRenderbuffer(gpBindRenderbuffer, (C.GLenum)(target), (C.GLuint)(renderbuffer))\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n\tC.glowBindRenderbuffer(gpBindRenderbuffer, (C.GLenum)(target), (C.GLuint)(renderbuffer))\n}",
"func (native *OpenGL) BindSampler(unit uint32, sampler uint32) {\n\tgl.BindSampler(unit, sampler)\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n\tsyscall.Syscall9(gpTextureView, 8, uintptr(texture), uintptr(target), uintptr(origtexture), uintptr(internalformat), uintptr(minlevel), uintptr(numlevels), uintptr(minlayer), uintptr(numlayers), 0)\n}",
"func BindRenderbuffer(target Enum, rb Renderbuffer) {\n\tgl.BindRenderbuffer(uint32(target), rb.Value)\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindImageTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func (self *TileSprite) LoadTexture1O(key interface{}, frame interface{}) {\n self.Object.Call(\"loadTexture\", key, frame)\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n\tC.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func TextureView(texture uint32, target uint32, origtexture uint32, internalformat uint32, minlevel uint32, numlevels uint32, minlayer uint32, numlayers uint32) {\n\tC.glowTextureView(gpTextureView, (C.GLuint)(texture), (C.GLenum)(target), (C.GLuint)(origtexture), (C.GLenum)(internalformat), (C.GLuint)(minlevel), (C.GLuint)(numlevels), (C.GLuint)(minlayer), (C.GLuint)(numlayers))\n}",
"func (self *TileSprite) LoadTexture(key interface{}) {\n self.Object.Call(\"loadTexture\", key)\n}",
"func (self *GameObjectCreator) RenderTexture4O(width int, height int, key string, addToCache bool) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", width, height, key, addToCache)}\n}",
"func (debugging *debuggingOpenGL) BindRenderbuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindRenderbuffer\", target, buffer)\n\tdebugging.gl.BindRenderbuffer(target, buffer)\n\tdebugging.recordExit(\"BindRenderbuffer\")\n}",
"func BindBuffer(target uint32, buffer uint32) {\n C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func (bm Blendmap) Texture() *gl.Texture {\n\treturn bm.Map.id\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n\tsyscall.Syscall(gpBindRenderbuffer, 2, uintptr(target), uintptr(renderbuffer), 0)\n}",
"func (self *TileSprite) Texture() *Texture{\n return &Texture{self.Object.Get(\"texture\")}\n}",
"func NewTexture(scene *Scene, element *Element) *Texture {\n\tt := &Texture{\n\t\tObject: *NewObject(scene, element),\n\t}\n\treturn t\n}",
"func (debugging *debuggingOpenGL) FramebufferTexture(target uint32, attachment uint32, texture uint32, level int32) {\n\tdebugging.recordEntry(\"FramebufferTexture\", target, attachment, texture, level)\n\tdebugging.gl.FramebufferTexture(target, attachment, texture, level)\n\tdebugging.recordExit(\"FramebufferTexture\")\n}",
"func (self *TileSprite) LoadTexture2O(key interface{}, frame interface{}, stopAnimation bool) {\n self.Object.Call(\"loadTexture\", key, frame, stopAnimation)\n}",
"func (spriteBatch *SpriteBatch) SetTexture(newtexture ITexture) {\n\tspriteBatch.texture = newtexture\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func TextureBuffer(texture uint32, internalformat uint32, buffer uint32) {\n\tsyscall.Syscall(gpTextureBuffer, 3, uintptr(texture), uintptr(internalformat), uintptr(buffer))\n}",
"func (self *Graphics) GenerateTexture() *Texture{\n return &Texture{self.Object.Call(\"generateTexture\")}\n}",
"func (am *Manager) AddTexture(t *Texture) error {\n\tif _, ok := am.Textures[t.Name]; ok {\n\t\treturn fmt.Errorf(\"asset.Manager.AddTexture error: texture %s already exists\", t.Name)\n\t}\n\n\tam.Textures[t.Name] = t\n\n\treturn nil\n}",
"func (self *TileSprite) SetTilingTextureA(member *PIXITexture) {\n self.Object.Set(\"tilingTexture\", member)\n}",
"func BindRenderbuffer(target Enum, renderbuffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcrenderbuffer, _ := (C.GLuint)(renderbuffer), cgoAllocsUnknown\n\tC.glBindRenderbuffer(ctarget, crenderbuffer)\n}",
"func (am *AssetManager) LoadTexture(name, iname string) {\n\tif strings.Contains(name, \".png\") {\n\t\tpic, err := LoadPng(am.texturesDir + name)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tam.Textures[iname] = pic\n\t} else {\n\t\tlog.Fatal(\"unable to find texture \" + (am.modelsDir + name))\n\t}\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n C.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))\n}",
"func (native *OpenGL) BindRenderbuffer(target uint32, renderbuffer uint32) {\n\tgl.BindRenderbuffer(target, renderbuffer)\n}",
"func UniformBlockBinding(program uint32, uniformBlockIndex uint32, uniformBlockBinding uint32) {\n C.glowUniformBlockBinding(gpUniformBlockBinding, (C.GLuint)(program), (C.GLuint)(uniformBlockIndex), (C.GLuint)(uniformBlockBinding))\n}",
"func (t *Three) Texture() *Texture {\n\tp := t.ctx.Get(\"Texture\")\n\treturn TextureFromJSObject(p)\n}",
"func (f *Framebuffer) Texture2D(attachment gfx.FramebufferAttachment, target gfx.TextureTarget, tex gfx.Texture) {\n\tf.useState()\n\tf.ctx.O.Call(\n\t\t\"framebufferTexture2D\",\n\t\tf.ctx.FRAMEBUFFER,\n\t\tf.ctx.Enums[int(attachment)],\n\t\tf.ctx.Enums[int(target)],\n\t\ttex.Object().(*js.Object),\n\t\t0,\n\t)\n}",
"func (this *RectangleShape) SetTexture(texture *Texture, resetRect bool) {\n\tC.sfRectangleShape_setTexture(this.cptr, texture.cptr, goBool2C(resetRect))\n\tthis.texture = texture\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func (gl *WebGL) ActiveTexture(target GLEnum) {\n\tgl.context.Call(\"activeTexture\", target)\n}",
"func (self *GameObjectCreator) RenderTextureI(args ...interface{}) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", args)}\n}",
"func (w *Worley) GenerateTexture(tex *texture.Texture) {\n\tgl.BindImageTexture(0, tex.GetHandle(), 0, false, 0, gl.READ_WRITE, gl.RGBA32F)\n\tgl.BindImageTexture(1, w.noisetexture.GetHandle(), 0, false, 0, gl.READ_ONLY, gl.RGBA32F)\n\n\tw.computeshader.Use()\n\tw.computeshader.UpdateInt32(\"uWidth\", w.width)\n\tw.computeshader.UpdateInt32(\"uHeight\", w.height)\n\tw.computeshader.UpdateInt32(\"uResolution\", w.resolution)\n\tw.computeshader.UpdateInt32(\"uOctaves\", w.octaves)\n\tw.computeshader.UpdateFloat32(\"uRadius\", w.radius)\n\tw.computeshader.UpdateFloat32(\"uRadiusScale\", w.radiusscale)\n\tw.computeshader.UpdateFloat32(\"uBrightness\", w.brightness)\n\tw.computeshader.UpdateFloat32(\"uContrast\", w.contrast)\n\tw.computeshader.UpdateFloat32(\"uScale\", w.scale)\n\tw.computeshader.UpdateFloat32(\"uPersistance\", w.persistance)\n\tw.computeshader.Compute(uint32(w.width), uint32(w.height), 1)\n\tw.computeshader.Compute(1024, 1024, 1)\n\tw.computeshader.Release()\n\n\tgl.MemoryBarrier(gl.ALL_BARRIER_BITS)\n\n\tgl.BindImageTexture(0, 0, 0, false, 0, gl.WRITE_ONLY, gl.RGBA32F)\n\tgl.BindImageTexture(1, 0, 0, false, 0, gl.READ_ONLY, gl.RGBA32F)\n}",
"func TextureSubImage3D(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall12(gpTextureSubImage3D, 11, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(zoffset), uintptr(width), uintptr(height), uintptr(depth), uintptr(format), uintptr(xtype), uintptr(pixels), 0)\n}",
"func (c *Button) loadTextureFromTTF() {\n\tvar err error\n\tc.font = engosdl.GetFontManager().CreateFont(c.GetName(), c.FontFile, c.FontSize)\n\tc.texture = c.font.GetTextureFromFont(c.Message, c.Color)\n\t_, _, c.width, c.height, err = c.texture.Query()\n\tif err != nil {\n\t\tengosdl.Logger.Error().Err(err).Msg(\"Query error\")\n\t\tpanic(err)\n\t}\n\tc.GetEntity().GetTransform().SetDim(engosdl.NewVector(float64(c.width), float64(c.height)))\n}",
"func ActiveTexture(texture uint32) {\n C.glowActiveTexture(gpActiveTexture, (C.GLenum)(texture))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n C.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func TexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage3D(gpTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func (self *TileSprite) SetTintedTextureA(member *Canvas) {\n self.Object.Set(\"tintedTexture\", member)\n}",
"func TexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage2D(gpTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func (self *TileSprite) OnTextureUpdate(event interface{}) {\n self.Object.Call(\"onTextureUpdate\", event)\n}",
"func TextureSubImage2D(texture uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall9(gpTextureSubImage2D, 9, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(width), uintptr(height), uintptr(format), uintptr(xtype), uintptr(pixels))\n}",
"func FramebufferTextureLayer(target uint32, attachment uint32, texture uint32, level int32, layer int32) {\n C.glowFramebufferTextureLayer(gpFramebufferTextureLayer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(layer))\n}",
"func Make(width, height int, internalformat int32, format, pixelType uint32,\n\tdata unsafe.Pointer, min, mag, s, t int32) Texture {\n\n\ttexture := Texture{0, gl.TEXTURE_2D, 0}\n\n\t// generate and bind texture\n\tgl.GenTextures(1, &texture.handle)\n\ttexture.Bind(0)\n\n\t// set texture properties\n\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, min)\n\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, mag)\n\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, s)\n\tgl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, t)\n\n\t// specify a texture image\n\tgl.TexImage2D(gl.TEXTURE_2D, 0, internalformat, int32(width), int32(height),\n\t\t0, format, pixelType, data)\n\n\t// unbind texture\n\ttexture.Unbind()\n\n\treturn texture\n}",
"func (animation *AnimationSet) AddTexture(texture *Texture) {\n\tsimlog.FuncIn()\n\tanimation.textures = append(animation.textures, texture)\n\tsimlog.FuncOut()\n}",
"func (self *Graphics) GenerateTexture1O(resolution int) *Texture{\n return &Texture{self.Object.Call(\"generateTexture\", resolution)}\n}",
"func BindBuffer(target Enum, b Buffer) {\n\tgl.BindBuffer(uint32(target), b.Value)\n}",
"func (self *Graphics) GenerateTexture2O(resolution int, scaleMode int) *Texture{\n return &Texture{self.Object.Call(\"generateTexture\", resolution, scaleMode)}\n}"
] | [
"0.7944882",
"0.7141327",
"0.7066944",
"0.70379597",
"0.69296485",
"0.6858803",
"0.6800018",
"0.6781621",
"0.6781285",
"0.6778453",
"0.6636072",
"0.6617039",
"0.6567981",
"0.65639764",
"0.64981437",
"0.64981437",
"0.6295844",
"0.62708545",
"0.6270802",
"0.6261718",
"0.6245072",
"0.6245072",
"0.6235691",
"0.6209771",
"0.612629",
"0.6105346",
"0.60828143",
"0.6070847",
"0.59799135",
"0.59710383",
"0.59710383",
"0.59539014",
"0.5931821",
"0.5931821",
"0.5918407",
"0.58965087",
"0.58862257",
"0.58838946",
"0.582722",
"0.58149177",
"0.5792632",
"0.5792193",
"0.5771302",
"0.57566863",
"0.5710121",
"0.5702316",
"0.57020956",
"0.57020956",
"0.56998354",
"0.56969744",
"0.56934637",
"0.5684608",
"0.5669352",
"0.5636915",
"0.5636915",
"0.56252205",
"0.5607815",
"0.5607313",
"0.5605351",
"0.56029904",
"0.56011385",
"0.56002367",
"0.55896777",
"0.5557968",
"0.55155665",
"0.5492506",
"0.5492208",
"0.5481271",
"0.5476883",
"0.54641664",
"0.5454424",
"0.54541165",
"0.5452608",
"0.5452039",
"0.54422134",
"0.54096574",
"0.5401207",
"0.5386257",
"0.5385246",
"0.5380177",
"0.5380177",
"0.5373289",
"0.53729904",
"0.53639513",
"0.5350581",
"0.5349032",
"0.5324349",
"0.53219783",
"0.53150004",
"0.5304096",
"0.5299353",
"0.52902514",
"0.52742726",
"0.52643794",
"0.5260727",
"0.5256475",
"0.52495277",
"0.5244961",
"0.5241873"
] | 0.798627 | 1 |
bind one or more named textures to a sequence of consecutive texture units | func BindTextures(first uint32, count int32, textures *uint32) {
C.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindTextures(first uint32, count int32, textures *uint32) {\n C.glowBindTextures(gpBindTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n C.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tsyscall.Syscall(gpBindImageTextures, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(textures)))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func BindImageTextures(first uint32, count int32, textures *uint32) {\n\tC.glowBindImageTextures(gpBindImageTextures, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(textures)))\n}",
"func LoadTextures(eng sprite.Engine) map[string]sprite.SubTex {\n\tallTexs := make(map[string]sprite.SubTex)\n\tboundedImgs := []string{\"Clubs-2.png\", \"Clubs-3.png\", \"Clubs-4.png\", \"Clubs-5.png\", \"Clubs-6.png\", \"Clubs-7.png\", \"Clubs-8.png\",\n\t\t\"Clubs-9.png\", \"Clubs-10.png\", \"Clubs-Jack.png\", \"Clubs-Queen.png\", \"Clubs-King.png\", \"Clubs-Ace.png\",\n\t\t\"Diamonds-2.png\", \"Diamonds-3.png\", \"Diamonds-4.png\", \"Diamonds-5.png\", \"Diamonds-6.png\", \"Diamonds-7.png\", \"Diamonds-8.png\",\n\t\t\"Diamonds-9.png\", \"Diamonds-10.png\", \"Diamonds-Jack.png\", \"Diamonds-Queen.png\", \"Diamonds-King.png\", \"Diamonds-Ace.png\",\n\t\t\"Spades-2.png\", \"Spades-3.png\", \"Spades-4.png\", \"Spades-5.png\", \"Spades-6.png\", \"Spades-7.png\", \"Spades-8.png\",\n\t\t\"Spades-9.png\", \"Spades-10.png\", \"Spades-Jack.png\", \"Spades-Queen.png\", \"Spades-King.png\", \"Spades-Ace.png\",\n\t\t\"Hearts-2.png\", \"Hearts-3.png\", \"Hearts-4.png\", \"Hearts-5.png\", \"Hearts-6.png\", \"Hearts-7.png\", \"Hearts-8.png\",\n\t\t\"Hearts-9.png\", \"Hearts-10.png\", \"Hearts-Jack.png\", \"Hearts-Queen.png\", \"Hearts-King.png\", \"Hearts-Ace.png\", \"BakuSquare.png\",\n\t}\n\tunboundedImgs := []string{\"Club.png\", \"Diamond.png\", \"Spade.png\", \"Heart.png\", \"gray.jpeg\", \"blue.png\", \"trickDrop.png\",\n\t\t\"trickDropBlue.png\", \"player0.jpeg\", \"player1.jpeg\", \"player2.jpeg\", \"player3.jpeg\", \"laptopIcon.png\", \"watchIcon.png\",\n\t\t\"phoneIcon.png\", \"tabletIcon.png\", \"A-Upper.png\", \"B-Upper.png\", \"C-Upper.png\", \"D-Upper.png\", \"E-Upper.png\", \"F-Upper.png\",\n\t\t\"G-Upper.png\", \"H-Upper.png\", \"I-Upper.png\", \"J-Upper.png\", \"K-Upper.png\", \"L-Upper.png\", \"M-Upper.png\", \"N-Upper.png\",\n\t\t\"O-Upper.png\", \"P-Upper.png\", \"Q-Upper.png\", \"R-Upper.png\", \"S-Upper.png\", \"T-Upper.png\", \"U-Upper.png\", \"V-Upper.png\",\n\t\t\"W-Upper.png\", \"X-Upper.png\", \"Y-Upper.png\", \"Z-Upper.png\", \"A-Lower.png\", \"B-Lower.png\", \"C-Lower.png\", \"D-Lower.png\",\n\t\t\"E-Lower.png\", \"F-Lower.png\", \"G-Lower.png\", \"H-Lower.png\", \"I-Lower.png\", \"J-Lower.png\", \"K-Lower.png\", \"L-Lower.png\",\n\t\t\"M-Lower.png\", \"N-Lower.png\", \"O-Lower.png\", \"P-Lower.png\", \"Q-Lower.png\", \"R-Lower.png\", \"S-Lower.png\", \"T-Lower.png\",\n\t\t\"U-Lower.png\", \"V-Lower.png\", \"W-Lower.png\", \"X-Lower.png\", \"Y-Lower.png\", \"Z-Lower.png\", \"Space.png\", \"Colon.png\", \"Bang.png\",\n\t\t\"Apostrophe.png\", \"1.png\", \"2.png\", \"3.png\", \"4.png\", \"5.png\", \"6.png\", \"7.png\", \"8.png\", \"9.png\", \"0.png\", \"1-Red.png\",\n\t\t\"2-Red.png\", \"3-Red.png\", \"4-Red.png\", \"5-Red.png\", \"6-Red.png\", \"7-Red.png\", \"8-Red.png\", \"9-Red.png\", \"0-Red.png\",\n\t\t\"1-DBlue.png\", \"2-DBlue.png\", \"3-DBlue.png\", \"4-DBlue.png\", \"5-DBlue.png\", \"6-DBlue.png\", \"7-DBlue.png\", \"8-DBlue.png\",\n\t\t\"9-DBlue.png\", \"0-DBlue.png\", \"A-Upper-DBlue.png\", \"B-Upper-DBlue.png\",\n\t\t\"C-Upper-DBlue.png\", \"D-Upper-DBlue.png\", \"E-Upper-DBlue.png\", \"F-Upper-DBlue.png\", \"G-Upper-DBlue.png\", \"H-Upper-DBlue.png\",\n\t\t\"I-Upper-DBlue.png\", \"J-Upper-DBlue.png\", \"K-Upper-DBlue.png\", \"L-Upper-DBlue.png\", \"M-Upper-DBlue.png\", \"N-Upper-DBlue.png\",\n\t\t\"O-Upper-DBlue.png\", \"P-Upper-DBlue.png\", \"Q-Upper-DBlue.png\", \"R-Upper-DBlue.png\", \"S-Upper-DBlue.png\", \"T-Upper-DBlue.png\",\n\t\t\"U-Upper-DBlue.png\", \"V-Upper-DBlue.png\", \"W-Upper-DBlue.png\", \"X-Upper-DBlue.png\", \"Y-Upper-DBlue.png\", 
\"Z-Upper-DBlue.png\",\n\t\t\"A-Lower-DBlue.png\", \"B-Lower-DBlue.png\", \"C-Lower-DBlue.png\", \"D-Lower-DBlue.png\", \"E-Lower-DBlue.png\", \"F-Lower-DBlue.png\",\n\t\t\"G-Lower-DBlue.png\", \"H-Lower-DBlue.png\", \"I-Lower-DBlue.png\", \"J-Lower-DBlue.png\", \"K-Lower-DBlue.png\", \"L-Lower-DBlue.png\",\n\t\t\"M-Lower-DBlue.png\", \"N-Lower-DBlue.png\", \"O-Lower-DBlue.png\", \"P-Lower-DBlue.png\", \"Q-Lower-DBlue.png\", \"R-Lower-DBlue.png\",\n\t\t\"S-Lower-DBlue.png\", \"T-Lower-DBlue.png\", \"U-Lower-DBlue.png\", \"V-Lower-DBlue.png\", \"W-Lower-DBlue.png\", \"X-Lower-DBlue.png\",\n\t\t\"Y-Lower-DBlue.png\", \"Z-Lower-DBlue.png\", \"Apostrophe-DBlue.png\", \"Space-DBlue.png\", \"A-Upper-LBlue.png\", \"B-Upper-LBlue.png\",\n\t\t\"C-Upper-LBlue.png\", \"D-Upper-LBlue.png\", \"E-Upper-LBlue.png\", \"F-Upper-LBlue.png\", \"G-Upper-LBlue.png\", \"H-Upper-LBlue.png\",\n\t\t\"I-Upper-LBlue.png\", \"J-Upper-LBlue.png\", \"K-Upper-LBlue.png\", \"L-Upper-LBlue.png\", \"M-Upper-LBlue.png\", \"N-Upper-LBlue.png\",\n\t\t\"O-Upper-LBlue.png\", \"P-Upper-LBlue.png\", \"Q-Upper-LBlue.png\", \"R-Upper-LBlue.png\", \"S-Upper-LBlue.png\", \"T-Upper-LBlue.png\",\n\t\t\"U-Upper-LBlue.png\", \"V-Upper-LBlue.png\", \"W-Upper-LBlue.png\", \"X-Upper-LBlue.png\", \"Y-Upper-LBlue.png\", \"Z-Upper-LBlue.png\",\n\t\t\"A-Lower-LBlue.png\", \"B-Lower-LBlue.png\", \"C-Lower-LBlue.png\", \"D-Lower-LBlue.png\", \"E-Lower-LBlue.png\", \"F-Lower-LBlue.png\",\n\t\t\"G-Lower-LBlue.png\", \"H-Lower-LBlue.png\", \"I-Lower-LBlue.png\", \"J-Lower-LBlue.png\", \"K-Lower-LBlue.png\", \"L-Lower-LBlue.png\",\n\t\t\"M-Lower-LBlue.png\", \"N-Lower-LBlue.png\", \"O-Lower-LBlue.png\", \"P-Lower-LBlue.png\", \"Q-Lower-LBlue.png\", \"R-Lower-LBlue.png\",\n\t\t\"S-Lower-LBlue.png\", \"T-Lower-LBlue.png\", \"U-Lower-LBlue.png\", \"V-Lower-LBlue.png\", \"W-Lower-LBlue.png\", \"X-Lower-LBlue.png\",\n\t\t\"Y-Lower-LBlue.png\", \"Z-Lower-LBlue.png\", \"A-Upper-Gray.png\", \"B-Upper-Gray.png\", \"C-Upper-Gray.png\", \"D-Upper-Gray.png\",\n\t\t\"E-Upper-Gray.png\", \"F-Upper-Gray.png\", \"G-Upper-Gray.png\", \"H-Upper-Gray.png\", \"I-Upper-Gray.png\", \"J-Upper-Gray.png\",\n\t\t\"K-Upper-Gray.png\", \"L-Upper-Gray.png\", \"M-Upper-Gray.png\", \"N-Upper-Gray.png\", \"O-Upper-Gray.png\", \"P-Upper-Gray.png\",\n\t\t\"Q-Upper-Gray.png\", \"R-Upper-Gray.png\", \"S-Upper-Gray.png\", \"T-Upper-Gray.png\", \"U-Upper-Gray.png\", \"V-Upper-Gray.png\",\n\t\t\"W-Upper-Gray.png\", \"X-Upper-Gray.png\", \"Y-Upper-Gray.png\", \"Z-Upper-Gray.png\", \"A-Lower-Gray.png\", \"B-Lower-Gray.png\",\n\t\t\"C-Lower-Gray.png\", \"D-Lower-Gray.png\", \"E-Lower-Gray.png\", \"F-Lower-Gray.png\", \"G-Lower-Gray.png\", \"H-Lower-Gray.png\",\n\t\t\"I-Lower-Gray.png\", \"J-Lower-Gray.png\", \"K-Lower-Gray.png\", \"L-Lower-Gray.png\", \"M-Lower-Gray.png\", \"N-Lower-Gray.png\",\n\t\t\"O-Lower-Gray.png\", \"P-Lower-Gray.png\", \"Q-Lower-Gray.png\", \"R-Lower-Gray.png\", \"S-Lower-Gray.png\", \"T-Lower-Gray.png\",\n\t\t\"U-Lower-Gray.png\", \"V-Lower-Gray.png\", \"W-Lower-Gray.png\", \"X-Lower-Gray.png\", \"Y-Lower-Gray.png\", \"Z-Lower-Gray.png\",\n\t\t\"Space-Gray.png\", \"RoundedRectangle-DBlue.png\", \"RoundedRectangle-LBlue.png\", \"RoundedRectangle-Gray.png\", \"Rectangle-LBlue.png\",\n\t\t\"Rectangle-DBlue.png\", \"HorizontalPullTab.png\", \"VerticalPullTab.png\", \"NewGamePressed.png\", \"NewGameUnpressed.png\",\n\t\t\"NewRoundPressed.png\", \"NewRoundUnpressed.png\", \"JoinGamePressed.png\", \"JoinGameUnpressed.png\", \"Period.png\",\n\t\t\"SitSpotPressed.png\", 
\"SitSpotUnpressed.png\", \"WatchSpotPressed.png\", \"WatchSpotUnpressed.png\", \"StartBlue.png\", \"StartGray.png\",\n\t\t\"StartBluePressed.png\", \"Restart.png\", \"Visibility.png\", \"VisibilityOff.png\", \"QuitPressed.png\", \"QuitUnpressed.png\",\n\t\t\"PassPressed.png\", \"PassUnpressed.png\", \"RightArrowBlue.png\", \"LeftArrowBlue.png\", \"AcrossArrowBlue.png\", \"RightArrowGray.png\",\n\t\t\"LeftArrowGray.png\", \"AcrossArrowGray.png\", \"TakeTrickTableUnpressed.png\", \"TakeTrickTablePressed.png\", \"TakeTrickHandPressed.png\",\n\t\t\"TakeTrickHandUnpressed.png\", \"android.png\", \"cat.png\", \"man.png\", \"woman.png\", \"TakeUnpressed.png\", \"TakePressed.png\",\n\t\t\"UnplayedBorder1.png\", \"UnplayedBorder2.png\", \"RejoinPressed.png\", \"RejoinUnpressed.png\",\n\t}\n\tfor _, f := range boundedImgs {\n\t\ta, err := asset.Open(f)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\timg, _, err := image.Decode(a)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tt, err := eng.LoadTexture(img)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\timgWidth, imgHeight := t.Bounds()\n\t\tallTexs[f] = sprite.SubTex{t, image.Rect(0, 0, imgWidth, imgHeight)}\n\t\ta.Close()\n\t}\n\tfor _, f := range unboundedImgs {\n\t\ta, err := asset.Open(f)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\timg, _, err := image.Decode(a)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tt, err := eng.LoadTexture(img)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\timgWidth, imgHeight := t.Bounds()\n\t\tallTexs[f] = sprite.SubTex{t, image.Rect(1, 1, imgWidth-1, imgHeight-1)}\n\t\ta.Close()\n\t}\n\treturn allTexs\n}",
"func loadTextures() {\n\tfor i := 0; i < 7; i++ {\n\n\t\ttextures[i], _, _ = ebutil.NewImageFromFile(\"assets/image/\"+colors[i]+\".png\", eb.FilterDefault)\n\t}\n\ttextures[7], _, _ = ebutil.NewImageFromFile(\"assets/image/tetris_backgraund.png\", eb.FilterDefault)\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tC.glowBindTextureUnit(gpBindTextureUnit, (C.GLuint)(unit), (C.GLuint)(texture))\n}",
"func (level *Level) SetTextures(newIds []int) {\n\tblockStore := level.store.Get(res.ResourceID(4000 + level.id*100 + 7))\n\tvar ids [54]uint16\n\ttoCopy := len(ids)\n\n\tif len(newIds) < toCopy {\n\t\ttoCopy = len(newIds)\n\t}\n\tfor index := 0; index < len(ids); index++ {\n\t\tids[index] = uint16(newIds[index])\n\t}\n\n\tbuffer := bytes.NewBuffer(nil)\n\tbinary.Write(buffer, binary.LittleEndian, &ids)\n\tblockStore.SetBlockData(0, buffer.Bytes())\n}",
"func BindTextureUnit(unit uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTextureUnit, 2, uintptr(unit), uintptr(texture), 0)\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n C.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n C.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func (am *Manager) LoadTextures(names ...string) ([]*Texture, error) {\n\tvar (\n\t\ttextures = make([]*Texture, len(names))\n\t\terr error\n\t)\n\n\tfor i, name := range names {\n\t\ttextures[i], err = am.LoadTexture(name)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn textures, nil\n}",
"func BindTexture(target Enum, t Texture) {\n\tgl.BindTexture(uint32(target), t.Value)\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tC.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tC.glowBindSamplers(gpBindSamplers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(samplers)))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n C.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func (t Texture3D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_3D, t.id)\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tsyscall.Syscall9(gpBindImageTexture, 7, uintptr(unit), uintptr(texture), uintptr(level), boolToUintptr(layered), uintptr(layer), uintptr(access), uintptr(format), 0, 0)\n}",
"func BindTexture(target uint32, texture uint32) {\n C.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func BindSamplers(first uint32, count int32, samplers *uint32) {\n\tsyscall.Syscall(gpBindSamplers, 3, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(samplers)))\n}",
"func (t *Texture2D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_2D, t.ID)\n}",
"func BindTexture(target Enum, texture Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tctexture, _ := (C.GLuint)(texture), cgoAllocsUnknown\n\tC.glBindTexture(ctarget, ctexture)\n}",
"func BindTexture(target uint32, texture uint32) {\n\tsyscall.Syscall(gpBindTexture, 2, uintptr(target), uintptr(texture), 0)\n}",
"func getTextureIds(texInfos []TexInfo) map[string]int {\n\ttextureIds := make(map[string]int)\n\tnextId := 0\n\tfor i := 0; i < len(texInfos); i++ {\n\t\ttexInfo := texInfos[i]\n\n\t\t// convert filename byte array to string\n\t\tfilename := \"\"\n\t\tfor j := 0; j < len(texInfo.TextureName); j++ {\n\t\t\t// end of string\n\t\t\tif texInfo.TextureName[j] == 0 {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tfilename += string(texInfo.TextureName[j])\n\t\t}\n\n\t\t// generate new id for texture if necessary\n\t\t_, exists := textureIds[filename]\n\t\tif !exists {\n\t\t\ttextureIds[filename] = nextId\n\t\t\tnextId++\n\t\t}\n\t}\n\treturn textureIds\n}",
"func TextureSubImage1D(texture uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall9(gpTextureSubImage1D, 7, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(width), uintptr(format), uintptr(xtype), uintptr(pixels), 0, 0)\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n C.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func loadAllReferenceTextures(compMesh *component.Mesh) {\n\tfor _, texFile := range compMesh.Material.Textures {\n\t\tdoLoadTexture(texFile)\n\t}\n\tif len(compMesh.Material.DiffuseTexture) > 0 {\n\t\tdoLoadTexture(compMesh.Material.DiffuseTexture)\n\t}\n\tif len(compMesh.Material.NormalsTexture) > 0 {\n\t\tdoLoadTexture(compMesh.Material.NormalsTexture)\n\t}\n\tif len(compMesh.Material.SpecularTexture) > 0 {\n\t\tdoLoadTexture(compMesh.Material.SpecularTexture)\n\t}\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tC.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func BindImageTexture(unit uint32, texture uint32, level int32, layered bool, layer int32, access uint32, format uint32) {\n\tC.glowBindImageTexture(gpBindImageTexture, (C.GLuint)(unit), (C.GLuint)(texture), (C.GLint)(level), (C.GLboolean)(boolToInt(layered)), (C.GLint)(layer), (C.GLenum)(access), (C.GLenum)(format))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tC.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tC.glowBindSampler(gpBindSampler, (C.GLuint)(unit), (C.GLuint)(sampler))\n}",
"func BindSampler(unit uint32, sampler uint32) {\n\tsyscall.Syscall(gpBindSampler, 2, uintptr(unit), uintptr(sampler), 0)\n}",
"func (native *OpenGL) GenTextures(n int32) []uint32 {\n\tids := make([]uint32, n)\n\tgl.GenTextures(n, &ids[0])\n\treturn ids\n}",
"func BindTexture(target GLEnum, texture Texture) {\n\tgl.BindTexture(uint32(target), uint32(texture))\n}",
"func (self *TileSprite) SetTextureI(args ...interface{}) {\n self.Object.Call(\"setTexture\", args)\n}",
"func TextureSubImage1D(texture uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage1D(gpTextureSubImage1D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TextureSubImage1D(texture uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage1D(gpTextureSubImage1D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func (self *TileSprite) SetTextureA(member *Texture) {\n self.Object.Set(\"texture\", member)\n}",
"func GenTextures(n Sizei, textures []Uint) {\n\tcn, _ := (C.GLsizei)(n), cgoAllocsUnknown\n\tctextures, _ := (*C.GLuint)(unsafe.Pointer((*sliceHeader)(unsafe.Pointer(&textures)).Data)), cgoAllocsUnknown\n\tC.glGenTextures(cn, ctextures)\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func (level *Level) TextureAnimations() (result []model.TextureAnimation) {\n\tlevel.mutex.Lock()\n\tdefer level.mutex.Unlock()\n\tvar rawEntries [4]data.TextureAnimationEntry\n\n\tresult = make([]model.TextureAnimation, len(rawEntries))\n\tlevel.readTable(42, &rawEntries)\n\tfor index := 0; index < len(rawEntries); index++ {\n\t\tresultEntry := &result[index]\n\t\trawEntry := &rawEntries[index]\n\n\t\tresultEntry.FrameCount = intAsPointer(int(rawEntry.FrameCount))\n\t\tresultEntry.FrameTime = intAsPointer(int(rawEntry.FrameTime))\n\t\tresultEntry.LoopType = intAsPointer(int(rawEntry.LoopType))\n\t}\n\treturn\n}",
"func (debugging *debuggingOpenGL) GenTextures(n int32) []uint32 {\n\tdebugging.recordEntry(\"GenTextures\", n)\n\tresult := debugging.gl.GenTextures(n)\n\tdebugging.recordExit(\"GenTextures\", result)\n\treturn result\n}",
"func TexSubImage1D(target uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n C.glowTexSubImage1D(gpTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func PrioritizeTextures(n int32, textures *uint32, priorities *float32) {\n C.glowPrioritizeTextures(gpPrioritizeTextures, (C.GLsizei)(n), (*C.GLuint)(unsafe.Pointer(textures)), (*C.GLfloat)(unsafe.Pointer(priorities)))\n}",
"func BindTexture(target uint32, texture uint32) {\n\tC.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func BindTexture(target uint32, texture uint32) {\n\tC.glowBindTexture(gpBindTexture, (C.GLenum)(target), (C.GLuint)(texture))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n C.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func (self *TileSprite) SetTexture1O(texture *Texture, destroy bool) {\n self.Object.Call(\"setTexture\", texture, destroy)\n}",
"func NamedRenderbufferStorageMultisample(renderbuffer uint32, samples int32, internalformat uint32, width int32, height int32) {\n\tsyscall.Syscall6(gpNamedRenderbufferStorageMultisample, 5, uintptr(renderbuffer), uintptr(samples), uintptr(internalformat), uintptr(width), uintptr(height), 0)\n}",
"func (animation *AnimationSet) AddTexture(texture *Texture) {\n\tsimlog.FuncIn()\n\tanimation.textures = append(animation.textures, texture)\n\tsimlog.FuncOut()\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n C.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func (debugging *debuggingOpenGL) BindTexture(target uint32, texture uint32) {\n\tdebugging.recordEntry(\"BindTexture\", target, texture)\n\tdebugging.gl.BindTexture(target, texture)\n\tdebugging.recordExit(\"BindTexture\")\n}",
"func (self *TileSprite) LoadTexture1O(key interface{}, frame interface{}) {\n self.Object.Call(\"loadTexture\", key, frame)\n}",
"func (gl *WebGL) BindTexture(target GLEnum, texture WebGLTexture) {\n\tgl.context.Call(\"bindTexture\", target, texture)\n}",
"func NamedRenderbufferStorageMultisample(renderbuffer uint32, samples int32, internalformat uint32, width int32, height int32) {\n\tC.glowNamedRenderbufferStorageMultisample(gpNamedRenderbufferStorageMultisample, (C.GLuint)(renderbuffer), (C.GLsizei)(samples), (C.GLenum)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func NamedRenderbufferStorageMultisample(renderbuffer uint32, samples int32, internalformat uint32, width int32, height int32) {\n\tC.glowNamedRenderbufferStorageMultisample(gpNamedRenderbufferStorageMultisample, (C.GLuint)(renderbuffer), (C.GLsizei)(samples), (C.GLenum)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func TextureStorage1D(texture uint32, levels int32, internalformat uint32, width int32) {\n\tsyscall.Syscall6(gpTextureStorage1D, 4, uintptr(texture), uintptr(levels), uintptr(internalformat), uintptr(width), 0, 0)\n}",
"func (native *OpenGL) BindTexture(target uint32, texture uint32) {\n\tgl.BindTexture(target, texture)\n}",
"func TextureSubImage3D(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall12(gpTextureSubImage3D, 11, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(zoffset), uintptr(width), uintptr(height), uintptr(depth), uintptr(format), uintptr(xtype), uintptr(pixels), 0)\n}",
"func EGLImageTargetTextureStorageEXT(texture uint32, image unsafe.Pointer, attrib_list *int32) {\n\tsyscall.Syscall(gpEGLImageTargetTextureStorageEXT, 3, uintptr(texture), uintptr(image), uintptr(unsafe.Pointer(attrib_list)))\n}",
"func TextureStorage2DMultisample(texture uint32, samples int32, internalformat uint32, width int32, height int32, fixedsamplelocations bool) {\n\tsyscall.Syscall6(gpTextureStorage2DMultisample, 6, uintptr(texture), uintptr(samples), uintptr(internalformat), uintptr(width), uintptr(height), boolToUintptr(fixedsamplelocations))\n}",
"func (am *Manager) AddTexture(t *Texture) error {\n\tif _, ok := am.Textures[t.Name]; ok {\n\t\treturn fmt.Errorf(\"asset.Manager.AddTexture error: texture %s already exists\", t.Name)\n\t}\n\n\tam.Textures[t.Name] = t\n\n\treturn nil\n}",
"func NamedFramebufferTextureLayer(framebuffer uint32, attachment uint32, texture uint32, level int32, layer int32) {\n\tsyscall.Syscall6(gpNamedFramebufferTextureLayer, 5, uintptr(framebuffer), uintptr(attachment), uintptr(texture), uintptr(level), uintptr(layer), 0)\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n C.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func (self *GameObjectCreator) RenderTexture1O(width int) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", width)}\n}",
"func Prepare(paths []string) []*bimg.Image {\n\tvar imgs []*bimg.Image\n\tfor _, path := range paths {\n\t\timgs = append(imgs, NewImage(path))\n\t}\n\treturn imgs\n}",
"func TextureSubImage2D(texture uint32, level int32, xoffset int32, yoffset int32, width int32, height int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tsyscall.Syscall9(gpTextureSubImage2D, 9, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(width), uintptr(height), uintptr(format), uintptr(xtype), uintptr(pixels))\n}",
"func (self *TileSprite) SetTexture(texture *Texture) {\n self.Object.Call(\"setTexture\", texture)\n}",
"func getStringImgs(input, color string, texs map[string]sprite.SubTex) []sprite.SubTex {\n\timgs := make([]sprite.SubTex, 0)\n\tfor _, char := range input {\n\t\tkey := \"\"\n\t\tif char == 32 {\n\t\t\tkey += \"Space\"\n\t\t} else if char == 33 {\n\t\t\tkey += \"Bang\"\n\t\t} else if char == 39 {\n\t\t\tkey += \"Apostrophe\"\n\t\t} else if char == 46 {\n\t\t\tkey += \"Period\"\n\t\t} else if char == 58 {\n\t\t\tkey += \"Colon\"\n\t\t} else if char >= 48 && char <= 57 {\n\t\t\t// if char is a number\n\t\t\tkey += string(char)\n\t\t} else {\n\t\t\t// if char is a letter\n\t\t\tkey += strings.ToUpper(string(char))\n\t\t\tif char > 90 {\n\t\t\t\tkey += \"-Lower\"\n\t\t\t} else {\n\t\t\t\tkey += \"-Upper\"\n\t\t\t}\n\t\t}\n\t\tif color != \"\" {\n\t\t\tkey += \"-\" + color\n\t\t}\n\t\tkey += \".png\"\n\t\timg := texs[key]\n\t\timgs = append(imgs, img)\n\t}\n\treturn imgs\n}",
"func (native *OpenGL) BindSampler(unit uint32, sampler uint32) {\n\tgl.BindSampler(unit, sampler)\n}",
"func (self *Graphics) GenerateTextureI(args ...interface{}) *Texture{\n return &Texture{self.Object.Call(\"generateTexture\", args)}\n}",
"func (self *Graphics) GenerateTexture1O(resolution int) *Texture{\n return &Texture{self.Object.Call(\"generateTexture\", resolution)}\n}",
"func (debugging *debuggingOpenGL) BindSampler(unit uint32, sampler uint32) {\n\tdebugging.recordEntry(\"BindSampler\", unit, sampler)\n\tdebugging.gl.BindSampler(unit, sampler)\n\tdebugging.recordExit(\"BindSampler\")\n}",
"func (bm Blendmap) Texture() *gl.Texture {\n\treturn bm.Map.id\n}",
"func (self *TileSprite) LoadTextureI(args ...interface{}) {\n self.Object.Call(\"loadTexture\", args)\n}",
"func (c *Context) BindTexture(texture *Texture) {\n\tif texture == nil {\n\t\treturn\n\t}\n\tif c.currentTexture == nil || texture.id != c.currentTexture.id {\n\t\tgl.BindTexture(gl.TEXTURE_2D, texture.id)\n\t\tc.currentTexture = texture\n\t}\n}",
"func (am *AssetManager) LoadTexture(name, iname string) {\n\tif strings.Contains(name, \".png\") {\n\t\tpic, err := LoadPng(am.texturesDir + name)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tam.Textures[iname] = pic\n\t} else {\n\t\tlog.Fatal(\"unable to find texture \" + (am.modelsDir + name))\n\t}\n}",
"func (w *Worley) GenerateTexture(tex *texture.Texture) {\n\tgl.BindImageTexture(0, tex.GetHandle(), 0, false, 0, gl.READ_WRITE, gl.RGBA32F)\n\tgl.BindImageTexture(1, w.noisetexture.GetHandle(), 0, false, 0, gl.READ_ONLY, gl.RGBA32F)\n\n\tw.computeshader.Use()\n\tw.computeshader.UpdateInt32(\"uWidth\", w.width)\n\tw.computeshader.UpdateInt32(\"uHeight\", w.height)\n\tw.computeshader.UpdateInt32(\"uResolution\", w.resolution)\n\tw.computeshader.UpdateInt32(\"uOctaves\", w.octaves)\n\tw.computeshader.UpdateFloat32(\"uRadius\", w.radius)\n\tw.computeshader.UpdateFloat32(\"uRadiusScale\", w.radiusscale)\n\tw.computeshader.UpdateFloat32(\"uBrightness\", w.brightness)\n\tw.computeshader.UpdateFloat32(\"uContrast\", w.contrast)\n\tw.computeshader.UpdateFloat32(\"uScale\", w.scale)\n\tw.computeshader.UpdateFloat32(\"uPersistance\", w.persistance)\n\tw.computeshader.Compute(uint32(w.width), uint32(w.height), 1)\n\tw.computeshader.Compute(1024, 1024, 1)\n\tw.computeshader.Release()\n\n\tgl.MemoryBarrier(gl.ALL_BARRIER_BITS)\n\n\tgl.BindImageTexture(0, 0, 0, false, 0, gl.WRITE_ONLY, gl.RGBA32F)\n\tgl.BindImageTexture(1, 0, 0, false, 0, gl.READ_ONLY, gl.RGBA32F)\n}",
"func EGLImageTargetTextureStorageEXT(texture uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTextureStorageEXT(gpEGLImageTargetTextureStorageEXT, (C.GLuint)(texture), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func EGLImageTargetTextureStorageEXT(texture uint32, image unsafe.Pointer, attrib_list *int32) {\n\tC.glowEGLImageTargetTextureStorageEXT(gpEGLImageTargetTextureStorageEXT, (C.GLuint)(texture), (C.GLeglImageOES)(image), (*C.GLint)(unsafe.Pointer(attrib_list)))\n}",
"func TextureBufferRange(texture uint32, internalformat uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpTextureBufferRange, 5, uintptr(texture), uintptr(internalformat), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func TexSubImage1D(target uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexSubImage1D(gpTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TexSubImage1D(target uint32, level int32, xoffset int32, width int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTexSubImage1D(gpTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLsizei)(width), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func (self *GameObjectCreator) RenderTexture4O(width int, height int, key string, addToCache bool) *RenderTexture{\n return &RenderTexture{self.Object.Call(\"renderTexture\", width, height, key, addToCache)}\n}",
"func Assets(assetNames func() []string, asset func(string) ([]byte, error), options ...LoaderOption) Loader {\n\treturn load(assetNames(), asset, options...)\n}",
"func InjectImages(values map[string]interface{}, v imagevector.ImageVector, names []string, opts ...imagevector.FindOptionFunc) (map[string]interface{}, error) {\n\timages, err := imagevector.FindImages(v, names, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvalues = utils.ShallowCopyMapStringInterface(values)\n\tvalues[\"images\"] = imagevector.ImageMapToValues(images)\n\treturn values, nil\n}",
"func CopyTextureSubImage1D(texture uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tsyscall.Syscall6(gpCopyTextureSubImage1D, 6, uintptr(texture), uintptr(level), uintptr(xoffset), uintptr(x), uintptr(y), uintptr(width))\n}",
"func (j *JSONSerializer) Bind(events ...Event) {\n\tfor _, event := range events {\n\t\teventType, t := EventType(event)\n\t\tj.eventTypes[eventType] = t\n\t}\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tsyscall.Syscall6(gpBindBuffersRange, 6, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(sizes)))\n}",
"func NamedFramebufferTextureLayer(framebuffer uint32, attachment uint32, texture uint32, level int32, layer int32) {\n\tC.glowNamedFramebufferTextureLayer(gpNamedFramebufferTextureLayer, (C.GLuint)(framebuffer), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(layer))\n}",
"func NamedFramebufferTextureLayer(framebuffer uint32, attachment uint32, texture uint32, level int32, layer int32) {\n\tC.glowNamedFramebufferTextureLayer(gpNamedFramebufferTextureLayer, (C.GLuint)(framebuffer), (C.GLenum)(attachment), (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(layer))\n}",
"func CopyTextureSubImage1D(texture uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tC.glowCopyTextureSubImage1D(gpCopyTextureSubImage1D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func CopyTextureSubImage1D(texture uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tC.glowCopyTextureSubImage1D(gpCopyTextureSubImage1D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func TextureSubImage3D(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage3D(gpTextureSubImage3D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}",
"func TextureSubImage3D(texture uint32, level int32, xoffset int32, yoffset int32, zoffset int32, width int32, height int32, depth int32, format uint32, xtype uint32, pixels unsafe.Pointer) {\n\tC.glowTextureSubImage3D(gpTextureSubImage3D, (C.GLuint)(texture), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLsizei)(depth), (C.GLenum)(format), (C.GLenum)(xtype), pixels)\n}"
] | [
"0.7449541",
"0.7357423",
"0.7207478",
"0.6816224",
"0.645483",
"0.645483",
"0.6311786",
"0.6053184",
"0.59547555",
"0.59547555",
"0.59375024",
"0.5876461",
"0.5855425",
"0.5723819",
"0.5638595",
"0.549762",
"0.5497387",
"0.5497387",
"0.54557395",
"0.54470265",
"0.5420729",
"0.54025495",
"0.53273606",
"0.5299187",
"0.52905107",
"0.5219914",
"0.52191424",
"0.520414",
"0.5169365",
"0.51683396",
"0.51435715",
"0.51435715",
"0.5058484",
"0.5058484",
"0.5019217",
"0.50130033",
"0.49613854",
"0.49476174",
"0.492876",
"0.492876",
"0.4924479",
"0.4918655",
"0.49137813",
"0.49137813",
"0.48877722",
"0.48444444",
"0.4829026",
"0.4828867",
"0.48199847",
"0.48199847",
"0.48135567",
"0.4806936",
"0.48056015",
"0.477305",
"0.47562557",
"0.4752848",
"0.47500348",
"0.4749012",
"0.4748166",
"0.4748166",
"0.4712461",
"0.4708798",
"0.47043476",
"0.47021997",
"0.46939304",
"0.46857607",
"0.46836016",
"0.46760732",
"0.46744484",
"0.46708277",
"0.4651604",
"0.46506375",
"0.46489626",
"0.46290797",
"0.46256557",
"0.46239856",
"0.46206173",
"0.4609787",
"0.4606251",
"0.46051407",
"0.45961446",
"0.45820794",
"0.45749596",
"0.45749596",
"0.4566185",
"0.4558192",
"0.4558192",
"0.4544507",
"0.4535872",
"0.45330322",
"0.45299268",
"0.4528341",
"0.45151177",
"0.45094073",
"0.45094073",
"0.4508938",
"0.4508938",
"0.45080963",
"0.45080963"
] | 0.7039537 | 4 |
bind a transform feedback object | func BindTransformFeedback(target uint32, id uint32) {
C.glowBindTransformFeedback(gpBindTransformFeedback, (C.GLenum)(target), (C.GLuint)(id))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (self *ComponentScaleMinMax) TransformCallbackContext() interface{}{\n return self.Object.Get(\"transformCallbackContext\")\n}",
"func TransformFeedbackBufferBase(xfb uint32, index uint32, buffer uint32) {\n\tsyscall.Syscall(gpTransformFeedbackBufferBase, 3, uintptr(xfb), uintptr(index), uintptr(buffer))\n}",
"func (self *ComponentScaleMinMax) TransformCallback() interface{}{\n return self.Object.Get(\"transformCallback\")\n}",
"func TransformFeedbackBufferBase(xfb uint32, index uint32, buffer uint32) {\n\tC.glowTransformFeedbackBufferBase(gpTransformFeedbackBufferBase, (C.GLuint)(xfb), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func TransformFeedbackBufferBase(xfb uint32, index uint32, buffer uint32) {\n\tC.glowTransformFeedbackBufferBase(gpTransformFeedbackBufferBase, (C.GLuint)(xfb), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func (self *ComponentScaleMinMax) SetTransformCallbackContextA(member interface{}) {\n self.Object.Set(\"transformCallbackContext\", member)\n}",
"func (s *Surface) Transform(a, b, c, d, e, f float64) {\n\ts.Ctx.Call(\"transform\", a, b, c, d, e, f)\n}",
"func BindTransformFeedback(target uint32, id uint32) {\n C.glowBindTransformFeedback(gpBindTransformFeedback, (C.GLenum)(target), (C.GLuint)(id))\n}",
"func (track *AudioTrack) Transform(fns ...audio.TransformFunc) {\n\tsrc := track.Broadcaster.Source()\n\ttrack.Broadcaster.ReplaceSource(audio.Merge(fns...)(src))\n}",
"func TransformFeedbackBufferRange(xfb uint32, index uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpTransformFeedbackBufferRange, 5, uintptr(xfb), uintptr(index), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func (track *VideoTrack) Transform(fns ...video.TransformFunc) {\n\tsrc := track.Broadcaster.Source()\n\ttrack.Broadcaster.ReplaceSource(video.Merge(fns...)(src))\n}",
"func (self *ComponentScaleMinMax) SetTransformCallbackA(member interface{}) {\n self.Object.Set(\"transformCallback\", member)\n}",
"func BindTransformFeedback(target uint32, id uint32) {\n\tsyscall.Syscall(gpBindTransformFeedback, 2, uintptr(target), uintptr(id), 0)\n}",
"func TransformFeedbackBufferRange(xfb uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowTransformFeedbackBufferRange(gpTransformFeedbackBufferRange, (C.GLuint)(xfb), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func TransformFeedbackBufferRange(xfb uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowTransformFeedbackBufferRange(gpTransformFeedbackBufferRange, (C.GLuint)(xfb), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func transformSend(n *ir.SendStmt) {\n\tn.Value = assignconvfn(n.Value, n.Chan.Type().Elem())\n}",
"func TransformFeedbackVaryings(program uint32, count int32, varyings **uint8, bufferMode uint32) {\n\tC.glowTransformFeedbackVaryings(gpTransformFeedbackVaryings, (C.GLuint)(program), (C.GLsizei)(count), (**C.GLchar)(unsafe.Pointer(varyings)), (C.GLenum)(bufferMode))\n}",
"func TransformFeedbackVaryings(program uint32, count int32, varyings **uint8, bufferMode uint32) {\n\tC.glowTransformFeedbackVaryings(gpTransformFeedbackVaryings, (C.GLuint)(program), (C.GLsizei)(count), (**C.GLchar)(unsafe.Pointer(varyings)), (C.GLenum)(bufferMode))\n}",
"func TransformFeedbackVaryings(program uint32, count int32, varyings **int8, bufferMode uint32) {\n C.glowTransformFeedbackVaryings(gpTransformFeedbackVaryings, (C.GLuint)(program), (C.GLsizei)(count), (**C.GLchar)(unsafe.Pointer(varyings)), (C.GLenum)(bufferMode))\n}",
"func (v *Vectorizer) FitTranform() *FeatureMatrix {\n\n}",
"func (self Text) Transform() Transform {\n\tt := C.sfText_getTransform(self.Cref)\n\treturn Transform{&t}\n}",
"func (self *ComponentScaleMinMax) CheckTransform(wt *Matrix) {\n self.Object.Call(\"checkTransform\", wt)\n}",
"func (t *Transform) Transform() *Transform {\n\treturn t\n}",
"func (g *GLTF) applySampler(samplerIdx int, tex *texture.Texture2D) error {\n\n\tlog.Debug(\"Applying Sampler %d\", samplerIdx)\n\t// Check if provided sampler index is valid\n\tif samplerIdx < 0 || samplerIdx >= len(g.Samplers) {\n\t\treturn fmt.Errorf(\"invalid sampler index\")\n\t}\n\tsampler := g.Samplers[samplerIdx]\n\n\t// Magnification filter\n\tmagFilter := gls.LINEAR\n\tif sampler.MagFilter != nil {\n\t\tmagFilter = *sampler.MagFilter\n\t}\n\ttex.SetMagFilter(uint32(magFilter))\n\n\t// Minification filter\n\tminFilter := gls.LINEAR_MIPMAP_LINEAR\n\tif sampler.MinFilter != nil {\n\t\tminFilter = *sampler.MinFilter\n\t}\n\ttex.SetMinFilter(uint32(minFilter))\n\n\t// S coordinate wrapping mode\n\twrapS := gls.REPEAT\n\tif sampler.WrapS != nil {\n\t\twrapS = *sampler.WrapS\n\t}\n\ttex.SetWrapS(uint32(wrapS))\n\n\t// T coordinate wrapping mode\n\twrapT := gls.REPEAT\n\tif sampler.WrapT != nil {\n\t\twrapT = *sampler.WrapT\n\t}\n\ttex.SetWrapT(uint32(wrapT))\n\n\treturn nil\n}",
"func (g *Graph) Transform(f func(interface{}) ([]Event, error)) {\n\tg.transform = f\n}",
"func (geom Geometry) Transform(ct CoordinateTransform) error {\n\treturn C.OGR_G_Transform(geom.cval, ct.cval).Err()\n}",
"func transformationFeature(transformer Transformer) Feature {\n\ttransformationType := NewFeature(TransformationTypeFeature, -1)\n\tswitch transformer.(type) {\n\tcase logicalOperatorReplacement:\n\t\ttransformationType.Score = LogicalOperatorTransformation\n\tcase *adjacencyRange:\n\t\ttransformationType.Score = AdjacencyRangeTransformation\n\tcase meshExplosion:\n\t\ttransformationType.Score = MeshExplosionTransformation\n\tcase fieldRestrictions:\n\t\ttransformationType.Score = FieldRestrictionsTransformation\n\tcase adjacencyReplacement:\n\t\ttransformationType.Score = AdjacencyReplacementTransformation\n\tcase clauseRemoval:\n\t\ttransformationType.Score = ClauseRemovalTransformation\n\tcase cui2vecExpansion:\n\t\ttransformationType.Score = Cui2vecExpansionTransformation\n\tcase meshParent:\n\t\ttransformationType.Score = MeshParentTransformation\n\t}\n\treturn transformationType\n}",
"func (s *BaseSyslParserListener) EnterTransform_arg(ctx *Transform_argContext) {}",
"func (s *Stream) bindOps() {\n\ts.log.Print(\"binding operators\")\n\tif s.ops == nil {\n\t\treturn\n\t}\n\tfor i, op := range s.ops {\n\t\tif i == 0 { // link 1st to source\n\t\t\top.SetInput(s.source.GetOutput())\n\t\t} else {\n\t\t\top.SetInput(s.ops[i-1].GetOutput())\n\t\t}\n\t}\n}",
"func (s *BaseSyslParserListener) EnterTransform(ctx *TransformContext) {}",
"func (poly *PolynomialFeatures) FitTransform(X, Y mat.Matrix) (Xout, Yout *mat.Dense) {\n\tpoly.Fit(X, Y)\n\treturn poly.Transform(X, Y)\n}",
"func (canvas *Canvas) Transform(a, b, c, d, e, f float32) {\n\twriteCommand(canvas.contents, \"cm\", a, b, c, d, e, f)\n}",
"func (m *Binarizer) FitTransform(X, Y mat.Matrix) (Xout, Yout *mat.Dense) {\n\tm.Fit(X, Y)\n\treturn m.Transform(X, Y)\n}",
"func (*TrackLocalRTP_RTPSenderPassthrough) Bind(ctx context.Context, track TrackLocal, sender RTPSender) error {\n\trtpTrack, ok := track.(TrackLocalRTP)\n\tif !ok {\n\t\treturn ErrIncompatible\n\t}\n\tptSender, ok := sender.(RTPSenderPassthrough)\n\tif !ok {\n\t\treturn ErrIncompatible\n\t}\n\teg, ctx2 := errgroup.WithContext(ctx)\n\t// RTP packets written via TrackLocalRTP.WriteRTP() will be\n\t// read by TrackLocalRTP.pipeReader.ReadRTP().\n\teg.Go(func() error {\n\t\treturn rtpengine.Copy(ctx2, ptSender, rtpTrack.pipeReader())\n\t})\n\teg.Go(func() error {\n\t\treturn rtpengine.CopyFeedback(ctx2, rtpTrack.pipeReader(), ptSender)\n\t})\n\treturn eg.Wait()\n}",
"func (wc *watchChan) transform(e *event) (res *watch.Event) {\n\tcurObj, oldObj, err := wc.prepareObjs(e)\n\tif err != nil {\n\t\tlogrus.Errorf(\"failed to prepare current and previous objects: %v\", err)\n\t\twc.sendError(err)\n\t\treturn nil\n\t}\n\tswitch {\n\tcase e.isProgressNotify:\n\t\tobj := wc.watcher.newFunc()\n\t\t// todo: update object version\n\t\tres = &watch.Event{\n\t\t\tType: watch.Bookmark,\n\t\t\tObject: obj,\n\t\t}\n\tcase e.isDeleted:\n\t\tres = &watch.Event{\n\t\t\tType: watch.Deleted,\n\t\t\tObject: oldObj,\n\t\t}\n\tcase e.isCreated:\n\t\tres = &watch.Event{\n\t\t\tType: watch.Added,\n\t\t\tObject: curObj,\n\t\t}\n\tdefault:\n\t\t// TODO: emit ADDED if the modified object causes it to actually pass the filter but the previous one did not\n\t\tres = &watch.Event{\n\t\t\tType: watch.Modified,\n\t\t\tObject: curObj,\n\t\t}\n\t}\n\treturn res\n}",
"func (bf *BuiltInFunc) Bind(instance *LoxInstance) Callable {\n\tbf.instance = instance\n\t// return itself.\n\treturn bf\n}",
"func (o *AddPlayerParams) bindPoints(rawData []string, hasKey bool, formats strfmt.Registry) error {\n\tvar raw string\n\tif len(rawData) > 0 {\n\t\traw = rawData[len(rawData)-1]\n\t}\n\n\t// Required: true\n\t// Parameter is provided by construction from the route\n\to.Points = raw\n\n\treturn nil\n}",
"func transform(sc *scope, e sexpr) sexpr {\n\treturn e\n}",
"func (m *Shuffler) FitTransform(X, Y mat.Matrix) (Xout, Yout *mat.Dense) {\n\tm.Fit(X, Y)\n\treturn m.Transform(X, Y)\n}",
"func BeginTransformFeedback(primitiveMode uint32) {\n C.glowBeginTransformFeedback(gpBeginTransformFeedback, (C.GLenum)(primitiveMode))\n}",
"func (c *canvasRenderer) SetTransform(transform sprec.Mat4) {\n\tif c.currentLayer == c.topLayer {\n\t\tc.currentLayer.Transform = transform\n\t} else {\n\t\tc.currentLayer.Transform = sprec.Mat4Prod(\n\t\t\tc.currentLayer.previous.Transform,\n\t\t\ttransform,\n\t\t)\n\t}\n}",
"func (c *Curl) transform() {\n\tvar tmp [StateSize]int8\n\ttransform(&tmp, &c.state, uint(c.rounds))\n\t// for odd number of rounds we need to copy the buffer into the state\n\tif c.rounds%2 != 0 {\n\t\tcopy(c.state[:], tmp[:])\n\t}\n}",
"func (el *Fill) AnimateTransform() {}",
"func Transform(ctx context.Context, input <-chan CrawlResult, worker TransformFunc, parallelism int) <-chan TransformResult {\n\tt := &transformer{\n\t\tinput: input,\n\t\toutput: make(chan TransformResult, 1000),\n\t\tworkerBody: worker,\n\t\tparallelism: parallelism,\n\t}\n\tgo t.runWorkersToCompletion(ctx)\n\treturn t.output\n}",
"func (o *CanvasItem) ForceUpdateTransform() {\n\t//log.Println(\"Calling CanvasItem.ForceUpdateTransform()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"CanvasItem\", \"force_update_transform\")\n\n\t// Call the parent method.\n\t// void\n\tretPtr := gdnative.NewEmptyVoid()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n}",
"func (entity *Base) Transform() *Transform {\n\treturn &entity.transform\n}",
"func (s *BaseCGListener) EnterEquivalentto(ctx *EquivalenttoContext) {}",
"func (obj *transform) Input() string {\n\treturn obj.input\n}",
"func (h *pardo) PrepareTransform(tid string, t *pipepb.PTransform, comps *pipepb.Components) (*pipepb.Components, []string) {\n\n\t// ParDos are a pain in the butt.\n\t// Combines, by comparison, are dramatically simpler.\n\t// This is because for ParDos, how they are handled, and what kinds of transforms are in\n\t// and around the ParDo, the actual shape of the graph will change.\n\t// At their simplest, it's something a DoFn will handle on their own.\n\t// At their most complex, they require intimate interaction with the subgraph\n\t// bundling process, the data layer, state layers, and control layers.\n\t// But unlike combines, which have a clear urn for composite + special payload,\n\t// ParDos have the standard URN for composites with the standard payload.\n\t// So always, we need to first unmarshal the payload.\n\n\tpardoPayload := t.GetSpec().GetPayload()\n\tpdo := &pipepb.ParDoPayload{}\n\tif err := (proto.UnmarshalOptions{}).Unmarshal(pardoPayload, pdo); err != nil {\n\t\tpanic(fmt.Sprintf(\"unable to decode ParDoPayload for transform[%v]\", t.GetUniqueName()))\n\t}\n\n\t// Lets check for and remove anything that makes things less simple.\n\tif pdo.OnWindowExpirationTimerFamilySpec == \"\" &&\n\t\t!pdo.RequestsFinalization &&\n\t\t!pdo.RequiresStableInput &&\n\t\t!pdo.RequiresTimeSortedInput &&\n\t\tlen(pdo.StateSpecs) == 0 &&\n\t\tlen(pdo.TimerFamilySpecs) == 0 &&\n\t\tpdo.RestrictionCoderId == \"\" {\n\t\t// Which inputs are Side inputs don't change the graph further,\n\t\t// so they're not included here. Any nearly any ParDo can have them.\n\n\t\t// At their simplest, we don't need to do anything special at pre-processing time, and simply pass through as normal.\n\t\treturn &pipepb.Components{\n\t\t\tTransforms: map[string]*pipepb.PTransform{\n\t\t\t\ttid: t,\n\t\t\t},\n\t\t}, nil\n\t}\n\n\t// Side inputs add to topology and make fusion harder to deal with\n\t// (side input producers can't be in the same stage as their consumers)\n\t// But we don't have fusion yet, so no worries.\n\n\t// State, Timers, Stable Input, Time Sorted Input, and some parts of SDF\n\t// Are easier to deal including a fusion break. But We can do that with a\n\t// runner specific transform for stable input, and another for timesorted\n\t// input.\n\n\t// SplittableDoFns have 3 required phases and a 4th optional phase.\n\t//\n\t// PAIR_WITH_RESTRICTION which pairs elements with their restrictions\n\t// Input: element; := INPUT\n\t// Output: KV(element, restriction) := PWR\n\t//\n\t// SPLIT_AND_SIZE_RESTRICTIONS splits the pairs into sub element ranges\n\t// and a relative size for each, in a float64 format.\n\t// Input: KV(element, restriction) := PWR\n\t// Output: KV(KV(element, restriction), float64) := SPLITnSIZED\n\t//\n\t// PROCESS_SIZED_ELEMENTS_AND_RESTRICTIONS actually processes the\n\t// elements. This is also where splits need to be handled.\n\t// In particular, primary and residual splits have the same format as the input.\n\t// Input: KV(KV(element, restriction), size) := SPLITnSIZED\n\t// Output: DoFn's output. := OUTPUT\n\t//\n\t// TRUNCATE_SIZED_RESTRICTION is how the runner has an SDK turn an\n\t// unbounded transform into a bound one. Not needed until the pipeline\n\t// is told to drain.\n\t// Input: KV(KV(element, restriction), float64) := synthetic split results from above\n\t// Output: KV(KV(element, restriction), float64). 
:= synthetic, truncated results sent as Split n Sized\n\t//\n\t// So with that, we can figure out the coders we need.\n\t//\n\t// cE - Element Coder (same as input coder)\n\t// cR - Restriction Coder\n\t// cS - Size Coder (float64)\n\t// ckvER - KV<Element, Restriction>\n\t// ckvERS - KV<KV<Element, Restriction>, Size>\n\t//\n\t// There could be a few output coders, but the outputs can be copied from\n\t// the original transform directly.\n\n\t// First lets get the parallel input coder ID.\n\tvar pcolInID, inputLocalID string\n\tfor localID, globalID := range t.GetInputs() {\n\t\t// The parallel input is the one that isn't a side input.\n\t\tif _, ok := pdo.SideInputs[localID]; !ok {\n\t\t\tinputLocalID = localID\n\t\t\tpcolInID = globalID\n\t\t\tbreak\n\t\t}\n\t}\n\tinputPCol := comps.GetPcollections()[pcolInID]\n\tcEID := inputPCol.GetCoderId()\n\tcRID := pdo.RestrictionCoderId\n\tcSID := \"c\" + tid + \"size\"\n\tckvERID := \"c\" + tid + \"kv_ele_rest\"\n\tckvERSID := ckvERID + \"_size\"\n\n\tcoder := func(urn string, componentIDs ...string) *pipepb.Coder {\n\t\treturn &pipepb.Coder{\n\t\t\tSpec: &pipepb.FunctionSpec{\n\t\t\t\tUrn: urn,\n\t\t\t},\n\t\t\tComponentCoderIds: componentIDs,\n\t\t}\n\t}\n\n\tcoders := map[string]*pipepb.Coder{\n\t\tckvERID: coder(urns.CoderKV, cEID, cRID),\n\t\tcSID: coder(urns.CoderDouble),\n\t\tckvERSID: coder(urns.CoderKV, ckvERID, cSID),\n\t}\n\n\t// PCollections only have two new ones.\n\t// INPUT -> same as ordinary DoFn\n\t// PWR, uses ckvER\n\t// SPLITnSIZED, uses ckvERS\n\t// OUTPUT -> same as ordinary outputs\n\n\tnPWRID := \"n\" + tid + \"_pwr\"\n\tnSPLITnSIZEDID := \"n\" + tid + \"_splitnsized\"\n\n\tpcol := func(name, coderID string) *pipepb.PCollection {\n\t\treturn &pipepb.PCollection{\n\t\t\tUniqueName: name,\n\t\t\tCoderId: coderID,\n\t\t\tIsBounded: inputPCol.GetIsBounded(),\n\t\t\tWindowingStrategyId: inputPCol.GetWindowingStrategyId(),\n\t\t}\n\t}\n\n\tpcols := map[string]*pipepb.PCollection{\n\t\tnPWRID: pcol(nPWRID, ckvERID),\n\t\tnSPLITnSIZEDID: pcol(nSPLITnSIZEDID, ckvERSID),\n\t}\n\n\t// PTransforms have 3 new ones, with process sized elements and restrictions\n\t// taking the brunt of the complexity, consuming the inputs\n\n\tePWRID := \"e\" + tid + \"_pwr\"\n\teSPLITnSIZEDID := \"e\" + tid + \"_splitnsize\"\n\teProcessID := \"e\" + tid + \"_processandsplit\"\n\n\ttform := func(name, urn, in, out string) *pipepb.PTransform {\n\t\treturn &pipepb.PTransform{\n\t\t\tUniqueName: name,\n\t\t\tSpec: &pipepb.FunctionSpec{\n\t\t\t\tUrn: urn,\n\t\t\t\tPayload: pardoPayload,\n\t\t\t},\n\t\t\tInputs: map[string]string{\n\t\t\t\tinputLocalID: in,\n\t\t\t},\n\t\t\tOutputs: map[string]string{\n\t\t\t\t\"i0\": out,\n\t\t\t},\n\t\t\tEnvironmentId: t.GetEnvironmentId(),\n\t\t}\n\t}\n\n\tnewInputs := maps.Clone(t.GetInputs())\n\tnewInputs[inputLocalID] = nSPLITnSIZEDID\n\n\ttforms := map[string]*pipepb.PTransform{\n\t\tePWRID: tform(ePWRID, urns.TransformPairWithRestriction, pcolInID, nPWRID),\n\t\teSPLITnSIZEDID: tform(eSPLITnSIZEDID, urns.TransformSplitAndSize, nPWRID, nSPLITnSIZEDID),\n\t\teProcessID: {\n\t\t\tUniqueName: eProcessID,\n\t\t\tSpec: &pipepb.FunctionSpec{\n\t\t\t\tUrn: urns.TransformProcessSizedElements,\n\t\t\t\tPayload: pardoPayload,\n\t\t\t},\n\t\t\tInputs: newInputs,\n\t\t\tOutputs: t.GetOutputs(),\n\t\t\tEnvironmentId: t.GetEnvironmentId(),\n\t\t},\n\t}\n\n\treturn &pipepb.Components{\n\t\tCoders: coders,\n\t\tPcollections: pcols,\n\t\tTransforms: tforms,\n\t}, t.GetSubtransforms()\n}",
"func (f *Pub) Bind(rx Publisher, cl bool) {\n\tf.branches.Add(rx)\n\trx.UseRoot(f)\n\n\tif cl {\n\t\tf.enders.Add(rx)\n\t}\n}",
"func (s *Stream) Transform(op api.UnOperation) *Stream {\n\toperator := unary.New(s.ctx)\n\toperator.SetOperation(op)\n\ts.ops = append(s.ops, operator)\n\treturn s\n}",
"func (m *OneHotEncoder) FitTransform(X, Y mat.Matrix) (Xout, Yout *mat.Dense) {\n\tm.Fit(X, Y)\n\treturn m.Transform(X, Y)\n}",
"func (c *canvasRenderer) Translate(delta sprec.Vec2) {\n\tc.currentLayer.Transform = sprec.Mat4Prod(\n\t\tc.currentLayer.Transform,\n\t\tsprec.TranslationMat4(delta.X, delta.Y, 0.0),\n\t)\n}",
"func (t *Transform) Translate(tx, ty float64) {\n\tout := fmt.Sprintf(\"translate(%g,%g)\", tx, ty)\n\n\tt.transforms = append(t.transforms, out)\n}",
"func (geom Geometry) TransformTo(sr SpatialReference) error {\n\treturn C.OGR_G_TransformTo(geom.cval, sr.cval).Err()\n}",
"func PauseTransformFeedback() {\n C.glowPauseTransformFeedback(gpPauseTransformFeedback)\n}",
"func Transform() TransformComponent {\n\treturn TransformComponent{\n\t\tLocal: Affine{Scale: vec3{1, 1, 1}},\n\t\tWorld: Affine{Scale: vec3{1, 1, 1}},\n\t}\n}",
"func (s *Surface) SetTransform(a, b, c, d, e, f float64) {\n\ts.Ctx.Call(\"setTransform\", a, b, c, d, e, f)\n}",
"func (coll *FeatureCollection) Transform(t Transformer) {\n\tfor _, feat := range *coll {\n\t\tfeat.Transform(t)\n\t}\n}",
"func (t *Texture2D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_2D, t.ID)\n}",
"func (c *Call) Activate(vm *VM, target, locals, context Interface, msg *Message) Interface {\n\treturn c\n}",
"func (t *transform) Set(modelView mgl32.Mat4) {\n\tt.dataLock.Lock()\n\tdefer t.dataLock.Unlock()\n\tt.modelView = modelView\n}",
"func (m *PowerTransformer) FitTransform(X, Y mat.Matrix) (Xout, Yout *mat.Dense) {\n\tXout = m.fit(X, Y, true)\n\tYout = base.ToDense(Y)\n\treturn\n}",
"func (*TrackRemoteRTP_RTPReceiverPassthrough) Bind(ctx context.Context, track TrackRemote, sender RTPReceiver) error {\n\trtpTrack, ok := track.(TrackRemoteRTP)\n\tif !ok {\n\t\treturn ErrIncompatible\n\t}\n\tptReceiver, ok := sender.(RTPReceiverPassthrough)\n\tif !ok {\n\t\treturn ErrIncompatible\n\t}\n\teg, ctx2 := errgroup.WithContext(ctx)\n\t// RTP packets written to TrackRemoteRTP.pipeWriter.WriteRTP() will be read from\n\t// TrackRemoteRTP.ReadRTP().\n\teg.Go(func() error {\n\t\treturn rtpengine.Copy(ctx2, rtpTrack.pipeWriter(), ptReceiver)\n\t})\n\teg.Go(func() error {\n\t\treturn rtpengine.CopyFeedback(ctx2, ptReceiver, rtpTrack.pipeWriter())\n\t})\n\treturn eg.Wait()\n}",
"func (t *readFramebuffer) Transform(ctx context.Context, id api.CmdID, cmd api.Cmd, out transform.Writer) error {\n\ts := out.State()\n\tst := GetState(s)\n\tif cmd, ok := cmd.(*InsertionCommand); ok {\n\t\tidx_string := keyFromIndex(cmd.idx)\n\t\tif r, ok := t.injections[idx_string]; ok {\n\t\t\t// If this command is FOR an EOF command, we want to mutate it, so that\n\t\t\t// we have the presentation info available.\n\t\t\tif cmd.callee != nil && cmd.callee.CmdFlags(ctx, id, s).IsEndOfFrame() {\n\t\t\t\tcmd.callee.Mutate(ctx, id, out.State(), nil, nil)\n\t\t\t}\n\t\t\tfor _, injection := range r {\n\t\t\t\tif err := injection.fn(ctx, cmd, injection.res, out); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn nil\n\t\t}\n\t}\n\tif err := out.MutateAndWrite(ctx, id, cmd); err != nil {\n\t\treturn err\n\t}\n\t// If we have no deferred submissions left, then we can terminate\n\tif len(t.pendingReads) > 0 && len(st.deferredSubmissions) == 0 {\n\t\tif id != api.CmdNoID {\n\t\t\treturn t.FlushPending(ctx, out)\n\t\t}\n\t}\n\treturn nil\n}",
"func (t Texture3D) Bind() {\n\tgl.BindTexture(gl.TEXTURE_3D, t.id)\n}",
"func (t *transform) Translate(translate mgl32.Vec3) {\n\tt.dataLock.Lock()\n\tdefer t.dataLock.Unlock()\n\tt.translation = t.translation.Add(translate)\n\ttrans := t.translation\n\tt.modelView = t.modelView.Mul4(mgl32.Translate3D(trans.X(), trans.Y(), trans.Z()))\n}",
"func fnPTransform(ctx Context, doc *JDoc, params []string) interface{} {\n\t// note: calling ptransform in sync or debug mode does not make sense - should we raise an error in such a scenario?\n\tstats := ctx.Value(EelTotalStats).(*ServiceStats)\n\tif params == nil || len(params) == 0 || len(params) > 1 {\n\t\tctx.Log().Error(\"error_type\", \"func_ptransform\", \"op\", \"etransform\", \"cause\", \"wrong_number_of_parameters\", \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"wrong number of parameters in call to ptransform function\"), \"etransform\", params})\n\t\treturn nil\n\t}\n\t// prepare event\n\trawEvent := extractStringParam(params[0])\n\tevent, err := NewJDocFromString(rawEvent)\n\tif err != nil {\n\t\tctx.Log().Error(\"error_type\", \"func_ptransform\", \"op\", \"etransform\", \"cause\", \"invalid_json\", \"params\", params, \"error\", err.Error())\n\t\tstats.IncErrors()\n\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"non json parameters in call to ptransform function\"), \"etransform\", params})\n\t\treturn nil\n\t}\n\t// apply debug logs\n\tlogParams := GetConfig(ctx).LogParams\n\tif logParams != nil {\n\t\tfor k, v := range logParams {\n\t\t\tev := event.ParseExpression(ctx, v)\n\t\t\tctx.AddLogValue(k, ev)\n\t\t}\n\t}\n\t// handle event and execute publisher(s)\n\t// both sync=true or debug=true would not make sense here\n\thandleEvent(ctx, stats, event, rawEvent, false, false)\n\treturn nil\n}",
"func fnTransform(ctx Context, doc *JDoc, params []string) interface{} {\n\tstats := ctx.Value(EelTotalStats).(*ServiceStats)\n\tif params == nil || len(params) == 0 || len(params) > 4 {\n\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"wrong_number_of_parameters\", \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"wrong number of parameters in call to transform function\"), \"transform\", params})\n\t\treturn nil\n\t}\n\th := GetCurrentHandlerConfig(ctx)\n\tif h == nil {\n\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"no_handler\", \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, RuntimeError{fmt.Sprintf(\"current handler not found in call to transform function\"), \"transform\", params})\n\t\treturn nil\n\t}\n\tif h.Transformations == nil {\n\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"no_named_transformations\", \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, RuntimeError{fmt.Sprintf(\"no named transformations found in call to transform function\"), \"transform\", params})\n\t\treturn nil\n\t}\n\tt := h.Transformations[extractStringParam(params[0])]\n\tif t == nil {\n\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"unknown_transformation\", \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, RuntimeError{fmt.Sprintf(\"no named transformation %s found in call to transform function\", extractStringParam(params[0])), \"transform\", params})\n\t\treturn nil\n\t}\n\tvar section interface{}\n\tsection = doc.GetOriginalObject()\n\tif len(params) >= 2 {\n\t\terr := json.Unmarshal([]byte(extractStringParam(params[1])), §ion)\n\t\tif err != nil {\n\t\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"invalid_json\", \"params\", params, \"error\", err.Error())\n\t\t\tstats.IncErrors()\n\t\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"non json parameters in call to transform function\"), \"transform\", params})\n\t\t\treturn nil\n\t\t}\n\t}\n\tvar pattern *JDoc\n\tif len(params) >= 3 && extractStringParam(params[2]) != \"\" {\n\t\tvar err error\n\t\tpattern, err = NewJDocFromString(extractStringParam(params[2]))\n\t\tif err != nil {\n\t\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"non_json_parameter\", \"params\", params, \"error\", err.Error())\n\t\t\tstats.IncErrors()\n\t\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"non json parameters in call to transform function\"), \"transform\", params})\n\t\t\treturn nil\n\t\t}\n\t}\n\tvar join *JDoc\n\tif len(params) == 4 && extractStringParam(params[3]) != \"\" {\n\t\tvar err error\n\t\tjoin, err = NewJDocFromString(extractStringParam(params[3]))\n\t\tif err != nil {\n\t\t\tctx.Log().Error(\"error_type\", \"func_transform\", \"op\", \"transform\", \"cause\", \"non_json_parameter\", \"params\", params, \"error\", err.Error())\n\t\t\tstats.IncErrors()\n\t\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"non json parameters in call to transform function\"), \"transform\", params})\n\t\t\treturn nil\n\t\t}\n\t}\n\tif pattern != nil {\n\t\tc, _ := doc.contains(section, pattern.GetOriginalObject(), 0)\n\t\tif !c {\n\t\t\treturn section\n\t\t}\n\t}\n\tif join != nil {\n\t\tsection = doc.merge(join.GetOriginalObject(), section)\n\t}\n\tlittleDoc, err := NewJDocFromInterface(section)\n\tif err != nil {\n\t\tctx.Log().Error(\"error_type\", 
\"func_transform\", \"cause\", \"json_parse_error\", \"op\", \"transform\", \"error\", err.Error(), \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, RuntimeError{fmt.Sprintf(\"transformation error in call to transform function\"), \"transform\", params})\n\t\treturn nil\n\t}\n\tvar littleRes *JDoc\n\tif t.IsTransformationByExample {\n\t\tlittleRes = littleDoc.ApplyTransformationByExample(ctx, t.t)\n\t} else {\n\t\tlittleRes = littleDoc.ApplyTransformation(ctx, t.t)\n\t}\n\treturn littleRes.GetOriginalObject()\n}",
"func (app *Configurable) Transform(parameters map[string]string) interfaces.AppFunction {\n\ttransformType, ok := parameters[TransformType]\n\tif !ok {\n\t\tapp.lc.Errorf(\"Could not find '%s' parameter for Transform\", TransformType)\n\t\treturn nil\n\t}\n\n\ttransform := transforms.Conversion{}\n\n\tswitch strings.ToLower(transformType) {\n\tcase TransformXml:\n\t\treturn transform.TransformToXML\n\tcase TransformJson:\n\t\treturn transform.TransformToJSON\n\tdefault:\n\t\tapp.lc.Errorf(\n\t\t\t\"Invalid transform type '%s'. Must be '%s' or '%s'\",\n\t\t\ttransformType,\n\t\t\tTransformXml,\n\t\t\tTransformJson)\n\t\treturn nil\n\t}\n}",
"func pipelineTransform(arg *interface{}, container **[]interface{}) {\n\tswitch value := (*arg).(type) {\n\tcase []interface{}:\n\t\t*container = &value\n\tcase interface{}:\n\t\t*container = &[]interface{}{value}\n\tdefault:\n\t\t**container = nil\n\t}\n}",
"func NewTransform() *Transform {\n\treturn &Transform{}\n}",
"func (g *Group) SetTransform(m *algebra.Matrix) {\n\tif len(m.Get()) != 4 || len(m.Get()[0]) != 4 {\n\t\tpanic(algebra.ExpectedDimension(4))\n\t}\n\tg.transform = m\n}",
"func (m *Model) forward(x ag.Node) (s *State) {\n\tg := m.Graph()\n\ts = new(State)\n\tyPrev := m.prev()\n\th := nn.Affine(g, m.B, m.W, x)\n\tif yPrev != nil {\n\t\th = g.Add(h, g.Prod(m.WRec, yPrev))\n\t}\n\ts.Y = g.Invoke(m.Activation, h)\n\treturn\n}",
"func (m *Normalizer) FitTransform(X, Y mat.Matrix) (Xout, Yout *mat.Dense) {\n\tm.Fit(X, Y)\n\treturn m.Transform(X, Y)\n}",
"func Transform() TRANSFORM {\n\treturn TRANSFORM{\n\t\ttags: []ONETRANSFORM{},\n\t}\n}",
"func (scaler *RobustScaler) FitTransform(X, Y mat.Matrix) (Xout, Yout *mat.Dense) {\n\tscaler.Fit(X, Y)\n\treturn scaler.Transform(X, Y)\n}",
"func (self *ComponentScaleMinMax) CheckTransformI(args ...interface{}) {\n self.Object.Call(\"checkTransform\", args)\n}",
"func setBlendFunc(cmp pixel.ComposeMethod) {\n\tswitch cmp {\n\tcase pixel.ComposeOver:\n\t\tglhf.BlendFunc(glhf.One, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeIn:\n\t\tglhf.BlendFunc(glhf.DstAlpha, glhf.Zero)\n\tcase pixel.ComposeOut:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.Zero)\n\tcase pixel.ComposeAtop:\n\t\tglhf.BlendFunc(glhf.DstAlpha, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeRover:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.One)\n\tcase pixel.ComposeRin:\n\t\tglhf.BlendFunc(glhf.Zero, glhf.SrcAlpha)\n\tcase pixel.ComposeRout:\n\t\tglhf.BlendFunc(glhf.Zero, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeRatop:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.SrcAlpha)\n\tcase pixel.ComposeXor:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposePlus:\n\t\tglhf.BlendFunc(glhf.One, glhf.One)\n\tcase pixel.ComposeCopy:\n\t\tglhf.BlendFunc(glhf.One, glhf.Zero)\n\tdefault:\n\t\tpanic(errors.New(\"Canvas: invalid compose method\"))\n\t}\n}",
"func (self *Graphics) SetInputA(member interface{}) {\n self.Object.Set(\"input\", member)\n}",
"func MakeTransformFeedbackObject() TransformFeedbackObject {\n\tvar tfb uint32\n\tgl.GenTransformFeedbacks(1, &tfb)\n\treturn TransformFeedbackObject(tfb)\n}",
"func DrawTransformFeedback(mode uint32, id uint32) {\n C.glowDrawTransformFeedback(gpDrawTransformFeedback, (C.GLenum)(mode), (C.GLuint)(id))\n}",
"func (*Transformation) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_v1_options_transformation_transformation_proto_rawDescGZIP(), []int{5}\n}",
"func NewTransformer() Transformer {\n\treturn &execFigletTransformer{}\n}",
"func (s *Surface) ResetTransform() {\n\ts.Ctx.Call(\"resetTransform\")\n}",
"func (SendDataResource) Transform(txData interface{}, context *state.CheckState) TxDataResource {\n\tdata := txData.(*transaction.SendData)\n\tcoin := context.Coins().GetCoin(data.Coin)\n\n\treturn SendDataResource{\n\t\tTo: data.To.String(),\n\t\tValue: data.Value.String(),\n\t\tCoin: CoinResource{coin.ID().Uint32(), coin.GetFullSymbol()},\n\t}\n}",
"func (m *EducationFeedbackOutcome) SetFeedback(value EducationFeedbackable)() {\n m.feedback = value\n}",
"func (recv *Value) Transform(destValue *Value) bool {\n\tc_dest_value := (*C.GValue)(C.NULL)\n\tif destValue != nil {\n\t\tc_dest_value = (*C.GValue)(destValue.ToC())\n\t}\n\n\tretC := C.g_value_transform((*C.GValue)(recv.native), c_dest_value)\n\tretGo := retC == C.TRUE\n\n\treturn retGo\n}",
"func (m *Message) Activate(vm *VM, target, locals, context Interface, msg *Message) Interface {\n\treturn m\n}",
"func (ts *TextState) Translate(tx, ty float64) {\n\tts.Tm = transform.TranslationMatrix(tx, ty).Mult(ts.Tm)\n}",
"func (i *IngressFilter) Bind() *IngressFilter {\n\tif i.Client != nil {\n\t\ti.Client.Bind()\n\t}\n\n\ti.Vectorizer = GetActualValue(i.Vectorizer)\n\n\tif i.SearchFilters != nil {\n\t\ti.SearchFilters = GetActualValues(i.SearchFilters)\n\t}\n\tif i.InsertFilters != nil {\n\t\ti.InsertFilters = GetActualValues(i.InsertFilters)\n\t}\n\tif i.UpdateFilters != nil {\n\t\ti.UpdateFilters = GetActualValues(i.UpdateFilters)\n\t}\n\tif i.UpsertFilters != nil {\n\t\ti.UpsertFilters = GetActualValues(i.UpsertFilters)\n\t}\n\treturn i\n}",
"func (self *PhysicsP2) SetCallbackContextA(member interface{}) {\n self.Object.Set(\"callbackContext\", member)\n}",
"func (pt *PointTransform) Transform(p *Point) *image.Point {\n\treturn pt.TransformXY(p.X(), p.Y())\n}",
"func (self *AbstractFilter) SyncUniforms() {\n self.Object.Call(\"syncUniforms\")\n}",
"func (c *Patch) applyTransforms(input interface{}) (interface{}, error) {\n\tvar err error\n\tfor i, t := range c.Transforms {\n\t\tif input, err = t.Transform(input); err != nil {\n\t\t\treturn nil, errors.Wrapf(err, errFmtTransformAtIndex, i)\n\t\t}\n\t}\n\treturn input, nil\n}",
"func Transformer(name string, f interface{}) Option {\n\tv := reflect.ValueOf(f)\n\tif !function.IsType(v.Type(), function.Transformer) || v.IsNil() {\n\t\tpanic(fmt.Sprintf(\"invalid transformer function: %T\", f))\n\t}\n\tif name == \"\" {\n\t\tname = function.NameOf(v)\n\t\tif !identsRx.MatchString(name) {\n\t\t\tname = \"λ\" // Lambda-symbol as placeholder name\n\t\t}\n\t} else if !identsRx.MatchString(name) {\n\t\tpanic(fmt.Sprintf(\"invalid name: %q\", name))\n\t}\n\ttr := &transformer{name: name, fnc: reflect.ValueOf(f)}\n\tif ti := v.Type().In(0); ti.Kind() != reflect.Interface || ti.NumMethod() > 0 {\n\t\ttr.typ = ti\n\t}\n\treturn tr\n}",
"func BTL(ir, mr operand.Op) { ctx.BTL(ir, mr) }",
"func transformPlugin(dst *engine.Step, src *yaml.Container, _ *config.Config) {\n\tif dst.Environment == nil {\n\t\tdst.Environment = map[string]string{}\n\t}\n\tparamsToEnv(src.Vargs, dst.Environment)\n}",
"func (m *MaxAbsScaler) FitTransform(X, Y mat.Matrix) (Xout, Yout *mat.Dense) {\n\tm.Fit(X, Y)\n\treturn m.Transform(X, Y)\n}"
] | [
"0.597873",
"0.5743686",
"0.5680307",
"0.5622199",
"0.5622199",
"0.55374396",
"0.5507087",
"0.52361166",
"0.5157785",
"0.5142916",
"0.50832486",
"0.50082266",
"0.49723983",
"0.49582797",
"0.49582797",
"0.49286062",
"0.48971266",
"0.48971266",
"0.4894097",
"0.4888792",
"0.48309103",
"0.4798852",
"0.47763228",
"0.4736275",
"0.46963683",
"0.46903953",
"0.46832243",
"0.46810067",
"0.4668843",
"0.46683154",
"0.46642023",
"0.46503615",
"0.4618049",
"0.45854068",
"0.4561153",
"0.45407772",
"0.45388925",
"0.45210028",
"0.4512934",
"0.45005116",
"0.4500126",
"0.4499259",
"0.44943383",
"0.44590974",
"0.44561985",
"0.44513655",
"0.44411266",
"0.44231674",
"0.4420305",
"0.4394728",
"0.43915042",
"0.43906528",
"0.43737394",
"0.43662158",
"0.4364356",
"0.4351089",
"0.433687",
"0.4333691",
"0.43329412",
"0.43245763",
"0.43195337",
"0.43190658",
"0.43171114",
"0.42984998",
"0.42960846",
"0.42863077",
"0.42759445",
"0.4265391",
"0.4262118",
"0.42567655",
"0.42123097",
"0.42030168",
"0.42018482",
"0.42014307",
"0.41981187",
"0.41961327",
"0.41932532",
"0.41895053",
"0.41808358",
"0.4179419",
"0.4177159",
"0.41715562",
"0.4168319",
"0.41614607",
"0.41598734",
"0.41546205",
"0.4151181",
"0.41509748",
"0.4139762",
"0.41245508",
"0.41194725",
"0.4118397",
"0.4118362",
"0.41090488",
"0.41082424",
"0.41082147",
"0.41073245",
"0.41059467",
"0.4103904"
] | 0.48191622 | 22 |
bind a vertex array object | func BindVertexArray(array uint32) {
C.glowBindVertexArray(gpBindVertexArray, (C.GLuint)(array))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindVertexArray(array uint32) {\n C.glowBindVertexArray(gpBindVertexArray, (C.GLuint)(array))\n}",
"func BindVertexArray(array uint32) {\n\tsyscall.Syscall(gpBindVertexArray, 1, uintptr(array), 0, 0)\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n C.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func (b *VBO) Bind(m *Mesh) {\n\tif !b.genBound {\n\t\tpanic(\"A VBO buffer ID has not been generated. Call GenBuffer first.\")\n\t}\n\n\tgl.BindBuffer(gl.ARRAY_BUFFER, b.vboID)\n\tfloatSize := int(unsafe.Sizeof(float32(0)))\n\tgl.BufferData(gl.ARRAY_BUFFER, len(m.Vertices)*floatSize, gl.Ptr(m.Vertices), gl.STATIC_DRAW)\n}",
"func (debugging *debuggingOpenGL) BindVertexArray(array uint32) {\n\tdebugging.recordEntry(\"BindVertexArray\", array)\n\tdebugging.gl.BindVertexArray(array)\n\tdebugging.recordExit(\"BindVertexArray\")\n}",
"func (native *OpenGL) BindVertexArray(array uint32) {\n\tgl.BindVertexArray(array)\n}",
"func BindVertexArray(vao uint32) {\n\t//gl.BindVertexArrayAPPLE(vao)\n\tgl.BindVertexArray(vao)\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n C.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tsyscall.Syscall6(gpBindVertexBuffers, 5, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(strides)), 0)\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n\tsyscall.Syscall(gpVertexAttribBinding, 2, uintptr(attribindex), uintptr(bindingindex), 0)\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n C.glowVertexAttribBinding(gpVertexAttribBinding, (C.GLuint)(attribindex), (C.GLuint)(bindingindex))\n}",
"func VertexArrayVertexBuffer(vaobj uint32, bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tsyscall.Syscall6(gpVertexArrayVertexBuffer, 5, uintptr(vaobj), uintptr(bindingindex), uintptr(buffer), uintptr(offset), uintptr(stride), 0)\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tsyscall.Syscall(gpVertexArrayElementBuffer, 2, uintptr(vaobj), uintptr(buffer), 0)\n}",
"func VertexArrayBindingDivisor(vaobj uint32, bindingindex uint32, divisor uint32) {\n\tsyscall.Syscall(gpVertexArrayBindingDivisor, 3, uintptr(vaobj), uintptr(bindingindex), uintptr(divisor))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tsyscall.Syscall6(gpBindVertexBuffer, 4, uintptr(bindingindex), uintptr(buffer), uintptr(offset), uintptr(stride), 0, 0)\n}",
"func (va *VertexArray) SetLayout(layout VertexLayout) {\n\tif len(va.layout.layout) != 0 {\n\t\treturn\n\t}\n\n\tva.layout = layout\n\n\t// generate and bind the vertex array\n\tgl.GenVertexArrays(1, &va.vao) // generates the vertex array (or multiple)\n\tgl.BindVertexArray(va.vao) // binds the vertex array\n\n\t// make vertex array pointer attributes\n\t// offset is the offset in bytes to the first attribute\n\toffset := 0\n\n\t// calculate vertex stride\n\tstride := 0\n\tfor _, elem := range va.layout.layout {\n\t\tstride += elem.getByteSize()\n\n\t}\n\n\t// Vertex Buffer Object\n\tgl.GenBuffers(1, &va.vbo) // generates the buffer (or multiple)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, va.vbo)\n\n\tfor i, elem := range va.layout.layout {\n\n\t\t// define an array of generic vertex attribute data\n\t\t// index, size, type, normalized, stride of vertex (in bytes), pointer (offset)\n\t\t// point positions\n\t\tgl.VertexAttribPointer(uint32(i), int32(elem.getSize()),\n\t\t\telem.getGLType(), false, int32(stride), gl.PtrOffset(offset))\n\t\tgl.EnableVertexAttribArray(uint32(i))\n\t\toffset += elem.getByteSize()\n\t}\n\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n\tC.glowVertexAttribBinding(gpVertexAttribBinding, (C.GLuint)(attribindex), (C.GLuint)(bindingindex))\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n\tC.glowVertexAttribBinding(gpVertexAttribBinding, (C.GLuint)(attribindex), (C.GLuint)(bindingindex))\n}",
"func makeVao(points []float32) uint32 {\n\tvar vbo uint32\n\tgl.GenBuffers(1, &vbo)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(gl.ARRAY_BUFFER, 4*len(points), gl.Ptr(points), gl.STATIC_DRAW)\n\n\tvar vao uint32\n\tgl.GenVertexArrays(1, &vao)\n\tgl.BindVertexArray(vao)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tvar offset int = 6 * 4\n\tgl.VertexAttribPointer(0, 2, gl.FLOAT, false, 8*4, gl.PtrOffset(offset))\n\tgl.EnableVertexAttribArray(0)\n\t//gl.VertexAttribPointer(0, 3, gl.FLOAT, false, 0, nil)\n\n\treturn vao\n}",
"func makeVao(data []float32) uint32 {\n\tvar vbo uint32\n\tgl.GenBuffers(1, &vbo)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(gl.ARRAY_BUFFER, 4*len(data), gl.Ptr(data), gl.STATIC_DRAW)\n\n\tvar vao uint32\n\tgl.GenVertexArrays(1, &vao)\n\tgl.BindVertexArray(vao)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tvar offset int\n\n\t// position attribute\n\tgl.VertexAttribPointer(0, 3, gl.FLOAT, false, 8*4, gl.PtrOffset(offset))\n\tgl.EnableVertexAttribArray(0)\n\toffset += 3 * 4\n\n\t// color attribute\n\tgl.VertexAttribPointer(1, 3, gl.FLOAT, false, 8*4, gl.PtrOffset(offset))\n\tgl.EnableVertexAttribArray(1)\n\toffset += 3 * 4\n\n\t// texture coord attribute\n\tgl.VertexAttribPointer(2, 2, gl.FLOAT, false, 8*4, gl.PtrOffset(offset))\n\tgl.EnableVertexAttribArray(2)\n\toffset += 2 * 4\n\n\treturn vao\n}",
"func VertexArrayBindingDivisor(vaobj uint32, bindingindex uint32, divisor uint32) {\n\tC.glowVertexArrayBindingDivisor(gpVertexArrayBindingDivisor, (C.GLuint)(vaobj), (C.GLuint)(bindingindex), (C.GLuint)(divisor))\n}",
"func VertexArrayBindingDivisor(vaobj uint32, bindingindex uint32, divisor uint32) {\n\tC.glowVertexArrayBindingDivisor(gpVertexArrayBindingDivisor, (C.GLuint)(vaobj), (C.GLuint)(bindingindex), (C.GLuint)(divisor))\n}",
"func VertexArrayVertexBuffer(vaobj uint32, bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowVertexArrayVertexBuffer(gpVertexArrayVertexBuffer, (C.GLuint)(vaobj), (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func VertexArrayVertexBuffer(vaobj uint32, bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowVertexArrayVertexBuffer(gpVertexArrayVertexBuffer, (C.GLuint)(vaobj), (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func (va *VertexArray) SetData(data []float32) (err error) {\n\tgl.BindBuffer(gl.ARRAY_BUFFER, va.vbo) // tells OpenGL what kind of buffer this is\n\n\t// BufferData assigns data to the buffer.\n\t// there can only be one ARRAY_BUFFER bound at any time, so OpenGL knows which buffer we mean if we\n\t// tell it what type of buffer it is.\n\t//\t\t\t type\t\t\t size (in bytes) pointer to data\tusage\n\tgl.BufferData(gl.ARRAY_BUFFER, len(data)*4, gl.Ptr(data), gl.STATIC_DRAW)\n\n\treturn\n}",
"func VertexArrayVertexBuffers(vaobj uint32, first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tsyscall.Syscall6(gpVertexArrayVertexBuffers, 6, uintptr(vaobj), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(strides)))\n}",
"func makeVao(points []float32) uint32 {\n\tvar vbo uint32\n\tgl.GenBuffers(1, &vbo)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(gl.ARRAY_BUFFER, 4*len(points), gl.Ptr(points), gl.STATIC_DRAW)\n\n\tvar vao uint32\n\tgl.GenVertexArrays(1, &vao)\n\tgl.BindVertexArray(vao)\n\tgl.EnableVertexAttribArray(0)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.VertexAttribPointer(0, 3, gl.FLOAT, false, 0, nil)\n\n\treturn vao\n}",
"func (g *Geometry) Initialize() {\n\tif g.handle != 0 {\n\t\treturn\n\t}\n\n\t// Determine if geometry has an index buffer\n\tif g.IndexBuffer.ComponentType != 0 {\n\t\tg.hasIndices = true\n\t}\n\n\t// Calculate number of indices\n\tif g.hasIndices {\n\t\tcompSize := componentSizeFromType(g.IndexBuffer.ComponentType)\n\t\tg.numIndices = int32(len(g.IndexBuffer.Data)) / compSize\n\t} else {\n\t\tcompSize := componentSizeFromType(g.PositionBuffer.ComponentType)\n\t\tg.numIndices = int32(len(g.PositionBuffer.Data)) / compSize\n\t}\n\n\t// Set buffer targets\n\tg.IndexBuffer.target = gl.ELEMENT_ARRAY_BUFFER\n\tg.PositionBuffer.target = gl.ARRAY_BUFFER\n\tg.NormalBuffer.target = gl.ARRAY_BUFFER\n\tg.TexCoordBuffer.target = gl.ARRAY_BUFFER\n\tg.TangentBuffer.target = gl.ARRAY_BUFFER\n\n\t// Initialize buffers\n\tg.IndexBuffer.initialize()\n\tg.PositionBuffer.initialize()\n\tg.NormalBuffer.initialize()\n\tg.TexCoordBuffer.initialize()\n\tg.TangentBuffer.initialize()\n\n\t// Create and bind VertexArray\n\tgl.GenVertexArrays(1, &g.handle)\n\tgl.BindVertexArray(g.handle)\n\n\t// Bind/enable buffers within the VertexArray\n\tg.IndexBuffer.bind()\n\tg.PositionBuffer.enable(0)\n\tg.NormalBuffer.enable(1)\n\tg.TexCoordBuffer.enable(2)\n\tg.TangentBuffer.enable(3)\n\n\tgl.BindVertexArray(0)\n}",
"func VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, pointer unsafe.Pointer) {\n C.glowVertexAttribPointer(gpVertexAttribPointer, (C.GLuint)(index), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLsizei)(stride), pointer)\n}",
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tC.glowVertexArrayElementBuffer(gpVertexArrayElementBuffer, (C.GLuint)(vaobj), (C.GLuint)(buffer))\n}",
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tC.glowVertexArrayElementBuffer(gpVertexArrayElementBuffer, (C.GLuint)(vaobj), (C.GLuint)(buffer))\n}",
"func makeVao(points []float32) uint32 {\n\tvar vbo uint32\n\tvar vao uint32\n\tvar stride int32\n\n\t//points only 9\n\t//points and colors 18\n\tstride = int32(4 * len(points) / 3)\n\tprintln(\"stride: \", stride)\n\n\tgl.GenVertexArrays(1, &vao)\n\tgl.GenBuffers(1, &vbo)\n\tgl.BindVertexArray(vao)\n\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(gl.ARRAY_BUFFER, 4*len(points), gl.Ptr(points), gl.STATIC_DRAW)\n\n\tgl.EnableVertexAttribArray(0)\n\tgl.VertexAttribPointer(0, 3, gl.FLOAT, false, stride, gl.PtrOffset(0))\n\tprintln(\"triangle length: \", len(points))\n\tif len(points) >= 18 {\n\t\tlog.Println(\"In if\")\n\t\tgl.EnableVertexAttribArray(1)\n\t\tgl.VertexAttribPointer(1, 3, gl.FLOAT, false, stride, gl.PtrOffset(3*4))\n\t}\n\treturn vao\n}",
"func updateTextureVbo(data []float32, vbo uint32) {\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferSubData(gl.ARRAY_BUFFER, 0, len(data)*4, gl.Ptr(data))\n\tgl.BindBuffer(gl.ARRAY_BUFFER, 0)\n}",
"func (vao *VAO) AddVertexBuffer(vbo *vbo.VBO) {\n\tvao.vertexBuffers = append(vao.vertexBuffers, vbo)\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *int8) {\n C.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func makeVao(vertices []float32, textureCoords []float32) uint32 {\n\tvbos := make([]uint32, 2)\n\t// vertices\n\tgl.GenBuffers(1, &vbos[0])\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbos[0])\n\tgl.BufferData(gl.ARRAY_BUFFER, 4*len(vertices), gl.Ptr(vertices), gl.STATIC_DRAW)\n\n\t// texture coords\n\ttexInvertY(textureCoords)\n\tgl.GenBuffers(1, &vbos[1])\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbos[1])\n\tgl.BufferData(gl.ARRAY_BUFFER, 4*len(textureCoords), gl.Ptr(textureCoords), gl.STATIC_DRAW)\n\n\t// create vao\n\tvar vao uint32\n\tgl.GenVertexArrays(1, &vao)\n\tgl.BindVertexArray(vao)\n\n\t// bind vertices\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbos[0])\n\tgl.VertexAttribPointer(0, 3, gl.FLOAT, false, 0, nil)\n\tgl.EnableVertexAttribArray(0)\n\n\t// bind textures\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbos[1])\n\tgl.VertexAttribPointer(1, 2, gl.FLOAT, false, 0, nil)\n\tgl.EnableVertexAttribArray(1)\n\n\treturn vao\n}",
"func VertexArrayAttribFormat(vaobj uint32, attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tsyscall.Syscall6(gpVertexArrayAttribFormat, 6, uintptr(vaobj), uintptr(attribindex), uintptr(size), uintptr(xtype), boolToUintptr(normalized), uintptr(relativeoffset))\n}",
"func VertexArrayVertexBuffers(vaobj uint32, first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowVertexArrayVertexBuffers(gpVertexArrayVertexBuffers, (C.GLuint)(vaobj), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func VertexArrayVertexBuffers(vaobj uint32, first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowVertexArrayVertexBuffers(gpVertexArrayVertexBuffers, (C.GLuint)(vaobj), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func initFontVbo() {\n\tvar vertexAttributes = make([]float32, 5*6*len(charDatas))\n\ti := 0\n\tfor _, charData := range charDatas {\n\t\ttop := float32(charData.ty+charData.h) / 256\n\t\tbottom := float32(charData.ty) / 256\n\t\tright := float32(charData.tx+charData.w) / 256\n\t\tleft := float32(charData.tx) / 256\n\n\t\tw := float32(charData.w) / 256\n\t\th := float32(charData.h) / 256\n\n\t\t// tri 1\n\t\tvertexAttributes[i] = w\n\t\tvertexAttributes[i+1] = h\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = right\n\t\tvertexAttributes[i+4] = bottom\n\t\ti += 5\n\n\t\tvertexAttributes[i] = w\n\t\tvertexAttributes[i+1] = 0\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = right\n\t\tvertexAttributes[i+4] = top\n\t\ti += 5\n\n\t\tvertexAttributes[i] = 0\n\t\tvertexAttributes[i+1] = h\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = left\n\t\tvertexAttributes[i+4] = bottom\n\t\ti += 5\n\n\t\t// tri 2\n\t\tvertexAttributes[i] = w\n\t\tvertexAttributes[i+1] = 0\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = right\n\t\tvertexAttributes[i+4] = top\n\t\ti += 5\n\n\t\tvertexAttributes[i] = 0\n\t\tvertexAttributes[i+1] = 0\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = left\n\t\tvertexAttributes[i+4] = top\n\t\ti += 5\n\n\t\tvertexAttributes[i] = 0\n\t\tvertexAttributes[i+1] = h\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = left\n\t\tvertexAttributes[i+4] = bottom\n\t\ti += 5\n\t}\n\n\tgl.GenBuffers(1, &vbo)\n\tgl.GenVertexArrays(1, &vao)\n\tgl.BindVertexArray(vao)\n\tgl.EnableVertexAttribArray(0)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(\n\t\tgl.ARRAY_BUFFER,\n\t\t4*len(vertexAttributes),\n\t\tgl.Ptr(vertexAttributes),\n\t\tgl.STATIC_DRAW,\n\t)\n\tgl.VertexAttribPointer(0, 3, gl.FLOAT, false, 5*4, gl.PtrOffset(0))\n\tgl.EnableVertexAttribArray(0)\n\tgl.VertexAttribPointer(1, 2, gl.FLOAT, false, 5*4, gl.PtrOffset(4*3))\n\tgl.EnableVertexAttribArray(1)\n\t//unbind\n\tgl.BindVertexArray(0)\n\n}",
"func (va *VertexArray) SetIndexData(data []uint32) {\n\t// Index Buffer Object\n\tgl.GenBuffers(1, &va.ibo) // generates the buffer (or multiple)\n\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, va.ibo) // tells OpenGL what kind of buffer this is\n\n\t// BufferData assigns data to the buffer.\n\tgl.BufferData(gl.ELEMENT_ARRAY_BUFFER, len(data)*4, gl.Ptr(data), gl.STATIC_DRAW)\n\n\tva.vertices = len(data)\n}",
"func (vao VertexArrayObject) VertexAttribPointer(attrIndex int, attrType Type, normalized bool, byteStride int, byteOffset int) {\n\tglx := vao.glx\n\tbufferType, bufferItemsPerVertex, err := attrType.asAttribute()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"converting attribute type %s to attribute: %w\", attrType, err))\n\t}\n\tglx.constants.VertexAttribPointer(\n\t\tglx.factory.Number(float64(attrIndex)),\n\t\tglx.factory.Number(float64(bufferItemsPerVertex)),\n\t\tglx.typeConverter.ToJs(bufferType),\n\t\tglx.factory.Boolean(normalized),\n\t\tglx.factory.Number(float64(byteStride)),\n\t\tglx.factory.Number(float64(byteOffset)),\n\t)\n}",
"func DrawElementsBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, basevertex int32) {\n C.glowDrawElementsBaseVertex(gpDrawElementsBaseVertex, (C.GLenum)(mode), (C.GLsizei)(count), (C.GLenum)(xtype), indices, (C.GLint)(basevertex))\n}",
"func (g *Graph) init(numVertex int) {\n if g.vertex == nil {\n g.vertex = make([]*NodeG, numVertex)\n }\n}",
"func EnableVertexArrayAttrib(vaobj uint32, index uint32) {\n\tC.glowEnableVertexArrayAttrib(gpEnableVertexArrayAttrib, (C.GLuint)(vaobj), (C.GLuint)(index))\n}",
"func EnableVertexArrayAttrib(vaobj uint32, index uint32) {\n\tC.glowEnableVertexArrayAttrib(gpEnableVertexArrayAttrib, (C.GLuint)(vaobj), (C.GLuint)(index))\n}",
"func (shape *Shape) PolyShapeSetVertsRaw(verts []Vect) {\n\tC.cpPolyShapeSetVertsRaw(\n\t\t(*C.cpShape)(unsafe.Pointer(shape)),\n\t\tC.int(len(verts)),\n\t\t(*C.cpVect)(unsafe.Pointer(&verts[0])),\n\t)\n}",
"func VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, pointer unsafe.Pointer) {\n\tsyscall.Syscall6(gpVertexAttribPointer, 6, uintptr(index), uintptr(size), uintptr(xtype), boolToUintptr(normalized), uintptr(stride), uintptr(pointer))\n}",
"func init_vertex(x, y int) Vertex {\n return Vertex{x, y}\n}",
"func (s *BaseAspidaListener) EnterArray(ctx *ArrayContext) {}",
"func (vao *VAO) Render() {\n\tgl.BindVertexArray(vao.handle)\n\tif vao.indexBuffer != nil {\n\t\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, vao.indexBuffer.GetHandle())\n\t\tgl.DrawElements(vao.mode, vao.indexBuffer.Len(), gl.UNSIGNED_SHORT, nil)\n\t\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, 0)\n\t} else {\n\t\tgl.DrawArrays(vao.mode, 0, vao.vertexBuffers[0].Len())\n\t}\n\tgl.BindVertexArray(0)\n}",
"func VertexArrayAttribFormat(vaobj uint32, attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tC.glowVertexArrayAttribFormat(gpVertexArrayAttribFormat, (C.GLuint)(vaobj), (C.GLuint)(attribindex), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLuint)(relativeoffset))\n}",
"func VertexArrayAttribFormat(vaobj uint32, attribindex uint32, size int32, xtype uint32, normalized bool, relativeoffset uint32) {\n\tC.glowVertexArrayAttribFormat(gpVertexArrayAttribFormat, (C.GLuint)(vaobj), (C.GLuint)(attribindex), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLuint)(relativeoffset))\n}",
"func DrawElementsInstancedBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, instancecount int32, basevertex int32) {\n C.glowDrawElementsInstancedBaseVertex(gpDrawElementsInstancedBaseVertex, (C.GLenum)(mode), (C.GLsizei)(count), (C.GLenum)(xtype), indices, (C.GLsizei)(instancecount), (C.GLint)(basevertex))\n}",
"func VertexPointer(size int32, xtype uint32, stride int32, pointer unsafe.Pointer) {\n C.glowVertexPointer(gpVertexPointer, (C.GLint)(size), (C.GLenum)(xtype), (C.GLsizei)(stride), pointer)\n}",
"func EnableVertexArrayAttrib(vaobj uint32, index uint32) {\n\tsyscall.Syscall(gpEnableVertexArrayAttrib, 2, uintptr(vaobj), uintptr(index), 0)\n}",
"func (g *Graph) init(numVertex int) {\n if g.vertex == nil {\n g.vertex = make([]*NodeG, numVertex+1)\n }\n}",
"func GenVertexArrays(n int32, arrays *uint32) {\n\tsyscall.Syscall(gpGenVertexArrays, 2, uintptr(n), uintptr(unsafe.Pointer(arrays)), 0)\n}",
"func EnableVertexAttribArray(index uint32) {\n C.glowEnableVertexAttribArray(gpEnableVertexAttribArray, (C.GLuint)(index))\n}",
"func GenVertexArrays(n int32, arrays *uint32) {\n C.glowGenVertexArrays(gpGenVertexArrays, (C.GLsizei)(n), (*C.GLuint)(unsafe.Pointer(arrays)))\n}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n C.glowMultiDrawElementsBaseVertex(gpMultiDrawElementsBaseVertex, (C.GLenum)(mode), (*C.GLsizei)(unsafe.Pointer(count)), (C.GLenum)(xtype), indices, (C.GLsizei)(drawcount), (*C.GLint)(unsafe.Pointer(basevertex)))\n}",
"func (va *VertexArray) Draw() {\n\tgl.BindVertexArray(va.vao)\n\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, va.ibo)\n\tgl.DrawElements(gl.TRIANGLES, int32(va.vertices), gl.UNSIGNED_INT, nil)\n}",
"func MakeVertexBufferObject(sizeBytes int, data unsafe.Pointer) VertexBufferObject {\n\tvar vbo uint32\n\tgl.GenBuffers(1, &vbo)\n\tif sizeBytes > 0 {\n\t\tgl.NamedBufferData(vbo, sizeBytes, data, gl.DYNAMIC_DRAW)\n\t}\n\treturn VertexBufferObject(vbo)\n}",
"func MakeVertexArray(points []float32) uint32 {\n\tvar vbo uint32\n\tgl.GenBuffers(1, &vbo)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(gl.ARRAY_BUFFER, 4*len(points), gl.Ptr(points), gl.STATIC_DRAW)\n\n\tvar vao uint32\n\tgl.GenVertexArrays(1, &vao)\n\tgl.BindVertexArray(vao)\n\tgl.EnableVertexAttribArray(0)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.VertexAttribPointer(0, 3, gl.FLOAT, false, 0, nil)\n\n\treturn vao\n}",
"func VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, pointer unsafe.Pointer) {\n\tC.glowVertexAttribPointer(gpVertexAttribPointer, (C.GLuint)(index), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLsizei)(stride), pointer)\n}",
"func VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, pointer unsafe.Pointer) {\n\tC.glowVertexAttribPointer(gpVertexAttribPointer, (C.GLuint)(index), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLsizei)(stride), pointer)\n}",
"func DrawElementsBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, basevertex int32) {\n\tsyscall.Syscall6(gpDrawElementsBaseVertex, 5, uintptr(mode), uintptr(count), uintptr(xtype), uintptr(indices), uintptr(basevertex), 0)\n}",
"func BindAttribLocation(program uint32, index uint32, name *int8) {\n C.glowBindAttribLocation(gpBindAttribLocation, (C.GLuint)(program), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func PackVertex(v *gdbi.Vertex) map[string]interface{} {\n\treturn map[string]interface{}{\n\t\t\"gid\": v.ID,\n\t\t\"label\": v.Label,\n\t\t\"data\": v.Data,\n\t}\n}",
"func VertexAttrib1fv(index uint32, value []float32) {\n\tgl.VertexAttrib1fv(index, &value[0])\n}",
"func (vao *VAO) BuildBuffers(shaderProgramHandle uint32) {\n\tgl.BindVertexArray(vao.handle)\n\tfor _, vbo := range vao.vertexBuffers {\n\t\tvbo.BuildVertexAttributes(shaderProgramHandle)\n\t}\n\tgl.BindVertexArray(0)\n}",
"func (s *BaseConcertoListener) EnterArrayType(ctx *ArrayTypeContext) {}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n\tsyscall.Syscall6(gpMultiDrawElementsBaseVertex, 6, uintptr(mode), uintptr(unsafe.Pointer(count)), uintptr(xtype), uintptr(unsafe.Pointer(indices)), uintptr(drawcount), uintptr(unsafe.Pointer(basevertex)))\n}",
"func (s *BasePlSqlParserListener) EnterVarray_item(ctx *Varray_itemContext) {}",
"func packSFzVertex(v []FzVertex, ptr0 *C.fz_vertex) {\n\tconst m = 0x7fffffff\n\tfor i0 := range v {\n\t\tptr1 := (*(*[m / sizeOfFzVertexValue]C.fz_vertex)(unsafe.Pointer(ptr0)))[i0]\n\t\tv[i0] = *NewFzVertexRef(unsafe.Pointer(&ptr1))\n\t}\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tC.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tC.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func ArrayElement(i int32) {\n C.glowArrayElement(gpArrayElement, (C.GLint)(i))\n}",
"func GetVertexAttribPointerv(index uint32, pname uint32, pointer *unsafe.Pointer) {\n C.glowGetVertexAttribPointerv(gpGetVertexAttribPointerv, (C.GLuint)(index), (C.GLenum)(pname), pointer)\n}",
"func (native *OpenGL) VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, pointer unsafe.Pointer) {\n\tgl.VertexAttribPointer(index, size, xtype, normalized, stride, pointer)\n}",
"func (m *Manager) AddArrayAccessQuad() {\n\tindex := m.operands.Pop()\n\tarray := m.operands.Pop()\n\n\tdir, err := memory.Manager.GetNextAddr(constants.TYPEINT, memory.Temp)\n\tif err != nil {\n\t\tlog.Fatalf(\"Error: (GenerateQuad) %s\\n\", err)\n\t}\n\tvar result Element\n\t// validate if array is an attribute from an instance, set self dir if so\n\tif strings.Contains(array.ID(), \"self_\") {\n\t\tstrElements := strings.Split(array.ID(), \"_\")\n\t\tptrID := fmt.Sprintf(\"self_%s_ptr_%s\", strElements[1], m.getNextAvail())\n\t\tresult = NewElement(dir, ptrID, array.Type(), \"\")\n\t} else {\n\t\tresult = NewElement(dir, \"ptr_\"+m.getNextAvail(), array.Type(), \"\")\n\t}\n\n\tq := Quad{CALCDIR, array, index, result}\n\tm.quads = append(m.quads, q)\n\n\tm.operands.Push(result)\n}",
"func Draw(bp *BoundProgram) {\n\t// TODO might still need the buffer contents to be pushed:\n\t/*\n\t\tgl.BufferData(gl.ARRAY_BUFFER, len(cube_vertices)*4, gl.Ptr(cube_vertices), gl.STATIC_DRAW)\n\t*/\n\t// TODO might still need textures to be bound for the call:\n\t/*\n\t\tgl.ActiveTexture(gl.TEXTURE0)\n\t\tgl.BindTexture(gl.TEXTURE_2D, tCache.Get(\"placeholder\"))\n\t*/\n\t// TODO draw calls are themselves still specialized and param'd:\n\t/*\n\t\tgl.DrawArrays(gl.TRIANGLES, 0, 6*2*3)\n\t*/\n}",
"func GenerateVertexArray(n int32) uint32 {\n\tvar vao uint32\n\t//gl.GenVertexArraysAPPLE(n, &vao)\n\tgl.GenVertexArrays(n, &vao)\n\t/*if err := gl.GetError(); err != 0 {\n\t\tlog.Println(\"Error in GenVertexArrays!\", err)\n\t}*/\n\n\treturn vao\n}",
"func VertexAttrib1fv(index Uint, v []Float) {\n\tcindex, _ := (C.GLuint)(index), cgoAllocsUnknown\n\tcv, _ := (*C.GLfloat)(unsafe.Pointer((*sliceHeader)(unsafe.Pointer(&v)).Data)), cgoAllocsUnknown\n\tC.glVertexAttrib1fv(cindex, cv)\n}",
"func (b *Binding) Set(buf uint32) {\n\tgl.BindBufferBase(gl.SHADER_STORAGE_BUFFER, b.uint32, buf)\n}",
"func BindBuffer(target Enum, b Buffer) {\n\tgl.BindBuffer(uint32(target), b.Value)\n}",
"func BindFragDataLocation(program uint32, color uint32, name *int8) {\n C.glowBindFragDataLocation(gpBindFragDataLocation, (C.GLuint)(program), (C.GLuint)(color), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func DrawElementsInstancedBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, instancecount int32, basevertex int32) {\n\tsyscall.Syscall6(gpDrawElementsInstancedBaseVertex, 6, uintptr(mode), uintptr(count), uintptr(xtype), uintptr(indices), uintptr(instancecount), uintptr(basevertex))\n}",
"func (s *BasePlSqlParserListener) EnterVarray_storage_clause(ctx *Varray_storage_clauseContext) {}",
"func (buffer Buffer) Bind(target gl.Enum) {\n\tgl.BindBuffer(gl.Enum(target), gl.Uint(buffer))\n}",
"func VertexPointer(size int32, xtype uint32, stride int32, pointer unsafe.Pointer) {\n\tsyscall.Syscall6(gpVertexPointer, 4, uintptr(size), uintptr(xtype), uintptr(stride), uintptr(pointer), 0, 0)\n}",
"func (g *GLTF) loadAttributes(geom *geometry.Geometry, attributes map[string]int, indices math32.ArrayU32) error {\n\n\t// Indices of buffer views\n\tinterleavedVBOs := make(map[int]*gls.VBO, 0)\n\n\t// Load primitive attributes\n\tfor name, aci := range attributes {\n\t\taccessor := g.Accessors[aci]\n\n\t\t// Validate that accessor is compatible with attribute\n\t\terr := g.validateAccessorAttribute(accessor, name)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// Load data and add it to geometry's VBO\n\t\tif g.isInterleaved(accessor) {\n\t\t\tbvIdx := *accessor.BufferView\n\t\t\t// Check if we already loaded this buffer view\n\t\t\tvbo, ok := interleavedVBOs[bvIdx]\n\t\t\tif ok {\n\t\t\t\t// Already created VBO for this buffer view\n\t\t\t\t// Add attribute with correct byteOffset\n\t\t\t\tg.addAttributeToVBO(vbo, name, uint32(*accessor.ByteOffset))\n\t\t\t} else {\n\t\t\t\t// Load data and create vbo\n\t\t\t\tbuf, err := g.loadBufferView(bvIdx)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\t//\n\t\t\t\t// TODO: BUG HERE\n\t\t\t\t// If buffer view has accessors with different component type then this will have a read alignment problem!\n\t\t\t\t//\n\t\t\t\tdata, err := g.bytesToArrayF32(buf, accessor.ComponentType, accessor.Count*TypeSizes[accessor.Type])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tvbo := gls.NewVBO(data)\n\t\t\t\tg.addAttributeToVBO(vbo, name, 0)\n\t\t\t\t// Save reference to VBO keyed by index of the buffer view\n\t\t\t\tinterleavedVBOs[bvIdx] = vbo\n\t\t\t\t// Add VBO to geometry\n\t\t\t\tgeom.AddVBO(vbo)\n\t\t\t}\n\t\t} else {\n\t\t\tbuf, err := g.loadAccessorBytes(accessor)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdata, err := g.bytesToArrayF32(buf, accessor.ComponentType, accessor.Count*TypeSizes[accessor.Type])\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tvbo := gls.NewVBO(data)\n\t\t\tg.addAttributeToVBO(vbo, name, 0)\n\t\t\t// Add VBO to geometry\n\t\t\tgeom.AddVBO(vbo)\n\t\t}\n\t}\n\n\t// Set indices\n\tif len(indices) > 0 {\n\t\tgeom.SetIndices(indices)\n\t}\n\n\treturn nil\n}",
"func VertexAttrib1fv(dst Attrib, src []float32) {\n\tgl.VertexAttrib1fv(uint32(dst.Value), &src[0])\n}",
"func (spriteBatch *SpriteBatch) addv(verts []float32, mat *mgl32.Mat4, index int) error {\n\tif index == -1 && spriteBatch.count >= spriteBatch.size {\n\t\treturn fmt.Errorf(\"Sprite Batch Buffer Full\")\n\t}\n\n\tsprite := make([]float32, 8*4)\n\tfor i := 0; i < 32; i += 8 {\n\t\tj := (i / 2)\n\t\tsprite[i+0] = (mat[0] * verts[j+0]) + (mat[4] * verts[j+1]) + mat[12]\n\t\tsprite[i+1] = (mat[1] * verts[j+0]) + (mat[5] * verts[j+1]) + mat[13]\n\t\tsprite[i+2] = verts[j+2]\n\t\tsprite[i+3] = verts[j+3]\n\t\tsprite[i+4] = spriteBatch.color[0]\n\t\tsprite[i+5] = spriteBatch.color[1]\n\t\tsprite[i+6] = spriteBatch.color[2]\n\t\tsprite[i+7] = spriteBatch.color[3]\n\t}\n\n\tif index == -1 {\n\t\tspriteBatch.arrayBuf.fill(spriteBatch.count*4*8, sprite)\n\t\tspriteBatch.count++\n\t} else {\n\t\tspriteBatch.arrayBuf.fill(index*4*8, sprite)\n\t}\n\n\treturn nil\n}"
] | [
"0.6930566",
"0.64372987",
"0.631026",
"0.61983395",
"0.6163665",
"0.615646",
"0.613428",
"0.608097",
"0.59298235",
"0.5916304",
"0.58182645",
"0.58022517",
"0.57355267",
"0.57355267",
"0.5697444",
"0.5697444",
"0.5675388",
"0.56535226",
"0.56240255",
"0.5568131",
"0.55644774",
"0.55644774",
"0.5557026",
"0.55461437",
"0.55392206",
"0.55392206",
"0.55367863",
"0.55367863",
"0.5533228",
"0.5515788",
"0.5396138",
"0.53694576",
"0.53684604",
"0.53020024",
"0.53020024",
"0.5294789",
"0.5285507",
"0.5284588",
"0.5257028",
"0.52537894",
"0.52219707",
"0.52140677",
"0.52140677",
"0.52057487",
"0.5201724",
"0.5186281",
"0.5172159",
"0.5151583",
"0.5139731",
"0.5139731",
"0.5133171",
"0.51154244",
"0.5114543",
"0.5111939",
"0.51035976",
"0.50828254",
"0.50828254",
"0.5061899",
"0.5055822",
"0.50545245",
"0.50483507",
"0.50190187",
"0.5015987",
"0.49997747",
"0.49993148",
"0.49950385",
"0.49786785",
"0.497771",
"0.4976559",
"0.4976559",
"0.49752432",
"0.49731708",
"0.4971929",
"0.49716067",
"0.49600896",
"0.49389753",
"0.49263334",
"0.49259603",
"0.49028912",
"0.48839217",
"0.48839217",
"0.4877355",
"0.48639894",
"0.48566574",
"0.48522004",
"0.48322412",
"0.48266268",
"0.4792055",
"0.4784045",
"0.47839493",
"0.47724608",
"0.47616717",
"0.47612196",
"0.47529146",
"0.47398284",
"0.47262412",
"0.47183964",
"0.4709469",
"0.47087625"
] | 0.64410067 | 2 |
bind a buffer to a vertex buffer bind point | func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {
C.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n C.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tsyscall.Syscall6(gpBindVertexBuffer, 4, uintptr(bindingindex), uintptr(buffer), uintptr(offset), uintptr(stride), 0, 0)\n}",
"func (b *VBO) Bind(m *Mesh) {\n\tif !b.genBound {\n\t\tpanic(\"A VBO buffer ID has not been generated. Call GenBuffer first.\")\n\t}\n\n\tgl.BindBuffer(gl.ARRAY_BUFFER, b.vboID)\n\tfloatSize := int(unsafe.Sizeof(float32(0)))\n\tgl.BufferData(gl.ARRAY_BUFFER, len(m.Vertices)*floatSize, gl.Ptr(m.Vertices), gl.STATIC_DRAW)\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tsyscall.Syscall6(gpBindVertexBuffers, 5, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(strides)), 0)\n}",
"func (gl *WebGL) BindBuffer(target GLEnum, buffer WebGLBuffer) {\n\tgl.context.Call(\"bindBuffer\", target, buffer)\n}",
"func BindBuffer(target uint32, buffer uint32) {\n C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBuffer(target Enum, b Buffer) {\n\tgl.BindBuffer(uint32(target), b.Value)\n}",
"func (native *OpenGL) BindBuffer(target uint32, buffer uint32) {\n\tgl.BindBuffer(target, buffer)\n}",
"func (debugging *debuggingOpenGL) BindBuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindBuffer\", target, buffer)\n\tdebugging.gl.BindBuffer(target, buffer)\n\tdebugging.recordExit(\"BindBuffer\")\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBuffer, 2, uintptr(target), uintptr(buffer), 0)\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n C.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func (buffer Buffer) Bind(target gl.Enum) {\n\tgl.BindBuffer(gl.Enum(target), gl.Uint(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBufferBase, 3, uintptr(target), uintptr(index), uintptr(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n\tC.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func (buffer Buffer) BindBufferBase(target gl.Enum, index uint) {\n\tgl.BindBufferBase(gl.Enum(target), gl.Uint(index), gl.Uint(buffer))\n}",
"func (w *windowImpl) bindBackBuffer() {\n\t// w.mu.Lock()\n\t// size := w.Sz\n\t// w.mu.Unlock()\n\t//\n\tw.backBufferBound = true\n\t// gl.BindFramebuffer(gl.FRAMEBUFFER, 0)\n\t// gl.Viewport(0, 0, int32(size.X), int32(size.Y))\n}",
"func (b *Binding) Set(buf uint32) {\n\tgl.BindBufferBase(gl.SHADER_STORAGE_BUFFER, b.uint32, buf)\n}",
"func BindBuffer(target Enum, buffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcbuffer, _ := (C.GLuint)(buffer), cgoAllocsUnknown\n\tC.glBindBuffer(ctarget, cbuffer)\n}",
"func (geom Geometry) Buffer(distance float64, segments int) Geometry {\n\tnewGeom := C.OGR_G_Buffer(geom.cval, C.double(distance), C.int(segments))\n\treturn Geometry{newGeom}\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n C.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n C.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func (r Ruler) BufferPoint(p Point, buffer float64) Bbox {\n\tv := buffer / r.kx\n\th := buffer / r.ky\n\n\treturn Bbox{\n\t\tp[0] - h,\n\t\tp[1] - v,\n\t\tp[0] + h,\n\t\tp[1] + v,\n\t}\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tsyscall.Syscall6(gpBindBuffersBase, 4, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), 0, 0)\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n\tsyscall.Syscall(gpVertexAttribBinding, 2, uintptr(attribindex), uintptr(bindingindex), 0)\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n C.glowVertexAttribBinding(gpVertexAttribBinding, (C.GLuint)(attribindex), (C.GLuint)(bindingindex))\n}",
"func (buffer Buffer) BindBufferRange(target gl.Enum, index uint, offset int, size uint) {\n\tgl.BindBufferRange(gl.Enum(target), gl.Uint(index), gl.Uint(buffer), gl.Intptr(offset), gl.Sizeiptr(size))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tC.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n\tsyscall.Syscall6(gpBindBufferRange, 5, uintptr(target), uintptr(index), uintptr(buffer), uintptr(offset), uintptr(size), 0)\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func (g *Gaffer) AddBuffer(u *Update) {\n\n\tfor _, v := range u.entities {\n\t\tg.AddEntity(v)\n\t}\n\n\tfor _, v := range u.edges {\n\t\tg.AddEdge(v)\n\t}\n\n}",
"func BindFragDataLocation(program uint32, color uint32, name *int8) {\n C.glowBindFragDataLocation(gpBindFragDataLocation, (C.GLuint)(program), (C.GLuint)(color), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func updateTextureVbo(data []float32, vbo uint32) {\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferSubData(gl.ARRAY_BUFFER, 0, len(data)*4, gl.Ptr(data))\n\tgl.BindBuffer(gl.ARRAY_BUFFER, 0)\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n\tC.glowVertexAttribBinding(gpVertexAttribBinding, (C.GLuint)(attribindex), (C.GLuint)(bindingindex))\n}",
"func VertexAttribBinding(attribindex uint32, bindingindex uint32) {\n\tC.glowVertexAttribBinding(gpVertexAttribBinding, (C.GLuint)(attribindex), (C.GLuint)(bindingindex))\n}",
"func BindVertexArray(array uint32) {\n C.glowBindVertexArray(gpBindVertexArray, (C.GLuint)(array))\n}",
"func BindFragDataLocation(program uint32, color uint32, name *uint8) {\n\tC.glowBindFragDataLocation(gpBindFragDataLocation, (C.GLuint)(program), (C.GLuint)(color), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindFragDataLocation(program uint32, color uint32, name *uint8) {\n\tC.glowBindFragDataLocation(gpBindFragDataLocation, (C.GLuint)(program), (C.GLuint)(color), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindVertexArray(vao uint32) {\n\t//gl.BindVertexArrayAPPLE(vao)\n\tgl.BindVertexArray(vao)\n}",
"func BufferInit(target Enum, size int, usage Enum) {\n\tgl.BufferData(uint32(target), size, nil, uint32(usage))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n C.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func (native *OpenGL) BindFramebuffer(target, buffer uint32) {\n\tgl.BindFramebuffer(target, buffer)\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tsyscall.Syscall6(gpBindBuffersRange, 6, uintptr(target), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(sizes)))\n}",
"func (debugging *debuggingOpenGL) BufferData(target uint32, size int, data interface{}, usage uint32) {\n\tdebugging.recordEntry(\"BufferData\", target, size, data, usage)\n\tdebugging.gl.BufferData(target, size, data, usage)\n\tdebugging.recordExit(\"BufferData\")\n}",
"func (native *OpenGL) BufferData(target uint32, size int, data interface{}, usage uint32) {\n\tdataPtr, isPtr := data.(unsafe.Pointer)\n\tif isPtr {\n\t\tgl.BufferData(target, size, dataPtr, usage)\n\t} else {\n\t\tgl.BufferData(target, size, gl.Ptr(data), usage)\n\t}\n}",
"func (debugging *debuggingOpenGL) BindFramebuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindFramebuffer\", target, buffer)\n\tdebugging.gl.BindFramebuffer(target, buffer)\n\tdebugging.recordExit(\"BindFramebuffer\")\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n C.glowBindRenderbuffer(gpBindRenderbuffer, (C.GLenum)(target), (C.GLuint)(renderbuffer))\n}",
"func (b *VBO) GenBuffer() {\n\tgl.GenBuffers(1, &b.vboID)\n\tb.genBound = true\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *int8) {\n C.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindVertexArray(array uint32) {\n\tsyscall.Syscall(gpBindVertexArray, 1, uintptr(array), 0, 0)\n}",
"func BindAttribLocation(program uint32, index uint32, name *int8) {\n C.glowBindAttribLocation(gpBindAttribLocation, (C.GLuint)(program), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func (gl *WebGL) BufferData(target GLEnum, data interface{}, usage GLEnum) {\n\tvalues := sliceToTypedArray(data)\n\tgl.context.Call(\"bufferData\", target, values, usage)\n}",
"func (du *DescriptorSet) AddBuffer(dstBinding int, dtype vk.DescriptorType, b *Buffer, offset int) {\n\tvar descriptorBufferInfo = vk.DescriptorBufferInfo{}\n\tdescriptorBufferInfo.Buffer = b.VKBuffer\n\tdescriptorBufferInfo.Offset = vk.DeviceSize(offset)\n\tdescriptorBufferInfo.Range = vk.DeviceSize(b.Size)\n\n\tvar writeDescriptorSet = vk.WriteDescriptorSet{}\n\twriteDescriptorSet.SType = vk.StructureTypeWriteDescriptorSet\n\twriteDescriptorSet.DstBinding = uint32(dstBinding) // write to the first, and only binding.\n\twriteDescriptorSet.DescriptorCount = 1 // update a single descriptor.\n\twriteDescriptorSet.DescriptorType = dtype\n\twriteDescriptorSet.PBufferInfo = []vk.DescriptorBufferInfo{descriptorBufferInfo}\n\n\tif du.VKWriteDiscriptorSet == nil {\n\t\tdu.VKWriteDiscriptorSet = make([]vk.WriteDescriptorSet, 0)\n\t}\n\tdu.VKWriteDiscriptorSet = append(du.VKWriteDiscriptorSet, writeDescriptorSet)\n}",
"func BindRenderbuffer(target GLEnum, renderbuffer Renderbuffer) {\n\tgl.BindRenderbuffer(uint32(target), uint32(renderbuffer))\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n C.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))\n}",
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tsyscall.Syscall(gpVertexArrayElementBuffer, 2, uintptr(vaobj), uintptr(buffer), 0)\n}",
"func BindVertexArray(array uint32) {\n\tC.glowBindVertexArray(gpBindVertexArray, (C.GLuint)(array))\n}",
"func BindVertexArray(array uint32) {\n\tC.glowBindVertexArray(gpBindVertexArray, (C.GLuint)(array))\n}",
"func BindRenderbuffer(target uint32, renderbuffer uint32) {\n\tsyscall.Syscall(gpBindRenderbuffer, 2, uintptr(target), uintptr(renderbuffer), 0)\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n\tC.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func (vao *VAO) AddVertexBuffer(vbo *vbo.VBO) {\n\tvao.vertexBuffers = append(vao.vertexBuffers, vbo)\n}",
"func BindAttribLocation(program uint32, index uint32, name *uint8) {\n\tC.glowBindAttribLocation(gpBindAttribLocation, (C.GLuint)(program), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindAttribLocation(program uint32, index uint32, name *uint8) {\n\tC.glowBindAttribLocation(gpBindAttribLocation, (C.GLuint)(program), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func (debugging *debuggingOpenGL) BindVertexArray(array uint32) {\n\tdebugging.recordEntry(\"BindVertexArray\", array)\n\tdebugging.gl.BindVertexArray(array)\n\tdebugging.recordExit(\"BindVertexArray\")\n}",
"func DrawBuffer(buf uint32) {\n\tsyscall.Syscall(gpDrawBuffer, 1, uintptr(buf), 0, 0)\n}",
"func NewAttachedBuffer(buffer *Buffer) *Buffer {\n result := NewEmptyBuffer()\n result.AttachBuffer(buffer)\n return result\n}",
"func BindAttribLocation(p Program, a Attrib, name string) {\n\tgl.BindAttribLocation(p.Value, uint32(a.Value), gl.Str(name+\"\\x00\"))\n}",
"func BufferData(target uint32, size int, data unsafe.Pointer, usage uint32) {\n C.glowBufferData(gpBufferData, (C.GLenum)(target), (C.GLsizeiptr)(size), data, (C.GLenum)(usage))\n}",
"func BindAttribLocation(program Program, index uint32, name string) {\n\tgl.BindAttribLocation(uint32(program), index, gl.Str(name+\"\\x00\"))\n}",
"func BufferData(target Enum, src []byte, usage Enum) {\n\tgl.BufferData(uint32(target), int(len(src)), gl.Ptr(&src[0]), uint32(usage))\n}",
"func GetBufferPointerv(target uint32, pname uint32, params *unsafe.Pointer) {\n C.glowGetBufferPointerv(gpGetBufferPointerv, (C.GLenum)(target), (C.GLenum)(pname), params)\n}",
"func GetBufferPointerv(target uint32, pname uint32, params *unsafe.Pointer) {\n\tsyscall.Syscall(gpGetBufferPointerv, 3, uintptr(target), uintptr(pname), uintptr(unsafe.Pointer(params)))\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n\tsyscall.Syscall(gpBindFramebuffer, 2, uintptr(target), uintptr(framebuffer), 0)\n}",
"func GetBufferPointerv(target uint32, pname uint32, params *unsafe.Pointer) {\n\tC.glowGetBufferPointerv(gpGetBufferPointerv, (C.GLenum)(target), (C.GLenum)(pname), params)\n}",
"func GetBufferPointerv(target uint32, pname uint32, params *unsafe.Pointer) {\n\tC.glowGetBufferPointerv(gpGetBufferPointerv, (C.GLenum)(target), (C.GLenum)(pname), params)\n}",
"func (native *OpenGL) GLBindFragDataLocation(program uint32, color uint32, name *uint8) {\n\tgl.BindFragDataLocation(program, color, name)\n}",
"func BufferData(target Enum, size Sizeiptr, data unsafe.Pointer, usage Enum) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcsize, _ := (C.GLsizeiptr)(size), cgoAllocsUnknown\n\tcdata, _ := (unsafe.Pointer)(unsafe.Pointer(data)), cgoAllocsUnknown\n\tcusage, _ := (C.GLenum)(usage), cgoAllocsUnknown\n\tC.glBufferData(ctarget, csize, cdata, cusage)\n}",
"func (gl *WebGL) NewBuffer(target GLEnum, data interface{}, usage GLEnum) WebGLBuffer {\n\tbuffer := gl.CreateBuffer()\n\tgl.BindBuffer(target, buffer)\n\tgl.BufferData(target, data, usage)\n\treturn buffer\n}",
"func Draw(bp *BoundProgram) {\n\t// TODO might still need the buffer contents to be pushed:\n\t/*\n\t\tgl.BufferData(gl.ARRAY_BUFFER, len(cube_vertices)*4, gl.Ptr(cube_vertices), gl.STATIC_DRAW)\n\t*/\n\t// TODO might still need textures to be bound for the call:\n\t/*\n\t\tgl.ActiveTexture(gl.TEXTURE0)\n\t\tgl.BindTexture(gl.TEXTURE_2D, tCache.Get(\"placeholder\"))\n\t*/\n\t// TODO draw calls are themselves still specialized and param'd:\n\t/*\n\t\tgl.DrawArrays(gl.TRIANGLES, 0, 6*2*3)\n\t*/\n}",
"func (native *OpenGL) BindFragDataLocation(program uint32, color uint32, name string) {\n\tgl.BindFragDataLocation(program, color, gl.Str(name+\"\\x00\"))\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tC.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func BindFragDataLocationIndexed(program uint32, colorNumber uint32, index uint32, name *uint8) {\n\tC.glowBindFragDataLocationIndexed(gpBindFragDataLocationIndexed, (C.GLuint)(program), (C.GLuint)(colorNumber), (C.GLuint)(index), (*C.GLchar)(unsafe.Pointer(name)))\n}",
"func (w *Window) SetBuffersGeometry(width, height, format int) int {\n\treturn int(C.ANativeWindow_setBuffersGeometry(w.cptr(), C.int32_t(width), C.int32_t(height), C.int32_t(format)))\n}",
"func BindTransformFeedback(target uint32, id uint32) {\n C.glowBindTransformFeedback(gpBindTransformFeedback, (C.GLenum)(target), (C.GLuint)(id))\n}",
"func (native *OpenGL) BindVertexArray(array uint32) {\n\tgl.BindVertexArray(array)\n}",
"func (b *Buffer) Flush() error {\n\t// upload vertice + meta to opengl\n\tGlBindBuffer(ARRAY_BUFFER, b.GlBuffer)\n\tErrPanic()\n\tif len(b.Buf) == 0 {\n\t\treturn nil\n\t}\n\tGlBufferData(ARRAY_BUFFER, b.Buf, STATIC_DRAW)\n\tErrPanic()\n\t// FIXME: rebind shader attrs\n\tb.BufLen = len(b.Buf)\n\tb.Buf = b.Buf[:0]\n\treturn nil\n}",
"func NamedFramebufferDrawBuffer(framebuffer uint32, buf uint32) {\n\tsyscall.Syscall(gpNamedFramebufferDrawBuffer, 2, uintptr(framebuffer), uintptr(buf), 0)\n}",
"func VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, pointer unsafe.Pointer) {\n C.glowVertexAttribPointer(gpVertexAttribPointer, (C.GLuint)(index), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLsizei)(stride), pointer)\n}",
"func BindAttribLocation(program Uint, index Uint, name string) {\n\tcprogram, _ := (C.GLuint)(program), cgoAllocsUnknown\n\tcindex, _ := (C.GLuint)(index), cgoAllocsUnknown\n\tcname, _ := unpackPCharString(name)\n\tC.glBindAttribLocation(cprogram, cindex, cname)\n}",
"func (debugging *debuggingOpenGL) BindRenderbuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindRenderbuffer\", target, buffer)\n\tdebugging.gl.BindRenderbuffer(target, buffer)\n\tdebugging.recordExit(\"BindRenderbuffer\")\n}",
"func (o *AddPlayerParams) bindPoints(rawData []string, hasKey bool, formats strfmt.Registry) error {\n\tvar raw string\n\tif len(rawData) > 0 {\n\t\traw = rawData[len(rawData)-1]\n\t}\n\n\t// Required: true\n\t// Parameter is provided by construction from the route\n\to.Points = raw\n\n\treturn nil\n}"
] | [
"0.7717517",
"0.7427129",
"0.7165118",
"0.7128333",
"0.7120448",
"0.7113185",
"0.7086752",
"0.70815086",
"0.7040799",
"0.69675136",
"0.6961297",
"0.6934498",
"0.692607",
"0.6769211",
"0.6769211",
"0.6749382",
"0.6749382",
"0.67335546",
"0.67189157",
"0.67189157",
"0.65948665",
"0.6569516",
"0.6404518",
"0.6375763",
"0.6353991",
"0.63435",
"0.6300089",
"0.62887424",
"0.6280653",
"0.62196195",
"0.61863106",
"0.61239415",
"0.6082622",
"0.6082622",
"0.6070409",
"0.6044012",
"0.6044012",
"0.60335547",
"0.6017698",
"0.6016463",
"0.5893133",
"0.5893133",
"0.588929",
"0.5873381",
"0.5873381",
"0.5864152",
"0.58522516",
"0.58472914",
"0.5841113",
"0.58294725",
"0.58266973",
"0.5824561",
"0.5812181",
"0.5809232",
"0.58043283",
"0.5794824",
"0.57834315",
"0.5775713",
"0.5762553",
"0.57419175",
"0.5741724",
"0.5702363",
"0.56954396",
"0.56904423",
"0.56904423",
"0.56586874",
"0.56313604",
"0.56313604",
"0.56268924",
"0.5615928",
"0.5615928",
"0.5613681",
"0.559115",
"0.558279",
"0.55640733",
"0.55611604",
"0.5553383",
"0.55319136",
"0.55255294",
"0.55233043",
"0.5510982",
"0.55040735",
"0.55040735",
"0.55024827",
"0.5493097",
"0.5490741",
"0.54846334",
"0.5475098",
"0.5472428",
"0.5472428",
"0.5462394",
"0.5443767",
"0.5427308",
"0.54258853",
"0.5425188",
"0.54163843",
"0.54134613",
"0.5407442",
"0.5403435"
] | 0.7296413 | 3 |
attach multiple buffer objects to a vertex array object | func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {
C.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (g *Gaffer) AddBuffer(u *Update) {\n\n\tfor _, v := range u.entities {\n\t\tg.AddEntity(v)\n\t}\n\n\tfor _, v := range u.edges {\n\t\tg.AddEdge(v)\n\t}\n\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n C.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tsyscall.Syscall6(gpBindVertexBuffers, 5, uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(strides)), 0)\n}",
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tsyscall.Syscall(gpVertexArrayElementBuffer, 2, uintptr(vaobj), uintptr(buffer), 0)\n}",
"func VertexArrayVertexBuffers(vaobj uint32, first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tsyscall.Syscall6(gpVertexArrayVertexBuffers, 6, uintptr(vaobj), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(strides)))\n}",
"func (vao *VAO) BuildBuffers(shaderProgramHandle uint32) {\n\tgl.BindVertexArray(vao.handle)\n\tfor _, vbo := range vao.vertexBuffers {\n\t\tvbo.BuildVertexAttributes(shaderProgramHandle)\n\t}\n\tgl.BindVertexArray(0)\n}",
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tC.glowVertexArrayElementBuffer(gpVertexArrayElementBuffer, (C.GLuint)(vaobj), (C.GLuint)(buffer))\n}",
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tC.glowVertexArrayElementBuffer(gpVertexArrayElementBuffer, (C.GLuint)(vaobj), (C.GLuint)(buffer))\n}",
"func VertexArrayVertexBuffers(vaobj uint32, first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowVertexArrayVertexBuffers(gpVertexArrayVertexBuffers, (C.GLuint)(vaobj), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func VertexArrayVertexBuffers(vaobj uint32, first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowVertexArrayVertexBuffers(gpVertexArrayVertexBuffers, (C.GLuint)(vaobj), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n C.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func BindVertexArray(array uint32) {\n C.glowBindVertexArray(gpBindVertexArray, (C.GLuint)(array))\n}",
"func (vao *VAO) AddVertexBuffer(vbo *vbo.VBO) {\n\tvao.vertexBuffers = append(vao.vertexBuffers, vbo)\n}",
"func initFontVbo() {\n\tvar vertexAttributes = make([]float32, 5*6*len(charDatas))\n\ti := 0\n\tfor _, charData := range charDatas {\n\t\ttop := float32(charData.ty+charData.h) / 256\n\t\tbottom := float32(charData.ty) / 256\n\t\tright := float32(charData.tx+charData.w) / 256\n\t\tleft := float32(charData.tx) / 256\n\n\t\tw := float32(charData.w) / 256\n\t\th := float32(charData.h) / 256\n\n\t\t// tri 1\n\t\tvertexAttributes[i] = w\n\t\tvertexAttributes[i+1] = h\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = right\n\t\tvertexAttributes[i+4] = bottom\n\t\ti += 5\n\n\t\tvertexAttributes[i] = w\n\t\tvertexAttributes[i+1] = 0\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = right\n\t\tvertexAttributes[i+4] = top\n\t\ti += 5\n\n\t\tvertexAttributes[i] = 0\n\t\tvertexAttributes[i+1] = h\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = left\n\t\tvertexAttributes[i+4] = bottom\n\t\ti += 5\n\n\t\t// tri 2\n\t\tvertexAttributes[i] = w\n\t\tvertexAttributes[i+1] = 0\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = right\n\t\tvertexAttributes[i+4] = top\n\t\ti += 5\n\n\t\tvertexAttributes[i] = 0\n\t\tvertexAttributes[i+1] = 0\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = left\n\t\tvertexAttributes[i+4] = top\n\t\ti += 5\n\n\t\tvertexAttributes[i] = 0\n\t\tvertexAttributes[i+1] = h\n\t\tvertexAttributes[i+2] = 0\n\t\tvertexAttributes[i+3] = left\n\t\tvertexAttributes[i+4] = bottom\n\t\ti += 5\n\t}\n\n\tgl.GenBuffers(1, &vbo)\n\tgl.GenVertexArrays(1, &vao)\n\tgl.BindVertexArray(vao)\n\tgl.EnableVertexAttribArray(0)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(\n\t\tgl.ARRAY_BUFFER,\n\t\t4*len(vertexAttributes),\n\t\tgl.Ptr(vertexAttributes),\n\t\tgl.STATIC_DRAW,\n\t)\n\tgl.VertexAttribPointer(0, 3, gl.FLOAT, false, 5*4, gl.PtrOffset(0))\n\tgl.EnableVertexAttribArray(0)\n\tgl.VertexAttribPointer(1, 2, gl.FLOAT, false, 5*4, gl.PtrOffset(4*3))\n\tgl.EnableVertexAttribArray(1)\n\t//unbind\n\tgl.BindVertexArray(0)\n\n}",
"func BufferInit(target Enum, size int, usage Enum) {\n\tgl.BufferData(uint32(target), size, nil, uint32(usage))\n}",
"func VertexArrayVertexBuffer(vaobj uint32, bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tsyscall.Syscall6(gpVertexArrayVertexBuffer, 5, uintptr(vaobj), uintptr(bindingindex), uintptr(buffer), uintptr(offset), uintptr(stride), 0)\n}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n C.glowMultiDrawElementsBaseVertex(gpMultiDrawElementsBaseVertex, (C.GLenum)(mode), (*C.GLsizei)(unsafe.Pointer(count)), (C.GLenum)(xtype), indices, (C.GLsizei)(drawcount), (*C.GLint)(unsafe.Pointer(basevertex)))\n}",
"func (gl *WebGL) BufferData(target GLEnum, data interface{}, usage GLEnum) {\n\tvalues := sliceToTypedArray(data)\n\tgl.context.Call(\"bufferData\", target, values, usage)\n}",
"func (geom Geometry) Buffer(distance float64, segments int) Geometry {\n\tnewGeom := C.OGR_G_Buffer(geom.cval, C.double(distance), C.int(segments))\n\treturn Geometry{newGeom}\n}",
"func (b *Buffer) Attach(buffer []byte) {\n b.AttachBytes(buffer, 0, len(buffer))\n}",
"func (g *GLTF) loadAttributes(geom *geometry.Geometry, attributes map[string]int, indices math32.ArrayU32) error {\n\n\t// Indices of buffer views\n\tinterleavedVBOs := make(map[int]*gls.VBO, 0)\n\n\t// Load primitive attributes\n\tfor name, aci := range attributes {\n\t\taccessor := g.Accessors[aci]\n\n\t\t// Validate that accessor is compatible with attribute\n\t\terr := g.validateAccessorAttribute(accessor, name)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// Load data and add it to geometry's VBO\n\t\tif g.isInterleaved(accessor) {\n\t\t\tbvIdx := *accessor.BufferView\n\t\t\t// Check if we already loaded this buffer view\n\t\t\tvbo, ok := interleavedVBOs[bvIdx]\n\t\t\tif ok {\n\t\t\t\t// Already created VBO for this buffer view\n\t\t\t\t// Add attribute with correct byteOffset\n\t\t\t\tg.addAttributeToVBO(vbo, name, uint32(*accessor.ByteOffset))\n\t\t\t} else {\n\t\t\t\t// Load data and create vbo\n\t\t\t\tbuf, err := g.loadBufferView(bvIdx)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\t//\n\t\t\t\t// TODO: BUG HERE\n\t\t\t\t// If buffer view has accessors with different component type then this will have a read alignment problem!\n\t\t\t\t//\n\t\t\t\tdata, err := g.bytesToArrayF32(buf, accessor.ComponentType, accessor.Count*TypeSizes[accessor.Type])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tvbo := gls.NewVBO(data)\n\t\t\t\tg.addAttributeToVBO(vbo, name, 0)\n\t\t\t\t// Save reference to VBO keyed by index of the buffer view\n\t\t\t\tinterleavedVBOs[bvIdx] = vbo\n\t\t\t\t// Add VBO to geometry\n\t\t\t\tgeom.AddVBO(vbo)\n\t\t\t}\n\t\t} else {\n\t\t\tbuf, err := g.loadAccessorBytes(accessor)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdata, err := g.bytesToArrayF32(buf, accessor.ComponentType, accessor.Count*TypeSizes[accessor.Type])\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tvbo := gls.NewVBO(data)\n\t\t\tg.addAttributeToVBO(vbo, name, 0)\n\t\t\t// Add VBO to geometry\n\t\t\tgeom.AddVBO(vbo)\n\t\t}\n\t}\n\n\t// Set indices\n\tif len(indices) > 0 {\n\t\tgeom.SetIndices(indices)\n\t}\n\n\treturn nil\n}",
"func (du *DescriptorSet) AddBuffer(dstBinding int, dtype vk.DescriptorType, b *Buffer, offset int) {\n\tvar descriptorBufferInfo = vk.DescriptorBufferInfo{}\n\tdescriptorBufferInfo.Buffer = b.VKBuffer\n\tdescriptorBufferInfo.Offset = vk.DeviceSize(offset)\n\tdescriptorBufferInfo.Range = vk.DeviceSize(b.Size)\n\n\tvar writeDescriptorSet = vk.WriteDescriptorSet{}\n\twriteDescriptorSet.SType = vk.StructureTypeWriteDescriptorSet\n\twriteDescriptorSet.DstBinding = uint32(dstBinding) // write to the first, and only binding.\n\twriteDescriptorSet.DescriptorCount = 1 // update a single descriptor.\n\twriteDescriptorSet.DescriptorType = dtype\n\twriteDescriptorSet.PBufferInfo = []vk.DescriptorBufferInfo{descriptorBufferInfo}\n\n\tif du.VKWriteDiscriptorSet == nil {\n\t\tdu.VKWriteDiscriptorSet = make([]vk.WriteDescriptorSet, 0)\n\t}\n\tdu.VKWriteDiscriptorSet = append(du.VKWriteDiscriptorSet, writeDescriptorSet)\n}",
"func BindVertexArray(array uint32) {\n\tC.glowBindVertexArray(gpBindVertexArray, (C.GLuint)(array))\n}",
"func BindVertexArray(array uint32) {\n\tC.glowBindVertexArray(gpBindVertexArray, (C.GLuint)(array))\n}",
"func (b *Buffer) Flush() error {\n\t// upload vertice + meta to opengl\n\tGlBindBuffer(ARRAY_BUFFER, b.GlBuffer)\n\tErrPanic()\n\tif len(b.Buf) == 0 {\n\t\treturn nil\n\t}\n\tGlBufferData(ARRAY_BUFFER, b.Buf, STATIC_DRAW)\n\tErrPanic()\n\t// FIXME: rebind shader attrs\n\tb.BufLen = len(b.Buf)\n\tb.Buf = b.Buf[:0]\n\treturn nil\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func (g *Geometry) Initialize() {\n\tif g.handle != 0 {\n\t\treturn\n\t}\n\n\t// Determine if geometry has an index buffer\n\tif g.IndexBuffer.ComponentType != 0 {\n\t\tg.hasIndices = true\n\t}\n\n\t// Calculate number of indices\n\tif g.hasIndices {\n\t\tcompSize := componentSizeFromType(g.IndexBuffer.ComponentType)\n\t\tg.numIndices = int32(len(g.IndexBuffer.Data)) / compSize\n\t} else {\n\t\tcompSize := componentSizeFromType(g.PositionBuffer.ComponentType)\n\t\tg.numIndices = int32(len(g.PositionBuffer.Data)) / compSize\n\t}\n\n\t// Set buffer targets\n\tg.IndexBuffer.target = gl.ELEMENT_ARRAY_BUFFER\n\tg.PositionBuffer.target = gl.ARRAY_BUFFER\n\tg.NormalBuffer.target = gl.ARRAY_BUFFER\n\tg.TexCoordBuffer.target = gl.ARRAY_BUFFER\n\tg.TangentBuffer.target = gl.ARRAY_BUFFER\n\n\t// Initialize buffers\n\tg.IndexBuffer.initialize()\n\tg.PositionBuffer.initialize()\n\tg.NormalBuffer.initialize()\n\tg.TexCoordBuffer.initialize()\n\tg.TangentBuffer.initialize()\n\n\t// Create and bind VertexArray\n\tgl.GenVertexArrays(1, &g.handle)\n\tgl.BindVertexArray(g.handle)\n\n\t// Bind/enable buffers within the VertexArray\n\tg.IndexBuffer.bind()\n\tg.PositionBuffer.enable(0)\n\tg.NormalBuffer.enable(1)\n\tg.TexCoordBuffer.enable(2)\n\tg.TangentBuffer.enable(3)\n\n\tgl.BindVertexArray(0)\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n C.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func (debugging *debuggingOpenGL) BufferData(target uint32, size int, data interface{}, usage uint32) {\n\tdebugging.recordEntry(\"BufferData\", target, size, data, usage)\n\tdebugging.gl.BufferData(target, size, data, usage)\n\tdebugging.recordExit(\"BufferData\")\n}",
"func (va *VertexArray) SetData(data []float32) (err error) {\n\tgl.BindBuffer(gl.ARRAY_BUFFER, va.vbo) // tells OpenGL what kind of buffer this is\n\n\t// BufferData assigns data to the buffer.\n\t// there can only be one ARRAY_BUFFER bound at any time, so OpenGL knows which buffer we mean if we\n\t// tell it what type of buffer it is.\n\t//\t\t\t type\t\t\t size (in bytes) pointer to data\tusage\n\tgl.BufferData(gl.ARRAY_BUFFER, len(data)*4, gl.Ptr(data), gl.STATIC_DRAW)\n\n\treturn\n}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n\tsyscall.Syscall6(gpMultiDrawElementsBaseVertex, 6, uintptr(mode), uintptr(unsafe.Pointer(count)), uintptr(xtype), uintptr(unsafe.Pointer(indices)), uintptr(drawcount), uintptr(unsafe.Pointer(basevertex)))\n}",
"func VertexArrayVertexBuffer(vaobj uint32, bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowVertexArrayVertexBuffer(gpVertexArrayVertexBuffer, (C.GLuint)(vaobj), (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func VertexArrayVertexBuffer(vaobj uint32, bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tC.glowVertexArrayVertexBuffer(gpVertexArrayVertexBuffer, (C.GLuint)(vaobj), (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func (va *VertexArray) SetLayout(layout VertexLayout) {\n\tif len(va.layout.layout) != 0 {\n\t\treturn\n\t}\n\n\tva.layout = layout\n\n\t// generate and bind the vertex array\n\tgl.GenVertexArrays(1, &va.vao) // generates the vertex array (or multiple)\n\tgl.BindVertexArray(va.vao) // binds the vertex array\n\n\t// make vertex array pointer attributes\n\t// offset is the offset in bytes to the first attribute\n\toffset := 0\n\n\t// calculate vertex stride\n\tstride := 0\n\tfor _, elem := range va.layout.layout {\n\t\tstride += elem.getByteSize()\n\n\t}\n\n\t// Vertex Buffer Object\n\tgl.GenBuffers(1, &va.vbo) // generates the buffer (or multiple)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, va.vbo)\n\n\tfor i, elem := range va.layout.layout {\n\n\t\t// define an array of generic vertex attribute data\n\t\t// index, size, type, normalized, stride of vertex (in bytes), pointer (offset)\n\t\t// point positions\n\t\tgl.VertexAttribPointer(uint32(i), int32(elem.getSize()),\n\t\t\telem.getGLType(), false, int32(stride), gl.PtrOffset(offset))\n\t\tgl.EnableVertexAttribArray(uint32(i))\n\t\toffset += elem.getByteSize()\n\t}\n\n}",
"func BindVertexArray(array uint32) {\n\tsyscall.Syscall(gpBindVertexArray, 1, uintptr(array), 0, 0)\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n\tsyscall.Syscall6(gpBindVertexBuffer, 4, uintptr(bindingindex), uintptr(buffer), uintptr(offset), uintptr(stride), 0, 0)\n}",
"func BindVertexArray(vao uint32) {\n\t//gl.BindVertexArrayAPPLE(vao)\n\tgl.BindVertexArray(vao)\n}",
"func GenBuffers(buffers []Buffer) {\n\tgl.GenBuffers(gl.Sizei(len(buffers)), (*gl.Uint)(&buffers[0]))\n}",
"func (native *OpenGL) BufferData(target uint32, size int, data interface{}, usage uint32) {\n\tdataPtr, isPtr := data.(unsafe.Pointer)\n\tif isPtr {\n\t\tgl.BufferData(target, size, dataPtr, usage)\n\t} else {\n\t\tgl.BufferData(target, size, gl.Ptr(data), usage)\n\t}\n}",
"func (c webgl) BufferDataX(target Enum, d interface{}, usage Enum) {\n\tc.ctx.Call(\"bufferData\", target, conv(d), usage)\n}",
"func (b *VBO) Bind(m *Mesh) {\n\tif !b.genBound {\n\t\tpanic(\"A VBO buffer ID has not been generated. Call GenBuffer first.\")\n\t}\n\n\tgl.BindBuffer(gl.ARRAY_BUFFER, b.vboID)\n\tfloatSize := int(unsafe.Sizeof(float32(0)))\n\tgl.BufferData(gl.ARRAY_BUFFER, len(m.Vertices)*floatSize, gl.Ptr(m.Vertices), gl.STATIC_DRAW)\n}",
"func (debugging *debuggingOpenGL) BindVertexArray(array uint32) {\n\tdebugging.recordEntry(\"BindVertexArray\", array)\n\tdebugging.gl.BindVertexArray(array)\n\tdebugging.recordExit(\"BindVertexArray\")\n}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n\tC.glowMultiDrawElementsBaseVertex(gpMultiDrawElementsBaseVertex, (C.GLenum)(mode), (*C.GLsizei)(unsafe.Pointer(count)), (C.GLenum)(xtype), indices, (C.GLsizei)(drawcount), (*C.GLint)(unsafe.Pointer(basevertex)))\n}",
"func MultiDrawElementsBaseVertex(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32, basevertex *int32) {\n\tC.glowMultiDrawElementsBaseVertex(gpMultiDrawElementsBaseVertex, (C.GLenum)(mode), (*C.GLsizei)(unsafe.Pointer(count)), (C.GLenum)(xtype), indices, (C.GLsizei)(drawcount), (*C.GLint)(unsafe.Pointer(basevertex)))\n}",
"func (vao *VAO) AddIndexBuffer(ibo *ibo.IBO) {\n\tvao.indexBuffer = ibo\n}",
"func BufferData(target Enum, src []byte, usage Enum) {\n\tgl.BufferData(uint32(target), int(len(src)), gl.Ptr(&src[0]), uint32(usage))\n}",
"func GenBuffers(n int32, buffers *uint32) {\n C.glowGenBuffers(gpGenBuffers, (C.GLsizei)(n), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func GenVertexArrays(n int32, arrays *uint32) {\n C.glowGenVertexArrays(gpGenVertexArrays, (C.GLsizei)(n), (*C.GLuint)(unsafe.Pointer(arrays)))\n}",
"func NewAttachedBuffer(buffer *Buffer) *Buffer {\n result := NewEmptyBuffer()\n result.AttachBuffer(buffer)\n return result\n}",
"func (native *OpenGL) BindVertexArray(array uint32) {\n\tgl.BindVertexArray(array)\n}",
"func BufferData(target Enum, size Sizeiptr, data unsafe.Pointer, usage Enum) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcsize, _ := (C.GLsizeiptr)(size), cgoAllocsUnknown\n\tcdata, _ := (unsafe.Pointer)(unsafe.Pointer(data)), cgoAllocsUnknown\n\tcusage, _ := (C.GLenum)(usage), cgoAllocsUnknown\n\tC.glBufferData(ctarget, csize, cdata, cusage)\n}",
"func packSFzVertex(v []FzVertex, ptr0 *C.fz_vertex) {\n\tconst m = 0x7fffffff\n\tfor i0 := range v {\n\t\tptr1 := (*(*[m / sizeOfFzVertexValue]C.fz_vertex)(unsafe.Pointer(ptr0)))[i0]\n\t\tv[i0] = *NewFzVertexRef(unsafe.Pointer(&ptr1))\n\t}\n}",
"func MakeVertexBufferObject(sizeBytes int, data unsafe.Pointer) VertexBufferObject {\n\tvar vbo uint32\n\tgl.GenBuffers(1, &vbo)\n\tif sizeBytes > 0 {\n\t\tgl.NamedBufferData(vbo, sizeBytes, data, gl.DYNAMIC_DRAW)\n\t}\n\treturn VertexBufferObject(vbo)\n}",
"func makeVao(points []float32) uint32 {\n\tvar vbo uint32\n\tgl.GenBuffers(1, &vbo)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(gl.ARRAY_BUFFER, 4*len(points), gl.Ptr(points), gl.STATIC_DRAW)\n\n\tvar vao uint32\n\tgl.GenVertexArrays(1, &vao)\n\tgl.BindVertexArray(vao)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tvar offset int = 6 * 4\n\tgl.VertexAttribPointer(0, 2, gl.FLOAT, false, 8*4, gl.PtrOffset(offset))\n\tgl.EnableVertexAttribArray(0)\n\t//gl.VertexAttribPointer(0, 3, gl.FLOAT, false, 0, nil)\n\n\treturn vao\n}",
"func updateTextureVbo(data []float32, vbo uint32) {\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferSubData(gl.ARRAY_BUFFER, 0, len(data)*4, gl.Ptr(data))\n\tgl.BindBuffer(gl.ARRAY_BUFFER, 0)\n}",
"func BufferStorage(target uint32, size int, data unsafe.Pointer, flags uint32) {\n C.glowBufferStorage(gpBufferStorage, (C.GLenum)(target), (C.GLsizeiptr)(size), data, (C.GLbitfield)(flags))\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func DrawElementsBaseVertex(mode uint32, count int32, xtype uint32, indices unsafe.Pointer, basevertex int32) {\n C.glowDrawElementsBaseVertex(gpDrawElementsBaseVertex, (C.GLenum)(mode), (C.GLsizei)(count), (C.GLenum)(xtype), indices, (C.GLint)(basevertex))\n}",
"func makeVao(data []float32) uint32 {\n\tvar vbo uint32\n\tgl.GenBuffers(1, &vbo)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(gl.ARRAY_BUFFER, 4*len(data), gl.Ptr(data), gl.STATIC_DRAW)\n\n\tvar vao uint32\n\tgl.GenVertexArrays(1, &vao)\n\tgl.BindVertexArray(vao)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tvar offset int\n\n\t// position attribute\n\tgl.VertexAttribPointer(0, 3, gl.FLOAT, false, 8*4, gl.PtrOffset(offset))\n\tgl.EnableVertexAttribArray(0)\n\toffset += 3 * 4\n\n\t// color attribute\n\tgl.VertexAttribPointer(1, 3, gl.FLOAT, false, 8*4, gl.PtrOffset(offset))\n\tgl.EnableVertexAttribArray(1)\n\toffset += 3 * 4\n\n\t// texture coord attribute\n\tgl.VertexAttribPointer(2, 2, gl.FLOAT, false, 8*4, gl.PtrOffset(offset))\n\tgl.EnableVertexAttribArray(2)\n\toffset += 2 * 4\n\n\treturn vao\n}",
"func BufferData(target uint32, size int, data unsafe.Pointer, usage uint32) {\n C.glowBufferData(gpBufferData, (C.GLenum)(target), (C.GLsizeiptr)(size), data, (C.GLenum)(usage))\n}",
"func BindBuffersRange(target uint32, first uint32, count int32, buffers *uint32, offsets *int, sizes *int) {\n C.glowBindBuffersRange(gpBindBuffersRange, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizeiptr)(unsafe.Pointer(sizes)))\n}",
"func NewBuffer(aSlice interface{}) *Buffer {\n return &Buffer{buffer: sliceValue(aSlice, false), handler: valueHandler{}}\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func BindBuffersBase(target uint32, first uint32, count int32, buffers *uint32) {\n\tC.glowBindBuffersBase(gpBindBuffersBase, (C.GLenum)(target), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)))\n}",
"func MultiDrawArrays(mode uint32, first *int32, count *int32, drawcount int32) {\n C.glowMultiDrawArrays(gpMultiDrawArrays, (C.GLenum)(mode), (*C.GLint)(unsafe.Pointer(first)), (*C.GLsizei)(unsafe.Pointer(count)), (C.GLsizei)(drawcount))\n}",
"func (spriteBatch *SpriteBatch) addv(verts []float32, mat *mgl32.Mat4, index int) error {\n\tif index == -1 && spriteBatch.count >= spriteBatch.size {\n\t\treturn fmt.Errorf(\"Sprite Batch Buffer Full\")\n\t}\n\n\tsprite := make([]float32, 8*4)\n\tfor i := 0; i < 32; i += 8 {\n\t\tj := (i / 2)\n\t\tsprite[i+0] = (mat[0] * verts[j+0]) + (mat[4] * verts[j+1]) + mat[12]\n\t\tsprite[i+1] = (mat[1] * verts[j+0]) + (mat[5] * verts[j+1]) + mat[13]\n\t\tsprite[i+2] = verts[j+2]\n\t\tsprite[i+3] = verts[j+3]\n\t\tsprite[i+4] = spriteBatch.color[0]\n\t\tsprite[i+5] = spriteBatch.color[1]\n\t\tsprite[i+6] = spriteBatch.color[2]\n\t\tsprite[i+7] = spriteBatch.color[3]\n\t}\n\n\tif index == -1 {\n\t\tspriteBatch.arrayBuf.fill(spriteBatch.count*4*8, sprite)\n\t\tspriteBatch.count++\n\t} else {\n\t\tspriteBatch.arrayBuf.fill(index*4*8, sprite)\n\t}\n\n\treturn nil\n}",
"func NewAttached(buffer []byte) *Buffer {\n result := NewEmptyBuffer()\n result.Attach(buffer)\n return result\n}",
"func GenVertexArrays(n int32, arrays *uint32) {\n\tsyscall.Syscall(gpGenVertexArrays, 2, uintptr(n), uintptr(unsafe.Pointer(arrays)), 0)\n}",
"func BindBuffer(target Enum, b Buffer) {\n\tgl.BindBuffer(uint32(target), b.Value)\n}",
"func (vao *VAO) Render() {\n\tgl.BindVertexArray(vao.handle)\n\tif vao.indexBuffer != nil {\n\t\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, vao.indexBuffer.GetHandle())\n\t\tgl.DrawElements(vao.mode, vao.indexBuffer.Len(), gl.UNSIGNED_SHORT, nil)\n\t\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, 0)\n\t} else {\n\t\tgl.DrawArrays(vao.mode, 0, vao.vertexBuffers[0].Len())\n\t}\n\tgl.BindVertexArray(0)\n}",
"func (ab *Buffer) AddMany(ctx context.Context, objs ...interface{}) {\n\tif ab.Tracer != nil {\n\t\tfinisher := ab.Tracer.StartAddMany(ctx)\n\t\tdefer finisher.Finish(nil)\n\t}\n\tvar bufferLength int\n\tif ab.Stats != nil {\n\t\tab.maybeStatCount(ctx, MetricAddMany, 1)\n\t\tab.maybeStatCount(ctx, MetricAddManyItemCount, len(objs))\n\t\tstart := time.Now().UTC()\n\t\tdefer func() {\n\t\t\tab.maybeStatGauge(ctx, MetricBufferLength, float64(bufferLength))\n\t\t\tab.maybeStatElapsed(ctx, MetricAddManyElapsed, start)\n\t\t}()\n\t}\n\n\tvar flushes [][]interface{}\n\tab.contentsMu.Lock()\n\tbufferLength = ab.contents.Len()\n\tfor _, obj := range objs {\n\t\tab.contents.Enqueue(obj)\n\t\tif ab.contents.Len() >= ab.MaxLen {\n\t\t\tflushes = append(flushes, ab.contents.Drain())\n\t\t}\n\t}\n\tab.contentsMu.Unlock()\n\tfor _, flush := range flushes {\n\t\tab.unsafeFlushAsync(ctx, flush)\n\t}\n}",
"func DrawArrays(mode uint32, first int32, count int32) {\n C.glowDrawArrays(gpDrawArrays, (C.GLenum)(mode), (C.GLint)(first), (C.GLsizei)(count))\n}",
"func (va *VertexArray) SetIndexData(data []uint32) {\n\t// Index Buffer Object\n\tgl.GenBuffers(1, &va.ibo) // generates the buffer (or multiple)\n\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, va.ibo) // tells OpenGL what kind of buffer this is\n\n\t// BufferData assigns data to the buffer.\n\tgl.BufferData(gl.ELEMENT_ARRAY_BUFFER, len(data)*4, gl.Ptr(data), gl.STATIC_DRAW)\n\n\tva.vertices = len(data)\n}",
"func (gl *WebGL) BufferSubData(target GLEnum, offset int, data interface{}) {\n\tvalues := sliceToTypedArray(data)\n\tgl.context.Call(\"bufferSubData\", target, offset, values)\n}",
"func BufferSubData(target Enum, offset Intptr, size Sizeiptr, data unsafe.Pointer) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcoffset, _ := (C.GLintptr)(offset), cgoAllocsUnknown\n\tcsize, _ := (C.GLsizeiptr)(size), cgoAllocsUnknown\n\tcdata, _ := (unsafe.Pointer)(unsafe.Pointer(data)), cgoAllocsUnknown\n\tC.glBufferSubData(ctarget, coffset, csize, cdata)\n}",
"func MultiDrawElements(mode uint32, count *int32, xtype uint32, indices *unsafe.Pointer, drawcount int32) {\n C.glowMultiDrawElements(gpMultiDrawElements, (C.GLenum)(mode), (*C.GLsizei)(unsafe.Pointer(count)), (C.GLenum)(xtype), indices, (C.GLsizei)(drawcount))\n}",
"func (obj *Device) ProcessVertices(\n\tsrcStartIndex uint,\n\tdestIndex uint,\n\tvertexCount uint,\n\tdestBuffer *VertexBuffer,\n\tvertexDecl *VertexDeclaration,\n\tflags uint32,\n) Error {\n\tret, _, _ := syscall.Syscall9(\n\t\tobj.vtbl.ProcessVertices,\n\t\t7,\n\t\tuintptr(unsafe.Pointer(obj)),\n\t\tuintptr(srcStartIndex),\n\t\tuintptr(destIndex),\n\t\tuintptr(vertexCount),\n\t\tuintptr(unsafe.Pointer(destBuffer)),\n\t\tuintptr(unsafe.Pointer(vertexDecl)),\n\t\tuintptr(flags),\n\t\t0,\n\t\t0,\n\t)\n\treturn toErr(ret)\n}",
"func (g *Gaffer) AddElements(entities []*Entity, edges []*Edge) {\n\tg.bufferq <- &Update{\n\t\tentities: entities,\n\t\tedges: edges,\n\t}\n\n}",
"func (b *Buffer) AttachNew() {\n b.data = make([]byte, 0)\n b.size = 0\n b.offset = 0\n}",
"func EnableVertexAttribArray(index uint32) {\n C.glowEnableVertexAttribArray(gpEnableVertexAttribArray, (C.GLuint)(index))\n}",
"func (self *TileSprite) SetCanvasBufferA(member *PIXICanvasBuffer) {\n self.Object.Set(\"canvasBuffer\", member)\n}",
"func VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, pointer unsafe.Pointer) {\n C.glowVertexAttribPointer(gpVertexAttribPointer, (C.GLuint)(index), (C.GLint)(size), (C.GLenum)(xtype), (C.GLboolean)(boolToInt(normalized)), (C.GLsizei)(stride), pointer)\n}",
"func (gl *WebGL) NewBuffer(target GLEnum, data interface{}, usage GLEnum) WebGLBuffer {\n\tbuffer := gl.CreateBuffer()\n\tgl.BindBuffer(target, buffer)\n\tgl.BufferData(target, data, usage)\n\treturn buffer\n}",
"func makeVao(points []float32) uint32 {\n\tvar vbo uint32\n\tgl.GenBuffers(1, &vbo)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.BufferData(gl.ARRAY_BUFFER, 4*len(points), gl.Ptr(points), gl.STATIC_DRAW)\n\n\tvar vao uint32\n\tgl.GenVertexArrays(1, &vao)\n\tgl.BindVertexArray(vao)\n\tgl.EnableVertexAttribArray(0)\n\tgl.BindBuffer(gl.ARRAY_BUFFER, vbo)\n\tgl.VertexAttribPointer(0, 3, gl.FLOAT, false, 0, nil)\n\n\treturn vao\n}",
"func GenerateShapes() {\r\n\t// Square\r\n\tShapes[0].vertices = []gl.GLfloat{-1, -1, 1, -1, -1, 1, 1, 1}\r\n\tShapes[0].elements = []gl.GLushort{0, 1, 2, 2, 3, 1}\r\n\r\n\t// ___|\r\n\tShapes[1].vertices = []gl.GLfloat{-2, 0, -2, -1, 2, -1, 2, 0, 2, 1, 1, 1, 1, 0}\r\n\tShapes[1].elements = []gl.GLushort{0, 1, 2, 2, 3, 0, 3, 4, 5, 5, 6, 3}\r\n\r\n\t// _|_\r\n\tShapes[2].vertices = []gl.GLfloat{-1.5, 0, -0.5, 0, -0.5, 1, 0.5, 1, 0.5, 0, 1.5, 0, 1.5, -1, -1.5, -1}\r\n\tShapes[2].elements = []gl.GLushort{1, 2, 3, 3, 4, 1, 0, 7, 6, 6, 0, 5}\r\n\r\n\t// Snake\r\n\tShapes[3].vertices = []gl.GLfloat{-1.5, -1, -1.5, 0, -0.5, 0, -0.5, 1, 1.5, 1, 1.5, 0, 0.5, 0, 0.5, -1}\r\n\tShapes[3].elements = []gl.GLushort{0, 1, 6, 6, 7, 0, 2, 3, 4, 4, 5, 2}\r\n\r\n\t// Now fill out the rest automatically.\r\n\t// FIXME why doesn't using _, shape in this loop work ?\r\n\tfor i := range Shapes {\r\n\t\tShapes[i].vao = gl.GenVertexArray()\r\n\t\tShapes[i].vao.Bind()\r\n\t\tShapes[i].vbo = gl.GenBuffer()\r\n\t\tShapes[i].vbo.Bind(gl.ARRAY_BUFFER)\r\n\t\tgl.BufferData(gl.ARRAY_BUFFER, len(Shapes[i].vertices)*4, Shapes[i].vertices, gl.STATIC_DRAW)\r\n\t\tShapes[i].elementBuffer = gl.GenBuffer()\r\n\t\tShapes[i].elementBuffer.Bind(gl.ELEMENT_ARRAY_BUFFER)\r\n\t\tgl.BufferData(gl.ELEMENT_ARRAY_BUFFER, len(Shapes[i].elements)*2, Shapes[i].elements, gl.STATIC_DRAW)\r\n\t\tShapes[i].numElements = len(Shapes[i].elements)\r\n\r\n\t\tvertexAttribArray := shaderProgram.GetAttribLocation(\"position\")\r\n\t\tvertexAttribArray.AttribPointer(2, gl.FLOAT, false, 0, uintptr(0))\r\n\t\tvertexAttribArray.EnableArray()\r\n\t}\r\n}",
"func (n *BufferView) AddBuffer() *Buffer {\n\tn.UnfocusBuffers()\n\n\tconf := n.conf\n\tc := NewBuffer(conf, BufferConfig{\n\t\tconf.Theme.Background,\n\t\tconf.Theme.Foreground,\n\t\tconf.Theme.Cursor,\n\t\tconf.Theme.CursorInvert,\n\t\tconf.Theme.HighlightLineBackground,\n\t\tconf.Theme.GutterBackground,\n\t\tconf.Theme.GutterForeground,\n\t\tgui.GetDefaultFont(),\n\t}, n, len(n.buffers))\n\n\tc.SetFocus(true)\n\n\tw, h := n.GetSize()\n\n\tn.focusedBuff = c.index\n\tn.buffers = append(n.buffers, NewBufferPane(c))\n\tn.Resize(w, h)\n\n\treturn c\n}",
"func Draw(bp *BoundProgram) {\n\t// TODO might still need the buffer contents to be pushed:\n\t/*\n\t\tgl.BufferData(gl.ARRAY_BUFFER, len(cube_vertices)*4, gl.Ptr(cube_vertices), gl.STATIC_DRAW)\n\t*/\n\t// TODO might still need textures to be bound for the call:\n\t/*\n\t\tgl.ActiveTexture(gl.TEXTURE0)\n\t\tgl.BindTexture(gl.TEXTURE_2D, tCache.Get(\"placeholder\"))\n\t*/\n\t// TODO draw calls are themselves still specialized and param'd:\n\t/*\n\t\tgl.DrawArrays(gl.TRIANGLES, 0, 6*2*3)\n\t*/\n}",
"func (vao VertexArrayObject) VertexAttribPointer(attrIndex int, attrType Type, normalized bool, byteStride int, byteOffset int) {\n\tglx := vao.glx\n\tbufferType, bufferItemsPerVertex, err := attrType.asAttribute()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"converting attribute type %s to attribute: %w\", attrType, err))\n\t}\n\tglx.constants.VertexAttribPointer(\n\t\tglx.factory.Number(float64(attrIndex)),\n\t\tglx.factory.Number(float64(bufferItemsPerVertex)),\n\t\tglx.typeConverter.ToJs(bufferType),\n\t\tglx.factory.Boolean(normalized),\n\t\tglx.factory.Number(float64(byteStride)),\n\t\tglx.factory.Number(float64(byteOffset)),\n\t)\n}",
"func BindBuffer(target uint32, buffer uint32) {\n C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func (debugging *debuggingOpenGL) DrawBuffers(buffers []uint32) {\n\tdebugging.recordEntry(\"DrawBuffers\", buffers)\n\tdebugging.gl.DrawBuffers(buffers)\n\tdebugging.recordExit(\"DrawBuffers\")\n}",
"func (native *OpenGL) GenBuffers(n int32) []uint32 {\n\tbuffers := make([]uint32, n)\n\tgl.GenBuffers(n, &buffers[0])\n\treturn buffers\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n C.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func (b *Buffer) AttachBytes(buffer []byte, offset int, size int) {\n if len(buffer) < size {\n panic(\"invalid buffer\")\n }\n if size <= 0 {\n panic(\"invalid size\")\n }\n if offset > size {\n panic(\"invalid offset\")\n }\n\n b.data = buffer\n b.size = size\n b.offset = offset\n}",
"func (c *HTTPCollector) createBuffer() []*zipkincore.Span {\n\treturn c.batchPool.Get().([]*zipkincore.Span)\n}",
"func DrawBuffers(n int32, bufs *uint32) {\n C.glowDrawBuffers(gpDrawBuffers, (C.GLsizei)(n), (*C.GLenum)(unsafe.Pointer(bufs)))\n}",
"func (va *VertexArray) Draw() {\n\tgl.BindVertexArray(va.vao)\n\tgl.BindBuffer(gl.ELEMENT_ARRAY_BUFFER, va.ibo)\n\tgl.DrawElements(gl.TRIANGLES, int32(va.vertices), gl.UNSIGNED_INT, nil)\n}",
"func (al *AudioListener) setBuffer(size int) {\n\tal.Lock()\n\tdefer al.Unlock()\n\n\tal.buffer = make([]gumble.AudioPacket, 0, size)\n}",
"func (w *Window) SetBuffersGeometry(width, height, format int) int {\n\treturn int(C.ANativeWindow_setBuffersGeometry(w.cptr(), C.int32_t(width), C.int32_t(height), C.int32_t(format)))\n}"
] | [
"0.68458647",
"0.6728047",
"0.6564569",
"0.6445395",
"0.6325015",
"0.62588865",
"0.61179435",
"0.61179435",
"0.60733587",
"0.60733587",
"0.6057369",
"0.6017042",
"0.5837031",
"0.5797567",
"0.5795221",
"0.5682038",
"0.5667529",
"0.56582224",
"0.56328213",
"0.56065065",
"0.55949205",
"0.5581405",
"0.55657613",
"0.55657613",
"0.5545117",
"0.553444",
"0.553444",
"0.5530133",
"0.5500932",
"0.5485797",
"0.5470311",
"0.5468883",
"0.5464296",
"0.5464296",
"0.5437567",
"0.5428218",
"0.5417364",
"0.54132044",
"0.5412542",
"0.5385182",
"0.53775555",
"0.53668684",
"0.5366075",
"0.53152955",
"0.53152955",
"0.53141886",
"0.5291802",
"0.5267269",
"0.5267024",
"0.52623034",
"0.5262139",
"0.5247199",
"0.5215704",
"0.5195122",
"0.519114",
"0.51897824",
"0.5179577",
"0.51744974",
"0.51654035",
"0.51650923",
"0.5162638",
"0.5158044",
"0.51480085",
"0.5124774",
"0.5124774",
"0.5121097",
"0.5116536",
"0.5115693",
"0.511307",
"0.51116127",
"0.5103348",
"0.5101064",
"0.50934696",
"0.50774527",
"0.50758874",
"0.5075262",
"0.50591093",
"0.5051163",
"0.5050174",
"0.5038016",
"0.5027177",
"0.50250936",
"0.5012288",
"0.5011796",
"0.5002627",
"0.499752",
"0.49931103",
"0.49860734",
"0.49847734",
"0.4980496",
"0.4973803",
"0.49735957",
"0.49635926",
"0.49539772",
"0.49538687",
"0.49520993",
"0.49371314",
"0.49340647",
"0.49314183"
] | 0.6346768 | 5 |
set the blend color | func BlendColor(red float32, green float32, blue float32, alpha float32) {
C.glowBlendColor(gpBlendColor, (C.GLfloat)(red), (C.GLfloat)(green), (C.GLfloat)(blue), (C.GLfloat)(alpha))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func SetBlendColor(red Float, green Float, blue Float, alpha Float) {\n\tcred, _ := (C.GLfloat)(red), cgoAllocsUnknown\n\tcgreen, _ := (C.GLfloat)(green), cgoAllocsUnknown\n\tcblue, _ := (C.GLfloat)(blue), cgoAllocsUnknown\n\tcalpha, _ := (C.GLfloat)(alpha), cgoAllocsUnknown\n\tC.glBlendColor(cred, cgreen, cblue, calpha)\n}",
"func BlendColor(red float32, green float32, blue float32, alpha float32) {\n C.glowBlendColor(gpBlendColor, (C.GLfloat)(red), (C.GLfloat)(green), (C.GLfloat)(blue), (C.GLfloat)(alpha))\n}",
"func BlendColor(red, green, blue, alpha float32) {\n\tgl.BlendColor(red, green, blue, alpha)\n}",
"func BlendColor(red float32, green float32, blue float32, alpha float32) {\n\tsyscall.Syscall6(gpBlendColor, 4, uintptr(math.Float32bits(red)), uintptr(math.Float32bits(green)), uintptr(math.Float32bits(blue)), uintptr(math.Float32bits(alpha)), 0, 0)\n}",
"func BlendColor(red GLfloat, green GLfloat, blue GLfloat, alpha GLfloat) {\n\tC.glBlendColor(C.GLfloat(red), C.GLfloat(green), C.GLfloat(blue), C.GLfloat(alpha))\n}",
"func blend(a, b color.Color, blend float64) color.RGBA {\n\tinv := float64(0) // float64(1)-blend\n\tar, ag, ab, aa := a.RGBA()\n\tbr, bg, bb, ba := b.RGBA()\n\tor, og, ob, _ := uint32(float64(ar)*blend+float64(br)*inv),\n\t\tuint32(float64(ag)*blend+float64(bg)*inv),\n\t\tuint32(float64(ab)*blend+float64(bb)*inv),\n\t\tuint32(float64(aa)*blend+float64(ba)*inv)\n\treturn color.RGBA{uint8(or >> 8), uint8(og >> 8), uint8(ob >> 8), 255} //uint8(oa>>8)}\n}",
"func (c Color) Blend(pct float32, clr color.Color) Color {\n\tf32 := NRGBAf32Model.Convert(c).(NRGBAf32)\n\tothc := NRGBAf32Model.Convert(clr).(NRGBAf32)\n\tpct = mat32.Clamp(pct, 0, 100.0)\n\toth := pct / 100.0\n\tme := 1.0 - pct/100.0\n\tf32.R = me*f32.R + oth*othc.R\n\tf32.G = me*f32.G + oth*othc.G\n\tf32.B = me*f32.B + oth*othc.B\n\tf32.A = me*f32.A + oth*othc.A\n\treturn ColorModel.Convert(f32).(Color)\n}",
"func (c *Context) BlendColor(r, g, b, a float32) gfx.ContextStateValue {\n\treturn s.CSV{\n\t\tValue: [4]float32{r, g, b, a},\n\t\tDefaultValue: [4]float32{0, 0, 0, 0}, // TODO(slimsag): verify\n\t\tKey: csBlendColor,\n\t\tGLCall: c.glBlendColor,\n\t}\n}",
"func setBlendFunc(cmp pixel.ComposeMethod) {\n\tswitch cmp {\n\tcase pixel.ComposeOver:\n\t\tglhf.BlendFunc(glhf.One, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeIn:\n\t\tglhf.BlendFunc(glhf.DstAlpha, glhf.Zero)\n\tcase pixel.ComposeOut:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.Zero)\n\tcase pixel.ComposeAtop:\n\t\tglhf.BlendFunc(glhf.DstAlpha, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeRover:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.One)\n\tcase pixel.ComposeRin:\n\t\tglhf.BlendFunc(glhf.Zero, glhf.SrcAlpha)\n\tcase pixel.ComposeRout:\n\t\tglhf.BlendFunc(glhf.Zero, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeRatop:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.SrcAlpha)\n\tcase pixel.ComposeXor:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposePlus:\n\t\tglhf.BlendFunc(glhf.One, glhf.One)\n\tcase pixel.ComposeCopy:\n\t\tglhf.BlendFunc(glhf.One, glhf.Zero)\n\tdefault:\n\t\tpanic(errors.New(\"Canvas: invalid compose method\"))\n\t}\n}",
"func PBLENDVB(x, mx, x1 operand.Op) { ctx.PBLENDVB(x, mx, x1) }",
"func (originalColour Colour) Blend(NewColour Colour, Ratio float32) (result Colour) {\n\tif Ratio <= 0 {\n\t\treturn originalColour\n\t}\n\tif Ratio >= 1 {\n\t\treturn NewColour\n\t}\n\n\tresult.B = uint8((float32(NewColour.B)-float32(originalColour.B))*Ratio + float32(originalColour.B))\n\tresult.G = uint8((float32(NewColour.G)-float32(originalColour.G))*Ratio + float32(originalColour.G))\n\tresult.R = uint8((float32(NewColour.R)-float32(originalColour.R))*Ratio + float32(originalColour.R))\n\tresult.L = uint8((float32(NewColour.L)-float32(originalColour.L))*Ratio + float32(originalColour.L))\n\treturn result\n}",
"func (self *Graphics) SetBlendModeA(member int) {\n self.Object.Set(\"blendMode\", member)\n}",
"func VPBLENDVB(xy, mxy, xy1, xy2 operand.Op) { ctx.VPBLENDVB(xy, mxy, xy1, xy2) }",
"func (self *TileSprite) SetBlendModeA(member int) {\n self.Object.Set(\"blendMode\", member)\n}",
"func BlendEquation(mode uint32) {\n C.glowBlendEquation(gpBlendEquation, (C.GLenum)(mode))\n}",
"func (a Energy) Blend(b Energy, n float64) Energy {\n\treturn Energy(geom.Vector3(a).Lerp(geom.Vector3(b), n))\n}",
"func (h *Height) Blend(delta float64) {\n\tif !h.ended {\n\t\th.current += delta\n\t}\n}",
"func (y *Yeelight) SetRGB(value, effect, duration string) string {\n\tcmd := `{\"id\":3,\"method\":\"set_rgb\",\"params\":[` + value + `,\"` + effect + `\",` + duration + `]}`\n\treturn y.request(cmd)\n}",
"func (bl *Blend) Set(blendType string) error {\r\n\tif utils.Contains(bl.Modes, blendType) {\r\n\t\tbl.Current = blendType\r\n\t\treturn nil\r\n\t}\r\n\treturn fmt.Errorf(\"unsupported blend mode\")\r\n}",
"func (bl *Blend) SetLum(rgb Color, l float64) Color {\r\n\tdelta := l - bl.Lum(rgb)\r\n\treturn bl.clip(Color{\r\n\t\trgb.R + delta,\r\n\t\trgb.G + delta,\r\n\t\trgb.B + delta,\r\n\t})\r\n}",
"func VPBLENDMB(ops ...operand.Op) { ctx.VPBLENDMB(ops...) }",
"func VPBLENDD(i, mxy, xy, xy1 operand.Op) { ctx.VPBLENDD(i, mxy, xy, xy1) }",
"func SetBlendEquation(mode Enum) {\n\tcmode, _ := (C.GLenum)(mode), cgoAllocsUnknown\n\tC.glBlendEquation(cmode)\n}",
"func (c *canvas) Blend(i image.Image, pos BlendPosition, fn BlendFunc) error {\n\treturn c.mutate(func() (*pxl, error) {\n\t\treturn blend(c.pxl, i, pos, fn)\n\t})\n}",
"func (w *Width) Blend(delta float64) {\n\tif !w.ended {\n\t\tw.current += delta\n\t}\n}",
"func BLENDPS(i, mx, x operand.Op) { ctx.BLENDPS(i, mx, x) }",
"func VPBLENDMD(ops ...operand.Op) { ctx.VPBLENDMD(ops...) }",
"func BlendFunc(sfactor, dfactor Enum) {\n\tgl.BlendFunc(uint32(sfactor), uint32(dfactor))\n}",
"func BLENDPD(i, mx, x operand.Op) { ctx.BLENDPD(i, mx, x) }",
"func VPBLENDMW(ops ...operand.Op) { ctx.VPBLENDMW(ops...) }",
"func NewBlend() *Blend {\r\n\treturn &Blend{\r\n\t\tModes: []string{\r\n\t\t\tNormal,\r\n\t\t\tDarken,\r\n\t\t\tLighten,\r\n\t\t\tMultiply,\r\n\t\t\tScreen,\r\n\t\t\tOverlay,\r\n\t\t\tSoftLight,\r\n\t\t\tHardLight,\r\n\t\t\tColorDodge,\r\n\t\t\tColorBurn,\r\n\t\t\tDifference,\r\n\t\t\tExclusion,\r\n\t\t\tHue,\r\n\t\t\tSaturation,\r\n\t\t\tColorMode,\r\n\t\t\tLuminosity,\r\n\t\t},\r\n\t}\r\n}",
"func (self *Graphics) BlendMode() int{\n return self.Object.Get(\"blendMode\").Int()\n}",
"func BlendFuncSeparate(sfactorRGB uint32, dfactorRGB uint32, sfactorAlpha uint32, dfactorAlpha uint32) {\n C.glowBlendFuncSeparate(gpBlendFuncSeparate, (C.GLenum)(sfactorRGB), (C.GLenum)(dfactorRGB), (C.GLenum)(sfactorAlpha), (C.GLenum)(dfactorAlpha))\n}",
"func (foreground RGBA) BlendWith(background RGBA) (color RGBA) {\n\n\tfr, fg, fb, fa := uint(foreground.R), uint(foreground.G), uint(foreground.B), uint(foreground.A)\n\tbr, bg, bb, ba := uint(background.R), uint(background.G), uint(background.B), uint(255) // want background to be fully colored\n\n\topacity := fa\n\tbackgroundOpacity := (ba * (255 - fa)) >> 8\n\n\tnewColor := RGBA{\n\t\tuint8((fr*opacity)>>8 + (br*backgroundOpacity)>>8),\n\t\tuint8((fg*opacity)>>8 + (bg*backgroundOpacity)>>8),\n\t\tuint8((fb*opacity)>>8 + (bb*backgroundOpacity)>>8),\n\t\tuint8(opacity),\n\t}\n\n\treturn newColor\n}",
"func VPBLENDMD_BCST(ops ...operand.Op) { ctx.VPBLENDMD_BCST(ops...) }",
"func (p *Ball) SetColor(color *objects.Vector) { p.color = color }",
"func VPBLENDW(i, mxy, xy, xy1 operand.Op) { ctx.VPBLENDW(i, mxy, xy, xy1) }",
"func BlendFunc(sfactor GLenum, dfactor GLenum) {\n\tC.glBlendFunc(C.GLenum(sfactor), C.GLenum(dfactor))\n}",
"func VBLENDMPD(ops ...operand.Op) { ctx.VBLENDMPD(ops...) }",
"func BLENDVPS(x, mx, x1 operand.Op) { ctx.BLENDVPS(x, mx, x1) }",
"func VBLENDPD(i, mxy, xy, xy1 operand.Op) { ctx.VBLENDPD(i, mxy, xy, xy1) }",
"func BlendFunc(src, dst BlendFactor) {\n\tgl.BlendFunc(uint32(src), uint32(dst))\n}",
"func PBLENDW(i, mx, x operand.Op) { ctx.PBLENDW(i, mx, x) }",
"func blendPixelOverPixel(ic_old,ic_new uint8, al_new float64)(c_res uint8) {\n\n\tal_old := float64(1); _=al_old\n\tc_old := float64(ic_old)\n\tc_new := float64(ic_new)\n\n\talgo1 := c_old*(1-al_new) + c_new*al_new\n\tc_res = uint8( util.Min( util.Round(algo1),255) )\n\t//log.Printf(\"\\t\\t %3.1f + %3.1f = %3.1f\", c_old*(1-al_new),c_new*al_new, algo1)\n\n\treturn \n}",
"func BlendFunc(sfactor Enum, dfactor Enum) {\n\tcsfactor, _ := (C.GLenum)(sfactor), cgoAllocsUnknown\n\tcdfactor, _ := (C.GLenum)(dfactor), cgoAllocsUnknown\n\tC.glBlendFunc(csfactor, cdfactor)\n}",
"func blend(c1, c2 uint8, ratio float64) uint8 {\n\treturn uint8(math.Floor((float64(c1)*(1.0-ratio) + float64(c2)*ratio) + 0.5))\n}",
"func BLENDVPD(x, mx, x1 operand.Op) { ctx.BLENDVPD(x, mx, x1) }",
"func (bl *Blend) clip(rgb Color) Color {\r\n\tr, g, b := rgb.R, rgb.G, rgb.B\r\n\r\n\tl := bl.Lum(rgb)\r\n\tmin := utils.Min(r, g, b)\r\n\tmax := utils.Max(r, g, b)\r\n\r\n\tif min < 0 {\r\n\t\tr = l + (((r - l) * l) / (l - min))\r\n\t\tg = l + (((g - l) * l) / (l - min))\r\n\t\tb = l + (((b - l) * l) / (l - min))\r\n\t}\r\n\tif max > 1 {\r\n\t\tr = l + (((r - l) * (1 - l)) / (max - l))\r\n\t\tg = l + (((g - l) * (1 - l)) / (max - l))\r\n\t\tb = l + (((b - l) * (1 - l)) / (max - l))\r\n\t}\r\n\r\n\treturn Color{R: r, G: g, B: b}\r\n}",
"func BlendEquation(mode uint32) {\n\tsyscall.Syscall(gpBlendEquation, 1, uintptr(mode), 0, 0)\n}",
"func BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n C.glowBlendEquationSeparate(gpBlendEquationSeparate, (C.GLenum)(modeRGB), (C.GLenum)(modeAlpha))\n}",
"func (rgbw *Rgbw) SetColor(r uint8, g uint8, b uint8, w uint8) {\n\trgbw[0] = r\n\trgbw[1] = g\n\trgbw[2] = b\n\trgbw[3] = w\n}",
"func BlendEquation(mode uint32) {\n\tC.glowBlendEquation(gpBlendEquation, (C.GLenum)(mode))\n}",
"func BlendEquation(mode uint32) {\n\tC.glowBlendEquation(gpBlendEquation, (C.GLenum)(mode))\n}",
"func (spriteBatch *SpriteBatch) SetColor(vals ...float32) {\n\tspriteBatch.color = vals\n}",
"func ClearColor(red float32, green float32, blue float32, alpha float32) {\n C.glowClearColor(gpClearColor, (C.GLfloat)(red), (C.GLfloat)(green), (C.GLfloat)(blue), (C.GLfloat)(alpha))\n}",
"func (gl *WebGL) BlendFunc(sFactor GLEnum, gFactor GLEnum) {\n\tgl.context.Call(\"blendFunc\", sFactor, gFactor)\n}",
"func VBLENDMPD_BCST(ops ...operand.Op) { ctx.VBLENDMPD_BCST(ops...) }",
"func (icn *Icon) Blend() {\n // get pixels \"parent-pixels\" of Parent that are behind Window\n bg := icn.getBackground()\n\n // copy \"parent-pixels\" into buffer \"ximage\", overwriting existing completely\n xgraphics.Blend(icn.ximage, bg, image.Point{0,0})\n\n // alpha-blend Image into buffer \"ximage\"\n xgraphics.Blend(icn.ximage, icn.Image, image.Point{0,0})\n\n // swap ximage into Window as background\n icn.ximage.XSurfaceSet(icn.Window.Id)\n icn.ximage.XDraw()\n icn.ximage.XPaint(icn.Window.Id)\n\n // free the pixbuff memory!\n icn.ximage.Destroy()\n}",
"func (native *OpenGL) BlendFunc(sfactor uint32, dfactor uint32) {\n\tgl.BlendFunc(sfactor, dfactor)\n}",
"func BlendEquation(mode Enum) {\n\tgl.BlendEquation(uint32(mode))\n}",
"func VPBLENDMD_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMD_Z(mxyz, xyz, k, xyz1) }",
"func VBLENDVPD(xy, mxy, xy1, xy2 operand.Op) { ctx.VBLENDVPD(xy, mxy, xy1, xy2) }",
"func VPBLENDMB_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMB_Z(mxyz, xyz, k, xyz1) }",
"func VBLENDMPS(ops ...operand.Op) { ctx.VBLENDMPS(ops...) }",
"func (x *ImgSpanner) SetColor(c interface{}) {\n\tswitch c := c.(type) {\n\tcase color.Color:\n\t\tx.colorFunc = nil\n\t\tr, g, b, a := c.RGBA()\n\t\tif x.xpixel == true { // apparently r and b values swap in xgraphics.Image\n\t\t\tr, b = b, r\n\t\t}\n\t\tx.fgColor = color.RGBA{\n\t\t\tR: uint8(r >> 8),\n\t\t\tG: uint8(g >> 8),\n\t\t\tB: uint8(b >> 8),\n\t\t\tA: uint8(a >> 8)}\n\tcase rasterx.ColorFunc:\n\t\tx.colorFunc = c\n\t}\n}",
"func (canvas *Canvas) SetColor(r, g, b float32) {\n\twriteCommand(canvas.contents, \"rg\", r, g, b)\n}",
"func (uni *Uniform3fv) SetColor(idx int, color *math32.Color) {\n\n\tpos := idx * 3\n\tuni.v[pos] = color.R\n\tuni.v[pos+1] = color.G\n\tuni.v[pos+2] = color.B\n}",
"func VPBLENDMQ(ops ...operand.Op) { ctx.VPBLENDMQ(ops...) }",
"func (c Color) SetRGBA(v colour.Colour) {\n\tcolours[c] = colour.RGBAOf(v)\n\tpalette.changed = true\n}",
"func BlendFuncSeparate(sfactorRGB uint32, dfactorRGB uint32, sfactorAlpha uint32, dfactorAlpha uint32) {\n\tsyscall.Syscall6(gpBlendFuncSeparate, 4, uintptr(sfactorRGB), uintptr(dfactorRGB), uintptr(sfactorAlpha), uintptr(dfactorAlpha), 0, 0)\n}",
"func (c Color) SetB(b byte) Color {\n\treturn Color{c.R, c.G, b, c.A}\n}",
"func BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n\tsyscall.Syscall(gpBlendEquationSeparate, 2, uintptr(modeRGB), uintptr(modeAlpha), 0)\n}",
"func VPBLENDMQ_BCST(ops ...operand.Op) { ctx.VPBLENDMQ_BCST(ops...) }",
"func (buf *CommandBuffer) SetBlendConstants(blendConstants [4]float32) {\n\tC.domVkCmdSetBlendConstants(buf.fps[vkCmdSetBlendConstants], buf.hnd, (*C.float)(slice2ptr(uptr(&blendConstants))))\n}",
"func VPBLENDMW_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMW_Z(mxyz, xyz, k, xyz1) }",
"func VBLENDMPD_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VBLENDMPD_Z(mxyz, xyz, k, xyz1) }",
"func (debugging *debuggingOpenGL) BlendFunc(sfactor uint32, dfactor uint32) {\n\tdebugging.recordEntry(\"BlendFunc\", sfactor, dfactor)\n\tdebugging.gl.BlendFunc(sfactor, dfactor)\n\tdebugging.recordExit(\"BlendFunc\")\n}",
"func SetClearColor(r uint8, g uint8, b uint8) {\n\n\tgl.ClearColor(gl.Float(r)/255, gl.Float(g)/255, gl.Float(b)/255, 1.0)\n}",
"func (dw *DrawingWand) SetFillOpacity(opacity float64) {\n\tC.DrawSetFillOpacity(dw.dw, C.double(opacity))\n}",
"func ColourBlendEvaluator(i *Interpreter, b *Block) Value {\n\tcolour1 := i.Evaluate(b.SingleBlockValueWithName(i, \"COLOUR1\")).AsColour(i)\n\tcolour2 := i.Evaluate(b.SingleBlockValueWithName(i, \"COLOUR2\")).AsColour(i)\n\tratio := i.Evaluate(b.SingleBlockValueWithName(i, \"RATIO\")).AsNumber(i)\n\n\tratio = math.Min(1.0, math.Max(0.0, ratio))\n\treturn Colour{\n\t\tblend(colour1.Red, colour2.Red, ratio),\n\t\tblend(colour1.Green, colour2.Green, ratio),\n\t\tblend(colour1.Blue, colour2.Blue, ratio),\n\t}\n}",
"func (w *Wrapper) Set(c Color) error {\n\treturn w.SetMask(c, 0xFF)\n}",
"func (a HSV) Lerp(blend Color, t float64) Color {\n\tb := blend.HSV()\n\n\t// Calculate the shortest direction in the color wheel.\n\tvar h float64\n\td := b.H - a.H\n\tif d < 0 {\n\t\ta.H, b.H = b.H, a.H\n\t\td = -d\n\t\tt = 1 - t\n\t}\n\tif d > 0.5 {\n\t\ta.H = a.H + 1\n\t\th = math.Mod(a.H+t*(b.H-a.H), 1)\n\t} else if d <= 0.5 {\n\t\th = a.H + t*d\n\t}\n\n\treturn HSV{\n\t\tH: h,\n\t\tS: a.S + t*(b.S-a.S),\n\t\tV: a.V + t*(b.V-a.V),\n\t\tA: a.A + t*(b.A-a.A),\n\t}\n}",
"func SetColorTemp(temp int) {\n\t// An attempt to fix https://github.com/d4l3k/go-sct/issues/9\n\tif runtime.GOOS == \"windows\" {\n\t\tsetColorTemp(temp + 1)\n\t}\n\tsetColorTemp(temp)\n}",
"func VPBLENDMQ_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMQ_Z(mxyz, xyz, k, xyz1) }",
"func (self *TileSprite) BlendMode() int{\n return self.Object.Get(\"blendMode\").Int()\n}",
"func (c Color) SetAlpha(a uint8) Color {\n\tr, g, b, oa := c>>24, (c>>16)&0xFF, (c>>8)&0xFF, c&0xFF;\n\tif oa == 0 {\n\t\treturn 0\n\t}\n\tr = r * Color(a) / oa;\n\tif r < 0 {\n\t\tr = 0\n\t}\n\tif r > 0xFF {\n\t\tr = 0xFF\n\t}\n\tg = g * Color(a) / oa;\n\tif g < 0 {\n\t\tg = 0\n\t}\n\tif g > 0xFF {\n\t\tg = 0xFF\n\t}\n\tb = b * Color(a) / oa;\n\tif b < 0 {\n\t\tb = 0\n\t}\n\tif b > 0xFF {\n\t\tb = 0xFF\n\t}\n\treturn r<<24 | g<<16 | b<<8 | Color(a);\n}",
"func VBLENDPS(i, mxy, xy, xy1 operand.Op) { ctx.VBLENDPS(i, mxy, xy, xy1) }",
"func Blend(source, dest map[string]interface{}) error {\n\n\tif _, ok := functionMap[blendFunctionMergeDeep]; !ok {\n\t\tfunctionMap[blendFunctionMergeDeep] = MergeDeep\n\t}\n\n\tfor key, value := range source {\n\t\tif keyIsFunction(key) {\n\t\t\tif err := functionMap[key](value.(map[string]interface{}), dest); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else {\n\t\t\t// do default behaviour\n\t\t\tif err := defaultBlendFunc(source, dest); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}",
"func Set(p ...Attribute) (c *Color) {\n\tc = New(p...)\n\tc.Set()\n\n\treturn\n}",
"func VBLENDMPS_BCST(ops ...operand.Op) { ctx.VBLENDMPS_BCST(ops...) }",
"func (c *Context) VPBLENDVB(xy, mxy, xy1, xy2 operand.Op) {\n\tc.addinstruction(x86.VPBLENDVB(xy, mxy, xy1, xy2))\n}",
"func (c *Context) PBLENDVB(x, mx, x1 operand.Op) {\n\tc.addinstruction(x86.PBLENDVB(x, mx, x1))\n}",
"func (uni *Uniform1fv) SetColor(pos int, color *math32.Color) {\n\n\tuni.v[pos] = color.R\n\tuni.v[pos+1] = color.G\n\tuni.v[pos+2] = color.B\n}",
"func (y *Yeelight) SetBright(brightness, effect, duration string) string {\n\tcmd := `{\"id\":5,\"method\":\"set_bright\",\"params\":[` + brightness + `,\"` + effect + `\",` + duration + `]}`\n\treturn y.request(cmd)\n}",
"func (p *RGBAf) SetRGB(x, y int, c *Vector3) {\n\tif !(image.Point{x, y}.In(p.Rect)) {\n\t\treturn\n\t}\n\ti := p.PixOffset(x, y)\n\ts := p.Pix[i : i+4 : i+4] // Small cap improves performance, see https://golang.org/issue/27857\n\ts[0] = Clamp(c.e[0]*255.999, 0.0, 255.0)\n\ts[1] = Clamp(c.e[1]*255.999, 0.0, 255.0)\n\ts[2] = Clamp(c.e[2]*255.999, 0.0, 255.0)\n\ts[3] = 255.0\n}",
"func BlendFuncSeparate(sfactorRGB uint32, dfactorRGB uint32, sfactorAlpha uint32, dfactorAlpha uint32) {\n\tC.glowBlendFuncSeparate(gpBlendFuncSeparate, (C.GLenum)(sfactorRGB), (C.GLenum)(dfactorRGB), (C.GLenum)(sfactorAlpha), (C.GLenum)(dfactorAlpha))\n}",
"func BlendFuncSeparate(sfactorRGB uint32, dfactorRGB uint32, sfactorAlpha uint32, dfactorAlpha uint32) {\n\tC.glowBlendFuncSeparate(gpBlendFuncSeparate, (C.GLenum)(sfactorRGB), (C.GLenum)(dfactorRGB), (C.GLenum)(sfactorAlpha), (C.GLenum)(dfactorAlpha))\n}",
"func (t *TextRenderer) SetColor(red float32, green float32, blue float32, alpha float32) {\n\tt.shader.Use().SetVec4f(\"textColor\", mgl32.Vec4{red, green, blue, alpha})\n}",
"func (w *VT100Writer) SetColor(fg, bg Color, bold bool) {\n\tif bold {\n\t\tw.SetDisplayAttributes(fg, bg, DisplayBold)\n\t} else {\n\t\t// If using `DisplayDefualt`, it will be broken in some environment.\n\t\t// Details are https://github.com/c-bata/go-prompt/pull/85\n\t\tw.SetDisplayAttributes(fg, bg, DisplayReset)\n\t}\n}"
] | [
"0.7911841",
"0.7553172",
"0.74883896",
"0.74558616",
"0.7367328",
"0.6864653",
"0.66877264",
"0.66633636",
"0.64759445",
"0.64553934",
"0.64171994",
"0.6339601",
"0.6277886",
"0.6235966",
"0.6174357",
"0.61663425",
"0.61551267",
"0.614444",
"0.6050763",
"0.6037141",
"0.60077727",
"0.5976372",
"0.5976288",
"0.59511507",
"0.59439456",
"0.5918886",
"0.59175074",
"0.589624",
"0.5877391",
"0.5858687",
"0.58337444",
"0.58216256",
"0.5814978",
"0.57924205",
"0.57403475",
"0.57272077",
"0.5724097",
"0.5718282",
"0.5710967",
"0.5701975",
"0.56952006",
"0.5678635",
"0.56690276",
"0.56659245",
"0.56591415",
"0.56538993",
"0.5652055",
"0.56402117",
"0.562829",
"0.56133914",
"0.56107783",
"0.5605058",
"0.5605058",
"0.5604802",
"0.5566028",
"0.5542193",
"0.5537003",
"0.5517189",
"0.5509875",
"0.549543",
"0.5469629",
"0.5466274",
"0.5452621",
"0.5446347",
"0.5446112",
"0.5445458",
"0.5438214",
"0.54280174",
"0.54186577",
"0.5412952",
"0.54004073",
"0.53958195",
"0.5386836",
"0.53614557",
"0.5358965",
"0.5315605",
"0.53144264",
"0.530729",
"0.5300352",
"0.52985007",
"0.52952135",
"0.52920145",
"0.52844346",
"0.52560484",
"0.52554303",
"0.52232194",
"0.5214026",
"0.5206029",
"0.52024466",
"0.51908183",
"0.5188374",
"0.518525",
"0.5185014",
"0.51835823",
"0.51771647",
"0.516504",
"0.516504",
"0.5158185",
"0.5157466"
] | 0.7500396 | 3 |
specify the equation used for both the RGB blend equation and the Alpha blend equation | func BlendEquation(mode uint32) {
C.glowBlendEquation(gpBlendEquation, (C.GLenum)(mode))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func BlendEquation(mode uint32) {\n C.glowBlendEquation(gpBlendEquation, (C.GLenum)(mode))\n}",
"func PBLENDVB(x, mx, x1 operand.Op) { ctx.PBLENDVB(x, mx, x1) }",
"func VPBLENDMB(ops ...operand.Op) { ctx.VPBLENDMB(ops...) }",
"func BlendEquation(mode uint32) {\n\tsyscall.Syscall(gpBlendEquation, 1, uintptr(mode), 0, 0)\n}",
"func VPBLENDVB(xy, mxy, xy1, xy2 operand.Op) { ctx.VPBLENDVB(xy, mxy, xy1, xy2) }",
"func blendPixelOverPixel(ic_old,ic_new uint8, al_new float64)(c_res uint8) {\n\n\tal_old := float64(1); _=al_old\n\tc_old := float64(ic_old)\n\tc_new := float64(ic_new)\n\n\talgo1 := c_old*(1-al_new) + c_new*al_new\n\tc_res = uint8( util.Min( util.Round(algo1),255) )\n\t//log.Printf(\"\\t\\t %3.1f + %3.1f = %3.1f\", c_old*(1-al_new),c_new*al_new, algo1)\n\n\treturn \n}",
"func blend(a, b color.Color, blend float64) color.RGBA {\n\tinv := float64(0) // float64(1)-blend\n\tar, ag, ab, aa := a.RGBA()\n\tbr, bg, bb, ba := b.RGBA()\n\tor, og, ob, _ := uint32(float64(ar)*blend+float64(br)*inv),\n\t\tuint32(float64(ag)*blend+float64(bg)*inv),\n\t\tuint32(float64(ab)*blend+float64(bb)*inv),\n\t\tuint32(float64(aa)*blend+float64(ba)*inv)\n\treturn color.RGBA{uint8(or >> 8), uint8(og >> 8), uint8(ob >> 8), 255} //uint8(oa>>8)}\n}",
"func VPBLENDMQ(ops ...operand.Op) { ctx.VPBLENDMQ(ops...) }",
"func BlendColor(red float32, green float32, blue float32, alpha float32) {\n C.glowBlendColor(gpBlendColor, (C.GLfloat)(red), (C.GLfloat)(green), (C.GLfloat)(blue), (C.GLfloat)(alpha))\n}",
"func VPBLENDMW(ops ...operand.Op) { ctx.VPBLENDMW(ops...) }",
"func BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n C.glowBlendEquationSeparate(gpBlendEquationSeparate, (C.GLenum)(modeRGB), (C.GLenum)(modeAlpha))\n}",
"func VPBLENDW(i, mxy, xy, xy1 operand.Op) { ctx.VPBLENDW(i, mxy, xy, xy1) }",
"func VPBLENDMD(ops ...operand.Op) { ctx.VPBLENDMD(ops...) }",
"func BlendEquation(mode Enum) {\n\tgl.BlendEquation(uint32(mode))\n}",
"func VPBLENDD(i, mxy, xy, xy1 operand.Op) { ctx.VPBLENDD(i, mxy, xy, xy1) }",
"func VBLENDMPD(ops ...operand.Op) { ctx.VBLENDMPD(ops...) }",
"func setBlendFunc(cmp pixel.ComposeMethod) {\n\tswitch cmp {\n\tcase pixel.ComposeOver:\n\t\tglhf.BlendFunc(glhf.One, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeIn:\n\t\tglhf.BlendFunc(glhf.DstAlpha, glhf.Zero)\n\tcase pixel.ComposeOut:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.Zero)\n\tcase pixel.ComposeAtop:\n\t\tglhf.BlendFunc(glhf.DstAlpha, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeRover:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.One)\n\tcase pixel.ComposeRin:\n\t\tglhf.BlendFunc(glhf.Zero, glhf.SrcAlpha)\n\tcase pixel.ComposeRout:\n\t\tglhf.BlendFunc(glhf.Zero, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeRatop:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.SrcAlpha)\n\tcase pixel.ComposeXor:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposePlus:\n\t\tglhf.BlendFunc(glhf.One, glhf.One)\n\tcase pixel.ComposeCopy:\n\t\tglhf.BlendFunc(glhf.One, glhf.Zero)\n\tdefault:\n\t\tpanic(errors.New(\"Canvas: invalid compose method\"))\n\t}\n}",
"func BLENDPS(i, mx, x operand.Op) { ctx.BLENDPS(i, mx, x) }",
"func VPBLENDMQ_BCST(ops ...operand.Op) { ctx.VPBLENDMQ_BCST(ops...) }",
"func blend(c1, c2 uint8, ratio float64) uint8 {\n\treturn uint8(math.Floor((float64(c1)*(1.0-ratio) + float64(c2)*ratio) + 0.5))\n}",
"func BLENDVPS(x, mx, x1 operand.Op) { ctx.BLENDVPS(x, mx, x1) }",
"func VBLENDPS(i, mxy, xy, xy1 operand.Op) { ctx.VBLENDPS(i, mxy, xy, xy1) }",
"func BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n\tsyscall.Syscall(gpBlendEquationSeparate, 2, uintptr(modeRGB), uintptr(modeAlpha), 0)\n}",
"func VBLENDPD(i, mxy, xy, xy1 operand.Op) { ctx.VBLENDPD(i, mxy, xy, xy1) }",
"func VBLENDMPD_BCST(ops ...operand.Op) { ctx.VBLENDMPD_BCST(ops...) }",
"func VBLENDMPS(ops ...operand.Op) { ctx.VBLENDMPS(ops...) }",
"func VPBLENDMD_BCST(ops ...operand.Op) { ctx.VPBLENDMD_BCST(ops...) }",
"func BlendColor(red float32, green float32, blue float32, alpha float32) {\n\tsyscall.Syscall6(gpBlendColor, 4, uintptr(math.Float32bits(red)), uintptr(math.Float32bits(green)), uintptr(math.Float32bits(blue)), uintptr(math.Float32bits(alpha)), 0, 0)\n}",
"func PBLENDW(i, mx, x operand.Op) { ctx.PBLENDW(i, mx, x) }",
"func (originalColour Colour) Blend(NewColour Colour, Ratio float32) (result Colour) {\n\tif Ratio <= 0 {\n\t\treturn originalColour\n\t}\n\tif Ratio >= 1 {\n\t\treturn NewColour\n\t}\n\n\tresult.B = uint8((float32(NewColour.B)-float32(originalColour.B))*Ratio + float32(originalColour.B))\n\tresult.G = uint8((float32(NewColour.G)-float32(originalColour.G))*Ratio + float32(originalColour.G))\n\tresult.R = uint8((float32(NewColour.R)-float32(originalColour.R))*Ratio + float32(originalColour.R))\n\tresult.L = uint8((float32(NewColour.L)-float32(originalColour.L))*Ratio + float32(originalColour.L))\n\treturn result\n}",
"func VPBLENDMQ_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMQ_Z(mxyz, xyz, k, xyz1) }",
"func (debugging *debuggingOpenGL) BlendEquation(mode uint32) {\n\tdebugging.recordEntry(\"BlendEquation\", mode)\n\tdebugging.gl.BlendEquation(mode)\n\tdebugging.recordExit(\"BlendEquation\")\n}",
"func VPBLENDMB_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMB_Z(mxyz, xyz, k, xyz1) }",
"func BLENDPD(i, mx, x operand.Op) { ctx.BLENDPD(i, mx, x) }",
"func VPBLENDMW_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMW_Z(mxyz, xyz, k, xyz1) }",
"func BlendColor(red GLfloat, green GLfloat, blue GLfloat, alpha GLfloat) {\n\tC.glBlendColor(C.GLfloat(red), C.GLfloat(green), C.GLfloat(blue), C.GLfloat(alpha))\n}",
"func VBLENDVPD(xy, mxy, xy1, xy2 operand.Op) { ctx.VBLENDVPD(xy, mxy, xy1, xy2) }",
"func BlendColor(red, green, blue, alpha float32) {\n\tgl.BlendColor(red, green, blue, alpha)\n}",
"func BlendFuncSeparate(sfactorRGB uint32, dfactorRGB uint32, sfactorAlpha uint32, dfactorAlpha uint32) {\n C.glowBlendFuncSeparate(gpBlendFuncSeparate, (C.GLenum)(sfactorRGB), (C.GLenum)(dfactorRGB), (C.GLenum)(sfactorAlpha), (C.GLenum)(dfactorAlpha))\n}",
"func VBLENDMPD_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VBLENDMPD_Z(mxyz, xyz, k, xyz1) }",
"func BLENDVPD(x, mx, x1 operand.Op) { ctx.BLENDVPD(x, mx, x1) }",
"func BlendColor(red float32, green float32, blue float32, alpha float32) {\n\tC.glowBlendColor(gpBlendColor, (C.GLfloat)(red), (C.GLfloat)(green), (C.GLfloat)(blue), (C.GLfloat)(alpha))\n}",
"func BlendColor(red float32, green float32, blue float32, alpha float32) {\n\tC.glowBlendColor(gpBlendColor, (C.GLfloat)(red), (C.GLfloat)(green), (C.GLfloat)(blue), (C.GLfloat)(alpha))\n}",
"func VBLENDMPS_BCST(ops ...operand.Op) { ctx.VBLENDMPS_BCST(ops...) }",
"func (native *OpenGL) BlendEquation(mode uint32) {\n\tgl.BlendEquation(mode)\n}",
"func VPBLENDMD_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMD_Z(mxyz, xyz, k, xyz1) }",
"func (a Energy) Blend(b Energy, n float64) Energy {\n\treturn Energy(geom.Vector3(a).Lerp(geom.Vector3(b), n))\n}",
"func (self *Graphics) BlendMode() int{\n return self.Object.Get(\"blendMode\").Int()\n}",
"func VBLENDVPS(xy, mxy, xy1, xy2 operand.Op) { ctx.VBLENDVPS(xy, mxy, xy1, xy2) }",
"func BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n\tC.glowBlendEquationSeparate(gpBlendEquationSeparate, (C.GLenum)(modeRGB), (C.GLenum)(modeAlpha))\n}",
"func BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n\tC.glowBlendEquationSeparate(gpBlendEquationSeparate, (C.GLenum)(modeRGB), (C.GLenum)(modeAlpha))\n}",
"func BlendEquationSeparate(modeRGB, modeAlpha Enum) {\n\tgl.BlendEquationSeparate(uint32(modeRGB), uint32(modeAlpha))\n}",
"func VBLENDMPS_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VBLENDMPS_Z(mxyz, xyz, k, xyz1) }",
"func (c Color) Blend(pct float32, clr color.Color) Color {\n\tf32 := NRGBAf32Model.Convert(c).(NRGBAf32)\n\tothc := NRGBAf32Model.Convert(clr).(NRGBAf32)\n\tpct = mat32.Clamp(pct, 0, 100.0)\n\toth := pct / 100.0\n\tme := 1.0 - pct/100.0\n\tf32.R = me*f32.R + oth*othc.R\n\tf32.G = me*f32.G + oth*othc.G\n\tf32.B = me*f32.B + oth*othc.B\n\tf32.A = me*f32.A + oth*othc.A\n\treturn ColorModel.Convert(f32).(Color)\n}",
"func (debugging *debuggingOpenGL) BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n\tdebugging.recordEntry(\"BlendEquationSeparate\", modeRGB, modeAlpha)\n\tdebugging.gl.BlendEquationSeparate(modeRGB, modeAlpha)\n\tdebugging.recordExit(\"BlendEquationSeparate\")\n}",
"func BlendEquationSeparate(modeRGB Enum, modeAlpha Enum) {\n\tcmodeRGB, _ := (C.GLenum)(modeRGB), cgoAllocsUnknown\n\tcmodeAlpha, _ := (C.GLenum)(modeAlpha), cgoAllocsUnknown\n\tC.glBlendEquationSeparate(cmodeRGB, cmodeAlpha)\n}",
"func (c *Context) BlendEquation(eq gfx.BlendEquation) gfx.ContextStateValue {\n\treturn s.CSV{\n\t\tValue: c.Enums[int(eq)],\n\t\tDefaultValue: c.O.Get(\"FUNC_ADD\").Int(), // TODO(slimsag): verify\n\t\tKey: csBlendEquation,\n\t\tGLCall: c.glBlendEquation,\n\t}\n}",
"func (foreground RGBA) BlendWith(background RGBA) (color RGBA) {\n\n\tfr, fg, fb, fa := uint(foreground.R), uint(foreground.G), uint(foreground.B), uint(foreground.A)\n\tbr, bg, bb, ba := uint(background.R), uint(background.G), uint(background.B), uint(255) // want background to be fully colored\n\n\topacity := fa\n\tbackgroundOpacity := (ba * (255 - fa)) >> 8\n\n\tnewColor := RGBA{\n\t\tuint8((fr*opacity)>>8 + (br*backgroundOpacity)>>8),\n\t\tuint8((fg*opacity)>>8 + (bg*backgroundOpacity)>>8),\n\t\tuint8((fb*opacity)>>8 + (bb*backgroundOpacity)>>8),\n\t\tuint8(opacity),\n\t}\n\n\treturn newColor\n}",
"func (native *OpenGL) BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n\tgl.BlendEquationSeparate(modeRGB, modeAlpha)\n}",
"func BlendFuncSeparate(sfactorRGB uint32, dfactorRGB uint32, sfactorAlpha uint32, dfactorAlpha uint32) {\n\tsyscall.Syscall6(gpBlendFuncSeparate, 4, uintptr(sfactorRGB), uintptr(dfactorRGB), uintptr(sfactorAlpha), uintptr(dfactorAlpha), 0, 0)\n}",
"func (w *Width) Blend(delta float64) {\n\tif !w.ended {\n\t\tw.current += delta\n\t}\n}",
"func (self *Graphics) SetBlendModeA(member int) {\n self.Object.Set(\"blendMode\", member)\n}",
"func (c *Context) VPBLENDVB(xy, mxy, xy1, xy2 operand.Op) {\n\tc.addinstruction(x86.VPBLENDVB(xy, mxy, xy1, xy2))\n}",
"func (c *Context) VPBLENDW(i, mxy, xy, xy1 operand.Op) {\n\tc.addinstruction(x86.VPBLENDW(i, mxy, xy, xy1))\n}",
"func (c *canvas) Blend(i image.Image, pos BlendPosition, fn BlendFunc) error {\n\treturn c.mutate(func() (*pxl, error) {\n\t\treturn blend(c.pxl, i, pos, fn)\n\t})\n}",
"func blendAdd(S, D uint32) uint32 {\n\tadd := S + D\n\tif add < 65535 {\n\t\treturn add\n\t} else {\n\t\treturn 65535\n\t}\n}",
"func (h *Height) Blend(delta float64) {\n\tif !h.ended {\n\t\th.current += delta\n\t}\n}",
"func VPBLENDMQ_BCST_Z(m, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMQ_BCST_Z(m, xyz, k, xyz1) }",
"func AlphaFunc(xfunc uint32, ref float32) {\n C.glowAlphaFunc(gpAlphaFunc, (C.GLenum)(xfunc), (C.GLfloat)(ref))\n}",
"func SetBlendEquation(mode Enum) {\n\tcmode, _ := (C.GLenum)(mode), cgoAllocsUnknown\n\tC.glBlendEquation(cmode)\n}",
"func (c *Context) VPBLENDMQ(ops ...operand.Op) {\n\tc.addinstruction(x86.VPBLENDMQ(ops...))\n}",
"func (c *Context) VPBLENDMB(ops ...operand.Op) {\n\tc.addinstruction(x86.VPBLENDMB(ops...))\n}",
"func (self *TileSprite) SetBlendModeA(member int) {\n self.Object.Set(\"blendMode\", member)\n}",
"func VPBLENDMD_BCST_Z(m, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMD_BCST_Z(m, xyz, k, xyz1) }",
"func BlendFunc(sfactor, dfactor Enum) {\n\tgl.BlendFunc(uint32(sfactor), uint32(dfactor))\n}",
"func ColourBlendEvaluator(i *Interpreter, b *Block) Value {\n\tcolour1 := i.Evaluate(b.SingleBlockValueWithName(i, \"COLOUR1\")).AsColour(i)\n\tcolour2 := i.Evaluate(b.SingleBlockValueWithName(i, \"COLOUR2\")).AsColour(i)\n\tratio := i.Evaluate(b.SingleBlockValueWithName(i, \"RATIO\")).AsNumber(i)\n\n\tratio = math.Min(1.0, math.Max(0.0, ratio))\n\treturn Colour{\n\t\tblend(colour1.Red, colour2.Red, ratio),\n\t\tblend(colour1.Green, colour2.Green, ratio),\n\t\tblend(colour1.Blue, colour2.Blue, ratio),\n\t}\n}",
"func (c *Context) VBLENDPS(i, mxy, xy, xy1 operand.Op) {\n\tc.addinstruction(x86.VBLENDPS(i, mxy, xy, xy1))\n}",
"func VBLENDMPD_BCST_Z(m, xyz, k, xyz1 operand.Op) { ctx.VBLENDMPD_BCST_Z(m, xyz, k, xyz1) }",
"func BlendFunc(sfactor GLenum, dfactor GLenum) {\n\tC.glBlendFunc(C.GLenum(sfactor), C.GLenum(dfactor))\n}",
"func BlendFunc(sfactor Enum, dfactor Enum) {\n\tcsfactor, _ := (C.GLenum)(sfactor), cgoAllocsUnknown\n\tcdfactor, _ := (C.GLenum)(dfactor), cgoAllocsUnknown\n\tC.glBlendFunc(csfactor, cdfactor)\n}",
"func (c *Context) PBLENDVB(x, mx, x1 operand.Op) {\n\tc.addinstruction(x86.PBLENDVB(x, mx, x1))\n}",
"func (self *TileSprite) BlendMode() int{\n return self.Object.Get(\"blendMode\").Int()\n}",
"func DestinationAtopBlend(source color.RGBA, target color.RGBA, factor float64, keepAlpha bool) color.RGBA {\n\tSa := source.A\n\tDa := target.A\n\tif !keepAlpha {\n\t\tsource.A = DestinationAtopUnit(source.A, target.A, Sa, Da, factor)\n\t}\n\tsource.R = DestinationAtopUnit(source.R, target.R, Sa, Da, factor)\n\tsource.G = DestinationAtopUnit(source.G, target.G, Sa, Da, factor)\n\tsource.B = DestinationAtopUnit(source.B, target.B, Sa, Da, factor)\n\treturn source\n}",
"func (c *Context) VPBLENDMW(ops ...operand.Op) {\n\tc.addinstruction(x86.VPBLENDMW(ops...))\n}",
"func (c *Context) VPBLENDD(i, mxy, xy, xy1 operand.Op) {\n\tc.addinstruction(x86.VPBLENDD(i, mxy, xy, xy1))\n}",
"func BlendFuncSeparate(sfactorRGB Enum, dfactorRGB Enum, sfactorAlpha Enum, dfactorAlpha Enum) {\n\tcsfactorRGB, _ := (C.GLenum)(sfactorRGB), cgoAllocsUnknown\n\tcdfactorRGB, _ := (C.GLenum)(dfactorRGB), cgoAllocsUnknown\n\tcsfactorAlpha, _ := (C.GLenum)(sfactorAlpha), cgoAllocsUnknown\n\tcdfactorAlpha, _ := (C.GLenum)(dfactorAlpha), cgoAllocsUnknown\n\tC.glBlendFuncSeparate(csfactorRGB, cdfactorRGB, csfactorAlpha, cdfactorAlpha)\n}",
"func (bl *Blend) clip(rgb Color) Color {\r\n\tr, g, b := rgb.R, rgb.G, rgb.B\r\n\r\n\tl := bl.Lum(rgb)\r\n\tmin := utils.Min(r, g, b)\r\n\tmax := utils.Max(r, g, b)\r\n\r\n\tif min < 0 {\r\n\t\tr = l + (((r - l) * l) / (l - min))\r\n\t\tg = l + (((g - l) * l) / (l - min))\r\n\t\tb = l + (((b - l) * l) / (l - min))\r\n\t}\r\n\tif max > 1 {\r\n\t\tr = l + (((r - l) * (1 - l)) / (max - l))\r\n\t\tg = l + (((g - l) * (1 - l)) / (max - l))\r\n\t\tb = l + (((b - l) * (1 - l)) / (max - l))\r\n\t}\r\n\r\n\treturn Color{R: r, G: g, B: b}\r\n}",
"func VBLENDMPS_BCST_Z(m, xyz, k, xyz1 operand.Op) { ctx.VBLENDMPS_BCST_Z(m, xyz, k, xyz1) }",
"func SetBlendColor(red Float, green Float, blue Float, alpha Float) {\n\tcred, _ := (C.GLfloat)(red), cgoAllocsUnknown\n\tcgreen, _ := (C.GLfloat)(green), cgoAllocsUnknown\n\tcblue, _ := (C.GLfloat)(blue), cgoAllocsUnknown\n\tcalpha, _ := (C.GLfloat)(alpha), cgoAllocsUnknown\n\tC.glBlendColor(cred, cgreen, cblue, calpha)\n}",
"func Hsl(h, s, l float64) Color {\r\n if s == 0 {\r\n return Color{l, l, l}\r\n }\r\n\r\n var r, g, b float64\r\n var t1 float64\r\n var t2 float64\r\n var tr float64\r\n var tg float64\r\n var tb float64\r\n\r\n if l < 0.5 {\r\n t1 = l * (1.0 + s)\r\n } else {\r\n t1 = l + s - l*s\r\n }\r\n\r\n t2 = 2*l - t1\r\n h = h / 360\r\n tr = h + 1.0/3.0\r\n tg = h\r\n tb = h - 1.0/3.0\r\n\r\n if tr < 0 {\r\n tr += 1\r\n }\r\n if tr > 1 {\r\n tr -= 1\r\n }\r\n if tg < 0 {\r\n tg += 1\r\n }\r\n if tg > 1 {\r\n tg -= 1\r\n }\r\n if tb < 0 {\r\n tb += 1\r\n }\r\n if tb > 1 {\r\n tb -= 1\r\n }\r\n\r\n // Red\r\n if 6*tr < 1 {\r\n r = t2 + (t1-t2)*6*tr\r\n } else if 2*tr < 1 {\r\n r = t1\r\n } else if 3*tr < 2 {\r\n r = t2 + (t1-t2)*(2.0/3.0-tr)*6\r\n } else {\r\n r = t2\r\n }\r\n\r\n // Green\r\n if 6*tg < 1 {\r\n g = t2 + (t1-t2)*6*tg\r\n } else if 2*tg < 1 {\r\n g = t1\r\n } else if 3*tg < 2 {\r\n g = t2 + (t1-t2)*(2.0/3.0-tg)*6\r\n } else {\r\n g = t2\r\n }\r\n\r\n // Blue\r\n if 6*tb < 1 {\r\n b = t2 + (t1-t2)*6*tb\r\n } else if 2*tb < 1 {\r\n b = t1\r\n } else if 3*tb < 2 {\r\n b = t2 + (t1-t2)*(2.0/3.0-tb)*6\r\n } else {\r\n b = t2\r\n }\r\n\r\n return Color{r, g, b}\r\n}",
"func (bl *Blend) AlphaCompose(\r\n\tbackdropAlpha,\r\n\tsourceAlpha,\r\n\tcompositeAlpha,\r\n\tbackdropColor,\r\n\tsourceColor,\r\n\tcompositeColor float64,\r\n) float64 {\r\n\treturn ((1 - sourceAlpha/compositeAlpha) * backdropColor) +\r\n\t\t(sourceAlpha / compositeAlpha *\r\n\t\t\tmath.Round((1-backdropAlpha)*sourceColor+backdropAlpha*compositeColor))\r\n}",
"func BlendFuncSeparate(sfactorRGB, dfactorRGB, sfactorAlpha, dfactorAlpha Enum) {\n\tgl.BlendFuncSeparate(uint32(sfactorRGB), uint32(dfactorRGB), uint32(sfactorAlpha), uint32(dfactorAlpha))\n}",
"func BlendFuncSeparate(sfactorRGB uint32, dfactorRGB uint32, sfactorAlpha uint32, dfactorAlpha uint32) {\n\tC.glowBlendFuncSeparate(gpBlendFuncSeparate, (C.GLenum)(sfactorRGB), (C.GLenum)(dfactorRGB), (C.GLenum)(sfactorAlpha), (C.GLenum)(dfactorAlpha))\n}",
"func BlendFuncSeparate(sfactorRGB uint32, dfactorRGB uint32, sfactorAlpha uint32, dfactorAlpha uint32) {\n\tC.glowBlendFuncSeparate(gpBlendFuncSeparate, (C.GLenum)(sfactorRGB), (C.GLenum)(dfactorRGB), (C.GLenum)(sfactorAlpha), (C.GLenum)(dfactorAlpha))\n}",
"func (c *Context) VPBLENDMQ_Z(mxyz, xyz, k, xyz1 operand.Op) {\n\tc.addinstruction(x86.VPBLENDMQ_Z(mxyz, xyz, k, xyz1))\n}",
"func (c *Context) VPBLENDMQ_BCST(ops ...operand.Op) {\n\tc.addinstruction(x86.VPBLENDMQ_BCST(ops...))\n}",
"func (c *Context) VPBLENDMW_Z(mxyz, xyz, k, xyz1 operand.Op) {\n\tc.addinstruction(x86.VPBLENDMW_Z(mxyz, xyz, k, xyz1))\n}",
"func rgbasum(colorDay, colorNight color.Color, quantifier float64) color.RGBA {\n\tcDr, cDg, cDb, cDa := colorDay.RGBA()\n\tcNr, cNg, cNb, cNa := colorNight.RGBA()\n\n\t//https://en.wikipedia.org/wiki/Alpha_compositing#Alpha_blending\n\ta := cDa + cNa*(1-cDa)\n\tr := (float64(cDr)*quantifier + (float64(cNr) * (1 - quantifier)))\n\tg := (float64(cDg)*quantifier + (float64(cNg) * (1 - quantifier)))\n\tb := (float64(cDb)*quantifier + (float64(cNb) * (1 - quantifier)))\n\n\t//todo: alpha blending\n\t//https://jimdoescode.github.io/2015/05/22/manipulating-colors-in-go.html\n\treturn color.RGBA{uint8(r / 0x101), uint8(g / 0x101), uint8(b / 0x101), uint8(a / 0x101)}\n}",
"func BlendFunc(src, dst BlendFactor) {\n\tgl.BlendFunc(uint32(src), uint32(dst))\n}"
] | [
"0.7000268",
"0.6903696",
"0.6746614",
"0.67423177",
"0.6713342",
"0.66675776",
"0.6641725",
"0.66270536",
"0.6616733",
"0.657142",
"0.6564094",
"0.64331836",
"0.641082",
"0.63951325",
"0.638613",
"0.6352551",
"0.6342246",
"0.63367605",
"0.6317944",
"0.631385",
"0.6294172",
"0.6285769",
"0.6265544",
"0.62426317",
"0.62023216",
"0.62003165",
"0.61913705",
"0.6149126",
"0.6136863",
"0.6102803",
"0.60599566",
"0.60587764",
"0.60313666",
"0.6015536",
"0.6005629",
"0.59779286",
"0.59624296",
"0.59490985",
"0.59474856",
"0.59400636",
"0.59399986",
"0.59231335",
"0.59231335",
"0.5922179",
"0.5906083",
"0.5901434",
"0.5854817",
"0.58398414",
"0.58394045",
"0.58222836",
"0.58222836",
"0.5814161",
"0.5794939",
"0.5714206",
"0.5654328",
"0.56435686",
"0.56403375",
"0.5619029",
"0.56005657",
"0.5555868",
"0.5553243",
"0.5513539",
"0.547635",
"0.5463046",
"0.54471767",
"0.5425358",
"0.5424328",
"0.5414295",
"0.5413938",
"0.54090536",
"0.5396376",
"0.53794473",
"0.5379391",
"0.5339308",
"0.5333954",
"0.5320239",
"0.53130484",
"0.5301047",
"0.526368",
"0.5251279",
"0.5245702",
"0.5221212",
"0.52168477",
"0.5213424",
"0.5170895",
"0.5139719",
"0.51346725",
"0.512679",
"0.51168454",
"0.5116556",
"0.51128334",
"0.5105782",
"0.5088982",
"0.5088982",
"0.5078229",
"0.50782084",
"0.50685304",
"0.5044322",
"0.5043098"
] | 0.63829076 | 16 |
set the RGB blend equation and the alpha blend equation separately | func BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {
C.glowBlendEquationSeparate(gpBlendEquationSeparate, (C.GLenum)(modeRGB), (C.GLenum)(modeAlpha))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func PBLENDVB(x, mx, x1 operand.Op) { ctx.PBLENDVB(x, mx, x1) }",
"func BlendColor(red float32, green float32, blue float32, alpha float32) {\n C.glowBlendColor(gpBlendColor, (C.GLfloat)(red), (C.GLfloat)(green), (C.GLfloat)(blue), (C.GLfloat)(alpha))\n}",
"func VPBLENDVB(xy, mxy, xy1, xy2 operand.Op) { ctx.VPBLENDVB(xy, mxy, xy1, xy2) }",
"func VPBLENDMB(ops ...operand.Op) { ctx.VPBLENDMB(ops...) }",
"func blend(a, b color.Color, blend float64) color.RGBA {\n\tinv := float64(0) // float64(1)-blend\n\tar, ag, ab, aa := a.RGBA()\n\tbr, bg, bb, ba := b.RGBA()\n\tor, og, ob, _ := uint32(float64(ar)*blend+float64(br)*inv),\n\t\tuint32(float64(ag)*blend+float64(bg)*inv),\n\t\tuint32(float64(ab)*blend+float64(bb)*inv),\n\t\tuint32(float64(aa)*blend+float64(ba)*inv)\n\treturn color.RGBA{uint8(or >> 8), uint8(og >> 8), uint8(ob >> 8), 255} //uint8(oa>>8)}\n}",
"func setBlendFunc(cmp pixel.ComposeMethod) {\n\tswitch cmp {\n\tcase pixel.ComposeOver:\n\t\tglhf.BlendFunc(glhf.One, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeIn:\n\t\tglhf.BlendFunc(glhf.DstAlpha, glhf.Zero)\n\tcase pixel.ComposeOut:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.Zero)\n\tcase pixel.ComposeAtop:\n\t\tglhf.BlendFunc(glhf.DstAlpha, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeRover:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.One)\n\tcase pixel.ComposeRin:\n\t\tglhf.BlendFunc(glhf.Zero, glhf.SrcAlpha)\n\tcase pixel.ComposeRout:\n\t\tglhf.BlendFunc(glhf.Zero, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeRatop:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.SrcAlpha)\n\tcase pixel.ComposeXor:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposePlus:\n\t\tglhf.BlendFunc(glhf.One, glhf.One)\n\tcase pixel.ComposeCopy:\n\t\tglhf.BlendFunc(glhf.One, glhf.Zero)\n\tdefault:\n\t\tpanic(errors.New(\"Canvas: invalid compose method\"))\n\t}\n}",
"func BlendEquation(mode uint32) {\n C.glowBlendEquation(gpBlendEquation, (C.GLenum)(mode))\n}",
"func BlendColor(red float32, green float32, blue float32, alpha float32) {\n\tsyscall.Syscall6(gpBlendColor, 4, uintptr(math.Float32bits(red)), uintptr(math.Float32bits(green)), uintptr(math.Float32bits(blue)), uintptr(math.Float32bits(alpha)), 0, 0)\n}",
"func VPBLENDMW(ops ...operand.Op) { ctx.VPBLENDMW(ops...) }",
"func BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n C.glowBlendEquationSeparate(gpBlendEquationSeparate, (C.GLenum)(modeRGB), (C.GLenum)(modeAlpha))\n}",
"func BlendEquation(mode uint32) {\n\tsyscall.Syscall(gpBlendEquation, 1, uintptr(mode), 0, 0)\n}",
"func VPBLENDMQ(ops ...operand.Op) { ctx.VPBLENDMQ(ops...) }",
"func BlendColor(red GLfloat, green GLfloat, blue GLfloat, alpha GLfloat) {\n\tC.glBlendColor(C.GLfloat(red), C.GLfloat(green), C.GLfloat(blue), C.GLfloat(alpha))\n}",
"func BlendColor(red, green, blue, alpha float32) {\n\tgl.BlendColor(red, green, blue, alpha)\n}",
"func VPBLENDMD(ops ...operand.Op) { ctx.VPBLENDMD(ops...) }",
"func VPBLENDW(i, mxy, xy, xy1 operand.Op) { ctx.VPBLENDW(i, mxy, xy, xy1) }",
"func BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n\tsyscall.Syscall(gpBlendEquationSeparate, 2, uintptr(modeRGB), uintptr(modeAlpha), 0)\n}",
"func BlendColor(red float32, green float32, blue float32, alpha float32) {\n\tC.glowBlendColor(gpBlendColor, (C.GLfloat)(red), (C.GLfloat)(green), (C.GLfloat)(blue), (C.GLfloat)(alpha))\n}",
"func BlendColor(red float32, green float32, blue float32, alpha float32) {\n\tC.glowBlendColor(gpBlendColor, (C.GLfloat)(red), (C.GLfloat)(green), (C.GLfloat)(blue), (C.GLfloat)(alpha))\n}",
"func blendPixelOverPixel(ic_old,ic_new uint8, al_new float64)(c_res uint8) {\n\n\tal_old := float64(1); _=al_old\n\tc_old := float64(ic_old)\n\tc_new := float64(ic_new)\n\n\talgo1 := c_old*(1-al_new) + c_new*al_new\n\tc_res = uint8( util.Min( util.Round(algo1),255) )\n\t//log.Printf(\"\\t\\t %3.1f + %3.1f = %3.1f\", c_old*(1-al_new),c_new*al_new, algo1)\n\n\treturn \n}",
"func VPBLENDD(i, mxy, xy, xy1 operand.Op) { ctx.VPBLENDD(i, mxy, xy, xy1) }",
"func (self *Graphics) SetBlendModeA(member int) {\n self.Object.Set(\"blendMode\", member)\n}",
"func VBLENDMPD(ops ...operand.Op) { ctx.VBLENDMPD(ops...) }",
"func VPBLENDMQ_BCST(ops ...operand.Op) { ctx.VPBLENDMQ_BCST(ops...) }",
"func BLENDPS(i, mx, x operand.Op) { ctx.BLENDPS(i, mx, x) }",
"func VPBLENDMD_BCST(ops ...operand.Op) { ctx.VPBLENDMD_BCST(ops...) }",
"func SetBlendColor(red Float, green Float, blue Float, alpha Float) {\n\tcred, _ := (C.GLfloat)(red), cgoAllocsUnknown\n\tcgreen, _ := (C.GLfloat)(green), cgoAllocsUnknown\n\tcblue, _ := (C.GLfloat)(blue), cgoAllocsUnknown\n\tcalpha, _ := (C.GLfloat)(alpha), cgoAllocsUnknown\n\tC.glBlendColor(cred, cgreen, cblue, calpha)\n}",
"func BLENDVPS(x, mx, x1 operand.Op) { ctx.BLENDVPS(x, mx, x1) }",
"func VBLENDPD(i, mxy, xy, xy1 operand.Op) { ctx.VBLENDPD(i, mxy, xy, xy1) }",
"func VBLENDMPD_BCST(ops ...operand.Op) { ctx.VBLENDMPD_BCST(ops...) }",
"func (self *TileSprite) SetBlendModeA(member int) {\n self.Object.Set(\"blendMode\", member)\n}",
"func VBLENDMPS(ops ...operand.Op) { ctx.VBLENDMPS(ops...) }",
"func BlendEquation(mode Enum) {\n\tgl.BlendEquation(uint32(mode))\n}",
"func (originalColour Colour) Blend(NewColour Colour, Ratio float32) (result Colour) {\n\tif Ratio <= 0 {\n\t\treturn originalColour\n\t}\n\tif Ratio >= 1 {\n\t\treturn NewColour\n\t}\n\n\tresult.B = uint8((float32(NewColour.B)-float32(originalColour.B))*Ratio + float32(originalColour.B))\n\tresult.G = uint8((float32(NewColour.G)-float32(originalColour.G))*Ratio + float32(originalColour.G))\n\tresult.R = uint8((float32(NewColour.R)-float32(originalColour.R))*Ratio + float32(originalColour.R))\n\tresult.L = uint8((float32(NewColour.L)-float32(originalColour.L))*Ratio + float32(originalColour.L))\n\treturn result\n}",
"func PBLENDW(i, mx, x operand.Op) { ctx.PBLENDW(i, mx, x) }",
"func BlendEquation(mode uint32) {\n\tC.glowBlendEquation(gpBlendEquation, (C.GLenum)(mode))\n}",
"func BlendEquation(mode uint32) {\n\tC.glowBlendEquation(gpBlendEquation, (C.GLenum)(mode))\n}",
"func VBLENDPS(i, mxy, xy, xy1 operand.Op) { ctx.VBLENDPS(i, mxy, xy, xy1) }",
"func blend(c1, c2 uint8, ratio float64) uint8 {\n\treturn uint8(math.Floor((float64(c1)*(1.0-ratio) + float64(c2)*ratio) + 0.5))\n}",
"func VPBLENDMB_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMB_Z(mxyz, xyz, k, xyz1) }",
"func BLENDPD(i, mx, x operand.Op) { ctx.BLENDPD(i, mx, x) }",
"func BLENDVPD(x, mx, x1 operand.Op) { ctx.BLENDVPD(x, mx, x1) }",
"func VBLENDVPD(xy, mxy, xy1, xy2 operand.Op) { ctx.VBLENDVPD(xy, mxy, xy1, xy2) }",
"func VPBLENDMQ_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMQ_Z(mxyz, xyz, k, xyz1) }",
"func (self *Graphics) BlendMode() int{\n return self.Object.Get(\"blendMode\").Int()\n}",
"func SetBlendEquation(mode Enum) {\n\tcmode, _ := (C.GLenum)(mode), cgoAllocsUnknown\n\tC.glBlendEquation(cmode)\n}",
"func VPBLENDMW_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMW_Z(mxyz, xyz, k, xyz1) }",
"func VBLENDMPS_BCST(ops ...operand.Op) { ctx.VBLENDMPS_BCST(ops...) }",
"func (foreground RGBA) BlendWith(background RGBA) (color RGBA) {\n\n\tfr, fg, fb, fa := uint(foreground.R), uint(foreground.G), uint(foreground.B), uint(foreground.A)\n\tbr, bg, bb, ba := uint(background.R), uint(background.G), uint(background.B), uint(255) // want background to be fully colored\n\n\topacity := fa\n\tbackgroundOpacity := (ba * (255 - fa)) >> 8\n\n\tnewColor := RGBA{\n\t\tuint8((fr*opacity)>>8 + (br*backgroundOpacity)>>8),\n\t\tuint8((fg*opacity)>>8 + (bg*backgroundOpacity)>>8),\n\t\tuint8((fb*opacity)>>8 + (bb*backgroundOpacity)>>8),\n\t\tuint8(opacity),\n\t}\n\n\treturn newColor\n}",
"func BlendEquationSeparate(modeRGB, modeAlpha Enum) {\n\tgl.BlendEquationSeparate(uint32(modeRGB), uint32(modeAlpha))\n}",
"func VPBLENDMD_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMD_Z(mxyz, xyz, k, xyz1) }",
"func BlendFuncSeparate(sfactorRGB uint32, dfactorRGB uint32, sfactorAlpha uint32, dfactorAlpha uint32) {\n C.glowBlendFuncSeparate(gpBlendFuncSeparate, (C.GLenum)(sfactorRGB), (C.GLenum)(dfactorRGB), (C.GLenum)(sfactorAlpha), (C.GLenum)(dfactorAlpha))\n}",
"func (a Energy) Blend(b Energy, n float64) Energy {\n\treturn Energy(geom.Vector3(a).Lerp(geom.Vector3(b), n))\n}",
"func (debugging *debuggingOpenGL) BlendEquation(mode uint32) {\n\tdebugging.recordEntry(\"BlendEquation\", mode)\n\tdebugging.gl.BlendEquation(mode)\n\tdebugging.recordExit(\"BlendEquation\")\n}",
"func VBLENDMPD_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VBLENDMPD_Z(mxyz, xyz, k, xyz1) }",
"func (c *Context) VPBLENDVB(xy, mxy, xy1, xy2 operand.Op) {\n\tc.addinstruction(x86.VPBLENDVB(xy, mxy, xy1, xy2))\n}",
"func (debugging *debuggingOpenGL) BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n\tdebugging.recordEntry(\"BlendEquationSeparate\", modeRGB, modeAlpha)\n\tdebugging.gl.BlendEquationSeparate(modeRGB, modeAlpha)\n\tdebugging.recordExit(\"BlendEquationSeparate\")\n}",
"func VBLENDVPS(xy, mxy, xy1, xy2 operand.Op) { ctx.VBLENDVPS(xy, mxy, xy1, xy2) }",
"func BlendEquationSeparate(modeRGB Enum, modeAlpha Enum) {\n\tcmodeRGB, _ := (C.GLenum)(modeRGB), cgoAllocsUnknown\n\tcmodeAlpha, _ := (C.GLenum)(modeAlpha), cgoAllocsUnknown\n\tC.glBlendEquationSeparate(cmodeRGB, cmodeAlpha)\n}",
"func (c Color) Blend(pct float32, clr color.Color) Color {\n\tf32 := NRGBAf32Model.Convert(c).(NRGBAf32)\n\tothc := NRGBAf32Model.Convert(clr).(NRGBAf32)\n\tpct = mat32.Clamp(pct, 0, 100.0)\n\toth := pct / 100.0\n\tme := 1.0 - pct/100.0\n\tf32.R = me*f32.R + oth*othc.R\n\tf32.G = me*f32.G + oth*othc.G\n\tf32.B = me*f32.B + oth*othc.B\n\tf32.A = me*f32.A + oth*othc.A\n\treturn ColorModel.Convert(f32).(Color)\n}",
"func (c *canvas) Blend(i image.Image, pos BlendPosition, fn BlendFunc) error {\n\treturn c.mutate(func() (*pxl, error) {\n\t\treturn blend(c.pxl, i, pos, fn)\n\t})\n}",
"func (w *Width) Blend(delta float64) {\n\tif !w.ended {\n\t\tw.current += delta\n\t}\n}",
"func (native *OpenGL) BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n\tgl.BlendEquationSeparate(modeRGB, modeAlpha)\n}",
"func (native *OpenGL) BlendEquation(mode uint32) {\n\tgl.BlendEquation(mode)\n}",
"func VBLENDMPS_Z(mxyz, xyz, k, xyz1 operand.Op) { ctx.VBLENDMPS_Z(mxyz, xyz, k, xyz1) }",
"func (c *Context) PBLENDVB(x, mx, x1 operand.Op) {\n\tc.addinstruction(x86.PBLENDVB(x, mx, x1))\n}",
"func (h *Height) Blend(delta float64) {\n\tif !h.ended {\n\t\th.current += delta\n\t}\n}",
"func BlendFuncSeparate(sfactorRGB uint32, dfactorRGB uint32, sfactorAlpha uint32, dfactorAlpha uint32) {\n\tsyscall.Syscall6(gpBlendFuncSeparate, 4, uintptr(sfactorRGB), uintptr(dfactorRGB), uintptr(sfactorAlpha), uintptr(dfactorAlpha), 0, 0)\n}",
"func (c *Context) VPBLENDMB(ops ...operand.Op) {\n\tc.addinstruction(x86.VPBLENDMB(ops...))\n}",
"func (c *Context) VPBLENDW(i, mxy, xy, xy1 operand.Op) {\n\tc.addinstruction(x86.VPBLENDW(i, mxy, xy, xy1))\n}",
"func ColourBlendEvaluator(i *Interpreter, b *Block) Value {\n\tcolour1 := i.Evaluate(b.SingleBlockValueWithName(i, \"COLOUR1\")).AsColour(i)\n\tcolour2 := i.Evaluate(b.SingleBlockValueWithName(i, \"COLOUR2\")).AsColour(i)\n\tratio := i.Evaluate(b.SingleBlockValueWithName(i, \"RATIO\")).AsNumber(i)\n\n\tratio = math.Min(1.0, math.Max(0.0, ratio))\n\treturn Colour{\n\t\tblend(colour1.Red, colour2.Red, ratio),\n\t\tblend(colour1.Green, colour2.Green, ratio),\n\t\tblend(colour1.Blue, colour2.Blue, ratio),\n\t}\n}",
"func (y *Yeelight) SetRGB(value, effect, duration string) string {\n\tcmd := `{\"id\":3,\"method\":\"set_rgb\",\"params\":[` + value + `,\"` + effect + `\",` + duration + `]}`\n\treturn y.request(cmd)\n}",
"func (c Color) SetAlpha(a uint8) Color {\n\tr, g, b, oa := c>>24, (c>>16)&0xFF, (c>>8)&0xFF, c&0xFF;\n\tif oa == 0 {\n\t\treturn 0\n\t}\n\tr = r * Color(a) / oa;\n\tif r < 0 {\n\t\tr = 0\n\t}\n\tif r > 0xFF {\n\t\tr = 0xFF\n\t}\n\tg = g * Color(a) / oa;\n\tif g < 0 {\n\t\tg = 0\n\t}\n\tif g > 0xFF {\n\t\tg = 0xFF\n\t}\n\tb = b * Color(a) / oa;\n\tif b < 0 {\n\t\tb = 0\n\t}\n\tif b > 0xFF {\n\t\tb = 0xFF\n\t}\n\treturn r<<24 | g<<16 | b<<8 | Color(a);\n}",
"func VPBLENDMQ_BCST_Z(m, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMQ_BCST_Z(m, xyz, k, xyz1) }",
"func NewBlend() *Blend {\r\n\treturn &Blend{\r\n\t\tModes: []string{\r\n\t\t\tNormal,\r\n\t\t\tDarken,\r\n\t\t\tLighten,\r\n\t\t\tMultiply,\r\n\t\t\tScreen,\r\n\t\t\tOverlay,\r\n\t\t\tSoftLight,\r\n\t\t\tHardLight,\r\n\t\t\tColorDodge,\r\n\t\t\tColorBurn,\r\n\t\t\tDifference,\r\n\t\t\tExclusion,\r\n\t\t\tHue,\r\n\t\t\tSaturation,\r\n\t\t\tColorMode,\r\n\t\t\tLuminosity,\r\n\t\t},\r\n\t}\r\n}",
"func (self *TileSprite) BlendMode() int{\n return self.Object.Get(\"blendMode\").Int()\n}",
"func VPBLENDMD_BCST_Z(m, xyz, k, xyz1 operand.Op) { ctx.VPBLENDMD_BCST_Z(m, xyz, k, xyz1) }",
"func (icn *Icon) Blend() {\n // get pixels \"parent-pixels\" of Parent that are behind Window\n bg := icn.getBackground()\n\n // copy \"parent-pixels\" into buffer \"ximage\", overwriting existing completely\n xgraphics.Blend(icn.ximage, bg, image.Point{0,0})\n\n // alpha-blend Image into buffer \"ximage\"\n xgraphics.Blend(icn.ximage, icn.Image, image.Point{0,0})\n\n // swap ximage into Window as background\n icn.ximage.XSurfaceSet(icn.Window.Id)\n icn.ximage.XDraw()\n icn.ximage.XPaint(icn.Window.Id)\n\n // free the pixbuff memory!\n icn.ximage.Destroy()\n}",
"func DestinationAtopBlend(source color.RGBA, target color.RGBA, factor float64, keepAlpha bool) color.RGBA {\n\tSa := source.A\n\tDa := target.A\n\tif !keepAlpha {\n\t\tsource.A = DestinationAtopUnit(source.A, target.A, Sa, Da, factor)\n\t}\n\tsource.R = DestinationAtopUnit(source.R, target.R, Sa, Da, factor)\n\tsource.G = DestinationAtopUnit(source.G, target.G, Sa, Da, factor)\n\tsource.B = DestinationAtopUnit(source.B, target.B, Sa, Da, factor)\n\treturn source\n}",
"func (c *Context) VPBLENDMQ(ops ...operand.Op) {\n\tc.addinstruction(x86.VPBLENDMQ(ops...))\n}",
"func (c *Context) BlendColor(r, g, b, a float32) gfx.ContextStateValue {\n\treturn s.CSV{\n\t\tValue: [4]float32{r, g, b, a},\n\t\tDefaultValue: [4]float32{0, 0, 0, 0}, // TODO(slimsag): verify\n\t\tKey: csBlendColor,\n\t\tGLCall: c.glBlendColor,\n\t}\n}",
"func AlphaFunc(xfunc uint32, ref float32) {\n C.glowAlphaFunc(gpAlphaFunc, (C.GLenum)(xfunc), (C.GLfloat)(ref))\n}",
"func blendAdd(S, D uint32) uint32 {\n\tadd := S + D\n\tif add < 65535 {\n\t\treturn add\n\t} else {\n\t\treturn 65535\n\t}\n}",
"func (c *Context) VPBLENDMW(ops ...operand.Op) {\n\tc.addinstruction(x86.VPBLENDMW(ops...))\n}",
"func BlendFunc(sfactor, dfactor Enum) {\n\tgl.BlendFunc(uint32(sfactor), uint32(dfactor))\n}",
"func BlendFunc(sfactor Enum, dfactor Enum) {\n\tcsfactor, _ := (C.GLenum)(sfactor), cgoAllocsUnknown\n\tcdfactor, _ := (C.GLenum)(dfactor), cgoAllocsUnknown\n\tC.glBlendFunc(csfactor, cdfactor)\n}",
"func (c *Context) VPBLENDD(i, mxy, xy, xy1 operand.Op) {\n\tc.addinstruction(x86.VPBLENDD(i, mxy, xy, xy1))\n}",
"func (s *Sprite) SetColorAndAlpha(c sdl.Color) {\n\ts.Tex.SetColorMod(c.R, c.G, c.B)\n\ts.Tex.SetAlphaMod(c.A)\n}",
"func VBLENDMPD_BCST_Z(m, xyz, k, xyz1 operand.Op) { ctx.VBLENDMPD_BCST_Z(m, xyz, k, xyz1) }",
"func (c *Context) VBLENDPS(i, mxy, xy, xy1 operand.Op) {\n\tc.addinstruction(x86.VBLENDPS(i, mxy, xy, xy1))\n}",
"func (buf *CommandBuffer) SetBlendConstants(blendConstants [4]float32) {\n\tC.domVkCmdSetBlendConstants(buf.fps[vkCmdSetBlendConstants], buf.hnd, (*C.float)(slice2ptr(uptr(&blendConstants))))\n}",
"func (bl *Blend) clip(rgb Color) Color {\r\n\tr, g, b := rgb.R, rgb.G, rgb.B\r\n\r\n\tl := bl.Lum(rgb)\r\n\tmin := utils.Min(r, g, b)\r\n\tmax := utils.Max(r, g, b)\r\n\r\n\tif min < 0 {\r\n\t\tr = l + (((r - l) * l) / (l - min))\r\n\t\tg = l + (((g - l) * l) / (l - min))\r\n\t\tb = l + (((b - l) * l) / (l - min))\r\n\t}\r\n\tif max > 1 {\r\n\t\tr = l + (((r - l) * (1 - l)) / (max - l))\r\n\t\tg = l + (((g - l) * (1 - l)) / (max - l))\r\n\t\tb = l + (((b - l) * (1 - l)) / (max - l))\r\n\t}\r\n\r\n\treturn Color{R: r, G: g, B: b}\r\n}",
"func BlendFuncSeparate(sfactorRGB Enum, dfactorRGB Enum, sfactorAlpha Enum, dfactorAlpha Enum) {\n\tcsfactorRGB, _ := (C.GLenum)(sfactorRGB), cgoAllocsUnknown\n\tcdfactorRGB, _ := (C.GLenum)(dfactorRGB), cgoAllocsUnknown\n\tcsfactorAlpha, _ := (C.GLenum)(sfactorAlpha), cgoAllocsUnknown\n\tcdfactorAlpha, _ := (C.GLenum)(dfactorAlpha), cgoAllocsUnknown\n\tC.glBlendFuncSeparate(csfactorRGB, cdfactorRGB, csfactorAlpha, cdfactorAlpha)\n}",
"func BlendFunc(sfactor GLenum, dfactor GLenum) {\n\tC.glBlendFunc(C.GLenum(sfactor), C.GLenum(dfactor))\n}",
"func (col Color) RGBA() (r, g, b, a uint32) {\r\n r = uint32(col.R*65535.0+0.5)\r\n g = uint32(col.G*65535.0+0.5)\r\n b = uint32(col.B*65535.0+0.5)\r\n a = 0xFFFF\r\n return\r\n}",
"func RGBA(r, g, b, _ uint32) SGRColor {\n\tif r > 0xffff {\n\t\tr = 0xffff\n\t}\n\tif g > 0xffff {\n\t\tg = 0xffff\n\t}\n\tif b > 0xffff {\n\t\tb = 0xffff\n\t}\n\treturn RGB(uint8(r>>8), uint8(g>>8), uint8(b>>8))\n}",
"func CombineRGBA(r, g, b, a *image.Gray) (out *image.RGBA) {\n\tinputs := []*image.Gray{r, g, b, a}\n\tfor i := 1; i < len(inputs); i++ {\n\t\tif inputs[i].Bounds() != inputs[0].Bounds() {\n\t\t\tpanic(\"image bounds do not match\")\n\t\t}\n\t}\n\tout = image.NewRGBA(inputs[0].Bounds())\n\tfor x := out.Bounds().Min.X; x < out.Bounds().Max.X; x++ {\n\t\tfor y := out.Bounds().Min.Y; y < out.Bounds().Max.Y; y++ {\n\t\t\toffset1 := out.PixOffset(x, y)\n\t\t\toffset2 := r.PixOffset(x, y)\n\t\t\tout.Pix[offset1+RED] = r.Pix[offset2]\n\t\t\tout.Pix[offset1+GREEN] = g.Pix[offset2]\n\t\t\tout.Pix[offset1+BLUE] = b.Pix[offset2]\n\t\t\tout.Pix[offset1+ALPHA] = a.Pix[offset2]\n\t\t}\n\t}\n\treturn\n}",
"func (c NCMYKA80) RGBA() (uint32, uint32, uint32, uint32) {\n\tw := uint32(c.K)\n\tr := uint32(c.C) * w / 0xffff\n\tg := uint32(c.M) * w / 0xffff\n\tb := uint32(c.Y) * w / 0xffff\n\tif c.A == 0xffff {\n\t\treturn r, g, b, 0xffff\n\t}\n\tif c.A == 0 {\n\t\treturn 0, 0, 0, 0\n\t}\n\n\ta := uint32(c.A)\n\tr = r * a / 0xffff\n\tg = g * a / 0xffff\n\tb = b * a / 0xffff\n\treturn r, g, b, a\n}",
"func (c *Context) BlendEquation(eq gfx.BlendEquation) gfx.ContextStateValue {\n\treturn s.CSV{\n\t\tValue: c.Enums[int(eq)],\n\t\tDefaultValue: c.O.Get(\"FUNC_ADD\").Int(), // TODO(slimsag): verify\n\t\tKey: csBlendEquation,\n\t\tGLCall: c.glBlendEquation,\n\t}\n}"
] | [
"0.7103261",
"0.70349926",
"0.6937673",
"0.6889735",
"0.68769246",
"0.6785686",
"0.6758208",
"0.66305745",
"0.663032",
"0.66083694",
"0.6592942",
"0.65762967",
"0.64984715",
"0.64953494",
"0.6489191",
"0.6444187",
"0.64387745",
"0.64294606",
"0.64294606",
"0.6423835",
"0.6395065",
"0.6345924",
"0.634488",
"0.63334614",
"0.63281167",
"0.63197553",
"0.6309781",
"0.62513375",
"0.62440956",
"0.62360543",
"0.6221392",
"0.6200296",
"0.6190379",
"0.6184227",
"0.615566",
"0.61462444",
"0.61462444",
"0.61353004",
"0.6085638",
"0.60797143",
"0.6070531",
"0.6021848",
"0.6010976",
"0.6010132",
"0.59832776",
"0.59832746",
"0.5954745",
"0.5944525",
"0.5938471",
"0.59057915",
"0.589171",
"0.5845744",
"0.58435327",
"0.5842985",
"0.5813769",
"0.5770612",
"0.5764745",
"0.57519937",
"0.5741161",
"0.5726536",
"0.5686787",
"0.56858814",
"0.56847584",
"0.56844157",
"0.56483907",
"0.5601604",
"0.559203",
"0.55647546",
"0.5548086",
"0.5529388",
"0.54849017",
"0.5415575",
"0.54097563",
"0.5392801",
"0.53905463",
"0.5379927",
"0.53608656",
"0.5360608",
"0.53595483",
"0.5338673",
"0.53375906",
"0.52871656",
"0.52712226",
"0.5255316",
"0.5252752",
"0.52309394",
"0.5226696",
"0.5222112",
"0.52219903",
"0.52181256",
"0.5209377",
"0.5208995",
"0.51822907",
"0.5180361",
"0.51775444",
"0.51702046",
"0.51625985",
"0.5160624",
"0.51387584"
] | 0.5864464 | 52 |
specify pixel arithmetic for RGB and alpha components separately | func BlendFuncSeparate(sfactorRGB uint32, dfactorRGB uint32, sfactorAlpha uint32, dfactorAlpha uint32) {
C.glowBlendFuncSeparate(gpBlendFuncSeparate, (C.GLenum)(sfactorRGB), (C.GLenum)(dfactorRGB), (C.GLenum)(sfactorAlpha), (C.GLenum)(dfactorAlpha))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func blendPixelOverPixel(ic_old,ic_new uint8, al_new float64)(c_res uint8) {\n\n\tal_old := float64(1); _=al_old\n\tc_old := float64(ic_old)\n\tc_new := float64(ic_new)\n\n\talgo1 := c_old*(1-al_new) + c_new*al_new\n\tc_res = uint8( util.Min( util.Round(algo1),255) )\n\t//log.Printf(\"\\t\\t %3.1f + %3.1f = %3.1f\", c_old*(1-al_new),c_new*al_new, algo1)\n\n\treturn \n}",
"func rgb(c color.RGBA) (int, int, int) {\n\talpha := float64(c.A) / 255.0\n\talphaWhite := int(255 * (1.0 - alpha))\n\tr := int(float64(c.R)*alpha) + alphaWhite\n\tg := int(float64(c.G)*alpha) + alphaWhite\n\tb := int(float64(c.B)*alpha) + alphaWhite\n\treturn r, g, b\n}",
"func (c NCMYKA80) RGBA() (uint32, uint32, uint32, uint32) {\n\tw := uint32(c.K)\n\tr := uint32(c.C) * w / 0xffff\n\tg := uint32(c.M) * w / 0xffff\n\tb := uint32(c.Y) * w / 0xffff\n\tif c.A == 0xffff {\n\t\treturn r, g, b, 0xffff\n\t}\n\tif c.A == 0 {\n\t\treturn 0, 0, 0, 0\n\t}\n\n\ta := uint32(c.A)\n\tr = r * a / 0xffff\n\tg = g * a / 0xffff\n\tb = b * a / 0xffff\n\treturn r, g, b, a\n}",
"func (c NGrayA32) RGBA() (uint32, uint32, uint32, uint32) {\n\ty := uint32(c.Y)\n\tif c.A == 0xffff {\n\t\treturn y, y, y, 0xffff\n\t}\n\tif c.A == 0 {\n\t\treturn 0, 0, 0, 0\n\t}\n\ta := uint32(c.A)\n\ty = y * a / 0xffff\n\treturn y, y, y, a\n}",
"func (c NGrayA64) RGBA() (uint32, uint32, uint32, uint32) {\n\ty := fromFloat(float64(c.Y), 1.0/2.2)\n\tswitch {\n\tcase c.A >= 1:\n\t\treturn y, y, y, 0xffff\n\tcase c.A <= 0:\n\t\treturn 0, 0, 0, 0\n\t}\n\ta := uint32(c.A * 0xffff)\n\ty = y * a / 0xffff\n\treturn y, y, y, a\n}",
"func (c NGrayA) RGBA() (uint32, uint32, uint32, uint32) {\n\ty := uint32(c.Y) * 0x101\n\tif c.A == 0xff {\n\t\treturn y, y, y, 0xffff\n\t}\n\tif c.A == 0 {\n\t\treturn 0, 0, 0, 0\n\t}\n\ta := uint32(c.A) * 0x101\n\ty = y * a / 0xffff\n\treturn y, y, y, a\n}",
"func (col Color) RGBA() (r, g, b, a uint32) {\r\n r = uint32(col.R*65535.0+0.5)\r\n g = uint32(col.G*65535.0+0.5)\r\n b = uint32(col.B*65535.0+0.5)\r\n a = 0xFFFF\r\n return\r\n}",
"func (c Gray32) RGBA() (r, g, b, a uint32) {\n\tconst gamma = 1.0 / 2.2\n\ty := fromFloat(float64(c.Y), gamma)\n\treturn y, y, y, 0xffff\n}",
"func rgbaToPixel(r uint32, g uint32, b uint32, a uint32) Pixel {\n\treturn Pixel{\n\t\tR: int(r / 257),\n\t\tG: int(g / 257),\n\t\tB: int(b / 257),\n\t\tA: int(a / 257),\n\t}\n}",
"func (c NCMYKA) RGBA() (uint32, uint32, uint32, uint32) {\n\tw := uint32(c.K) * 0x10201\n\tr := uint32(c.C) * w / 0xffff\n\tg := uint32(c.M) * w / 0xffff\n\tb := uint32(c.Y) * w / 0xffff\n\tif c.A == 0xff {\n\t\treturn r, g, b, 0xffff\n\t}\n\tif c.A == 0 {\n\t\treturn 0, 0, 0, 0\n\t}\n\n\ta := uint32(c.A) * 0x101\n\tr = r * a / 0xffff\n\tg = g * a / 0xffff\n\tb = b * a / 0xffff\n\treturn r, g, b, a\n}",
"func rgbaToPixel(r uint32, g uint32, b uint32, a uint32, row int, rowPos int) Pixel {\n\treturn Pixel{\n\t\trgba: Rgba{\n\t\t\tint(r / 257),\n\t\t\tint(g / 257),\n\t\t\tint(b / 257),\n\t\t\tint(a / 257),\n\t\t},\n\t\tRow: row,\n\t\tRowPos: rowPos,\n\t}\n}",
"func averager(src image.Image, r image.Rectangle) color.Color {\n\tnpix := (r.Max.Y - r.Min.Y) * (r.Max.X - r.Min.X)\n\tvar sum [3]uint64\n\tfor j := r.Min.Y; j < r.Max.Y; j++ {\n\t\tfor i := r.Min.X; i < r.Max.X; i++ {\n\t\t\tr, g, b, _ := src.At(i, j).RGBA()\n\t\t\tsum[0] += uint64(r >> 8)\n\t\t\tsum[1] += uint64(g >> 8)\n\t\t\tsum[2] += uint64(b >> 8)\n\t\t}\n\t}\n\tvar avg [3]uint64\n\tavg[0] = sum[0] / uint64(npix)\n\tavg[1] = sum[1] / uint64(npix)\n\tavg[2] = sum[2] / uint64(npix)\n\n\treturn color.RGBA{R: uint8(avg[0]), G: uint8(avg[1]), B: uint8(avg[2]), A: 0xff}\n}",
"func (c Gray1) RGBA() (r, g, b, a uint32) {\n\tif c.Y {\n\t\treturn 0xffff, 0xffff, 0xffff, 0xffff\n\t}\n\treturn 0, 0, 0, 0xffff\n}",
"func (c NRGBA128) RGBA() (uint32, uint32, uint32, uint32) {\n\tconst gamma = 1.0 / 2.2\n\tr := fromFloat(float64(c.R), gamma)\n\tg := fromFloat(float64(c.G), gamma)\n\tb := fromFloat(float64(c.B), gamma)\n\tswitch {\n\tcase c.A >= 1:\n\t\treturn r, g, b, 0xffff\n\tcase c.A <= 0:\n\t\treturn 0, 0, 0, 0\n\t}\n\ta := uint32(c.A * 0xffff)\n\tr = r * a / 0xffff\n\tg = g * a / 0xffff\n\tb = b * a / 0xffff\n\treturn r, g, b, a\n}",
"func RawToRGBAlpha(v uint32) (out RGB) {\n\tout.red = uint8(v >> 24) & 0xFF\n\tout.green = uint8(v >> 16) & 0xFF\n\tout.blue = uint8(v >> 8) & 0xFF\n\tout.alpha = uint8(v) & 0xFF\n\n\treturn\n}",
"func RGBA(r, g, b, _ uint32) SGRColor {\n\tif r > 0xffff {\n\t\tr = 0xffff\n\t}\n\tif g > 0xffff {\n\t\tg = 0xffff\n\t}\n\tif b > 0xffff {\n\t\tb = 0xffff\n\t}\n\treturn RGB(uint8(r>>8), uint8(g>>8), uint8(b>>8))\n}",
"func (pw *PixelWand) GetAlpha() float64 {\n\tret := float64(C.PixelGetAlpha(pw.pw))\n\truntime.KeepAlive(pw)\n\treturn ret\n}",
"func CombineRGBA(r, g, b, a *image.Gray) (out *image.RGBA) {\n\tinputs := []*image.Gray{r, g, b, a}\n\tfor i := 1; i < len(inputs); i++ {\n\t\tif inputs[i].Bounds() != inputs[0].Bounds() {\n\t\t\tpanic(\"image bounds do not match\")\n\t\t}\n\t}\n\tout = image.NewRGBA(inputs[0].Bounds())\n\tfor x := out.Bounds().Min.X; x < out.Bounds().Max.X; x++ {\n\t\tfor y := out.Bounds().Min.Y; y < out.Bounds().Max.Y; y++ {\n\t\t\toffset1 := out.PixOffset(x, y)\n\t\t\toffset2 := r.PixOffset(x, y)\n\t\t\tout.Pix[offset1+RED] = r.Pix[offset2]\n\t\t\tout.Pix[offset1+GREEN] = g.Pix[offset2]\n\t\t\tout.Pix[offset1+BLUE] = b.Pix[offset2]\n\t\t\tout.Pix[offset1+ALPHA] = a.Pix[offset2]\n\t\t}\n\t}\n\treturn\n}",
"func (c Color) SetAlpha(a uint8) Color {\n\tr, g, b, oa := c>>24, (c>>16)&0xFF, (c>>8)&0xFF, c&0xFF;\n\tif oa == 0 {\n\t\treturn 0\n\t}\n\tr = r * Color(a) / oa;\n\tif r < 0 {\n\t\tr = 0\n\t}\n\tif r > 0xFF {\n\t\tr = 0xFF\n\t}\n\tg = g * Color(a) / oa;\n\tif g < 0 {\n\t\tg = 0\n\t}\n\tif g > 0xFF {\n\t\tg = 0xFF\n\t}\n\tb = b * Color(a) / oa;\n\tif b < 0 {\n\t\tb = 0\n\t}\n\tif b > 0xFF {\n\t\tb = 0xFF\n\t}\n\treturn r<<24 | g<<16 | b<<8 | Color(a);\n}",
"func (c RGBAFC) RGBA() (r, g, b, a uint32) {\n\tr = uint32(c.R)\n\tr |= r << 8\n\tg = uint32(c.G)\n\tg |= g << 8\n\tb = uint32(c.B)\n\tb |= b << 8\n\ta = uint32(c.A)\n\ta |= a << 8\n\treturn\n}",
"func (c CIEXYZ) RGBA() (r, g, b, a uint32) {\n\tr, g, b = cieXYZToRGB(c.X, c.Y, c.Z)\n\ta = 0xffff\n\treturn\n}",
"func (c *Color) Alpha() float32 {\n\treturn c.a\n}",
"func nextcolor(c color.RGBA) color.RGBA {\n\tswitch {\n\tcase c.R == 255 && c.G == 0 && c.B == 0:\n\t\tc.G += 5\n\tcase c.R == 255 && c.G != 255 && c.B == 0:\n\t\tc.G += 5\n\tcase c.G == 255 && c.R != 0:\n\t\tc.R -= 5\n\tcase c.R == 0 && c.B != 255:\n\t\tc.B += 5\n\tcase c.B == 255 && c.G != 0:\n\t\tc.G -= 5\n\tcase c.G == 0 && c.R != 255:\n\t\tc.R += 5\n\tdefault:\n\t\tc.B -= 5\n\t}\n\treturn c\n}",
"func BlendColor(red float32, green float32, blue float32, alpha float32) {\n C.glowBlendColor(gpBlendColor, (C.GLfloat)(red), (C.GLfloat)(green), (C.GLfloat)(blue), (C.GLfloat)(alpha))\n}",
"func (c RGBAf32) RGBA() (r, g, b, a uint32) {\n\tr = uint32(c.R*65535.0 + 0.5)\n\tg = uint32(c.G*65535.0 + 0.5)\n\tb = uint32(c.B*65535.0 + 0.5)\n\ta = uint32(c.A*65535.0 + 0.5)\n\treturn\n}",
"func (c Color) RGBA() (r, g, b, a uint32) {\n\tr = uint32(c.R)\n\tr |= r << 8\n\tg = uint32(c.G)\n\tg |= g << 8\n\tb = uint32(c.B)\n\tb |= b << 8\n\ta = uint32(c.A)\n\ta |= a << 8\n\treturn\n}",
"func (c Color) RGBA() (r, g, b, a uint32) {\n\tr = uint32(c.R)\n\tr |= r << 8\n\tg = uint32(c.G)\n\tg |= g << 8\n\tb = uint32(c.B)\n\tb |= b << 8\n\ta = uint32(c.A)\n\ta |= a << 8\n\treturn\n}",
"func AlphaFunc(xfunc uint32, ref float32) {\n C.glowAlphaFunc(gpAlphaFunc, (C.GLenum)(xfunc), (C.GLfloat)(ref))\n}",
"func linearImage(srcim image.Image, gamma float64) *image.NRGBA64 {\n\tdstim := image.NewNRGBA64(image.Rectangle{\n\t\tMax: image.Point{\n\t\t\tX: srcim.Bounds().Dx(),\n\t\t\tY: srcim.Bounds().Dy(),\n\t\t},\n\t})\n\tvar dsty int\n\tfor srcy := srcim.Bounds().Min.Y; srcy < srcim.Bounds().Max.Y; srcy++ {\n\t\tvar dstx int\n\t\tfor srcx := srcim.Bounds().Min.X; srcx < srcim.Bounds().Max.X; srcx++ {\n\t\t\tnrgba64 := color.NRGBA64Model.Convert(srcim.At(srcx, srcy)).(color.NRGBA64)\n\t\t\tnrgba64.R = uint16(nrgba64Max * math.Pow(float64(nrgba64.R)/nrgba64Max, gamma))\n\t\t\tnrgba64.G = uint16(nrgba64Max * math.Pow(float64(nrgba64.G)/nrgba64Max, gamma))\n\t\t\tnrgba64.B = uint16(nrgba64Max * math.Pow(float64(nrgba64.B)/nrgba64Max, gamma))\n\t\t\t// Alpha is not affected\n\t\t\tdstim.SetNRGBA64(dstx, dsty, nrgba64)\n\t\t\tdstx++\n\t\t}\n\t\tdsty++\n\t}\n\treturn dstim\n}",
"func (c SGRColor) RGBA() (r, g, b, a uint32) {\n\tr8, g8, b8 := c.RGB()\n\tr = uint32(r8)\n\tg = uint32(g8)\n\tb = uint32(b8)\n\treturn r | r<<8, g | g<<8, b | b<<8, 0xffff\n}",
"func (c CIELab) RGBA() (r, g, b, a uint32) {\n\tr, g, b = cieLabToRGB(c.L, c.A, c.B)\n\ta = 0xffff\n\treturn\n}",
"func reduceRGB(color colorful.Color) (uint16, uint16, uint16) {\n\tr, g, b, _ := color.RGBA()\n\n\treturn uint16(r >> 8), uint16(g >> 8), uint16(b >> 8)\n}",
"func uniform(rgba uint32) *image.Uniform {\n\treturn image.NewUniform(Hex(rgba))\n}",
"func Accum(op uint32, value float32) {\n C.glowAccum(gpAccum, (C.GLenum)(op), (C.GLfloat)(value))\n}",
"func (c ColorOrder) RawPixel(r, g, b byte) Pixel {\n\tswitch c {\n\tcase RGB:\n\t\treturn Pixel{[8]byte{\n\t\t\tzero &^ (r>>7&1 | r>>3&8 | r<<1&0x40),\n\t\t\tzero &^ (r>>4&1 | r>>0&8 | r<<4&0x40),\n\t\t\tzero &^ (r>>1&1 | r<<3&8 | g>>1&0x40),\n\t\t\tzero &^ (g>>6&1 | g>>2&8 | g<<2&0x40),\n\t\t\tzero &^ (g>>3&1 | g<<1&8 | g<<5&0x40),\n\t\t\tzero &^ (g>>0&1 | b>>4&8 | b>>0&0x40),\n\t\t\tzero &^ (b>>5&1 | b>>1&8 | b<<3&0x40),\n\t\t\tzero &^ (b>>2&1 | b<<2&8 | b<<6&0x40),\n\t\t}}\n\tcase GRB:\n\t\treturn Pixel{[8]byte{\n\t\t\tzero &^ (g>>7&1 | g>>3&8 | g<<1&0x40),\n\t\t\tzero &^ (g>>4&1 | g>>0&8 | g<<4&0x40),\n\t\t\tzero &^ (g>>1&1 | g<<3&8 | r>>1&0x40),\n\t\t\tzero &^ (r>>6&1 | r>>2&8 | r<<2&0x40),\n\t\t\tzero &^ (r>>3&1 | r<<1&8 | r<<5&0x40),\n\t\t\tzero &^ (r>>0&1 | b>>4&8 | b>>0&0x40),\n\t\t\tzero &^ (b>>5&1 | b>>1&8 | b<<3&0x40),\n\t\t\tzero &^ (b>>2&1 | b<<2&8 | b<<6&0x40),\n\t\t}}\n\t}\n\treturn Pixel{}\n}",
"func linuxuniform(rgba uint32) *image.Uniform {\n\tc := Hex(rgba)\n\tc.R, c.B = c.B, c.R\n\treturn image.NewUniform(c)\n}",
"func (color *Color) RGB() (uint8, uint8, uint8) {\n\treturn color.R, color.G, color.B\n}",
"func hueToRGB(p, q, t float64) float64 {\n if t < 0 {\n t += 1\n }\n if t > 1 {\n t -= 1\n }\n if t < 1.0/6 {\n return p + (q-p)*6*t\n }\n if t < 0.5 {\n return q\n }\n if t < 2.0/3 {\n return p + (q-p)*(2.0/3-t)*6\n }\n return p\n}",
"func blend(a, b color.Color, blend float64) color.RGBA {\n\tinv := float64(0) // float64(1)-blend\n\tar, ag, ab, aa := a.RGBA()\n\tbr, bg, bb, ba := b.RGBA()\n\tor, og, ob, _ := uint32(float64(ar)*blend+float64(br)*inv),\n\t\tuint32(float64(ag)*blend+float64(bg)*inv),\n\t\tuint32(float64(ab)*blend+float64(bb)*inv),\n\t\tuint32(float64(aa)*blend+float64(ba)*inv)\n\treturn color.RGBA{uint8(or >> 8), uint8(og >> 8), uint8(ob >> 8), 255} //uint8(oa>>8)}\n}",
"func Delta(a, b image.Image) (delta *image.RGBA, ok bool) {\n\tdelta = image.NewRGBA(a.Bounds())\n\tdirty := false\n\tfor y := a.Bounds().Min.Y; y < a.Bounds().Max.Y; y++ {\n\t\tfor x := a.Bounds().Min.X; x < a.Bounds().Max.X; x++ {\n\t\t\th := a.At(x, y)\n\t\t\tw := b.At(x, y)\n\t\t\tif EqualRGB(h, w) {\n\t\t\t\tdelta.Set(x, y, fg)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tdirty = true\n\t\t\tif EqualRGB(h, BG) {\n\t\t\t\tdelta.Set(x, y, color.RGBA{0, 0, 255, 255})\n\t\t\t} else {\n\t\t\t\tdelta.Set(x, y, color.RGBA{255, 0, 0, 255})\n\t\t\t}\n\t\t}\n\t}\n\treturn delta, !dirty\n}",
"func BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n C.glowBlendEquationSeparate(gpBlendEquationSeparate, (C.GLenum)(modeRGB), (C.GLenum)(modeAlpha))\n}",
"func getRGBA(element uint32) rgba {\n\tcolour := color.RGBA{\n\t\tR: uint8(0xFF & element),\n\t\tG: uint8(0xFF & (element >> 8)),\n\t\tB: uint8(0xFF & (element >> 16)),\n\t\tA: uint8(0xFF & (element >> 24)),\n\t}\n\trgba := rgba{\n\t\tRGBA: colour,\n\t}\n\trgba.Hex = rgba.printHex()\n\treturn rgba\n}",
"func (self *TraitPixbuf) AddAlpha(substitute_color bool, r uint8, g uint8, b uint8) (return__ *Pixbuf) {\n\t__cgo__substitute_color := C.gboolean(0)\n\tif substitute_color {\n\t\t__cgo__substitute_color = C.gboolean(1)\n\t}\n\tvar __cgo__return__ *C.GdkPixbuf\n\t__cgo__return__ = C.gdk_pixbuf_add_alpha(self.CPointer, __cgo__substitute_color, C.guchar(r), C.guchar(g), C.guchar(b))\n\tif __cgo__return__ != nil {\n\t\treturn__ = NewPixbufFromCPointer(unsafe.Pointer(reflect.ValueOf(__cgo__return__).Pointer()))\n\t}\n\treturn\n}",
"func (c NRGBAf32) RGBA() (r, g, b, a uint32) {\n\tr = uint32(c.R*c.A*65535.0 + 0.5)\n\tg = uint32(c.G*c.A*65535.0 + 0.5)\n\tb = uint32(c.B*c.A*65535.0 + 0.5)\n\ta = uint32(c.A*65535.0 + 0.5)\n\treturn\n}",
"func ColorRGB24(r, g, b int) Attribute {\n\treturn ColorRGB(r/51, g/51, b/51)\n}",
"func (rgb RGBColor) RGBA() (r, g, b, a uint32) {\n\tr = uint32(rgb.Red << 8)\n\tg = uint32(rgb.Green << 8)\n\tb = uint32(rgb.Blue << 8)\n\ta = 0xffff\n\treturn\n}",
"func CombineRGB(r, g, b *image.Gray) (out *image.RGBA) {\n\tinputs := []*image.Gray{r, g, b}\n\tfor i := 1; i < len(inputs); i++ {\n\t\tif inputs[i].Bounds() != inputs[0].Bounds() {\n\t\t\tpanic(\"image bounds do not match\")\n\t\t}\n\t}\n\tout = image.NewRGBA(inputs[0].Bounds())\n\tfor x := out.Bounds().Min.X; x < out.Bounds().Max.X; x++ {\n\t\tfor y := out.Bounds().Min.Y; y < out.Bounds().Max.Y; y++ {\n\t\t\toffset1 := out.PixOffset(x, y)\n\t\t\toffset2 := r.PixOffset(x, y)\n\t\t\tout.Pix[offset1+RED] = r.Pix[offset2]\n\t\t\tout.Pix[offset1+GREEN] = g.Pix[offset2]\n\t\t\tout.Pix[offset1+BLUE] = b.Pix[offset2]\n\t\t\tout.Pix[offset1+ALPHA] = 255\n\t\t}\n\t}\n\treturn\n}",
"func VPBLENDMD(ops ...operand.Op) { ctx.VPBLENDMD(ops...) }",
"func (r *ImageRef) PremultiplyAlpha() error {\n\tif r.preMultiplication != nil || !vipsHasAlpha(r.image) {\n\t\treturn nil\n\t}\n\n\tband := r.BandFormat()\n\n\tout, err := vipsPremultiplyAlpha(r.image)\n\tif err != nil {\n\t\treturn err\n\t}\n\tr.preMultiplication = &PreMultiplicationState{\n\t\tbandFormat: band,\n\t}\n\tr.setImage(out)\n\treturn nil\n}",
"func getRed(ftemp float64) int {\n\t//range from 0 to 20 now\n\tftempConv := ftemp - min\n\tred := (ftempConv / diff) * 255\n\treturn int(red)\n}",
"func RGB2VGAFg(r, g, b uint32) (bool, int) {\n\tred := r >> 8\n\tgreen := g >> 8\n\tblue := b >> 8\n\tswitch {\n\tcase red == 0 && green == 0 && blue == 0:\n\t\treturn true, 30\n\tcase red == 170 && green == 0 && blue == 0:\n\t\treturn true, 31\n\tcase red == 0 && green == 170 && blue == 0:\n\t\treturn true, 32\n\tcase red == 170 && green == 85 && blue == 0:\n\t\treturn true, 33\n\tcase red == 0 && green == 0 && blue == 170:\n\t\treturn true, 34\n\tcase red == 170 && green == 0 && blue == 170:\n\t\treturn true, 35\n\tcase red == 0 && green == 170 && blue == 170:\n\t\treturn true, 36\n\tcase red == 170 && green == 170 && blue == 170:\n\t\treturn true, 37\n\tcase red == 85 && green == 85 && blue == 85:\n\t\treturn true, 90\n\tcase red == 255 && green == 85 && blue == 85:\n\t\treturn true, 91\n\tcase red == 85 && green == 255 && blue == 85:\n\t\treturn true, 92\n\tcase red == 255 && green == 255 && blue == 85:\n\t\treturn true, 93\n\tcase red == 85 && green == 85 && blue == 255:\n\t\treturn true, 94\n\tcase red == 255 && green == 85 && blue == 255:\n\t\treturn true, 95\n\tcase red == 85 && green == 255 && blue == 255:\n\t\treturn true, 96\n\tcase red == 255 && green == 255 && blue == 255:\n\t\treturn true, 97\n\tdefault:\n\t\treturn false, 0\n\t}\n}",
"func (s *Surface) Alpha() float64 {\n\treturn s.Ctx.Get(\"globalAlpha\").Float()\n}",
"func Adjust(img image.Image, value float64) image.Image {\n\treturn utils.MapColor(img, AdjustC(value))\n}",
"func (c Color) WithA(a float32) Color {\n\tc.A = uint8(a * 255)\n\treturn c\n}",
"func getPixVal(c color.Color) float64 {\n\tr, _, _, _ := c.RGBA()\n\treturn float64(r >> 8)\n}",
"func NewAlpha(r Rectangle) *Alpha {\n\treturn &Alpha{\n\t\tPix: make([]uint8, pixelBufferLength(1, r, \"Alpha\")),\n\t\tStride: 1 * r.Dx(),\n\t\tRect: r,\n\t}\n}",
"func (c *Color) RGBA() (r, g, b, a uint32) {\n\treturn uint32(c.Red), uint32(c.Green), uint32(c.Blue), uint32(color.Opaque.A)\n}",
"func (c HSLA) RGBA() (r, g, b, a uint32) {\n\tfr, fg, fb := HSLtoRGBf32(c.H, c.S, c.L)\n\tr = uint32(fr*c.A*65535.0 + 0.5)\n\tg = uint32(fg*c.A*65535.0 + 0.5)\n\tb = uint32(fb*c.A*65535.0 + 0.5)\n\ta = uint32(c.A*65535.0 + 0.5)\n\treturn\n}",
"func BlendColor(red float32, green float32, blue float32, alpha float32) {\n\tsyscall.Syscall6(gpBlendColor, 4, uintptr(math.Float32bits(red)), uintptr(math.Float32bits(green)), uintptr(math.Float32bits(blue)), uintptr(math.Float32bits(alpha)), 0, 0)\n}",
"func VPBLENDMB(ops ...operand.Op) { ctx.VPBLENDMB(ops...) }",
"func testColorBoxRProperties(r, g, b, a uint8, w8, h8 int8) bool {\n\tc := color.RGBA{r, g, b, a}\n\tw := int(w8)\n\th := int(h8)\n\tsp := NewColorBoxR(w, h, c)\n\tw2, h2 := sp.GetDims()\n\tif w2 != w {\n\t\treturn false\n\t}\n\tif h2 != h {\n\t\treturn false\n\t}\n\timg := image.NewRGBA(image.Rect(0, 0, w, h))\n\tsp.Draw(img, 0, 0)\n\tfor x := 0; x < w; x++ {\n\t\tfor y := 0; y < h; y++ {\n\t\t\tcAt := img.RGBAAt(x, y)\n\t\t\tif c != cAt {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t}\n\treturn true\n}",
"func VBLENDPS(i, mxy, xy, xy1 operand.Op) { ctx.VBLENDPS(i, mxy, xy, xy1) }",
"func (c *Color) SetAlphaPreMult() bool {\n\tif c.A == 255 {\n\t\treturn false\n\t}\n\tr, g, b, a := c.ToFloat32()\n\tc.SetNPFloat32(r, g, b, a)\n\treturn true\n}",
"func (c *Color) Sub(dc Color) {\n\tr, g, b, a := c.RGBA() // uint32\n\tr = (r >> 8) - uint32(dc.R)\n\tg = (g >> 8) - uint32(dc.G)\n\tb = (b >> 8) - uint32(dc.B)\n\ta = (a >> 8) - uint32(dc.A)\n\tif r > 255 { // overflow\n\t\tr = 0\n\t}\n\tif g > 255 {\n\t\tg = 0\n\t}\n\tif b > 255 {\n\t\tb = 0\n\t}\n\tif a > 255 {\n\t\ta = 0\n\t}\n\tc.SetUInt8(uint8(r), uint8(g), uint8(b), uint8(a))\n}",
"func (v *Vector3) RGBA() (r, g, b, a uint32) {\n\t// Sqrt() for gamma-2 correction\n\t// color needs to be in hex\n\tr = uint32(util.Clamp(math.Sqrt(v.x), 0, 0.999) * toHex)\n\tg = uint32(util.Clamp(math.Sqrt(v.y), 0, 0.999) * toHex)\n\tb = uint32(util.Clamp(math.Sqrt(v.z), 0, 0.999) * toHex)\n\ta = uint32(toHex)\n\treturn\n}",
"func computeDistance(a *ColorRGBValue, b color.NRGBA) float64 {\n\tar := a.Red\n\tag := a.Green\n\tab := a.Blue\n\tbr := b.R\n\tbg := b.G\n\tbb := b.B\n\n\tdR := (br - ar) * (br - ar)\n\tdG := (bg - ag) * (bg - ag)\n\tdB := (bb - ab) * (bb - ab)\n\tdistance := math.Sqrt(float64(dR + dB + dG))\n\n\tdC := (255) ^ 2\n\tmaxColorDistance := math.Sqrt(float64(dC + dC + dC))\n\treturn distance / maxColorDistance\n}",
"func PBLENDVB(x, mx, x1 operand.Op) { ctx.PBLENDVB(x, mx, x1) }",
"func setBlendFunc(cmp pixel.ComposeMethod) {\n\tswitch cmp {\n\tcase pixel.ComposeOver:\n\t\tglhf.BlendFunc(glhf.One, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeIn:\n\t\tglhf.BlendFunc(glhf.DstAlpha, glhf.Zero)\n\tcase pixel.ComposeOut:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.Zero)\n\tcase pixel.ComposeAtop:\n\t\tglhf.BlendFunc(glhf.DstAlpha, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeRover:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.One)\n\tcase pixel.ComposeRin:\n\t\tglhf.BlendFunc(glhf.Zero, glhf.SrcAlpha)\n\tcase pixel.ComposeRout:\n\t\tglhf.BlendFunc(glhf.Zero, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposeRatop:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.SrcAlpha)\n\tcase pixel.ComposeXor:\n\t\tglhf.BlendFunc(glhf.OneMinusDstAlpha, glhf.OneMinusSrcAlpha)\n\tcase pixel.ComposePlus:\n\t\tglhf.BlendFunc(glhf.One, glhf.One)\n\tcase pixel.ComposeCopy:\n\t\tglhf.BlendFunc(glhf.One, glhf.Zero)\n\tdefault:\n\t\tpanic(errors.New(\"Canvas: invalid compose method\"))\n\t}\n}",
"func (p *RGBAf) SetRGB(x, y int, c *Vector3) {\n\tif !(image.Point{x, y}.In(p.Rect)) {\n\t\treturn\n\t}\n\ti := p.PixOffset(x, y)\n\ts := p.Pix[i : i+4 : i+4] // Small cap improves performance, see https://golang.org/issue/27857\n\ts[0] = Clamp(c.e[0]*255.999, 0.0, 255.0)\n\ts[1] = Clamp(c.e[1]*255.999, 0.0, 255.0)\n\ts[2] = Clamp(c.e[2]*255.999, 0.0, 255.0)\n\ts[3] = 255.0\n}",
"func (c *RGB) Difference(other *RGB) uint8 {\n\tvar diff uint8\n\n\tdiff += uint8(math.Abs(float64(c.r - other.r) / 3))\n\tdiff += uint8(math.Abs(float64(c.g - other.g) / 3))\n\tdiff += uint8(math.Abs(float64(c.b - other.b) / 3))\n\n\treturn diff\n}",
"func computeAverageColor(img image.Image) color.NRGBA {\n\tbounds := img.Bounds()\n\tpoints := uint64(0)\n\tred := uint64(0)\n\tgreen := uint64(0)\n\tblue := uint64(0)\n\tfor y := 0; y < bounds.Max.Y; y++ {\n\t\tfor x := 0; x < bounds.Max.X; x++ {\n\t\t\tcurColor := color.NRGBAModel.Convert(img.At(x, y)).(color.NRGBA)\n\t\t\tpoints++\n\t\t\tred += uint64(curColor.R)\n\t\t\tgreen += uint64(curColor.G)\n\t\t\tblue += uint64(curColor.B)\n\t\t}\n\t}\n\n\tavgRed := red / points\n\tavgGreen := green / points\n\tavgBlue := blue / points\n\n\treturn color.NRGBA{\n\t\tR: uint8(avgRed),\n\t\tG: uint8(avgGreen),\n\t\tB: uint8(avgBlue),\n\t\tA: 255,\n\t}\n}",
"func (c *Color) SetUInt32(r, g, b, a uint32) {\n\tc.R = uint8(r >> 8) // convert back to uint8\n\tc.G = uint8(g >> 8)\n\tc.B = uint8(b >> 8)\n\tc.A = uint8(a >> 8)\n}",
"func (hsi HSI) RGBA() (r, g, b, a uint32) {\n\th := hsi.H / 60\n\tz := 1 - math.Abs(math.Mod(h, 2)-1)\n\tchroma := 3 * hsi.I * hsi.S / (1 + z)\n\tx := chroma * z\n\n\tvar rFloat, gFloat, bFloat float64 = chroma, x, 0\n\n\tif h >= 5 {\n\t\trFloat = chroma\n\t\tgFloat = 0\n\t\tbFloat = x\n\t} else if h >= 4 {\n\t\trFloat = x\n\t\tgFloat = 0\n\t\tbFloat = chroma\n\t} else if h >= 3 {\n\t\trFloat = 0\n\t\tgFloat = x\n\t\tbFloat = chroma\n\t} else if h >= 2 {\n\t\trFloat = 0\n\t\tgFloat = chroma\n\t\tbFloat = x\n\t} else if h >= 1 {\n\t\trFloat = x\n\t\tgFloat = chroma\n\t\tbFloat = 0\n\t}\n\n\tm := hsi.I * (1 - hsi.S)\n\n\treturn uint32((rFloat + m) * 0xffff), uint32((gFloat + m) * 0xffff), uint32((bFloat + m) * 0xffff), 0xffff\n}",
"func (h *HSBA) RGB() color.RGBA {\n\trgba := h.RGBA()\n\trgba.A = 255\n\treturn rgba\n}",
"func RGB2VGABg(r, g, b uint32) (bool, int) {\n\tred := r >> 8\n\tgreen := g >> 8\n\tblue := b >> 8\n\tswitch {\n\tcase red == 0 && green == 0 && blue == 0:\n\t\treturn true, 40\n\tcase red == 170 && green == 0 && blue == 0:\n\t\treturn true, 41\n\tcase red == 0 && green == 170 && blue == 0:\n\t\treturn true, 42\n\tcase red == 170 && green == 85 && blue == 0:\n\t\treturn true, 43\n\tcase red == 0 && green == 0 && blue == 170:\n\t\treturn true, 44\n\tcase red == 170 && green == 0 && blue == 170:\n\t\treturn true, 45\n\tcase red == 0 && green == 170 && blue == 170:\n\t\treturn true, 46\n\tcase red == 170 && green == 170 && blue == 170:\n\t\treturn true, 47\n\tdefault:\n\t\treturn false, 0\n\t}\n}",
"func VPBLENDW(i, mxy, xy, xy1 operand.Op) { ctx.VPBLENDW(i, mxy, xy, xy1) }",
"func (p *Alpha16) Opaque() bool {\n\tif p.Rect.Empty() {\n\t\treturn true\n\t}\n\ti0, i1 := 0, p.Rect.Dx()*2\n\tfor y := p.Rect.Min.Y; y < p.Rect.Max.Y; y++ {\n\t\tfor i := i0; i < i1; i += 2 {\n\t\t\tif p.Pix[i+0] != 0xff || p.Pix[i+1] != 0xff {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t\ti0 += p.Stride\n\t\ti1 += p.Stride\n\t}\n\treturn true\n}",
"func (r *ImageRef) AddAlpha() error {\n\tif vipsHasAlpha(r.image) {\n\t\treturn nil\n\t}\n\n\tout, err := vipsAddAlpha(r.image)\n\tif err != nil {\n\t\treturn err\n\t}\n\tr.setImage(out)\n\treturn nil\n}",
"func colorPair(c color.Color) (hi, lo color.Color) {\n\tr, g, b, a := c.RGBA()\n\tr >>= 8\n\tg >>= 8\n\tb >>= 8\n\ta >>= 8\n\tmaxd := uint32(40)\n\tif r < maxd {\n\t\tmaxd = r\n\t}\n\tif g < maxd {\n\t\tmaxd = g\n\t}\n\tif b < maxd {\n\t\tmaxd = b\n\t}\n\tif r > 128 && (255-r) < maxd {\n\t\tmaxd = (255 - r)\n\t}\n\tif g > 128 && (255-g) < maxd {\n\t\tmaxd = (255 - g)\n\t}\n\tif b > 128 && (255-b) < maxd {\n\t\tmaxd = (255 - b)\n\t}\n\thi = color.RGBA{\n\t\tR: uint8(r + maxd),\n\t\tG: uint8(g + maxd),\n\t\tB: uint8(b + maxd),\n\t\tA: uint8(a),\n\t}\n\tlo = color.RGBA{\n\t\tR: uint8(r - maxd),\n\t\tG: uint8(g - maxd),\n\t\tB: uint8(b - maxd),\n\t\tA: uint8(a),\n\t}\n\treturn hi, lo\n}",
"func rgbasum(colorDay, colorNight color.Color, quantifier float64) color.RGBA {\n\tcDr, cDg, cDb, cDa := colorDay.RGBA()\n\tcNr, cNg, cNb, cNa := colorNight.RGBA()\n\n\t//https://en.wikipedia.org/wiki/Alpha_compositing#Alpha_blending\n\ta := cDa + cNa*(1-cDa)\n\tr := (float64(cDr)*quantifier + (float64(cNr) * (1 - quantifier)))\n\tg := (float64(cDg)*quantifier + (float64(cNg) * (1 - quantifier)))\n\tb := (float64(cDb)*quantifier + (float64(cNb) * (1 - quantifier)))\n\n\t//todo: alpha blending\n\t//https://jimdoescode.github.io/2015/05/22/manipulating-colors-in-go.html\n\treturn color.RGBA{uint8(r / 0x101), uint8(g / 0x101), uint8(b / 0x101), uint8(a / 0x101)}\n}",
"func (c Hex) RGB() (int, int, int) {\n\tr, err := strconv.ParseUint(c.Code[0:2], 16, 8)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tg, err := strconv.ParseUint(c.Code[2:4], 16, 8)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tb, err := strconv.ParseUint(c.Code[4:6], 16, 8)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\treturn int(r), int(g), int(b)\n}",
"func (self *Graphics) FillAlpha() int{\n return self.Object.Get(\"fillAlpha\").Int()\n}",
"func (c *Color) Add(dc Color) {\n\tr, g, b, a := c.RGBA() // uint32\n\tr = (r >> 8) + uint32(dc.R)\n\tg = (g >> 8) + uint32(dc.G)\n\tb = (b >> 8) + uint32(dc.B)\n\ta = (a >> 8) + uint32(dc.A)\n\tif r > 255 {\n\t\tr = 255\n\t}\n\tif g > 255 {\n\t\tg = 255\n\t}\n\tif b > 255 {\n\t\tb = 255\n\t}\n\tif a > 255 {\n\t\ta = 255\n\t}\n\tc.SetUInt8(uint8(r), uint8(g), uint8(b), uint8(a))\n}",
"func PBLENDW(i, mx, x operand.Op) { ctx.PBLENDW(i, mx, x) }",
"func drawDot(img *image.RGBA, q, r int, valueX, valueY float64, m int) {\n \n var myRed, myGreen, myBlue uint8\n i := inMandelSet(valueX, valueY, m)\n\n if (i > (m/2)) && (i < (3*m/2)) {myGreen = uint8( i * 255 / m)}\n if (i > (m/3)) && (i < (m/2)) {myRed = uint8( i * 255 / m)}\n myBlue = uint8( i * 255 / m )\n\n img.Set(q, r, color.RGBA{myRed, myGreen, myBlue , 255})\n return\n}",
"func rotateColorSpace(green_magenta, red_cyan, blue_yellow int) (red, green, blue int) {\n red = (green_magenta + blue_yellow )/2\n green = (red_cyan + blue_yellow )/2\n blue = (green_magenta + red_cyan )/2\n return\n}",
"func getP9RGBA(c color.Color) (uint8, uint8, uint8, uint8) {\n\tp := palette.Plan9\n\tr, g, b, a := color.Palette.Convert(p, c).RGBA()\n\treturn uint8(r >> 8), uint8(g >> 8), uint8(b >> 8), uint8(a >> 8)\n}",
"func (c Hex) RGBA() (r, g, b, a uint32) {\n\treturn c.getRGB().RGBA()\n}",
"func BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n\tsyscall.Syscall(gpBlendEquationSeparate, 2, uintptr(modeRGB), uintptr(modeAlpha), 0)\n}",
"func ALPHA() operators.Operator {\n\treturn operators.Alts(\n\t\t\"ALPHA\",\n\t\toperators.Range(\"%x41-5A\", []byte{65}, []byte{90}),\n\t\toperators.Range(\"%x61-7A\", []byte{97}, []byte{122}),\n\t)\n}",
"func blend(c1, c2 uint8, ratio float64) uint8 {\n\treturn uint8(math.Floor((float64(c1)*(1.0-ratio) + float64(c2)*ratio) + 0.5))\n}",
"func VBLENDPD(i, mxy, xy, xy1 operand.Op) { ctx.VBLENDPD(i, mxy, xy, xy1) }",
"func VPBLENDMW(ops ...operand.Op) { ctx.VPBLENDMW(ops...) }",
"func RGB(red, green, blue int) COLORREF {\n\treturn COLORREF(uint32(red) | uint32(green)<<8 | uint32(blue)<<16)\n}",
"func (s *ImageSpec) AlphaChannel() int {\n\tret := int(C.ImageSpec_alpha_channel(s.ptr))\n\truntime.KeepAlive(s)\n\treturn ret\n}",
"func Color(foreColor, backColor, mode gb.UINT8) {}",
"func (p *Alpha) Opaque() bool {\n\tif p.Rect.Empty() {\n\t\treturn true\n\t}\n\ti0, i1 := 0, p.Rect.Dx()\n\tfor y := p.Rect.Min.Y; y < p.Rect.Max.Y; y++ {\n\t\tfor i := i0; i < i1; i++ {\n\t\t\tif p.Pix[i] != 0xff {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t\ti0 += p.Stride\n\t\ti1 += p.Stride\n\t}\n\treturn true\n}",
"func VPBLENDVB(xy, mxy, xy1, xy2 operand.Op) { ctx.VPBLENDVB(xy, mxy, xy1, xy2) }",
"func Accum(op uint32, value float32) {\n\tC.glowAccum(gpAccum, (C.GLenum)(op), (C.GLfloat)(value))\n}",
"func BlendFuncSeparate(sfactorRGB uint32, dfactorRGB uint32, sfactorAlpha uint32, dfactorAlpha uint32) {\n C.glowBlendFuncSeparate(gpBlendFuncSeparate, (C.GLenum)(sfactorRGB), (C.GLenum)(dfactorRGB), (C.GLenum)(sfactorAlpha), (C.GLenum)(dfactorAlpha))\n}",
"func Red(rgb uint32) uint8 {\n\treturn uint8((rgb >> 16) & 0xff)\n}"
] | [
"0.6542087",
"0.6257929",
"0.61677504",
"0.6158084",
"0.60555196",
"0.6023693",
"0.6018105",
"0.5883505",
"0.5865986",
"0.58443886",
"0.58059335",
"0.575977",
"0.5702025",
"0.56576234",
"0.5562223",
"0.55082834",
"0.5490548",
"0.5484248",
"0.5483388",
"0.5443062",
"0.54419273",
"0.543838",
"0.5436057",
"0.54330146",
"0.54200715",
"0.5402055",
"0.5402055",
"0.5380444",
"0.5357527",
"0.53403634",
"0.53375995",
"0.5322341",
"0.5302701",
"0.528697",
"0.52838856",
"0.5264175",
"0.526285",
"0.526204",
"0.5261922",
"0.52474487",
"0.5231991",
"0.52246106",
"0.5217394",
"0.5216906",
"0.52140754",
"0.5211958",
"0.5170257",
"0.51679826",
"0.51675093",
"0.51659906",
"0.51546055",
"0.51510245",
"0.5145204",
"0.5136986",
"0.51360005",
"0.51340973",
"0.51198417",
"0.5116636",
"0.5115084",
"0.5110332",
"0.51062423",
"0.5098592",
"0.50974625",
"0.5091881",
"0.5091052",
"0.50818515",
"0.50748223",
"0.50440407",
"0.5035672",
"0.5032247",
"0.50218236",
"0.5020854",
"0.49987885",
"0.49939376",
"0.49907172",
"0.4988301",
"0.49803832",
"0.49652955",
"0.4960804",
"0.49569058",
"0.49546903",
"0.49539194",
"0.49526435",
"0.49523002",
"0.49493462",
"0.4936725",
"0.49366087",
"0.49360338",
"0.49354553",
"0.49349156",
"0.49337727",
"0.49235356",
"0.49182168",
"0.49106506",
"0.49045548",
"0.49010226",
"0.48991203",
"0.48846775",
"0.48840135",
"0.48752716",
"0.4872929"
] | 0.0 | -1 |
copy a block of pixels from one framebuffer object to another | func BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {
C.glowBlitFramebuffer(gpBlitFramebuffer, (C.GLint)(srcX0), (C.GLint)(srcY0), (C.GLint)(srcX1), (C.GLint)(srcY1), (C.GLint)(dstX0), (C.GLint)(dstY0), (C.GLint)(dstX1), (C.GLint)(dstY1), (C.GLbitfield)(mask), (C.GLenum)(filter))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func CopyBufferSubData(readTarget uint32, writeTarget uint32, readOffset int, writeOffset int, size int) {\n C.glowCopyBufferSubData(gpCopyBufferSubData, (C.GLenum)(readTarget), (C.GLenum)(writeTarget), (C.GLintptr)(readOffset), (C.GLintptr)(writeOffset), (C.GLsizeiptr)(size))\n}",
"func BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n C.glowBlitFramebuffer(gpBlitFramebuffer, (C.GLint)(srcX0), (C.GLint)(srcY0), (C.GLint)(srcX1), (C.GLint)(srcY1), (C.GLint)(dstX0), (C.GLint)(dstY0), (C.GLint)(dstX1), (C.GLint)(dstY1), (C.GLbitfield)(mask), (C.GLenum)(filter))\n}",
"func (fr *Frame) Copy(orig *Frame) {\n\tfr.Status = orig.Status\n\tfor y, row := range orig.Pix {\n\t\tcopy(fr.Pix[y][:], row)\n\t}\n}",
"func draw(window *glfw.Window, reactProg, landProg uint32) {\n\n\tvar renderLoops = 4\n\tfor i := 0; i < renderLoops; i++ {\n\t\t// -- DRAW TO BUFFER --\n\t\t// define destination of pixels\n\t\t//gl.BindFramebuffer(gl.FRAMEBUFFER, 0)\n\t\tgl.BindFramebuffer(gl.FRAMEBUFFER, FBO[1])\n\n\t\tgl.Viewport(0, 0, width, height) // Retina display doubles the framebuffer !?!\n\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)\n\t\tgl.UseProgram(reactProg)\n\n\t\t// bind Texture\n\t\tgl.ActiveTexture(gl.TEXTURE0)\n\t\tgl.BindTexture(gl.TEXTURE_2D, renderedTexture)\n\t\tgl.Uniform1i(uniTex, 0)\n\n\t\tgl.BindVertexArray(VAO)\n\t\tgl.DrawElements(gl.TRIANGLE_STRIP, int32(len(indices)), gl.UNSIGNED_INT, nil)\n\n\t\tgl.BindVertexArray(0)\n\n\t\t// -- copy back textures --\n\t\tgl.BindFramebuffer(gl.READ_FRAMEBUFFER, FBO[1]) // source is high res array\n\t\tgl.ReadBuffer(gl.COLOR_ATTACHMENT0)\n\t\tgl.BindFramebuffer(gl.DRAW_FRAMEBUFFER, FBO[0]) // destination is cells array\n\t\tgl.DrawBuffer(gl.COLOR_ATTACHMENT0)\n\t\tgl.BlitFramebuffer(0, 0, width, height,\n\t\t\t0, 0, cols, rows,\n\t\t\tgl.COLOR_BUFFER_BIT, gl.NEAREST) // downsample\n\t\tgl.BindFramebuffer(gl.READ_FRAMEBUFFER, FBO[0]) // source is low res array - put in texture\n\t\t// read pixels saves data read as unsigned bytes and then loads them in TexImage same way\n\t\tgl.ReadPixels(0, 0, cols, rows, gl.RGBA, gl.FLOAT, gl.Ptr(fData))\n\t\tgl.BindTexture(gl.TEXTURE_2D, renderedTexture)\n\t\tgl.TexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, cols, rows, 0, gl.RGBA, gl.FLOAT, gl.Ptr(fData))\n\t\tCheckGLErrors()\n\t}\n\t// -- DRAW TO SCREEN --\n\tvar model glm.Mat4\n\n\t// destination 0 means screen\n\tgl.BindFramebuffer(gl.FRAMEBUFFER, 0)\n\tgl.Viewport(0, 0, width*2, height*2) // Retina display doubles the framebuffer !?!\n\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)\n\tgl.UseProgram(landProg)\n\t// bind Texture\n\tgl.ActiveTexture(gl.TEXTURE0)\n\tgl.BindTexture(gl.TEXTURE_2D, drawTexture)\n\n\tvar view glm.Mat4\n\tvar brakeFactor = float64(20000.0)\n\tvar xCoord, yCoord float32\n\txCoord = float32(-3.0 * math.Sin(float64(myClock)))\n\tyCoord = float32(-3.0 * math.Cos(float64(myClock)))\n\t//xCoord = 0.0\n\t//yCoord = float32(-2.5)\n\tmyClock = math.Mod((myClock + float64(deltaTime)/brakeFactor), (math.Pi * 2))\n\tview = glm.LookAt(xCoord, yCoord, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0)\n\tgl.UniformMatrix4fv(uniView, 1, false, &view[0])\n\tmodel = glm.HomogRotate3DX(glm.DegToRad(00.0))\n\tgl.UniformMatrix4fv(uniModel, 1, false, &model[0])\n\tgl.Uniform1i(uniTex2, 0)\n\n\t// render container\n\t//gl.PolygonMode(gl.FRONT_AND_BACK, gl.FILL)\n\t//gl.PolygonMode(gl.FRONT_AND_BACK, gl.LINE)\n\n\tgl.BindVertexArray(VAO)\n\tgl.DrawElements(gl.TRIANGLE_STRIP, int32(len(indices)), gl.UNSIGNED_INT, nil)\n\tgl.BindVertexArray(0)\n\n\tCheckGLErrors()\n\n\tglfw.PollEvents()\n\twindow.SwapBuffers()\n\n\t//time.Sleep(100 * 1000 * 1000)\n}",
"func CopyPixels(x int32, y int32, width int32, height int32, xtype uint32) {\n C.glowCopyPixels(gpCopyPixels, (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(xtype))\n}",
"func BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tsyscall.Syscall12(gpBlitFramebuffer, 10, uintptr(srcX0), uintptr(srcY0), uintptr(srcX1), uintptr(srcY1), uintptr(dstX0), uintptr(dstY0), uintptr(dstX1), uintptr(dstY1), uintptr(mask), uintptr(filter), 0, 0)\n}",
"func Copy(dst draw.Image, src image.Image) {\n\tbd := src.Bounds().Intersect(dst.Bounds())\n\tat := imageutil.NewAtFunc(src)\n\tset := imageutil.NewSetFunc(dst)\n\timageutil.Parallel1D(bd, func(bd image.Rectangle) {\n\t\tfor y := bd.Min.Y; y < bd.Max.Y; y++ {\n\t\t\tfor x := bd.Min.X; x < bd.Max.X; x++ {\n\t\t\t\tr, g, b, a := at(x, y)\n\t\t\t\tset(x, y, r, g, b, a)\n\t\t\t}\n\t\t}\n\t})\n}",
"func (w *WebGLRenderTarget) Copy(source *WebGLRenderTarget) *WebGLRenderTarget {\n\tw.p.Call(\"copy\", source.p)\n\treturn w\n}",
"func (c *Canvas) copyTo(offset image.Point, dstSetCell setCellFunc) error {\n\tfor col := range c.buffer {\n\t\tfor row := range c.buffer[col] {\n\t\t\tpartial, err := c.buffer.IsPartial(image.Point{col, row})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif partial {\n\t\t\t\t// Skip over partial cells, i.e. cells that follow a cell\n\t\t\t\t// containing a full-width rune. A full-width rune takes only\n\t\t\t\t// one cell in the buffer, but two on the terminal.\n\t\t\t\t// See http://www.unicode.org/reports/tr11/.\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tcell := c.buffer[col][row]\n\t\t\tp := image.Point{col, row}.Add(offset)\n\t\t\tif err := dstSetCell(p, cell.Rune, cell.Opts); err != nil {\n\t\t\t\treturn fmt.Errorf(\"setCellFunc%v => error: %v\", p, err)\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}",
"func CopyBufferSubData(readTarget uint32, writeTarget uint32, readOffset int, writeOffset int, size int) {\n\tC.glowCopyBufferSubData(gpCopyBufferSubData, (C.GLenum)(readTarget), (C.GLenum)(writeTarget), (C.GLintptr)(readOffset), (C.GLintptr)(writeOffset), (C.GLsizeiptr)(size))\n}",
"func CopyBufferSubData(readTarget uint32, writeTarget uint32, readOffset int, writeOffset int, size int) {\n\tC.glowCopyBufferSubData(gpCopyBufferSubData, (C.GLenum)(readTarget), (C.GLenum)(writeTarget), (C.GLintptr)(readOffset), (C.GLintptr)(writeOffset), (C.GLsizeiptr)(size))\n}",
"func CopyPixels(x int32, y int32, width int32, height int32, xtype uint32) {\n\tsyscall.Syscall6(gpCopyPixels, 5, uintptr(x), uintptr(y), uintptr(width), uintptr(height), uintptr(xtype), 0)\n}",
"func (native *OpenGL) BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tgl.BlitFramebuffer(srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter)\n}",
"func CopyTexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, x int32, y int32, width int32, height int32) {\n C.glowCopyTexSubImage2D(gpCopyTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func CopyTexImage2D(target uint32, level int32, internalformat uint32, x int32, y int32, width int32, height int32, border int32) {\n C.glowCopyTexImage2D(gpCopyTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border))\n}",
"func CopyPixels(x int32, y int32, width int32, height int32, xtype uint32) {\n\tC.glowCopyPixels(gpCopyPixels, (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(xtype))\n}",
"func (s *Surface) Blit(source *Surface, x, y float64) {\n\ts.Ctx.Call(\"drawImage\", source.Canvas, math.Floor(x), math.Floor(y))\n}",
"func CopyTexImage2D(target uint32, level int32, internalformat uint32, x int32, y int32, width int32, height int32, border int32) {\n\tsyscall.Syscall9(gpCopyTexImage2D, 8, uintptr(target), uintptr(level), uintptr(internalformat), uintptr(x), uintptr(y), uintptr(width), uintptr(height), uintptr(border), 0)\n}",
"func (c *Container) setBitmapCopy(bitmap []uint64) {\n\tvar bitmapCopy [bitmapN]uint64\n\tcopy(bitmapCopy[:], bitmap)\n\tc.setBitmap(bitmapCopy[:])\n}",
"func CopyTexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, x int32, y int32, width int32, height int32) {\n\tsyscall.Syscall9(gpCopyTexSubImage2D, 8, uintptr(target), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(x), uintptr(y), uintptr(width), uintptr(height), 0)\n}",
"func CopyColorTable(target uint32, internalformat uint32, x int32, y int32, width int32) {\n C.glowCopyColorTable(gpCopyColorTable, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func (s *stencilOverdraw) copyImageAspect(ctx context.Context,\n\tcb CommandBuilder,\n\tgs *api.GlobalState,\n\tst *State,\n\ta arena.Arena,\n\tdevice VkDevice,\n\tcmdBuffer VkCommandBuffer,\n\tsrcImgDesc imageDesc,\n\tdstImgDesc imageDesc,\n\textent VkExtent3D,\n\talloc func(v ...interface{}) api.AllocResult,\n\taddCleanup func(func()),\n\tout transform.Writer,\n) {\n\tsrcImg := srcImgDesc.image\n\tdstImg := dstImgDesc.image\n\tcopyBuffer := s.createDepthCopyBuffer(ctx, cb, gs, st, a, device,\n\t\tsrcImg.Info().Fmt(),\n\t\textent.Width(), extent.Height(),\n\t\talloc, addCleanup, out)\n\n\tallCommandsStage := VkPipelineStageFlags(\n\t\tVkPipelineStageFlagBits_VK_PIPELINE_STAGE_ALL_COMMANDS_BIT)\n\tallMemoryAccess := VkAccessFlags(\n\t\tVkAccessFlagBits_VK_ACCESS_MEMORY_WRITE_BIT |\n\t\t\tVkAccessFlagBits_VK_ACCESS_MEMORY_READ_BIT)\n\n\timgBarriers0 := make([]VkImageMemoryBarrier, 2)\n\timgBarriers1 := make([]VkImageMemoryBarrier, 2)\n\t// Transition the src image in and out of the required layouts\n\timgBarriers0[0] = NewVkImageMemoryBarrier(a,\n\t\tVkStructureType_VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType\n\t\t0, // pNext\n\t\tallMemoryAccess, // srcAccessMask\n\t\tVkAccessFlags(VkAccessFlagBits_VK_ACCESS_TRANSFER_READ_BIT), // dstAccessMask\n\t\tsrcImgDesc.layout, // oldLayout\n\t\tVkImageLayout_VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // newLayout\n\t\t^uint32(0), // srcQueueFamilyIndex: VK_QUEUE_FAMILY_IGNORED\n\t\t^uint32(0), // dstQueueFamilyIndex\n\t\tsrcImg.VulkanHandle(), // image\n\t\tsrcImgDesc.subresource, // subresourceRange\n\t)\n\tsrcFinalLayout := srcImgDesc.layout\n\tif srcFinalLayout == VkImageLayout_VK_IMAGE_LAYOUT_UNDEFINED ||\n\t\tsrcFinalLayout == VkImageLayout_VK_IMAGE_LAYOUT_PREINITIALIZED {\n\t\tsrcFinalLayout = VkImageLayout_VK_IMAGE_LAYOUT_GENERAL\n\t}\n\timgBarriers1[0] = NewVkImageMemoryBarrier(a,\n\t\tVkStructureType_VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType\n\t\t0, // pNext\n\t\tVkAccessFlags(VkAccessFlagBits_VK_ACCESS_TRANSFER_READ_BIT), // srcAccessMask\n\t\tallMemoryAccess, // dstAccessMask\n\t\tVkImageLayout_VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // oldLayout\n\t\tsrcFinalLayout, // newLayout\n\t\t^uint32(0), // srcQueueFamilyIndex: VK_QUEUE_FAMILY_IGNORED\n\t\t^uint32(0), // dstQueueFamilyIndex\n\t\tsrcImg.VulkanHandle(), // image\n\t\tsrcImgDesc.subresource, // subresourceRange\n\t)\n\n\t// Transition the new image in and out of its required layouts\n\timgBarriers0[1] = NewVkImageMemoryBarrier(a,\n\t\tVkStructureType_VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType\n\t\t0, // pNext\n\t\tallMemoryAccess, // srcAccessMask\n\t\tVkAccessFlags(VkAccessFlagBits_VK_ACCESS_TRANSFER_WRITE_BIT), // dstAccessMask\n\t\tdstImgDesc.layout, // oldLayout\n\t\tVkImageLayout_VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // newLayout\n\t\t^uint32(0), // srcQueueFamilyIndex: VK_QUEUE_FAMILY_IGNORED\n\t\t^uint32(0), // dstQueueFamilyIndex\n\t\tdstImg.VulkanHandle(), // image\n\t\tdstImgDesc.subresource, // subresourceRange\n\t)\n\n\tdstFinalLayout := dstImgDesc.layout\n\tif dstFinalLayout == VkImageLayout_VK_IMAGE_LAYOUT_UNDEFINED ||\n\t\tdstFinalLayout == VkImageLayout_VK_IMAGE_LAYOUT_PREINITIALIZED {\n\t\tdstFinalLayout = VkImageLayout_VK_IMAGE_LAYOUT_GENERAL\n\t}\n\timgBarriers1[1] = NewVkImageMemoryBarrier(a,\n\t\tVkStructureType_VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType\n\t\t0, // pNext\n\t\tVkAccessFlags(VkAccessFlagBits_VK_ACCESS_TRANSFER_WRITE_BIT), // srcAccessMask\n\t\tallMemoryAccess, // 
dstAccessMask\n\t\tVkImageLayout_VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // oldLayout\n\t\tdstFinalLayout, // newLayout\n\t\t^uint32(0), // srcQueueFamilyIndex: VK_QUEUE_FAMILY_IGNORED\n\t\t^uint32(0), // dstQueueFamilyIndex\n\t\tdstImg.VulkanHandle(), // image\n\t\tdstImgDesc.subresource, // subresourceRange\n\t)\n\n\tbufBarrier := NewVkBufferMemoryBarrier(a,\n\t\tVkStructureType_VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // sType\n\t\t0, // pNext\n\t\tVkAccessFlags(VkAccessFlagBits_VK_ACCESS_TRANSFER_WRITE_BIT), // srcAccessMask\n\t\tVkAccessFlags(VkAccessFlagBits_VK_ACCESS_TRANSFER_READ_BIT), // dstAccessMask\n\t\t^uint32(0), // srcQueueFamilyIndex: VK_QUEUE_FAMILY_IGNORED\n\t\t^uint32(0), // dstQueueFamilyIndex\n\t\tcopyBuffer, // buffer\n\t\t0, // offset\n\t\t^VkDeviceSize(0), // size: VK_WHOLE_SIZE\n\t)\n\n\tibCopy := NewVkBufferImageCopy(a,\n\t\t0, // bufferOffset\n\t\t0, // bufferRowLength\n\t\t0, // bufferImageHeight\n\t\tNewVkImageSubresourceLayers(a,\n\t\t\tVkImageAspectFlags(srcImgDesc.aspect), // aspectMask\n\t\t\tsrcImgDesc.subresource.BaseMipLevel(), // mipLevel\n\t\t\tsrcImgDesc.subresource.BaseArrayLayer(), // baseArrayLayer\n\t\t\t1, // layerCount\n\t\t), // srcSubresource\n\t\tNewVkOffset3D(a, 0, 0, 0), // offset\n\t\tNewVkExtent3D(a, extent.Width(), extent.Height(), 1), // extent\n\t)\n\n\tbiCopy := NewVkBufferImageCopy(a,\n\t\t0, // bufferOffset\n\t\t0, // bufferRowLength\n\t\t0, // bufferImageHeight\n\t\tNewVkImageSubresourceLayers(a,\n\t\t\tVkImageAspectFlags(dstImgDesc.aspect), // aspectMask\n\t\t\tdstImgDesc.subresource.BaseMipLevel(), // mipLevel\n\t\t\tdstImgDesc.subresource.BaseArrayLayer(), // baseArrayLayer\n\t\t\t1, // layerCount\n\t\t), // srcSubresource\n\t\tNewVkOffset3D(a, 0, 0, 0), // offset\n\t\tNewVkExtent3D(a, extent.Width(), extent.Height(), 1), // extent\n\t)\n\n\timgBarriers0Data := alloc(imgBarriers0)\n\tibCopyData := alloc(ibCopy)\n\tbufBarrierData := alloc(bufBarrier)\n\tbiCopyData := alloc(biCopy)\n\timgBarriers1Data := alloc(imgBarriers1)\n\n\twriteEach(ctx, out,\n\t\tcb.VkCmdPipelineBarrier(cmdBuffer,\n\t\t\tallCommandsStage, // srcStageMask\n\t\t\tVkPipelineStageFlags(VkPipelineStageFlagBits_VK_PIPELINE_STAGE_TRANSFER_BIT), // dstStageMask\n\t\t\t0, // dependencyFlags\n\t\t\t0, // memoryBarrierCount\n\t\t\tmemory.Nullptr, // pMemoryBarriers\n\t\t\t0, // bufferMemoryBarrierCount\n\t\t\tmemory.Nullptr, // pBufferMemoryBarriers\n\t\t\t2, // imageMemoryBarrierCount\n\t\t\timgBarriers0Data.Ptr(), // pImageMemoryBarriers\n\t\t).AddRead(imgBarriers0Data.Data()),\n\t\tcb.VkCmdCopyImageToBuffer(cmdBuffer,\n\t\t\tsrcImg.VulkanHandle(), // srcImage\n\t\t\tVkImageLayout_VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // srcImageLayout\n\t\t\tcopyBuffer, // dstBuffer\n\t\t\t1, // regionCount\n\t\t\tibCopyData.Ptr(), // pRegions\n\t\t).AddRead(ibCopyData.Data()),\n\t\tcb.VkCmdPipelineBarrier(cmdBuffer,\n\t\t\tVkPipelineStageFlags(VkPipelineStageFlagBits_VK_PIPELINE_STAGE_TRANSFER_BIT), // srcStageMask\n\t\t\tVkPipelineStageFlags(VkPipelineStageFlagBits_VK_PIPELINE_STAGE_TRANSFER_BIT), // dstStageMask\n\t\t\t0, // dependencyFlags\n\t\t\t0, // memoryBarrierCount\n\t\t\tmemory.Nullptr, // pMemoryBarriers\n\t\t\t1, // bufferMemoryBarrierCount\n\t\t\tbufBarrierData.Ptr(), // pBufferMemoryBarriers\n\t\t\t0, // imageMemoryBarrierCount\n\t\t\tmemory.Nullptr, // pImageMemoryBarriers\n\t\t).AddRead(bufBarrierData.Data()),\n\t\tcb.VkCmdCopyBufferToImage(cmdBuffer,\n\t\t\tcopyBuffer, // srcBuffer\n\t\t\tdstImg.VulkanHandle(), // 
dstImage\n\t\t\tVkImageLayout_VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // dstImageLayout\n\t\t\t1, // regionCount\n\t\t\tbiCopyData.Ptr(), // pRegions\n\t\t).AddRead(biCopyData.Data()),\n\t\tcb.VkCmdPipelineBarrier(cmdBuffer,\n\t\t\tVkPipelineStageFlags(VkPipelineStageFlagBits_VK_PIPELINE_STAGE_TRANSFER_BIT), // srcStageMask\n\t\t\tallCommandsStage, // dstStageMask\n\t\t\t0, // dependencyFlags\n\t\t\t0, // memoryBarrierCount\n\t\t\tmemory.Nullptr, // pMemoryBarriers\n\t\t\t0, // bufferMemoryBarrierCount\n\t\t\tmemory.Nullptr, // pBufferMemoryBarriers\n\t\t\t2, // imageMemoryBarrierCount\n\t\t\timgBarriers1Data.Ptr(), // pImageMemoryBarriers\n\t\t).AddRead(imgBarriers1Data.Data()),\n\t)\n}",
"func CopyTexSubImage1D(target uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n C.glowCopyTexSubImage1D(gpCopyTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func (w *windowImpl) Copy(dp image.Point, src screen.Texture, sr image.Rectangle, op draw.Op, opts *screen.DrawOptions) {\n\tpanic(\"not implemented\") // TODO: Implement\n}",
"func CopyColorSubTable(target uint32, start int32, x int32, y int32, width int32) {\n C.glowCopyColorSubTable(gpCopyColorSubTable, (C.GLenum)(target), (C.GLsizei)(start), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func CopyTexSubImage2D(target Enum, level Int, xoffset Int, yoffset Int, x Int, y Int, width Sizei, height Sizei) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tclevel, _ := (C.GLint)(level), cgoAllocsUnknown\n\tcxoffset, _ := (C.GLint)(xoffset), cgoAllocsUnknown\n\tcyoffset, _ := (C.GLint)(yoffset), cgoAllocsUnknown\n\tcx, _ := (C.GLint)(x), cgoAllocsUnknown\n\tcy, _ := (C.GLint)(y), cgoAllocsUnknown\n\tcwidth, _ := (C.GLsizei)(width), cgoAllocsUnknown\n\tcheight, _ := (C.GLsizei)(height), cgoAllocsUnknown\n\tC.glCopyTexSubImage2D(ctarget, clevel, cxoffset, cyoffset, cx, cy, cwidth, cheight)\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n C.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))\n}",
"func Copy(i ImageIr) ImageIr {\n\tc := make([][][]uint32, len(i.pixels))\n\tcopy(c, i.pixels)\n\treturn ImageIr{i.width, i.height, c}\n}",
"func CopyBufferSubData(readTarget uint32, writeTarget uint32, readOffset int, writeOffset int, size int) {\n\tsyscall.Syscall6(gpCopyBufferSubData, 5, uintptr(readTarget), uintptr(writeTarget), uintptr(readOffset), uintptr(writeOffset), uintptr(size), 0)\n}",
"func (t *translator) copyBytes(\n\tirBlock *ir.Block, irPtr, irLen irvalue.Value,\n) irvalue.Value {\n\tirNewPtr := irBlock.NewCall(t.builtins.Malloc(t), irLen)\n\tirBlock.NewCall(t.builtins.Memcpy(t), irNewPtr, irPtr, irLen, irconstant.False)\n\treturn irNewPtr\n}",
"func CopyTexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, x int32, y int32, width int32, height int32) {\n\tC.glowCopyTexSubImage2D(gpCopyTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func CopyTexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, x int32, y int32, width int32, height int32) {\n\tC.glowCopyTexSubImage2D(gpCopyTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func CopyTexImage2D(target uint32, level int32, internalformat uint32, x int32, y int32, width int32, height int32, border int32) {\n\tC.glowCopyTexImage2D(gpCopyTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border))\n}",
"func CopyTexImage2D(target uint32, level int32, internalformat uint32, x int32, y int32, width int32, height int32, border int32) {\n\tC.glowCopyTexImage2D(gpCopyTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border))\n}",
"func (fr *Frame) CreateCopy() *Frame {\n\tframe := new(Frame)\n\tframe.Pix = make([][]uint16, len(fr.Pix))\n\tfor i := range fr.Pix {\n\t\tframe.Pix[i] = make([]uint16, len(fr.Pix[i]))\n\t}\n\tframe.Copy(fr)\n\treturn frame\n}",
"func CopyImageSubData(srcName uint32, srcTarget uint32, srcLevel int32, srcX int32, srcY int32, srcZ int32, dstName uint32, dstTarget uint32, dstLevel int32, dstX int32, dstY int32, dstZ int32, srcWidth int32, srcHeight int32, srcDepth int32) {\n C.glowCopyImageSubData(gpCopyImageSubData, (C.GLuint)(srcName), (C.GLenum)(srcTarget), (C.GLint)(srcLevel), (C.GLint)(srcX), (C.GLint)(srcY), (C.GLint)(srcZ), (C.GLuint)(dstName), (C.GLenum)(dstTarget), (C.GLint)(dstLevel), (C.GLint)(dstX), (C.GLint)(dstY), (C.GLint)(dstZ), (C.GLsizei)(srcWidth), (C.GLsizei)(srcHeight), (C.GLsizei)(srcDepth))\n}",
"func (self *TileSprite) SetCanvasBufferA(member *PIXICanvasBuffer) {\n self.Object.Set(\"canvasBuffer\", member)\n}",
"func (i *Image) readPixelsFromGPU() error {\n\tvar err error\n\ti.basePixels, err = i.image.Pixels()\n\tif err != nil {\n\t\treturn err\n\t}\n\ti.drawImageHistory = nil\n\ti.stale = false\n\treturn nil\n}",
"func BufferSubData(target uint32, offset int, size int, data unsafe.Pointer) {\n C.glowBufferSubData(gpBufferSubData, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(size), data)\n}",
"func newScreenFramebuffer(context *context, width, height int) *framebuffer {\n\treturn &framebuffer{\n\t\tnative: context.getScreenFramebuffer(),\n\t\twidth: width,\n\t\theight: height,\n\t}\n}",
"func newBitmapFrom(other *bitmap, size int) *bitmap {\n\tbitmap := newBitmap(size)\n\n\tif size > other.Size {\n\t\tsize = other.Size\n\t}\n\n\tdiv := size / 8\n\n\tfor i := 0; i < div; i++ {\n\t\tbitmap.data[i] = other.data[i]\n\t}\n\n\tfor i := div * 8; i < size; i++ {\n\t\tif other.Bit(i) == 1 {\n\t\t\tbitmap.Set(i)\n\t\t}\n\t}\n\n\treturn bitmap\n}",
"func InvalidateSubFramebuffer(target uint32, numAttachments int32, attachments *uint32, x int32, y int32, width int32, height int32) {\n C.glowInvalidateSubFramebuffer(gpInvalidateSubFramebuffer, (C.GLenum)(target), (C.GLsizei)(numAttachments), (*C.GLenum)(unsafe.Pointer(attachments)), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func (b *Buffer) Dump() {\n\ts := *b.screen\n\traster := canvas.NewRasterFromImage(b.context.Image())\n\ts.SetContent(raster)\n}",
"func (i *ImageBuf) SetFull(xbegin, xend, ybegin, yend, zbegin, zend int) {\n\tC.ImageBuf_set_full(\n\t\ti.ptr,\n\t\tC.int(xbegin), C.int(xend),\n\t\tC.int(ybegin), C.int(yend),\n\t\tC.int(zbegin), C.int(zend))\n\truntime.KeepAlive(i)\n}",
"func (s *Surface) Copy() *Surface {\n\tr := s.Rect()\n\tcopy := NewSurface(int(r.W), int(r.H))\n\tcopy.Blit(s, 0, 0)\n\treturn copy\n}",
"func (i *ImageBuf) CopyPixels(src *ImageBuf) error {\n\tok := bool(C.ImageBuf_copy_pixels(i.ptr, src.ptr))\n\truntime.KeepAlive(i)\n\truntime.KeepAlive(src)\n\tif !ok {\n\t\treturn i.LastError()\n\t}\n\treturn nil\n}",
"func CopyTexImage2D(target Enum, level Int, internalformat Enum, x Int, y Int, width Sizei, height Sizei, border Int) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tclevel, _ := (C.GLint)(level), cgoAllocsUnknown\n\tcinternalformat, _ := (C.GLenum)(internalformat), cgoAllocsUnknown\n\tcx, _ := (C.GLint)(x), cgoAllocsUnknown\n\tcy, _ := (C.GLint)(y), cgoAllocsUnknown\n\tcwidth, _ := (C.GLsizei)(width), cgoAllocsUnknown\n\tcheight, _ := (C.GLsizei)(height), cgoAllocsUnknown\n\tcborder, _ := (C.GLint)(border), cgoAllocsUnknown\n\tC.glCopyTexImage2D(ctarget, clevel, cinternalformat, cx, cy, cwidth, cheight, cborder)\n}",
"func (self *Rectangle) CopyTo(source interface{}) interface{}{\n return self.Object.Call(\"copyTo\", source)\n}",
"func copyStreamToDMABuf(w gpiostream.Stream, dst []uint32) error {\n\tswitch v := w.(type) {\n\tcase *gpiostream.BitStream:\n\t\tif v.LSBF {\n\t\t\treturn errors.New(\"TODO(simokawa): handle BitStream.LSBF\")\n\t\t}\n\t\t// This is big-endian and MSB first.\n\t\ti := 0\n\t\tfor ; i < len(v.Bits)/4; i++ {\n\t\t\tdst[i] = binary.BigEndian.Uint32(v.Bits[i*4:])\n\t\t}\n\t\tlast := uint32(0)\n\t\tif mod := len(v.Bits) % 4; mod > 0 {\n\t\t\tfor j := 0; j < mod; j++ {\n\t\t\t\tlast |= (uint32(v.Bits[i*4+j])) << uint32(8*(3-j))\n\t\t\t}\n\t\t\tdst[i] = last\n\t\t}\n\t\treturn nil\n\tcase *gpiostream.EdgeStream:\n\t\treturn errors.New(\"TODO(simokawa): handle EdgeStream\")\n\tdefault:\n\t\treturn errors.New(\"unsupported Stream type\")\n\t}\n}",
"func CopyTexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, x int32, y int32, width int32, height int32) {\n C.glowCopyTexSubImage3D(gpCopyTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func setPixel(x, y int, c color, pixels []byte) {\n\tindex := (y*windowWidth + x) * 4\n\n\tif index < len(pixels)-4 && index >= 0 {\n\t\tpixels[index] = c.r\n\t\tpixels[index+1] = c.g\n\t\tpixels[index+1] = c.b\n\t}\n}",
"func SwitchData(x, y gb.UINT8, src, dst []gb.UINT8) {}",
"func CopyTexSubImage2D(target GLEnum, level, xoffset, yoffset, x, y, width, height int32) {\n\tgl.CopyTexSubImage2D(uint32(target), level, xoffset, yoffset, x, y, width, height)\n}",
"func MapBufferRange(target uint32, offset int, length int, access uint32) unsafe.Pointer {\n ret := C.glowMapBufferRange(gpMapBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length), (C.GLbitfield)(access))\n return (unsafe.Pointer)(ret)\n}",
"func initFramebuffer(width, height int) {\n\tlog.Printf(\"[Video]: Initializing HW render (%v x %v).\\n\", width, height)\n\n\tgl.GenFramebuffers(1, &fboID)\n\tgl.BindFramebuffer(gl.FRAMEBUFFER, fboID)\n\n\t//gl.GenTextures(1, &video.texID)\n\tgl.BindTexture(gl.TEXTURE_2D, texID)\n\tgl.TexStorage2D(gl.TEXTURE_2D, 1, gl.RGBA8, int32(width), int32(height))\n\n\tgl.FramebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texID, 0)\n\n\thw := state.Global.Core.HWRenderCallback\n\n\tgl.BindRenderbuffer(gl.RENDERBUFFER, 0)\n\n\tif gl.CheckFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE {\n\t\tlog.Fatalln(\"[Video] Framebuffer is not complete.\")\n\t}\n\n\tgl.ClearColor(0, 0, 0, 1)\n\tif hw.Depth && hw.Stencil {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT | gl.STENCIL_BUFFER_BIT)\n\t} else if hw.Depth {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)\n\t} else {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT)\n\t}\n\n\tgl.BindFramebuffer(gl.FRAMEBUFFER, 0)\n}",
"func CopyTexSubImage2D(target Enum, level, xoffset, yoffset, x, y, width, height int) {\n\tgl.CopyTexSubImage2D(uint32(target), int32(level), int32(xoffset), int32(yoffset), int32(x), int32(y), int32(width), int32(height))\n}",
"func CopyColorTable(target uint32, internalformat uint32, x int32, y int32, width int32) {\n\tC.glowCopyColorTable(gpCopyColorTable, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func putPixel(screen []byte, color color, x int, y int) {\n\tscreenX := (windowWidth / 2) + x\n\tscreenY := (windowHeight / 2) - y - 1\n\tbase := (screenY*windowWidth + screenX) * 4\n\tscreen[base] = color.r\n\tscreen[base+1] = color.g\n\tscreen[base+2] = color.b\n\tscreen[base+3] = 0xFF\n\tscreen[0] = 0xFF\n}",
"func CopyMem(source uint64, dest uint64, size uint64)",
"func (debugging *debuggingOpenGL) BindFramebuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindFramebuffer\", target, buffer)\n\tdebugging.gl.BindFramebuffer(target, buffer)\n\tdebugging.recordExit(\"BindFramebuffer\")\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n C.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func CopyTexImage2D(target GLEnum, level int32, internalformat GLEnum, x, y, width, height, border int32) {\n\tgl.CopyTexImage2D(uint32(target), level, uint32(internalformat), x, y, width, height, border)\n}",
"func FlushMappedBufferRange(target uint32, offset int, length int) {\n C.glowFlushMappedBufferRange(gpFlushMappedBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func MapBuffer(target uint32, access uint32) unsafe.Pointer {\n ret := C.glowMapBuffer(gpMapBuffer, (C.GLenum)(target), (C.GLenum)(access))\n return (unsafe.Pointer)(ret)\n}",
"func (b *Board) copy() *Board {\n\tnewBoard := &Board{\n\t\tPlayer1: make([]Square, len(b.Player1)),\n\t\tPlayer2: make([]Square, len(b.Player2)),\n\t\tresult: b.result,\n\t}\n\tfor i := range b.Player1 {\n\t\tnewBoard.Player1[i] = b.Player1[i]\n\t}\n\tfor i := range b.Player2 {\n\t\tnewBoard.Player2[i] = b.Player2[i]\n\t}\n\treturn newBoard\n}",
"func CopyTexImage2D(target Enum, level int, internalformat Enum, x, y, width, height, border int) {\n\tgl.CopyTexImage2D(uint32(target), int32(level), uint32(internalformat), int32(x), int32(y), int32(width), int32(height), int32(border))\n}",
"func CopyTexSubImage1D(target uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tsyscall.Syscall6(gpCopyTexSubImage1D, 6, uintptr(target), uintptr(level), uintptr(xoffset), uintptr(x), uintptr(y), uintptr(width))\n}",
"func CopyColorSubTable(target uint32, start int32, x int32, y int32, width int32) {\n\tC.glowCopyColorSubTable(gpCopyColorSubTable, (C.GLenum)(target), (C.GLsizei)(start), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func (b *Buffer) CopyDataFrom(src *Buffer, srcOffset, dstOffset, size int) error {\n\tif size == 0 {\n\t\treturn nil\n\t}\n\n\terrCode := cl.EnqueueCopyBuffer(\n\t\tb.device.cmdQueue,\n\t\tsrc.bufHandle,\n\t\tb.bufHandle,\n\t\tuint64(srcOffset),\n\t\tuint64(dstOffset),\n\t\tuint64(size),\n\t\t0,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif errCode != cl.SUCCESS {\n\t\treturn fmt.Errorf(\"opencl device(%s): error copying device data from buffer %s to buffer %s (errCode %d)\", b.device.Name, src.name, b.name, errCode)\n\t}\n\treturn nil\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n\tsyscall.Syscall(gpBindFramebuffer, 2, uintptr(target), uintptr(framebuffer), 0)\n}",
"func CopyTexSubImage1D(target uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tC.glowCopyTexSubImage1D(gpCopyTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func CopyTexSubImage1D(target uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tC.glowCopyTexSubImage1D(gpCopyTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func CopyTexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, x int32, y int32, width int32, height int32) {\n\tsyscall.Syscall9(gpCopyTexSubImage3D, 9, uintptr(target), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(zoffset), uintptr(x), uintptr(y), uintptr(width), uintptr(height))\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n\tC.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n\tC.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))\n}",
"func MemCopy(dst unsafe.Pointer, src unsafe.Pointer, bytes int) {\n\tfor i := 0; i < bytes; i++ {\n\t\t*(*uint8)(MemAccess(dst, i)) = *(*uint8)(MemAccess(src, i))\n\t}\n}",
"func block(h *[4][16]uint32, base uintptr, offsets *[16]uint32, mask uint16)",
"func CopyConvolutionFilter2D(target uint32, internalformat uint32, x int32, y int32, width int32, height int32) {\n C.glowCopyConvolutionFilter2D(gpCopyConvolutionFilter2D, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func (b *XMobileBackend) PutImageData(img *image.RGBA, x, y int) {\n\tb.activate()\n\n\tb.glctx.ActiveTexture(gl.TEXTURE0)\n\tif b.imageBufTex.Value == 0 {\n\t\tb.imageBufTex = b.glctx.CreateTexture()\n\t\tb.glctx.BindTexture(gl.TEXTURE_2D, b.imageBufTex)\n\t\tb.glctx.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR)\n\t\tb.glctx.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR)\n\t\tb.glctx.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)\n\t\tb.glctx.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)\n\t} else {\n\t\tb.glctx.BindTexture(gl.TEXTURE_2D, b.imageBufTex)\n\t}\n\n\tw, h := img.Bounds().Dx(), img.Bounds().Dy()\n\n\tif img.Stride == img.Bounds().Dx()*4 {\n\t\tb.glctx.TexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, w, h, gl.RGBA, gl.UNSIGNED_BYTE, img.Pix[0:])\n\t} else {\n\t\tdata := make([]uint8, 0, w*h*4)\n\t\tfor cy := 0; cy < h; cy++ {\n\t\t\tstart := cy * img.Stride\n\t\t\tend := start + w*4\n\t\t\tdata = append(data, img.Pix[start:end]...)\n\t\t}\n\t\tb.glctx.TexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, w, h, gl.RGBA, gl.UNSIGNED_BYTE, data[0:])\n\t}\n\n\tdx, dy := float32(x), float32(y)\n\tdw, dh := float32(w), float32(h)\n\n\tb.glctx.BindBuffer(gl.ARRAY_BUFFER, b.buf)\n\tdata := [16]float32{dx, dy, dx + dw, dy, dx + dw, dy + dh, dx, dy + dh,\n\t\t0, 0, 1, 0, 1, 1, 0, 1}\n\tb.glctx.BufferData(gl.ARRAY_BUFFER, byteSlice(unsafe.Pointer(&data[0]), len(data)*4), gl.STREAM_DRAW)\n\n\tb.glctx.UseProgram(b.shd.ID)\n\tb.glctx.Uniform1i(b.shd.Image, 0)\n\tb.glctx.Uniform2f(b.shd.CanvasSize, float32(b.fw), float32(b.fh))\n\tb.glctx.UniformMatrix3fv(b.shd.Matrix, mat3identity[:])\n\tb.glctx.Uniform1f(b.shd.GlobalAlpha, 1)\n\tb.glctx.Uniform1i(b.shd.UseAlphaTex, 0)\n\tb.glctx.Uniform1i(b.shd.Func, shdFuncImage)\n\tb.glctx.VertexAttribPointer(b.shd.Vertex, 2, gl.FLOAT, false, 0, 0)\n\tb.glctx.VertexAttribPointer(b.shd.TexCoord, 2, gl.FLOAT, false, 0, 8*4)\n\tb.glctx.EnableVertexAttribArray(b.shd.Vertex)\n\tb.glctx.EnableVertexAttribArray(b.shd.TexCoord)\n\tb.glctx.DrawArrays(gl.TRIANGLE_FAN, 0, 4)\n\tb.glctx.DisableVertexAttribArray(b.shd.Vertex)\n\tb.glctx.DisableVertexAttribArray(b.shd.TexCoord)\n}",
"func BenchmarkMemcopy2(b *testing.B) {\n\tlen := 1920 * 1080 * 3 / 2\n\tdst := make([]byte, len, len)\n\tb.ResetTimer()\n\tfor i := 0; i < b.N; i++ {\n\t\tMemset(dst, 100)\n\t}\n}",
"func InvalidateSubFramebuffer(target uint32, numAttachments int32, attachments *uint32, x int32, y int32, width int32, height int32) {\n\tsyscall.Syscall9(gpInvalidateSubFramebuffer, 7, uintptr(target), uintptr(numAttachments), uintptr(unsafe.Pointer(attachments)), uintptr(x), uintptr(y), uintptr(width), uintptr(height), 0, 0)\n}",
"func MergeScreenAndBlock(ScreenEmpty [][]int,block [][]int, locate []int)([][]int){\n\tBlockInScreen := CopyNewBlock(ScreenEmpty)\n\tfor row := 0; row<len(block); row++{\n\t\tfor colum :=0; colum<len(block[row]); colum++{\n\t\t\tif block[row][colum] != 0{\n\t\t\t\tnum := block[row][colum]\n\t\t\t\tBlockInScreen[row + locate[0]][colum + locate[1]] = num\n\t\t\t}\n\t\t}\n\t}\n\treturn BlockInScreen\n}",
"func clone(dst, src *image.Gray) {\n\tif dst.Stride == src.Stride {\n\t\t// no need to correct stride, simply copy pixels.\n\t\tcopy(dst.Pix, src.Pix)\n\t\treturn\n\t}\n\t// need to correct stride.\n\tfor i := 0; i < src.Rect.Dy(); i++ {\n\t\tdstH := i * dst.Stride\n\t\tsrcH := i * src.Stride\n\t\tcopy(dst.Pix[dstH:dstH+dst.Stride], src.Pix[srcH:srcH+dst.Stride])\n\t}\n}",
"func (self *GameObjectCreator) BitmapData2O(width int, height int) *BitmapData{\n return &BitmapData{self.Object.Call(\"bitmapData\", width, height)}\n}",
"func (r *Resources) Copy(other *Resources) {\n\tr.CPU = other.CPU\n\tr.DISK = other.DISK\n\tr.MEMORY = other.MEMORY\n\tr.GPU = other.GPU\n}",
"func CopyTexImage1D(target uint32, level int32, internalformat uint32, x int32, y int32, width int32, border int32) {\n C.glowCopyTexImage1D(gpCopyTexImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLint)(border))\n}",
"func (c *Canvas) SetPixels(pixels []uint8) {\n\tc.gf.Dirty()\n\n\tmainthread.Call(func() {\n\t\ttex := c.Texture()\n\t\ttex.Begin()\n\t\ttex.SetPixels(0, 0, tex.Width(), tex.Height(), pixels)\n\t\ttex.End()\n\t})\n}",
"func (d *Display) resetBuffer() {\n\td.width = d.device.Width()\n\td.height = d.device.Height()\n\td.buffer = make([][]byte, d.height)\n\tfor y := range d.buffer {\n\t\td.buffer[y] = make([]byte, d.width)\n\t}\n}",
"func copyTo(dst []byte, src []byte, offset int) {\n\tfor j, k := range src {\n\t\tdst[offset+j] = k\n\t}\n}",
"func (native *OpenGL) BindFramebuffer(target, buffer uint32) {\n\tgl.BindFramebuffer(target, buffer)\n}",
"func BenchmarkMemcopy(b *testing.B) {\n\tlen := 1920 * 1080 * 3 / 2\n\tdst := make([]byte, len, len)\n\tb.ResetTimer()\n\tfor i := 0; i < b.N; i++ {\n\t\tfor j := 0; j < len; j++ {\n\t\t\tdst[i] = 100\n\t\t}\n\t}\n}",
"func CopyTexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, x int32, y int32, width int32, height int32) {\n\tC.glowCopyTexSubImage3D(gpCopyTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func CopyTexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, x int32, y int32, width int32, height int32) {\n\tC.glowCopyTexSubImage3D(gpCopyTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func (i *ImageBuf) Copy(src *ImageBuf) error {\n\tok := bool(C.ImageBuf_copy(i.ptr, src.ptr))\n\truntime.KeepAlive(i)\n\truntime.KeepAlive(src)\n\tif !ok {\n\t\treturn i.LastError()\n\t}\n\treturn nil\n}",
"func GetBufferSubData(target uint32, offset int, size int, data unsafe.Pointer) {\n C.glowGetBufferSubData(gpGetBufferSubData, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(size), data)\n}",
"func (r *AMD64Registers) Copy() (proc.Registers, error) {\n\tvar rr AMD64Registers\n\trr = *r\n\trr.Context = NewCONTEXT()\n\t*(rr.Context) = *(r.Context)\n\trr.fltSave = &rr.Context.FltSave\n\treturn &rr, nil\n}",
"func CopyObject(srcData []byte, dst interface{}) {\n\tjsoniter.Unmarshal(srcData, dst)\n\t//var dstData, err = jsoniter.Marshal(dst)\n\t//if err != nil {\n\t//\tpanic(err)\n\t//}\n\t//fmt.Println(\"overlay:\", string(dstData))\n}",
"func InvalidateSubFramebuffer(target uint32, numAttachments int32, attachments *uint32, x int32, y int32, width int32, height int32) {\n\tC.glowInvalidateSubFramebuffer(gpInvalidateSubFramebuffer, (C.GLenum)(target), (C.GLsizei)(numAttachments), (*C.GLenum)(unsafe.Pointer(attachments)), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func InvalidateSubFramebuffer(target uint32, numAttachments int32, attachments *uint32, x int32, y int32, width int32, height int32) {\n\tC.glowInvalidateSubFramebuffer(gpInvalidateSubFramebuffer, (C.GLenum)(target), (C.GLsizei)(numAttachments), (*C.GLenum)(unsafe.Pointer(attachments)), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}"
] | [
"0.616907",
"0.6164028",
"0.6043857",
"0.5881865",
"0.5830083",
"0.5776536",
"0.57541645",
"0.56396943",
"0.56240493",
"0.5591138",
"0.5591138",
"0.55904067",
"0.55865586",
"0.5573469",
"0.5500675",
"0.54852164",
"0.5402666",
"0.5361507",
"0.53565097",
"0.53402716",
"0.5313543",
"0.531168",
"0.52921325",
"0.5284725",
"0.527589",
"0.5274478",
"0.525763",
"0.52500004",
"0.5233401",
"0.52221763",
"0.5197874",
"0.5197874",
"0.51866096",
"0.51866096",
"0.518625",
"0.5182851",
"0.51632285",
"0.5140699",
"0.51305205",
"0.51239336",
"0.5120676",
"0.5116303",
"0.51051193",
"0.50948185",
"0.5087771",
"0.5077174",
"0.506995",
"0.50462323",
"0.5036454",
"0.5010605",
"0.49990457",
"0.49900433",
"0.49796468",
"0.4975465",
"0.49695483",
"0.49602315",
"0.49574336",
"0.49469355",
"0.49434468",
"0.49068624",
"0.49039158",
"0.4899831",
"0.48979488",
"0.4876837",
"0.4871397",
"0.48675042",
"0.48673463",
"0.4867201",
"0.4854063",
"0.4847975",
"0.48404652",
"0.48404652",
"0.48368424",
"0.48367357",
"0.48367357",
"0.48344055",
"0.48322484",
"0.48307347",
"0.48227397",
"0.48208123",
"0.4816958",
"0.48019165",
"0.47951213",
"0.47920433",
"0.47902718",
"0.4788789",
"0.47817072",
"0.47652838",
"0.47610953",
"0.4756943",
"0.47553828",
"0.47511455",
"0.47511455",
"0.47503197",
"0.4746795",
"0.47448748",
"0.4730114",
"0.47270277",
"0.47270277"
] | 0.5537316 | 15 |
copy a block of pixels from one framebuffer object to another | func BlitNamedFramebuffer(readFramebuffer uint32, drawFramebuffer uint32, srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {
C.glowBlitNamedFramebuffer(gpBlitNamedFramebuffer, (C.GLuint)(readFramebuffer), (C.GLuint)(drawFramebuffer), (C.GLint)(srcX0), (C.GLint)(srcY0), (C.GLint)(srcX1), (C.GLint)(srcY1), (C.GLint)(dstX0), (C.GLint)(dstY0), (C.GLint)(dstX1), (C.GLint)(dstY1), (C.GLbitfield)(mask), (C.GLenum)(filter))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func CopyBufferSubData(readTarget uint32, writeTarget uint32, readOffset int, writeOffset int, size int) {\n C.glowCopyBufferSubData(gpCopyBufferSubData, (C.GLenum)(readTarget), (C.GLenum)(writeTarget), (C.GLintptr)(readOffset), (C.GLintptr)(writeOffset), (C.GLsizeiptr)(size))\n}",
"func BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n C.glowBlitFramebuffer(gpBlitFramebuffer, (C.GLint)(srcX0), (C.GLint)(srcY0), (C.GLint)(srcX1), (C.GLint)(srcY1), (C.GLint)(dstX0), (C.GLint)(dstY0), (C.GLint)(dstX1), (C.GLint)(dstY1), (C.GLbitfield)(mask), (C.GLenum)(filter))\n}",
"func (fr *Frame) Copy(orig *Frame) {\n\tfr.Status = orig.Status\n\tfor y, row := range orig.Pix {\n\t\tcopy(fr.Pix[y][:], row)\n\t}\n}",
"func draw(window *glfw.Window, reactProg, landProg uint32) {\n\n\tvar renderLoops = 4\n\tfor i := 0; i < renderLoops; i++ {\n\t\t// -- DRAW TO BUFFER --\n\t\t// define destination of pixels\n\t\t//gl.BindFramebuffer(gl.FRAMEBUFFER, 0)\n\t\tgl.BindFramebuffer(gl.FRAMEBUFFER, FBO[1])\n\n\t\tgl.Viewport(0, 0, width, height) // Retina display doubles the framebuffer !?!\n\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)\n\t\tgl.UseProgram(reactProg)\n\n\t\t// bind Texture\n\t\tgl.ActiveTexture(gl.TEXTURE0)\n\t\tgl.BindTexture(gl.TEXTURE_2D, renderedTexture)\n\t\tgl.Uniform1i(uniTex, 0)\n\n\t\tgl.BindVertexArray(VAO)\n\t\tgl.DrawElements(gl.TRIANGLE_STRIP, int32(len(indices)), gl.UNSIGNED_INT, nil)\n\n\t\tgl.BindVertexArray(0)\n\n\t\t// -- copy back textures --\n\t\tgl.BindFramebuffer(gl.READ_FRAMEBUFFER, FBO[1]) // source is high res array\n\t\tgl.ReadBuffer(gl.COLOR_ATTACHMENT0)\n\t\tgl.BindFramebuffer(gl.DRAW_FRAMEBUFFER, FBO[0]) // destination is cells array\n\t\tgl.DrawBuffer(gl.COLOR_ATTACHMENT0)\n\t\tgl.BlitFramebuffer(0, 0, width, height,\n\t\t\t0, 0, cols, rows,\n\t\t\tgl.COLOR_BUFFER_BIT, gl.NEAREST) // downsample\n\t\tgl.BindFramebuffer(gl.READ_FRAMEBUFFER, FBO[0]) // source is low res array - put in texture\n\t\t// read pixels saves data read as unsigned bytes and then loads them in TexImage same way\n\t\tgl.ReadPixels(0, 0, cols, rows, gl.RGBA, gl.FLOAT, gl.Ptr(fData))\n\t\tgl.BindTexture(gl.TEXTURE_2D, renderedTexture)\n\t\tgl.TexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, cols, rows, 0, gl.RGBA, gl.FLOAT, gl.Ptr(fData))\n\t\tCheckGLErrors()\n\t}\n\t// -- DRAW TO SCREEN --\n\tvar model glm.Mat4\n\n\t// destination 0 means screen\n\tgl.BindFramebuffer(gl.FRAMEBUFFER, 0)\n\tgl.Viewport(0, 0, width*2, height*2) // Retina display doubles the framebuffer !?!\n\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)\n\tgl.UseProgram(landProg)\n\t// bind Texture\n\tgl.ActiveTexture(gl.TEXTURE0)\n\tgl.BindTexture(gl.TEXTURE_2D, drawTexture)\n\n\tvar view glm.Mat4\n\tvar brakeFactor = float64(20000.0)\n\tvar xCoord, yCoord float32\n\txCoord = float32(-3.0 * math.Sin(float64(myClock)))\n\tyCoord = float32(-3.0 * math.Cos(float64(myClock)))\n\t//xCoord = 0.0\n\t//yCoord = float32(-2.5)\n\tmyClock = math.Mod((myClock + float64(deltaTime)/brakeFactor), (math.Pi * 2))\n\tview = glm.LookAt(xCoord, yCoord, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0)\n\tgl.UniformMatrix4fv(uniView, 1, false, &view[0])\n\tmodel = glm.HomogRotate3DX(glm.DegToRad(00.0))\n\tgl.UniformMatrix4fv(uniModel, 1, false, &model[0])\n\tgl.Uniform1i(uniTex2, 0)\n\n\t// render container\n\t//gl.PolygonMode(gl.FRONT_AND_BACK, gl.FILL)\n\t//gl.PolygonMode(gl.FRONT_AND_BACK, gl.LINE)\n\n\tgl.BindVertexArray(VAO)\n\tgl.DrawElements(gl.TRIANGLE_STRIP, int32(len(indices)), gl.UNSIGNED_INT, nil)\n\tgl.BindVertexArray(0)\n\n\tCheckGLErrors()\n\n\tglfw.PollEvents()\n\twindow.SwapBuffers()\n\n\t//time.Sleep(100 * 1000 * 1000)\n}",
"func CopyPixels(x int32, y int32, width int32, height int32, xtype uint32) {\n C.glowCopyPixels(gpCopyPixels, (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(xtype))\n}",
"func BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tsyscall.Syscall12(gpBlitFramebuffer, 10, uintptr(srcX0), uintptr(srcY0), uintptr(srcX1), uintptr(srcY1), uintptr(dstX0), uintptr(dstY0), uintptr(dstX1), uintptr(dstY1), uintptr(mask), uintptr(filter), 0, 0)\n}",
"func Copy(dst draw.Image, src image.Image) {\n\tbd := src.Bounds().Intersect(dst.Bounds())\n\tat := imageutil.NewAtFunc(src)\n\tset := imageutil.NewSetFunc(dst)\n\timageutil.Parallel1D(bd, func(bd image.Rectangle) {\n\t\tfor y := bd.Min.Y; y < bd.Max.Y; y++ {\n\t\t\tfor x := bd.Min.X; x < bd.Max.X; x++ {\n\t\t\t\tr, g, b, a := at(x, y)\n\t\t\t\tset(x, y, r, g, b, a)\n\t\t\t}\n\t\t}\n\t})\n}",
"func (w *WebGLRenderTarget) Copy(source *WebGLRenderTarget) *WebGLRenderTarget {\n\tw.p.Call(\"copy\", source.p)\n\treturn w\n}",
"func (c *Canvas) copyTo(offset image.Point, dstSetCell setCellFunc) error {\n\tfor col := range c.buffer {\n\t\tfor row := range c.buffer[col] {\n\t\t\tpartial, err := c.buffer.IsPartial(image.Point{col, row})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif partial {\n\t\t\t\t// Skip over partial cells, i.e. cells that follow a cell\n\t\t\t\t// containing a full-width rune. A full-width rune takes only\n\t\t\t\t// one cell in the buffer, but two on the terminal.\n\t\t\t\t// See http://www.unicode.org/reports/tr11/.\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tcell := c.buffer[col][row]\n\t\t\tp := image.Point{col, row}.Add(offset)\n\t\t\tif err := dstSetCell(p, cell.Rune, cell.Opts); err != nil {\n\t\t\t\treturn fmt.Errorf(\"setCellFunc%v => error: %v\", p, err)\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}",
"func CopyBufferSubData(readTarget uint32, writeTarget uint32, readOffset int, writeOffset int, size int) {\n\tC.glowCopyBufferSubData(gpCopyBufferSubData, (C.GLenum)(readTarget), (C.GLenum)(writeTarget), (C.GLintptr)(readOffset), (C.GLintptr)(writeOffset), (C.GLsizeiptr)(size))\n}",
"func CopyBufferSubData(readTarget uint32, writeTarget uint32, readOffset int, writeOffset int, size int) {\n\tC.glowCopyBufferSubData(gpCopyBufferSubData, (C.GLenum)(readTarget), (C.GLenum)(writeTarget), (C.GLintptr)(readOffset), (C.GLintptr)(writeOffset), (C.GLsizeiptr)(size))\n}",
"func CopyPixels(x int32, y int32, width int32, height int32, xtype uint32) {\n\tsyscall.Syscall6(gpCopyPixels, 5, uintptr(x), uintptr(y), uintptr(width), uintptr(height), uintptr(xtype), 0)\n}",
"func (native *OpenGL) BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tgl.BlitFramebuffer(srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter)\n}",
"func CopyTexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, x int32, y int32, width int32, height int32) {\n C.glowCopyTexSubImage2D(gpCopyTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tC.glowBlitFramebuffer(gpBlitFramebuffer, (C.GLint)(srcX0), (C.GLint)(srcY0), (C.GLint)(srcX1), (C.GLint)(srcY1), (C.GLint)(dstX0), (C.GLint)(dstY0), (C.GLint)(dstX1), (C.GLint)(dstY1), (C.GLbitfield)(mask), (C.GLenum)(filter))\n}",
"func BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tC.glowBlitFramebuffer(gpBlitFramebuffer, (C.GLint)(srcX0), (C.GLint)(srcY0), (C.GLint)(srcX1), (C.GLint)(srcY1), (C.GLint)(dstX0), (C.GLint)(dstY0), (C.GLint)(dstX1), (C.GLint)(dstY1), (C.GLbitfield)(mask), (C.GLenum)(filter))\n}",
"func CopyTexImage2D(target uint32, level int32, internalformat uint32, x int32, y int32, width int32, height int32, border int32) {\n C.glowCopyTexImage2D(gpCopyTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border))\n}",
"func CopyPixels(x int32, y int32, width int32, height int32, xtype uint32) {\n\tC.glowCopyPixels(gpCopyPixels, (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLenum)(xtype))\n}",
"func (s *Surface) Blit(source *Surface, x, y float64) {\n\ts.Ctx.Call(\"drawImage\", source.Canvas, math.Floor(x), math.Floor(y))\n}",
"func CopyTexImage2D(target uint32, level int32, internalformat uint32, x int32, y int32, width int32, height int32, border int32) {\n\tsyscall.Syscall9(gpCopyTexImage2D, 8, uintptr(target), uintptr(level), uintptr(internalformat), uintptr(x), uintptr(y), uintptr(width), uintptr(height), uintptr(border), 0)\n}",
"func (c *Container) setBitmapCopy(bitmap []uint64) {\n\tvar bitmapCopy [bitmapN]uint64\n\tcopy(bitmapCopy[:], bitmap)\n\tc.setBitmap(bitmapCopy[:])\n}",
"func CopyTexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, x int32, y int32, width int32, height int32) {\n\tsyscall.Syscall9(gpCopyTexSubImage2D, 8, uintptr(target), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(x), uintptr(y), uintptr(width), uintptr(height), 0)\n}",
"func CopyColorTable(target uint32, internalformat uint32, x int32, y int32, width int32) {\n C.glowCopyColorTable(gpCopyColorTable, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func (s *stencilOverdraw) copyImageAspect(ctx context.Context,\n\tcb CommandBuilder,\n\tgs *api.GlobalState,\n\tst *State,\n\ta arena.Arena,\n\tdevice VkDevice,\n\tcmdBuffer VkCommandBuffer,\n\tsrcImgDesc imageDesc,\n\tdstImgDesc imageDesc,\n\textent VkExtent3D,\n\talloc func(v ...interface{}) api.AllocResult,\n\taddCleanup func(func()),\n\tout transform.Writer,\n) {\n\tsrcImg := srcImgDesc.image\n\tdstImg := dstImgDesc.image\n\tcopyBuffer := s.createDepthCopyBuffer(ctx, cb, gs, st, a, device,\n\t\tsrcImg.Info().Fmt(),\n\t\textent.Width(), extent.Height(),\n\t\talloc, addCleanup, out)\n\n\tallCommandsStage := VkPipelineStageFlags(\n\t\tVkPipelineStageFlagBits_VK_PIPELINE_STAGE_ALL_COMMANDS_BIT)\n\tallMemoryAccess := VkAccessFlags(\n\t\tVkAccessFlagBits_VK_ACCESS_MEMORY_WRITE_BIT |\n\t\t\tVkAccessFlagBits_VK_ACCESS_MEMORY_READ_BIT)\n\n\timgBarriers0 := make([]VkImageMemoryBarrier, 2)\n\timgBarriers1 := make([]VkImageMemoryBarrier, 2)\n\t// Transition the src image in and out of the required layouts\n\timgBarriers0[0] = NewVkImageMemoryBarrier(a,\n\t\tVkStructureType_VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType\n\t\t0, // pNext\n\t\tallMemoryAccess, // srcAccessMask\n\t\tVkAccessFlags(VkAccessFlagBits_VK_ACCESS_TRANSFER_READ_BIT), // dstAccessMask\n\t\tsrcImgDesc.layout, // oldLayout\n\t\tVkImageLayout_VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // newLayout\n\t\t^uint32(0), // srcQueueFamilyIndex: VK_QUEUE_FAMILY_IGNORED\n\t\t^uint32(0), // dstQueueFamilyIndex\n\t\tsrcImg.VulkanHandle(), // image\n\t\tsrcImgDesc.subresource, // subresourceRange\n\t)\n\tsrcFinalLayout := srcImgDesc.layout\n\tif srcFinalLayout == VkImageLayout_VK_IMAGE_LAYOUT_UNDEFINED ||\n\t\tsrcFinalLayout == VkImageLayout_VK_IMAGE_LAYOUT_PREINITIALIZED {\n\t\tsrcFinalLayout = VkImageLayout_VK_IMAGE_LAYOUT_GENERAL\n\t}\n\timgBarriers1[0] = NewVkImageMemoryBarrier(a,\n\t\tVkStructureType_VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType\n\t\t0, // pNext\n\t\tVkAccessFlags(VkAccessFlagBits_VK_ACCESS_TRANSFER_READ_BIT), // srcAccessMask\n\t\tallMemoryAccess, // dstAccessMask\n\t\tVkImageLayout_VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // oldLayout\n\t\tsrcFinalLayout, // newLayout\n\t\t^uint32(0), // srcQueueFamilyIndex: VK_QUEUE_FAMILY_IGNORED\n\t\t^uint32(0), // dstQueueFamilyIndex\n\t\tsrcImg.VulkanHandle(), // image\n\t\tsrcImgDesc.subresource, // subresourceRange\n\t)\n\n\t// Transition the new image in and out of its required layouts\n\timgBarriers0[1] = NewVkImageMemoryBarrier(a,\n\t\tVkStructureType_VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType\n\t\t0, // pNext\n\t\tallMemoryAccess, // srcAccessMask\n\t\tVkAccessFlags(VkAccessFlagBits_VK_ACCESS_TRANSFER_WRITE_BIT), // dstAccessMask\n\t\tdstImgDesc.layout, // oldLayout\n\t\tVkImageLayout_VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // newLayout\n\t\t^uint32(0), // srcQueueFamilyIndex: VK_QUEUE_FAMILY_IGNORED\n\t\t^uint32(0), // dstQueueFamilyIndex\n\t\tdstImg.VulkanHandle(), // image\n\t\tdstImgDesc.subresource, // subresourceRange\n\t)\n\n\tdstFinalLayout := dstImgDesc.layout\n\tif dstFinalLayout == VkImageLayout_VK_IMAGE_LAYOUT_UNDEFINED ||\n\t\tdstFinalLayout == VkImageLayout_VK_IMAGE_LAYOUT_PREINITIALIZED {\n\t\tdstFinalLayout = VkImageLayout_VK_IMAGE_LAYOUT_GENERAL\n\t}\n\timgBarriers1[1] = NewVkImageMemoryBarrier(a,\n\t\tVkStructureType_VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType\n\t\t0, // pNext\n\t\tVkAccessFlags(VkAccessFlagBits_VK_ACCESS_TRANSFER_WRITE_BIT), // srcAccessMask\n\t\tallMemoryAccess, // 
dstAccessMask\n\t\tVkImageLayout_VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // oldLayout\n\t\tdstFinalLayout, // newLayout\n\t\t^uint32(0), // srcQueueFamilyIndex: VK_QUEUE_FAMILY_IGNORED\n\t\t^uint32(0), // dstQueueFamilyIndex\n\t\tdstImg.VulkanHandle(), // image\n\t\tdstImgDesc.subresource, // subresourceRange\n\t)\n\n\tbufBarrier := NewVkBufferMemoryBarrier(a,\n\t\tVkStructureType_VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // sType\n\t\t0, // pNext\n\t\tVkAccessFlags(VkAccessFlagBits_VK_ACCESS_TRANSFER_WRITE_BIT), // srcAccessMask\n\t\tVkAccessFlags(VkAccessFlagBits_VK_ACCESS_TRANSFER_READ_BIT), // dstAccessMask\n\t\t^uint32(0), // srcQueueFamilyIndex: VK_QUEUE_FAMILY_IGNORED\n\t\t^uint32(0), // dstQueueFamilyIndex\n\t\tcopyBuffer, // buffer\n\t\t0, // offset\n\t\t^VkDeviceSize(0), // size: VK_WHOLE_SIZE\n\t)\n\n\tibCopy := NewVkBufferImageCopy(a,\n\t\t0, // bufferOffset\n\t\t0, // bufferRowLength\n\t\t0, // bufferImageHeight\n\t\tNewVkImageSubresourceLayers(a,\n\t\t\tVkImageAspectFlags(srcImgDesc.aspect), // aspectMask\n\t\t\tsrcImgDesc.subresource.BaseMipLevel(), // mipLevel\n\t\t\tsrcImgDesc.subresource.BaseArrayLayer(), // baseArrayLayer\n\t\t\t1, // layerCount\n\t\t), // srcSubresource\n\t\tNewVkOffset3D(a, 0, 0, 0), // offset\n\t\tNewVkExtent3D(a, extent.Width(), extent.Height(), 1), // extent\n\t)\n\n\tbiCopy := NewVkBufferImageCopy(a,\n\t\t0, // bufferOffset\n\t\t0, // bufferRowLength\n\t\t0, // bufferImageHeight\n\t\tNewVkImageSubresourceLayers(a,\n\t\t\tVkImageAspectFlags(dstImgDesc.aspect), // aspectMask\n\t\t\tdstImgDesc.subresource.BaseMipLevel(), // mipLevel\n\t\t\tdstImgDesc.subresource.BaseArrayLayer(), // baseArrayLayer\n\t\t\t1, // layerCount\n\t\t), // srcSubresource\n\t\tNewVkOffset3D(a, 0, 0, 0), // offset\n\t\tNewVkExtent3D(a, extent.Width(), extent.Height(), 1), // extent\n\t)\n\n\timgBarriers0Data := alloc(imgBarriers0)\n\tibCopyData := alloc(ibCopy)\n\tbufBarrierData := alloc(bufBarrier)\n\tbiCopyData := alloc(biCopy)\n\timgBarriers1Data := alloc(imgBarriers1)\n\n\twriteEach(ctx, out,\n\t\tcb.VkCmdPipelineBarrier(cmdBuffer,\n\t\t\tallCommandsStage, // srcStageMask\n\t\t\tVkPipelineStageFlags(VkPipelineStageFlagBits_VK_PIPELINE_STAGE_TRANSFER_BIT), // dstStageMask\n\t\t\t0, // dependencyFlags\n\t\t\t0, // memoryBarrierCount\n\t\t\tmemory.Nullptr, // pMemoryBarriers\n\t\t\t0, // bufferMemoryBarrierCount\n\t\t\tmemory.Nullptr, // pBufferMemoryBarriers\n\t\t\t2, // imageMemoryBarrierCount\n\t\t\timgBarriers0Data.Ptr(), // pImageMemoryBarriers\n\t\t).AddRead(imgBarriers0Data.Data()),\n\t\tcb.VkCmdCopyImageToBuffer(cmdBuffer,\n\t\t\tsrcImg.VulkanHandle(), // srcImage\n\t\t\tVkImageLayout_VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // srcImageLayout\n\t\t\tcopyBuffer, // dstBuffer\n\t\t\t1, // regionCount\n\t\t\tibCopyData.Ptr(), // pRegions\n\t\t).AddRead(ibCopyData.Data()),\n\t\tcb.VkCmdPipelineBarrier(cmdBuffer,\n\t\t\tVkPipelineStageFlags(VkPipelineStageFlagBits_VK_PIPELINE_STAGE_TRANSFER_BIT), // srcStageMask\n\t\t\tVkPipelineStageFlags(VkPipelineStageFlagBits_VK_PIPELINE_STAGE_TRANSFER_BIT), // dstStageMask\n\t\t\t0, // dependencyFlags\n\t\t\t0, // memoryBarrierCount\n\t\t\tmemory.Nullptr, // pMemoryBarriers\n\t\t\t1, // bufferMemoryBarrierCount\n\t\t\tbufBarrierData.Ptr(), // pBufferMemoryBarriers\n\t\t\t0, // imageMemoryBarrierCount\n\t\t\tmemory.Nullptr, // pImageMemoryBarriers\n\t\t).AddRead(bufBarrierData.Data()),\n\t\tcb.VkCmdCopyBufferToImage(cmdBuffer,\n\t\t\tcopyBuffer, // srcBuffer\n\t\t\tdstImg.VulkanHandle(), // 
dstImage\n\t\t\tVkImageLayout_VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // dstImageLayout\n\t\t\t1, // regionCount\n\t\t\tbiCopyData.Ptr(), // pRegions\n\t\t).AddRead(biCopyData.Data()),\n\t\tcb.VkCmdPipelineBarrier(cmdBuffer,\n\t\t\tVkPipelineStageFlags(VkPipelineStageFlagBits_VK_PIPELINE_STAGE_TRANSFER_BIT), // srcStageMask\n\t\t\tallCommandsStage, // dstStageMask\n\t\t\t0, // dependencyFlags\n\t\t\t0, // memoryBarrierCount\n\t\t\tmemory.Nullptr, // pMemoryBarriers\n\t\t\t0, // bufferMemoryBarrierCount\n\t\t\tmemory.Nullptr, // pBufferMemoryBarriers\n\t\t\t2, // imageMemoryBarrierCount\n\t\t\timgBarriers1Data.Ptr(), // pImageMemoryBarriers\n\t\t).AddRead(imgBarriers1Data.Data()),\n\t)\n}",
"func CopyTexSubImage1D(target uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n C.glowCopyTexSubImage1D(gpCopyTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func (w *windowImpl) Copy(dp image.Point, src screen.Texture, sr image.Rectangle, op draw.Op, opts *screen.DrawOptions) {\n\tpanic(\"not implemented\") // TODO: Implement\n}",
"func CopyColorSubTable(target uint32, start int32, x int32, y int32, width int32) {\n C.glowCopyColorSubTable(gpCopyColorSubTable, (C.GLenum)(target), (C.GLsizei)(start), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func CopyTexSubImage2D(target Enum, level Int, xoffset Int, yoffset Int, x Int, y Int, width Sizei, height Sizei) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tclevel, _ := (C.GLint)(level), cgoAllocsUnknown\n\tcxoffset, _ := (C.GLint)(xoffset), cgoAllocsUnknown\n\tcyoffset, _ := (C.GLint)(yoffset), cgoAllocsUnknown\n\tcx, _ := (C.GLint)(x), cgoAllocsUnknown\n\tcy, _ := (C.GLint)(y), cgoAllocsUnknown\n\tcwidth, _ := (C.GLsizei)(width), cgoAllocsUnknown\n\tcheight, _ := (C.GLsizei)(height), cgoAllocsUnknown\n\tC.glCopyTexSubImage2D(ctarget, clevel, cxoffset, cyoffset, cx, cy, cwidth, cheight)\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n C.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))\n}",
"func Copy(i ImageIr) ImageIr {\n\tc := make([][][]uint32, len(i.pixels))\n\tcopy(c, i.pixels)\n\treturn ImageIr{i.width, i.height, c}\n}",
"func CopyBufferSubData(readTarget uint32, writeTarget uint32, readOffset int, writeOffset int, size int) {\n\tsyscall.Syscall6(gpCopyBufferSubData, 5, uintptr(readTarget), uintptr(writeTarget), uintptr(readOffset), uintptr(writeOffset), uintptr(size), 0)\n}",
"func (t *translator) copyBytes(\n\tirBlock *ir.Block, irPtr, irLen irvalue.Value,\n) irvalue.Value {\n\tirNewPtr := irBlock.NewCall(t.builtins.Malloc(t), irLen)\n\tirBlock.NewCall(t.builtins.Memcpy(t), irNewPtr, irPtr, irLen, irconstant.False)\n\treturn irNewPtr\n}",
"func CopyTexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, x int32, y int32, width int32, height int32) {\n\tC.glowCopyTexSubImage2D(gpCopyTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func CopyTexSubImage2D(target uint32, level int32, xoffset int32, yoffset int32, x int32, y int32, width int32, height int32) {\n\tC.glowCopyTexSubImage2D(gpCopyTexSubImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func (fr *Frame) CreateCopy() *Frame {\n\tframe := new(Frame)\n\tframe.Pix = make([][]uint16, len(fr.Pix))\n\tfor i := range fr.Pix {\n\t\tframe.Pix[i] = make([]uint16, len(fr.Pix[i]))\n\t}\n\tframe.Copy(fr)\n\treturn frame\n}",
"func CopyTexImage2D(target uint32, level int32, internalformat uint32, x int32, y int32, width int32, height int32, border int32) {\n\tC.glowCopyTexImage2D(gpCopyTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border))\n}",
"func CopyTexImage2D(target uint32, level int32, internalformat uint32, x int32, y int32, width int32, height int32, border int32) {\n\tC.glowCopyTexImage2D(gpCopyTexImage2D, (C.GLenum)(target), (C.GLint)(level), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height), (C.GLint)(border))\n}",
"func CopyImageSubData(srcName uint32, srcTarget uint32, srcLevel int32, srcX int32, srcY int32, srcZ int32, dstName uint32, dstTarget uint32, dstLevel int32, dstX int32, dstY int32, dstZ int32, srcWidth int32, srcHeight int32, srcDepth int32) {\n C.glowCopyImageSubData(gpCopyImageSubData, (C.GLuint)(srcName), (C.GLenum)(srcTarget), (C.GLint)(srcLevel), (C.GLint)(srcX), (C.GLint)(srcY), (C.GLint)(srcZ), (C.GLuint)(dstName), (C.GLenum)(dstTarget), (C.GLint)(dstLevel), (C.GLint)(dstX), (C.GLint)(dstY), (C.GLint)(dstZ), (C.GLsizei)(srcWidth), (C.GLsizei)(srcHeight), (C.GLsizei)(srcDepth))\n}",
"func (self *TileSprite) SetCanvasBufferA(member *PIXICanvasBuffer) {\n self.Object.Set(\"canvasBuffer\", member)\n}",
"func (i *Image) readPixelsFromGPU() error {\n\tvar err error\n\ti.basePixels, err = i.image.Pixels()\n\tif err != nil {\n\t\treturn err\n\t}\n\ti.drawImageHistory = nil\n\ti.stale = false\n\treturn nil\n}",
"func BufferSubData(target uint32, offset int, size int, data unsafe.Pointer) {\n C.glowBufferSubData(gpBufferSubData, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(size), data)\n}",
"func newScreenFramebuffer(context *context, width, height int) *framebuffer {\n\treturn &framebuffer{\n\t\tnative: context.getScreenFramebuffer(),\n\t\twidth: width,\n\t\theight: height,\n\t}\n}",
"func newBitmapFrom(other *bitmap, size int) *bitmap {\n\tbitmap := newBitmap(size)\n\n\tif size > other.Size {\n\t\tsize = other.Size\n\t}\n\n\tdiv := size / 8\n\n\tfor i := 0; i < div; i++ {\n\t\tbitmap.data[i] = other.data[i]\n\t}\n\n\tfor i := div * 8; i < size; i++ {\n\t\tif other.Bit(i) == 1 {\n\t\t\tbitmap.Set(i)\n\t\t}\n\t}\n\n\treturn bitmap\n}",
"func InvalidateSubFramebuffer(target uint32, numAttachments int32, attachments *uint32, x int32, y int32, width int32, height int32) {\n C.glowInvalidateSubFramebuffer(gpInvalidateSubFramebuffer, (C.GLenum)(target), (C.GLsizei)(numAttachments), (*C.GLenum)(unsafe.Pointer(attachments)), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func (b *Buffer) Dump() {\n\ts := *b.screen\n\traster := canvas.NewRasterFromImage(b.context.Image())\n\ts.SetContent(raster)\n}",
"func (i *ImageBuf) SetFull(xbegin, xend, ybegin, yend, zbegin, zend int) {\n\tC.ImageBuf_set_full(\n\t\ti.ptr,\n\t\tC.int(xbegin), C.int(xend),\n\t\tC.int(ybegin), C.int(yend),\n\t\tC.int(zbegin), C.int(zend))\n\truntime.KeepAlive(i)\n}",
"func (s *Surface) Copy() *Surface {\n\tr := s.Rect()\n\tcopy := NewSurface(int(r.W), int(r.H))\n\tcopy.Blit(s, 0, 0)\n\treturn copy\n}",
"func (i *ImageBuf) CopyPixels(src *ImageBuf) error {\n\tok := bool(C.ImageBuf_copy_pixels(i.ptr, src.ptr))\n\truntime.KeepAlive(i)\n\truntime.KeepAlive(src)\n\tif !ok {\n\t\treturn i.LastError()\n\t}\n\treturn nil\n}",
"func CopyTexImage2D(target Enum, level Int, internalformat Enum, x Int, y Int, width Sizei, height Sizei, border Int) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tclevel, _ := (C.GLint)(level), cgoAllocsUnknown\n\tcinternalformat, _ := (C.GLenum)(internalformat), cgoAllocsUnknown\n\tcx, _ := (C.GLint)(x), cgoAllocsUnknown\n\tcy, _ := (C.GLint)(y), cgoAllocsUnknown\n\tcwidth, _ := (C.GLsizei)(width), cgoAllocsUnknown\n\tcheight, _ := (C.GLsizei)(height), cgoAllocsUnknown\n\tcborder, _ := (C.GLint)(border), cgoAllocsUnknown\n\tC.glCopyTexImage2D(ctarget, clevel, cinternalformat, cx, cy, cwidth, cheight, cborder)\n}",
"func (self *Rectangle) CopyTo(source interface{}) interface{}{\n return self.Object.Call(\"copyTo\", source)\n}",
"func copyStreamToDMABuf(w gpiostream.Stream, dst []uint32) error {\n\tswitch v := w.(type) {\n\tcase *gpiostream.BitStream:\n\t\tif v.LSBF {\n\t\t\treturn errors.New(\"TODO(simokawa): handle BitStream.LSBF\")\n\t\t}\n\t\t// This is big-endian and MSB first.\n\t\ti := 0\n\t\tfor ; i < len(v.Bits)/4; i++ {\n\t\t\tdst[i] = binary.BigEndian.Uint32(v.Bits[i*4:])\n\t\t}\n\t\tlast := uint32(0)\n\t\tif mod := len(v.Bits) % 4; mod > 0 {\n\t\t\tfor j := 0; j < mod; j++ {\n\t\t\t\tlast |= (uint32(v.Bits[i*4+j])) << uint32(8*(3-j))\n\t\t\t}\n\t\t\tdst[i] = last\n\t\t}\n\t\treturn nil\n\tcase *gpiostream.EdgeStream:\n\t\treturn errors.New(\"TODO(simokawa): handle EdgeStream\")\n\tdefault:\n\t\treturn errors.New(\"unsupported Stream type\")\n\t}\n}",
"func CopyTexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, x int32, y int32, width int32, height int32) {\n C.glowCopyTexSubImage3D(gpCopyTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func setPixel(x, y int, c color, pixels []byte) {\n\tindex := (y*windowWidth + x) * 4\n\n\tif index < len(pixels)-4 && index >= 0 {\n\t\tpixels[index] = c.r\n\t\tpixels[index+1] = c.g\n\t\tpixels[index+1] = c.b\n\t}\n}",
"func SwitchData(x, y gb.UINT8, src, dst []gb.UINT8) {}",
"func CopyTexSubImage2D(target GLEnum, level, xoffset, yoffset, x, y, width, height int32) {\n\tgl.CopyTexSubImage2D(uint32(target), level, xoffset, yoffset, x, y, width, height)\n}",
"func MapBufferRange(target uint32, offset int, length int, access uint32) unsafe.Pointer {\n ret := C.glowMapBufferRange(gpMapBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length), (C.GLbitfield)(access))\n return (unsafe.Pointer)(ret)\n}",
"func initFramebuffer(width, height int) {\n\tlog.Printf(\"[Video]: Initializing HW render (%v x %v).\\n\", width, height)\n\n\tgl.GenFramebuffers(1, &fboID)\n\tgl.BindFramebuffer(gl.FRAMEBUFFER, fboID)\n\n\t//gl.GenTextures(1, &video.texID)\n\tgl.BindTexture(gl.TEXTURE_2D, texID)\n\tgl.TexStorage2D(gl.TEXTURE_2D, 1, gl.RGBA8, int32(width), int32(height))\n\n\tgl.FramebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texID, 0)\n\n\thw := state.Global.Core.HWRenderCallback\n\n\tgl.BindRenderbuffer(gl.RENDERBUFFER, 0)\n\n\tif gl.CheckFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE {\n\t\tlog.Fatalln(\"[Video] Framebuffer is not complete.\")\n\t}\n\n\tgl.ClearColor(0, 0, 0, 1)\n\tif hw.Depth && hw.Stencil {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT | gl.STENCIL_BUFFER_BIT)\n\t} else if hw.Depth {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)\n\t} else {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT)\n\t}\n\n\tgl.BindFramebuffer(gl.FRAMEBUFFER, 0)\n}",
"func CopyTexSubImage2D(target Enum, level, xoffset, yoffset, x, y, width, height int) {\n\tgl.CopyTexSubImage2D(uint32(target), int32(level), int32(xoffset), int32(yoffset), int32(x), int32(y), int32(width), int32(height))\n}",
"func CopyColorTable(target uint32, internalformat uint32, x int32, y int32, width int32) {\n\tC.glowCopyColorTable(gpCopyColorTable, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func putPixel(screen []byte, color color, x int, y int) {\n\tscreenX := (windowWidth / 2) + x\n\tscreenY := (windowHeight / 2) - y - 1\n\tbase := (screenY*windowWidth + screenX) * 4\n\tscreen[base] = color.r\n\tscreen[base+1] = color.g\n\tscreen[base+2] = color.b\n\tscreen[base+3] = 0xFF\n\tscreen[0] = 0xFF\n}",
"func CopyMem(source uint64, dest uint64, size uint64)",
"func (debugging *debuggingOpenGL) BindFramebuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindFramebuffer\", target, buffer)\n\tdebugging.gl.BindFramebuffer(target, buffer)\n\tdebugging.recordExit(\"BindFramebuffer\")\n}",
"func BindBufferRange(target uint32, index uint32, buffer uint32, offset int, size int) {\n C.glowBindBufferRange(gpBindBufferRange, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size))\n}",
"func CopyTexImage2D(target GLEnum, level int32, internalformat GLEnum, x, y, width, height, border int32) {\n\tgl.CopyTexImage2D(uint32(target), level, uint32(internalformat), x, y, width, height, border)\n}",
"func FlushMappedBufferRange(target uint32, offset int, length int) {\n C.glowFlushMappedBufferRange(gpFlushMappedBufferRange, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func MapBuffer(target uint32, access uint32) unsafe.Pointer {\n ret := C.glowMapBuffer(gpMapBuffer, (C.GLenum)(target), (C.GLenum)(access))\n return (unsafe.Pointer)(ret)\n}",
"func (b *Board) copy() *Board {\n\tnewBoard := &Board{\n\t\tPlayer1: make([]Square, len(b.Player1)),\n\t\tPlayer2: make([]Square, len(b.Player2)),\n\t\tresult: b.result,\n\t}\n\tfor i := range b.Player1 {\n\t\tnewBoard.Player1[i] = b.Player1[i]\n\t}\n\tfor i := range b.Player2 {\n\t\tnewBoard.Player2[i] = b.Player2[i]\n\t}\n\treturn newBoard\n}",
"func CopyTexImage2D(target Enum, level int, internalformat Enum, x, y, width, height, border int) {\n\tgl.CopyTexImage2D(uint32(target), int32(level), uint32(internalformat), int32(x), int32(y), int32(width), int32(height), int32(border))\n}",
"func CopyColorSubTable(target uint32, start int32, x int32, y int32, width int32) {\n\tC.glowCopyColorSubTable(gpCopyColorSubTable, (C.GLenum)(target), (C.GLsizei)(start), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func CopyTexSubImage1D(target uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tsyscall.Syscall6(gpCopyTexSubImage1D, 6, uintptr(target), uintptr(level), uintptr(xoffset), uintptr(x), uintptr(y), uintptr(width))\n}",
"func (b *Buffer) CopyDataFrom(src *Buffer, srcOffset, dstOffset, size int) error {\n\tif size == 0 {\n\t\treturn nil\n\t}\n\n\terrCode := cl.EnqueueCopyBuffer(\n\t\tb.device.cmdQueue,\n\t\tsrc.bufHandle,\n\t\tb.bufHandle,\n\t\tuint64(srcOffset),\n\t\tuint64(dstOffset),\n\t\tuint64(size),\n\t\t0,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif errCode != cl.SUCCESS {\n\t\treturn fmt.Errorf(\"opencl device(%s): error copying device data from buffer %s to buffer %s (errCode %d)\", b.device.Name, src.name, b.name, errCode)\n\t}\n\treturn nil\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n\tsyscall.Syscall(gpBindFramebuffer, 2, uintptr(target), uintptr(framebuffer), 0)\n}",
"func CopyTexSubImage1D(target uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tC.glowCopyTexSubImage1D(gpCopyTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func CopyTexSubImage1D(target uint32, level int32, xoffset int32, x int32, y int32, width int32) {\n\tC.glowCopyTexSubImage1D(gpCopyTexSubImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n\tC.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))\n}",
"func BindFramebuffer(target uint32, framebuffer uint32) {\n\tC.glowBindFramebuffer(gpBindFramebuffer, (C.GLenum)(target), (C.GLuint)(framebuffer))\n}",
"func CopyTexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, x int32, y int32, width int32, height int32) {\n\tsyscall.Syscall9(gpCopyTexSubImage3D, 9, uintptr(target), uintptr(level), uintptr(xoffset), uintptr(yoffset), uintptr(zoffset), uintptr(x), uintptr(y), uintptr(width), uintptr(height))\n}",
"func MemCopy(dst unsafe.Pointer, src unsafe.Pointer, bytes int) {\n\tfor i := 0; i < bytes; i++ {\n\t\t*(*uint8)(MemAccess(dst, i)) = *(*uint8)(MemAccess(src, i))\n\t}\n}",
"func block(h *[4][16]uint32, base uintptr, offsets *[16]uint32, mask uint16)",
"func CopyConvolutionFilter2D(target uint32, internalformat uint32, x int32, y int32, width int32, height int32) {\n C.glowCopyConvolutionFilter2D(gpCopyConvolutionFilter2D, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func (b *XMobileBackend) PutImageData(img *image.RGBA, x, y int) {\n\tb.activate()\n\n\tb.glctx.ActiveTexture(gl.TEXTURE0)\n\tif b.imageBufTex.Value == 0 {\n\t\tb.imageBufTex = b.glctx.CreateTexture()\n\t\tb.glctx.BindTexture(gl.TEXTURE_2D, b.imageBufTex)\n\t\tb.glctx.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR)\n\t\tb.glctx.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR)\n\t\tb.glctx.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)\n\t\tb.glctx.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)\n\t} else {\n\t\tb.glctx.BindTexture(gl.TEXTURE_2D, b.imageBufTex)\n\t}\n\n\tw, h := img.Bounds().Dx(), img.Bounds().Dy()\n\n\tif img.Stride == img.Bounds().Dx()*4 {\n\t\tb.glctx.TexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, w, h, gl.RGBA, gl.UNSIGNED_BYTE, img.Pix[0:])\n\t} else {\n\t\tdata := make([]uint8, 0, w*h*4)\n\t\tfor cy := 0; cy < h; cy++ {\n\t\t\tstart := cy * img.Stride\n\t\t\tend := start + w*4\n\t\t\tdata = append(data, img.Pix[start:end]...)\n\t\t}\n\t\tb.glctx.TexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, w, h, gl.RGBA, gl.UNSIGNED_BYTE, data[0:])\n\t}\n\n\tdx, dy := float32(x), float32(y)\n\tdw, dh := float32(w), float32(h)\n\n\tb.glctx.BindBuffer(gl.ARRAY_BUFFER, b.buf)\n\tdata := [16]float32{dx, dy, dx + dw, dy, dx + dw, dy + dh, dx, dy + dh,\n\t\t0, 0, 1, 0, 1, 1, 0, 1}\n\tb.glctx.BufferData(gl.ARRAY_BUFFER, byteSlice(unsafe.Pointer(&data[0]), len(data)*4), gl.STREAM_DRAW)\n\n\tb.glctx.UseProgram(b.shd.ID)\n\tb.glctx.Uniform1i(b.shd.Image, 0)\n\tb.glctx.Uniform2f(b.shd.CanvasSize, float32(b.fw), float32(b.fh))\n\tb.glctx.UniformMatrix3fv(b.shd.Matrix, mat3identity[:])\n\tb.glctx.Uniform1f(b.shd.GlobalAlpha, 1)\n\tb.glctx.Uniform1i(b.shd.UseAlphaTex, 0)\n\tb.glctx.Uniform1i(b.shd.Func, shdFuncImage)\n\tb.glctx.VertexAttribPointer(b.shd.Vertex, 2, gl.FLOAT, false, 0, 0)\n\tb.glctx.VertexAttribPointer(b.shd.TexCoord, 2, gl.FLOAT, false, 0, 8*4)\n\tb.glctx.EnableVertexAttribArray(b.shd.Vertex)\n\tb.glctx.EnableVertexAttribArray(b.shd.TexCoord)\n\tb.glctx.DrawArrays(gl.TRIANGLE_FAN, 0, 4)\n\tb.glctx.DisableVertexAttribArray(b.shd.Vertex)\n\tb.glctx.DisableVertexAttribArray(b.shd.TexCoord)\n}",
"func BenchmarkMemcopy2(b *testing.B) {\n\tlen := 1920 * 1080 * 3 / 2\n\tdst := make([]byte, len, len)\n\tb.ResetTimer()\n\tfor i := 0; i < b.N; i++ {\n\t\tMemset(dst, 100)\n\t}\n}",
"func InvalidateSubFramebuffer(target uint32, numAttachments int32, attachments *uint32, x int32, y int32, width int32, height int32) {\n\tsyscall.Syscall9(gpInvalidateSubFramebuffer, 7, uintptr(target), uintptr(numAttachments), uintptr(unsafe.Pointer(attachments)), uintptr(x), uintptr(y), uintptr(width), uintptr(height), 0, 0)\n}",
"func MergeScreenAndBlock(ScreenEmpty [][]int,block [][]int, locate []int)([][]int){\n\tBlockInScreen := CopyNewBlock(ScreenEmpty)\n\tfor row := 0; row<len(block); row++{\n\t\tfor colum :=0; colum<len(block[row]); colum++{\n\t\t\tif block[row][colum] != 0{\n\t\t\t\tnum := block[row][colum]\n\t\t\t\tBlockInScreen[row + locate[0]][colum + locate[1]] = num\n\t\t\t}\n\t\t}\n\t}\n\treturn BlockInScreen\n}",
"func clone(dst, src *image.Gray) {\n\tif dst.Stride == src.Stride {\n\t\t// no need to correct stride, simply copy pixels.\n\t\tcopy(dst.Pix, src.Pix)\n\t\treturn\n\t}\n\t// need to correct stride.\n\tfor i := 0; i < src.Rect.Dy(); i++ {\n\t\tdstH := i * dst.Stride\n\t\tsrcH := i * src.Stride\n\t\tcopy(dst.Pix[dstH:dstH+dst.Stride], src.Pix[srcH:srcH+dst.Stride])\n\t}\n}",
"func (self *GameObjectCreator) BitmapData2O(width int, height int) *BitmapData{\n return &BitmapData{self.Object.Call(\"bitmapData\", width, height)}\n}",
"func (r *Resources) Copy(other *Resources) {\n\tr.CPU = other.CPU\n\tr.DISK = other.DISK\n\tr.MEMORY = other.MEMORY\n\tr.GPU = other.GPU\n}",
"func CopyTexImage1D(target uint32, level int32, internalformat uint32, x int32, y int32, width int32, border int32) {\n C.glowCopyTexImage1D(gpCopyTexImage1D, (C.GLenum)(target), (C.GLint)(level), (C.GLenum)(internalformat), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLint)(border))\n}",
"func (c *Canvas) SetPixels(pixels []uint8) {\n\tc.gf.Dirty()\n\n\tmainthread.Call(func() {\n\t\ttex := c.Texture()\n\t\ttex.Begin()\n\t\ttex.SetPixels(0, 0, tex.Width(), tex.Height(), pixels)\n\t\ttex.End()\n\t})\n}",
"func (d *Display) resetBuffer() {\n\td.width = d.device.Width()\n\td.height = d.device.Height()\n\td.buffer = make([][]byte, d.height)\n\tfor y := range d.buffer {\n\t\td.buffer[y] = make([]byte, d.width)\n\t}\n}",
"func copyTo(dst []byte, src []byte, offset int) {\n\tfor j, k := range src {\n\t\tdst[offset+j] = k\n\t}\n}",
"func (native *OpenGL) BindFramebuffer(target, buffer uint32) {\n\tgl.BindFramebuffer(target, buffer)\n}",
"func BenchmarkMemcopy(b *testing.B) {\n\tlen := 1920 * 1080 * 3 / 2\n\tdst := make([]byte, len, len)\n\tb.ResetTimer()\n\tfor i := 0; i < b.N; i++ {\n\t\tfor j := 0; j < len; j++ {\n\t\t\tdst[i] = 100\n\t\t}\n\t}\n}",
"func (i *ImageBuf) Copy(src *ImageBuf) error {\n\tok := bool(C.ImageBuf_copy(i.ptr, src.ptr))\n\truntime.KeepAlive(i)\n\truntime.KeepAlive(src)\n\tif !ok {\n\t\treturn i.LastError()\n\t}\n\treturn nil\n}",
"func CopyTexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, x int32, y int32, width int32, height int32) {\n\tC.glowCopyTexSubImage3D(gpCopyTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func CopyTexSubImage3D(target uint32, level int32, xoffset int32, yoffset int32, zoffset int32, x int32, y int32, width int32, height int32) {\n\tC.glowCopyTexSubImage3D(gpCopyTexSubImage3D, (C.GLenum)(target), (C.GLint)(level), (C.GLint)(xoffset), (C.GLint)(yoffset), (C.GLint)(zoffset), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func GetBufferSubData(target uint32, offset int, size int, data unsafe.Pointer) {\n C.glowGetBufferSubData(gpGetBufferSubData, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(size), data)\n}",
"func (r *AMD64Registers) Copy() (proc.Registers, error) {\n\tvar rr AMD64Registers\n\trr = *r\n\trr.Context = NewCONTEXT()\n\t*(rr.Context) = *(r.Context)\n\trr.fltSave = &rr.Context.FltSave\n\treturn &rr, nil\n}",
"func CopyObject(srcData []byte, dst interface{}) {\n\tjsoniter.Unmarshal(srcData, dst)\n\t//var dstData, err = jsoniter.Marshal(dst)\n\t//if err != nil {\n\t//\tpanic(err)\n\t//}\n\t//fmt.Println(\"overlay:\", string(dstData))\n}",
"func InvalidateSubFramebuffer(target uint32, numAttachments int32, attachments *uint32, x int32, y int32, width int32, height int32) {\n\tC.glowInvalidateSubFramebuffer(gpInvalidateSubFramebuffer, (C.GLenum)(target), (C.GLsizei)(numAttachments), (*C.GLenum)(unsafe.Pointer(attachments)), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func InvalidateSubFramebuffer(target uint32, numAttachments int32, attachments *uint32, x int32, y int32, width int32, height int32) {\n\tC.glowInvalidateSubFramebuffer(gpInvalidateSubFramebuffer, (C.GLenum)(target), (C.GLsizei)(numAttachments), (*C.GLenum)(unsafe.Pointer(attachments)), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width), (C.GLsizei)(height))\n}"
] | [
"0.6169083",
"0.616298",
"0.6043505",
"0.588186",
"0.5827948",
"0.5775845",
"0.57539123",
"0.56394136",
"0.5623513",
"0.5591331",
"0.5591331",
"0.55884236",
"0.5585984",
"0.55723196",
"0.55363816",
"0.55363816",
"0.5499584",
"0.5483157",
"0.5401393",
"0.53606397",
"0.5356079",
"0.53392625",
"0.5312277",
"0.53119516",
"0.5290776",
"0.52842194",
"0.52749354",
"0.5273612",
"0.52571774",
"0.52494305",
"0.52335817",
"0.52217627",
"0.51967335",
"0.51967335",
"0.5186299",
"0.5185594",
"0.5185594",
"0.5183015",
"0.5162797",
"0.51396525",
"0.51302737",
"0.51235056",
"0.51215506",
"0.51156706",
"0.51048756",
"0.50935775",
"0.5087091",
"0.50754607",
"0.50691175",
"0.50458485",
"0.5035867",
"0.50090134",
"0.49965268",
"0.4990242",
"0.4978887",
"0.49746788",
"0.49693742",
"0.4959271",
"0.49564907",
"0.4944469",
"0.49425447",
"0.49069095",
"0.4903367",
"0.48993397",
"0.48972106",
"0.48761544",
"0.48715332",
"0.48667246",
"0.4866454",
"0.48659796",
"0.4854614",
"0.4847783",
"0.4839036",
"0.4839036",
"0.48365155",
"0.48365155",
"0.48353577",
"0.48337394",
"0.4830597",
"0.48297188",
"0.4822105",
"0.48209178",
"0.4816384",
"0.4801755",
"0.4795212",
"0.47920096",
"0.47900355",
"0.4787578",
"0.47794506",
"0.47652942",
"0.47606102",
"0.47570002",
"0.47551826",
"0.4750105",
"0.4749547",
"0.4749547",
"0.4746979",
"0.47443467",
"0.4729474",
"0.47264436",
"0.47264436"
] | 0.0 | -1 |
creates and initializes a buffer object's data store | func BufferData(target uint32, size int, data unsafe.Pointer, usage uint32) {
C.glowBufferData(gpBufferData, (C.GLenum)(target), (C.GLsizeiptr)(size), data, (C.GLenum)(usage))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func newBuffer() *buffer {\n\treturn &buffer{\n\t\tdata: make([]byte, 0),\n\t\tlen: 0,\n\t\tpkg: nil,\n\t\tconn: nil,\n\t\tpkgCh: make(chan *pkg),\n\t\tevCh: make(chan *pkg),\n\t\terrCh: make(chan error, 1),\n\t}\n}",
"func newBuffer(buf []byte) *Buffer {\n\treturn &Buffer{data: buf}\n}",
"func newDatabaseBuffer() databaseBuffer {\n\tb := &dbBuffer{\n\t\tbucketsMap: make(map[xtime.UnixNano]*BufferBucketVersions),\n\t\tinOrderBlockStarts: make([]xtime.UnixNano, 0, bucketsCacheSize),\n\t}\n\treturn b\n}",
"func newBuffer(r io.Reader, offset int64) *buffer {\n\treturn &buffer{\n\t\tr: r,\n\t\toffset: offset,\n\t\tbuf: make([]byte, 0, 4096),\n\t\tallowObjptr: true,\n\t\tallowStream: true,\n\t}\n}",
"func new_buffer(conn *websocket.Conn, ctrl chan struct{}, txqueuelen int) *Buffer {\n\tbuf := Buffer{conn: conn}\n\tbuf.pending = make(chan []byte, txqueuelen)\n\tbuf.ctrl = ctrl\n\tbuf.cache = make([]byte, packet.PACKET_LIMIT+2)\n\treturn &buf\n}",
"func (b *BatchBuffer) Init() {}",
"func newBuffer(b []byte) *buffer {\n\treturn &buffer{proto.NewBuffer(b), 0}\n}",
"func newBuffer(e []byte) *Buffer {\n\tp := buffer_pool.Get().(*Buffer)\n\tp.buf = e\n\treturn p\n}",
"func BufferInit(target Enum, size int, usage Enum) {\n\tgl.BufferData(uint32(target), size, nil, uint32(usage))\n}",
"func newBuffer() Buffer {\n\treturn &buffer{\n\t\tbytes: make([]byte, 0, 64),\n\t}\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n NewOrderModel(buffer),\n NewBalanceModel(buffer),\n NewAccountModel(buffer),\n nil,\n nil,\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyOrderFunc(func(model *OrderModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyBalanceFunc(func(model *BalanceModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyAccountFunc(func(model *AccountModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func (l *Logger) initLoggerBuffer() (err error) {\n\t// build\n\tl.LoggerBuffer = LoggerBuffer{}\n\n\t// get serial data\n\tlConfig, err := l.getSerialConfig()\n\tif err != nil {\n\t\treturn\n\t}\n\n\tport, err := serial.OpenPort(lConfig)\n\tif err != nil {\n\t\treturn\n\t}\n\tl.serialPort = *port\n\n\tgo func() {\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-l.stop:\n\t\t\t\treturn\n\t\t\tdefault:\n\t\t\t\t// get data\n\t\t\t\tbuf := make([]byte, bufSize)\n\n\t\t\t\tn, err := port.Read(buf)\n\t\t\t\ttime := time.Now().UTC()\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Print(err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\t//log.Printf(\"NEW DATA [%03d]: %02X %03d\", n, buf[:n], buf[:n]) // LOG\n\n\t\t\t\t// push to LoggerBuffer\n\t\t\t\tt := util.TimestampBuilder(time)\n\t\t\t\tdu := DataUnit{\n\t\t\t\t\tData: buf[:n],\n\t\t\t\t\tTime: &t,\n\t\t\t\t}\n\n\t\t\t\tl.DataUnit = append(l.DataUnit, &du)\n\n\t\t\t\t// feed consumers\n\t\t\t\tfor _, c := range l.consumers {\n\t\t\t\t\tc <- du\n\n\t\t\t\t\tif l.config.Debug {\n\t\t\t\t\t\tlog.Print(\"Data received: \", du.PrettyString())\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Flush every time new data is received\n\t\t\t\tl.flush()\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn\n}",
"func NewBuffer() *Buffer {\n\treturn NewBufferWithSize(initialSize)\n}",
"func NewBuffer(o *Options) (*Buffer, error) {\n\tif o == nil {\n\t\to = &Options{}\n\t}\n\tret := &Buffer{opts: *o} // copy o before normalizing it\n\n\tif err := ret.opts.normalize(); err != nil {\n\t\treturn nil, errors.Annotate(err, \"normalizing buffer.Options\").Err()\n\t}\n\n\tret.unleased.onlyID = o.FIFO\n\tret.batchItemsGuess = newMovingAverage(10, ret.opts.batchItemsGuess())\n\tret.liveLeases = map[*Batch]struct{}{}\n\tret.unAckedLeases = map[*Batch]struct{}{}\n\treturn ret, nil\n}",
"func (buf *ListBuffer) Init() {\n\tbuf.Buffer = make([]Node, defaultBufferLength)\n\tbuf.FreeHead = 0\n\tbuf.Count = 0\n\n\tfor i := 0; i < len(buf.Buffer); i++ {\n\t\tbuf.Buffer[i].Item.Clear()\n\t\tbuf.Buffer[i].Prev = BufferIndex(i - 1)\n\t\tbuf.Buffer[i].Next = BufferIndex(i + 1)\n\t}\n\n\tbuf.Buffer[0].Prev = NilIndex\n\tbuf.Buffer[len(buf.Buffer)-1].Next = NilIndex\n}",
"func newBuffer(bits uint32) buffer {\n\tvar b buffer\n\tb.data = make([]unsafe.Pointer, 1<<bits)\n\tb.free = 1 << bits\n\tb.mask = 1<<bits - 1\n\tb.bits = bits\n\treturn b\n}",
"func NewBuffer() *Buffer { return globalPool.NewBuffer() }",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n proto.NewProxyWithBuffer(buffer),\n NewStructSimpleModel(buffer),\n NewStructOptionalModel(buffer),\n NewStructNestedModel(buffer),\n NewStructBytesModel(buffer),\n NewStructArrayModel(buffer),\n NewStructVectorModel(buffer),\n NewStructListModel(buffer),\n NewStructSetModel(buffer),\n NewStructMapModel(buffer),\n NewStructHashModel(buffer),\n NewStructHashExModel(buffer),\n NewStructEmptyModel(buffer),\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyStructSimpleFunc(func(model *StructSimpleModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructOptionalFunc(func(model *StructOptionalModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructNestedFunc(func(model *StructNestedModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructBytesFunc(func(model *StructBytesModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructArrayFunc(func(model *StructArrayModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructVectorFunc(func(model *StructVectorModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructListFunc(func(model *StructListModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructSetFunc(func(model *StructSetModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructMapFunc(func(model *StructMapModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructHashFunc(func(model *StructHashModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructHashExFunc(func(model *StructHashExModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructEmptyFunc(func(model *StructEmptyModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func (_this *StreamingReadBuffer) Init(reader io.Reader, bufferSize int, minFreeBytes int) {\n\tif cap(_this.Buffer) < bufferSize {\n\t\t_this.Buffer = make([]byte, 0, bufferSize)\n\t} else {\n\t\t_this.Buffer = _this.Buffer[:0]\n\t}\n\t_this.reader = reader\n\t_this.minFreeBytes = minFreeBytes\n}",
"func New(b []byte) *Buffer {\n\treturn &Buffer{b: b}\n}",
"func NewBuffer(e []byte) *Buffer {\n\treturn &Buffer{buf: e}\n}",
"func NewBuffer(aSlice interface{}) *Buffer {\n return &Buffer{buffer: sliceValue(aSlice, false), handler: valueHandler{}}\n}",
"func NewBuffer(p producer.Producer, size int, flushInterval time.Duration, logger log.Logger) *Buffer {\n\tflush := 1 * time.Second\n\tif flushInterval != 0 {\n\t\tflush = flushInterval\n\t}\n\n\tb := &Buffer{\n\t\trecords: make([]*data.Record, 0, size),\n\t\tmu: new(sync.Mutex),\n\t\tproducer: p,\n\t\tbufferSize: size,\n\t\tlogger: logger,\n\t\tshouldFlush: make(chan bool, 1),\n\t\tflushInterval: flush,\n\t\tlastFlushed: time.Now(),\n\t}\n\n\tgo b.runFlusher()\n\n\treturn b\n}",
"func NewBuffer(capacity int) Buffer {\n\treturn Buffer{\n\t\tcapacity: capacity,\n\t\tcurrentSize: 0,\n\t\tcontents: map[entity.Key]inventoryapi.PostDeltaBody{},\n\t}\n}",
"func ringBufferInitBuffer(buflen uint32, rb *ringBuffer) {\n\tvar new_data []byte\n\tvar i uint\n\tsize := 2 + int(buflen) + int(kSlackForEightByteHashingEverywhere)\n\tif cap(rb.data_) < size {\n\t\tnew_data = make([]byte, size)\n\t} else {\n\t\tnew_data = rb.data_[:size]\n\t}\n\tif rb.data_ != nil {\n\t\tcopy(new_data, rb.data_[:2+rb.cur_size_+uint32(kSlackForEightByteHashingEverywhere)])\n\t}\n\n\trb.data_ = new_data\n\trb.cur_size_ = buflen\n\trb.buffer_ = rb.data_[2:]\n\trb.data_[1] = 0\n\trb.data_[0] = rb.data_[1]\n\tfor i = 0; i < kSlackForEightByteHashingEverywhere; i++ {\n\t\trb.buffer_[rb.cur_size_+uint32(i)] = 0\n\t}\n}",
"func NewBuffer(player *Player, conn net.Conn, ctrl chan bool) *Buffer {\r\n\tmax := DEFAULT_QUEUE_SIZE\r\n\r\n\tbuf := Buffer{conn: conn}\r\n\tbuf.pending = make(chan []byte, max)\r\n\tbuf.ctrl = ctrl\r\n\tbuf.max = max\r\n\treturn &buf\r\n}",
"func NewBuffer(e []byte) *Buffer {\n\treturn &Buffer{buf: e, length: len(e)}\n}",
"func (m *metricMysqlBufferPoolUsage) init() {\n\tm.data.SetName(\"mysql.buffer_pool.usage\")\n\tm.data.SetDescription(\"The number of bytes in the InnoDB buffer pool.\")\n\tm.data.SetUnit(\"By\")\n\tm.data.SetEmptySum()\n\tm.data.Sum().SetIsMonotonic(false)\n\tm.data.Sum().SetAggregationTemporality(pmetric.MetricAggregationTemporalityCumulative)\n\tm.data.Sum().DataPoints().EnsureCapacity(m.capacity)\n}",
"func NewBuffer() Buffer {\n\treturn &buffer{}\n}",
"func NewBuffer() *Buffer {\n\treturn &Buffer{Line: []byte{}, Val: make([]byte, 0, 32)}\n}",
"func createBuffer() *bytes.Buffer {\n\tbuf := bytes.Buffer{}\n\treturn &buf\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n NewEnumsModel(buffer),\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyEnumsFunc(func(model *EnumsModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func init() {\n\tstore = cache.NewMemoryCache()\n}",
"func init() {\n\tMemory = &memoryStorage{\n\t\ttraces: make(map[string]tracer.Trace),\n\t\tservices: make(map[string]string),\n\t\tserviceDeps: make(map[string]*tracer.Dependencies),\n\t}\n}",
"func (m *Manager) NewBuffer(conf buffer.Config) (buffer.Streamed, error) {\n\treturn nil, component.ErrInvalidType(\"buffer\", conf.Type)\n}",
"func FakeTdsBufferCtor(r io.ReadWriteCloser) io.ReadWriteCloser {\n\treturn r\n}",
"func (s *ShmPool) CreateBuffer(id *Buffer, offset int32, width int32, height int32, stride int32, format uint32) {\n sendrequest(s, \"wl_shm_pool_create_buffer\", id, offset, width, height, stride, format)\n}",
"func NewBuffer(size int) *Buffer {\n\tif size <= 0 {\n\t\treturn &Buffer{}\n\t}\n\treturn &Buffer{\n\t\tstorage: make([]byte, size),\n\t\tsize: size,\n\t}\n}",
"func NewEmptyBuffer() *Buffer {\n return &Buffer{data: make([]byte, 0)}\n}",
"func (r *Record) NewBuffer() *bytes.Buffer {\n\tif r.Buffer == nil {\n\t\treturn &bytes.Buffer{}\n\t}\n\n\treturn r.Buffer\n}",
"func (b *BadgerStore) init(dir string) error {\n\n\topts := badger.DefaultOptions(dir)\n\tif dir == \"\" {\n\t\topts = opts.WithInMemory(true)\n\t}\n\topts.Logger = &common.NoopLogger{}\n\tdb, err := badger.Open(opts)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to open database\")\n\t}\n\n\t// Set the database\n\tb.db = db\n\n\t// Initialize the default transaction that auto commits\n\t// on success ops or discards on failure.\n\t// It also enables the renewal of the underlying transaction\n\t// after executing a read/write operation\n\tb.Tx = NewTx(db, true, true)\n\n\treturn nil\n}",
"func (m *metricMysqlBufferPoolOperations) init() {\n\tm.data.SetName(\"mysql.buffer_pool.operations\")\n\tm.data.SetDescription(\"The number of operations on the InnoDB buffer pool.\")\n\tm.data.SetUnit(\"1\")\n\tm.data.SetEmptySum()\n\tm.data.Sum().SetIsMonotonic(true)\n\tm.data.Sum().SetAggregationTemporality(pmetric.MetricAggregationTemporalityCumulative)\n\tm.data.Sum().DataPoints().EnsureCapacity(m.capacity)\n}",
"func (g *GLTF) loadBuffer(bufIdx int) ([]byte, error) {\n\n\t// Check if provided buffer index is valid\n\tif bufIdx < 0 || bufIdx >= len(g.Buffers) {\n\t\treturn nil, fmt.Errorf(\"invalid buffer index\")\n\t}\n\tbufData := &g.Buffers[bufIdx]\n\t// Return cached if available\n\tif bufData.cache != nil {\n\t\tlog.Debug(\"Fetching Buffer %d (cached)\", bufIdx)\n\t\treturn bufData.cache, nil\n\t}\n\tlog.Debug(\"Loading Buffer %d\", bufIdx)\n\n\t// If buffer URI use the chunk data field\n\tif bufData.Uri == \"\" {\n\t\treturn g.data, nil\n\t}\n\n\t// Checks if buffer URI is a data URI\n\tvar data []byte\n\tvar err error\n\tif isDataURL(bufData.Uri) {\n\t\tdata, err = loadDataURL(bufData.Uri)\n\t} else {\n\t\t// Try to load buffer from file\n\t\tdata, err = g.loadFileBytes(bufData.Uri)\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Checks data length\n\tif len(data) != bufData.ByteLength {\n\t\treturn nil, fmt.Errorf(\"buffer:%d read data length:%d expected:%d\", bufIdx, len(data), bufData.ByteLength)\n\t}\n\t// Cache buffer data\n\tg.Buffers[bufIdx].cache = data\n\tlog.Debug(\"cache data:%v\", len(bufData.cache))\n\treturn data, nil\n}",
"func NewBuffer(reader io.Reader, size int64, path string, cursorPosition []string) *Buffer {\n\tb := new(Buffer)\n\tb.LineArray = NewLineArray(size, reader)\n\n\tb.Settings = DefaultLocalSettings()\n\t//\tfor k, v := range globalSettings {\n\t//\t\tif _, ok := b.Settings[k]; ok {\n\t//\t\t\tb.Settings[k] = v\n\t//\t\t}\n\t//\t}\n\n\tif fileformat == 1 {\n\t\tb.Settings[\"fileformat\"] = \"unix\"\n\t} else if fileformat == 2 {\n\t\tb.Settings[\"fileformat\"] = \"dos\"\n\t}\n\n\tb.Path = path\n\n\tb.EventHandler = NewEventHandler(b)\n\n\tb.update()\n\n\tb.Cursor = Cursor{\n\t\tLoc: Loc{0, 0},\n\t\tbuf: b,\n\t}\n\n\t//InitLocalSettings(b)\n\n\tb.cursors = []*Cursor{&b.Cursor}\n\n\treturn b\n}",
"func New(capacity int, fn func(series []*influxdb.Series)) *Buffer {\n\treturn NewBuffer(capacity, fn)\n}",
"func initBuffer(size int) {\n\tif len(buffer) == size {\n\t\treturn\n\t}\n\tbuffer = make([]uint8, size)\n}",
"func NewBuffer(conn *net.TCPConn, buffOb chan bool, maxQueueSize int) *Buffer {\n\tsize := maxQueueSize\n\n\tif size == -1 {\n\t\tsize = DEFAULT_QUEUE_SIZE\n\t}\n\n\tbuf := new(Buffer)\n\tbuf.conn = conn\n\tbuf.pending = make(chan []byte, size)\n\tbuf.ctrl = make(chan bool)\n\tbuf.ob = buffOb\n\tbuf.max = size\n\n\treturn buf\n}",
"func NewBuffer() Buffer {\n\treturn Buffer{\n\t\tCellMap: make(map[image.Point]Cell),\n\t\tArea: image.Rectangle{}}\n}",
"func (b *Buffer) AttachNew() {\n b.data = make([]byte, 0)\n b.size = 0\n b.offset = 0\n}",
"func NewBuffer(length int) *Buffer {\n\treturn &Buffer{\n\t\titems: make([]unsafe.Pointer, length),\n\t}\n}",
"func NewBuffer(conn *sqlite.Conn) (*Buffer, error) {\n\treturn NewBufferSize(conn, 16*1024)\n}",
"func NewBuffer(size int) *Buffer {\n\treturn &Buffer{\n\t\tdata: make([]byte, size),\n\t}\n}",
"func NewBuffer(capacity int, fn func(series []*influxdb.Series)) *Buffer {\n\tb := &Buffer{\n\t\tfn: fn,\n\t\tin: make(chan *influxdb.Series),\n\t\tseries: make(map[string]*influxdb.Series),\n\t\tcapacity: capacity,\n\t}\n\tif b.capacity > 0 {\n\t\tgo b.aggregate()\n\t}\n\n\treturn b\n}",
"func (m *metricMysqlBufferPoolDataPages) init() {\n\tm.data.SetName(\"mysql.buffer_pool.data_pages\")\n\tm.data.SetDescription(\"The number of data pages in the InnoDB buffer pool.\")\n\tm.data.SetUnit(\"1\")\n\tm.data.SetEmptySum()\n\tm.data.Sum().SetIsMonotonic(false)\n\tm.data.Sum().SetAggregationTemporality(pmetric.MetricAggregationTemporalityCumulative)\n\tm.data.Sum().DataPoints().EnsureCapacity(m.capacity)\n}",
"func NewBuffer(inp []byte) *ByteBuffer {\n\tif inp == nil {\n\t\tinp = make([]byte, 0, 512)\n\t}\n\treturn &ByteBuffer{Buffer: bytes.NewBuffer(inp)}\n}",
"func (al *AudioListener) setBuffer(size int) {\n\tal.Lock()\n\tdefer al.Unlock()\n\n\tal.buffer = make([]gumble.AudioPacket, 0, size)\n}",
"func NewCapacityBuffer(capacity int) *Buffer {\n return &Buffer{data: make([]byte, capacity)}\n}",
"func newEventBuffer(size int64) *eventBuffer {\n\tzero := int64(0)\n\tb := &eventBuffer{\n\t\tmaxSize: size,\n\t\tsize: &zero,\n\t}\n\n\titem := newBufferItem(&structs.Events{Index: 0, Events: nil})\n\n\tb.head.Store(item)\n\tb.tail.Store(item)\n\n\treturn b\n}",
"func New(w, h int) *Buffer {\n\tb := &Buffer{\n\t\tWidth: w,\n\t\tHeight: h,\n\t\tCursor: NewCursor(0, 0),\n\t\tTiles: make([]*Tile, w*h),\n\t}\n\tb.Resize(w, h)\n\treturn b\n}",
"func NewBuffer() *Buffer {\n\treturn &Buffer{B: &strings.Builder{}}\n}",
"func NewTelemetryBuffer() (*TelemetryBuffer, error) {\n\tvar tb TelemetryBuffer\n\ttb.data = make(chan interface{})\n\ttb.cancel = make(chan bool, 1)\n\ttb.connections = make([]net.Conn, 1)\n\terr := tb.Listen(FdName)\n\tif err != nil {\n\t\ttb.fdExists = strings.Contains(err.Error(), \"in use\") || strings.Contains(err.Error(), \"Access is denied\")\n\t} else {\n\t\t// Spawn server goroutine to handle incoming connections\n\t\tgo func() {\n\t\t\tfor {\n\t\t\t\t// Spawn worker goroutines to communicate with client\n\t\t\t\tconn, err := tb.listener.Accept()\n\t\t\t\tif err == nil {\n\t\t\t\t\ttb.connections = append(tb.connections, conn)\n\t\t\t\t\tgo func() {\n\t\t\t\t\t\tfor {\n\t\t\t\t\t\t\treportStr, err := read(conn)\n\t\t\t\t\t\t\tif err == nil {\n\t\t\t\t\t\t\t\tvar tmp map[string]interface{}\n\t\t\t\t\t\t\t\tjson.Unmarshal(reportStr, &tmp)\n\t\t\t\t\t\t\t\tif _, ok := tmp[\"NpmVersion\"]; ok {\n\t\t\t\t\t\t\t\t\tvar npmReport NPMReport\n\t\t\t\t\t\t\t\t\tjson.Unmarshal([]byte(reportStr), &npmReport)\n\t\t\t\t\t\t\t\t\ttb.data <- npmReport\n\t\t\t\t\t\t\t\t} else if _, ok := tmp[\"CniSucceeded\"]; ok {\n\t\t\t\t\t\t\t\t\tvar cniReport CNIReport\n\t\t\t\t\t\t\t\t\tjson.Unmarshal([]byte(reportStr), &cniReport)\n\t\t\t\t\t\t\t\t\ttb.data <- cniReport\n\t\t\t\t\t\t\t\t} else if _, ok := tmp[\"Allocations\"]; ok {\n\t\t\t\t\t\t\t\t\tvar dncReport DNCReport\n\t\t\t\t\t\t\t\t\tjson.Unmarshal([]byte(reportStr), &dncReport)\n\t\t\t\t\t\t\t\t\ttb.data <- dncReport\n\t\t\t\t\t\t\t\t} else if _, ok := tmp[\"DncPartitionKey\"]; ok {\n\t\t\t\t\t\t\t\t\tvar cnsReport CNSReport\n\t\t\t\t\t\t\t\t\tjson.Unmarshal([]byte(reportStr), &cnsReport)\n\t\t\t\t\t\t\t\t\ttb.data <- cnsReport\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}()\n\t\t\t\t}\n\t\t\t}\n\t\t}()\n\t}\n\n\terr = tb.Dial(FdName)\n\tif err == nil {\n\t\ttb.connected = true\n\t\ttb.payload.DNCReports = make([]DNCReport, 0)\n\t\ttb.payload.CNIReports = make([]CNIReport, 0)\n\t\ttb.payload.NPMReports = make([]NPMReport, 0)\n\t\ttb.payload.CNSReports = make([]CNSReport, 0)\n\t} else if tb.fdExists {\n\t\ttb.cleanup(FdName)\n\t}\n\n\treturn &tb, err\n}",
"func NewAttached(buffer []byte) *Buffer {\n result := NewEmptyBuffer()\n result.Attach(buffer)\n return result\n}",
"func NewLocalBuffer(b bytes.Buffer) *LocalBuffer { return &LocalBuffer{b: b} }",
"func NewBufferBuilder() *BufferBuilder {\n\treturn &BufferBuilder{}\n}",
"func NewBuffer(m []byte, skip, size int64) (*Buffer, error) {\n\tb := &Buffer{\n\t\toffset: skip,\n\t\tsize: size,\n\t\tdata: m,\n\t}\n\treturn b, nil\n}",
"func NewAttachedBuffer(buffer *Buffer) *Buffer {\n result := NewEmptyBuffer()\n result.AttachBuffer(buffer)\n return result\n}",
"func DefaultBufferStoreSize() int {\n\treturn 100\n}",
"func (m *metricMysqlBufferPoolLimit) init() {\n\tm.data.SetName(\"mysql.buffer_pool.limit\")\n\tm.data.SetDescription(\"The configured size of the InnoDB buffer pool.\")\n\tm.data.SetUnit(\"By\")\n\tm.data.SetEmptySum()\n\tm.data.Sum().SetIsMonotonic(false)\n\tm.data.Sum().SetAggregationTemporality(pmetric.MetricAggregationTemporalityCumulative)\n}",
"func (m *metricMysqlBufferPoolPages) init() {\n\tm.data.SetName(\"mysql.buffer_pool.pages\")\n\tm.data.SetDescription(\"The number of pages in the InnoDB buffer pool.\")\n\tm.data.SetUnit(\"1\")\n\tm.data.SetEmptySum()\n\tm.data.Sum().SetIsMonotonic(false)\n\tm.data.Sum().SetAggregationTemporality(pmetric.MetricAggregationTemporalityCumulative)\n\tm.data.Sum().DataPoints().EnsureCapacity(m.capacity)\n}",
"func (c *HTTPCollector) createBuffer() []*zipkincore.Span {\n\treturn c.batchPool.Get().([]*zipkincore.Span)\n}",
"func (pool *BufferPool) New() (buf *bytes.Buffer) {\n\tselect {\n\tcase buf = <-pool.Buffers:\n\tdefault:\n\t\tbuf = &bytes.Buffer{}\n\t}\n\treturn\n}",
"func NewDistributedObjectWithData() {}",
"func (db *DB) init() error {\n\t// Create two meta pages on a buffer.\n\tbuf := make([]byte, db.pageSize*4)\n\tfor i := 0; i < 2; i++ {\n\t\tp := db.pageInBuffer(buf, pgid(i))\n\t\tp.id = pgid(i)\n\t\tp.flags = metaPageFlag\n\n\t\t// Initialize the meta page.\n\t\tm := p.meta()\n\t\tm.magic = magic\n\t\tm.version = version\n\t\tm.pageSize = uint32(db.pageSize)\n\t\tm.freelist = 2\n\t\tm.root = bucket{root: 3}\n\t\tm.pgid = 4\n\t\tm.txid = txid(i)\n\t\tm.checksum = m.sum64()\n\t}\n\n\t// Write an empty freelist at page 3.\n\tp := db.pageInBuffer(buf, pgid(2))\n\tp.id = pgid(2)\n\tp.flags = freelistPageFlag\n\tp.count = 0\n\n\t// Write an empty leaf page at page 4.\n\tp = db.pageInBuffer(buf, pgid(3))\n\tp.id = pgid(3)\n\tp.flags = leafPageFlag\n\tp.count = 0\n\n\t// Write the buffer to our data file.\n\tif _, err := db.ops.writeAt(buf, 0); err != nil {\n\t\treturn err\n\t}\n\tif err := fdatasync(db); err != nil {\n\t\treturn err\n\t}\n\tdb.filesz = len(buf)\n\n\treturn nil\n}",
"func (src *Source) SetNewBuffer() {\n\tsrc.buf = make([]byte, 64)\n}",
"func (s *seqBuf) init(length time.Duration, maxSeqNum, maxSeqNumDiff seqNum, entryChan chan seqBufEntry) {\n\ts.length = length\n\ts.maxSeqNum = maxSeqNum\n\ts.maxSeqNumDiff = maxSeqNumDiff\n\ts.entryChan = entryChan\n\n\ts.entryAddedChan = make(chan bool)\n\ts.watcherCloseNeededChan = make(chan bool)\n\ts.watcherCloseDoneChan = make(chan bool)\n\tgo s.watcher()\n}",
"func NewMemoryStorage() *MemoryStorage {\n return &MemoryStorage{tickets: make(map[string]*types.Ticket)}\n}",
"func newBuffer(br *Reader) (*buffer, error) {\n\tn, err := io.ReadFull(br.r, br.buf[:4])\n\t// br.r.Chunk() is only valid after the call the Read(), so this\n\t// must come after the first read in the record.\n\ttx := br.r.Begin()\n\tdefer func() {\n\t\tbr.lastChunk = tx.End()\n\t}()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif n != 4 {\n\t\treturn nil, errors.New(\"bam: invalid record: short block size\")\n\t}\n\tb := &buffer{data: br.buf[:4]}\n\tsize := int(b.readInt32())\n\tif size == 0 {\n\t\treturn nil, io.EOF\n\t}\n\tif size < 0 {\n\t\treturn nil, errors.New(\"bam: invalid record: invalid block size\")\n\t}\n\tif size > cap(br.buf) {\n\t\tb.off, b.data = 0, make([]byte, size)\n\t} else {\n\t\tb.off, b.data = 0, br.buf[:size]\n\t\tb.shared = true\n\t}\n\tn, err = io.ReadFull(br.r, b.data)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif n != size {\n\t\treturn nil, errors.New(\"bam: truncated record\")\n\t}\n\treturn b, nil\n}",
"func (kvstore *KVStore) init_db() {\n\t/*\n\t\tdifference between make and new?\n\t\t1. make(T) always return type T; new(T) returns type *T\n\t\t2. new works for all types,\n\tand dynamically allocates space for a variable of that type,\n\tinitialized to the zero value of that type, and returns a pointer to it;\n\t\t make works as a kind of \"constructor\" for certain bult-in types(\n\tslice, map, channel)\n\t*/\n\tfmt.Println(\"initializing kvstore\")\n\tkvstore.kvstore = make(map[string][]byte)\n}",
"func New(capacity int64) chainstore.Store {\n\tmemStore := &memStore{\n\t\tdata: make(map[string][]byte, 1000),\n\t}\n\tstore := lrumgr.New(capacity, memStore)\n\treturn store\n}",
"func NewBuffer(ssrc uint32, vp, ap *sync.Pool) *Buffer {\n\tb := &Buffer{\n\t\tmediaSSRC: ssrc,\n\t\tvideoPool: vp,\n\t\taudioPool: ap,\n\t\tpacketChan: make(chan rtp.Packet, 100),\n\t}\n\treturn b\n}",
"func New(i int) *Buffer {\n\treturn &Buffer{\n\t\tsize: i,\n\t}\n}",
"func (db *DB) init() error {\n\tdb.pageSize = os.Getpagesize()\n\n\tbuf := make([]byte, db.pageSize*4)\n\t//将第0页和第1页初始化meta页,\n\t// 并指定root bucket的page id为3, freelist记录的page id为2,\n\t// 当前数据库总页数为4,同时txid分别为0和1\n\tfor i := 0; i < 2; i++ {\n\t\tp := (*page)(unsafe.Pointer(&buf[pgid(i)*pgid(db.pageSize)]))\n\t\tp.id = pgid(i)\n\t\tp.flags = metaPageFlag\n\n\t\t// initialize the meta page\n\t\tm := p.meta()\n\n\t\tm.magic = magic\n\t\tm.version = version\n\t\tm.pageSize = uint32(db.pageSize)\n\t\tm.freelist = 2\n\t\tm.root = bucket{root: 3}\n\t\tm.pgid = 4\n\n\t\tm.txid = txid(i)\n\t\tm.chckksum = m.sum64()\n\t}\n\t// 将第2页初始化为freelist页,即freelist的记录将会存在第2页;\n\tp := db.pageInBuffer(buf[:], pgid(2))\n\tp.id = pgid(2)\n\tp.flags = freelistPageFlag\n\tp.count = 0\n\t// 将第3页初始化为一个空页,它可以用来写入K/V记录,请注意它必须是B+ Tree中的叶子节点\n\tp = db.pageInBuffer(buf[:], pgid(3))\n\tp.id = pgid(3)\n\tp.flags = leafPageFlag\n\tp.count = 0\n\t//调用写文件函数将buffer中的数据写入文件\n\t// 在open函数中已经设定 db.ops.writeAt = db.file.writeAt.\n\t// 所以现在写数据就是往db.file中写数据\n\tif _, err := db.ops.writeAt(buf, 0); err != nil {\n\t\treturn err\n\t}\n\t//通过fdatasync()调用将内核中磁盘页缓冲立即写入磁盘\n\tif err := fdatasync(db); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (self *MessageStore) Init() {\n\tself.in = make(map[string]*MsgEntry)\n\tself.out = make(map[string]*MsgEntry)\n}",
"func newBufferItem(events *structs.Events) *bufferItem {\n\treturn &bufferItem{\n\t\tlink: &bufferLink{\n\t\t\tnextCh: make(chan struct{}),\n\t\t\tdroppedCh: make(chan struct{}),\n\t\t},\n\t\tEvents: events,\n\t\tcreatedAt: time.Now(),\n\t}\n}",
"func (m *Memory) Init(size int) {\n\tcontent := make([]byte, size)\n\tm.content = content\n}",
"func init() {\n\tos.RemoveAll(DataPath)\n\n\tdc := DatabaseConfig{\n\t\tDataPath: DataPath,\n\t\tIndexDepth: 4,\n\t\tPayloadSize: 16,\n\t\tBucketDuration: 3600000000000,\n\t\tResolution: 60000000000,\n\t\tSegmentSize: 100000,\n\t}\n\n\tcfg := &ServerConfig{\n\t\tVerboseLogs: true,\n\t\tRemoteDebug: true,\n\t\tListenAddress: Address,\n\t\tDatabases: map[string]DatabaseConfig{\n\t\t\tDatabase: dc,\n\t\t},\n\t}\n\n\tdbs := map[string]kdb.Database{}\n\tdb, err := dbase.New(dbase.Options{\n\t\tDatabaseName: Database,\n\t\tDataPath: dc.DataPath,\n\t\tIndexDepth: dc.IndexDepth,\n\t\tPayloadSize: dc.PayloadSize,\n\t\tBucketDuration: dc.BucketDuration,\n\t\tResolution: dc.Resolution,\n\t\tSegmentSize: dc.SegmentSize,\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tdbs[\"test\"] = db\n\td = db\n\to = dc\n\n\ts = NewServer(dbs, cfg)\n\tgo s.Listen()\n\n\t// wait for the server to start\n\ttime.Sleep(time.Second * 2)\n\n\tc = NewClient(Address)\n\tif err := c.Connect(); err != nil {\n\t\tpanic(err)\n\t}\n}",
"func New() *MemStore {\n\ts := new(MemStore)\n\ts.did = make(map[string]common.Role)\n\ts.act = make(map[string]common.Role)\n\ts.tkt = make(map[string]common.Ticket)\n\t// initialize random gen\n\ts.rand = rand.New(rand.NewSource(time.Now().UnixNano() + 9999999))\n\treturn s\n}",
"func (gl *WebGL) NewBuffer(target GLEnum, data interface{}, usage GLEnum) WebGLBuffer {\n\tbuffer := gl.CreateBuffer()\n\tgl.BindBuffer(target, buffer)\n\tgl.BufferData(target, data, usage)\n\treturn buffer\n}",
"func NewBufferFromString(text string) *Buffer {\n\treturn NewBuffer(strings.NewReader(text), int64(len(text)), \"1.msg\", nil)\n}",
"func NewTelemetryBuffer(hostReportURL string) *TelemetryBuffer {\n\tvar tb TelemetryBuffer\n\n\tif hostReportURL == \"\" {\n\t\ttb.azureHostReportURL = azureHostReportURL\n\t}\n\n\ttb.data = make(chan interface{}, MaxNumReports)\n\ttb.cancel = make(chan bool, 1)\n\ttb.connections = make([]net.Conn, 0)\n\ttb.buffer.DNCReports = make([]DNCReport, 0, MaxNumReports)\n\ttb.buffer.CNIReports = make([]CNIReport, 0, MaxNumReports)\n\ttb.buffer.NPMReports = make([]NPMReport, 0, MaxNumReports)\n\ttb.buffer.CNSReports = make([]CNSReport, 0, MaxNumReports)\n\n\treturn &tb\n}",
"func (p *MemProvider) Init(maxLifetime int64, _ string) error {\n\tp.lock.Lock()\n\tp.list = list.New()\n\tp.data = make(map[string]*list.Element)\n\tp.maxLifetime = maxLifetime\n\tp.lock.Unlock()\n\treturn nil\n}",
"func NewBuffer(size int) *Buffer {\n\treturn &Buffer{size: size, tail: 0, head: 0, buf: make([]byte, size)}\n}",
"func (gen *DataGen) Init(m *pktmbuf.Packet, args ...any) {\n\tdata := ndn.MakeData(args...)\n\twire, e := tlv.EncodeValueOnly(data)\n\tif e != nil {\n\t\tlogger.Panic(\"encode Data error\", zap.Error(e))\n\t}\n\n\tm.SetHeadroom(0)\n\tif e := m.Append(wire); e != nil {\n\t\tlogger.Panic(\"insufficient dataroom\", zap.Error(e))\n\t}\n\tbufBegin := unsafe.Pointer(unsafe.SliceData(m.SegmentBytes()[0]))\n\tbufEnd := unsafe.Add(bufBegin, len(wire))\n\t*gen = DataGen{\n\t\ttpl: (*C.struct_rte_mbuf)(m.Ptr()),\n\t\tmeta: unsafe.SliceData(C.DataEnc_NoMetaInfo[:]),\n\t\tcontentIov: [1]C.struct_iovec{{\n\t\t\tiov_base: bufEnd,\n\t\t}},\n\t}\n\n\td := tlv.DecodingBuffer(wire)\n\tfor _, de := range d.Elements() {\n\t\tswitch de.Type {\n\t\tcase an.TtName:\n\t\t\tgen.suffix = C.LName{\n\t\t\t\tvalue: (*C.uint8_t)(unsafe.Add(bufEnd, -len(de.After)-de.Length())),\n\t\t\t\tlength: C.uint16_t(de.Length()),\n\t\t\t}\n\t\tcase an.TtMetaInfo:\n\t\t\tgen.meta = (*C.uint8_t)(unsafe.Add(bufEnd, -len(de.WireAfter())))\n\t\tcase an.TtContent:\n\t\t\tgen.contentIov[0] = C.struct_iovec{\n\t\t\t\tiov_base: unsafe.Add(bufEnd, -len(de.After)-de.Length()),\n\t\t\t\tiov_len: C.size_t(de.Length()),\n\t\t\t}\n\t\t}\n\t}\n\n\tC.rte_pktmbuf_adj(gen.tpl, C.uint16_t(uintptr(gen.contentIov[0].iov_base)-uintptr(bufBegin)))\n\tC.rte_pktmbuf_trim(gen.tpl, C.uint16_t(C.size_t(gen.tpl.pkt_len)-gen.contentIov[0].iov_len))\n}",
"func newSafeBuffer(bufsize int) ([]byte, error) {\n\t// Max BSON document size is 16MB.\n\t// https://docs.mongodb.com/manual/reference/limits/\n\t// For simplicity, bound buffer size at 32MB so that headers and so on fit\n\t// too.\n\t// TODO: Can you put multiple large documents in one insert or reply and\n\t// exceed this limit?\n\tif (bufsize < 0) || (bufsize > 32*1024*1024) {\n\t\treturn nil, fmt.Errorf(\"Invalid buffer size %d\", bufsize)\n\t}\n\treturn make([]byte, bufsize), nil\n}",
"func newBufferPool() *bufferPool {\n\treturn &bufferPool{&sync.Pool{\n\t\tNew: func() interface{} {\n\t\t\treturn &bytes.Buffer{}\n\t\t},\n\t}}\n}",
"func (self Source) SetBuffer(buffer Buffer) {\n\tself.Seti(AlBuffer, int32(buffer))\n}",
"func NewTelemetryBuffer(hostReportURL string) *TelemetryBuffer {\n\tvar tb TelemetryBuffer\n\n\tif hostReportURL == \"\" {\n\t\ttb.azureHostReportURL = azureHostReportURL\n\t}\n\n\ttb.data = make(chan interface{})\n\ttb.cancel = make(chan bool, 1)\n\ttb.connections = make([]net.Conn, 1)\n\ttb.payload.DNCReports = make([]DNCReport, 0)\n\ttb.payload.CNIReports = make([]CNIReport, 0)\n\ttb.payload.NPMReports = make([]NPMReport, 0)\n\ttb.payload.CNSReports = make([]CNSReport, 0)\n\n\terr := telemetryLogger.SetTarget(log.TargetLogfile)\n\tif err != nil {\n\t\tfmt.Printf(\"Failed to configure logging: %v\\n\", err)\n\t}\n\n\treturn &tb\n}",
"func (p *Pool) NewBuffer() *Buffer {\n\treturn &Buffer{pool: p, bufs: make([][]byte, 0, 128), curBufIdx: -1}\n}",
"func (b *defaultByteBuffer) NewBuffer() ByteBuffer {\n\treturn NewWriterBuffer(256)\n}",
"func newDownloadBuffer(length, sectorSize uint64) downloadBuffer {\n\t// Completion the length multiple of sector size(4MB)\n\tif length%sectorSize != 0 {\n\t\tlength += sectorSize - length%sectorSize\n\t}\n\n\tddb := downloadBuffer{\n\t\tbuf: make([][]byte, 0, length/sectorSize),\n\t\tsectorSize: sectorSize,\n\t}\n\tfor length > 0 {\n\t\tddb.buf = append(ddb.buf, make([]byte, sectorSize))\n\t\tlength -= sectorSize\n\t}\n\treturn ddb\n}",
"func New(handler Action, options ...Option) *Buffer {\n\tafb := Buffer{\n\t\tLatch: async.NewLatch(),\n\t\tHandler: handler,\n\t\tParallelism: runtime.NumCPU(),\n\t\tMaxFlushes: DefaultMaxFlushes,\n\t\tMaxLen: DefaultMaxLen,\n\t\tInterval: DefaultFlushInterval,\n\t\tShutdownGracePeriod: DefaultShutdownGracePeriod,\n\t}\n\tfor _, option := range options {\n\t\toption(&afb)\n\t}\n\tafb.contents = collections.NewRingBufferWithCapacity(afb.MaxLen)\n\treturn &afb\n}"
] | [
"0.6452763",
"0.62074417",
"0.6085602",
"0.6062877",
"0.60546154",
"0.6051158",
"0.6039646",
"0.6039385",
"0.60212064",
"0.593024",
"0.58833855",
"0.5863987",
"0.58623064",
"0.58622515",
"0.58257276",
"0.5825609",
"0.5809823",
"0.57785004",
"0.5771321",
"0.5759343",
"0.5755475",
"0.57512695",
"0.5751264",
"0.5740081",
"0.5736525",
"0.5721855",
"0.571137",
"0.56983644",
"0.56968445",
"0.56959856",
"0.56942254",
"0.56913084",
"0.5661283",
"0.5642236",
"0.56211495",
"0.56153333",
"0.5610583",
"0.56062853",
"0.5556813",
"0.55565697",
"0.5552949",
"0.5541792",
"0.55362093",
"0.55336887",
"0.5525886",
"0.55175424",
"0.54931766",
"0.5469633",
"0.5456069",
"0.5452697",
"0.5450981",
"0.5447555",
"0.5445897",
"0.5443028",
"0.54316",
"0.54167944",
"0.5411943",
"0.54110825",
"0.53649944",
"0.53617525",
"0.5345403",
"0.5345295",
"0.53450096",
"0.5322516",
"0.5307159",
"0.529712",
"0.52747643",
"0.5248277",
"0.5241659",
"0.523649",
"0.5234859",
"0.5232545",
"0.522374",
"0.5221605",
"0.5218385",
"0.521391",
"0.52130455",
"0.5199346",
"0.51931405",
"0.5192627",
"0.5189877",
"0.5176763",
"0.5173783",
"0.5150649",
"0.5145779",
"0.5137598",
"0.5136657",
"0.51261795",
"0.510372",
"0.5091024",
"0.5088641",
"0.508824",
"0.50850075",
"0.5084109",
"0.5077823",
"0.50759894",
"0.50746435",
"0.50692624",
"0.50684327",
"0.5068185",
"0.5064972"
] | 0.0 | -1 |
creates and initializes a buffer object's immutable data store | func BufferStorage(target uint32, size int, data unsafe.Pointer, flags uint32) {
C.glowBufferStorage(gpBufferStorage, (C.GLenum)(target), (C.GLsizeiptr)(size), data, (C.GLbitfield)(flags))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func newBuffer() *buffer {\n\treturn &buffer{\n\t\tdata: make([]byte, 0),\n\t\tlen: 0,\n\t\tpkg: nil,\n\t\tconn: nil,\n\t\tpkgCh: make(chan *pkg),\n\t\tevCh: make(chan *pkg),\n\t\terrCh: make(chan error, 1),\n\t}\n}",
"func newBuffer(b []byte) *buffer {\n\treturn &buffer{proto.NewBuffer(b), 0}\n}",
"func newBuffer(buf []byte) *Buffer {\n\treturn &Buffer{data: buf}\n}",
"func newBuffer(r io.Reader, offset int64) *buffer {\n\treturn &buffer{\n\t\tr: r,\n\t\toffset: offset,\n\t\tbuf: make([]byte, 0, 4096),\n\t\tallowObjptr: true,\n\t\tallowStream: true,\n\t}\n}",
"func newBuffer() Buffer {\n\treturn &buffer{\n\t\tbytes: make([]byte, 0, 64),\n\t}\n}",
"func newBuffer(e []byte) *Buffer {\n\tp := buffer_pool.Get().(*Buffer)\n\tp.buf = e\n\treturn p\n}",
"func NewBuffer(aSlice interface{}) *Buffer {\n return &Buffer{buffer: sliceValue(aSlice, false), handler: valueHandler{}}\n}",
"func newBuffer(bits uint32) buffer {\n\tvar b buffer\n\tb.data = make([]unsafe.Pointer, 1<<bits)\n\tb.free = 1 << bits\n\tb.mask = 1<<bits - 1\n\tb.bits = bits\n\treturn b\n}",
"func NewBuffer(capacity int) Buffer {\n\treturn Buffer{\n\t\tcapacity: capacity,\n\t\tcurrentSize: 0,\n\t\tcontents: map[entity.Key]inventoryapi.PostDeltaBody{},\n\t}\n}",
"func NewBuffer() *Buffer {\n\treturn NewBufferWithSize(initialSize)\n}",
"func New(b []byte) *Buffer {\n\treturn &Buffer{b: b}\n}",
"func newDatabaseBuffer() databaseBuffer {\n\tb := &dbBuffer{\n\t\tbucketsMap: make(map[xtime.UnixNano]*BufferBucketVersions),\n\t\tinOrderBlockStarts: make([]xtime.UnixNano, 0, bucketsCacheSize),\n\t}\n\treturn b\n}",
"func new_buffer(conn *websocket.Conn, ctrl chan struct{}, txqueuelen int) *Buffer {\n\tbuf := Buffer{conn: conn}\n\tbuf.pending = make(chan []byte, txqueuelen)\n\tbuf.ctrl = ctrl\n\tbuf.cache = make([]byte, packet.PACKET_LIMIT+2)\n\treturn &buf\n}",
"func NewBuffer() Buffer {\n\treturn &buffer{}\n}",
"func NewBuffer() *Buffer {\n\treturn &Buffer{Line: []byte{}, Val: make([]byte, 0, 32)}\n}",
"func NewBuffer(e []byte) *Buffer {\n\treturn &Buffer{buf: e}\n}",
"func NewBuffer(o *Options) (*Buffer, error) {\n\tif o == nil {\n\t\to = &Options{}\n\t}\n\tret := &Buffer{opts: *o} // copy o before normalizing it\n\n\tif err := ret.opts.normalize(); err != nil {\n\t\treturn nil, errors.Annotate(err, \"normalizing buffer.Options\").Err()\n\t}\n\n\tret.unleased.onlyID = o.FIFO\n\tret.batchItemsGuess = newMovingAverage(10, ret.opts.batchItemsGuess())\n\tret.liveLeases = map[*Batch]struct{}{}\n\tret.unAckedLeases = map[*Batch]struct{}{}\n\treturn ret, nil\n}",
"func NewBuffer(size int) *Buffer {\n\tif size <= 0 {\n\t\treturn &Buffer{}\n\t}\n\treturn &Buffer{\n\t\tstorage: make([]byte, size),\n\t\tsize: size,\n\t}\n}",
"func NewBuffer() *Buffer { return globalPool.NewBuffer() }",
"func NewBuffer(e []byte) *Buffer {\n\treturn &Buffer{buf: e, length: len(e)}\n}",
"func NewBuffer() Buffer {\n\treturn Buffer{\n\t\tCellMap: make(map[image.Point]Cell),\n\t\tArea: image.Rectangle{}}\n}",
"func createBuffer() *bytes.Buffer {\n\tbuf := bytes.Buffer{}\n\treturn &buf\n}",
"func NewEmptyBuffer() *Buffer {\n return &Buffer{data: make([]byte, 0)}\n}",
"func ringBufferInitBuffer(buflen uint32, rb *ringBuffer) {\n\tvar new_data []byte\n\tvar i uint\n\tsize := 2 + int(buflen) + int(kSlackForEightByteHashingEverywhere)\n\tif cap(rb.data_) < size {\n\t\tnew_data = make([]byte, size)\n\t} else {\n\t\tnew_data = rb.data_[:size]\n\t}\n\tif rb.data_ != nil {\n\t\tcopy(new_data, rb.data_[:2+rb.cur_size_+uint32(kSlackForEightByteHashingEverywhere)])\n\t}\n\n\trb.data_ = new_data\n\trb.cur_size_ = buflen\n\trb.buffer_ = rb.data_[2:]\n\trb.data_[1] = 0\n\trb.data_[0] = rb.data_[1]\n\tfor i = 0; i < kSlackForEightByteHashingEverywhere; i++ {\n\t\trb.buffer_[rb.cur_size_+uint32(i)] = 0\n\t}\n}",
"func NewCapacityBuffer(capacity int) *Buffer {\n return &Buffer{data: make([]byte, capacity)}\n}",
"func FakeTdsBufferCtor(r io.ReadWriteCloser) io.ReadWriteCloser {\n\treturn r\n}",
"func BufferInit(target Enum, size int, usage Enum) {\n\tgl.BufferData(uint32(target), size, nil, uint32(usage))\n}",
"func New(capacity int, fn func(series []*influxdb.Series)) *Buffer {\n\treturn NewBuffer(capacity, fn)\n}",
"func (r *Record) NewBuffer() *bytes.Buffer {\n\tif r.Buffer == nil {\n\t\treturn &bytes.Buffer{}\n\t}\n\n\treturn r.Buffer\n}",
"func init() {\n\tstore = cache.NewMemoryCache()\n}",
"func NewBuffer(p producer.Producer, size int, flushInterval time.Duration, logger log.Logger) *Buffer {\n\tflush := 1 * time.Second\n\tif flushInterval != 0 {\n\t\tflush = flushInterval\n\t}\n\n\tb := &Buffer{\n\t\trecords: make([]*data.Record, 0, size),\n\t\tmu: new(sync.Mutex),\n\t\tproducer: p,\n\t\tbufferSize: size,\n\t\tlogger: logger,\n\t\tshouldFlush: make(chan bool, 1),\n\t\tflushInterval: flush,\n\t\tlastFlushed: time.Now(),\n\t}\n\n\tgo b.runFlusher()\n\n\treturn b\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n proto.NewProxyWithBuffer(buffer),\n NewStructSimpleModel(buffer),\n NewStructOptionalModel(buffer),\n NewStructNestedModel(buffer),\n NewStructBytesModel(buffer),\n NewStructArrayModel(buffer),\n NewStructVectorModel(buffer),\n NewStructListModel(buffer),\n NewStructSetModel(buffer),\n NewStructMapModel(buffer),\n NewStructHashModel(buffer),\n NewStructHashExModel(buffer),\n NewStructEmptyModel(buffer),\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyStructSimpleFunc(func(model *StructSimpleModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructOptionalFunc(func(model *StructOptionalModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructNestedFunc(func(model *StructNestedModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructBytesFunc(func(model *StructBytesModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructArrayFunc(func(model *StructArrayModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructVectorFunc(func(model *StructVectorModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructListFunc(func(model *StructListModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructSetFunc(func(model *StructSetModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructMapFunc(func(model *StructMapModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructHashFunc(func(model *StructHashModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructHashExFunc(func(model *StructHashExModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructEmptyFunc(func(model *StructEmptyModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func New(capacity int64) chainstore.Store {\n\tmemStore := &memStore{\n\t\tdata: make(map[string][]byte, 1000),\n\t}\n\tstore := lrumgr.New(capacity, memStore)\n\treturn store\n}",
"func NewBuffer(player *Player, conn net.Conn, ctrl chan bool) *Buffer {\r\n\tmax := DEFAULT_QUEUE_SIZE\r\n\r\n\tbuf := Buffer{conn: conn}\r\n\tbuf.pending = make(chan []byte, max)\r\n\tbuf.ctrl = ctrl\r\n\tbuf.max = max\r\n\treturn &buf\r\n}",
"func NewBuffer(size int) *Buffer {\n\treturn &Buffer{\n\t\tdata: make([]byte, size),\n\t}\n}",
"func newEventBuffer(size int64) *eventBuffer {\n\tzero := int64(0)\n\tb := &eventBuffer{\n\t\tmaxSize: size,\n\t\tsize: &zero,\n\t}\n\n\titem := newBufferItem(&structs.Events{Index: 0, Events: nil})\n\n\tb.head.Store(item)\n\tb.tail.Store(item)\n\n\treturn b\n}",
"func NewBuffer() *Buffer {\n\treturn &Buffer{B: &strings.Builder{}}\n}",
"func NewBuffer(reader io.Reader, size int64, path string, cursorPosition []string) *Buffer {\n\tb := new(Buffer)\n\tb.LineArray = NewLineArray(size, reader)\n\n\tb.Settings = DefaultLocalSettings()\n\t//\tfor k, v := range globalSettings {\n\t//\t\tif _, ok := b.Settings[k]; ok {\n\t//\t\t\tb.Settings[k] = v\n\t//\t\t}\n\t//\t}\n\n\tif fileformat == 1 {\n\t\tb.Settings[\"fileformat\"] = \"unix\"\n\t} else if fileformat == 2 {\n\t\tb.Settings[\"fileformat\"] = \"dos\"\n\t}\n\n\tb.Path = path\n\n\tb.EventHandler = NewEventHandler(b)\n\n\tb.update()\n\n\tb.Cursor = Cursor{\n\t\tLoc: Loc{0, 0},\n\t\tbuf: b,\n\t}\n\n\t//InitLocalSettings(b)\n\n\tb.cursors = []*Cursor{&b.Cursor}\n\n\treturn b\n}",
"func NewBuffer(length int) *Buffer {\n\treturn &Buffer{\n\t\titems: make([]unsafe.Pointer, length),\n\t}\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n NewEnumsModel(buffer),\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyEnumsFunc(func(model *EnumsModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func New(w, h int) *Buffer {\n\tb := &Buffer{\n\t\tWidth: w,\n\t\tHeight: h,\n\t\tCursor: NewCursor(0, 0),\n\t\tTiles: make([]*Tile, w*h),\n\t}\n\tb.Resize(w, h)\n\treturn b\n}",
"func NewBuffer(capacity int, fn func(series []*influxdb.Series)) *Buffer {\n\tb := &Buffer{\n\t\tfn: fn,\n\t\tin: make(chan *influxdb.Series),\n\t\tseries: make(map[string]*influxdb.Series),\n\t\tcapacity: capacity,\n\t}\n\tif b.capacity > 0 {\n\t\tgo b.aggregate()\n\t}\n\n\treturn b\n}",
"func initBuffer(size int) {\n\tif len(buffer) == size {\n\t\treturn\n\t}\n\tbuffer = make([]uint8, size)\n}",
"func (b *BatchBuffer) Init() {}",
"func (b *Buffer) AttachNew() {\n b.data = make([]byte, 0)\n b.size = 0\n b.offset = 0\n}",
"func New(i int) *Buffer {\n\treturn &Buffer{\n\t\tsize: i,\n\t}\n}",
"func newBufferItem(events *structs.Events) *bufferItem {\n\treturn &bufferItem{\n\t\tlink: &bufferLink{\n\t\t\tnextCh: make(chan struct{}),\n\t\t\tdroppedCh: make(chan struct{}),\n\t\t},\n\t\tEvents: events,\n\t\tcreatedAt: time.Now(),\n\t}\n}",
"func NewAttached(buffer []byte) *Buffer {\n result := NewEmptyBuffer()\n result.Attach(buffer)\n return result\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n NewOrderModel(buffer),\n NewBalanceModel(buffer),\n NewAccountModel(buffer),\n nil,\n nil,\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyOrderFunc(func(model *OrderModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyBalanceFunc(func(model *BalanceModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyAccountFunc(func(model *AccountModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func newSafeBuffer() *safeBuffer {\n\treturn &safeBuffer{\n\t\tbuf: bytes.NewBuffer(nil),\n\t}\n}",
"func NewLocalBuffer(b bytes.Buffer) *LocalBuffer { return &LocalBuffer{b: b} }",
"func (m *metricMysqlBufferPoolUsage) init() {\n\tm.data.SetName(\"mysql.buffer_pool.usage\")\n\tm.data.SetDescription(\"The number of bytes in the InnoDB buffer pool.\")\n\tm.data.SetUnit(\"By\")\n\tm.data.SetEmptySum()\n\tm.data.Sum().SetIsMonotonic(false)\n\tm.data.Sum().SetAggregationTemporality(pmetric.MetricAggregationTemporalityCumulative)\n\tm.data.Sum().DataPoints().EnsureCapacity(m.capacity)\n}",
"func (buf *ListBuffer) Init() {\n\tbuf.Buffer = make([]Node, defaultBufferLength)\n\tbuf.FreeHead = 0\n\tbuf.Count = 0\n\n\tfor i := 0; i < len(buf.Buffer); i++ {\n\t\tbuf.Buffer[i].Item.Clear()\n\t\tbuf.Buffer[i].Prev = BufferIndex(i - 1)\n\t\tbuf.Buffer[i].Next = BufferIndex(i + 1)\n\t}\n\n\tbuf.Buffer[0].Prev = NilIndex\n\tbuf.Buffer[len(buf.Buffer)-1].Next = NilIndex\n}",
"func NewBuffer(inp []byte) *ByteBuffer {\n\tif inp == nil {\n\t\tinp = make([]byte, 0, 512)\n\t}\n\treturn &ByteBuffer{Buffer: bytes.NewBuffer(inp)}\n}",
"func New(buffer, backing sorted.KeyValue, maxBufferBytes int64) *KeyValue {\n\treturn &KeyValue{\n\t\tbuffer: buffer,\n\t\tback: backing,\n\t\tmaxBuffer: maxBufferBytes,\n\t}\n}",
"func (src *Source) SetNewBuffer() {\n\tsrc.buf = make([]byte, 64)\n}",
"func NewBufferBuilder() *BufferBuilder {\n\treturn &BufferBuilder{}\n}",
"func (m *metricMysqlBufferPoolOperations) init() {\n\tm.data.SetName(\"mysql.buffer_pool.operations\")\n\tm.data.SetDescription(\"The number of operations on the InnoDB buffer pool.\")\n\tm.data.SetUnit(\"1\")\n\tm.data.SetEmptySum()\n\tm.data.Sum().SetIsMonotonic(true)\n\tm.data.Sum().SetAggregationTemporality(pmetric.MetricAggregationTemporalityCumulative)\n\tm.data.Sum().DataPoints().EnsureCapacity(m.capacity)\n}",
"func (m *Manager) NewBuffer(conf buffer.Config) (buffer.Streamed, error) {\n\treturn nil, component.ErrInvalidType(\"buffer\", conf.Type)\n}",
"func NewBuffer(conn *net.TCPConn, buffOb chan bool, maxQueueSize int) *Buffer {\n\tsize := maxQueueSize\n\n\tif size == -1 {\n\t\tsize = DEFAULT_QUEUE_SIZE\n\t}\n\n\tbuf := new(Buffer)\n\tbuf.conn = conn\n\tbuf.pending = make(chan []byte, size)\n\tbuf.ctrl = make(chan bool)\n\tbuf.ob = buffOb\n\tbuf.max = size\n\n\treturn buf\n}",
"func New() *MemStore {\n\ts := new(MemStore)\n\ts.did = make(map[string]common.Role)\n\ts.act = make(map[string]common.Role)\n\ts.tkt = make(map[string]common.Ticket)\n\t// initialize random gen\n\ts.rand = rand.New(rand.NewSource(time.Now().UnixNano() + 9999999))\n\treturn s\n}",
"func NewAttachedBuffer(buffer *Buffer) *Buffer {\n result := NewEmptyBuffer()\n result.AttachBuffer(buffer)\n return result\n}",
"func (s *ShmPool) CreateBuffer(id *Buffer, offset int32, width int32, height int32, stride int32, format uint32) {\n sendrequest(s, \"wl_shm_pool_create_buffer\", id, offset, width, height, stride, format)\n}",
"func (l *Logger) initLoggerBuffer() (err error) {\n\t// build\n\tl.LoggerBuffer = LoggerBuffer{}\n\n\t// get serial data\n\tlConfig, err := l.getSerialConfig()\n\tif err != nil {\n\t\treturn\n\t}\n\n\tport, err := serial.OpenPort(lConfig)\n\tif err != nil {\n\t\treturn\n\t}\n\tl.serialPort = *port\n\n\tgo func() {\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-l.stop:\n\t\t\t\treturn\n\t\t\tdefault:\n\t\t\t\t// get data\n\t\t\t\tbuf := make([]byte, bufSize)\n\n\t\t\t\tn, err := port.Read(buf)\n\t\t\t\ttime := time.Now().UTC()\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Print(err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\t//log.Printf(\"NEW DATA [%03d]: %02X %03d\", n, buf[:n], buf[:n]) // LOG\n\n\t\t\t\t// push to LoggerBuffer\n\t\t\t\tt := util.TimestampBuilder(time)\n\t\t\t\tdu := DataUnit{\n\t\t\t\t\tData: buf[:n],\n\t\t\t\t\tTime: &t,\n\t\t\t\t}\n\n\t\t\t\tl.DataUnit = append(l.DataUnit, &du)\n\n\t\t\t\t// feed consumers\n\t\t\t\tfor _, c := range l.consumers {\n\t\t\t\t\tc <- du\n\n\t\t\t\t\tif l.config.Debug {\n\t\t\t\t\t\tlog.Print(\"Data received: \", du.PrettyString())\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Flush every time new data is received\n\t\t\t\tl.flush()\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn\n}",
"func (_this *StreamingReadBuffer) Init(reader io.Reader, bufferSize int, minFreeBytes int) {\n\tif cap(_this.Buffer) < bufferSize {\n\t\t_this.Buffer = make([]byte, 0, bufferSize)\n\t} else {\n\t\t_this.Buffer = _this.Buffer[:0]\n\t}\n\t_this.reader = reader\n\t_this.minFreeBytes = minFreeBytes\n}",
"func NewBuffer(m []byte, skip, size int64) (*Buffer, error) {\n\tb := &Buffer{\n\t\toffset: skip,\n\t\tsize: size,\n\t\tdata: m,\n\t}\n\treturn b, nil\n}",
"func (al *AudioListener) setBuffer(size int) {\n\tal.Lock()\n\tdefer al.Unlock()\n\n\tal.buffer = make([]gumble.AudioPacket, 0, size)\n}",
"func newBuffer(br *Reader) (*buffer, error) {\n\tn, err := io.ReadFull(br.r, br.buf[:4])\n\t// br.r.Chunk() is only valid after the call the Read(), so this\n\t// must come after the first read in the record.\n\ttx := br.r.Begin()\n\tdefer func() {\n\t\tbr.lastChunk = tx.End()\n\t}()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif n != 4 {\n\t\treturn nil, errors.New(\"bam: invalid record: short block size\")\n\t}\n\tb := &buffer{data: br.buf[:4]}\n\tsize := int(b.readInt32())\n\tif size == 0 {\n\t\treturn nil, io.EOF\n\t}\n\tif size < 0 {\n\t\treturn nil, errors.New(\"bam: invalid record: invalid block size\")\n\t}\n\tif size > cap(br.buf) {\n\t\tb.off, b.data = 0, make([]byte, size)\n\t} else {\n\t\tb.off, b.data = 0, br.buf[:size]\n\t\tb.shared = true\n\t}\n\tn, err = io.ReadFull(br.r, b.data)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif n != size {\n\t\treturn nil, errors.New(\"bam: truncated record\")\n\t}\n\treturn b, nil\n}",
"func (b *defaultByteBuffer) NewBuffer() ByteBuffer {\n\treturn NewWriterBuffer(256)\n}",
"func (m *Memory) Init(size int) {\n\tcontent := make([]byte, size)\n\tm.content = content\n}",
"func NewBuffer(conn *sqlite.Conn) (*Buffer, error) {\n\treturn NewBufferSize(conn, 16*1024)\n}",
"func (b *Buffer) Mutable() bool { return b.mutable }",
"func (s *seqBuf) init(length time.Duration, maxSeqNum, maxSeqNumDiff seqNum, entryChan chan seqBufEntry) {\n\ts.length = length\n\ts.maxSeqNum = maxSeqNum\n\ts.maxSeqNumDiff = maxSeqNumDiff\n\ts.entryChan = entryChan\n\n\ts.entryAddedChan = make(chan bool)\n\ts.watcherCloseNeededChan = make(chan bool)\n\ts.watcherCloseDoneChan = make(chan bool)\n\tgo s.watcher()\n}",
"func (g *GLTF) loadBuffer(bufIdx int) ([]byte, error) {\n\n\t// Check if provided buffer index is valid\n\tif bufIdx < 0 || bufIdx >= len(g.Buffers) {\n\t\treturn nil, fmt.Errorf(\"invalid buffer index\")\n\t}\n\tbufData := &g.Buffers[bufIdx]\n\t// Return cached if available\n\tif bufData.cache != nil {\n\t\tlog.Debug(\"Fetching Buffer %d (cached)\", bufIdx)\n\t\treturn bufData.cache, nil\n\t}\n\tlog.Debug(\"Loading Buffer %d\", bufIdx)\n\n\t// If buffer URI use the chunk data field\n\tif bufData.Uri == \"\" {\n\t\treturn g.data, nil\n\t}\n\n\t// Checks if buffer URI is a data URI\n\tvar data []byte\n\tvar err error\n\tif isDataURL(bufData.Uri) {\n\t\tdata, err = loadDataURL(bufData.Uri)\n\t} else {\n\t\t// Try to load buffer from file\n\t\tdata, err = g.loadFileBytes(bufData.Uri)\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Checks data length\n\tif len(data) != bufData.ByteLength {\n\t\treturn nil, fmt.Errorf(\"buffer:%d read data length:%d expected:%d\", bufIdx, len(data), bufData.ByteLength)\n\t}\n\t// Cache buffer data\n\tg.Buffers[bufIdx].cache = data\n\tlog.Debug(\"cache data:%v\", len(bufData.cache))\n\treturn data, nil\n}",
"func New(size int) *MsgBuffer {\r\n\r\n\treturn &MsgBuffer{\r\n\t\tb: make([]byte, size),\r\n\t}\r\n}",
"func newSafeBuffer(bufsize int) ([]byte, error) {\n\t// Max BSON document size is 16MB.\n\t// https://docs.mongodb.com/manual/reference/limits/\n\t// For simplicity, bound buffer size at 32MB so that headers and so on fit\n\t// too.\n\t// TODO: Can you put multiple large documents in one insert or reply and\n\t// exceed this limit?\n\tif (bufsize < 0) || (bufsize > 32*1024*1024) {\n\t\treturn nil, fmt.Errorf(\"Invalid buffer size %d\", bufsize)\n\t}\n\treturn make([]byte, bufsize), nil\n}",
"func (self Source) SetBuffer(buffer Buffer) {\n\tself.Seti(AlBuffer, int32(buffer))\n}",
"func New(handler Action, options ...Option) *Buffer {\n\tafb := Buffer{\n\t\tLatch: async.NewLatch(),\n\t\tHandler: handler,\n\t\tParallelism: runtime.NumCPU(),\n\t\tMaxFlushes: DefaultMaxFlushes,\n\t\tMaxLen: DefaultMaxLen,\n\t\tInterval: DefaultFlushInterval,\n\t\tShutdownGracePeriod: DefaultShutdownGracePeriod,\n\t}\n\tfor _, option := range options {\n\t\toption(&afb)\n\t}\n\tafb.contents = collections.NewRingBufferWithCapacity(afb.MaxLen)\n\treturn &afb\n}",
"func (kvstore *KVStore) init_db() {\n\t/*\n\t\tdifference between make and new?\n\t\t1. make(T) always return type T; new(T) returns type *T\n\t\t2. new works for all types,\n\tand dynamically allocates space for a variable of that type,\n\tinitialized to the zero value of that type, and returns a pointer to it;\n\t\t make works as a kind of \"constructor\" for certain bult-in types(\n\tslice, map, channel)\n\t*/\n\tfmt.Println(\"initializing kvstore\")\n\tkvstore.kvstore = make(map[string][]byte)\n}",
"func NewBufferFromString(text string) *Buffer {\n\treturn NewBuffer(strings.NewReader(text), int64(len(text)), \"1.msg\", nil)\n}",
"func NewMemoryStorage() *MemoryStorage {\n return &MemoryStorage{tickets: make(map[string]*types.Ticket)}\n}",
"func NewBuffer(size int) *Buffer {\n\treturn &Buffer{size: size, tail: 0, head: 0, buf: make([]byte, size)}\n}",
"func (src *Source) SetBuffer(buf []byte) {\n\tsrc.buf = buf\n}",
"func (pool *BufferPool) New() (buf *bytes.Buffer) {\n\tselect {\n\tcase buf = <-pool.Buffers:\n\tdefault:\n\t\tbuf = &bytes.Buffer{}\n\t}\n\treturn\n}",
"func (t *table) allocateBuffer(l int) *colReader {\n\tif t.colBufs == nil || atomic.LoadInt64(&t.colBufs.refCount) > 0 {\n\t\t// The current buffer is still being used so we should\n\t\t// generate a new one.\n\t\tt.colBufs = &colReader{\n\t\t\tkey: t.key,\n\t\t\tcolMeta: t.cols,\n\t\t\tcols: make([]array.Interface, len(t.cols)),\n\t\t}\n\t}\n\tt.colBufs.refCount = 1\n\tt.colBufs.l = l\n\treturn t.colBufs\n}",
"func NewPtrBuffer(aSlice interface{}) *Buffer {\n aSliceValue := sliceValue(aSlice, true)\n return &Buffer{\n buffer: aSliceValue,\n handler: overwriteNilPtrHandler{\n creater: newCreaterFunc(nil, aSliceValue.Type())}}\n}",
"func newObject(db *StateDB, key math.Hash, data meta.Account) *StateObject {\n\treturn &StateObject{\n\t\tdb: db,\n\t\tkey: key,\n\t\tdata: data,\n\t\toriginStorage: make(Storage),\n\t\tdirtyStorage: make(Storage),\n\t}\n}",
"func NewDistributedObjectWithData() {}",
"func New() *ListBuffer {\n\tbuf := new(ListBuffer)\n\tbuf.Init()\n\treturn buf\n}",
"func TestGetSingletonBuffer(t *testing.T) {\n\tasserter := assert.New(t)\n\tbuffer := GetBuffer()\n\tbuf := buffer.(*TaskBuffer)\n\tasserter.Equal(100, cap(buf.Channel))\n\tasserter.Equal(10, buf.Tx)\n}",
"func (geom Geometry) Buffer(distance float64, segments int) Geometry {\n\tnewGeom := C.OGR_G_Buffer(geom.cval, C.double(distance), C.int(segments))\n\treturn Geometry{newGeom}\n}",
"func NewSafeBuffer() *SafeBuffer {\n\treturn &SafeBuffer{\n\t\tb: bytes.NewBuffer(nil),\n\t\tm: sync.RWMutex{},\n\t}\n}",
"func newBufferPool() *bufferPool {\n\treturn &bufferPool{&sync.Pool{\n\t\tNew: func() interface{} {\n\t\t\treturn &bytes.Buffer{}\n\t\t},\n\t}}\n}",
"func (b *BadgerStore) init(dir string) error {\n\n\topts := badger.DefaultOptions(dir)\n\tif dir == \"\" {\n\t\topts = opts.WithInMemory(true)\n\t}\n\topts.Logger = &common.NoopLogger{}\n\tdb, err := badger.Open(opts)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to open database\")\n\t}\n\n\t// Set the database\n\tb.db = db\n\n\t// Initialize the default transaction that auto commits\n\t// on success ops or discards on failure.\n\t// It also enables the renewal of the underlying transaction\n\t// after executing a read/write operation\n\tb.Tx = NewTx(db, true, true)\n\n\treturn nil\n}",
"func NewBuilder(buffer []byte) *Builder {\n\treturn &Builder{\n\t\tresult: buffer,\n\t}\n}",
"func TestNewTaskBuffer(t *testing.T) {\n\tasserter := assert.New(t)\n\tbuffer := NewTaskBuffer(10, 1)\n\n\tbuf := buffer.(*TaskBuffer)\n\tasserter.Equal(cap(buf.Channel), 10)\n\tasserter.Equal(buf.Tx, 1)\n}",
"func NewCompactableBuffer(config *CompatbleBufferConfig) (*CompactableBuffer, error) {\n\taddressableBuffer, err := newAddressableBuffer(config.BufferConfig)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Failed to create compatable buffer - unable to create addressable buffer %v\", err)\n\t}\n\tcompatableBuffer, err := loadCompactingBufferIdNeeded(config.BufferConfig)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Failed to create compatable buffer, %v\", err)\n\t}\n\n\tresult := &CompactableBuffer{config: config}\n\tresult.setReadableBuffer(addressableBuffer)\n\tresult.setWritableBuffer(addressableBuffer)\n\n\tif compatableBuffer != nil && compatableBuffer.Count() > 0 {\n\t\terr = result.compactWithBuffer(compatableBuffer)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"Failed to create buffer - unable compact %v\", err)\n\t\t}\n\t}\n\n\tresult.count = int64(addressableBuffer.Count())\n\tresult.removedCount = addressableBuffer.removedCount\n\tresult.dataSize = addressableBuffer.dataSize\n\tresult.entrySize = addressableBuffer.entrySize\n\tresult.compactionWaitGroup = &sync.WaitGroup{}\n\tresult.notification = make(chan bool, 1)\n\tresult.manageAutoCompaction()\n\n\treturn result, nil\n}",
"func init() {\n\tMemory = &memoryStorage{\n\t\ttraces: make(map[string]tracer.Trace),\n\t\tservices: make(map[string]string),\n\t\tserviceDeps: make(map[string]*tracer.Dependencies),\n\t}\n}",
"func newStorage() *storage {\n\tr := make(map[string][]byte)\n\treturn &storage{\n\t\trepository: r,\n\t}\n}",
"func (p *MemProvider) Init(maxLifetime int64, _ string) error {\n\tp.lock.Lock()\n\tp.list = list.New()\n\tp.data = make(map[string]*list.Element)\n\tp.maxLifetime = maxLifetime\n\tp.lock.Unlock()\n\treturn nil\n}",
"func (r *DBReader) SetBuffer(buffer io.Reader) {\n\tr.buffer = buffer\n}"
] | [
"0.6349263",
"0.6268834",
"0.6243279",
"0.6195426",
"0.61694556",
"0.601713",
"0.5932492",
"0.5875461",
"0.5873252",
"0.58522457",
"0.5843238",
"0.58288723",
"0.5812875",
"0.568097",
"0.5639489",
"0.56271803",
"0.5614063",
"0.5613734",
"0.5606573",
"0.55912125",
"0.5575135",
"0.55185974",
"0.5516182",
"0.5504612",
"0.5484755",
"0.546681",
"0.54478246",
"0.54478216",
"0.54390895",
"0.54377943",
"0.5432694",
"0.54238373",
"0.538923",
"0.5387694",
"0.5373588",
"0.5371471",
"0.5370617",
"0.5353238",
"0.53506994",
"0.5343122",
"0.5337203",
"0.5336538",
"0.5330275",
"0.53042126",
"0.5299109",
"0.52942294",
"0.528354",
"0.5283134",
"0.5275738",
"0.5271442",
"0.5266598",
"0.5261546",
"0.5254997",
"0.5232468",
"0.52298075",
"0.5229553",
"0.5217864",
"0.5191744",
"0.51869196",
"0.51857007",
"0.5173917",
"0.517207",
"0.51707196",
"0.51657134",
"0.5163538",
"0.51578736",
"0.5142169",
"0.51415724",
"0.5139734",
"0.51270914",
"0.5120826",
"0.51199836",
"0.5110191",
"0.5104892",
"0.5101042",
"0.5089884",
"0.5070503",
"0.50694305",
"0.5060653",
"0.5056164",
"0.50517243",
"0.50503725",
"0.5045627",
"0.5045109",
"0.5034378",
"0.5029515",
"0.5021194",
"0.499628",
"0.49943373",
"0.4987921",
"0.49807605",
"0.49639478",
"0.49623626",
"0.49597922",
"0.493431",
"0.4931683",
"0.4923737",
"0.4916679",
"0.49129346",
"0.4900499",
"0.4891589"
] | 0.0 | -1 |
Parameter clientBuffer has type C.GLeglClientBufferEXT. | func BufferStorageExternalEXT(target uint32, offset int, size int, clientBuffer unsafe.Pointer, flags uint32) {
C.glowBufferStorageExternalEXT(gpBufferStorageExternalEXT, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(size), (C.GLeglClientBufferEXT)(clientBuffer), (C.GLbitfield)(flags))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n C.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func TextureBuffer(texture uint32, internalformat uint32, buffer uint32) {\n\tC.glowTextureBuffer(gpTextureBuffer, (C.GLuint)(texture), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func TextureBuffer(texture uint32, internalformat uint32, buffer uint32) {\n\tC.glowTextureBuffer(gpTextureBuffer, (C.GLuint)(texture), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n\tC.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func TexBuffer(target uint32, internalformat uint32, buffer uint32) {\n\tC.glowTexBuffer(gpTexBuffer, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLuint)(buffer))\n}",
"func TextureBuffer(texture uint32, internalformat uint32, buffer uint32) {\n\tsyscall.Syscall(gpTextureBuffer, 3, uintptr(texture), uintptr(internalformat), uintptr(buffer))\n}",
"func DeleteBuffer(v Buffer) {\n\tgl.DeleteBuffers(1, &v.Value)\n}",
"func DrawBuffer(buf uint32) {\n\tC.glowDrawBuffer(gpDrawBuffer, (C.GLenum)(buf))\n}",
"func DrawBuffer(buf uint32) {\n\tC.glowDrawBuffer(gpDrawBuffer, (C.GLenum)(buf))\n}",
"func IsBuffer(buffer uint32) bool {\n ret := C.glowIsBuffer(gpIsBuffer, (C.GLuint)(buffer))\n return ret == TRUE\n}",
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tsyscall.Syscall(gpVertexArrayElementBuffer, 2, uintptr(vaobj), uintptr(buffer), 0)\n}",
"func InvalidateBufferData(buffer uint32) {\n C.glowInvalidateBufferData(gpInvalidateBufferData, (C.GLuint)(buffer))\n}",
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tC.glowVertexArrayElementBuffer(gpVertexArrayElementBuffer, (C.GLuint)(vaobj), (C.GLuint)(buffer))\n}",
"func VertexArrayElementBuffer(vaobj uint32, buffer uint32) {\n\tC.glowVertexArrayElementBuffer(gpVertexArrayElementBuffer, (C.GLuint)(vaobj), (C.GLuint)(buffer))\n}",
"func IsBuffer(buffer uint32) bool {\n\tret := C.glowIsBuffer(gpIsBuffer, (C.GLuint)(buffer))\n\treturn ret == TRUE\n}",
"func IsBuffer(buffer uint32) bool {\n\tret := C.glowIsBuffer(gpIsBuffer, (C.GLuint)(buffer))\n\treturn ret == TRUE\n}",
"func DrawBuffer(buf uint32) {\n\tsyscall.Syscall(gpDrawBuffer, 1, uintptr(buf), 0, 0)\n}",
"func IsBuffer(buffer Uint) Boolean {\n\tcbuffer, _ := (C.GLuint)(buffer), cgoAllocsUnknown\n\t__ret := C.glIsBuffer(cbuffer)\n\t__v := (Boolean)(__ret)\n\treturn __v\n}",
"func ReadBuffer(src uint32) {\n\tC.glowReadBuffer(gpReadBuffer, (C.GLenum)(src))\n}",
"func ReadBuffer(src uint32) {\n\tC.glowReadBuffer(gpReadBuffer, (C.GLenum)(src))\n}",
"func SelectBuffer(size int32, buffer *uint32) {\n C.glowSelectBuffer(gpSelectBuffer, (C.GLsizei)(size), (*C.GLuint)(unsafe.Pointer(buffer)))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n C.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func (g *Gaffer) AddBuffer(u *Update) {\n\n\tfor _, v := range u.entities {\n\t\tg.AddEntity(v)\n\t}\n\n\tfor _, v := range u.edges {\n\t\tg.AddEdge(v)\n\t}\n\n}",
"func DrawBuffer(mode uint32) {\n C.glowDrawBuffer(gpDrawBuffer, (C.GLenum)(mode))\n}",
"func GenBuffer() Buffer {\n\tvar b gl.Uint\n\tgl.GenBuffers(1, &b)\n\treturn Buffer(b)\n}",
"func BufferData(target Enum, size Sizeiptr, data unsafe.Pointer, usage Enum) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcsize, _ := (C.GLsizeiptr)(size), cgoAllocsUnknown\n\tcdata, _ := (unsafe.Pointer)(unsafe.Pointer(data)), cgoAllocsUnknown\n\tcusage, _ := (C.GLenum)(usage), cgoAllocsUnknown\n\tC.glBufferData(ctarget, csize, cdata, cusage)\n}",
"func BufferInit(target Enum, size int, usage Enum) {\n\tgl.BufferData(uint32(target), size, nil, uint32(usage))\n}",
"func (native *OpenGL) BindBuffer(target uint32, buffer uint32) {\n\tgl.BindBuffer(target, buffer)\n}",
"func GetBufferPointerv(target uint32, pname uint32, params *unsafe.Pointer) {\n C.glowGetBufferPointerv(gpGetBufferPointerv, (C.GLenum)(target), (C.GLenum)(pname), params)\n}",
"func SelectBuffer(size int32, buffer *uint32) {\n\tC.glowSelectBuffer(gpSelectBuffer, (C.GLsizei)(size), (*C.GLuint)(unsafe.Pointer(buffer)))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tC.glowBindBuffer(gpBindBuffer, (C.GLenum)(target), (C.GLuint)(buffer))\n}",
"func CreateBuffer() Buffer {\n\tvar b Buffer\n\tgl.GenBuffers(1, &b.Value)\n\treturn b\n}",
"func GetBufferPointerv(target uint32, pname uint32, params *unsafe.Pointer) {\n\tC.glowGetBufferPointerv(gpGetBufferPointerv, (C.GLenum)(target), (C.GLenum)(pname), params)\n}",
"func GetBufferPointerv(target uint32, pname uint32, params *unsafe.Pointer) {\n\tC.glowGetBufferPointerv(gpGetBufferPointerv, (C.GLenum)(target), (C.GLenum)(pname), params)\n}",
"func BufferSubData(target Enum, offset Intptr, size Sizeiptr, data unsafe.Pointer) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcoffset, _ := (C.GLintptr)(offset), cgoAllocsUnknown\n\tcsize, _ := (C.GLsizeiptr)(size), cgoAllocsUnknown\n\tcdata, _ := (unsafe.Pointer)(unsafe.Pointer(data)), cgoAllocsUnknown\n\tC.glBufferSubData(ctarget, coffset, csize, cdata)\n}",
"func IsBuffer(b Buffer) bool {\n\treturn gl.IsBuffer(b.Value)\n}",
"func ReadBuffer(src uint32) {\n\tsyscall.Syscall(gpReadBuffer, 1, uintptr(src), 0, 0)\n}",
"func BindBuffer(target Enum, b Buffer) {\n\tgl.BindBuffer(uint32(target), b.Value)\n}",
"func BufferStorageExternalEXT(target uint32, offset int, size int, clientBuffer unsafe.Pointer, flags uint32) {\n\tsyscall.Syscall6(gpBufferStorageExternalEXT, 5, uintptr(target), uintptr(offset), uintptr(size), uintptr(clientBuffer), uintptr(flags), 0)\n}",
"func BufferData(target uint32, size int, data unsafe.Pointer, usage uint32) {\n C.glowBufferData(gpBufferData, (C.GLenum)(target), (C.GLsizeiptr)(size), data, (C.GLenum)(usage))\n}",
"func (native *OpenGL) BufferData(target uint32, size int, data interface{}, usage uint32) {\n\tdataPtr, isPtr := data.(unsafe.Pointer)\n\tif isPtr {\n\t\tgl.BufferData(target, size, dataPtr, usage)\n\t} else {\n\t\tgl.BufferData(target, size, gl.Ptr(data), usage)\n\t}\n}",
"func BindBuffer(target Enum, buffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcbuffer, _ := (C.GLuint)(buffer), cgoAllocsUnknown\n\tC.glBindBuffer(ctarget, cbuffer)\n}",
"func NewWlBuffer(c *wire.Conn) *WlBuffer {\n\treturn NewWlBufferWithID(c, c.NewID())\n}",
"func InvalidateBufferData(buffer uint32) {\n\tC.glowInvalidateBufferData(gpInvalidateBufferData, (C.GLuint)(buffer))\n}",
"func InvalidateBufferData(buffer uint32) {\n\tC.glowInvalidateBufferData(gpInvalidateBufferData, (C.GLuint)(buffer))\n}",
"func NewBufferGeometry() BufferGeometry {\n\treturn NewBufferGeometryFromJSValue(\n\t\tThreejs(\"BufferGeometry\").New(),\n\t)\n}",
"func ReadBuffer(mode uint32) {\n C.glowReadBuffer(gpReadBuffer, (C.GLenum)(mode))\n}",
"func (n *BufferView) AddBuffer() *Buffer {\n\tn.UnfocusBuffers()\n\n\tconf := n.conf\n\tc := NewBuffer(conf, BufferConfig{\n\t\tconf.Theme.Background,\n\t\tconf.Theme.Foreground,\n\t\tconf.Theme.Cursor,\n\t\tconf.Theme.CursorInvert,\n\t\tconf.Theme.HighlightLineBackground,\n\t\tconf.Theme.GutterBackground,\n\t\tconf.Theme.GutterForeground,\n\t\tgui.GetDefaultFont(),\n\t}, n, len(n.buffers))\n\n\tc.SetFocus(true)\n\n\tw, h := n.GetSize()\n\n\tn.focusedBuff = c.index\n\tn.buffers = append(n.buffers, NewBufferPane(c))\n\tn.Resize(w, h)\n\n\treturn c\n}",
"func GetBufferParameteriv(target uint32, pname uint32, params *int32) {\n C.glowGetBufferParameteriv(gpGetBufferParameteriv, (C.GLenum)(target), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func PushClientAttrib(mask uint32) {\n\tC.glowPushClientAttrib(gpPushClientAttrib, (C.GLbitfield)(mask))\n}",
"func (gl *WebGL) BindBuffer(target GLEnum, buffer WebGLBuffer) {\n\tgl.context.Call(\"bindBuffer\", target, buffer)\n}",
"func (gl *WebGL) NewBuffer(target GLEnum, data interface{}, usage GLEnum) WebGLBuffer {\n\tbuffer := gl.CreateBuffer()\n\tgl.BindBuffer(target, buffer)\n\tgl.BufferData(target, data, usage)\n\treturn buffer\n}",
"func (gl *WebGL) BufferData(target GLEnum, data interface{}, usage GLEnum) {\n\tvalues := sliceToTypedArray(data)\n\tgl.context.Call(\"bufferData\", target, values, usage)\n}",
"func InvalidateBufferSubData(buffer uint32, offset int, length int) {\n C.glowInvalidateBufferSubData(gpInvalidateBufferSubData, (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n NewEnumsModel(buffer),\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyEnumsFunc(func(model *EnumsModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func Buffer() string {\n\treturn C.GoString(C.rl_line_buffer)\n}",
"func PushClientAttrib(mask uint32) {\n C.glowPushClientAttrib(gpPushClientAttrib, (C.GLbitfield)(mask))\n}",
"func NamedBufferStorageExternalEXT(buffer uint32, offset int, size int, clientBuffer unsafe.Pointer, flags uint32) {\n\tC.glowNamedBufferStorageExternalEXT(gpNamedBufferStorageExternalEXT, (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(size), (C.GLeglClientBufferEXT)(clientBuffer), (C.GLbitfield)(flags))\n}",
"func (debugging *debuggingOpenGL) BufferData(target uint32, size int, data interface{}, usage uint32) {\n\tdebugging.recordEntry(\"BufferData\", target, size, data, usage)\n\tdebugging.gl.BufferData(target, size, data, usage)\n\tdebugging.recordExit(\"BufferData\")\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n C.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func GetBufferParameteriv(target uint32, pname uint32, params *int32) {\n\tC.glowGetBufferParameteriv(gpGetBufferParameteriv, (C.GLenum)(target), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func GetBufferParameteriv(target uint32, pname uint32, params *int32) {\n\tC.glowGetBufferParameteriv(gpGetBufferParameteriv, (C.GLenum)(target), (C.GLenum)(pname), (*C.GLint)(unsafe.Pointer(params)))\n}",
"func FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n C.glowFramebufferRenderbuffer(gpFramebufferRenderbuffer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLenum)(renderbuffertarget), (C.GLuint)(renderbuffer))\n}",
"func (gl *WebGL) CreateBuffer() WebGLBuffer {\n\treturn WebGLBuffer(gl.context.Call(\"createBuffer\"))\n}",
"func (debugging *debuggingOpenGL) BindBuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindBuffer\", target, buffer)\n\tdebugging.gl.BindBuffer(target, buffer)\n\tdebugging.recordExit(\"BindBuffer\")\n}",
"func BindBufferBase(target uint32, index uint32, buffer uint32) {\n C.glowBindBufferBase(gpBindBufferBase, (C.GLenum)(target), (C.GLuint)(index), (C.GLuint)(buffer))\n}",
"func BufferData(target uint32, size int, data unsafe.Pointer, usage uint32) {\n\tC.glowBufferData(gpBufferData, (C.GLenum)(target), (C.GLsizeiptr)(size), data, (C.GLenum)(usage))\n}",
"func BufferData(target uint32, size int, data unsafe.Pointer, usage uint32) {\n\tC.glowBufferData(gpBufferData, (C.GLenum)(target), (C.GLsizeiptr)(size), data, (C.GLenum)(usage))\n}",
"func GetBufferParameteriv(target Enum, pname Enum, params []Int) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcpname, _ := (C.GLenum)(pname), cgoAllocsUnknown\n\tcparams, _ := (*C.GLint)(unsafe.Pointer((*sliceHeader)(unsafe.Pointer(¶ms)).Data)), cgoAllocsUnknown\n\tC.glGetBufferParameteriv(ctarget, cpname, cparams)\n}",
"func GetBufferPointerv(target uint32, pname uint32, params *unsafe.Pointer) {\n\tsyscall.Syscall(gpGetBufferPointerv, 3, uintptr(target), uintptr(pname), uintptr(unsafe.Pointer(params)))\n}",
"func BufferStorage(target uint32, size int, data unsafe.Pointer, flags uint32) {\n C.glowBufferStorage(gpBufferStorage, (C.GLenum)(target), (C.GLsizeiptr)(size), data, (C.GLbitfield)(flags))\n}",
"func (c *ChromaHighlight) ClrBuffer() {\n\tswitch c.srcBuff {\n\tcase nil:\n\t\tc.txtBuff.Delete(c.txtBuff.GetStartIter(), c.txtBuff.GetEndIter())\n\tdefault:\n\t\tc.srcBuff.Delete(c.srcBuff.GetStartIter(), c.srcBuff.GetEndIter())\n\t}\n}",
"func BindBuffer(target uint32, buffer uint32) {\n\tsyscall.Syscall(gpBindBuffer, 2, uintptr(target), uintptr(buffer), 0)\n}",
"func GetBufferSubData(target uint32, offset int, size int, data unsafe.Pointer) {\n C.glowGetBufferSubData(gpGetBufferSubData, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(size), data)\n}",
"func BufferData(target Enum, src []byte, usage Enum) {\n\tgl.BufferData(uint32(target), int(len(src)), gl.Ptr(&src[0]), uint32(usage))\n}",
"func MapBuffer(target uint32, access uint32) unsafe.Pointer {\n\tret := C.glowMapBuffer(gpMapBuffer, (C.GLenum)(target), (C.GLenum)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func MapBuffer(target uint32, access uint32) unsafe.Pointer {\n\tret := C.glowMapBuffer(gpMapBuffer, (C.GLenum)(target), (C.GLenum)(access))\n\treturn (unsafe.Pointer)(ret)\n}",
"func GetNamedBufferPointerv(buffer uint32, pname uint32, params *unsafe.Pointer) {\n\tC.glowGetNamedBufferPointerv(gpGetNamedBufferPointerv, (C.GLuint)(buffer), (C.GLenum)(pname), params)\n}",
"func GetNamedBufferPointerv(buffer uint32, pname uint32, params *unsafe.Pointer) {\n\tC.glowGetNamedBufferPointerv(gpGetNamedBufferPointerv, (C.GLuint)(buffer), (C.GLenum)(pname), params)\n}",
"func (geom Geometry) Buffer(distance float64, segments int) Geometry {\n\tnewGeom := C.OGR_G_Buffer(geom.cval, C.double(distance), C.int(segments))\n\treturn Geometry{newGeom}\n}",
"func MapBuffer(target uint32, access uint32) unsafe.Pointer {\n ret := C.glowMapBuffer(gpMapBuffer, (C.GLenum)(target), (C.GLenum)(access))\n return (unsafe.Pointer)(ret)\n}",
"func VertexArrayVertexBuffers(vaobj uint32, first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tsyscall.Syscall6(gpVertexArrayVertexBuffers, 6, uintptr(vaobj), uintptr(first), uintptr(count), uintptr(unsafe.Pointer(buffers)), uintptr(unsafe.Pointer(offsets)), uintptr(unsafe.Pointer(strides)))\n}",
"func IsBuffer(buffer uint32) bool {\n\tret, _, _ := syscall.Syscall(gpIsBuffer, 1, uintptr(buffer), 0, 0)\n\treturn ret != 0\n}",
"func BindVertexBuffer(bindingindex uint32, buffer uint32, offset int, stride int32) {\n C.glowBindVertexBuffer(gpBindVertexBuffer, (C.GLuint)(bindingindex), (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizei)(stride))\n}",
"func NewBufferBuilder() *BufferBuilder {\n\treturn &BufferBuilder{}\n}",
"func (b *VBO) GenBuffer() {\n\tgl.GenBuffers(1, &b.vboID)\n\tb.genBound = true\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func BindVertexBuffers(first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowBindVertexBuffers(gpBindVertexBuffers, (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func VertexArrayVertexBuffers(vaobj uint32, first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowVertexArrayVertexBuffers(gpVertexArrayVertexBuffers, (C.GLuint)(vaobj), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func VertexArrayVertexBuffers(vaobj uint32, first uint32, count int32, buffers *uint32, offsets *int, strides *int32) {\n\tC.glowVertexArrayVertexBuffers(gpVertexArrayVertexBuffers, (C.GLuint)(vaobj), (C.GLuint)(first), (C.GLsizei)(count), (*C.GLuint)(unsafe.Pointer(buffers)), (*C.GLintptr)(unsafe.Pointer(offsets)), (*C.GLsizei)(unsafe.Pointer(strides)))\n}",
"func RenderbufferStorage(target uint32, internalformat uint32, width int32, height int32) {\n C.glowRenderbufferStorage(gpRenderbufferStorage, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLsizei)(width), (C.GLsizei)(height))\n}",
"func GetBufferSubData(target uint32, offset int, size int, data unsafe.Pointer) {\n\tC.glowGetBufferSubData(gpGetBufferSubData, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(size), data)\n}",
"func GetBufferSubData(target uint32, offset int, size int, data unsafe.Pointer) {\n\tC.glowGetBufferSubData(gpGetBufferSubData, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(size), data)\n}",
"func NewBuffer(e []byte) *Buffer {\n\treturn &Buffer{buf: e}\n}",
"func GetBufferParameteriv(target gl.Enum, pname gl.Enum, params []int32) {\n\tgl.GetBufferParameteriv(gl.Enum(target), gl.Enum(pname), (*gl.Int)(¶ms[0]))\n}",
"func (self Source) SetBuffer(buffer Buffer) {\n\tself.Seti(AlBuffer, int32(buffer))\n}",
"func GetBufferParameteriv(target uint32, pname uint32, params *int32) {\n\tsyscall.Syscall(gpGetBufferParameteriv, 3, uintptr(target), uintptr(pname), uintptr(unsafe.Pointer(params)))\n}",
"func FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n\tC.glowFramebufferRenderbuffer(gpFramebufferRenderbuffer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLenum)(renderbuffertarget), (C.GLuint)(renderbuffer))\n}",
"func FramebufferRenderbuffer(target uint32, attachment uint32, renderbuffertarget uint32, renderbuffer uint32) {\n\tC.glowFramebufferRenderbuffer(gpFramebufferRenderbuffer, (C.GLenum)(target), (C.GLenum)(attachment), (C.GLenum)(renderbuffertarget), (C.GLuint)(renderbuffer))\n}"
] | [
"0.6129812",
"0.60769343",
"0.60769343",
"0.606016",
"0.606016",
"0.59901834",
"0.5953861",
"0.5930928",
"0.5930928",
"0.590757",
"0.58745706",
"0.58521396",
"0.5824595",
"0.5824595",
"0.5820871",
"0.5820871",
"0.57433826",
"0.57313323",
"0.57080144",
"0.57080144",
"0.5680093",
"0.5664739",
"0.5586729",
"0.5585857",
"0.55144423",
"0.55023044",
"0.5491913",
"0.5473813",
"0.5473327",
"0.5426546",
"0.5411231",
"0.5411231",
"0.54109997",
"0.535774",
"0.535774",
"0.5346246",
"0.5336465",
"0.5325162",
"0.53194696",
"0.53182536",
"0.529881",
"0.5297673",
"0.52953386",
"0.5293211",
"0.5289395",
"0.5289395",
"0.5265549",
"0.5264614",
"0.5264122",
"0.5235758",
"0.52324444",
"0.5217576",
"0.52166593",
"0.5204886",
"0.52040434",
"0.5179893",
"0.517761",
"0.51748055",
"0.5157707",
"0.5155246",
"0.5149377",
"0.51470286",
"0.51470286",
"0.5142462",
"0.51368713",
"0.5127021",
"0.51124066",
"0.5097023",
"0.5097023",
"0.50793546",
"0.5075377",
"0.5064274",
"0.50519174",
"0.5042235",
"0.50397474",
"0.503294",
"0.50255847",
"0.50255847",
"0.5017307",
"0.5017307",
"0.5010855",
"0.49874312",
"0.49845517",
"0.49843925",
"0.4969154",
"0.4953454",
"0.49456453",
"0.49422932",
"0.49422932",
"0.49382502",
"0.49382502",
"0.49255368",
"0.49243832",
"0.49243832",
"0.4904163",
"0.48949006",
"0.48946688",
"0.48940602",
"0.48847577",
"0.48847577"
] | 0.55824924 | 24 |
updates a subset of a buffer object's data store | func BufferSubData(target uint32, offset int, size int, data unsafe.Pointer) {
C.glowBufferSubData(gpBufferSubData, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(size), data)
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (b *CompactableBuffer) Update(address *EntryAddress, data []byte) error {\n\taddress.LockForWrite()\n\tdefer address.UnlockWrite()\n\theader, err := b.ReadHeader(address)\n\tif err != nil {\n\t\treturn err\n\t}\n\tbeforeUpdataDataSize := header.dataSize\n\tafterUpdateDataSize := len(data) + VarIntSize(len(data))\n\tdataSizeDelta := afterUpdateDataSize - int(beforeUpdataDataSize)\n\n\tremainingSpace := int(header.entrySize) - reservedSize - afterUpdateDataSize\n\theader.dataSize = int64(afterUpdateDataSize)\n\tif remainingSpace <= 0 {\n\t\tatomic.AddInt64(&b.dataSize, int64(-beforeUpdataDataSize))\n\t\tatomic.AddInt64(&b.entrySize, int64(-header.entrySize))\n\t\treturn b.expand(address, data)\n\t}\n\n\tatomic.AddInt64(&b.dataSize, int64(dataSizeDelta))\n\tvar target = make([]byte, 0)\n\tAppendToBytes(data, &target)\n\tif len(target) > int(header.dataSize) {\n\t\treturn io.EOF\n\t}\n\twritableBuffer := b.writableBuffer()\n\t_, err = writableBuffer.Write(address.Position()+reservedSize, target...)\n\treturn err\n}",
"func BufferSubData(target uint32, offset int, size int, data unsafe.Pointer) {\n\tsyscall.Syscall6(gpBufferSubData, 4, uintptr(target), uintptr(offset), uintptr(size), uintptr(data), 0, 0)\n}",
"func (d *OneToOne) Set(data GenericDataType) {\n\tidx := d.writeIndex % uint64(len(d.buffer))\n\n\tnewBucket := &bucket{\n\t\tdata: data,\n\t\tseq: d.writeIndex,\n\t}\n\td.writeIndex++\n\n\tatomic.StorePointer(&d.buffer[idx], unsafe.Pointer(newBucket))\n}",
"func BufferSubData(target Enum, offset int, data []byte) {\n\tgl.BufferSubData(uint32(target), offset, int(len(data)), gl.Ptr(&data[0]))\n}",
"func (b *Buffer) updateFirst(fsize uint64) {\n\tif b.biggest == 0 {\n\t\t// Just starting out, no need to update.\n\t\treturn\n\t}\n\n\tvar (\n\t\tstart = b.last % b.capacity\n\t\tend = (start + fsize) % b.capacity\n\t\twrapping = end <= start\n\t)\n\n\tif start == end {\n\t\tb.length = 0\n\t\tb.first = b.last\n\t\treturn\n\t}\n\n\tfor {\n\t\tif b.first == b.last {\n\t\t\t// b can fit only the new incoming record.\n\t\t\treturn\n\t\t}\n\n\t\tfirstWrapped := b.first % b.capacity\n\n\t\tif wrapping {\n\t\t\tif end <= firstWrapped && firstWrapped < start {\n\t\t\t\treturn\n\t\t\t}\n\t\t} else {\n\t\t\tif end <= firstWrapped {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif start > firstWrapped {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\tsecond := b.nextFrameOffset(b.first)\n\t\tb.length -= (second - b.first - total)\n\t\tb.first = second\n\n\t\t// May need to discard multiple records at the begining.\n\t}\n}",
"func InvalidateBufferSubData(buffer uint32, offset int, length int) {\n C.glowInvalidateBufferSubData(gpInvalidateBufferSubData, (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func InvalidateBufferSubData(buffer uint32, offset int, length int) {\n\tsyscall.Syscall(gpInvalidateBufferSubData, 3, uintptr(buffer), uintptr(offset), uintptr(length))\n}",
"func (gl *WebGL) BufferSubData(target GLEnum, offset int, data interface{}) {\n\tvalues := sliceToTypedArray(data)\n\tgl.context.Call(\"bufferSubData\", target, offset, values)\n}",
"func (c *Cache) recordUpdate(p *partition, bytesAdded, bytesGuessed, entriesAdded int32) {\n\t// This method is always called while p.mu is held.\n\t// The below code takes care to ensure that all bytes in c due to p are\n\t// updated appropriately.\n\n\t// NB: The loop and atomics are used because p.size can be modified\n\t// concurrently to calls to recordUpdate. In all cases where p.size is updated\n\t// outside of this function occur while c.mu is held inside of c.Add. These\n\t// occur when either:\n\t//\n\t// 1) a new write adds its guessed write size to p\n\t// 2) p is evicted to make room for a write\n\t//\n\t// Thus p.size is either increasing or becomes evicted while we attempt to\n\t// record the update to p. Once p is evicted it stays evicted forever.\n\t// These facts combine to ensure that p.size never becomes negative from the\n\t// below call to add.\n\n\tdelta := bytesAdded - bytesGuessed\n\tfor {\n\t\tcurSize := p.loadSize()\n\t\tif curSize == evicted {\n\t\t\treturn\n\t\t}\n\t\tnewSize := curSize.add(delta, entriesAdded)\n\t\tif updated := p.setSize(curSize, newSize); updated {\n\t\t\tc.updateGauges(c.addBytes(delta), c.addEntries(entriesAdded))\n\t\t\treturn\n\t\t}\n\t}\n}",
"func (s *EntityStorage) update() {\n\ts.outdated = false\n\ts.occupied = s.occupied[:0]\n\tl := len(s.vec)\n\tfor i := 0; i < l; i++ {\n\t\tif s.vec[i].occupied {\n\t\t\ts.occupied = append(s.occupied, i)\n\t\t}\n\t}\n}",
"func (s *IndexablePartitionClockStorage) update(clock PartitionClock) {\n\ts.VbNos = s.VbNos[:0]\n\ts.Seqs = s.Seqs[:0]\n\tfor vb, seq := range clock {\n\t\ts.VbNos = append(s.VbNos, vb)\n\t\ts.Seqs = append(s.Seqs, seq)\n\t}\n}",
"func (b *BufferManager) SetBuffer(peer *PeerSession) {\n\tb.lock.Lock()\n\tdefer b.lock.Unlock()\n\toffset, ok := b.freeIndex.TryDequeue()\n\tif ok {\n\t\tpeer.bufferOffst = offset.(int64)\n\t\tpeer.buffers = b.buffers[peer.bufferOffst : peer.bufferOffst+int64(b.bufferSize)]\n\t} else {\n\t\tif b.totalBytes-int64(b.bufferSize) < b.currentIndex {\n\t\t\tpeer.buffers = make([]byte, b.bufferSize)\n\t\t\tpeer.bufferOffst = -1\n\t\t\t//The buffer pool is empty.\n\t\t\t//return false\n\t\t} else {\n\t\t\tpeer.bufferOffst = b.currentIndex\n\t\t\tpeer.buffers = b.buffers[peer.bufferOffst : peer.bufferOffst+int64(b.bufferSize)]\n\t\t\tb.currentIndex += int64(b.bufferSize)\n\t\t}\n\t}\n\t//return true\n}",
"func putBufferMessageIntoReadStorage(s *server, ConnID int) {\n\tfor s.clientMap[ConnID].clientBufferQueue.Len() > 0 && s.clientMap[ConnID].clientSequenceMap == s.clientMap[ConnID].clientBufferQueue.Front().Value.(Message).SeqNum {\n\t\tfront := s.clientMap[ConnID].clientBufferQueue.Front()\n\t\ts.clientMap[ConnID].clientBufferQueue.Remove(front)\n\t\tmsg := front.Value.(Message)\n\t\ts.readStorage.PushBack(msg)\n\t\ts.clientMap[ConnID].clientSequenceMap = s.clientMap[ConnID].clientSequenceMap + 1\n\t}\n}",
"func (t *Terminal) updateBuffer() {\n\tcopy(termbox.CellBuffer(), t.buffer)\n\tif err := termbox.Flush(); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}",
"func (b *Buffer) update() {\n\tb.NumLines = len(b.lines)\n}",
"func (g *gcm) update(y *gcmFieldElement, data []byte) {\n\tfullBlocks := (len(data) >> 4) << 4\n\tg.updateBlocks(y, data[:fullBlocks])\n\n\tif len(data) != fullBlocks {\n\t\tvar partialBlock [gcmBlockSize]byte\n\t\tcopy(partialBlock[:], data[fullBlocks:])\n\t\tg.updateBlocks(y, partialBlock[:])\n\t}\n}",
"func BufferSubData(target uint32, offset int, size int, data unsafe.Pointer) {\n C.glowBufferSubData(gpBufferSubData, (C.GLenum)(target), (C.GLintptr)(offset), (C.GLsizeiptr)(size), data)\n}",
"func (al *AudioListener) setBuffer(size int) {\n\tal.Lock()\n\tdefer al.Unlock()\n\n\tal.buffer = make([]gumble.AudioPacket, 0, size)\n}",
"func (store *Store) append(buf []byte) error {\n\t// append data uncommitted\n\t_, err := store.dataFile.Seek(store.ptr, 0)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif _, err = store.dataFile.Write(buf); err != nil {\n\t\treturn err\n\t}\n\tnewptr, err := store.dataFile.Seek(0, 1)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// start committing header\n\tif _, err = store.dataFile.Seek(0, 0); err != nil {\n\t\treturn err\n\t}\n\ttmp := [8]byte{}\n\tif _, err = store.dataFile.Read(tmp[:]); err != nil {\n\t\treturn err\n\t}\n\tflag := tmp[3]\n\tbinary.BigEndian.PutUint64(tmp[:], uint64(newptr))\n\n\tif flag == 0 {\n\t\tflag = 1\n\t\t_, err = store.dataFile.Seek(10, 0)\n\t} else {\n\t\tflag = 0\n\t\t_, err = store.dataFile.Seek(4, 0)\n\t}\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif _, err = store.dataFile.Write(tmp[2:]); err != nil {\n\t\treturn err\n\t}\n\tif _, err = store.dataFile.Seek(3, 0); err != nil {\n\t\treturn err\n\t}\n\tif _, err = store.dataFile.Write([]byte{flag}); err != nil {\n\t\treturn err\n\t}\n\n\t// all clear\n\tstore.ptr = newptr\n\treturn nil\n}",
"func (m *metricMysqlBufferPoolOperations) updateCapacity() {\n\tif m.data.Sum().DataPoints().Len() > m.capacity {\n\t\tm.capacity = m.data.Sum().DataPoints().Len()\n\t}\n}",
"func (b *Buffer) reload() error {\n\tstat, err := os.Stat(b.filename)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfsize := uint64(stat.Size())\n\tb.capacity = fsize - metadata\n\n\tf, err := os.OpenFile(b.filename, os.O_RDWR, 0600)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdata, err := syscall.Mmap(\n\t\tint(f.Fd()), 0, int(fsize),\n\t\tsyscall.PROT_READ|syscall.PROT_WRITE, syscall.MAP_SHARED,\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\tb.data = data\n\n\toff := int(b.capacity)\n\tb.first = binary.GetLittleEndianUint64(b.data, off)\n\tb.last = binary.GetLittleEndianUint64(b.data, off+8)\n\tb.nextSeq = binary.GetLittleEndianUint64(b.data, off+16)\n\tb.biggest = binary.GetLittleEndianUint32(b.data, off+24)\n\tb.length = binary.GetLittleEndianUint64(b.data, off+28)\n\n\treturn nil\n}",
"func (this *DatastoreOperations) Append(transactionBytes []byte, state *DatastoreState, commitTimestamp int64, flushAfterWrite bool, maxFlushDelay int64) (err error) {\n\t// Clone the given state into a new state object\n\tnewState := state.Clone()\n\n\t// Append the transaction to the file\n\t_, err = newState.File.WriteAt(transactionBytes, state.Size())\n\n\t// If an error occured while writing to the file\n\tif err != nil {\n\t\t// Return the error\n\t\treturn\n\t}\n\n\t// Update the index with the timestamps and offsets of the new entries\n\terr = newState.Index.AppendFromBuffer(transactionBytes, nil)\n\n\t// If an error occured while updating the index\n\tif err != nil {\n\t\t// Return the error\n\t\treturn\n\t}\n\n\t// If the datastore should be cached\n\tif this.IsCached {\n\t\t// Update its data cache\n\t\terr = newState.UpdateDataCache(bytes.NewReader(transactionBytes), 0, int64(len(transactionBytes)))\n\n\t\t// If an error has occurred while updating the cache, return\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\t// Schedule a flush, if needed.\n\tif flushAfterWrite {\n\t\tthis.ScheduleFlushIfNeeded(newState, maxFlushDelay)\n\t}\n\n\t// Atomically replace the current state object with the new state object\n\tthis.ReplaceState(newState)\n\n\t// Announce the update\n\tthis.UpdateNotifier.AnnounceUpdate(commitTimestamp)\n\n\treturn\n}",
"func (buf *ListBuffer) Set(idx BufferIndex, item Item) (*error.Error) {\n\tinRange, initialized := buf.legalIndex(idx)\n\tif !inRange {\n\t\tdesc := fmt.Sprintf(\n\t\t\t\"idx, %d, is out of range for IndexBuffer of length %d.\",\n\t\t\tidx, len(buf.Buffer),\n\t\t)\n\t\treturn error.New(error.Value, desc)\n\t} else if !initialized {\n\t\tdesc := fmt.Sprintf(\n\t\t\t\"Item at idx, %d, has the Type value Uninitialized.\", idx,\n\t\t)\n\t\treturn error.New(error.Value, desc)\n\t}\n\n\tbuf.Buffer[idx].Item = item\n\treturn nil\n}",
"func CopyBufferSubData(readTarget uint32, writeTarget uint32, readOffset int, writeOffset int, size int) {\n C.glowCopyBufferSubData(gpCopyBufferSubData, (C.GLenum)(readTarget), (C.GLenum)(writeTarget), (C.GLintptr)(readOffset), (C.GLintptr)(writeOffset), (C.GLsizeiptr)(size))\n}",
"func (p *movingAverageProcessor) addBufferData(index int, data interface{}, namespace string) error {\n\tif _, ok := p.movingAverageMap[namespace]; ok {\n\t\tif index >= len(p.movingAverageMap[namespace].movingAverageBuf) {\n\t\t\treturn errors.New(\"Incorrect value of index, trying to access non-existing element of buffer\")\n\t\t}\n\t\tp.movingAverageMap[namespace].movingAverageBuf[index] = data\n\t\treturn nil\n\t} else {\n\t\treturn errors.New(\"Namespace is not present in the map\")\n\t}\n}",
"func (t Treasure) Mutate(b []byte) error {\n\taddr, data := t.RealAddr(), t.Bytes()\n\tfor i := 0; i < 4; i++ {\n\t\tb[addr+i] = data[i]\n\t}\n\treturn nil\n}",
"func (d *adapterMemoryData) Update(key interface{}, value interface{}) (oldValue interface{}, exist bool, err error) {\n\td.mu.Lock()\n\tdefer d.mu.Unlock()\n\tif item, ok := d.data[key]; ok {\n\t\td.data[key] = adapterMemoryItem{\n\t\t\tv: value,\n\t\t\te: item.e,\n\t\t}\n\t\treturn item.v, true, nil\n\t}\n\treturn nil, false, nil\n}",
"func BufferSubData(target Enum, offset Intptr, size Sizeiptr, data unsafe.Pointer) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcoffset, _ := (C.GLintptr)(offset), cgoAllocsUnknown\n\tcsize, _ := (C.GLsizeiptr)(size), cgoAllocsUnknown\n\tcdata, _ := (unsafe.Pointer)(unsafe.Pointer(data)), cgoAllocsUnknown\n\tC.glBufferSubData(ctarget, coffset, csize, cdata)\n}",
"func InvalidateBufferSubData(buffer uint32, offset int, length int) {\n\tC.glowInvalidateBufferSubData(gpInvalidateBufferSubData, (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func InvalidateBufferSubData(buffer uint32, offset int, length int) {\n\tC.glowInvalidateBufferSubData(gpInvalidateBufferSubData, (C.GLuint)(buffer), (C.GLintptr)(offset), (C.GLsizeiptr)(length))\n}",
"func (p *MemProvider) update(sid string) error {\n\tp.lock.Lock()\n\tdefer p.lock.Unlock()\n\n\tif e, ok := p.data[sid]; ok {\n\t\te.Value.(*MemStore).lastAccess = time.Now()\n\t\tp.list.MoveToFront(e)\n\t\treturn nil\n\t}\n\treturn nil\n}",
"func storeDataUsageInBackend(ctx context.Context, objAPI ObjectLayer, gui <-chan DataUsageInfo) {\n\tfor dataUsageInfo := range gui {\n\t\tdataUsageJSON, err := json.Marshal(dataUsageInfo)\n\t\tif err != nil {\n\t\t\tlogger.LogIf(ctx, err)\n\t\t\tcontinue\n\t\t}\n\t\tsize := int64(len(dataUsageJSON))\n\t\tr, err := hash.NewReader(bytes.NewReader(dataUsageJSON), size, \"\", \"\", size, false)\n\t\tif err != nil {\n\t\t\tlogger.LogIf(ctx, err)\n\t\t\tcontinue\n\t\t}\n\n\t\t_, err = objAPI.PutObject(ctx, dataUsageBucket, dataUsageObjName, NewPutObjReader(r, nil, nil), ObjectOptions{})\n\t\tif !isErrBucketNotFound(err) {\n\t\t\tlogger.LogIf(ctx, err)\n\t\t}\n\t}\n}",
"func (m *metricMysqlBufferPoolUsage) updateCapacity() {\n\tif m.data.Sum().DataPoints().Len() > m.capacity {\n\t\tm.capacity = m.data.Sum().DataPoints().Len()\n\t}\n}",
"func (k Keeper) UpdateTickersBuffer(startTS, endTS int64, productList []string) {\n\n\tdefer types.PrintStackIfPanic()\n\n\tk.Orm.Debug(fmt.Sprintf(\"[backend] entering UpdateTickersBuffer, latestTickers: %+v, TickerTimeRange: [%d, %d)=[%s, %s), productList: %v\",\n\t\tk.Cache.LatestTicker, startTS, endTS, types.TimeString(startTS), types.TimeString(endTS), productList))\n\n\tlatestProducts := []string{}\n\tfor p := range k.Cache.LatestTicker {\n\t\tlatestProducts = append(latestProducts, p)\n\t}\n\ttickerMap, err := k.Orm.RefreshTickers(startTS, endTS, productList)\n\tif err != nil {\n\t\tk.Orm.Error(fmt.Sprintf(\"generateTicker error %+v, latestTickers %+v, returnTickers: %+v\", err, k.Cache.LatestTicker, tickerMap))\n\t\treturn\n\t}\n\n\tif len(tickerMap) > 0 {\n\t\tfor product, ticker := range tickerMap {\n\t\t\tk.Cache.LatestTicker[product] = ticker\n\t\t\tk.pushWSItem(ticker)\n\t\t\tk.pushTickerItems(ticker)\n\t\t}\n\n\t\tk.Orm.Debug(fmt.Sprintf(\"UpdateTickersBuffer LatestTickerMap: %+v\", k.Cache.LatestTicker))\n\t} else {\n\t\tk.Orm.Debug(fmt.Sprintf(\"UpdateTickersBuffer No product's deal refresh in [%d, %d), latestTicker: %+v\", startTS, endTS, k.Cache.LatestTicker))\n\t}\n\n\t// Case: No deals produced in last 24 hours.\n\tfor _, p := range latestProducts {\n\t\trefreshedTicker := tickerMap[p]\n\t\tif refreshedTicker == nil {\n\t\t\tpreviousTicker := k.Cache.LatestTicker[p]\n\t\t\tif previousTicker != nil && (endTS > previousTicker.Timestamp+types.SecondsInADay) {\n\t\t\t\tpreviousTicker.Open = previousTicker.Close\n\t\t\t\tpreviousTicker.High = previousTicker.Close\n\t\t\t\tpreviousTicker.Low = previousTicker.Close\n\t\t\t\tpreviousTicker.Volume = 0\n\t\t\t\tpreviousTicker.Change = 0\n\t\t\t\tpreviousTicker.ChangePercentage = \"0.00%\"\n\t\t\t}\n\n\t\t}\n\t}\n}",
"func (c *AdapterMemory) Update(ctx context.Context, key interface{}, value interface{}) (oldValue *gvar.Var, exist bool, err error) {\n\tv, exist, err := c.data.Update(key, value)\n\treturn gvar.New(v), exist, err\n}",
"func (o StreamOptimizer) batch(ctx context.Context, chq <-chan *tree.SyncChange) <-chan *ChangeBuffer {\n\tcbQ := make(chan *ChangeBuffer, 1)\n\n\tvar nid string\n\tvar change *tree.SyncChange\n\tbuf := newBuffer()\n\n\tgo func() {\n\t\tdefer func() { close(cbQ) }()\n\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn\n\t\t\tcase change = <-chq:\n\t\t\t\t// TODO why do we sometimes receive nil?\n\t\t\t\tif change == nil {\n\t\t\t\t\tif !buf.isEmpty() {\n\t\t\t\t\t\t// transmit the last buffer before returning\n\t\t\t\t\t\tcbQ <- buf\n\t\t\t\t\t}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tif nid != change.NodeId {\n\t\t\t\t\tif buf.isEmpty() {\n\t\t\t\t\t\t// Drop it on the floor: it happens on the very first iteration\n\t\t\t\t\t} else {\n\t\t\t\t\t\tcbQ <- buf\n\t\t\t\t\t}\n\t\t\t\t\tbuf = newBuffer()\n\t\t\t\t\tnid = change.NodeId\n\t\t\t\t}\n\t\t\t\tbuf.Append(change)\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn cbQ\n}",
"func CopyBufferSubData(readTarget uint32, writeTarget uint32, readOffset int, writeOffset int, size int) {\n\tsyscall.Syscall6(gpCopyBufferSubData, 5, uintptr(readTarget), uintptr(writeTarget), uintptr(readOffset), uintptr(writeOffset), uintptr(size), 0)\n}",
"func (c *fakeRedisConn) SetReadBuffer(bytes int) {}",
"func (m *DBMem) Update(idToUpdate int, data Person) {\n m.Lock()\n defer m.Unlock()\n\n\tif len(m.data) <= idToUpdate {\n\t\tfmt.Println(\"ID is out of range\")\n\t\treturn\n\t}\n m.data[idToUpdate] = data\n m.history.Append(\"UPDATE\", idToUpdate, data)\n}",
"func (s BoltStore) BatchUpdate(ids []interface{}, data []interface{}, store string, opts ObjectStoreOptions) (err error) {\n\treturn ErrNotImplemented\n}",
"func (lvs *ValueStore) bufferChunk(v Value, c chunks.Chunk, height uint64, hints Hints) {\n\tlvs.pendingMu.Lock()\n\tdefer lvs.pendingMu.Unlock()\n\th := c.Hash()\n\td.Chk.NotZero(height)\n\tlvs.pendingPuts[h] = pendingChunk{c, height, hints}\n\tlvs.pendingPutSize += uint64(len(c.Data()))\n\n\tputChildren := func(parent hash.Hash) (dataPut int) {\n\t\tpc, present := lvs.pendingPuts[parent]\n\t\td.Chk.True(present)\n\t\tv := DecodeValue(pc.c, lvs)\n\t\tv.WalkRefs(func(grandchildRef Ref) {\n\t\t\tif pc, present := lvs.pendingPuts[grandchildRef.TargetHash()]; present {\n\t\t\t\tlvs.bs.SchedulePut(pc.c, pc.height, pc.hints)\n\t\t\t\tdataPut += len(pc.c.Data())\n\t\t\t\tdelete(lvs.pendingPuts, grandchildRef.TargetHash())\n\t\t\t}\n\t\t})\n\t\treturn\n\t}\n\n\t// Enforce invariant (1)\n\tif height > 1 {\n\t\tv.WalkRefs(func(childRef Ref) {\n\t\t\tchildHash := childRef.TargetHash()\n\t\t\tif _, present := lvs.pendingPuts[childHash]; present {\n\t\t\t\tlvs.pendingParents[h] = height\n\t\t\t} else {\n\t\t\t\t// Shouldn't be able to be in pendingParents without being in pendingPuts\n\t\t\t\t_, present := lvs.pendingParents[childHash]\n\t\t\t\td.Chk.False(present)\n\t\t\t}\n\n\t\t\tif _, present := lvs.pendingParents[childHash]; present {\n\t\t\t\tlvs.pendingPutSize -= uint64(putChildren(childHash))\n\t\t\t\tdelete(lvs.pendingParents, childHash)\n\t\t\t}\n\t\t})\n\t}\n\n\t// Enforce invariant (2)\n\tfor lvs.pendingPutSize > lvs.pendingPutMax {\n\t\tvar tallest hash.Hash\n\t\tvar height uint64 = 0\n\t\tfor parent, ht := range lvs.pendingParents {\n\t\t\tif ht > height {\n\t\t\t\ttallest = parent\n\t\t\t\theight = ht\n\t\t\t}\n\t\t}\n\t\tif height == 0 { // This can happen if there are no pending parents\n\t\t\tvar pc pendingChunk\n\t\t\tfor tallest, pc = range lvs.pendingPuts {\n\t\t\t\t// Any pendingPut is as good as another in this case, so take the first one\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tlvs.bs.SchedulePut(pc.c, pc.height, pc.hints)\n\t\t\tlvs.pendingPutSize -= uint64(len(pc.c.Data()))\n\t\t\tdelete(lvs.pendingPuts, tallest)\n\t\t\tcontinue\n\t\t}\n\n\t\tlvs.pendingPutSize -= uint64(putChildren(tallest))\n\t\tdelete(lvs.pendingParents, tallest)\n\t}\n}",
"func (m *metricRedisClientsMaxInputBuffer) updateCapacity() {\n\tif m.data.Gauge().DataPoints().Len() > m.capacity {\n\t\tm.capacity = m.data.Gauge().DataPoints().Len()\n\t}\n}",
"func (w *Writer) SetBuffer(raw []byte) {\n\tif w.err != nil {\n\t\treturn\n\t}\n\tw.b = w.b[:0]\n\tw.b = append(w.b, raw...)\n}",
"func (b *Buffer) updateMeta() {\n\t// First 8 bytes have the first frame offset,\n\t// next 8 bytes have the last frame offset,\n\t// next 8 bytes are the next sequence number,\n\t// next 4 bytes are the biggest data record we've seen,\n\t// next 8 bytes are the total data in the buffer.\n\toff := int(b.capacity)\n\tbinary.PutLittleEndianUint64(b.data, off, b.first)\n\tbinary.PutLittleEndianUint64(b.data, off+8, b.last)\n\tbinary.PutLittleEndianUint64(b.data, off+16, b.nextSeq)\n\tbinary.PutLittleEndianUint32(b.data, off+24, b.biggest)\n\tbinary.PutLittleEndianUint64(b.data, off+28, b.length)\n}",
"func (g *GLTF) loadBuffer(bufIdx int) ([]byte, error) {\n\n\t// Check if provided buffer index is valid\n\tif bufIdx < 0 || bufIdx >= len(g.Buffers) {\n\t\treturn nil, fmt.Errorf(\"invalid buffer index\")\n\t}\n\tbufData := &g.Buffers[bufIdx]\n\t// Return cached if available\n\tif bufData.cache != nil {\n\t\tlog.Debug(\"Fetching Buffer %d (cached)\", bufIdx)\n\t\treturn bufData.cache, nil\n\t}\n\tlog.Debug(\"Loading Buffer %d\", bufIdx)\n\n\t// If buffer URI use the chunk data field\n\tif bufData.Uri == \"\" {\n\t\treturn g.data, nil\n\t}\n\n\t// Checks if buffer URI is a data URI\n\tvar data []byte\n\tvar err error\n\tif isDataURL(bufData.Uri) {\n\t\tdata, err = loadDataURL(bufData.Uri)\n\t} else {\n\t\t// Try to load buffer from file\n\t\tdata, err = g.loadFileBytes(bufData.Uri)\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Checks data length\n\tif len(data) != bufData.ByteLength {\n\t\treturn nil, fmt.Errorf(\"buffer:%d read data length:%d expected:%d\", bufIdx, len(data), bufData.ByteLength)\n\t}\n\t// Cache buffer data\n\tg.Buffers[bufIdx].cache = data\n\tlog.Debug(\"cache data:%v\", len(bufData.cache))\n\treturn data, nil\n}",
"func (g *Gaffer) AddBuffer(u *Update) {\n\n\tfor _, v := range u.entities {\n\t\tg.AddEntity(v)\n\t}\n\n\tfor _, v := range u.edges {\n\t\tg.AddEdge(v)\n\t}\n\n}",
"func (b *Binding) Set(buf uint32) {\n\tgl.BindBufferBase(gl.SHADER_STORAGE_BUFFER, b.uint32, buf)\n}",
"func (b *Buffer) Sync() {\n\tb.SetArea(b.Bounds())\n}",
"func WriteSlice(buffer []byte, offset int, value []byte, valueOffset int, valueSize int) {\n copy(buffer[offset:offset + len(value)], value[valueOffset:valueOffset + valueSize])\n}",
"func (ob *OrderBook) BatchUpdate() {\n\n}",
"func (d *datastoreValues) set(data map[string][]byte) {\n\td.mu.Lock()\n\tdefer d.mu.Unlock()\n\n\tif d.Data == nil {\n\t\td.Data = data\n\t\treturn\n\t}\n\n\tfor key, value := range data {\n\t\td.Data[key] = value\n\t}\n}",
"func (shp *SHPImpl) SetNewWithBuffer() (ret bool) {\n\tret = false\n\tfor i := 0; i < int(shp.tf.bufferSize); i++ {\n\t\tvertex := shp.tf.buffer[i]\n\t\tif shp.vertex2Target[vertex] != shp.vertex2Bucket[vertex] &&\n\t\t\trand.Float64() < shp.probability[shp.vertex2Bucket[vertex]][shp.vertex2Target[vertex]] {\n\t\t\tshp.vertex2Bucket[vertex] = shp.vertex2Target[vertex]\n\t\t\tret = true\n\t\t}\n\t}\n\treturn\n}",
"func (bp *bufferPool) putBuffer(b *buffer) {\n\tbp.lock.Lock()\n\tif bp.freeBufNum < 1000 {\n\t\tb.next = bp.freeList\n\t\tbp.freeList = b\n\t\tbp.freeBufNum++\n\t}\n\tbp.lock.Unlock()\n}",
"func (pool *FixedBytePool) Set(index int32, key []byte) error {\n\tif int(index) >= pool.maxElemNum {\n\t\treturn fmt.Errorf(\"index out of range %d %d\", index, pool.maxElemNum)\n\t}\n\n\tif len(key) != pool.elemSize {\n\t\treturn fmt.Errorf(\"length must be %d while %d\", pool.elemSize, len(key))\n\t}\n\tstart := int(index) * pool.elemSize\n\tcopy(pool.buf[start:], key)\n\n\treturn nil\n}",
"func UpdateBatch(conf *Configuration, result interface{}) (chan interface{}, error) {\n\telemt := internal.VerifyStructPointer(result)\n\n\tsession, err := mgo.Dial(connectString(conf))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t// query db\n\tc := session.DB(conf.Database).C(conf.Collection)\n\n\tch := make(chan interface{})\n\ts := reflect.MakeSlice(reflect.SliceOf(elemt), 0, 0)\n\n\tvar ticker *time.Ticker\n\tvar t <-chan time.Time\n\tif conf.UpdateStrategy.UseMinRecords && conf.UpdateStrategy.UseTimeInterval {\n\t\tticker, t = internal.NewTicker(conf.UpdateStrategy.MaxInterval)\n\t}\n\n\tgo func() {\n\t\tdefer session.Close()\n\t\tminr := int(conf.UpdateStrategy.MinRecords)\n\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase v := <-ch: // receiving stream\n\t\t\t\tif !conf.UpdateStrategy.UseMinRecords {\n\t\t\t\t\t//update as they arrive\n\t\t\t\t\tgo updateSingleRecord(conf, v, c)\n\t\t\t\t} else {\n\t\t\t\t\trv := reflect.ValueOf(v)\n\t\t\t\t\tif rv.Type() == elemt {\n\t\t\t\t\t\ts = reflect.Append(s, rv)\n\t\t\t\t\t}\n\n\t\t\t\t\tif s.Len() >= minr {\n\t\t\t\t\t\ts = updateAndClear(conf, s, c)\n\n\t\t\t\t\t\t// reset the timer\n\t\t\t\t\t\tif conf.UpdateStrategy.UseTimeInterval {\n\t\t\t\t\t\t\tif ticker != nil {\n\t\t\t\t\t\t\t\tticker.Stop()\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tticker, t = internal.NewTicker(conf.UpdateStrategy.MaxInterval)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\tcase <-t: // the max interval to go without updating records. nil channel if disabled.\n\t\t\t\tif s.Len() > 0 {\n\t\t\t\t\ts = updateAndClear(conf, s, c)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn ch, nil\n}",
"func (m *metricMysqlBufferPoolDataPages) updateCapacity() {\n\tif m.data.Sum().DataPoints().Len() > m.capacity {\n\t\tm.capacity = m.data.Sum().DataPoints().Len()\n\t}\n}",
"func (bw *BufferedWriterMongo) writeBuffer() (err error) {\n\n\tif len(bw.buffer) == 0 {\n\t\treturn nil\n\t}\n\n\tcoll := bw.client.Database(bw.db).Collection(bw.collection)\n\t_, err = coll.InsertMany(bw.ctx, bw.buffer)\n\treturn err\n}",
"func (r *Buffer) Put(item []byte) {\n\tatomic.StorePointer(&r.items[atomic.LoadUint32(&r.writeIndex)], unsafe.Pointer(&item))\n\tincrementIndex(&r.writeIndex, len(r.items)-1)\n}",
"func (s *PntSubset) Refresh() {\n\tL := len(s.index)\n\tif s.input == nil {\n\t\ts.input = make([]Point, L)\n\t}\n\tif s.output == nil {\n\t\ts.output = make([]Point, L)\n\t}\n\n\tfor i := 0; i < L; i++ {\n\t\ts.input[i] = *s.ds.Point(s.index[i])\n\t\tif s.index[i]+1 >= s.ds.NumPoints() {\n\t\t\tcontinue\n\t\t}\n\t\ts.output[i] = *s.ds.Point(s.index[i] + 1)\n\t}\n}",
"func (ms *MemStore) SetAll(data map[string]io.WriterTo) error {\n\tvar err error\n\tms.mu.Lock()\n\tfor k, d := range data {\n\t\tvar buf memio.Buffer\n\t\tif _, err = d.WriteTo(&buf); err != nil {\n\t\t\tbreak\n\t\t}\n\t\tms.data[k] = buf\n\t}\n\tms.mu.Unlock()\n\treturn err\n}",
"func (b *buffer) buffer() []byte {\n\treturn b.buf[b.offset:]\n}",
"func (self *ValueStore) Append(options WriteOptions, instanceId uint64, buffer []byte, fileIdStr *string) error {\n begin := util.NowTimeMs()\n\n self.mutex.Lock()\n defer self.mutex.Unlock()\n\n bufferLen := len(buffer)\n len := util.UINT64SIZE + bufferLen\n tmpBufLen := len + util.INT32SIZE\n\n var fileId int32\n var offset uint32\n err := self.getFileId(uint32(tmpBufLen), &fileId, &offset)\n if err != nil {\n return err\n }\n\n tmpBuf := make([]byte, tmpBufLen)\n util.EncodeInt32(tmpBuf, 0, int32(len))\n util.EncodeUint64(tmpBuf, util.INT32SIZE, instanceId)\n copy(tmpBuf[util.INT32SIZE+util.UINT64SIZE:], []byte(buffer))\n\n ret, err := self.file.Write(tmpBuf)\n if ret != tmpBufLen {\n err = fmt.Errorf(\"writelen %d not equal to %d,buffer size %d\",\n ret, tmpBufLen, bufferLen)\n return err\n }\n\n if options.Sync {\n self.file.Sync()\n }\n\n self.nowFileOffset += uint64(tmpBufLen)\n\n ckSum := util.Crc32(0, tmpBuf[util.INT32SIZE:], common.CRC32_SKIP)\n self.EncodeFileId(fileId, uint64(offset), ckSum, fileIdStr)\n\n useMs := util.NowTimeMs() - begin\n\n log.Info(\"ok, offset %d fileid %d cksum %d instanceid %d buffersize %d usetime %d ms sync %t\",\n offset, fileId, ckSum, instanceId, bufferLen, useMs, options.Sync)\n return nil\n}",
"func BufferStorage(target uint32, size int, data unsafe.Pointer, flags uint32) {\n\tsyscall.Syscall6(gpBufferStorage, 4, uintptr(target), uintptr(size), uintptr(data), uintptr(flags), 0, 0)\n}",
"func (m *metricMysqlBufferPoolLimit) updateCapacity() {\n\tif m.data.Sum().DataPoints().Len() > m.capacity {\n\t\tm.capacity = m.data.Sum().DataPoints().Len()\n\t}\n}",
"func (self Source) SetBuffer(buffer Buffer) {\n\tself.Seti(AlBuffer, int32(buffer))\n}",
"func (b *RecordBuffer) Flush() {\n\tb.recordsInBuffer = b.recordsInBuffer[:0]\n\tb.sequencesInBuffer = b.sequencesInBuffer[:0]\n}",
"func (ctl *Controller) updateBuffer(position int, colour Colour) {\n\tbufferOffset := headerSize + position*ledPacketSize\n\t// Write out the brightness\n\tbrightness := colour.L\n\tif ctl.brightness != 255 {\n\t\tbrightness = uint8(float32(ctl.brightness) * float32(brightness) / 255)\n\t}\n\tif ctl.gammaFunc != nil {\n\t\t// Apply gamma correction.\n\t\tcolour = ctl.gammaFunc(colour)\n\t}\n\tctl.buffer[bufferOffset] = brightness>>3 | brightnessHeader\n\tctl.buffer[bufferOffset+ctl.rOffset] = colour.R\n\tctl.buffer[bufferOffset+ctl.bOffset] = colour.B\n\tctl.buffer[bufferOffset+ctl.gOffset] = colour.G\n}",
"func TestReplicatedCmdBuf(t *testing.T) {\n\tdefer leaktest.AfterTest(t)()\n\tdefer log.Scope(t).Close(t)\n\tvar buf replicatedCmdBuf\n\t// numStates is chosen arbitrarily.\n\tconst numStates = 5*replicatedCmdBufNodeSize + 1\n\t// Test that the len field is properly updated.\n\tvar states []*replicatedCmd\n\tfor i := 0; i < numStates; i++ {\n\t\tassert.Equal(t, i, int(buf.len))\n\t\tstates = append(states, buf.allocate())\n\t\tassert.Equal(t, i+1, int(buf.len))\n\t}\n\t// Test the iterator.\n\tvar it replicatedCmdBufSlice\n\ti := 0\n\tfor it.init(&buf); it.Valid(); it.Next() {\n\t\tassert.Equal(t, states[i], it.cur())\n\t\ti++\n\t}\n\tassert.Equal(t, i, numStates) // make sure we saw them all\n\t// Test clear.\n\tbuf.clear()\n\tassert.EqualValues(t, buf, replicatedCmdBuf{})\n\tassert.Equal(t, 0, int(buf.len))\n\tit.init(&buf)\n\tassert.False(t, it.Valid())\n\t// Test clear on an empty buffer.\n\tbuf.clear()\n\tassert.EqualValues(t, buf, replicatedCmdBuf{})\n}",
"func (q *Queue) Update(id int, data []byte) error {\n\tif _, ok := q.data[id]; !ok {\n\t\treturn fmt.Errorf(\"could not find any job with id : %d\", id)\n\t}\n\tq.lock.Lock()\n\tq.data[id] = data\n\tq.lock.Unlock()\n\treturn nil\n}",
"func (r *Ring) set(p int, v interface{}) {\n\tr.buff[r.mod(p)] = v\n}",
"func (b *buffer) grow() {\n\t// ugh all these atomics\n\tatomic.AddUint32(&b.free, uint32(len(b.data)))\n\tatomic.AddUint32(&b.mask, atomic.LoadUint32(&b.mask))\n\tatomic.AddUint32(&b.mask, 1)\n\tatomic.AddUint32(&b.bits, 1)\n\n\tnext := make([]unsafe.Pointer, 2*len(b.data))\n\tcopy(next, b.data)\n\n\t// UGH need to do this with atomics. one pointer + 2 uint64 calls?\n\tb.data = next\n}",
"func (m *metricActiveDirectoryDsReplicationSyncObjectPending) updateCapacity() {\n\tif m.data.Sum().DataPoints().Len() > m.capacity {\n\t\tm.capacity = m.data.Sum().DataPoints().Len()\n\t}\n}",
"func CopyBufferSubData(readTarget uint32, writeTarget uint32, readOffset int, writeOffset int, size int) {\n\tC.glowCopyBufferSubData(gpCopyBufferSubData, (C.GLenum)(readTarget), (C.GLenum)(writeTarget), (C.GLintptr)(readOffset), (C.GLintptr)(writeOffset), (C.GLsizeiptr)(size))\n}",
"func CopyBufferSubData(readTarget uint32, writeTarget uint32, readOffset int, writeOffset int, size int) {\n\tC.glowCopyBufferSubData(gpCopyBufferSubData, (C.GLenum)(readTarget), (C.GLenum)(writeTarget), (C.GLintptr)(readOffset), (C.GLintptr)(writeOffset), (C.GLsizeiptr)(size))\n}",
"func (m *metricRedisClientsMaxOutputBuffer) updateCapacity() {\n\tif m.data.Gauge().DataPoints().Len() > m.capacity {\n\t\tm.capacity = m.data.Gauge().DataPoints().Len()\n\t}\n}",
"func (m *metricAerospikeNodeQueryTracked) updateCapacity() {\n\tif m.data.Sum().DataPoints().Len() > m.capacity {\n\t\tm.capacity = m.data.Sum().DataPoints().Len()\n\t}\n}",
"func (b *Buffer) Overwrite(c byte) {\n\tb.mux.Lock()\n\tdefer b.mux.Unlock()\n\n\tif b.isFull() {\n\t\tb.data[b.tail] = c\n\t\tb.advance(&b.tail)\n\t} else {\n\t\tb.data[b.head] = c\n\t\tb.advance(&b.head)\n\t\tb.dataSize++\n\t}\n}",
"func (b *Backend) Update() (c context.Context, err error) {\n\tvar m Mutation\n\tfor {\n\t\terr = b.cursor.Next(c, &m)\n\t\tif err == scroll.Done {\n\t\t\tbreak\n\t\t} else if err != nil {\n\t\t\treturn\n\t\t}\n\t\tm.Update(b)\n\t}\n\treturn\n}",
"func (client *Client) addToBuffer(key string, metricValue string) {\n\t// build metric\n\tmetric := fmt.Sprintf(\"%s:%s\", key, metricValue)\n\n\t// flush\n\tif client.keyBuffer == nil {\n\t\t// send metric now\n\t\tgo client.send(metric)\n\t} else {\n\t\t// add metric to buffer for next manual flush\n\t\tclient.keyBufferLock.Lock()\n\t\tclient.keyBuffer = append(client.keyBuffer, metric)\n\t\tclient.keyBufferLock.Unlock()\n\t}\n}",
"func (w *Windowed) Set(data []StatusAndTime) {\n\tif w.data != nil {\n\t\tw.data = w.data[:0]\n\t}\n\n\tw.data = append(w.data, data...)\n\n\tw.head = 0\n\tw.length = len(data)\n}",
"func (m *metricRedisRdbChangesSinceLastSave) updateCapacity() {\n\tif m.data.Sum().DataPoints().Len() > m.capacity {\n\t\tm.capacity = m.data.Sum().DataPoints().Len()\n\t}\n}",
"func (debugging *debuggingOpenGL) BufferData(target uint32, size int, data interface{}, usage uint32) {\n\tdebugging.recordEntry(\"BufferData\", target, size, data, usage)\n\tdebugging.gl.BufferData(target, size, data, usage)\n\tdebugging.recordExit(\"BufferData\")\n}",
"func (m *metricMysqlBufferPoolPages) updateCapacity() {\n\tif m.data.Sum().DataPoints().Len() > m.capacity {\n\t\tm.capacity = m.data.Sum().DataPoints().Len()\n\t}\n}",
"func (src *Source) SetBuffer(buf []byte) {\n\tsrc.buf = buf\n}",
"func (b *Ring) SetCapacity(capacity int) {\n\tb.lock.Lock()\n\tdefer b.lock.Unlock()\n\n\tif capacity < b.size {\n\t\tcapacity = b.size\n\t}\n\tif capacity == len(b.buf) { //nothing to be done\n\t\treturn\n\t}\n\n\tnbuf := make([]interface{}, capacity)\n\n\t// now that the new capacity is enough we just copy down the buffer\n\n\t//there are only two cases:\n\t// either the values are contiguous, then they goes from\n\t// tail to head\n\t// or there are splitted in two:\n\t// tail to buffer's end\n\t// 0 to head.\n\n\thead := b.head\n\ttail := Index(-1, head, b.size, len(b.buf))\n\n\t// we are not going to copy the buffer in the same state (absolute position of head and tail)\n\t// instead, we are going to select the simplest solution.\n\tif tail < head { //data is in one piece\n\t\tcopy(nbuf, b.buf[tail:head+1])\n\t} else { //two pieces\n\t\t//copy as much as possible to the end of the buf\n\t\tn := copy(nbuf, b.buf[tail:])\n\t\t//and then from the beginning\n\t\tcopy(nbuf[n:], b.buf[:head+1])\n\t}\n\tb.buf = nbuf\n\tb.head = b.size - 1\n\treturn\n}",
"func (m *metricAerospikeNamespaceGeojsonRegionQueryPoints) updateCapacity() {\n\tif m.data.Sum().DataPoints().Len() > m.capacity {\n\t\tm.capacity = m.data.Sum().DataPoints().Len()\n\t}\n}",
"func (self *Ring) bulkDataSendToReplicas(maxRingPos int) {\n\n\tmin := self.KeyValTable.Min()\n\tkey := self.getKey()\n\n\t//Tree is empty\n\tif min.Equal(self.KeyValTable.Limit()) {\n\t\treturn\n\t}\n\tfor min != self.KeyValTable.Limit() {\n\t\titem := min.Item().(data.DataStore)\n if item.Key > maxRingPos {\n return\n }\n\t\tself.writeToReplicas(&item, key)\n\t\tfmt.Println(min.Item().(data.DataStore))\n\t\tmin = min.Next()\n\t}\n\n}",
"func (obj *Object) UpdateObjectInArray(query map[string]interface{}, data map[string]interface{}) *Object {\n\t// just allow update element,\n\t// so clear all another updates\n\tobj.changedData = map[string]interface{}{}\n\tobj.changedData[\"query\"] = query\n\tobj.changedData[\"data\"] = data\n\tobj.addtionalURL = \"/array\"\n\treturn obj\n}",
"func GetBufferSubData(target uint32, offset int, size int, data unsafe.Pointer) {\n\tsyscall.Syscall6(gpGetBufferSubData, 4, uintptr(target), uintptr(offset), uintptr(size), uintptr(data), 0, 0)\n}",
"func (o MempoolBinSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), mempoolBinPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"mempool_bin\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, mempoolBinPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in mempoolBin slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all mempoolBin\")\n\t}\n\treturn rowsAff, nil\n}",
"func InvalidateBufferData(buffer uint32) {\n C.glowInvalidateBufferData(gpInvalidateBufferData, (C.GLuint)(buffer))\n}",
"func (m *metricAerospikeNamespaceGeojsonRegionQueryRequests) updateCapacity() {\n\tif m.data.Sum().DataPoints().Len() > m.capacity {\n\t\tm.capacity = m.data.Sum().DataPoints().Len()\n\t}\n}",
"func (a *Adapter) Set(key uint64, response []byte, expiration time.Time) {\n\ta.mutex.Lock()\n\tdefer a.mutex.Unlock()\n\n\tif _, ok := a.store[key]; ok {\n\t\t// Known key, overwrite previous item.\n\t\ta.store[key] = response\n\t\treturn\n\t}\n\n\t// New key, make sure we have the capacity.\n\tif len(a.store) == a.capacity {\n\t\ta.evict()\n\t}\n\n\ta.store[key] = response\n}",
"func freeBuffer(b []uint16) { pathPool.Put(&b) }",
"func (m *metricMysqlBufferPoolPageFlushes) updateCapacity() {\n\tif m.data.Sum().DataPoints().Len() > m.capacity {\n\t\tm.capacity = m.data.Sum().DataPoints().Len()\n\t}\n}",
"func (_ BufferPtrPool1M) Put(b *[]byte) {\n\tPutBytesSlicePtr1M(b)\n}",
"func InvalidateBufferData(buffer uint32) {\n\tsyscall.Syscall(gpInvalidateBufferData, 1, uintptr(buffer), 0, 0)\n}",
"func (s *SoBlockSummaryObjectWrap) update(sa *SoBlockSummaryObject) bool {\n\tif s.dba == nil || sa == nil {\n\t\treturn false\n\t}\n\tbuf, err := proto.Marshal(sa)\n\tif err != nil {\n\t\treturn false\n\t}\n\n\tkeyBuf, err := s.encodeMainKey()\n\tif err != nil {\n\t\treturn false\n\t}\n\n\treturn s.dba.Put(keyBuf, buf) == nil\n}",
"func (q *dStarLiteQueue) update(n *dStarLiteNode, k key) {\n\tn.key = k\n\theap.Fix(q, n.idx)\n}"
] | [
"0.61380696",
"0.55751455",
"0.5563734",
"0.5530041",
"0.5502157",
"0.5497179",
"0.5490029",
"0.54680365",
"0.5456551",
"0.5442295",
"0.5409087",
"0.5386492",
"0.53746295",
"0.53503454",
"0.5280482",
"0.52780133",
"0.5277513",
"0.52261436",
"0.52215093",
"0.5215384",
"0.520201",
"0.5198486",
"0.51822466",
"0.51726574",
"0.51691",
"0.51677835",
"0.5167326",
"0.5166624",
"0.5136405",
"0.5136405",
"0.51147866",
"0.5084164",
"0.50730103",
"0.5072506",
"0.50496536",
"0.5031781",
"0.5024501",
"0.50196296",
"0.50160843",
"0.5016084",
"0.5010254",
"0.50074524",
"0.49982575",
"0.4993605",
"0.49927554",
"0.498996",
"0.49720266",
"0.49696788",
"0.49608877",
"0.49582875",
"0.49429858",
"0.49421215",
"0.49415207",
"0.49253255",
"0.49139848",
"0.49095914",
"0.49091128",
"0.48903623",
"0.48882025",
"0.48861125",
"0.48808736",
"0.48806992",
"0.48802307",
"0.48715544",
"0.48666936",
"0.486359",
"0.48626631",
"0.48448563",
"0.48441416",
"0.48429173",
"0.48415968",
"0.48405096",
"0.48383865",
"0.48383865",
"0.48370838",
"0.48364225",
"0.483284",
"0.48327917",
"0.48283613",
"0.4828161",
"0.4821796",
"0.48181167",
"0.48179418",
"0.48165378",
"0.48098913",
"0.48074064",
"0.4806758",
"0.4800836",
"0.47997946",
"0.47874856",
"0.47872245",
"0.47870657",
"0.4786375",
"0.47829238",
"0.4782722",
"0.47825363",
"0.47795504",
"0.4767208",
"0.47657278"
] | 0.5120068 | 31 |
execute a display list | func CallList(list uint32) {
C.glowCallList(gpCallList, (C.GLuint)(list))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (e *Election) executeDisplay(msg imessage.IMessage) {\n\tif e.Display != nil {\n\t\te.Display.Execute(msg)\n\t}\n}",
"func (l *FileList) Display(\n\taccessKey *ui.Entry,\n\tsecretKey *ui.Entry,\n\tbucket *ui.Entry) (err error) {\n\n\tlist, err := comm.Refresh(\n\t\taccessKey.Text(), secretKey.Text(), bucket.Text())\n\n\tif err != nil {\n\t\treturn\n\t}\n\tlog.Println(\"Displaying the list.\")\n\n\tl.name.Clear()\n\tl.mType.Clear()\n\tl.size.Clear()\n\tl.checkbox.Clear()\n\tlog.Println(\"Boxes cleared.\")\n\n\tl.NameList = []string{}\n\tl.CheckboxList = []*ui.Checkbox{}\n\tlog.Println(\"Lists cleared.\")\n\n\t// Fix the number of the list.\n\t// TODO: clear this limit.\n\tif len(list) > 30 {\n\t\tlist = list[:30]\n\t}\n\n\tfor _, item := range list {\n\t\tl.name.Append(ui.NewLabel(item.Key), true)\n\t\tl.NameList = append(l.NameList, item.Key)\n\n\t\tl.mType.Append(ui.NewLabel(item.MimeType), true)\n\t\tl.size.Append(\n\t\t\tui.NewLabel(tool.FormatSize(item.Fsize)), true)\n\n\t\ttempCheckbox := ui.NewCheckbox(\"\")\n\t\tl.checkbox.Append(tempCheckbox, true)\n\t\tl.CheckboxList = append(l.CheckboxList, tempCheckbox)\n\t}\n\tlog.Println(\"Displayed the list.\")\n\n\treturn\n}",
"func runList(cmd *cobra.Command, args []string) error {\n\tverb := \"GET\"\n\turl := \"/v1/query\"\n\n\tresp, err := web.Request(cmd, verb, url, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcmd.Printf(\"\\n%s\\n\\n\", resp)\n\treturn nil\n}",
"func (r *Ri) Display(name, typeof, mode RtToken, parameterlist ...Rter) error {\n\n\tvar out = []Rter{name, typeof, mode, PARAMETERLIST}\n\tout = append(out, parameterlist...)\n\n\treturn r.writef(\"Display\", out...)\n}",
"func Display(possible ...Cmd) {\n\thint := randHint(possible)\n\tif hint != \"\" {\n\t\tui.Hint(hint, false)\n\t}\n}",
"func (l List) ShowList() {\n\tfmt.Println(\"ID\\tArrival\\tBurst\\tPriority\")\n\n\tfor i := 0; i < len(l); i++ {\n\t\tfmt.Printf(\"%d\\t%d\\t%d\\t%d\\n\",\n\t\t\tl[i].ID, l[i].Arrival, l[i].Burst, l[i].Priority)\n\t}\n\tfmt.Printf(\"Number of jobs: %d\\n\\n\", len(l))\n}",
"func (controller *List) Display() {\n\tcontroller.Data[\"searches\"] = controller.Account.GetSearches().GetAll()\n\tcontroller.SetCustomTitle(\"Account - Activity\")\n\tcontroller.LoadTemplate(\"home\")\n}",
"func runList(cmd *cobra.Command, args []string) {\n\tif conn == nil {\n\t\trunListWeb(cmd)\n\t\treturn\n\t}\n\n\trunListDB(cmd)\n}",
"func List() {\n\terr := ListCmd.Parse(os.Args[2:])\n\tif err != nil || internal.Help {\n\t\tListCmd.Usage()\n\t\tos.Exit(0)\n\t}\n\n\tconfigurator, err := config.NewConfigurator()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tlist, err := configurator.GetCollaborators()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tsort.Slice(list, func(i, j int) bool {\n\t\treturn config.Less(list[i], list[j])\n\t})\n\n\ttw := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0x0)\n\tfor _, collab := range list {\n\t\tline := fmt.Sprintf(\"\\t%s\\t<%s>\", collab.Name, collab.Email)\n\t\tif (collab.Alias != collab.Name) {\n\t\t\tline = fmt.Sprintf(\"%s:%s\", collab.Alias, line)\n\t\t}\n\t\tfmt.Fprintln(tw, line)\n\t}\n\ttw.Flush()\n}",
"func (*ListCmd) Name() string { return \"list\" }",
"func (l *List) Display() {\n\tlst := l.head\n\tfor lst != nil {\n\t\tif lst.next != nil {\n\t\t\tfmt.Printf(\"%+v -> \", lst.val)\n\t\t} else {\n\t\t\tfmt.Printf(\"%+v\", lst.val)\n\t\t}\n\t\tlst = lst.next\n\t}\n\tfmt.Println()\n}",
"func (c *showCommand) Run(ctx context.Context, _ *commoncli.Env, serverClient util.ServerClient) error {\n\tif err := c.validate(); err != nil {\n\t\treturn err\n\t}\n\n\tresp, err := c.fetchEntries(ctx, serverClient.NewEntryClient())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcommonutil.SortTypesEntries(resp.Entries)\n\treturn c.printer.PrintProto(resp)\n}",
"func (s *Service) Display(c context.Context, mid int64, plat int8, build int, buvid, channel, ip, ak, network, mobiApp,\n\tdevice, language, adExtra string, isTmp bool, now time.Time) (res []*show.Show) {\n\tres = s.showDisplay(c, mid, plat, build, buvid, channel, ip, ak, network, mobiApp, device, language, adExtra, isTmp, false, false, now)\n\treturn\n}",
"func (ll *linkedList) display() {\n\tfor tip := ll.head; tip != nil; tip = tip.Next {\n\t\tfmt.Printf(\"NODE: %+v %p \\n\", tip, tip)\n\t}\n\tfmt.Println()\n}",
"func (L *List) Display() {\n\tnode := L.Head\n\tfor node != nil {\n\t\tfmt.Printf(\"%+v -> \", node.Key)\n\t\tnode = node.next\n\t}\n\tfmt.Println(\"---\")\n}",
"func (h *History) List() {\n\tload := reverse(h.Load())\n\tprompt := promptui.Select{\n\t\tLabel: \"Target hisotry\",\n\t\tItems: load,\n\t\tSize: 10,\n\t}\n\n\ti, _, err := prompt.Run()\n\n\tif err != nil {\n\t\tlog.Fatalln(\"Prompt failed: \\n\", err)\n\t}\n\n\titem := load[i]\n\th.Write(item)\n\tExecuteItem(h.binary, item)\n}",
"func List(g *types.Cmd) {\n\tg.AddOptions(\"list\")\n}",
"func view() {\n\tvar selection string\n\tlisted, _ := exec.Command(\"ssh\", \"[email protected]\", \"ls\", \"-a\").Output()\n\tfmt.Println(\"\")\n\tfmt.Println(\"Current Files in directory:\")\n\tfmt.Println(string(listed))\n\tfmt.Println(\"Press 1 to create a file\")\n\tfmt.Println(\"Press 2 to delete a file\")\n\tfmt.Println(\"Press 3 to go back to the main menu\")\n\tfmt.Scan(&selection)\n\n\tif selection == \"1\" {\n\t\tcreate()\n\t}\n\tif selection == \"2\" {\n\t\tdelete()\n\t}\n\tif selection == \"3\" {\n\t\tmainmenu()\n\t}\n\tgoback()\n\n}",
"func (db database) list(w http.ResponseWriter, req *http.Request) {\n\n\tif err := itemList.Execute(w, db); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}",
"func scanCmdDisplay(result *client.StorageScanResp, summary bool) (string, error) {\n\tout := &bytes.Buffer{}\n\n\tgroups, err := groupScanResults(result, summary)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tif summary {\n\t\tif len(groups) == 0 {\n\t\t\treturn \"no hosts found\", nil\n\t\t}\n\t\treturn tabulateHostGroups(groups, \"Hosts\", \"SCM Total\", \"NVMe Total\")\n\t}\n\n\tformatHostGroups(out, groups)\n\n\treturn out.String(), nil\n}",
"func (c *DashboardLsCmd) Exec(ctx context.Context, args []string) error {\n\tdashboards, err := c.Conf.Client().Search(ctx, grafsdk.DashTypeSearchOption())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ttable := tablewriter.NewWriter(os.Stdout)\n\ttable.SetHeader([]string{\"UID\", \"Folder\", \"Title\", \"URL\"})\n\n\tfor _, dashboard := range dashboards {\n\t\ttable.Append([]string{dashboard.UID, dashboard.FolderTitle, dashboard.Title, fmt.Sprintf(\"%s/%s\", c.Conf.APIURL, dashboard.URL)})\n\t}\n\ttable.Render()\n\n\treturn nil\n}",
"func ShowList(manuals []*model.Manual) {\n\tnum := len(manuals)\n\tif num > 1 {\n\t\tfmt.Println(\"Found \" + strconv.Itoa(num) + \" manuals\")\n\t} else if num == 0 {\n\t\tfmt.Println(ansi.Red + \"No manuals found\" + ansi.Reset)\n\t\treturn\n\t}\n\n\t// show manuals\n\thead, rows := createList(manuals)\n\tfmt.Println(ansi.ColorCode(\"cyan\") + head + ansi.Reset)\n\tfmt.Println(strings.Join(rows, \"\\n\"))\n}",
"func (d *Inbrs) DisplayTable(w io.Writer) {\n\tvar data [][]string\n\tfor _, s := range d.list {\n\t\tdata = append(data, []string{s.hostname, s.intName, s.area,\n\t\t\ts.remoteID, s.fwAddress.String()})\n\t}\n\ttable := tablewriter.NewWriter(w)\n\ttable.SetHeader([]string{\"hostname\", \"interface\", \"area\", \"remote id\", \"FW address\"})\n\tfor _, v := range data {\n\t\ttable.Append(v)\n\t}\n\ttable.Render() // Send output\n}",
"func ShowInteractiveList(manuals []*model.Manual) {\n\tnum := len(manuals)\n\tif num > 1 {\n\t\tfmt.Println(\"Found \" + strconv.Itoa(num) + \" manuals\")\n\t} else if num == 0 {\n\t\tfmt.Println(ansi.Red + \"No manuals found\" + ansi.Reset)\n\t\treturn\n\t}\n\n\t_, rows := createList(manuals)\n\tprompt := &survey.Select{\n\t\tMessage: \"Select a manual to show\",\n\t\tOptions: rows,\n\t}\n\tvar row string\n\tif err := survey.AskOne(prompt, &row, nil); err != nil {\n\t\tText(err)\n\t\treturn\n\t}\n\n\tfor i, r := range rows {\n\t\tif row == r {\n\t\t\tShowManual(manuals[i], false)\n\t\t\tbreak\n\t\t}\n\t}\n}",
"func ShowUserList(ul *[]define.User) {\n\t//ul := &define.UserList\n\tt := tablewriter.NewWriter(os.Stdout)\n\tt.SetAutoFormatHeaders(false)\n\tt.SetAutoWrapText(false)\n\tt.SetReflowDuringAutoWrap(false)\n\tt.SetHeader([]string{\"ID\", \"Name\", \"Cell\", \"Address\", \"Born\", \"Passwd\"})\n\tfor _, user := range *ul {\n\t\tid := strconv.FormatUint(uint64(user.ID), 10)\n\t\tt.Append([]string{id, user.Name, user.Cell, user.Address,\n\t\t\tuser.Born.Format(\"2006.01.02\"), user.Passwd})\n\t}\n\tt.Render()\n}",
"func display(w http.ResponseWriter, tmpl string, data interface{}) {\n\ttemplates.ExecuteTemplate(w, tmpl, data)\n}",
"func (l *Listener) List(in string , list *[]Message) error {\n\n\tfmt.Println(\"Command list\")\n\n\tswitch semantic {\n\n\tcase 1:\n\t\t*list = *queue\n\n\tcase 2:\n\n\t\tvar newlist []Message\n\n\t\tfor i := 0; i < len(*queue); i++ {\n\n\t\t\tif (*queue)[i].Visible {\n\n\t\t\t\tnewlist = append(newlist, (*queue)[i])\n\n\t\t\t}\n\n\t\t}\n\n\t\t*list = newlist\n\n\t}\n\n\treturn nil\n}",
"func RunCmdActionList(c *CmdConfig) error {\n\tactions, err := c.Actions().List()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tactions, err = filterActionList(c, actions)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tsort.Sort(actionsByCompletedAt(actions))\n\n\titem := &displayers.Action{Actions: actions}\n\treturn c.Display(item)\n}",
"func (c *Console) List(group string) (err error) {\n\t_, err = fmt.Fprintf(c.conn, \"%v\\n\", toJSON([]string{\"list\", group}))\n\tif err == nil {\n\t\terr = <-c.Waiter\n\t}\n\treturn\n}",
"func (n *NetworkListCommand) runNetworkList(args []string) error {\n\tlogrus.Debugf(\"list the networks\")\n\n\tctx := context.Background()\n\tapiClient := n.cli.Client()\n\trespNetworkResource, err := apiClient.NetworkList(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdisplay := n.cli.NewTableDisplay()\n\tdisplay.AddRow([]string{\"NETWORK ID\", \"NAME\", \"DRIVER\", \"SCOPE\"})\n\tfor _, network := range respNetworkResource {\n\t\tdisplay.AddRow([]string{\n\t\t\tnetwork.ID[:10],\n\t\t\tnetwork.Name,\n\t\t\tnetwork.Driver,\n\t\t\tnetwork.Scope,\n\t\t})\n\t}\n\n\tdisplay.Flush()\n\treturn nil\n}",
"func listRun(cmd *cobra.Command, args []string) {\n\t\n\t// Items are read in using ReadItems; an example of stepwise refinement and procedural abstraction.\n\titems, err := todo.ReadItems(dataFile)\n\n\tvar data [][]string\n\n\t// Selection statement run to check if the To-Do list is empty\n\tif len(items) == 0 {\n\t\tlog.Println(\"No To-Do's in Your List - use the create command to get started!\")\n\t\treturn\n\t}\n\n\t// Selection statement run to check if there was an error from reading the data\n\tif err != nil {\n\t\tlog.Printf(\"%v\", err)\n\t} \n\n\t// Calls Sort method created in todo.go; an example of stepwise refinement\n\ttodo.Sort(items)\n\n\t// Iterative statement that appends all of the To-Dos in the list to a String array\n\t// Sequential statements are run within the FOR-EACH loop\n\tfor _, i := range items {\n\t\tvar temp []string\n\t\ttemp = append(temp, i.Label())\n\t\ttemp = append(temp, i.PrettyDone())\n\t\ttemp = append(temp, i.PrettyPrint())\n\t\ttemp = append(temp, i.Text)\n\t\tdata = append(data, temp)\n\t}\n\n\t\n\t/*\n\tSets the parameters for the To-Do list displayed as a table to the user. \n\tControls the appearence of the GUI.\n\t*/\n\ttable := tablewriter.NewWriter(os.Stdout)\n\ttable.SetHeader([]string {\"Position\", \"Done?\", \"Priority\", \"Task\"})\n\n\ttable.SetHeaderColor(tablewriter.Colors{tablewriter.Bold, tablewriter.BgHiBlueColor},\n\t\ttablewriter.Colors{tablewriter.FgWhiteColor, tablewriter.Bold, tablewriter.BgHiBlueColor},\n\t\ttablewriter.Colors{tablewriter.BgHiBlueColor, tablewriter.FgWhiteColor},\n\t\ttablewriter.Colors{tablewriter.BgHiBlueColor, tablewriter.FgWhiteColor})\n\n\ttable.SetColumnColor(tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiCyanColor},\n\t\ttablewriter.Colors{tablewriter.Bold, tablewriter.FgHiRedColor},\n\t\ttablewriter.Colors{tablewriter.Bold, tablewriter.FgHiMagentaColor},\n\t\ttablewriter.Colors{tablewriter.Bold, tablewriter.FgBlackColor})\n\n\tw := tabwriter.NewWriter(os.Stdout, 3, 0, 1, ' ', 0)\n\n\t// Iterative statement that appends all To-Do items marked done based on the condition of if either the --all or --done flag is active.\n\tfor p, i := range data {\n\t\tif allFlag || items[p].Done == doneFlag {\n\t\t\ttable.Append(i)\n\t\t}\n\t}\n\n\t// Renders the table\n\ttable.Render()\n\n\t// Flushes the writer\n\tw.Flush()\n\n}",
"func (hc *Hailconfig) List() error {\n\tcols, _ := consolesize.GetConsoleSize()\n\tmaxLenAlias := 25\n\tmaxLenCommand := 80\n\tmaxLenDescription := 25\n\tif cols > 10 {\n\t\tmaxLenAlias = cols/4 - 5\n\t\tmaxLenCommand = cols / 2\n\t\tmaxLenDescription = cols/4 - 5\n\t}\n\n\tt := table.NewWriter()\n\tt.SetOutputMirror(os.Stdout)\n\tt.AppendHeader(table.Row{\"Alias\", \"Command\", \"Description\"})\n\tt.SetColumnConfigs([]table.ColumnConfig{\n\t\t{\n\t\t\tName: \"Alias\",\n\t\t\tWidthMin: 5,\n\t\t\tWidthMax: maxLenAlias,\n\t\t},\n\t\t{\n\t\t\tName: \"Command\",\n\t\t\tWidthMin: 10,\n\t\t\tWidthMax: maxLenCommand,\n\t\t}, {\n\t\t\tName: \"Description\",\n\t\t\tWidthMin: 5,\n\t\t\tWidthMax: maxLenDescription,\n\t\t},\n\t})\n\t//t.SetAllowedRowLength(90)\n\tfor alias, script := range hc.Scripts {\n\t\tt.AppendRow([]interface{}{alias, script.Command, script.Description})\n\t\tt.AppendSeparator()\n\t}\n\tt.Render()\n\treturn nil\n}",
"func formatCmdDisplay(results client.StorageFormatResults, summary bool) (string, error) {\n\tout := &bytes.Buffer{}\n\n\tgroups, mixedGroups, err := groupFormatResults(results, summary)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tif len(groups) > 0 {\n\t\tfmt.Fprintf(out, \"\\n%s\\n\", groups)\n\t}\n\n\treturn formatHostGroups(out, mixedGroups), nil\n}",
"func (cfg *Config) Display() {\n fmt.Println(os.Args)\n fmt.Println(\"-------------------------------------\")\n}",
"func (o *ListOptions) Run(ctx context.Context) (err error) {\n\to.printDevfileList(o.devfileList.Items)\n\treturn nil\n}",
"func (c *Dg) ShowList() ([]string, error) {\n c.con.LogQuery(\"(show) list of device groups\")\n path := c.xpath(nil)\n return c.con.EntryListUsing(c.con.Show, path[:len(path) - 1])\n}",
"func (r *ListCommand) Execute(args []string) (err error) {\n\tif err := r.RootCommand.Execute(args); err != nil {\n\t\tcerberus.Logger.Fatalln(err)\n\t}\n\n\tsvcs, err := cerberus.LoadServicesCfg()\n\tif err != nil {\n\t\tcerberus.DebugLogger.Fatalln(err)\n\t}\n\n\tfmt.Printf(\"\\nCerberus installed services:\\n\")\n\tfmt.Println(strings.Repeat(\"-\", 80))\n\n\tp := keyValuePrinter{indentSize: 5}\n\tfor _, s := range svcs {\n\t\tif r.Query != \"\" {\n\t\t\tif !strings.Contains(strings.ToLower(s.Name), strings.ToLower(r.Query)) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\n\t\tp.println(\"Name\", s.Name)\n\t\tp.println(\"Display Name\", s.DisplayName)\n\t\tp.println(\"Description\", s.Desc)\n\t\tp.println(\"Executable Path\", s.ExePath)\n\t\tp.println(\"Working Directory\", s.WorkDir)\n\t\tif len(s.Args) > 0 {\n\t\t\tp.println(\"Arguments\", strings.Join(s.Args, \" \"))\n\t\t}\n\t\tif len(s.Env) > 0 {\n\t\t\tp.println(\"Environment Variables\", strings.Join(s.Env, \" \"))\n\t\t}\n\t\tp.println(\"Start Type\", startTypeMapping[s.StartType])\n\t\tif s.StopSignal != cerberus.NoSignal {\n\t\t\tp.println(\"Stop Signal\", s.StopSignal)\n\t\t}\n\t\tp.println(\"Service User\", s.ServiceUser)\n\t\tif len(s.Dependencies) > 0 {\n\t\t\tp.println(\"Dependencies\", strings.Join(s.Dependencies, \" | \"))\n\t\t}\n\t\tvar actlng = len(s.RecoveryActions)\n\t\tif actlng > 0 {\n\t\t\tp.println(\"Recovery Actions\", \"\")\n\t\t\tp.indent()\n\t\t\tfor _, action := range s.RecoveryActions {\n\t\t\t\tp.println(\"Error Code\", action.ExitCode)\n\t\t\t\tp.println(\"Action\", mapAction(action.Action))\n\t\t\t\tif action.Action&cerberus.RestartAction == cerberus.RestartAction {\n\t\t\t\t\tp.println(\"Delay\", action.Delay)\n\t\t\t\t\tp.println(\"Max Restarts\", action.MaxRestarts)\n\t\t\t\t\tp.println(\"Reset After\", action.ResetAfter)\n\t\t\t\t}\n\t\t\t\tif action.Action&cerberus.RunProgramAction == cerberus.RunProgramAction {\n\t\t\t\t\tp.println(\"Program\", action.Program)\n\t\t\t\t\tp.println(\"Arguments\", fmt.Sprintf(\"[%v]\", concatArgs(action.Arguments)))\n\t\t\t\t}\n\t\t\t\tif actlng > 1 {\n\t\t\t\t\tp.println(\"-\", nil)\n\t\t\t\t}\n\t\t\t\tactlng--\n\t\t\t}\n\t\t}\n\n\t\tp.writeTo(os.Stdout)\n\t\tfmt.Fprintf(os.Stdout, \"%v\\n\", strings.Repeat(\"-\", 80))\n\t}\n\n\treturn nil\n}",
"func (c *Firewall) ShowList() ([]string, error) {\n\tans := c.container()\n\treturn c.ns.Listing(util.Show, c.pather(), ans)\n}",
"func runShow(args []string) int {\n\tcfg, err := loadConfig(showOpt.configFile)\n\tif err != nil {\n\t\tfmt.Fprintln(o.err, err)\n\t\treturn 1\n\t}\n\tdb := dbClientFor(cfg)\n\tdb.Connect()\n\tdefer db.Disconnect()\n\n\topt := dbmodel.RequireNone\n\tif showOpt.showAll {\n\t\topt = dbmodel.RequireAll\n\t}\n\tif len(args) == 0 {\n\t\tfmt.Fprintln(o.err, \"require table name as argument.\")\n\t\treturn 1\n\t}\n\ttbl, err := db.Table(cfg.Schema, args[0], opt)\n\tif err != nil {\n\t\tfmt.Fprintln(o.err, err)\n\t\treturn 1\n\t}\n\n\tconv := findConverter(showOpt.prettyPrint, cfg.Driver)\n\tprintTable(tbl, conv)\n\treturn 0\n}",
"func (l *LinkedList) Display() {\n\tif l.count == 0 {\n\t\tfmt.Println(\"No items in the stock\")\n\t} else {\n\t\tfmt.Println(\"..........The stock report is......\")\n\t\tcurrentPost := l.head\n\t\tfor currentPost != nil {\n\t\t\tfmt.Println(\"\\n-----------------------------------------\\n\")\n\t\t\tfmt.Printf(\"\\nName : %s\", currentPost.Name)\n\t\t\tfmt.Printf(\"\\nShare_Price : %d\", currentPost.Share_price)\n\t\t\tfmt.Printf(\"\\nShares : %d\", currentPost.Number_of_shares)\n\t\t\tfmt.Println(\"\\n----------------------------------------\\n\")\n\t\t\tcurrentPost = currentPost.next\n\t\t}\n\t}\n}",
"func Display(lst *Node) {\n\tfor lst != nil {\n\t\tif lst.next != nil {\n\t\t\tfmt.Printf(\"%v -> \", lst.val)\n\t\t} else {\n\t\t\tfmt.Printf(\"%v\", lst.val)\n\t\t}\n\t\tlst = lst.next\n\t}\n\tfmt.Println()\n}",
"func ListCommand(c *cli.Context, log logging.Logger, _ string) int {\n\tif len(c.Args()) != 0 {\n\t\tcli.ShowCommandHelp(c, \"list\")\n\t\treturn 1\n\t}\n\n\tshowAll := c.Bool(\"all\")\n\n\tk, err := klient.CreateKlientWithDefaultOpts()\n\tif err != nil {\n\t\tlog.Error(\"Error creating klient client. err:%s\", err)\n\t\tfmt.Println(defaultHealthChecker.CheckAllFailureOrMessagef(GenericInternalError))\n\t\treturn 1\n\t}\n\n\tif err := k.Dial(); err != nil {\n\t\tlog.Error(\"Error dialing klient client. err:%s\", err)\n\t\tfmt.Println(defaultHealthChecker.CheckAllFailureOrMessagef(GenericInternalError))\n\t\treturn 1\n\t}\n\n\tinfos, err := getListOfMachines(k)\n\tif err != nil {\n\t\tlog.Error(\"Error listing machines. err:%s\", err)\n\t\tfmt.Println(getListErrRes(err, defaultHealthChecker))\n\t\treturn 1\n\t}\n\n\t// Sort our infos\n\tsort.Sort(infos)\n\n\t// Filter out infos for listing and json.\n\tfor i := 0; i < len(infos); i++ {\n\t\tinfo := &infos[i]\n\n\t\tonlineRecently := time.Since(info.OnlineAt) <= 24*time.Hour\n\t\thasMounts := len(info.Mounts) > 0\n\t\t// Do not show machines that have been offline for more than 24h,\n\t\t// but only if the machine doesn't have any mounts and we aren't using the --all\n\t\t// flag.\n\t\tif !hasMounts && !showAll && !onlineRecently {\n\t\t\t// Remove this element from the slice, because we're not showing it as\n\t\t\t// described above.\n\t\t\tinfos = append(infos[:i], infos[i+1:]...)\n\t\t\t// Decrement the index, since we're removing the item from the slice.\n\t\t\ti--\n\t\t\tcontinue\n\t\t}\n\n\t\t// For a more clear UX, replace the team name of the default Koding team,\n\t\t// with Koding.com\n\t\tfor i, team := range info.Teams {\n\t\t\tif team == \"Koding\" {\n\t\t\t\tinfo.Teams[i] = \"koding.com\"\n\t\t\t}\n\t\t}\n\n\t\tswitch info.MachineStatus {\n\t\tcase machine.MachineOffline:\n\t\t\tinfo.MachineStatusName = \"offline\"\n\t\tcase machine.MachineOnline:\n\t\t\tinfo.MachineStatusName = \"online\"\n\t\tcase machine.MachineDisconnected:\n\t\t\tinfo.MachineStatusName = \"disconnected\"\n\t\tcase machine.MachineConnected:\n\t\t\tinfo.MachineStatusName = \"connected\"\n\t\tcase machine.MachineError:\n\t\t\tinfo.MachineStatusName = \"error\"\n\t\tcase machine.MachineRemounting:\n\t\t\tinfo.MachineStatusName = \"remounting\"\n\t\tdefault:\n\t\t\tinfo.MachineStatusName = \"unknown\"\n\t\t}\n\t}\n\n\tif c.Bool(\"json\") {\n\t\tjsonBytes, err := json.MarshalIndent(infos, \"\", \" \")\n\t\tif err != nil {\n\t\t\tlog.Error(\"Marshalling infos to json failed. 
err:%s\", err)\n\t\t\tfmt.Println(GenericInternalError)\n\t\t\treturn 1\n\t\t}\n\n\t\tfmt.Println(string(jsonBytes))\n\t\treturn 0\n\t}\n\n\tw := tabwriter.NewWriter(os.Stdout, 2, 0, 2, ' ', 0)\n\tfmt.Fprintf(w, \"\\tTEAM\\tLABEL\\tIP\\tALIAS\\tSTATUS\\tMOUNTED PATHS\\n\")\n\tfor i, info := range infos {\n\t\t// Join multiple teams into a single identifier\n\t\tteam := strings.Join(info.Teams, \",\")\n\n\t\tvar formattedMount string\n\t\tif len(info.Mounts) > 0 {\n\t\t\tformattedMount += fmt.Sprintf(\n\t\t\t\t\"%s -> %s\",\n\t\t\t\tshortenPath(info.Mounts[0].LocalPath),\n\t\t\t\tshortenPath(info.Mounts[0].RemotePath),\n\t\t\t)\n\t\t}\n\n\t\t// Currently we are displaying the status message over the formattedMount,\n\t\t// if it exists.\n\t\tif info.StatusMessage != \"\" {\n\t\t\tformattedMount = info.StatusMessage\n\t\t}\n\n\t\tfmt.Fprintf(w, \" %d.\\t%s\\t%s\\t%s\\t%s\\t%s\\t%s\\n\",\n\t\t\ti+1, team, info.MachineLabel, info.IP, info.VMName, info.MachineStatusName,\n\t\t\tformattedMount,\n\t\t)\n\t}\n\tw.Flush()\n\n\treturn 0\n}",
"func (command QuerybynameCommand) Execute() {\n\tquery := command.checkParam()\n\tleaves := query.Query()\n\n\tfmt.Printf(\"%s\\t\\t\\t\\t%s\\t%s\\n\", \"leaveID\", \"time-from\", \"time-end\")\n\tfor _, leave := range leaves {\n\t\tfmt.Printf(\"%s\\t%s\\t%s\\n\", leave.GetID(), leave.GetTimeFrom(), leave.GetTimeEnd())\n\t}\n}",
"func Display() chan<- Result {\n\t// Create a channel to receive the results on.\n\tresult := make(chan Result)\n\n\tgo func() {\n\t\t// Wait for results from the different feeds and\n\t\t// display them.\n\t\tfor found := range result {\n\t\t\tlog.Printf(\"%s:\\n%s\\n\\n\", found.Field, found.Content)\n\t\t}\n\t}()\n\n\treturn result\n}",
"func (cmd *ListKeys) Execute() error {\n\tvar walkKeyMap func(keymap *KeyMap, path []rune, f func(path []rune, b *KeyBinding))\n\twalkKeyMap = func(keymap *KeyMap, path []rune, f func(path []rune, b *KeyBinding)) {\n\t\tfor _, keyBinding := range keymap.Bindings() {\n\t\t\tchildPath := make([]rune, len(path))\n\t\t\tcopy(childPath, path)\n\t\t\tchildPath = append(childPath, keyBinding.Key())\n\t\t\tif keyBinding.HasChildren() {\n\t\t\t\twalkKeyMap(keyBinding.Children(), childPath, f)\n\t\t\t} else {\n\t\t\t\tf(childPath, keyBinding)\n\t\t\t}\n\t\t}\n\t}\n\n\tprintBinding := func(path []rune, b *KeyBinding) {\n\t\tpathString := []string{}\n\t\tfor _, r := range path {\n\t\t\tpathString = append(pathString, fmt.Sprintf(\"%c\", r))\n\t\t}\n\t\tfmt.Fprintf(cmd.Terminal, \"%s: %s\\n\", strings.Join(pathString, \" \"), b.Description())\n\t}\n\n\twalkKeyMap(cmd.CurrentKeyMap, []rune{}, printBinding)\n\treturn nil\n}",
"func (info *Info) Run(c Cursor) {\n\tp := c.P()\n\tif !info.HeadLess {\n\t\tp.Printf(\"info %v {\", info.Domain)\n\t\tp.ShiftIn()\n\t\tdefer p.ShiftOut(\"}\")\n\t}\n\n\tips := info.run(c)\n\tif c.E() != nil {\n\t\treturn\n\t}\n\n\tif !info.HideResult {\n\t\tips.PrintResult(c)\n\n\t\tif len(info.NameServers) > 0 {\n\t\t\tp.Print()\n\t\t\tfor _, ns := range info.NameServers {\n\t\t\t\tp.Printf(\"// %v\", ns)\n\t\t\t}\n\t\t}\n\n\t\tif len(info.Records) > 0 {\n\t\t\tp.Print()\n\t\t\tfor _, rr := range info.Records {\n\t\t\t\tp.Printf(\"// %s\", rr.Digest())\n\t\t\t}\n\t\t}\n\t}\n}",
"func OnList(c *grumble.Context) error {\n\tlen := len(config.AppConfig.Plans)\n\tif len == 0 {\n\t\tfmt.Println(\"No plans available. Try \\\"read\\\".\")\n\t\treturn nil\n\t}\n\n\tfor i, plan := range config.AppConfig.Plans {\n\t\tfmt.Println(i+1, plan.Name)\n\t\tfor i, task := range plan.Tasks {\n\t\t\tif task.GetDescription() != \"\" {\n\t\t\t\tfmt.Println(\" \", strconv.Itoa(i+1)+\".\", task.GetDescription())\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}",
"func (c *list) execute(sess *session) *response {\n\n\t// Is the user authenticated?\n\tif sess.st != authenticated {\n\t\treturn mustAuthenticate(sess, c.tag, \"LIST\")\n\t}\n\n\t// Is the mailbox pattern empty? This indicates that we should return\n\t// the delimiter and the root name of the reference\n\tif c.mboxPattern == \"\" {\n\t\tres := ok(c.tag, \"LIST completed\")\n\t\tres.extra(fmt.Sprintf(`LIST () \"%s\" %s`, pathDelimiter, c.reference))\n\t\treturn res\n\t}\n\n\t// Convert the reference and mbox pattern into slices\n\tref := pathToSlice(c.reference)\n\tmbox := pathToSlice(c.mboxPattern)\n\n\t// Get the list of mailboxes\n\tmboxes, err := sess.list(ref, mbox)\n\n\tif err != nil {\n\t\treturn internalError(sess, c.tag, \"LIST\", err)\n\t}\n\n\t// Check for an empty response\n\tif len(mboxes) == 0 {\n\t\treturn no(c.tag, \"LIST no results\")\n\t}\n\n\t// Respond with the mailboxes\n\tres := ok(c.tag, \"LIST completed\")\n\tfor _, mbox := range mboxes {\n\t\tres.extra(fmt.Sprintf(`LIST (%s) \"%s\" /%s`,\n\t\t\tjoinMailboxFlags(mbox),\n\t\t\tstring(pathDelimiter),\n\t\t\tstrings.Join(mbox.Path, string(pathDelimiter))))\n\t}\n\n\treturn res\n}",
"func executeListCmd(t *gotesting.T, stdout io.Writer, args []string, wrapper *stubRunWrapper) subcommands.ExitStatus {\n\ttd := testutil.TempDir(t)\n\tdefer os.RemoveAll(td)\n\n\tcmd := newListCmd(stdout, td)\n\tcmd.wrapper = wrapper\n\tflags := flag.NewFlagSet(\"\", flag.ContinueOnError)\n\tcmd.SetFlags(flags)\n\tif err := flags.Parse(args); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tflags.Set(\"build\", \"false\") // DeriveDefaults fails if -build=true and bundle dirs are missing\n\treturn cmd.Execute(context.Background(), flags)\n}",
"func CmdClList(s ircx.Sender, m *irc.Message) {\n\tr := report.Reports[\"classes\"].(*report.Context)\n\n\tfor _, v := range r.Classes {\n\t\ts.Send(&irc.Message{\n\t\t\tCommand: irc.PRIVMSG,\n\t\t\tParams: Params(m),\n\t\t\tTrailing: v,\n\t\t})\n\t}\n\n\ttime.Sleep(600 * time.Millisecond)\n}",
"func CommandList() error {\n\tcommon.LogInfo2Quiet(\"My Apps\")\n\tapps, err := common.DokkuApps()\n\tif err != nil {\n\t\tcommon.LogWarn(err.Error())\n\t\treturn nil\n\t}\n\n\tfor _, appName := range apps {\n\t\tcommon.Log(appName)\n\t}\n\n\treturn nil\n}",
"func (ts *TaskSet) DisplayByNext() {\n\tif ts.numTasksLoaded == 0 {\n\t\tfmt.Println(\"\\033[31mNo tasks found. Showing help.\\033[0m\")\n\t\tHelp(\"\")\n\t} else if len(ts.tasks) == 0 {\n\t\tExitFail(\"No matching tasks in given context or filter.\")\n\t} else if len(ts.tasks) == 1 {\n\t\tts.tasks[0].Display()\n\t\treturn\n\t} else {\n\t\tvar tasks []*Task\n\t\tw, h := MustGetTermSize()\n\n\t\th -= 8 // leave room for context message, header and prompt\n\n\t\tif h > len(ts.tasks) || h < 0 {\n\t\t\ttasks = ts.tasks\n\t\t} else {\n\t\t\ttasks = ts.tasks[:h]\n\t\t}\n\n\t\ttable := NewTable(\n\t\t\tw,\n\t\t\t\"ID\",\n\t\t\t\"Priority\",\n\t\t\t\"Tags\",\n\t\t\t\"Project\",\n\t\t\t\"Summary\",\n\t\t)\n\n\t\tfor _, t := range tasks {\n\t\t\tstyle := t.Style()\n\t\t\ttable.AddRow(\n\t\t\t\t[]string{\n\t\t\t\t\t// id should be at least 2 chars wide to match column header\n\t\t\t\t\t// (headers can be truncated)\n\t\t\t\t\tfmt.Sprintf(\"%-2d\", t.ID),\n\t\t\t\t\tt.Priority,\n\t\t\t\t\tstrings.Join(t.Tags, \" \"),\n\t\t\t\t\tt.Project,\n\t\t\t\t\tt.Summary,\n\t\t\t\t},\n\t\t\t\tstyle,\n\t\t\t)\n\t\t}\n\n\t\ttable.Render()\n\n\t\tif h >= len(ts.tasks) {\n\t\t\tfmt.Printf(\"\\n%v tasks.\\n\", len(ts.tasks))\n\t\t} else {\n\t\t\tfmt.Printf(\"\\n%v tasks, truncated to %v lines.\\n\", len(ts.tasks), h)\n\t\t}\n\t}\n}",
"func Display(n notifier) {\n\tn.display()\n}",
"func (z *zfsctl) List(ctx context.Context, name, options, max string, oProperties []string, sProperty, SProperty, t string) *execute {\n\targs := []string{\"list\"}\n\tif len(options) > 0 {\n\t\targs = append(args, options)\n\t}\n\tif len(max) > 0 {\n\t\targs = append(args, max)\n\t}\n\tif oProperties != nil {\n\t\to := \"-o \"\n\t\tfor _, p := range oProperties {\n\t\t\to += p + \",\"\n\t\t}\n\t\targs = append(args, strings.TrimSuffix(o, \",\"))\n\t}\n\tif len(sProperty) > 0 {\n\t\targs = append(args, sProperty)\n\t}\n\tif len(SProperty) > 0 {\n\t\targs = append(args, SProperty)\n\t}\n\tif len(t) > 0 {\n\t\targs = append(args, \"-t \"+t)\n\t}\n\tif len(name) > 0 {\n\t\targs = append(args, name)\n\t}\n\treturn &execute{ctx: ctx, name: z.cmd, args: args}\n}",
"func (r renderer) List(out *bytes.Buffer, text func() bool, flags int) {\n\t// TODO: This is not desired (we'd rather not write lists as part of summary),\n\t// but see this issue: https://github.com/russross/blackfriday/issues/189\n\tmarker := out.Len()\n\tif !text() {\n\t\tout.Truncate(marker)\n\t}\n\tout.Write([]byte{' '})\n}",
"func (c *DomainsListCmd) Run() (err error) {\n\ts := NewSpinner(\"Looking up domains\")\n\ts.Start()\n\n\tdomains, err := api.Domains(c.AccountID)\n\ts.Stop()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ttable := NewTable(os.Stdout)\n\ttable.SetHeader([]string{\"Domain\", \"Engaged\"})\n\n\tfor _, d := range domains {\n\t\tr := []string{d.DomainName, fmt.Sprintf(\"%t\", d.Engaged)}\n\t\ttable.Append(r)\n\t}\n\ttable.Render()\n\n\treturn err\n}",
"func (ll *Doubly[T]) Display() {\n\tfor cur := ll.Head.Next; cur != ll.Head; cur = cur.Next {\n\t\tfmt.Print(cur.Val, \" \")\n\t}\n\n\tfmt.Print(\"\\n\")\n}",
"func runListDB(cmd *cobra.Command) {\n\tcmd.Println(\"Getting Set List\")\n\n\tnames, err := query.GetNames(\"\", conn)\n\tif err != nil {\n\t\tcmd.Println(\"Getting Set List : \", err)\n\t\treturn\n\t}\n\n\tcmd.Println(\"\")\n\n\tfor _, name := range names {\n\t\tcmd.Println(name)\n\t}\n\n\tcmd.Println(\"\")\n}",
"func (z *zpoolctl) List(ctx context.Context, name, options string, properties []string, t string) *execute {\n\targs := []string{\"list\"}\n\tif len(options) > 0 {\n\t\targs = append(args, options)\n\t}\n\tif properties != nil {\n\t\tkv := \"-o \"\n\t\tfor _, v := range properties {\n\t\t\tkv += v + \",\"\n\t\t}\n\t\tkv = strings.TrimSuffix(kv, \",\")\n\t\targs = append(args, kv)\n\t}\n\tif len(t) > 0 {\n\t\targs = append(args, \"-T \"+t)\n\t}\n\targs = append(args, name)\n\treturn &execute{ctx: ctx, name: z.cmd, args: args}\n}",
"func display(w http.ResponseWriter, tmpl string, data interface{}) {\n\terr := templates.ExecuteTemplate(w, tmpl, data)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t}\n}",
"func ShowList(selfMember *MemberID, memberList *[]MemberID) {\n\tfmt.Println(\"You are:\")\n\tfmt.Println(*selfMember)\n\n\tfmt.Println(\"And this is your membership list:\")\n\tfor _, Member := range *memberList {\n\t\tfmt.Println(Member)\n\t}\n\tfmt.Println()\n}",
"func (h *HandlersApp01sqVendor) ListShow(w http.ResponseWriter, offset int, msg string) {\n\tvar err error\n\tvar rcds []App01sqVendor.App01sqVendor\n\tvar name = \"App01sq.Vendor.list.gohtml\"\n\tvar str strings.Builder\n\n\tlog.Printf(\"hndlrVendor.ListShow(%d)\\n\", offset)\n\tlog.Printf(\"\\tname: %s\\n\", name)\n\tw2 := io.MultiWriter(w, &str)\n\n\t// Get the records to display\n\trcds, err = h.db.RowPage(offset, h.rowsPerPage)\n\tif err != nil {\n\n\t\tlog.Printf(\"...end hndlrVendor.ListShow(Error:400) - No Key\\n\")\n\n\t\thttp.Error(w, http.StatusText(400), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tdata := struct {\n\t\tRcds []App01sqVendor.App01sqVendor\n\t\tOffset int\n\t\tMsg string\n\t}{rcds, offset, msg}\n\n\tlog.Printf(\"\\tData: %+v\\n\", data)\n\n\tlog.Printf(\"\\tExecuting template: %s\\n\", name)\n\terr = h.Tmpls.Tmpls.ExecuteTemplate(w2, name, data)\n\tif err != nil {\n\t\tfmt.Fprintf(w, err.Error())\n\t}\n\n\tlog.Printf(\"\\t output: %s\\n\", str.String())\n\tlog.Printf(\"...end hndlrVendor.ListShow(%s)\\n\", util.ErrorString(err))\n}",
"func Display(c *client.Client, evts chan *events.Event) {\n\t// Register all encodings\n\tencoding.Register()\n\n\t// Channels\n\tclear := make(chan bool)\n\ttermEvts := make(chan tcell.Event)\n\n\t// Create a screen\n\ts, err := tcell.NewScreen()\n\tscreen = s\n\n\tif err != nil {\n\t\terrs.Emerg(err)\n\t}\n\n\t// Initialize the screen\n\tif err := s.Init(); err != nil {\n\t\terrs.Emerg(err)\n\t}\n\n\t// Set default style\n\ts.SetStyle(tcell.StyleDefault.Foreground(tcell.ColorWhite))\n\n\t// Display prompt and chatbox\n\tgo prompt.DisplayPrompt(s, c, termEvts, clear)\n\tgo chatbox.DisplayChatbox(s, evts, clear)\n\tgo statusline.DisplayStatusLine(s, evts)\n\n\t// Poll terminal events\n\tgo func() {\n\t\tfor {\n\t\t\ttermEvts <- s.PollEvent()\n\t\t}\n\t}()\n}",
"func List(ctx *cli.Context) error {\n\tm := task.NewFileManager()\n\ttasks := m.GetAllOpenTasks()\n\n\ttasks = sortTasks(ctx.String(\"sort\"), tasks)\n\n\tfor _, v := range tasks {\n\t\tfmt.Println(v.String())\n\t}\n\treturn nil\n}",
"func DisplayResults(number uint64, length uint64) {\n\tfmt.Println(\"Longest Collatz sequence under\", gridSize, \"starts at\")\n\tfmt.Println(number)\n\tfmt.Println(\"and has a length of\")\n\tfmt.Println(length)\n}",
"func (l *ToDoList) showList() []string {\n\treturn l.list\n}",
"func (p *Proxy) handleShowProcesslist(session *driver.Session, query string, node sqlparser.Statement) (*sqltypes.Result, error) {\n\t// sessions := spanner.sessions\n\tqr := &sqltypes.Result{}\n\tqr.Fields = []*querypb.Field{\n\t\t{Name: \"Id\", Type: querypb.Type_INT64},\n\t\t{Name: \"User\", Type: querypb.Type_VARCHAR},\n\t\t{Name: \"Host\", Type: querypb.Type_VARCHAR},\n\t\t{Name: \"db\", Type: querypb.Type_VARCHAR},\n\t\t{Name: \"Command\", Type: querypb.Type_VARCHAR},\n\t\t{Name: \"Time\", Type: querypb.Type_INT32},\n\t\t{Name: \"State\", Type: querypb.Type_VARCHAR},\n\t\t{Name: \"Info\", Type: querypb.Type_VARCHAR},\n\t\t{Name: \"Rows_sent\", Type: querypb.Type_INT64},\n\t\t{Name: \"Rows_examined\", Type: querypb.Type_INT64},\n\t}\n\n\t// var sessionInfos []SessionInfo\n\t// privilegePlug := spanner.plugins.PlugPrivilege()\n\t// if privilegePlug.IsSuperPriv(session.User()) {\n\t// \tsessionInfos = sessions.Snapshot()\n\t// } else {\n\t// \tsessionInfos = sessions.SnapshotUser(session.User())\n\t// }\n\n\t// for _, info := range sessionInfos {\n\t// \trow := []sqltypes.Value{\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_INT64, []byte(fmt.Sprintf(\"%v\", info.ID))),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_VARCHAR, []byte(info.User)),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_VARCHAR, []byte(info.Host)),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_VARCHAR, []byte(info.DB)),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_VARCHAR, []byte(info.Command)),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_INT32, []byte(fmt.Sprintf(\"%v\", info.Time))),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_VARCHAR, []byte(info.State)),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_VARCHAR, []byte(info.Info)),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_INT64, []byte(fmt.Sprintf(\"%v\", 0))),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_INT64, []byte(fmt.Sprintf(\"%v\", 0))),\n\t// \t}\n\t// \tqr.Rows = append(qr.Rows, row)\n\t// }\n\treturn qr, nil\n}",
"func runList(props ListCmdProps, output io.Writer, repo db.Repo) error {\n\tstart, err := parseDateOrDefault(props.startDate)\n\n\tif props.startDate == \"\" {\n\t\tdefaultStart := start.Add(-1 * time.Hour * 24 * 30)\n\t\tstart = &defaultStart\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tend, err := parseDateOrDefault(props.endDate)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tworkingDays, err := repo.ListRange(start, end)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\trenderTable(workingDays, output)\n\n\treturn nil\n}",
"func (cmd *networkListCmd) Execute(args []string) error {\n\tcmd.log.Infof(\"Supported Providers:\\n%s\\n\", cmd.conns.NetworkListProviders())\n\treturn nil\n}",
"func displayRules(c *cli.Context) error {\n\tfmt.Println(\"[+] Retrieving Rules\")\n\trules, er := mapi.DisplayRules()\n\n\tif er != nil {\n\t\treturn er\n\t}\n\n\tfmt.Printf(\"[+] Found %d rules\\n\", len(rules))\n\tfor _, v := range rules {\n\t\tfmt.Printf(\"Rule: %s RuleID: %x\\n\", string(v.RuleName), v.RuleID)\n\t}\n\treturn er\n}",
"func (e *AliasExecutor) List(_ context.Context, cmdCtx CommandContext) (interactive.Message, error) {\n\tcmdVerb, cmdRes := parseCmdVerb(cmdCtx.Args)\n\tdefer e.reportCommand(cmdVerb, cmdRes, cmdCtx.Conversation.CommandOrigin, cmdCtx.Platform)\n\te.log.Debug(\"Listing aliases...\")\n\toutMsg := respond(e.getTabularOutput(cmdCtx.Conversation.ExecutorBindings), cmdCtx)\n\toutMsg.Sections = []interactive.Section{\n\t\t{\n\t\t\tBase: outMsg.Base,\n\t\t\tContext: []interactive.ContextItem{\n\t\t\t\t{Text: aliasesForCurrentBindingsMsg},\n\t\t\t},\n\t\t},\n\t}\n\toutMsg.Base = interactive.Base{}\n\n\treturn outMsg, nil\n}",
"func userListCommandFunc(cmd *cobra.Command, args []string) {\n\tif len(args) != 0 {\n\t\tExitWithError(ExitBadArgs, fmt.Errorf(\"user list command requires no arguments\"))\n\t}\n\n\tfmt.Println(\"用户列表\")\n}",
"func DisplayAll() {\n\n\tif len(dataStorageMap) == 0 {\n\t\tfmt.Println(\"Data Storage Empty!!! No data Found !!!\")\n\t} else {\n\t\tfor key, val := range dataStorageMap {\n\t\t\tfmt.Println(key, \"-->\", val)\n\t\t}\n\t}\n}",
"func List(title, text string, items []string) (string, bool, error) {\n\treturn listBox(title, text, \"ClassList\", items, false)\n}",
"func MachineListCommand(c *cli.Context, log logging.Logger, _ string) (int, error) {\n\topts := &machine.ListOptions{\n\t\tLog: log.New(\"machine:list\"),\n\t}\n\n\tinfos, err := machine.List(opts)\n\tif err != nil {\n\t\treturn 1, err\n\t}\n\n\ttabFormatter(os.Stdout, infos)\n\treturn 0, nil\n}",
"func (d *Driver) Display() error {\n\tbuff := make([]byte, d.Drivers*2)\n\n\tfor r := 0; r < 8; r++ {\n\t\trowNo := r + 1\n\t\tfor i := 0; i < d.Drivers; i++ {\n\t\t\tbuffIdx := d.Drivers*i + r\n\t\t\toutIdx := (d.Drivers - i - 1) * 2\n\t\t\tbuff[outIdx] = byte(rowNo)\n\t\t\tbuff[outIdx+1] = d.buff[buffIdx]\n\t\t}\n\n\t\tif err := d.dev.Tx(buff, nil); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}",
"func RunList(cmd *cobra.Command, args []string) {\n\tlist := &nbv1.NamespaceStoreList{\n\t\tTypeMeta: metav1.TypeMeta{Kind: \"NamespaceStoreList\"},\n\t}\n\tif !util.KubeList(list, &client.ListOptions{Namespace: options.Namespace}) {\n\t\treturn\n\t}\n\tif len(list.Items) == 0 {\n\t\tfmt.Printf(\"No namespace stores found.\\n\")\n\t\treturn\n\t}\n\ttable := (&util.PrintTable{}).AddRow(\n\t\t\"NAME\",\n\t\t\"TYPE\",\n\t\t\"TARGET-BUCKET\",\n\t\t\"PHASE\",\n\t\t\"AGE\",\n\t)\n\tfor i := range list.Items {\n\t\tbs := &list.Items[i]\n\t\ttb, err := util.GetNamespaceStoreTargetBucket(bs)\n\t\tif err == nil {\n\t\t\ttable.AddRow(\n\t\t\t\tbs.Name,\n\t\t\t\tstring(bs.Spec.Type),\n\t\t\t\ttb,\n\t\t\t\tstring(bs.Status.Phase),\n\t\t\t\tutil.HumanizeDuration(time.Since(bs.CreationTimestamp.Time).Round(time.Second)),\n\t\t\t)\n\t\t}\n\t}\n\tfmt.Print(table.String())\n}",
"func (p *showPlan) Execute(ctx context.Context) (*table.Table, error) {\n\tt, err := table.New([]string{\"?graph_id\"})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\terrs := make(chan error)\n\tnames := make(chan string)\n\tgo func() {\n\t\terrs <- p.store.GraphNames(ctx, names)\n\t\tclose(errs)\n\t}()\n\n\tfor name := range names {\n\t\tid := name\n\t\tt.AddRow(table.Row{\n\t\t\t\"?graph_id\": &table.Cell{\n\t\t\t\tS: &id,\n\t\t\t},\n\t\t})\n\t}\n\tif <-errs != nil {\n\t\treturn nil, err\n\t}\n\treturn t, nil\n}",
"func (e *Engine) ListCmds(w io.Writer, verbose bool, names ...string) error {\n\tif names == nil {\n\t\tnames = make([]string, 0, len(e.Cmds))\n\t\tfor name := range e.Cmds {\n\t\t\tnames = append(names, name)\n\t\t}\n\t\tsort.Strings(names)\n\t}\n\n\tfor _, name := range names {\n\t\tcmd := e.Cmds[name]\n\t\tusage := cmd.Usage()\n\n\t\tsuffix := \"\"\n\t\tif usage.Async {\n\t\t\tsuffix = \" [&]\"\n\t\t}\n\n\t\t_, err := fmt.Fprintf(w, \"%s %s%s\\n\\t%s\\n\", name, usage.Args, suffix, usage.Summary)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif verbose {\n\t\t\tif _, err := io.WriteString(w, \"\\n\"); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfor _, line := range usage.Detail {\n\t\t\t\tif err := wrapLine(w, line, 60, \"\\t\"); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t\tif _, err := io.WriteString(w, \"\\n\"); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}",
"func (o *listOptions) Run() error {\n\n\tids, err := backend.List(o.accessToken, o.pipelinesFolderPath, o.getAppServiceNames(), o.isCICD)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Unable to a get list of webhook IDs: %v\", err)\n\t}\n\n\tif ids != nil {\n\t\tif log.IsJSON() {\n\t\t\tmachineoutput.OutputSuccess(ids)\n\t\t} else {\n\t\t\tw := tabwriter.NewWriter(os.Stdout, 5, 2, 3, ' ', tabwriter.TabIndent)\n\t\t\tfmt.Fprintln(w, \"ID\")\n\t\t\tfmt.Fprintln(w, \"==\")\n\t\t\tfor _, id := range ids {\n\t\t\t\tfmt.Fprintln(w, id)\n\t\t\t}\n\t\t\tw.Flush()\n\t\t}\n\t}\n\n\treturn nil\n}",
"func displayLinks(links []string) {\n\tfor i, link := range links{\n\t\tfmt.Printf(\"[% 3d ] %s\\n\", i,link)\n\t}\n}",
"func displayInstructions(s tcell.Screen) {\n emitStr(s, 2, 2, tcell.StyleDefault, \"Press f/b to go to next/previous stretches\")\n emitStr(s, 2, 3, tcell.StyleDefault, \"Press p to toggle pause\")\n emitStr(s, 2, 4, tcell.StyleDefault, \"Press ESC exit\")\n return\n}",
"func createDisplaysList(e *Elevator) {\n\tdisplay := newDisplay(1, displayOn, 1)\n\te.floorDisplaysList = append(e.floorDisplaysList, *display)\n\tfor i := e.column.minFloor; i <= e.column.maxFloor; i++ {\n\t\tdisplay = newDisplay(i, displayOn, i)\n\t\te.floorDisplaysList = append(e.floorDisplaysList, *display)\n\t}\n}",
"func (task *Task) display() {\n\tterminalWidth, _ := terminal.Width()\n\ttheScreen := newScreen()\n\tif Config.Options.SingleLineDisplay {\n\n\t\tvar durString, etaString, stepString, errorString string\n\t\tdisplayString := \"\"\n\n\t\teffectiveWidth := int(terminalWidth)\n\n\t\tfillColor := color.ColorCode(strconv.Itoa(Config.Options.ColorSuccess) + \"+i\")\n\t\temptyColor := color.ColorCode(strconv.Itoa(Config.Options.ColorSuccess))\n\t\tif TaskStats.totalFailedTasks > 0 {\n\t\t\tfillColor = color.ColorCode(strconv.Itoa(Config.Options.ColorError) + \"+i\")\n\t\t\temptyColor = color.ColorCode(strconv.Itoa(Config.Options.ColorError))\n\t\t}\n\n\t\tnumFill := int(effectiveWidth) * TaskStats.completedTasks / TaskStats.totalTasks\n\n\t\tif Config.Options.ShowSummaryTimes {\n\t\t\tduration := time.Since(startTime)\n\t\t\tdurString = fmt.Sprintf(\" Runtime[%s]\", showDuration(duration))\n\n\t\t\ttotalEta := time.Duration(Config.totalEtaSeconds) * time.Second\n\t\t\tremainingEta := time.Duration(totalEta.Seconds()-duration.Seconds()) * time.Second\n\t\t\tetaString = fmt.Sprintf(\" ETA[%s]\", showDuration(remainingEta))\n\t\t}\n\n\t\tif TaskStats.completedTasks == TaskStats.totalTasks {\n\t\t\tetaString = \"\"\n\t\t}\n\n\t\tif Config.Options.ShowSummarySteps {\n\t\t\tstepString = fmt.Sprintf(\" Tasks[%d/%d]\", TaskStats.completedTasks, TaskStats.totalTasks)\n\t\t}\n\n\t\tif Config.Options.ShowSummaryErrors {\n\t\t\terrorString = fmt.Sprintf(\" Errors[%d]\", TaskStats.totalFailedTasks)\n\t\t}\n\n\t\tvalueStr := stepString + errorString + durString + etaString\n\n\t\tdisplayString = fmt.Sprintf(\"%[1]*s\", -effectiveWidth, fmt.Sprintf(\"%[1]*s\", (effectiveWidth+len(valueStr))/2, valueStr))\n\t\tdisplayString = fillColor + displayString[:numFill] + color.Reset + emptyColor + displayString[numFill:] + color.Reset\n\n\t\ttheScreen.Display(displayString, 0)\n\t} else {\n\t\ttheScreen.Display(task.String(int(terminalWidth)), task.Display.Index)\n\t}\n\n}",
"func handleList(cmd *cobra.Command, args []string) {\n\tquery := `\n\tSELECT\n chat.chat_identifier AS id,\n count(chat.chat_identifier) AS messages\n\tFROM\n\t\tchat\n\t\tJOIN chat_message_join ON chat.\"ROWID\" = chat_message_join.chat_id\n\t\tJOIN message ON chat_message_join.message_id = message.\"ROWID\"\n\tWHERE TRUE\n\t-- filter out message reactions\n\tAND text IS NOT NULL\n\tAND associated_message_type == 0\n\t-- filter out empty messages\n\tAND trim(text, ' ') <> ''\n\tAND text <> ''\n\tGROUP BY\n\t\tchat.chat_identifier\n\tHAVING messages > ?\n\tORDER BY\n\t\tmessages DESC, id DESC;\n\t`\n\trows, err := db.Query(query, count)\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\tdefer rows.Close()\n\tfor rows.Next() {\n\t\tvar id string\n\t\tvar messages string\n\t\terr = rows.Scan(&id, &messages)\n\t\tif err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\t\tif isatty.IsTerminal(uintptr(unix.Stdout)) {\n\t\t\tfmt.Printf(\"%s\\t%s\\n\", aurora.Yellow(id), aurora.Blue(messages))\n\t\t} else {\n\t\t\tfmt.Printf(\"%s\\t%s\\n\", id, messages)\n\t\t}\n\t}\n\terr = rows.Err()\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n}",
"func displayAllRecords(records []Record) {\n\tfmt.Printf(\"\\nDisplaying all records...\\n\\n\")\n\n\tfor i := 0; i < len(records); i++ {\n\t\tfmt.Printf(\"Record ID: %d: %+v\\n\", i, records[i])\n\t\ttime.Sleep(5 * time.Millisecond) // 5ms between records\n\t}\n}",
"func ShowCurrentUserList() {\n\tul := &define.UserList\n\tShowUserList(ul)\n}",
"func (ll *DoubleLinkedList) Display() {\n\tfor cur := ll.head; cur != nil; cur = cur.next {\n\t\tfmt.Print(cur.val, \" \")\n\t}\n\n\tfmt.Print(\"\\n\")\n}",
"func DisplayNumber(number int) {\n\tfmt.Println(\"Display\", number)\n}",
"func (c *listCommand) Run(ctx context.Context, _ *commoncli.Env, serverClient util.ServerClient) error {\n\tfilter := &agentv1.ListAgentsRequest_Filter{}\n\tif len(c.selectors) > 0 {\n\t\tmatchBehavior, err := parseToSelectorMatch(c.matchSelectorsOn)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tselectors := make([]*types.Selector, len(c.selectors))\n\t\tfor i, sel := range c.selectors {\n\t\t\tselector, err := util.ParseSelector(sel)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"error parsing selector %q: %w\", sel, err)\n\t\t\t}\n\t\t\tselectors[i] = selector\n\t\t}\n\t\tfilter.BySelectorMatch = &types.SelectorMatch{\n\t\t\tSelectors: selectors,\n\t\t\tMatch: matchBehavior,\n\t\t}\n\t}\n\n\tagentClient := serverClient.NewAgentClient()\n\n\tpageToken := \"\"\n\tresponse := new(agentv1.ListAgentsResponse)\n\tfor {\n\t\tlistResponse, err := agentClient.ListAgents(ctx, &agentv1.ListAgentsRequest{\n\t\t\tPageSize: 1000, // comfortably under the (4 MB/theoretical maximum size of 1 agent in MB)\n\t\t\tPageToken: pageToken,\n\t\t\tFilter: filter,\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tresponse.Agents = append(response.Agents, listResponse.Agents...)\n\t\tif pageToken = listResponse.NextPageToken; pageToken == \"\" {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn c.printer.PrintProto(response)\n}",
"func (v *View) Display() {\n\tv.DisplayView()\n\tv.cursor.Display()\n\tv.sline.Display()\n}",
"func (s *Service) showDisplay(c context.Context, mid int64, plat int8, build int, buvid, channel, ip, ak, network, mobiApp,\n\tdevice, language, adExtra string, isTmp, isRegion, isIndex bool, now time.Time) (res []*show.Show) {\n\tvar (\n\t\tbnr string\n\t\tbanners map[int][]*resource.Banner\n\t\tshowRec []*show.Item\n\t\tshowLive []*show.Item\n\t\tisBangumi = false\n\t\tisRegionBanner = false\n\t\tss []*show.Show\n\t\tresIDStr = _bannersPlat[plat]\n\t)\n\tif language == \"\" {\n\t\tlanguage = _initlanguage\n\t}\n\tkey := fmt.Sprintf(_initShowKey, plat, language)\n\tif (plat == model.PlatIPhone && build > 6050) || (plat == model.PlatAndroid && build > 512007) {\n\t\tss = s.cacheBgEp[key]\n\t} else if ((mobiApp == \"iphone\" && build > 5600) || (mobiApp == \"android\" && build > 507000)) && isIndex {\n\t\tss = s.cacheBg[key]\n\t} else {\n\t\tss = s.cache[key]\n\t}\n\tif isTmp {\n\t\tss = s.tempCache[key]\n\t}\n\tif len(ss) == 0 {\n\t\tres = _emptyShow\n\t\treturn\n\t}\n\tres = make([]*show.Show, 0, len(ss))\n\tif (mobiApp == \"iphone\" && build > 4310) || (mobiApp == \"android\" && build > 502000) || isIndex {\n\t\tisBangumi = true\n\t}\n\tif (mobiApp == \"iphone\" && build > 4350) || (mobiApp == \"android\" && build > 503000) {\n\t\tisRegionBanner = true\n\t}\n\tg, ctx := errgroup.WithContext(c)\n\tg.Go(func() error {\n\t\tbanners = s.resBanners(ctx, plat, build, mid, resIDStr, channel, ip, buvid, network, mobiApp, device, adExtra)\n\t\treturn nil\n\t})\n\tif !isRegion {\n\t\tg.Go(func() error {\n\t\t\tshowRec = s.getRecommend(ctx, mid, build, plat, buvid, network, mobiApp, device, ip)\n\t\t\treturn nil\n\t\t})\n\t\tg.Go(func() error {\n\t\t\tshowLive = s.getLive(ctx, mid, ak, ip, 0, now)\n\t\t\treturn nil\n\t\t})\n\t}\n\tif err := g.Wait(); err != nil {\n\t\tlog.Error(\"showDisplay errgroup.WithContext error(%v)\", err)\n\t}\n\tfor i, sw := range ss {\n\t\tif mobiApp == \"white\" && 101220 >= build && sw.Param == \"165\" { // 165 ad region\n\t\t\tcontinue\n\t\t} else if sw.Param != \"165\" || ((mobiApp != \"iphone\" || device != \"pad\") || build <= 3590) {\n\t\t\tif model.InvalidBuild(build, sw.Build, sw.Condition) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\tif sw.Type == \"recommend\" {\n\t\t\tif isRegion {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tsw = s.dealRecommend(c, sw, plat, mid, build, buvid, network, mobiApp, device, ip, showRec)\n\t\t\tbnr = \"0\"\n\t\t} else if sw.Type == \"live\" {\n\t\t\tif isRegion {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tsw = s.dealLive(c, sw, showLive)\n\t\t\tbnr = \"65537\"\n\t\t} else if sw.Type == \"bangumi\" {\n\t\t\tif ok := s.auditRegion(mobiApp, plat, build, \"13\"); ok {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif isRegion && isBangumi && !isRegionBanner {\n\t\t\t\tbnr = \"-1\"\n\t\t\t} else if isRegion && !isBangumi && !isRegionBanner {\n\t\t\t\tcontinue\n\t\t\t} else {\n\t\t\t\tbnr = \"13\"\n\t\t\t}\n\t\t} else {\n\t\t\tbnr = sw.Param\n\t\t\tif isRegion {\n\t\t\t\tif ok := s.auditRegion(mobiApp, plat, build, sw.Param); ok {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tif !isRegionBanner {\n\t\t\t\t\tif sw.Param == \"1\" && !isBangumi {\n\t\t\t\t\t\tbnr = \"-1\"\n\t\t\t\t\t}\n\t\t\t\t\tif sw.Type == \"topic\" && i > 0 && (ss[i-1].Type == \"bangumi\" || ss[i-1].Type == \"1\") {\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tsw.Banner = s.getBanners(c, plat, build, bnr, channel, ip, banners, isIndex)\n\t\tres = append(res, sw)\n\t}\n\treturn\n}",
"func (i *VMList) Execute() ([]VMListResponse, error) {\n\treturn i.VMList(i.params)\n}",
"func (c *CmdSimpleFSSyncShow) Run() error {\n\tcli, err := GetSimpleFSClient(c.G())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tctx := context.TODO()\n\tui := c.G().UI.GetTerminalUI()\n\tif c.getAll {\n\t\tres, err := cli.SimpleFSSyncConfigAndStatus(ctx, nil)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tfor _, folder := range res.Folders {\n\t\t\tp, err := makeSimpleFSPath(mountDir + \"/\" + folder.Folder.String())\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tui.Printf(\"%s\\n\", folder.Folder)\n\t\t\terr = printFolderStatus(\n\t\t\t\tctx, cli, ui, folder.Config, folder.Status, \" \", p, false)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tui.Printf(\"\\n\")\n\t\t}\n\n\t\tprintPrefetchStatus(ui, res.OverallStatus, \"\")\n\t\tprintLocalStats(ui, res.OverallStatus)\n\t} else {\n\t\tres, err := cli.SimpleFSFolderSyncConfigAndStatus(ctx, c.path)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\ttlfPath, err := toTlfPath(c.path)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn printFolderStatus(\n\t\t\tctx, cli, ui, res.Config, res.Status, \"\", tlfPath, true)\n\t}\n\n\treturn nil\n}",
"func (h Client) list(namespace string, extraArgs ...string) (string, error) {\n\targs := []string{\"list\", \"--namespace\", namespace}\n\targs = append(args, extraArgs...)\n\tstdOut, stdErr, err := h.Exec(args...)\n\tif err != nil && stdErr != \"\" {\n\t\treturn \"\", errors.New(stdErr)\n\t}\n\treturn stdOut, nil\n}",
"func (c *AppsListCmd) Run(cli *CLI, logWriters *LogWriters) (err error){\n\ts := NewSpinner(\"Looking up apps\",logWriters)\n\ts.Start()\n\n\taccounts, err := api.Accounts()\n\tif err != nil {\n\t\ts. Stop()\n\t\tlog.Error().Err(err).Msg(\"Unable to look up accounts\");\n\t\tos.Exit(1)\n\t}\n\ts.Stop()\n\tif c.AccountID != 0{\n\t\tnewAct := []api.Account{}\n\t\tfor _, a := range accounts {\n\t\t\tif a.ID == c.AccountID{\n\t\t\t\tnewAct = append(newAct, a)\n\t\t\t}\n\t\t\taccounts = newAct\n\t\t}\n\t\tif(len(newAct) == 0){\n\t\t\tlog.Info().Int(\"Account ID\",c.AccountID).Msg(\"Unable to find accounts where\")\n\t\t\tos.Exit(1)\n\t\t}\n\t}\n\tfmt.Println()\n\tfmt.Println()\n\tfor _, acc := range accounts {\n\t\tlog.Info().Msg(fmt.Sprint(HiWhite(\"Account #\"),HiWhite(strconv.Itoa(acc.ID)),\" - \", HiYellow(acc.AccountName)))\n\t\ttable := NewTable(cli, os.Stdout)\n\t\ttable.SetHeader([]string{\"App ID\", \"App Name\"})\n\t\ttable.SetColumnColor(tablewriter.Colors{tablewriter.Normal,tablewriter.FgWhiteColor},\n\t\ttablewriter.Colors{tablewriter.Normal, tablewriter.FgHiGreenColor})\n\t\ttable.SetAlignment(tablewriter.ALIGN_LEFT)\n\t\ttable.SetCenterSeparator(\"\")\n\t\ttable.SetColumnSeparator(\"\")\n\t\ttable.SetNoWhiteSpace(true)\n\t\ttable.SetAutoMergeCells(true)\n\t\ttable.SetRowLine(true)\n\t\tfor _, app := range acc.Applications {\n\t\t\t\tr := []string{strconv.Itoa(app.ID), strings.Trim(app.ApplicationName,\"\\\"\")}\n\t\t\t\ttable.Append(r)\n\t\t}\n\t\ttable.Render()\n\t\tfmt.Println()\n\t\tfmt.Println()\n\t}\n\treturn err\n}",
"func RunListDisk() {\n\n\t// dir, err := filepath.Abs(filepath.Dir(os.Args[0]))\n\t// if err != nil {\n\t// \tlog.Fatal(err)\n\t// \treturn\n\t// }\n\n\t// lsscsipath := path.Join(dir, \"lsscsi\")\n\t// if _, err := os.Stat(lsscsipath); os.IsNotExist(err) {\n\t// \tlsscsipath = \"lsscsi\"\n\t// }\n\tlsscsipath := \"lsscsi\"\n\tcmd := exec.Command(lsscsipath, \"-s\", \"-g\")\n\tstdout, err := cmd.StdoutPipe()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tif err := cmd.Start(); err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\ttimer := time.AfterFunc(10*time.Second, func() {\n\t\tcmd.Process.Kill()\n\t})\n\n\tscanner := bufio.NewScanner(stdout)\n\tvar hddinfo []string\n\tvar hddchanged bool\n\tvar wg sync.WaitGroup\n\tfor scanner.Scan() {\n\t\tss := scanner.Text()\n\t\tfmt.Println(ss)\n\t\thddinfo = append(hddinfo, ss)\n\t\tif !DetectData.MatchKey(ss) {\n\t\t\thddchanged = true\n\t\t}\n\t\tif !DetectData.ContainsKey(ss) {\n\t\t\t//\\s Matches any white-space character.\n\t\t\tr := regexp.MustCompile(`^([\\s\\S]{13})(disk[\\s\\S]{4})([\\s\\S]{9})([\\s\\S]{17})([\\s\\S]{6})([\\s\\S]{11})([\\s\\S]{11})([\\s\\S]+)$`)\n\t\t\tdiskinfos := r.FindStringSubmatch(ss)\n\t\t\tif len(diskinfos) == 9 {\n\t\t\t\tvar dddect = NewSyncDataDetect()\n\t\t\t\tdddect.detectHDD.Locpath = strings.Trim(diskinfos[1], \" \")\n\t\t\t\tdddect.detectHDD.Type = strings.Trim(diskinfos[2], \" \")\n\t\t\t\tdddect.detectHDD.Manufacture = strings.Trim(diskinfos[3], \" \")\n\t\t\t\tdddect.detectHDD.Model = strings.Trim(diskinfos[4], \" \")\n\t\t\t\tdddect.detectHDD.Version = strings.Trim(diskinfos[5], \" \")\n\t\t\t\tdddect.detectHDD.LinuxName = strings.Trim(diskinfos[6], \" \")\n\t\t\t\tdddect.detectHDD.SGLibName = strings.Trim(diskinfos[7], \" \")\n\t\t\t\tdddect.detectHDD.Size = strings.Trim(diskinfos[8], \" \")\n\n\t\t\t\tif strings.Index(dddect.detectHDD.LinuxName, `/dev/`) == -1 {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\t//hddchanged = true\n\t\t\t\tDetectData.AddValue(ss, dddect)\n\t\t\t\twg.Add(1)\n\t\t\t\tgo dddect.ReadDataFromSmartCtl(&wg)\n\t\t\t}\n\t\t} else {\n\t\t\tif vv, ok := DetectData.Get(ss); ok {\n\t\t\t\tif len(vv.detectHDD.UILabel) == 0 && len(vv.detectHDD.Otherinfo) == 0 {\n\t\t\t\t\twg.Add(1)\n\t\t\t\t\tgo vv.ReadDataFromSmartCtl(&wg)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\ttimer.Stop()\n\tDetectData.RemoveOld(hddinfo)\n\n\ttime.Sleep(4 * time.Second)\n\n\tif hddchanged {\n\t\tfmt.Print(\"changed!\")\n\t\tcclist, err := configxmldata.Conf.GetCardListIndex()\n\t\tif err == nil {\n\t\t\tfor _, i := range cclist {\n\t\t\t\twg.Add(1)\n\t\t\t\tgo SASHDDinfo.RunCardInfo(i, &wg)\n\t\t\t}\n\t\t}\n\t\tfor i := 0; i < 30; i++ {\n\t\t\tif waitTimeout(&wg, 10*time.Second) {\n\t\t\t\tfmt.Println(\"Timed out waiting for wait group\")\n\t\t\t\tMergeCalibration()\n\t\t\t} else {\n\t\t\t\tfmt.Println(\"Wait group finished\")\n\t\t\t\tMergeCalibration()\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t} else {\n\t\twaitTimeout(&wg, 300*time.Second)\n\t}\n\n}",
"func DisplayHelp(b *Brute, m *discordgo.MessageCreate, c []*Command) {\n\tif c == nil || len(c) == 0 {\n\t\tmessage := \"Supported commands:\\n\"\n\t\tfor _, cmd := range b.Commands {\n\t\t\tmessage = fmt.Sprintf(\"%s\\n`%s%s`%s\", message, b.Prefix, cmd.Cmd[0], getAliases(cmd.Cmd, b.Prefix))\n\t\t}\n\n\t\tmessage = fmt.Sprintf(\"%s\\n\\nUse `%shelp [command]` for more info about the concrete command\", message, b.Prefix)\n\n\t\t_, err := b.Session.ChannelMessageSend(m.ChannelID, message)\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"Failed to send message: %v\\n\", err)\n\t\t}\n\n\t\treturn\n\t}\n\n\tfor _, cmd := range c {\n\t\tif cmd != nil {\n\t\t\tDisplayCommandHelp(b, m, cmd)\n\t\t}\n\t}\n}",
"func visTasks(tasks []db.Task, full bool) {\n\tstasks := make([]string, len(tasks))\n\tfor i, val := range tasks {\n\t\tif full {\n\t\t\tstasks[i] = fmt.Sprintf(\"Task no. %d)\\n%v\", i+1, val)\n\t\t} else {\n\t\t\tstasks[i] = fmt.Sprintf(\"Task no. %d) %s\\n\", i+1, val.Body)\n\t\t}\n\t}\n\tif full {\n\t\tfmt.Println(strings.Join(stasks, \"\\n\\n\"))\n\t} else {\n\t\tfmt.Print(strings.Join(stasks, \"\"))\n\t}\n}",
"func show(c *cli.Context) {\n\tif c.NArg() < 1 {\n\t\tfmt.Fprintln(c.App.ErrWriter, \"show requires at least 1 argument\")\n\t\tcli.ShowCommandHelp(c, \"show\")\n\t\treturn\n\t}\n\n\t// parse rdbfile\n\tfmt.Fprintln(c.App.Writer, \"start parsing...\")\n\tinstances := []string{}\n\tfor _, file := range c.Args() {\n\t\tdecoder := rdr.NewDecoder()\n\t\tgo decode(c, decoder, file)\n\t\tcounter := rdr.NewCounter()\n\t\tcounter.Count(decoder.Entries)\n\t\tfilename := filepath.Base(file)\n\t\trdr.Counters.Set(filename, counter)\n\t\tinstances = append(instances, filename)\n\t\tfmt.Fprintf(c.App.Writer, \"parse %v done\\n\", file)\n\t}\n\n\t// init html template\n\t// init common data in template\n\trdr.InitHTMLTmpl()\n\trdr.TplCommonData[\"Instances\"] = instances\n\n\t// start http server\n\tstaticFS := assetfs.AssetFS{\n\t\tAsset: static.Asset,\n\t\tAssetDir: static.AssetDir,\n\t\tAssetInfo: static.AssetInfo,\n\t}\n\trouter := httprouter.New()\n\trouter.ServeFiles(\"/static/*filepath\", &staticFS)\n\trouter.GET(\"/\", rdr.Index)\n\trouter.GET(\"/instance/:path\", rdr.RdbReveal)\n\tfmt.Fprintln(c.App.Writer, \"parsing finished, please access http://127.0.0.1:\"+c.String(\"port\"))\n\tlistenErr := http.ListenAndServe(\":\"+c.String(\"port\"), router)\n\tif listenErr != nil {\n\t\tfmt.Fprintf(c.App.ErrWriter, \"Listen port err: %v\\n\", listenErr)\n\t}\n}",
"func initShowCommand(ctx *Context, root *Command) {\n\tshow := NewCommand(\"show\")\n\tshow.Dispatch = func(line string) error {\n\t\tfmt.Printf(\"executing show.. show what..\\n\")\n\t\treturn nil\n\t}\n\tshow.Completer = func(line string, i int, r rune) (newline string, newpos int, ok bool) {\n\n\t\tDbg(\"show line=%v i=%d r=%v\\n\", line, i, r)\n\n\t\treturn completeCommands(ctx, show, line, i, r)\n\n\t}\n\n\tshow.TabComplete = func(line string, i int, r rune) (newline string, newpos int, ok bool) {\n\t\treturn completeCommands(ctx, show, line, i, r)\n\t}\n\n\tversion := show.NewCommand(\"version\")\n\tversion.Dispatch = func(line string) error {\n\t\tfmt.Printf(\"version 1.0\\n\")\n\t\tfmt.Printf(\"dbg line: %s\\n\", line)\n\t\treturn nil\n\t}\n\n\tfrontend := show.NewCommand(\"frontend\")\n\tfrontend.Dispatch = func(line string) error {\n\t\tfmt.Printf(\"frontend -- %q\\n\", line)\n\n\t\tfrontends, _ := ctx.Ha.ShowStat(-1, haproxyctl.ObjectFrontend, -1)\n\t\tfor _, f := range frontends {\n\n\t\t\tfmt.Printf(\"\\n\")\n\t\t\tfmt.Printf(\"frontend %s\\n\", f.Pxname)\n\t\t\tfmt.Printf(\" %+v\\n\", f)\n\t\t}\n\n\t\treturn nil\n\t}\n\n\tbar := show.NewCommand(\"bar\")\n\tbar.Dispatch = func(line string) error {\n\t\tfmt.Printf(\"bar -- %q\\n\", line)\n\t\treturn nil\n\t}\n\n\troot.Add(\"show\", show)\n}"
] | [
"0.65087223",
"0.63860244",
"0.6184103",
"0.616007",
"0.6151892",
"0.6128357",
"0.6125275",
"0.61051875",
"0.6054109",
"0.60203445",
"0.5973835",
"0.5953178",
"0.59499496",
"0.5948756",
"0.5931617",
"0.59210974",
"0.5900011",
"0.58987164",
"0.588424",
"0.58840364",
"0.5871788",
"0.5862851",
"0.5840662",
"0.5823783",
"0.58036613",
"0.57946706",
"0.57699025",
"0.57682633",
"0.57671434",
"0.57602555",
"0.57261467",
"0.57207584",
"0.56977516",
"0.5670484",
"0.56671053",
"0.5651792",
"0.56350076",
"0.5628817",
"0.56269765",
"0.5618964",
"0.5613971",
"0.5608961",
"0.56066287",
"0.5605286",
"0.5595949",
"0.55853695",
"0.55845773",
"0.5583824",
"0.55779904",
"0.55716556",
"0.55557525",
"0.5551984",
"0.5551223",
"0.55506134",
"0.554359",
"0.55421484",
"0.55322456",
"0.55250764",
"0.550613",
"0.55024976",
"0.5501112",
"0.54917866",
"0.54910535",
"0.5484393",
"0.5483361",
"0.54746276",
"0.5459713",
"0.5439428",
"0.54225326",
"0.53918356",
"0.5381005",
"0.5376172",
"0.5366075",
"0.53625804",
"0.5362156",
"0.53577024",
"0.53568935",
"0.53518546",
"0.53509676",
"0.53499705",
"0.5335613",
"0.53333706",
"0.5331108",
"0.5327024",
"0.5325867",
"0.5325038",
"0.5324673",
"0.53083336",
"0.5297439",
"0.5297153",
"0.52893037",
"0.52882046",
"0.5287955",
"0.52854615",
"0.52842814",
"0.52821964",
"0.5279928",
"0.5279878",
"0.52737385",
"0.5273618",
"0.52710897"
] | 0.0 | -1 |
execute a list of display lists | func CallLists(n int32, xtype uint32, lists unsafe.Pointer) {
C.glowCallLists(gpCallLists, (C.GLsizei)(n), (C.GLenum)(xtype), lists)
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (c *DashboardLsCmd) Exec(ctx context.Context, args []string) error {\n\tdashboards, err := c.Conf.Client().Search(ctx, grafsdk.DashTypeSearchOption())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ttable := tablewriter.NewWriter(os.Stdout)\n\ttable.SetHeader([]string{\"UID\", \"Folder\", \"Title\", \"URL\"})\n\n\tfor _, dashboard := range dashboards {\n\t\ttable.Append([]string{dashboard.UID, dashboard.FolderTitle, dashboard.Title, fmt.Sprintf(\"%s/%s\", c.Conf.APIURL, dashboard.URL)})\n\t}\n\ttable.Render()\n\n\treturn nil\n}",
"func (e *Election) executeDisplay(msg imessage.IMessage) {\n\tif e.Display != nil {\n\t\te.Display.Execute(msg)\n\t}\n}",
"func (l List) ShowList() {\n\tfmt.Println(\"ID\\tArrival\\tBurst\\tPriority\")\n\n\tfor i := 0; i < len(l); i++ {\n\t\tfmt.Printf(\"%d\\t%d\\t%d\\t%d\\n\",\n\t\t\tl[i].ID, l[i].Arrival, l[i].Burst, l[i].Priority)\n\t}\n\tfmt.Printf(\"Number of jobs: %d\\n\\n\", len(l))\n}",
"func ShowInteractiveList(manuals []*model.Manual) {\n\tnum := len(manuals)\n\tif num > 1 {\n\t\tfmt.Println(\"Found \" + strconv.Itoa(num) + \" manuals\")\n\t} else if num == 0 {\n\t\tfmt.Println(ansi.Red + \"No manuals found\" + ansi.Reset)\n\t\treturn\n\t}\n\n\t_, rows := createList(manuals)\n\tprompt := &survey.Select{\n\t\tMessage: \"Select a manual to show\",\n\t\tOptions: rows,\n\t}\n\tvar row string\n\tif err := survey.AskOne(prompt, &row, nil); err != nil {\n\t\tText(err)\n\t\treturn\n\t}\n\n\tfor i, r := range rows {\n\t\tif row == r {\n\t\t\tShowManual(manuals[i], false)\n\t\t\tbreak\n\t\t}\n\t}\n}",
"func (l *FileList) Display(\n\taccessKey *ui.Entry,\n\tsecretKey *ui.Entry,\n\tbucket *ui.Entry) (err error) {\n\n\tlist, err := comm.Refresh(\n\t\taccessKey.Text(), secretKey.Text(), bucket.Text())\n\n\tif err != nil {\n\t\treturn\n\t}\n\tlog.Println(\"Displaying the list.\")\n\n\tl.name.Clear()\n\tl.mType.Clear()\n\tl.size.Clear()\n\tl.checkbox.Clear()\n\tlog.Println(\"Boxes cleared.\")\n\n\tl.NameList = []string{}\n\tl.CheckboxList = []*ui.Checkbox{}\n\tlog.Println(\"Lists cleared.\")\n\n\t// Fix the number of the list.\n\t// TODO: clear this limit.\n\tif len(list) > 30 {\n\t\tlist = list[:30]\n\t}\n\n\tfor _, item := range list {\n\t\tl.name.Append(ui.NewLabel(item.Key), true)\n\t\tl.NameList = append(l.NameList, item.Key)\n\n\t\tl.mType.Append(ui.NewLabel(item.MimeType), true)\n\t\tl.size.Append(\n\t\t\tui.NewLabel(tool.FormatSize(item.Fsize)), true)\n\n\t\ttempCheckbox := ui.NewCheckbox(\"\")\n\t\tl.checkbox.Append(tempCheckbox, true)\n\t\tl.CheckboxList = append(l.CheckboxList, tempCheckbox)\n\t}\n\tlog.Println(\"Displayed the list.\")\n\n\treturn\n}",
"func runList(cmd *cobra.Command, args []string) {\n\tif conn == nil {\n\t\trunListWeb(cmd)\n\t\treturn\n\t}\n\n\trunListDB(cmd)\n}",
"func Display(possible ...Cmd) {\n\thint := randHint(possible)\n\tif hint != \"\" {\n\t\tui.Hint(hint, false)\n\t}\n}",
"func execlist(tx migration.LimitedTx, stms []string) error {\n\tvar err error\n\tfor _, s := range stms {\n\t\t_, err = tx.Exec(s)\n\t\tif err != nil {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn err\n}",
"func RunCmdActionList(c *CmdConfig) error {\n\tactions, err := c.Actions().List()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tactions, err = filterActionList(c, actions)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tsort.Sort(actionsByCompletedAt(actions))\n\n\titem := &displayers.Action{Actions: actions}\n\treturn c.Display(item)\n}",
"func runList(cmd *cobra.Command, args []string) error {\n\tverb := \"GET\"\n\turl := \"/v1/query\"\n\n\tresp, err := web.Request(cmd, verb, url, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcmd.Printf(\"\\n%s\\n\\n\", resp)\n\treturn nil\n}",
"func (r *Ri) Display(name, typeof, mode RtToken, parameterlist ...Rter) error {\n\n\tvar out = []Rter{name, typeof, mode, PARAMETERLIST}\n\tout = append(out, parameterlist...)\n\n\treturn r.writef(\"Display\", out...)\n}",
"func displayLinks(links []string) {\n\tfor i, link := range links{\n\t\tfmt.Printf(\"[% 3d ] %s\\n\", i,link)\n\t}\n}",
"func ShowList(manuals []*model.Manual) {\n\tnum := len(manuals)\n\tif num > 1 {\n\t\tfmt.Println(\"Found \" + strconv.Itoa(num) + \" manuals\")\n\t} else if num == 0 {\n\t\tfmt.Println(ansi.Red + \"No manuals found\" + ansi.Reset)\n\t\treturn\n\t}\n\n\t// show manuals\n\thead, rows := createList(manuals)\n\tfmt.Println(ansi.ColorCode(\"cyan\") + head + ansi.Reset)\n\tfmt.Println(strings.Join(rows, \"\\n\"))\n}",
"func List() {\n\terr := ListCmd.Parse(os.Args[2:])\n\tif err != nil || internal.Help {\n\t\tListCmd.Usage()\n\t\tos.Exit(0)\n\t}\n\n\tconfigurator, err := config.NewConfigurator()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tlist, err := configurator.GetCollaborators()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tsort.Slice(list, func(i, j int) bool {\n\t\treturn config.Less(list[i], list[j])\n\t})\n\n\ttw := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0x0)\n\tfor _, collab := range list {\n\t\tline := fmt.Sprintf(\"\\t%s\\t<%s>\", collab.Name, collab.Email)\n\t\tif (collab.Alias != collab.Name) {\n\t\t\tline = fmt.Sprintf(\"%s:%s\", collab.Alias, line)\n\t\t}\n\t\tfmt.Fprintln(tw, line)\n\t}\n\ttw.Flush()\n}",
"func (e *Engine) ListCmds(w io.Writer, verbose bool, names ...string) error {\n\tif names == nil {\n\t\tnames = make([]string, 0, len(e.Cmds))\n\t\tfor name := range e.Cmds {\n\t\t\tnames = append(names, name)\n\t\t}\n\t\tsort.Strings(names)\n\t}\n\n\tfor _, name := range names {\n\t\tcmd := e.Cmds[name]\n\t\tusage := cmd.Usage()\n\n\t\tsuffix := \"\"\n\t\tif usage.Async {\n\t\t\tsuffix = \" [&]\"\n\t\t}\n\n\t\t_, err := fmt.Fprintf(w, \"%s %s%s\\n\\t%s\\n\", name, usage.Args, suffix, usage.Summary)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif verbose {\n\t\t\tif _, err := io.WriteString(w, \"\\n\"); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfor _, line := range usage.Detail {\n\t\t\t\tif err := wrapLine(w, line, 60, \"\\t\"); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t\tif _, err := io.WriteString(w, \"\\n\"); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}",
"func listRun(cmd *cobra.Command, args []string) {\n\t\n\t// Items are read in using ReadItems; an example of stepwise refinement and procedural abstraction.\n\titems, err := todo.ReadItems(dataFile)\n\n\tvar data [][]string\n\n\t// Selection statement run to check if the To-Do list is empty\n\tif len(items) == 0 {\n\t\tlog.Println(\"No To-Do's in Your List - use the create command to get started!\")\n\t\treturn\n\t}\n\n\t// Selection statement run to check if there was an error from reading the data\n\tif err != nil {\n\t\tlog.Printf(\"%v\", err)\n\t} \n\n\t// Calls Sort method created in todo.go; an example of stepwise refinement\n\ttodo.Sort(items)\n\n\t// Iterative statement that appends all of the To-Dos in the list to a String array\n\t// Sequential statements are run within the FOR-EACH loop\n\tfor _, i := range items {\n\t\tvar temp []string\n\t\ttemp = append(temp, i.Label())\n\t\ttemp = append(temp, i.PrettyDone())\n\t\ttemp = append(temp, i.PrettyPrint())\n\t\ttemp = append(temp, i.Text)\n\t\tdata = append(data, temp)\n\t}\n\n\t\n\t/*\n\tSets the parameters for the To-Do list displayed as a table to the user. \n\tControls the appearence of the GUI.\n\t*/\n\ttable := tablewriter.NewWriter(os.Stdout)\n\ttable.SetHeader([]string {\"Position\", \"Done?\", \"Priority\", \"Task\"})\n\n\ttable.SetHeaderColor(tablewriter.Colors{tablewriter.Bold, tablewriter.BgHiBlueColor},\n\t\ttablewriter.Colors{tablewriter.FgWhiteColor, tablewriter.Bold, tablewriter.BgHiBlueColor},\n\t\ttablewriter.Colors{tablewriter.BgHiBlueColor, tablewriter.FgWhiteColor},\n\t\ttablewriter.Colors{tablewriter.BgHiBlueColor, tablewriter.FgWhiteColor})\n\n\ttable.SetColumnColor(tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiCyanColor},\n\t\ttablewriter.Colors{tablewriter.Bold, tablewriter.FgHiRedColor},\n\t\ttablewriter.Colors{tablewriter.Bold, tablewriter.FgHiMagentaColor},\n\t\ttablewriter.Colors{tablewriter.Bold, tablewriter.FgBlackColor})\n\n\tw := tabwriter.NewWriter(os.Stdout, 3, 0, 1, ' ', 0)\n\n\t// Iterative statement that appends all To-Do items marked done based on the condition of if either the --all or --done flag is active.\n\tfor p, i := range data {\n\t\tif allFlag || items[p].Done == doneFlag {\n\t\t\ttable.Append(i)\n\t\t}\n\t}\n\n\t// Renders the table\n\ttable.Render()\n\n\t// Flushes the writer\n\tw.Flush()\n\n}",
"func (l *Listener) List(in string , list *[]Message) error {\n\n\tfmt.Println(\"Command list\")\n\n\tswitch semantic {\n\n\tcase 1:\n\t\t*list = *queue\n\n\tcase 2:\n\n\t\tvar newlist []Message\n\n\t\tfor i := 0; i < len(*queue); i++ {\n\n\t\t\tif (*queue)[i].Visible {\n\n\t\t\t\tnewlist = append(newlist, (*queue)[i])\n\n\t\t\t}\n\n\t\t}\n\n\t\t*list = newlist\n\n\t}\n\n\treturn nil\n}",
"func visTasks(tasks []db.Task, full bool) {\n\tstasks := make([]string, len(tasks))\n\tfor i, val := range tasks {\n\t\tif full {\n\t\t\tstasks[i] = fmt.Sprintf(\"Task no. %d)\\n%v\", i+1, val)\n\t\t} else {\n\t\t\tstasks[i] = fmt.Sprintf(\"Task no. %d) %s\\n\", i+1, val.Body)\n\t\t}\n\t}\n\tif full {\n\t\tfmt.Println(strings.Join(stasks, \"\\n\\n\"))\n\t} else {\n\t\tfmt.Print(strings.Join(stasks, \"\"))\n\t}\n}",
"func printCommands(cmds []Commander) {\n\tconst format = \"%v\\t%v\\t%v\\n\"\n\ttw := new(tabwriter.Writer).Init(os.Stdout, 0, 8, 2, ' ', 0)\n\tfmt.Fprintf(tw, format, \"Alias\", \"Command\", \"Args\")\n\tfmt.Fprintf(tw, format, \"-----\", \"-------\", \"----\")\n\tfor _, t := range cmds {\n\t\tfmt.Fprintf(tw, format, t.Alias, t.Command, strings.Join(t.Args, \" \"))\n\t}\n\ttw.Flush() // calculate column widths and print table\n}",
"func (ts *TaskSet) DisplayByNext() {\n\tif ts.numTasksLoaded == 0 {\n\t\tfmt.Println(\"\\033[31mNo tasks found. Showing help.\\033[0m\")\n\t\tHelp(\"\")\n\t} else if len(ts.tasks) == 0 {\n\t\tExitFail(\"No matching tasks in given context or filter.\")\n\t} else if len(ts.tasks) == 1 {\n\t\tts.tasks[0].Display()\n\t\treturn\n\t} else {\n\t\tvar tasks []*Task\n\t\tw, h := MustGetTermSize()\n\n\t\th -= 8 // leave room for context message, header and prompt\n\n\t\tif h > len(ts.tasks) || h < 0 {\n\t\t\ttasks = ts.tasks\n\t\t} else {\n\t\t\ttasks = ts.tasks[:h]\n\t\t}\n\n\t\ttable := NewTable(\n\t\t\tw,\n\t\t\t\"ID\",\n\t\t\t\"Priority\",\n\t\t\t\"Tags\",\n\t\t\t\"Project\",\n\t\t\t\"Summary\",\n\t\t)\n\n\t\tfor _, t := range tasks {\n\t\t\tstyle := t.Style()\n\t\t\ttable.AddRow(\n\t\t\t\t[]string{\n\t\t\t\t\t// id should be at least 2 chars wide to match column header\n\t\t\t\t\t// (headers can be truncated)\n\t\t\t\t\tfmt.Sprintf(\"%-2d\", t.ID),\n\t\t\t\t\tt.Priority,\n\t\t\t\t\tstrings.Join(t.Tags, \" \"),\n\t\t\t\t\tt.Project,\n\t\t\t\t\tt.Summary,\n\t\t\t\t},\n\t\t\t\tstyle,\n\t\t\t)\n\t\t}\n\n\t\ttable.Render()\n\n\t\tif h >= len(ts.tasks) {\n\t\t\tfmt.Printf(\"\\n%v tasks.\\n\", len(ts.tasks))\n\t\t} else {\n\t\t\tfmt.Printf(\"\\n%v tasks, truncated to %v lines.\\n\", len(ts.tasks), h)\n\t\t}\n\t}\n}",
"func (cmd *ListKeys) Execute() error {\n\tvar walkKeyMap func(keymap *KeyMap, path []rune, f func(path []rune, b *KeyBinding))\n\twalkKeyMap = func(keymap *KeyMap, path []rune, f func(path []rune, b *KeyBinding)) {\n\t\tfor _, keyBinding := range keymap.Bindings() {\n\t\t\tchildPath := make([]rune, len(path))\n\t\t\tcopy(childPath, path)\n\t\t\tchildPath = append(childPath, keyBinding.Key())\n\t\t\tif keyBinding.HasChildren() {\n\t\t\t\twalkKeyMap(keyBinding.Children(), childPath, f)\n\t\t\t} else {\n\t\t\t\tf(childPath, keyBinding)\n\t\t\t}\n\t\t}\n\t}\n\n\tprintBinding := func(path []rune, b *KeyBinding) {\n\t\tpathString := []string{}\n\t\tfor _, r := range path {\n\t\t\tpathString = append(pathString, fmt.Sprintf(\"%c\", r))\n\t\t}\n\t\tfmt.Fprintf(cmd.Terminal, \"%s: %s\\n\", strings.Join(pathString, \" \"), b.Description())\n\t}\n\n\twalkKeyMap(cmd.CurrentKeyMap, []rune{}, printBinding)\n\treturn nil\n}",
"func list() (r []Output) {\n\t//r := []Output{}\n\tr = append(r, Output{Message: fmt.Sprint(\"one\")})\n\tr = append(r, Output{Message: fmt.Sprint(\"two\")})\n\tr = append(r, Output{Message: fmt.Sprint(\"three\")})\n\tr = append(r, Output{Message: fmt.Sprint(\"four\")})\n\n\treturn\n}",
"func (r renderer) List(out *bytes.Buffer, text func() bool, flags int) {\n\t// TODO: This is not desired (we'd rather not write lists as part of summary),\n\t// but see this issue: https://github.com/russross/blackfriday/issues/189\n\tmarker := out.Len()\n\tif !text() {\n\t\tout.Truncate(marker)\n\t}\n\tout.Write([]byte{' '})\n}",
"func ShowUserList(ul *[]define.User) {\n\t//ul := &define.UserList\n\tt := tablewriter.NewWriter(os.Stdout)\n\tt.SetAutoFormatHeaders(false)\n\tt.SetAutoWrapText(false)\n\tt.SetReflowDuringAutoWrap(false)\n\tt.SetHeader([]string{\"ID\", \"Name\", \"Cell\", \"Address\", \"Born\", \"Passwd\"})\n\tfor _, user := range *ul {\n\t\tid := strconv.FormatUint(uint64(user.ID), 10)\n\t\tt.Append([]string{id, user.Name, user.Cell, user.Address,\n\t\t\tuser.Born.Format(\"2006.01.02\"), user.Passwd})\n\t}\n\tt.Render()\n}",
"func (hc *Hailconfig) List() error {\n\tcols, _ := consolesize.GetConsoleSize()\n\tmaxLenAlias := 25\n\tmaxLenCommand := 80\n\tmaxLenDescription := 25\n\tif cols > 10 {\n\t\tmaxLenAlias = cols/4 - 5\n\t\tmaxLenCommand = cols / 2\n\t\tmaxLenDescription = cols/4 - 5\n\t}\n\n\tt := table.NewWriter()\n\tt.SetOutputMirror(os.Stdout)\n\tt.AppendHeader(table.Row{\"Alias\", \"Command\", \"Description\"})\n\tt.SetColumnConfigs([]table.ColumnConfig{\n\t\t{\n\t\t\tName: \"Alias\",\n\t\t\tWidthMin: 5,\n\t\t\tWidthMax: maxLenAlias,\n\t\t},\n\t\t{\n\t\t\tName: \"Command\",\n\t\t\tWidthMin: 10,\n\t\t\tWidthMax: maxLenCommand,\n\t\t}, {\n\t\t\tName: \"Description\",\n\t\t\tWidthMin: 5,\n\t\t\tWidthMax: maxLenDescription,\n\t\t},\n\t})\n\t//t.SetAllowedRowLength(90)\n\tfor alias, script := range hc.Scripts {\n\t\tt.AppendRow([]interface{}{alias, script.Command, script.Description})\n\t\tt.AppendSeparator()\n\t}\n\tt.Render()\n\treturn nil\n}",
"func CommandList() error {\n\tcommon.LogInfo2Quiet(\"My Apps\")\n\tapps, err := common.DokkuApps()\n\tif err != nil {\n\t\tcommon.LogWarn(err.Error())\n\t\treturn nil\n\t}\n\n\tfor _, appName := range apps {\n\t\tcommon.Log(appName)\n\t}\n\n\treturn nil\n}",
"func PrintMenuItems(menu_item_list []MenuItem) {\n groupOld := C_NOGROUP\n\n for _, item := range menu_item_list {\n //fmt.Println(item.command)\n\n if (groupOld != item.group) {\n if (groupOld != C_NOGROUP) {\n fmt.Println(C_SEPARATOR)\n }\n groupOld = item.group\n }\n if (item.command != \"\") {\n fmt.Println(\"prog \\\"\" + escapeSpecial(item.label) + \"\\\" \" + getIcon(item.icon) + \" \" + item.command)\n }\n if (item.subPipeMenu != \"\") {\n fmt.Println(\"menuprogreload \\\"\" + escapeSpecial(item.label) + \"\\\" \" + C_NOICON + \" 0 \" + item.subPipeMenu)\n }\n\n }\n}",
"func OnList(c *grumble.Context) error {\n\tlen := len(config.AppConfig.Plans)\n\tif len == 0 {\n\t\tfmt.Println(\"No plans available. Try \\\"read\\\".\")\n\t\treturn nil\n\t}\n\n\tfor i, plan := range config.AppConfig.Plans {\n\t\tfmt.Println(i+1, plan.Name)\n\t\tfor i, task := range plan.Tasks {\n\t\t\tif task.GetDescription() != \"\" {\n\t\t\t\tfmt.Println(\" \", strconv.Itoa(i+1)+\".\", task.GetDescription())\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}",
"func (n *NetworkListCommand) runNetworkList(args []string) error {\n\tlogrus.Debugf(\"list the networks\")\n\n\tctx := context.Background()\n\tapiClient := n.cli.Client()\n\trespNetworkResource, err := apiClient.NetworkList(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdisplay := n.cli.NewTableDisplay()\n\tdisplay.AddRow([]string{\"NETWORK ID\", \"NAME\", \"DRIVER\", \"SCOPE\"})\n\tfor _, network := range respNetworkResource {\n\t\tdisplay.AddRow([]string{\n\t\t\tnetwork.ID[:10],\n\t\t\tnetwork.Name,\n\t\t\tnetwork.Driver,\n\t\t\tnetwork.Scope,\n\t\t})\n\t}\n\n\tdisplay.Flush()\n\treturn nil\n}",
"func (c *list) execute(sess *session) *response {\n\n\t// Is the user authenticated?\n\tif sess.st != authenticated {\n\t\treturn mustAuthenticate(sess, c.tag, \"LIST\")\n\t}\n\n\t// Is the mailbox pattern empty? This indicates that we should return\n\t// the delimiter and the root name of the reference\n\tif c.mboxPattern == \"\" {\n\t\tres := ok(c.tag, \"LIST completed\")\n\t\tres.extra(fmt.Sprintf(`LIST () \"%s\" %s`, pathDelimiter, c.reference))\n\t\treturn res\n\t}\n\n\t// Convert the reference and mbox pattern into slices\n\tref := pathToSlice(c.reference)\n\tmbox := pathToSlice(c.mboxPattern)\n\n\t// Get the list of mailboxes\n\tmboxes, err := sess.list(ref, mbox)\n\n\tif err != nil {\n\t\treturn internalError(sess, c.tag, \"LIST\", err)\n\t}\n\n\t// Check for an empty response\n\tif len(mboxes) == 0 {\n\t\treturn no(c.tag, \"LIST no results\")\n\t}\n\n\t// Respond with the mailboxes\n\tres := ok(c.tag, \"LIST completed\")\n\tfor _, mbox := range mboxes {\n\t\tres.extra(fmt.Sprintf(`LIST (%s) \"%s\" /%s`,\n\t\t\tjoinMailboxFlags(mbox),\n\t\t\tstring(pathDelimiter),\n\t\t\tstrings.Join(mbox.Path, string(pathDelimiter))))\n\t}\n\n\treturn res\n}",
"func createDisplaysList(e *Elevator) {\n\tdisplay := newDisplay(1, displayOn, 1)\n\te.floorDisplaysList = append(e.floorDisplaysList, *display)\n\tfor i := e.column.minFloor; i <= e.column.maxFloor; i++ {\n\t\tdisplay = newDisplay(i, displayOn, i)\n\t\te.floorDisplaysList = append(e.floorDisplaysList, *display)\n\t}\n}",
"func (ll *linkedList) display() {\n\tfor tip := ll.head; tip != nil; tip = tip.Next {\n\t\tfmt.Printf(\"NODE: %+v %p \\n\", tip, tip)\n\t}\n\tfmt.Println()\n}",
"func executeListCmd(t *gotesting.T, stdout io.Writer, args []string, wrapper *stubRunWrapper) subcommands.ExitStatus {\n\ttd := testutil.TempDir(t)\n\tdefer os.RemoveAll(td)\n\n\tcmd := newListCmd(stdout, td)\n\tcmd.wrapper = wrapper\n\tflags := flag.NewFlagSet(\"\", flag.ContinueOnError)\n\tcmd.SetFlags(flags)\n\tif err := flags.Parse(args); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tflags.Set(\"build\", \"false\") // DeriveDefaults fails if -build=true and bundle dirs are missing\n\treturn cmd.Execute(context.Background(), flags)\n}",
"func (l *List) Display() {\n\tlst := l.head\n\tfor lst != nil {\n\t\tif lst.next != nil {\n\t\t\tfmt.Printf(\"%+v -> \", lst.val)\n\t\t} else {\n\t\t\tfmt.Printf(\"%+v\", lst.val)\n\t\t}\n\t\tlst = lst.next\n\t}\n\tfmt.Println()\n}",
"func displayInstructions(s tcell.Screen) {\n emitStr(s, 2, 2, tcell.StyleDefault, \"Press f/b to go to next/previous stretches\")\n emitStr(s, 2, 3, tcell.StyleDefault, \"Press p to toggle pause\")\n emitStr(s, 2, 4, tcell.StyleDefault, \"Press ESC exit\")\n return\n}",
"func Display() chan<- Result {\n\t// Create a channel to receive the results on.\n\tresult := make(chan Result)\n\n\tgo func() {\n\t\t// Wait for results from the different feeds and\n\t\t// display them.\n\t\tfor found := range result {\n\t\t\tlog.Printf(\"%s:\\n%s\\n\\n\", found.Field, found.Content)\n\t\t}\n\t}()\n\n\treturn result\n}",
"func (d *Inbrs) DisplayTable(w io.Writer) {\n\tvar data [][]string\n\tfor _, s := range d.list {\n\t\tdata = append(data, []string{s.hostname, s.intName, s.area,\n\t\t\ts.remoteID, s.fwAddress.String()})\n\t}\n\ttable := tablewriter.NewWriter(w)\n\ttable.SetHeader([]string{\"hostname\", \"interface\", \"area\", \"remote id\", \"FW address\"})\n\tfor _, v := range data {\n\t\ttable.Append(v)\n\t}\n\ttable.Render() // Send output\n}",
"func (h *History) List() {\n\tload := reverse(h.Load())\n\tprompt := promptui.Select{\n\t\tLabel: \"Target hisotry\",\n\t\tItems: load,\n\t\tSize: 10,\n\t}\n\n\ti, _, err := prompt.Run()\n\n\tif err != nil {\n\t\tlog.Fatalln(\"Prompt failed: \\n\", err)\n\t}\n\n\titem := load[i]\n\th.Write(item)\n\tExecuteItem(h.binary, item)\n}",
"func (db database) list(w http.ResponseWriter, req *http.Request) {\n\n\tif err := itemList.Execute(w, db); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}",
"func DisplayResults(number uint64, length uint64) {\n\tfmt.Println(\"Longest Collatz sequence under\", gridSize, \"starts at\")\n\tfmt.Println(number)\n\tfmt.Println(\"and has a length of\")\n\tfmt.Println(length)\n}",
"func CallList(list uint32) {\n\tsyscall.Syscall(gpCallList, 1, uintptr(list), 0, 0)\n}",
"func (ll *Doubly[T]) Display() {\n\tfor cur := ll.Head.Next; cur != ll.Head; cur = cur.Next {\n\t\tfmt.Print(cur.Val, \" \")\n\t}\n\n\tfmt.Print(\"\\n\")\n}",
"func display(w http.ResponseWriter, tmpl string, data interface{}) {\n\ttemplates.ExecuteTemplate(w, tmpl, data)\n}",
"func CallLists(n int32, xtype uint32, lists unsafe.Pointer) {\n\tsyscall.Syscall(gpCallLists, 3, uintptr(n), uintptr(xtype), uintptr(lists))\n}",
"func List(ctx *cli.Context) error {\n\tm := task.NewFileManager()\n\ttasks := m.GetAllOpenTasks()\n\n\ttasks = sortTasks(ctx.String(\"sort\"), tasks)\n\n\tfor _, v := range tasks {\n\t\tfmt.Println(v.String())\n\t}\n\treturn nil\n}",
"func displayAllRecords(records []Record) {\n\tfmt.Printf(\"\\nDisplaying all records...\\n\\n\")\n\n\tfor i := 0; i < len(records); i++ {\n\t\tfmt.Printf(\"Record ID: %d: %+v\\n\", i, records[i])\n\t\ttime.Sleep(5 * time.Millisecond) // 5ms between records\n\t}\n}",
"func (c *showCommand) Run(ctx context.Context, _ *commoncli.Env, serverClient util.ServerClient) error {\n\tif err := c.validate(); err != nil {\n\t\treturn err\n\t}\n\n\tresp, err := c.fetchEntries(ctx, serverClient.NewEntryClient())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcommonutil.SortTypesEntries(resp.Entries)\n\treturn c.printer.PrintProto(resp)\n}",
"func List(g *types.Cmd) {\n\tg.AddOptions(\"list\")\n}",
"func RunCommands(t *testing.T, tp *Tapestry, c []int) {\n\tfor _, comm := range c {\n\t\tswitch comm {\n\t\tcase PUB:\n\t\t\tPublish(t, tp)\n\t\t\tbreak\n\t\tcase GET:\n\t\t\tRetrieve(t, tp)\n\t\t\tbreak\n\t\tcase ADD:\n\t\t\tAdd(t, tp)\n\t\t\tbreak\n\t\tcase LEAVE:\n\t\t\tLeave(t, tp)\n\t\t\tbreak\n\t\tcase DEL:\n\t\t\tDelete(t, tp)\n\t\tcase KILL:\n\t\t\tKill(t, tp)\n\t\tdefault:\n\t\t\tt.Errorf(\"invalid command\")\n\t\t}\n\t\ttime.Sleep(time.Duration(tp.Rand.Intn(MAX_CMD_DELAY_MS)) * time.Millisecond)\n\t}\n}",
"func (command QuerybynameCommand) Execute() {\n\tquery := command.checkParam()\n\tleaves := query.Query()\n\n\tfmt.Printf(\"%s\\t\\t\\t\\t%s\\t%s\\n\", \"leaveID\", \"time-from\", \"time-end\")\n\tfor _, leave := range leaves {\n\t\tfmt.Printf(\"%s\\t%s\\t%s\\n\", leave.GetID(), leave.GetTimeFrom(), leave.GetTimeEnd())\n\t}\n}",
"func (r *ListCommand) Execute(args []string) (err error) {\n\tif err := r.RootCommand.Execute(args); err != nil {\n\t\tcerberus.Logger.Fatalln(err)\n\t}\n\n\tsvcs, err := cerberus.LoadServicesCfg()\n\tif err != nil {\n\t\tcerberus.DebugLogger.Fatalln(err)\n\t}\n\n\tfmt.Printf(\"\\nCerberus installed services:\\n\")\n\tfmt.Println(strings.Repeat(\"-\", 80))\n\n\tp := keyValuePrinter{indentSize: 5}\n\tfor _, s := range svcs {\n\t\tif r.Query != \"\" {\n\t\t\tif !strings.Contains(strings.ToLower(s.Name), strings.ToLower(r.Query)) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\n\t\tp.println(\"Name\", s.Name)\n\t\tp.println(\"Display Name\", s.DisplayName)\n\t\tp.println(\"Description\", s.Desc)\n\t\tp.println(\"Executable Path\", s.ExePath)\n\t\tp.println(\"Working Directory\", s.WorkDir)\n\t\tif len(s.Args) > 0 {\n\t\t\tp.println(\"Arguments\", strings.Join(s.Args, \" \"))\n\t\t}\n\t\tif len(s.Env) > 0 {\n\t\t\tp.println(\"Environment Variables\", strings.Join(s.Env, \" \"))\n\t\t}\n\t\tp.println(\"Start Type\", startTypeMapping[s.StartType])\n\t\tif s.StopSignal != cerberus.NoSignal {\n\t\t\tp.println(\"Stop Signal\", s.StopSignal)\n\t\t}\n\t\tp.println(\"Service User\", s.ServiceUser)\n\t\tif len(s.Dependencies) > 0 {\n\t\t\tp.println(\"Dependencies\", strings.Join(s.Dependencies, \" | \"))\n\t\t}\n\t\tvar actlng = len(s.RecoveryActions)\n\t\tif actlng > 0 {\n\t\t\tp.println(\"Recovery Actions\", \"\")\n\t\t\tp.indent()\n\t\t\tfor _, action := range s.RecoveryActions {\n\t\t\t\tp.println(\"Error Code\", action.ExitCode)\n\t\t\t\tp.println(\"Action\", mapAction(action.Action))\n\t\t\t\tif action.Action&cerberus.RestartAction == cerberus.RestartAction {\n\t\t\t\t\tp.println(\"Delay\", action.Delay)\n\t\t\t\t\tp.println(\"Max Restarts\", action.MaxRestarts)\n\t\t\t\t\tp.println(\"Reset After\", action.ResetAfter)\n\t\t\t\t}\n\t\t\t\tif action.Action&cerberus.RunProgramAction == cerberus.RunProgramAction {\n\t\t\t\t\tp.println(\"Program\", action.Program)\n\t\t\t\t\tp.println(\"Arguments\", fmt.Sprintf(\"[%v]\", concatArgs(action.Arguments)))\n\t\t\t\t}\n\t\t\t\tif actlng > 1 {\n\t\t\t\t\tp.println(\"-\", nil)\n\t\t\t\t}\n\t\t\t\tactlng--\n\t\t\t}\n\t\t}\n\n\t\tp.writeTo(os.Stdout)\n\t\tfmt.Fprintf(os.Stdout, \"%v\\n\", strings.Repeat(\"-\", 80))\n\t}\n\n\treturn nil\n}",
"func DisplayAll() {\n\n\tif len(dataStorageMap) == 0 {\n\t\tfmt.Println(\"Data Storage Empty!!! No data Found !!!\")\n\t} else {\n\t\tfor key, val := range dataStorageMap {\n\t\t\tfmt.Println(key, \"-->\", val)\n\t\t}\n\t}\n}",
"func RunList(cmd *cobra.Command, args []string) {\n\tlist := &nbv1.NamespaceStoreList{\n\t\tTypeMeta: metav1.TypeMeta{Kind: \"NamespaceStoreList\"},\n\t}\n\tif !util.KubeList(list, &client.ListOptions{Namespace: options.Namespace}) {\n\t\treturn\n\t}\n\tif len(list.Items) == 0 {\n\t\tfmt.Printf(\"No namespace stores found.\\n\")\n\t\treturn\n\t}\n\ttable := (&util.PrintTable{}).AddRow(\n\t\t\"NAME\",\n\t\t\"TYPE\",\n\t\t\"TARGET-BUCKET\",\n\t\t\"PHASE\",\n\t\t\"AGE\",\n\t)\n\tfor i := range list.Items {\n\t\tbs := &list.Items[i]\n\t\ttb, err := util.GetNamespaceStoreTargetBucket(bs)\n\t\tif err == nil {\n\t\t\ttable.AddRow(\n\t\t\t\tbs.Name,\n\t\t\t\tstring(bs.Spec.Type),\n\t\t\t\ttb,\n\t\t\t\tstring(bs.Status.Phase),\n\t\t\t\tutil.HumanizeDuration(time.Since(bs.CreationTimestamp.Time).Round(time.Second)),\n\t\t\t)\n\t\t}\n\t}\n\tfmt.Print(table.String())\n}",
"func (p *Proxy) handleShowProcesslist(session *driver.Session, query string, node sqlparser.Statement) (*sqltypes.Result, error) {\n\t// sessions := spanner.sessions\n\tqr := &sqltypes.Result{}\n\tqr.Fields = []*querypb.Field{\n\t\t{Name: \"Id\", Type: querypb.Type_INT64},\n\t\t{Name: \"User\", Type: querypb.Type_VARCHAR},\n\t\t{Name: \"Host\", Type: querypb.Type_VARCHAR},\n\t\t{Name: \"db\", Type: querypb.Type_VARCHAR},\n\t\t{Name: \"Command\", Type: querypb.Type_VARCHAR},\n\t\t{Name: \"Time\", Type: querypb.Type_INT32},\n\t\t{Name: \"State\", Type: querypb.Type_VARCHAR},\n\t\t{Name: \"Info\", Type: querypb.Type_VARCHAR},\n\t\t{Name: \"Rows_sent\", Type: querypb.Type_INT64},\n\t\t{Name: \"Rows_examined\", Type: querypb.Type_INT64},\n\t}\n\n\t// var sessionInfos []SessionInfo\n\t// privilegePlug := spanner.plugins.PlugPrivilege()\n\t// if privilegePlug.IsSuperPriv(session.User()) {\n\t// \tsessionInfos = sessions.Snapshot()\n\t// } else {\n\t// \tsessionInfos = sessions.SnapshotUser(session.User())\n\t// }\n\n\t// for _, info := range sessionInfos {\n\t// \trow := []sqltypes.Value{\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_INT64, []byte(fmt.Sprintf(\"%v\", info.ID))),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_VARCHAR, []byte(info.User)),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_VARCHAR, []byte(info.Host)),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_VARCHAR, []byte(info.DB)),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_VARCHAR, []byte(info.Command)),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_INT32, []byte(fmt.Sprintf(\"%v\", info.Time))),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_VARCHAR, []byte(info.State)),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_VARCHAR, []byte(info.Info)),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_INT64, []byte(fmt.Sprintf(\"%v\", 0))),\n\t// \t\tsqltypes.MakeTrusted(querypb.Type_INT64, []byte(fmt.Sprintf(\"%v\", 0))),\n\t// \t}\n\t// \tqr.Rows = append(qr.Rows, row)\n\t// }\n\treturn qr, nil\n}",
"func (info *Info) Run(c Cursor) {\n\tp := c.P()\n\tif !info.HeadLess {\n\t\tp.Printf(\"info %v {\", info.Domain)\n\t\tp.ShiftIn()\n\t\tdefer p.ShiftOut(\"}\")\n\t}\n\n\tips := info.run(c)\n\tif c.E() != nil {\n\t\treturn\n\t}\n\n\tif !info.HideResult {\n\t\tips.PrintResult(c)\n\n\t\tif len(info.NameServers) > 0 {\n\t\t\tp.Print()\n\t\t\tfor _, ns := range info.NameServers {\n\t\t\t\tp.Printf(\"// %v\", ns)\n\t\t\t}\n\t\t}\n\n\t\tif len(info.Records) > 0 {\n\t\t\tp.Print()\n\t\t\tfor _, rr := range info.Records {\n\t\t\t\tp.Printf(\"// %s\", rr.Digest())\n\t\t\t}\n\t\t}\n\t}\n}",
"func listActions(actions []Actionable) error {\n\tlog.Printf(\"query=%s\", query)\n\n\tfor _, a := range actions {\n\t\tit := wf.NewItem(a.Title()).\n\t\t\tArg(a.Title()).\n\t\t\tIcon(a.Icon()).\n\t\t\tCopytext(a.Title()).\n\t\t\tUID(a.Title()).\n\t\t\tValid(true).\n\t\t\tVar(\"ALSF_ACTION\", a.Title())\n\n\t\tit.NewModifier(\"cmd\").\n\t\t\tSubtitle(\"Blacklist action\").\n\t\t\tArg(a.Title()).\n\t\t\tValid(true).\n\t\t\tIcon(IconBlacklist).\n\t\t\tVar(\"action\", \"blacklist\")\n\n\t\tif _, ok := a.(TabActionable); ok {\n\t\t\tit.Var(\"ALSF_ACTION_TYPE\", \"tab\").\n\t\t\t\tVar(\"action\", \"tab-action\")\n\t\t} else if _, ok := a.(URLActionable); ok {\n\t\t\tit.Var(\"ALSF_ACTION_TYPE\", \"url\").\n\t\t\t\tVar(\"action\", \"url-action\")\n\t\t}\n\t}\n\n\tif query != \"\" {\n\t\tres := wf.Filter(query)\n\t\tlog.Printf(\"%d action(s) for %q\", len(res), query)\n\t}\n\twf.WarnEmpty(\"No actions found\", \"Try a different query?\")\n\twf.SendFeedback()\n\treturn nil\n}",
"func view() {\n\tvar selection string\n\tlisted, _ := exec.Command(\"ssh\", \"[email protected]\", \"ls\", \"-a\").Output()\n\tfmt.Println(\"\")\n\tfmt.Println(\"Current Files in directory:\")\n\tfmt.Println(string(listed))\n\tfmt.Println(\"Press 1 to create a file\")\n\tfmt.Println(\"Press 2 to delete a file\")\n\tfmt.Println(\"Press 3 to go back to the main menu\")\n\tfmt.Scan(&selection)\n\n\tif selection == \"1\" {\n\t\tcreate()\n\t}\n\tif selection == \"2\" {\n\t\tdelete()\n\t}\n\tif selection == \"3\" {\n\t\tmainmenu()\n\t}\n\tgoback()\n\n}",
"func (o *ListOptions) Run(ctx context.Context) (err error) {\n\to.printDevfileList(o.devfileList.Items)\n\treturn nil\n}",
"func (c *Console) List(group string) (err error) {\n\t_, err = fmt.Fprintf(c.conn, \"%v\\n\", toJSON([]string{\"list\", group}))\n\tif err == nil {\n\t\terr = <-c.Waiter\n\t}\n\treturn\n}",
"func ListCommands() {\n\tfmt.Fprintln(config.ErrOut, \"Commands:\")\n\tpadLen := 0\n\tfor _, cmd := range config.CommandList {\n\t\tif len(cmd.Name) > padLen {\n\t\t\tpadLen = len(cmd.Name)\n\t\t}\n\t}\n\tfor _, cmd := range config.CommandList {\n\t\tfmt.Fprintf(config.ErrOut, \" %s%s %s\\n\", cmd.Name, strings.Repeat(\" \", padLen-len(cmd.Name)), cmd.Title)\n\t}\n\tpad := strings.Repeat(\" \", len(config.Me)-1)\n\trunfileOpt := \"\"\n\tif config.EnableRunfileOverride {\n\t\trunfileOpt = \"[-r runfile] \"\n\t}\n\tfmt.Fprintf(config.ErrOut, \"Usage:\\n\")\n\tfmt.Fprintf(config.ErrOut, \" %s %shelp <command>\\n\", config.Me, runfileOpt)\n\tfmt.Fprintf(config.ErrOut, \" %s (show help for <command>)\\n\", pad)\n\tfmt.Fprintf(config.ErrOut, \" or %s %s<command> [option ...]\\n\", config.Me, runfileOpt)\n\tfmt.Fprintf(config.ErrOut, \" %s (run <command>)\\n\", pad)\n}",
"func (*ListCmd) Name() string { return \"list\" }",
"func (lw *listWin) refresh() {\n\tvar inds []int\n\tfor i, task := range file.Tasks {\n\t\tok := true\n\t\tfor _, filter := range lw.filters {\n\t\t\tif !task.HasTag(filter) {\n\t\t\t\tok = false\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif ok {\n\t\t\tinds = append(inds, i)\n\t\t}\n\t}\n\n\tsort.Sort(sorter{inds, file.Tasks, lw.less})\n\n\tprojs := make(map[string]bool)\n\tctxs := make(map[string]bool)\n\n\tif err := lw.Addr(\",\"); err != nil {\n\t\tdie(1, \"Failed to set address for %s: %s\", lw.title, err)\n\t}\n\n\tfor _, i := range inds {\n\t\ttask := file.Tasks[i]\n\t\tif _, err := fmt.Fprintf(lw.Data, \"%5d. %s\\n\", i+1, task.String()); err != nil {\n\t\t\tdie(1, \"Failed to refresh window %s: %s\", lw.title, err)\n\t\t}\n\t\tfor _, t := range task.Tags(todotxt.ProjectTag) {\n\t\t\tprojs[t] = true\n\t\t}\n\t\tfor _, t := range task.Tags(todotxt.ContextTag) {\n\t\t\tctxs[t] = true\n\t\t}\n\t}\n\n\tif err := lw.Addr(\"#0\"); err != nil {\n\t\tdie(1, \"Failed to write address to %s: %s\", lw.title, err)\n\t}\n\tif err := lw.Ctl(\"dot=addr\"); err != nil {\n\t\tdie(1, \"Failed to write dot=addr to %s ctl: %s\", lw.title, err)\n\t}\n\tif err := lw.Ctl(\"show\"); err != nil {\n\t\tdie(1, \"Failed to write show to %s ctl: %s\", lw.title, err)\n\t}\n\tif err := lw.Ctl(\"clean\"); err != nil {\n\t\tdie(1, \"Failed to write clean to %s ctl: %s\", lw.title, err)\n\t}\n}",
"func (L *List) Display() {\n\tnode := L.Head\n\tfor node != nil {\n\t\tfmt.Printf(\"%+v -> \", node.Key)\n\t\tnode = node.next\n\t}\n\tfmt.Println(\"---\")\n}",
"func Display(lst *Node) {\n\tfor lst != nil {\n\t\tif lst.next != nil {\n\t\t\tfmt.Printf(\"%v -> \", lst.val)\n\t\t} else {\n\t\t\tfmt.Printf(\"%v\", lst.val)\n\t\t}\n\t\tlst = lst.next\n\t}\n\tfmt.Println()\n}",
"func PrintCommands() {\n logger.Log(fmt.Sprintln(\"** Daemonized Commands **\"))\n for cmd, desc := range DaemonizedCommands() {\n logger.Log(fmt.Sprintf(\"%15s: %s\\n\", cmd, desc.description))\n }\n\n logger.Log(fmt.Sprintln(\"** Information Commands **\"))\n for cmd, desc := range InfoCommands() {\n logger.Log(fmt.Sprintf(\"%15s: %s\\n\", cmd, desc.description))\n }\n\n logger.Log(fmt.Sprintln(\"** Interactive Commands **\"))\n for cmd, desc := range InteractiveCommands() {\n logger.Log(fmt.Sprintf(\"%15s: %s\\n\", cmd, desc.description))\n }\n}",
"func (c *AppsListCmd) Run(cli *CLI, logWriters *LogWriters) (err error){\n\ts := NewSpinner(\"Looking up apps\",logWriters)\n\ts.Start()\n\n\taccounts, err := api.Accounts()\n\tif err != nil {\n\t\ts. Stop()\n\t\tlog.Error().Err(err).Msg(\"Unable to look up accounts\");\n\t\tos.Exit(1)\n\t}\n\ts.Stop()\n\tif c.AccountID != 0{\n\t\tnewAct := []api.Account{}\n\t\tfor _, a := range accounts {\n\t\t\tif a.ID == c.AccountID{\n\t\t\t\tnewAct = append(newAct, a)\n\t\t\t}\n\t\t\taccounts = newAct\n\t\t}\n\t\tif(len(newAct) == 0){\n\t\t\tlog.Info().Int(\"Account ID\",c.AccountID).Msg(\"Unable to find accounts where\")\n\t\t\tos.Exit(1)\n\t\t}\n\t}\n\tfmt.Println()\n\tfmt.Println()\n\tfor _, acc := range accounts {\n\t\tlog.Info().Msg(fmt.Sprint(HiWhite(\"Account #\"),HiWhite(strconv.Itoa(acc.ID)),\" - \", HiYellow(acc.AccountName)))\n\t\ttable := NewTable(cli, os.Stdout)\n\t\ttable.SetHeader([]string{\"App ID\", \"App Name\"})\n\t\ttable.SetColumnColor(tablewriter.Colors{tablewriter.Normal,tablewriter.FgWhiteColor},\n\t\ttablewriter.Colors{tablewriter.Normal, tablewriter.FgHiGreenColor})\n\t\ttable.SetAlignment(tablewriter.ALIGN_LEFT)\n\t\ttable.SetCenterSeparator(\"\")\n\t\ttable.SetColumnSeparator(\"\")\n\t\ttable.SetNoWhiteSpace(true)\n\t\ttable.SetAutoMergeCells(true)\n\t\ttable.SetRowLine(true)\n\t\tfor _, app := range acc.Applications {\n\t\t\t\tr := []string{strconv.Itoa(app.ID), strings.Trim(app.ApplicationName,\"\\\"\")}\n\t\t\t\ttable.Append(r)\n\t\t}\n\t\ttable.Render()\n\t\tfmt.Println()\n\t\tfmt.Println()\n\t}\n\treturn err\n}",
"func RunSnapshotList(c *CmdConfig) error {\n\tvar err error\n\tss := c.Snapshots()\n\n\trestype, err := c.Doit.GetString(c.NS, doctl.ArgResourceType)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tregion, err := c.Doit.GetString(c.NS, doctl.ArgRegionSlug)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tmatches := make([]glob.Glob, 0, len(c.Args))\n\tfor _, globStr := range c.Args {\n\t\tg, err := glob.Compile(globStr)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unknown glob %q\", globStr)\n\t\t}\n\n\t\tmatches = append(matches, g)\n\t}\n\n\tvar matchedList []do.Snapshot\n\tvar list []do.Snapshot\n\n\tif restype == \"droplet\" {\n\t\tlist, err = ss.ListDroplet()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t} else if restype == \"volume\" {\n\t\tlist, err = ss.ListVolume()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\tlist, err = ss.List()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tfor _, snapshot := range list {\n\t\tvar skip = true\n\t\tif len(matches) == 0 {\n\t\t\tskip = false\n\t\t} else {\n\t\t\tfor _, m := range matches {\n\t\t\t\tif m.Match(snapshot.ID) {\n\t\t\t\t\tskip = false\n\t\t\t\t}\n\t\t\t\tif m.Match(snapshot.Name) {\n\t\t\t\t\tskip = false\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif !skip && region != \"\" {\n\t\t\tfor _, snapshotRegion := range snapshot.Regions {\n\t\t\t\tif region != snapshotRegion {\n\t\t\t\t\tskip = true\n\t\t\t\t} else {\n\t\t\t\t\tskip = false\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\n\t\t}\n\n\t\tif !skip {\n\t\t\tmatchedList = append(matchedList, snapshot)\n\t\t}\n\t}\n\n\titem := &displayers.Snapshot{Snapshots: matchedList}\n\treturn c.Display(item)\n}",
"func runListServers(_ *cobra.Command, _ []string) {\n\tcfg, err := config.LoadFromFile()\n\tif err != nil {\n\t\texitWithError(err)\n\t}\n\n\tregions, err := checkRegions(*region)\n\tif err != nil {\n\t\texitWithError(err)\n\t}\n\n\tnameFilter := core.NewFilter(core.TagName, *name, core.Contains, *ignoreCase)\n\tenvFilter := core.NewFilter(core.TagEnv, *env, core.Equals, *ignoreCase)\n\tservers, err := core.GetAllServers(cfg.AWSCredentials, regions, nameFilter, envFilter)\n\tif err != nil {\n\t\texitWithError(err)\n\t}\n\n\tw := tabwriter.NewWriter(os.Stdout, 0, 0, 3, ' ', 0)\n\tfmt.Fprintln(w, \"NAME\\tENVIRONMENT\\tPRIVATE IP\\tPUBLIC IP\")\n\tfor _, server := range servers {\n\t\tfmt.Fprintf(w, \"%s\\t%s\\t%s\\t%s\\n\", server.Name, server.Env, server.PrivateIP, server.PublicIP)\n\t}\n\tw.Flush()\n}",
"func runListDB(cmd *cobra.Command) {\n\tcmd.Println(\"Getting Set List\")\n\n\tnames, err := query.GetNames(\"\", conn)\n\tif err != nil {\n\t\tcmd.Println(\"Getting Set List : \", err)\n\t\treturn\n\t}\n\n\tcmd.Println(\"\")\n\n\tfor _, name := range names {\n\t\tcmd.Println(name)\n\t}\n\n\tcmd.Println(\"\")\n}",
"func CmdClList(s ircx.Sender, m *irc.Message) {\n\tr := report.Reports[\"classes\"].(*report.Context)\n\n\tfor _, v := range r.Classes {\n\t\ts.Send(&irc.Message{\n\t\t\tCommand: irc.PRIVMSG,\n\t\t\tParams: Params(m),\n\t\t\tTrailing: v,\n\t\t})\n\t}\n\n\ttime.Sleep(600 * time.Millisecond)\n}",
"func List(ctx *cli.Context) error {\n\targs := &listArgs{}\n\targs.Parse(ctx)\n\n\tmanager := yata.NewTaskManager()\n\ttasks, err := manager.GetAll()\n\thandleError(err)\n\n\tif args.showTags {\n\t\treturn displayTags(tasks)\n\t}\n\n\ttasks = yata.FilterTasks(tasks, func(t yata.Task) bool {\n\t\treturn (args.tag == \"\" || sliceContains(t.Tags, args.tag)) &&\n\t\t\t(args.description == \"\" || strings.Contains(t.Description, args.description)) &&\n\t\t\t(args.all || !t.Completed)\n\t})\n\n\tsortTasks(args.sort, &tasks)\n\n\tfor _, v := range tasks {\n\t\tstringer := yata.NewTaskStringer(v, taskStringer(args.format))\n\t\tswitch v.Priority {\n\t\tcase yata.LowPriority:\n\t\t\tyata.PrintlnColor(\"cyan+h\", stringer.String())\n\t\tcase yata.HighPriority:\n\t\t\tyata.PrintlnColor(\"red+h\", stringer.String())\n\t\tdefault:\n\t\t\tyata.Println(stringer.String())\n\t\t}\n\t}\n\n\treturn nil\n}",
"func ShowList(selfMember *MemberID, memberList *[]MemberID) {\n\tfmt.Println(\"You are:\")\n\tfmt.Println(*selfMember)\n\n\tfmt.Println(\"And this is your membership list:\")\n\tfor _, Member := range *memberList {\n\t\tfmt.Println(Member)\n\t}\n\tfmt.Println()\n}",
"func RenderList(w http.ResponseWriter, r *http.Request, l []Renderer) error {\n\tfor _, v := range l {\n\t\tif err := renderer(w, r, v); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tRespond(w, r, l)\n\treturn nil\n}",
"func List(title, text string, items []string) (string, bool, error) {\n\treturn listBox(title, text, \"ClassList\", items, false)\n}",
"func runList(props ListCmdProps, output io.Writer, repo db.Repo) error {\n\tstart, err := parseDateOrDefault(props.startDate)\n\n\tif props.startDate == \"\" {\n\t\tdefaultStart := start.Add(-1 * time.Hour * 24 * 30)\n\t\tstart = &defaultStart\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tend, err := parseDateOrDefault(props.endDate)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tworkingDays, err := repo.ListRange(start, end)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\trenderTable(workingDays, output)\n\n\treturn nil\n}",
"func printAllCommands(cmds []Commander) {\n\tconst format = \"%v\\t%v\\t%v\\t%v\\n\"\n\ttw := new(tabwriter.Writer).Init(os.Stdout, 0, 8, 2, ' ', 0)\n\tfmt.Fprintf(tw, format, \"Path\", \"Alias\", \"Command\", \"Args\")\n\tfmt.Fprintf(tw, format, \"-----\", \"-----\", \"-------\", \"----\")\n\tfor _, t := range cmds {\n\t\tfmt.Fprintf(tw, format, t.Path, t.Alias, t.Command, strings.Join(t.Args, \" \"))\n\t}\n\ttw.Flush()\n}",
"func CallLists(n int32, xtype uint32, lists unsafe.Pointer) {\n C.glowCallLists(gpCallLists, (C.GLsizei)(n), (C.GLenum)(xtype), lists)\n}",
"func RunAppsList(c *CmdConfig) error {\n\twithProjects, err := c.Doit.GetBool(c.NS, doctl.ArgAppWithProjects)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tapps, err := c.Apps().List(withProjects)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn c.Display(displayers.Apps(apps))\n}",
"func (c *Dg) ShowList() ([]string, error) {\n c.con.LogQuery(\"(show) list of device groups\")\n path := c.xpath(nil)\n return c.con.EntryListUsing(c.con.Show, path[:len(path) - 1])\n}",
"func main() {\n\tlist := &ToDoList{}\n\tfmt.Print(\"* * * To Do List * * *\\n\")\n\n\tfor true {\n\t\tfmt.Print(\"\\nEnter a command (Show, Add, Move, Complete) or End\\n\")\n\t\treader := bufio.NewReader(os.Stdin)\n\t\tcommand, _ := reader.ReadString('\\n')\n\n\t\t// Process Show command\n\t\tif strings.Contains(command, \"Show\") {\n\t\t\tindex := 1\n\t\t\tfor i := 0; i < len(list.showList()); i++ {\n\t\t\t\tfmt.Printf(\"%d. %s\", index, list.showList()[i])\n\t\t\t\tindex++\n\t\t\t}\n\n\t\t\t// Process Add command\n\t\t} else if strings.Contains(command, \"Add\") {\n\t\t\tsplitCommand := strings.SplitN(command, \" \", 2)\n\t\t\tlist.addListItem(splitCommand[1])\n\n\t\t\t// Process Complete command\n\t\t} else if strings.Contains(command, \"Complete\") {\n\t\t\tsplitCommand := strings.SplitN(command, \" \", 2)\n\t\t\titemConv, err := strconv.Atoi(strings.TrimSuffix(splitCommand[1], \"\\n\"))\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(err)\n\t\t\t\tos.Exit(0)\n\t\t\t}\n\t\t\tif itemConv > 0 && itemConv <= len(list.showList()) {\n\t\t\t\tlist.completeListItem(itemConv - 1)\n\t\t\t} else {\n\t\t\t\tfmt.Println(\"This item is not in the list.\")\n\t\t\t}\n\n\t\t\t// Process Move command\n\t\t} else if strings.Contains(command, \"Move\") {\n\t\t\tsplitCommand := strings.SplitN(command, \" \", 3)\n\t\t\tlocation, item1err := strconv.Atoi(splitCommand[1])\n\t\t\tdestination, item2err := strconv.Atoi(strings.TrimSuffix(splitCommand[2], \"\\n\"))\n\t\t\tif item1err != nil {\n\t\t\t\tfmt.Println(item1err)\n\t\t\t\tos.Exit(0)\n\t\t\t}\n\t\t\tif item2err != nil {\n\t\t\t\tfmt.Println(item2err)\n\t\t\t\tos.Exit(0)\n\t\t\t}\n\t\t\tif (location > 0 && location <= len(list.showList())) && (destination > 0 && destination <= len(list.showList())) {\n\t\t\t\tlist.moveListItem(location-1, destination-1)\n\t\t\t} else {\n\t\t\t\tfmt.Println(\"This item is not in the list.\")\n\t\t\t}\n\n\t\t\t// Process End command\n\t\t} else if strings.Contains(command, \"End\") {\n\t\t\tfmt.Print(list.endList() + \"\\n\")\n\t\t\tbreak\n\n\t\t\t// Process default\n\t\t} else {\n\t\t\tfmt.Println(command + \" is an unrecognized command.\")\n\t\t}\n\t}\n}",
"func dispres(r []string, c int) {\n\tif len(r) < c {\n\t\tc = len(r)\n\t}\n\tfor i := 0; i < c; i++ {\n\t\tfmt.Printf(\"%s\\n\", r[i])\n\t}\n}",
"func scanCmdDisplay(result *client.StorageScanResp, summary bool) (string, error) {\n\tout := &bytes.Buffer{}\n\n\tgroups, err := groupScanResults(result, summary)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tif summary {\n\t\tif len(groups) == 0 {\n\t\t\treturn \"no hosts found\", nil\n\t\t}\n\t\treturn tabulateHostGroups(groups, \"Hosts\", \"SCM Total\", \"NVMe Total\")\n\t}\n\n\tformatHostGroups(out, groups)\n\n\treturn out.String(), nil\n}",
"func formatCmdDisplay(results client.StorageFormatResults, summary bool) (string, error) {\n\tout := &bytes.Buffer{}\n\n\tgroups, mixedGroups, err := groupFormatResults(results, summary)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tif len(groups) > 0 {\n\t\tfmt.Fprintf(out, \"\\n%s\\n\", groups)\n\t}\n\n\treturn formatHostGroups(out, mixedGroups), nil\n}",
"func (p *showPlan) Execute(ctx context.Context) (*table.Table, error) {\n\tt, err := table.New([]string{\"?graph_id\"})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\terrs := make(chan error)\n\tnames := make(chan string)\n\tgo func() {\n\t\terrs <- p.store.GraphNames(ctx, names)\n\t\tclose(errs)\n\t}()\n\n\tfor name := range names {\n\t\tid := name\n\t\tt.AddRow(table.Row{\n\t\t\t\"?graph_id\": &table.Cell{\n\t\t\t\tS: &id,\n\t\t\t},\n\t\t})\n\t}\n\tif <-errs != nil {\n\t\treturn nil, err\n\t}\n\treturn t, nil\n}",
"func (c *DomainsListCmd) Run() (err error) {\n\ts := NewSpinner(\"Looking up domains\")\n\ts.Start()\n\n\tdomains, err := api.Domains(c.AccountID)\n\ts.Stop()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ttable := NewTable(os.Stdout)\n\ttable.SetHeader([]string{\"Domain\", \"Engaged\"})\n\n\tfor _, d := range domains {\n\t\tr := []string{d.DomainName, fmt.Sprintf(\"%t\", d.Engaged)}\n\t\ttable.Append(r)\n\t}\n\ttable.Render()\n\n\treturn err\n}",
"func ShowMultiCursor(screen tcell.Screen, x, y, i int) {\n\tif i == 0 {\n\t\tscreen.ShowCursor(x, y)\n\t} else {\n\t\tr, _, _, _ := screen.GetContent(x, y)\n\t\tscreen.SetContent(x, y, r, nil, defStyle.Reverse(true))\n\t}\n}",
"func (controller *List) Display() {\n\tcontroller.Data[\"searches\"] = controller.Account.GetSearches().GetAll()\n\tcontroller.SetCustomTitle(\"Account - Activity\")\n\tcontroller.LoadTemplate(\"home\")\n}",
"func (s *Service) Display(c context.Context, mid int64, plat int8, build int, buvid, channel, ip, ak, network, mobiApp,\n\tdevice, language, adExtra string, isTmp bool, now time.Time) (res []*show.Show) {\n\tres = s.showDisplay(c, mid, plat, build, buvid, channel, ip, ak, network, mobiApp, device, language, adExtra, isTmp, false, false, now)\n\treturn\n}",
"func PrintList(t Outformatter) {\n\tprintout(t, \"list\", \"print\")\n}",
"func (inst *TaskInstance) ListProcesses(inputs *Inputs, outputs *Outputs) (err error) {\n\n\tcmd := inputs.Cmd\n\tparams := inputs.Params\n\tvar outBytes []byte\n\toutval := bytes.NewBuffer(outBytes)\n\tfmt.Printf(\"Params = %v \\n Command = %v \\n\", cmd, params)\n\n\tif err := os.Chdir(inputs.Path); err != nil {\n\t\tfmt.Printf(\"%v\", err)\n\t}\n\n\tcmdObject := exec.Command(cmd, params...)\n\tcmdObject.Stdout = outval\n\tif errObj := cmdObject.Run(); errObj != nil {\n\t\tfmt.Println(errObj)\n\t\treturn errObj\n\t}\n\toutputs.Output1 = outval.Bytes()\n\treturn nil\n}",
"func PrintList(head *Node) {\n\trunner := head\n\tfor runner != nil {\n\t\tfmt.Printf(\"%d\\t\", runner.Val)\n\t\trunner = runner.Next\n\t}\n\tfmt.Printf(\"\\n\")\n}",
"func (c *listCommand) Run(ctx context.Context, _ *commoncli.Env, serverClient util.ServerClient) error {\n\tfilter := &agentv1.ListAgentsRequest_Filter{}\n\tif len(c.selectors) > 0 {\n\t\tmatchBehavior, err := parseToSelectorMatch(c.matchSelectorsOn)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tselectors := make([]*types.Selector, len(c.selectors))\n\t\tfor i, sel := range c.selectors {\n\t\t\tselector, err := util.ParseSelector(sel)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"error parsing selector %q: %w\", sel, err)\n\t\t\t}\n\t\t\tselectors[i] = selector\n\t\t}\n\t\tfilter.BySelectorMatch = &types.SelectorMatch{\n\t\t\tSelectors: selectors,\n\t\t\tMatch: matchBehavior,\n\t\t}\n\t}\n\n\tagentClient := serverClient.NewAgentClient()\n\n\tpageToken := \"\"\n\tresponse := new(agentv1.ListAgentsResponse)\n\tfor {\n\t\tlistResponse, err := agentClient.ListAgents(ctx, &agentv1.ListAgentsRequest{\n\t\t\tPageSize: 1000, // comfortably under the (4 MB/theoretical maximum size of 1 agent in MB)\n\t\t\tPageToken: pageToken,\n\t\t\tFilter: filter,\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tresponse.Agents = append(response.Agents, listResponse.Agents...)\n\t\tif pageToken = listResponse.NextPageToken; pageToken == \"\" {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn c.printer.PrintProto(response)\n}",
"func (l *ToDoList) showList() []string {\n\treturn l.list\n}",
"func printResults() {\n\tclearScreen()\n\n\tfmt.Println(\" Positions:\")\n\tfmt.Println(\"--------------------------\")\n\tfor _, exg := range exchanges {\n\t\tfmt.Printf(\"%-13s %10.2f\\n\", exg, exg.Position())\n\t}\n\tfmt.Println(\"--------------------------\")\n\tfmt.Printf(\"\\nRun P&L: $%.2f\\n\", pl)\n}",
"func runShow(args []string) int {\n\tcfg, err := loadConfig(showOpt.configFile)\n\tif err != nil {\n\t\tfmt.Fprintln(o.err, err)\n\t\treturn 1\n\t}\n\tdb := dbClientFor(cfg)\n\tdb.Connect()\n\tdefer db.Disconnect()\n\n\topt := dbmodel.RequireNone\n\tif showOpt.showAll {\n\t\topt = dbmodel.RequireAll\n\t}\n\tif len(args) == 0 {\n\t\tfmt.Fprintln(o.err, \"require table name as argument.\")\n\t\treturn 1\n\t}\n\ttbl, err := db.Table(cfg.Schema, args[0], opt)\n\tif err != nil {\n\t\tfmt.Fprintln(o.err, err)\n\t\treturn 1\n\t}\n\n\tconv := findConverter(showOpt.prettyPrint, cfg.Driver)\n\tprintTable(tbl, conv)\n\treturn 0\n}",
"func (z *zpoolctl) List(ctx context.Context, name, options string, properties []string, t string) *execute {\n\targs := []string{\"list\"}\n\tif len(options) > 0 {\n\t\targs = append(args, options)\n\t}\n\tif properties != nil {\n\t\tkv := \"-o \"\n\t\tfor _, v := range properties {\n\t\t\tkv += v + \",\"\n\t\t}\n\t\tkv = strings.TrimSuffix(kv, \",\")\n\t\targs = append(args, kv)\n\t}\n\tif len(t) > 0 {\n\t\targs = append(args, \"-T \"+t)\n\t}\n\targs = append(args, name)\n\treturn &execute{ctx: ctx, name: z.cmd, args: args}\n}",
"func (z *zfsctl) List(ctx context.Context, name, options, max string, oProperties []string, sProperty, SProperty, t string) *execute {\n\targs := []string{\"list\"}\n\tif len(options) > 0 {\n\t\targs = append(args, options)\n\t}\n\tif len(max) > 0 {\n\t\targs = append(args, max)\n\t}\n\tif oProperties != nil {\n\t\to := \"-o \"\n\t\tfor _, p := range oProperties {\n\t\t\to += p + \",\"\n\t\t}\n\t\targs = append(args, strings.TrimSuffix(o, \",\"))\n\t}\n\tif len(sProperty) > 0 {\n\t\targs = append(args, sProperty)\n\t}\n\tif len(SProperty) > 0 {\n\t\targs = append(args, SProperty)\n\t}\n\tif len(t) > 0 {\n\t\targs = append(args, \"-t \"+t)\n\t}\n\tif len(name) > 0 {\n\t\targs = append(args, name)\n\t}\n\treturn &execute{ctx: ctx, name: z.cmd, args: args}\n}",
"func showServers(client *clcv2.CLIClient, servnames []string) {\n\ttype asyncServerResult struct {\n\t\tserver clcv2.Server\n\t\tgroup clcv2.Group\n\t}\n\n\tvar (\n\t\twg sync.WaitGroup\n\t\tresChan = make(chan asyncServerResult)\n\t\tresults []asyncServerResult\n\t)\n\n\tfor _, servname := range servnames {\n\t\tservname := servname\n\t\twg.Add(1)\n\t\tgo func() {\n\t\t\tdefer wg.Done()\n\t\t\tserver, err := client.GetServer(servname)\n\t\t\tif err != nil {\n\t\t\t\tfmt.Fprintf(os.Stderr, \"Failed to list details of server %q: %s\\n\", servname, err)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tgrp, err := client.GetGroup(server.GroupId)\n\t\t\tif err != nil {\n\t\t\t\tfmt.Fprintf(os.Stderr, \"Failed to resolve %s group UUID: %s\\n\", servname, err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tresChan <- asyncServerResult{\n\t\t\t\tserver: server,\n\t\t\t\tgroup: *grp,\n\t\t\t}\n\t\t}()\n\t}\n\t// Waiter needs to run in the background, to close generator\n\tgo func() {\n\t\twg.Wait()\n\t\tclose(resChan)\n\t}()\n\n\tfor res := range resChan {\n\t\tresults = append(results, res)\n\t}\n\n\tif len(results) > 0 {\n\t\tvar table = tablewriter.NewWriter(os.Stdout)\n\t\t// Sort in ascending order of last-modified date.\n\n\t\tsort.Slice(results, func(i, j int) bool {\n\t\t\treturn results[i].server.ChangeInfo.ModifiedDate.Before(results[j].server.ChangeInfo.ModifiedDate)\n\t\t})\n\n\t\ttable.SetAutoFormatHeaders(false)\n\t\ttable.SetAlignment(tablewriter.ALIGN_LEFT)\n\t\ttable.SetAutoWrapText(true)\n\n\t\ttable.SetHeader([]string{\n\t\t\t\"Name\", \"Group\", \"Description\", \"OS\",\n\t\t\t\"IP\", \"CPU\", \"Mem\", \"Storage\",\n\t\t\t\"Status\", \"Last Change\",\n\t\t})\n\n\t\tfor _, res := range results {\n\t\t\tIPs := []string{}\n\t\t\tfor _, ip := range res.server.Details.IpAddresses {\n\t\t\t\tif ip.Public != \"\" {\n\t\t\t\t\tIPs = append(IPs, ip.Public)\n\t\t\t\t}\n\t\t\t\tif ip.Internal != \"\" {\n\t\t\t\t\tIPs = append(IPs, ip.Internal)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tstatus := res.server.Details.PowerState\n\t\t\tif res.server.Details.InMaintenanceMode {\n\t\t\t\tstatus = \"MAINTENANCE\"\n\t\t\t} else if res.server.Status != \"active\" {\n\t\t\t\tstatus = res.server.Status\n\t\t\t}\n\n\t\t\tdesc := res.server.Description\n\t\t\tif res.server.IsTemplate {\n\t\t\t\tdesc = \"TPL: \" + desc\n\t\t\t}\n\n\t\t\tmodifiedStr := humanize.Time(res.server.ChangeInfo.ModifiedDate)\n\t\t\t// The ModifiedBy field can be an email address, or an API Key (hex string) //\n\t\t\tif _, err := hex.DecodeString(res.server.ChangeInfo.ModifiedBy); err == nil {\n\t\t\t\tmodifiedStr += \" via API Key\"\n\t\t\t} else {\n\t\t\t\tmodifiedStr += \" by \" + truncate(res.server.ChangeInfo.ModifiedBy, 6)\n\t\t\t}\n\n\t\t\t// Append a tilde (~) to indicate it has snapshots\n\t\t\tserverName := res.server.Name\n\t\t\tif len(res.server.Details.Snapshots) > 0 {\n\t\t\t\tserverName += \" ~\"\n\t\t\t}\n\n\t\t\ttable.Append([]string{\n\t\t\t\tserverName, res.group.Name, truncate(desc, 30), truncate(res.server.OsType, 15),\n\t\t\t\tstrings.Join(IPs, \" \"),\n\t\t\t\tfmt.Sprint(res.server.Details.Cpu), fmt.Sprintf(\"%d G\", res.server.Details.MemoryMb/1024),\n\t\t\t\tfmt.Sprintf(\"%d G\", res.server.Details.StorageGb),\n\t\t\t\tstatus, modifiedStr,\n\t\t\t})\n\t\t}\n\n\t\ttable.Render()\n\t}\n}",
"func updateDisplays(elevatorFloor int, e *Elevator) {\n\tfor _, display := range e.floorDisplaysList {\n\t\tdisplay.floor = elevatorFloor\n\t}\n}",
"func (e *AliasExecutor) List(_ context.Context, cmdCtx CommandContext) (interactive.Message, error) {\n\tcmdVerb, cmdRes := parseCmdVerb(cmdCtx.Args)\n\tdefer e.reportCommand(cmdVerb, cmdRes, cmdCtx.Conversation.CommandOrigin, cmdCtx.Platform)\n\te.log.Debug(\"Listing aliases...\")\n\toutMsg := respond(e.getTabularOutput(cmdCtx.Conversation.ExecutorBindings), cmdCtx)\n\toutMsg.Sections = []interactive.Section{\n\t\t{\n\t\t\tBase: outMsg.Base,\n\t\t\tContext: []interactive.ContextItem{\n\t\t\t\t{Text: aliasesForCurrentBindingsMsg},\n\t\t\t},\n\t\t},\n\t}\n\toutMsg.Base = interactive.Base{}\n\n\treturn outMsg, nil\n}",
"func (task *Task) listenAndDisplay(environment map[string]string) {\n\tscr := newScreen()\n\t// just wait for stuff to come back\n\n\tfor TaskStats.runningCmds > 0 {\n\t\tselect {\n\t\tcase <-ticker.C:\n\t\t\tspinner.Next()\n\n\t\t\tif task.Config.CmdString != \"\" {\n\t\t\t\tif !task.Command.Complete && task.Command.Started {\n\t\t\t\t\ttask.Display.Values.Prefix = spinner.Current()\n\t\t\t\t\ttask.Display.Values.Eta = task.CurrentEta()\n\t\t\t\t}\n\t\t\t\ttask.display()\n\t\t\t}\n\n\t\t\tfor _, taskObj := range task.Children {\n\t\t\t\tif !taskObj.Command.Complete && taskObj.Command.Started {\n\t\t\t\t\ttaskObj.Display.Values.Prefix = spinner.Current()\n\t\t\t\t\ttaskObj.Display.Values.Eta = taskObj.CurrentEta()\n\t\t\t\t}\n\t\t\t\ttaskObj.display()\n\t\t\t}\n\n\t\t\t// update the summary line\n\t\t\tif Config.Options.ShowSummaryFooter {\n\t\t\t\tscr.DisplayFooter(footer(statusPending, \"\"))\n\t\t\t}\n\n\t\tcase msgObj := <-task.resultChan:\n\t\t\teventTask := msgObj.Task\n\n\t\t\t// update the state before displaying...\n\t\t\tif msgObj.Complete {\n\t\t\t\teventTask.Completed(msgObj.ReturnCode)\n\t\t\t\ttask.StartAvailableTasks(environment)\n\t\t\t\ttask.status = msgObj.Status\n\t\t\t\tif msgObj.Status == statusError {\n\t\t\t\t\t// update the group status to indicate a failed subtask\n\t\t\t\t\tTaskStats.totalFailedTasks++\n\n\t\t\t\t\t// keep note of the failed task for an after task report\n\t\t\t\t\ttask.failedTasks = append(task.failedTasks, eventTask)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif !eventTask.Config.ShowTaskOutput {\n\t\t\t\tmsgObj.Stderr = \"\"\n\t\t\t\tmsgObj.Stdout = \"\"\n\t\t\t}\n\n\t\t\tif msgObj.Stderr != \"\" {\n\t\t\t\teventTask.Display.Values = LineInfo{Status: msgObj.Status.Color(\"i\"), Title: eventTask.Config.Name, Msg: msgObj.Stderr, Prefix: spinner.Current(), Eta: eventTask.CurrentEta()}\n\t\t\t} else {\n\t\t\t\teventTask.Display.Values = LineInfo{Status: msgObj.Status.Color(\"i\"), Title: eventTask.Config.Name, Msg: msgObj.Stdout, Prefix: spinner.Current(), Eta: eventTask.CurrentEta()}\n\t\t\t}\n\n\t\t\teventTask.display()\n\n\t\t\t// update the summary line\n\t\t\tif Config.Options.ShowSummaryFooter {\n\t\t\t\tscr.DisplayFooter(footer(statusPending, \"\"))\n\t\t\t} else {\n\t\t\t\tscr.MovePastFrame(false)\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\tif !exitSignaled {\n\t\ttask.waiter.Wait()\n\t}\n\n}"
] | [
"0.59823793",
"0.59679854",
"0.58969665",
"0.5839446",
"0.5837045",
"0.5829667",
"0.58105326",
"0.5798192",
"0.5782938",
"0.5754856",
"0.5722812",
"0.5719009",
"0.57121134",
"0.5702872",
"0.5686456",
"0.56853086",
"0.5649026",
"0.5640594",
"0.557456",
"0.557434",
"0.5555338",
"0.55276287",
"0.5526202",
"0.55202764",
"0.5488742",
"0.5454457",
"0.5450306",
"0.544822",
"0.5432062",
"0.54307485",
"0.5424809",
"0.5417027",
"0.5403658",
"0.54017603",
"0.53865296",
"0.53856647",
"0.5372781",
"0.5359642",
"0.5358563",
"0.533874",
"0.5322687",
"0.5321892",
"0.5310271",
"0.5306707",
"0.5300754",
"0.5299782",
"0.52805513",
"0.5274512",
"0.5272945",
"0.52659744",
"0.5265567",
"0.52584726",
"0.5254955",
"0.5249074",
"0.52475727",
"0.5245613",
"0.5245514",
"0.5240433",
"0.52281946",
"0.52262163",
"0.5225883",
"0.52135485",
"0.52132285",
"0.5207643",
"0.5203132",
"0.519818",
"0.51877594",
"0.5184061",
"0.51805526",
"0.517049",
"0.51687944",
"0.5165862",
"0.51650155",
"0.5164138",
"0.5160401",
"0.5158323",
"0.5156016",
"0.51513726",
"0.51491576",
"0.5146261",
"0.5138362",
"0.5131551",
"0.51313007",
"0.5131024",
"0.5130116",
"0.5127433",
"0.5125394",
"0.5125346",
"0.51250625",
"0.511351",
"0.5107461",
"0.5100624",
"0.5100353",
"0.51002634",
"0.5098274",
"0.5096191",
"0.50945336",
"0.5093308",
"0.5091685",
"0.5083758",
"0.5078324"
] | 0.0 | -1 |
check the completeness status of a framebuffer | func CheckFramebufferStatus(target uint32) uint32 {
ret := C.glowCheckFramebufferStatus(gpCheckFramebufferStatus, (C.GLenum)(target))
return (uint32)(ret)
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (debugging *debuggingOpenGL) CheckFramebufferStatus(target uint32) uint32 {\n\tdebugging.recordEntry(\"CheckFramebufferStatus\", target)\n\tresult := debugging.gl.CheckFramebufferStatus(target)\n\tdebugging.recordExit(\"CheckFramebufferStatus\")\n\treturn result\n}",
"func (native *OpenGL) CheckFramebufferStatus(target uint32) uint32 {\n\treturn gl.CheckFramebufferStatus(target)\n}",
"func (f *Framebuffer) Status() error {\n\tf.useState()\n\te := f.ctx.O.Call(\"checkFramebufferStatus\", f.ctx.FRAMEBUFFER).Int()\n\n\t// Avoid the larger switch statement below, as no error is the most likely\n\t// case.\n\tif e == f.ctx.FRAMEBUFFER_COMPLETE {\n\t\treturn nil\n\t}\n\n\tswitch e {\n\tcase f.ctx.FRAMEBUFFER_INCOMPLETE_ATTACHMENT:\n\t\treturn gfx.ErrFramebufferIncompleteAttachment\n\tcase f.ctx.FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:\n\t\treturn gfx.ErrFramebufferIncompleteMissingAttachment\n\tcase f.ctx.FRAMEBUFFER_INCOMPLETE_DIMENSIONS:\n\t\treturn gfx.ErrFramebufferIncompleteDimensions\n\tcase f.ctx.FRAMEBUFFER_UNSUPPORTED:\n\t\treturn gfx.ErrFramebufferIncompleteDimensions\n\tdefault:\n\t\tpanic(fmt.Sprintf(\"webgl: unhandled framebuffer status 0x%X\\n\", e))\n\t}\n}",
"func CheckFramebufferStatus(target uint32) uint32 {\n ret := C.glowCheckFramebufferStatus(gpCheckFramebufferStatus, (C.GLenum)(target))\n return (uint32)(ret)\n}",
"func CheckFramebufferStatus(target uint32) uint32 {\n\tret, _, _ := syscall.Syscall(gpCheckFramebufferStatus, 1, uintptr(target), 0, 0)\n\treturn (uint32)(ret)\n}",
"func CheckNamedFramebufferStatus(framebuffer uint32, target uint32) uint32 {\n\tret, _, _ := syscall.Syscall(gpCheckNamedFramebufferStatus, 2, uintptr(framebuffer), uintptr(target), 0)\n\treturn (uint32)(ret)\n}",
"func CheckFramebufferStatus(target Enum) Enum {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\t__ret := C.glCheckFramebufferStatus(ctarget)\n\t__v := (Enum)(__ret)\n\treturn __v\n}",
"func CheckFramebufferStatus(target Enum) Enum {\n\treturn Enum(gl.CheckFramebufferStatus(uint32(target)))\n}",
"func CheckNamedFramebufferStatus(framebuffer uint32, target uint32) uint32 {\n\tret := C.glowCheckNamedFramebufferStatus(gpCheckNamedFramebufferStatus, (C.GLuint)(framebuffer), (C.GLenum)(target))\n\treturn (uint32)(ret)\n}",
"func CheckNamedFramebufferStatus(framebuffer uint32, target uint32) uint32 {\n\tret := C.glowCheckNamedFramebufferStatus(gpCheckNamedFramebufferStatus, (C.GLuint)(framebuffer), (C.GLenum)(target))\n\treturn (uint32)(ret)\n}",
"func IsFramebuffer(framebuffer uint32) bool {\n ret := C.glowIsFramebuffer(gpIsFramebuffer, (C.GLuint)(framebuffer))\n return ret == TRUE\n}",
"func IsFramebuffer(framebuffer uint32) bool {\n\tret, _, _ := syscall.Syscall(gpIsFramebuffer, 1, uintptr(framebuffer), 0, 0)\n\treturn ret != 0\n}",
"func IsFramebuffer(framebuffer Uint) Boolean {\n\tcframebuffer, _ := (C.GLuint)(framebuffer), cgoAllocsUnknown\n\t__ret := C.glIsFramebuffer(cframebuffer)\n\t__v := (Boolean)(__ret)\n\treturn __v\n}",
"func IsFramebuffer(framebuffer uint32) bool {\n\tret := C.glowIsFramebuffer(gpIsFramebuffer, (C.GLuint)(framebuffer))\n\treturn ret == TRUE\n}",
"func IsFramebuffer(framebuffer uint32) bool {\n\tret := C.glowIsFramebuffer(gpIsFramebuffer, (C.GLuint)(framebuffer))\n\treturn ret == TRUE\n}",
"func (ring *ringBuffer) isFull() bool {\n\tring.mutex.Lock()\n\tfull := !ring.spaceReady\n\tring.mutex.Unlock()\n\treturn full\n}",
"func IsFramebuffer(fb Framebuffer) bool {\n\treturn gl.IsFramebuffer(fb.Value)\n}",
"func (self *Geometry) IsDataBufferReady() bool {\n\treturn len(self.data_buffer_vpoints) > 0 || len(self.data_buffer_fpoints) > 0\n}",
"func (f *Frame) Full() bool {\n\treturn f.full == 1\n}",
"func check_complete(){\nif len(change_buffer)> 0{/* changing is false */\nbuffer= change_buffer\nchange_buffer= nil\nchanging= true\nchange_depth= include_depth\nloc= 0\nerr_print(\"! Change file entry did not match\")\n\n}\n}",
"func isUpdated() bool {\n\tupdated := true\n\tfor _, color := range rgb {\n\t\tif deviceTwinResult.Twin[color].Expected != nil &&\n\t\t\t((deviceTwinResult.Twin[color].Actual == nil && deviceTwinResult.Twin[color].Expected != nil) ||\n\t\t\t\tcompareValue(color)) {\n\t\t\tupdated = false\n\t\t\tbreak\n\t\t}\n\t}\n\treturn updated\n}",
"func IsRenderbuffer(renderbuffer uint32) bool {\n ret := C.glowIsRenderbuffer(gpIsRenderbuffer, (C.GLuint)(renderbuffer))\n return ret == TRUE\n}",
"func (API) GetFramebufferAttachmentInfo(state *api.State, thread uint64, attachment api.FramebufferAttachment) (width, height, index uint32, format *image.Format, err error) {\n\treturn 0, 0, 0, nil, fmt.Errorf(\"GVR does not support framebuffers\")\n}",
"func (s *frameStats) checkVideoHealth() error {\n\tif s.TotalFrames == 0 {\n\t\treturn errors.New(\"no frame was displayed\")\n\t}\n\n\t// If the test was running under QEMU, check the percentage of broken frames.\n\tif vm.IsRunningOnVM() {\n\t\t// Ratio of broken frames must be less than |threshold| %.\n\t\tconst threshold = 1.0\n\t\tblackPercentage := s.blackFramesPercentage()\n\t\tfrozenPercentage := s.frozenFramesPercentage()\n\t\tif threshold < blackPercentage+frozenPercentage {\n\t\t\treturn errors.Errorf(\"too many broken frames: black %.1f%%, frozen %.1f%% (total %d)\",\n\t\t\t\tblackPercentage, frozenPercentage, s.TotalFrames)\n\t\t}\n\t}\n\n\treturn nil\n}",
"func waitForStableReadings(ctx context.Context, backend Backend, width, height int, timeout, interval time.Duration, threshold float64) (reading int, err error) {\n\t// Keep the last numReadings for moving average purposes. Make it half the\n\t// size that the current timeout and interval would allow.\n\tnumReadings := int(math.Floor(float64(timeout / (2.0 * interval))))\n\n\tvar currentNumReadings int\n\tvar values = make([]int, numReadings)\n\n\terr = testing.Poll(ctx, func(ctx context.Context) error {\n\t\tvar e error\n\t\treading, e = backend.ReadFramebufferCount(ctx, width, height)\n\t\tif e != nil {\n\t\t\treturn testing.PollBreak(errors.Wrap(e, \"failed measuring\"))\n\t\t}\n\t\tvalues[currentNumReadings%numReadings] = reading\n\t\tcurrentNumReadings++\n\t\tif currentNumReadings < numReadings {\n\t\t\treturn errors.Errorf(\"need more values (got: %d and want: %d)\", currentNumReadings, numReadings)\n\t\t}\n\t\taverage := mean(values)\n\n\t\tif math.Abs(float64(reading)-average) > threshold {\n\t\t\treturn errors.Errorf(\"reading %d is not within %.1f of %.1f\", reading, threshold, average)\n\t\t}\n\t\treturn nil\n\t}, &testing.PollOptions{Timeout: timeout, Interval: interval})\n\treturn reading, err\n}",
"func (fm FinalModelEnumUInt32) Verify() (bool, int) {\n if (fm.buffer.Offset() + fm.FBEOffset() + fm.FBESize()) > fm.buffer.Size() {\n return false, 0\n }\n\n return true, fm.FBESize()\n}",
"func initFramebuffer(width, height int) {\n\tlog.Printf(\"[Video]: Initializing HW render (%v x %v).\\n\", width, height)\n\n\tgl.GenFramebuffers(1, &fboID)\n\tgl.BindFramebuffer(gl.FRAMEBUFFER, fboID)\n\n\t//gl.GenTextures(1, &video.texID)\n\tgl.BindTexture(gl.TEXTURE_2D, texID)\n\tgl.TexStorage2D(gl.TEXTURE_2D, 1, gl.RGBA8, int32(width), int32(height))\n\n\tgl.FramebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texID, 0)\n\n\thw := state.Global.Core.HWRenderCallback\n\n\tgl.BindRenderbuffer(gl.RENDERBUFFER, 0)\n\n\tif gl.CheckFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE {\n\t\tlog.Fatalln(\"[Video] Framebuffer is not complete.\")\n\t}\n\n\tgl.ClearColor(0, 0, 0, 1)\n\tif hw.Depth && hw.Stencil {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT | gl.STENCIL_BUFFER_BIT)\n\t} else if hw.Depth {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)\n\t} else {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT)\n\t}\n\n\tgl.BindFramebuffer(gl.FRAMEBUFFER, 0)\n}",
"func IsBuffer(buffer uint32) bool {\n ret := C.glowIsBuffer(gpIsBuffer, (C.GLuint)(buffer))\n return ret == TRUE\n}",
"func (fr *Frame) Valid() bool {\n\treturn false\n}",
"func (buf *ListBuffer) IsFull() bool {\n\treturn buf.Count >= MaxBufferCount\n}",
"func CheckScreenshot(ctx context.Context, tconn *chrome.TestConn, downloadsPath string, source CaptureModeSource) error {\n\timageConfig, err := retrieveCaptureImageConfig(downloadsPath)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to get image config\")\n\t}\n\n\tfullScreenBounds, err := calculateCaptureSurfaceFullScreenBounds(ctx, tconn)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to get full screen bounds\")\n\t}\n\n\tswitch source {\n\tcase FullScreen:\n\t\tcompareScreenshotDimensions(imageConfig, fullScreenBounds, source)\n\tcase PartialScreen:\n\t\texpectedScreenShotBounds := coords.NewSize(fullScreenBounds.Width/2, fullScreenBounds.Height/2)\n\t\tcompareScreenshotDimensions(imageConfig, expectedScreenShotBounds, source)\n\tcase Window:\n\t\tactiveWindow, err := ash.GetActiveWindow(ctx, tconn)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to find active window\")\n\t\t}\n\t\texpectedScreenShotBounds := coords.NewSize(activeWindow.BoundsInRoot.Width, activeWindow.BoundsInRoot.Height)\n\t\tcompareScreenshotDimensions(imageConfig, expectedScreenShotBounds, source)\n\tdefault:\n\t\treturn errors.New(\"unknown screenshot type\")\n\t}\n\n\treturn nil\n}",
"func CheckHepevtConsistency(f *os.File) bool {\n\t\n\tif f == nil {\n\t\tf = os.Stdout\n\t}\n\tc_fd := C.int(f.Fd())\n\t_ = f.Sync()\n\tc_mode := C.CString(\"a\")\n\tdefer C.free(unsafe.Pointer(c_mode))\n\tc_f := C.fdopen(c_fd, c_mode)\n\tC.fflush(c_f)\n\n\to := C.hepevt_check_hepevt_consistency(c_f)\n\tC.fflush(c_f)\n\t_ = f.Sync()\n\n\tif o != C.int(0) {\n\t\treturn true\n\t}\n\treturn false\n}",
"func (cca *cookedSyncCmdArgs) scanningComplete() bool {\n\treturn atomic.LoadUint32(&cca.atomicScanningStatus) > 0\n}",
"func (_e *MockCompactionPlanContext_Expecter) isFull() *MockCompactionPlanContext_isFull_Call {\n\treturn &MockCompactionPlanContext_isFull_Call{Call: _e.mock.On(\"isFull\")}\n}",
"func (b *box) completed() bool {\n\treturn b.setValues == 9\n}",
"func (o *HyperflexSnapshotStatus) HasPctComplete() bool {\n\tif o != nil && o.PctComplete != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func (e *ControlleeExpectations) Fulfilled() bool {\n\t// TODO: think about why this line being atomic doesn't matter\n\treturn atomic.LoadInt64(&e.add) <= 0 && atomic.LoadInt64(&e.del) <= 0\n}",
"func compareGraphicsMemoryBeforeAfter(ctx context.Context, payload func() error, backend Backend, roundedWidth, roundedHeight int) (err error) {\n\tvar before, during, after int\n\n\tif before, err = readStableObjectCount(ctx, backend, roundedWidth, roundedHeight); err != nil {\n\t\treturn errors.Wrap(err, \"failed to get the framebuffer object count\")\n\t}\n\n\ttesting.ContextLog(ctx, \"Running the payload() and measuring the number of graphics objects during its execution\")\n\tc := make(chan error)\n\tgo func(c chan error) {\n\t\tc <- payload()\n\t}(c)\n\t// Note: We don't wait for the ReadFramebufferCount() to finish, just keep\n\t// measuring until we get a non-zero value in during, for further comparison\n\t// below.\n\tgo func() {\n\t\tconst pollTimeout = 10 * time.Second\n\t\tconst pollInterval = 100 * time.Millisecond\n\t\t_ = testing.Poll(ctx, func(ctx context.Context) error {\n\t\t\t// TODO(crbug.com/1047514): instead of blindly sampling the amount of\n\t\t\t// objects during the test and comparing them further down, verify them\n\t\t\t// here directly.\n\t\t\tif during, _ = backend.ReadFramebufferCount(ctx, roundedWidth, roundedHeight); during == before {\n\t\t\t\treturn errors.New(\"Still waiting for graphics objects\")\n\t\t\t}\n\t\t\treturn nil\n\t\t}, &testing.PollOptions{Timeout: pollTimeout, Interval: pollInterval})\n\t}()\n\terr = <-c\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif after, err = readStableObjectCount(ctx, backend, roundedWidth, roundedHeight); err != nil {\n\t\treturn errors.Wrap(err, \"failed to get the framebuffer object count\")\n\t}\n\tif before != after {\n\t\treturn errors.Wrapf(err, \"graphics objects of size %d x %d do not coincide: before=%d, after=%d\", roundedWidth, roundedHeight, before, after)\n\t}\n\tif during == before {\n\t\treturn errors.Wrapf(err, \"graphics objects of size %d x %d did not increase during play back: before=%d, during=%d\", roundedWidth, roundedHeight, before, during)\n\t}\n\ttesting.ContextLogf(ctx, \"Graphics objects of size %d x %d before=%d, during=%d, after=%d\", roundedWidth, roundedHeight, before, during, after)\n\treturn nil\n}",
"func (f Frame) Valid() bool {\n\treturn f != InvalidFrame\n}",
"func ScreenshotIsTaken(ctx context.Context, s *testing.State) {\n\tcr := s.FixtValue().(*chrome.Chrome)\n\n\tcleanupCtx := ctx\n\tctx, cancel := ctxutil.Shorten(ctx, 5*time.Second)\n\tdefer cancel()\n\n\ttconn, err := cr.TestAPIConn(ctx)\n\tif err != nil {\n\t\ts.Fatal(\"Failed to connect to Test API: \", err)\n\t}\n\tdefer faillog.DumpUITreeWithScreenshotOnError(cleanupCtx, s.OutDir(), s.HasError, cr,\n\t\t\"ui_dump\")\n\n\tui := uiauto.New(tconn).WithTimeout(20 * time.Second)\n\n\t// Launch feedback app and go to share data page.\n\tfeedbackRootNode, err := feedbackapp.LaunchAndGoToShareDataPage(ctx, tconn)\n\tif err != nil {\n\t\ts.Fatal(\"Failed to launch feedback app and go to share data page: \", err)\n\t}\n\n\t// Verify screenshot checkbox and image exist.\n\t// Verify clicking screenshot will open screenshot diaglog.\n\tscreenshotCheckBox := nodewith.Name(\"Screenshot\").Role(role.CheckBox).Ancestor(\n\t\tfeedbackRootNode)\n\tscreenshotImg := nodewith.Role(role.Image).Ancestor(feedbackRootNode)\n\tscreenshotDialog := nodewith.Role(role.Dialog).Ancestor(feedbackRootNode).First()\n\n\tif err := uiauto.Combine(\"Verify screenshot exists\",\n\t\tui.WaitUntilExists(screenshotCheckBox),\n\t\tui.DoDefault(screenshotImg),\n\t\tui.WaitUntilExists(screenshotDialog),\n\t)(ctx); err != nil {\n\t\ts.Fatal(\"Failed to verify screenshot exists: \", err)\n\t}\n\n\t// Verify clicking screenshot button will close screenshot diaglog.\n\tscreenshotButton := nodewith.Name(\"Back\").Role(role.Button).Ancestor(feedbackRootNode)\n\n\tif err := uiauto.Combine(\"Verify clicking screenshot button closes dialog\",\n\t\tui.DoDefault(screenshotButton),\n\t\tui.WaitUntilGone(screenshotDialog),\n\t)(ctx); err != nil {\n\t\ts.Fatal(\"Failed to verify clicking screenshot button closes dialog: \", err)\n\t}\n}",
"func (bg *bufferedGroup) haveCompleteGroup() bool {\n if len(bg.images) == 0 {\n log.Panicf(\"a buffered group should never be empty\")\n }\n\n return bg.firstTimeKey != bg.lastTimeKey\n}",
"func qr_decoder_is_busy(p _QrDecoderHandle) int {\n\tv := C.qr_decoder_is_busy(C.QrDecoderHandle(p))\n\treturn int(v)\n}",
"func waitForCompletion(sensor SensorInterface, i2c *i2c.I2C) (timeout bool, err error) {\n\tfor i := 0; i < 10; i++ {\n\t\tflag, err := sensor.IsBusy(i2c)\n\t\tif err != nil {\n\t\t\treturn false, err\n\t\t}\n\t\tif flag == false {\n\t\t\treturn false, nil\n\t\t}\n\t\ttime.Sleep(5 * time.Millisecond)\n\t}\n\treturn true, nil\n}",
"func (d *Driver) fingerprintSuccessful() bool {\n\td.fingerprintLock.Lock()\n\tdefer d.fingerprintLock.Unlock()\n\treturn d.fingerprintSuccess == nil || *d.fingerprintSuccess\n}",
"func (event *mappingEvent) isFull() bool {\n\treturn event.pending > full\n}",
"func (receiver *Receiver) checkFrame() error {\n\tvar frame uint64\n\tif receiver.transaction != nil {\n\t\tframe = receiver.transaction.frame\n\t} else {\n\t\tvar err error\n\t\tframe, err = receiver.meta.GetUint64(rxFrame)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tframe++\n\tframePath, err := receiver.storage.Frame(frame)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif _, err := os.Stat(framePath); err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\treturn io.EOF\n\t\t}\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (renderbuffer Renderbuffer) IsRenderbuffer() bool {\n\treturn gl.IsRenderbuffer(uint32(renderbuffer))\n}",
"func (f *fragment) awaitSnapshot() {\n\tf.mu.Lock()\n\tdefer f.mu.Unlock()\n\tfor f.snapshotting {\n\t\tf.snapshotCond.Wait()\n\t}\n}",
"func (o *HyperflexSnapshotStatus) GetPctCompleteOk() (*int64, bool) {\n\tif o == nil || o.PctComplete == nil {\n\t\treturn nil, false\n\t}\n\treturn o.PctComplete, true\n}",
"func (rgba *rgba) checker() error {\n\tif rgba.Hex == \"\" {\n\t\treturn fmt.Errorf(\"this is an uninitialised rgba\")\n\t}\n\treturn nil\n}",
"func (f *framer) messageReady() (int, bool) {\n\tif len(f.buffer) < 2 {\n\t\treturn 0, false\n\t}\n\tmsgSize := int(binary.LittleEndian.Uint16(f.buffer[:2]))\n\n\treturn msgSize, len(f.buffer) >= msgSize+2\n}",
"func (s *BaseEvent) BSuccess() bool {\n if !s.sysParamsExtracted { panic(\"!s.sysParamsExtracted\"); }\n return (s.sysParams.BSuccess == 1)\n}",
"func checkMagConnection() bool {\n\tsetSetting(0x25, 0x0C|0x80)\n\tsetSetting(0x26, 0x00)\n\tsetSetting(0x27, 0x81) // Read one byte.\n\n\ttime.Sleep(100 * time.Microsecond)\n\n\tr, err := i2cbus.ReadByteFromReg(0x68, 0x49)\n\tchkErr(err)\n\n\tret := r == 0x48\n\n\t// Read calibration data.\n\tsetSetting(0x25, 0x0C|0x80)\n\tsetSetting(0x26, 0x10)\n\tsetSetting(0x27, 0x83) // Read three bytes, (CalX, CalY, CalZ).\n\n\tmxcal, err := i2cbus.ReadByteFromReg(0x68, 0x49)\n\tchkErr(err)\n\tmycal, err := i2cbus.ReadByteFromReg(0x68, 0x4A)\n\tchkErr(err)\n\tmzcal, err := i2cbus.ReadByteFromReg(0x68, 0x4B)\n\tchkErr(err)\n\n\tmagXcal = (float64(mxcal)-128)/256.0 + 1.0\n\tmagYcal = (float64(mycal)-128)/256.0 + 1.0\n\tmagZcal = (float64(mzcal)-128)/256.0 + 1.0\n\n\treturn ret\n}",
"func (o *V0037JobProperties) GetGpuBindingOk() (*string, bool) {\n\tif o == nil || o.GpuBinding == nil {\n\t\treturn nil, false\n\t}\n\treturn o.GpuBinding, true\n}",
"func (f *Sink) HasReady() bool {\n\treturn f.readyList.Len() != 0\n}",
"func (d *decoder) finished() bool {\n\treturn d.br.finished() && d.dt[d.state].nbBits > 0\n}",
"func IsRenderbuffer(renderbuffer uint32) bool {\n\tret, _, _ := syscall.Syscall(gpIsRenderbuffer, 1, uintptr(renderbuffer), 0, 0)\n\treturn ret != 0\n}",
"func (self VideoMode) IsValid() bool {\n\treturn int(C.sfVideoMode_isValid(*self.Cref)) == 1\n}",
"func (p *Prober) isReady() bool {\n\tready := atomic.LoadUint32(&p.ready)\n\treturn ready > 0\n}",
"func (DrawTexture) IsDrawAction() {}",
"func (g GenericBackend) ReadFramebufferCount(ctx context.Context, width, height int) (framebuffers int, e error) {\n\tf, err := os.Open(fmt.Sprintf(genericFramebufferFilePattern, g.index))\n\tif err != nil {\n\t\treturn framebuffers, errors.Wrap(err, \"failed to open dri file\")\n\t}\n\n\ttext, err := ioutil.ReadAll(f)\n\tif err != nil {\n\t\treturn framebuffers, errors.Wrap(err, \"failed to read dri file\")\n\t}\n\tlines := strings.Split(string(text), \"\\n\")\n\tfor _, line := range lines {\n\t\t// The line we're looking for looks like \"...size=1920x1080\"\n\t\tvar fbWidth, fbHeight int\n\t\tif _, err := fmt.Sscanf(line, \" size=%dx%d\", &fbWidth, &fbHeight); err != nil {\n\t\t\tcontinue\n\t\t}\n\t\tif fbWidth == width && fbHeight == height {\n\t\t\tframebuffers++\n\t\t}\n\t}\n\treturn\n}",
"func IsRenderbuffer(renderbuffer uint32) bool {\n\tret := C.glowIsRenderbuffer(gpIsRenderbuffer, (C.GLuint)(renderbuffer))\n\treturn ret == TRUE\n}",
"func IsRenderbuffer(renderbuffer uint32) bool {\n\tret := C.glowIsRenderbuffer(gpIsRenderbuffer, (C.GLuint)(renderbuffer))\n\treturn ret == TRUE\n}",
"func CheckFsCreationInProgress(device model.Device) (inProgress bool, err error) {\n\treturn false, fmt.Errorf(\"FS progress check is not implemented for Mac\")\n}",
"func (g I915Backend) ReadFramebufferCount(ctx context.Context, width, height int) (framebuffers int, e error) {\n\tf, err := os.Open(i915FramebufferFile)\n\tif err != nil {\n\t\treturn framebuffers, errors.Wrap(err, \"failed to open dri file\")\n\t}\n\ttext, err := ioutil.ReadAll(f)\n\tif err != nil {\n\t\treturn framebuffers, errors.Wrap(err, \"failed to read dri file\")\n\t}\n\tlines := strings.Split(string(text), \"\\n\")\n\tfor _, line := range lines {\n\t\t// The line we're looking for looks like \"user size: 1920 x 1080,...\"\n\t\tvar fbWidth, fbHeight int\n\t\tif _, err := fmt.Sscanf(line, \"user size: %d x %d\", &fbWidth, &fbHeight); err != nil {\n\t\t\tcontinue\n\t\t}\n\t\tif fbWidth == width && fbHeight == height {\n\t\t\tframebuffers++\n\t\t}\n\t}\n\treturn\n}",
"func (boardService *BoardService) CheckBoardIsFull() bool {\n\tflag := false\n\tvar i uint8\n\t//Loops through all cells and checks for NoMark, if at the end of loop, all elements have been marked, then board is full\n\tfor i = 0; i < boardService.Board.Size; i++ {\n\t\tif boardService.Board.Cells[i].Mark != components.NoMark {\n\t\t\tflag = true\n\t\t} else {\n\t\t\tflag = false\n\t\t}\n\t}\n\treturn flag\n}",
"func (debugging *debuggingOpenGL) BindFramebuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindFramebuffer\", target, buffer)\n\tdebugging.gl.BindFramebuffer(target, buffer)\n\tdebugging.recordExit(\"BindFramebuffer\")\n}",
"func (i *ImageBuf) Initialized() bool {\n\tif i.ptr == nil {\n\t\treturn false\n\t}\n\tret := bool(C.ImageBuf_initialized(i.ptr))\n\truntime.KeepAlive(i)\n\treturn ret\n}",
"func (o *UcsdBackupInfoAllOf) HasPercentageCompletion() bool {\n\tif o != nil && o.PercentageCompletion != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func processFrame(b *bytes.Buffer) (bool, int) {\n\tmagic := make([]byte, len(magicBytes))\n\tb.Read(magic)\n\n\tlenBytes := make([]byte, 4)\n\tb.Read(lenBytes)\n\n\treturn bytes.Equal(magic, magicBytes), int(binary.BigEndian.Uint32(lenBytes))\n}",
"func (cs *cpuState) Framebuffer() []byte {\n\treturn cs.LCD.framebuffer[:]\n}",
"func IsRenderbuffer(renderbuffer Uint) Boolean {\n\tcrenderbuffer, _ := (C.GLuint)(renderbuffer), cgoAllocsUnknown\n\t__ret := C.glIsRenderbuffer(crenderbuffer)\n\t__v := (Boolean)(__ret)\n\treturn __v\n}",
"func (cb *Buffer) WriteAvailability() int {\n\tif cb.full {\n\t\treturn 0\n\t}\n\n\tif cb.wpos <= cb.rpos {\n\t\treturn len(cb.buffer) - cb.rpos + cb.wpos\n\t}\n\n\treturn cb.wpos - cb.rpos\n}",
"func fileStatus(dasquery dasql.DASQuery) bool {\n\tspec := dasquery.Spec\n\tstatus := spec[\"status\"]\n\tif status != nil {\n\t\tval := status.(string)\n\t\tif strings.ToLower(val) == \"valid\" {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}",
"func IsBuffer(buffer uint32) bool {\n\tret := C.glowIsBuffer(gpIsBuffer, (C.GLuint)(buffer))\n\treturn ret == TRUE\n}",
"func IsBuffer(buffer uint32) bool {\n\tret := C.glowIsBuffer(gpIsBuffer, (C.GLuint)(buffer))\n\treturn ret == TRUE\n}",
"func (display smallEpd) waitUntilIdle() (err error) {\n\tlog.Debug(\"EPD42 WaitUntilIdle\")\n\tfor {\n\t\tbusy, err := display.driver.DigitalRead(display.BUSY)\n\t\tif !busy {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"Error checking bust %s\\n\", err.Error())\n\t\t}\n\t\tfmt.Printf(\".\")\n\t\ttime.Sleep(200 * time.Millisecond)\n\t}\n\tlog.Debug(\"EPD42 WaitUntilIdle End\")\n\treturn\n}",
"func flagsComplete() (allValid bool, err string) {\n\tallValid = true\n\tif srcAddress == \"\" {\n\t\terr = err + \"Invalid source address.\\n\"\n\t\tallValid = false\n\t}\n\n\treturn allValid, err\n}",
"func (r *RingBuffer) IsFull() bool {\n\tr.mu.Lock()\n\tdefer r.mu.Unlock()\n\n\treturn r.isFull\n}",
"func (r *RingBuffer) IsFull() bool {\n\tr.mu.Lock()\n\tdefer r.mu.Unlock()\n\n\treturn r.isFull\n}",
"func (px *Pex) full() bool {\n\treturn px.maxPeers > 0 && len(px.peers) >= px.maxPeers\n}",
"func (o *UcsdBackupInfoAllOf) GetPercentageCompletionOk() (*int64, bool) {\n\tif o == nil || o.PercentageCompletion == nil {\n\t\treturn nil, false\n\t}\n\treturn o.PercentageCompletion, true\n}",
"func (fail *failingState) HitOk() bool {\n\tif fail.toggle == 0 {\n\t\treturn true\n\t}\n\n\tfail.mtx.Lock()\n\tdefer fail.mtx.Unlock()\n\n\tif fail.toggle <= fail.count {\n\t\tfail.up = !fail.up\n\t\tfail.count = 0\n\t}\n\tfail.count++\n\tlog.Printf(\"up: %t count: %d next flip: %d\\n\", fail.up, fail.count, fail.toggle)\n\treturn fail.up\n}",
"func (r pciResource) valid() bool {\n\treturn r.flags != 0 && r.start != 0 && r.end != 0\n}",
"func (e *Edge) isComplete() bool {\n\treturn len(e.ClientService) != 0 && len(e.ServerService) != 0\n}",
"func waitForScanStatus(t *testing.T, f *framework.Framework, namespace, name string, targetStaus complianceoperatorv1alpha1.ComplianceScanStatusPhase) error {\n\texampleComplianceScan := &complianceoperatorv1alpha1.ComplianceScan{}\n\tvar lastErr error\n\t// retry and ignore errors until timeout\n\ttimeouterr := wait.Poll(retryInterval, timeout, func() (bool, error) {\n\t\tlastErr = f.Client.Get(goctx.TODO(), types.NamespacedName{Name: name, Namespace: namespace}, exampleComplianceScan)\n\t\tif lastErr != nil {\n\t\t\tif apierrors.IsNotFound(lastErr) {\n\t\t\t\tt.Logf(\"Waiting for availability of %s compliancescan\\n\", name)\n\t\t\t\treturn false, nil\n\t\t\t}\n\t\t\tt.Logf(\"Retrying. Got error: %v\\n\", lastErr)\n\t\t\treturn false, nil\n\t\t}\n\n\t\tif exampleComplianceScan.Status.Phase == targetStaus {\n\t\t\treturn true, nil\n\t\t}\n\t\tt.Logf(\"Waiting for run of %s compliancescan (%s)\\n\", name, exampleComplianceScan.Status.Phase)\n\t\treturn false, nil\n\t})\n\t// Error in function call\n\tif lastErr != nil {\n\t\treturn lastErr\n\t}\n\t// Timeout\n\tif timeouterr != nil {\n\t\treturn timeouterr\n\t}\n\tt.Logf(\"ComplianceScan ready (%s)\\n\", exampleComplianceScan.Status.Phase)\n\treturn nil\n}",
"func (n number) valid(img image.Image, offset int) float64 {\n\n\tmatching := 0.0\n\n\tfor _, point := range n.highlighted {\n\t\tr, g, b, _ := img.At(offset-n.width+point.X, point.Y).RGBA()\n\t\tif r == 0 && g == 0 && b == 0 {\n\t\t\tmatching += 1.0\n\t\t}\n\t}\n\n\tpercentMatching := matching / float64(len(n.highlighted))\n\n\t// log.Printf(\"%d matched %f percent\\n\", n.value, percentMatching*100)\n\n\treturn percentMatching\n}",
"func (batch *Batch) IsFull() bool {\n\treturn len(batch.messages) == batch.maxSize\n}",
"func cursorCompleted(cursor int, boundary int) bool {\n\treturn cursor >= boundary\n}",
"func (l *Level) IsFull() bool {\n\tif l.Balls.Length() >= l.MaxBalls {\n\t\treturn true\n\t}\n\treturn false\n}",
"func waitForCompletion(done chan bool) bool {\n\ttimer := time.NewTimer(totalWaitTime)\n\tdefer timer.Stop()\n\tselect {\n\tcase <-done:\n\t\treturn true\n\tcase <-timer.C:\n\t\treturn false\n\t}\n}",
"func (o *ApplianceImageBundleAllOf) GetFingerprintOk() (*string, bool) {\n\tif o == nil || o.Fingerprint == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Fingerprint, true\n}",
"func (b *BlockChecker) FullCheck(t *testing.T) {\n\tseqChecker := &functest.SeqRFuncChecker{\n\t\tF: &rnn.BlockSeqFunc{B: b.B},\n\t\tInput: b.Input,\n\t\tVars: b.Vars,\n\t\tRV: b.RV,\n\t\tDelta: b.Delta,\n\t\tPrec: b.Prec,\n\t}\n\tseqChecker.FullCheck(t)\n\tb.testNilUpstream(t)\n\tb.testNilUpstreamR(t)\n}",
"func checkNotActive() error {\n\toperationDetails := activeOperation\n\tif operationDetails != nil {\n\t\tselect {\n\t\tcase <-operationDetails.exportDone:\n\t\t\t// nil-out any stale operation\n\t\t\tactiveOperation = nil\n\t\tdefault:\n\t\t\tif operationDetails.isRestore {\n\t\t\t\treturn fmt.Errorf(\"restore operation already in progress for height %d\", operationDetails.blockHeight)\n\t\t\t} else {\n\t\t\t\treturn fmt.Errorf(\"export operation already in progress for height %d\", operationDetails.blockHeight)\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}",
"func BindFramebuffer(target Enum, framebuffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcframebuffer, _ := (C.GLuint)(framebuffer), cgoAllocsUnknown\n\tC.glBindFramebuffer(ctarget, cframebuffer)\n}",
"func checkBluetooth() {\n\t// init part: get the list of paired bluetooth devices\n\tresult, err := exec.Command(\"bluetoothctl\", \"devices\").Output()\n\tif err != nil {\n\t\tlogger.Error(err.Error())\n\t} else {\n\t\tarr := strings.Split(string(result), \"\\n\")\n\t\tlogger.Info(\"BT Devices paired:\")\n\t\tfor _, s := range arr {\n\t\t\tparts := strings.Split(s, \" \")\n\t\t\tif len(parts) > 1 {\n\t\t\t\tinfo, err2 := exec.Command(\"bluetoothctl\", \"info\", parts[1]).Output()\n\t\t\t\tif err2 == nil {\n\t\t\t\t\tif strings.Contains(string(info), \"Audio Sink\") {\n\t\t\t\t\t\tbtDevices = append(btDevices, parts[1])\n\t\t\t\t\t\tlogger.Info(parts[1])\n\t\t\t\t\t\tif strings.Contains(string(info), \"Connected: yes\") {\n\t\t\t\t\t\t\tlogger.Info(\"BT connected to \" + parts[1])\n\t\t\t\t\t\t\tbluetoothConnected = true\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treadyForMplayer = true\n}",
"func (f *Framebuffer) useState() {\n\t// Bind the framebuffer now.\n\tif f.ctx.fastBindFramebuffer(f.o) {\n\t\tf.GLCall(nil)\n\t}\n\tf.GLCall(f.Loaded)\n}",
"func (native *OpenGL) BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tgl.BlitFramebuffer(srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter)\n}",
"func (b *board) checkDraw() bool {\n\tfor r := 0; r < size; r++ {\n\t\tfor c := 0; c < size; c++ {\n\t\t\tif b.cells[c][r] == N {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t}\n\treturn true\n}"
] | [
"0.73928267",
"0.70599395",
"0.7016133",
"0.7013041",
"0.69053",
"0.6679579",
"0.6654849",
"0.6514342",
"0.64238244",
"0.64238244",
"0.6319358",
"0.5978551",
"0.58935463",
"0.579545",
"0.579545",
"0.5652102",
"0.56273943",
"0.5616955",
"0.5486069",
"0.5477105",
"0.53774965",
"0.5331109",
"0.5242837",
"0.51874745",
"0.5152993",
"0.51515174",
"0.5149183",
"0.5147049",
"0.51119864",
"0.5098306",
"0.5068124",
"0.5034749",
"0.5029903",
"0.50112754",
"0.50106186",
"0.50083154",
"0.50002617",
"0.49938226",
"0.4976891",
"0.49722472",
"0.49677396",
"0.49669737",
"0.49605048",
"0.4933649",
"0.4903357",
"0.48976323",
"0.48918205",
"0.4878514",
"0.48777294",
"0.48746806",
"0.48730773",
"0.4863409",
"0.48373112",
"0.48187432",
"0.48125014",
"0.4809325",
"0.48065674",
"0.48063347",
"0.48056927",
"0.48019442",
"0.47970888",
"0.47936296",
"0.47936296",
"0.47912422",
"0.4776496",
"0.4775387",
"0.4764304",
"0.47642428",
"0.47617427",
"0.47592047",
"0.47487128",
"0.47472435",
"0.47406393",
"0.474039",
"0.47323123",
"0.47323123",
"0.4728093",
"0.47273967",
"0.47269708",
"0.47269708",
"0.47253412",
"0.47204444",
"0.47168258",
"0.4716804",
"0.4713971",
"0.47136793",
"0.47111723",
"0.4704208",
"0.46921062",
"0.467299",
"0.46531856",
"0.46511358",
"0.46485806",
"0.46477306",
"0.464522",
"0.46441934",
"0.46440506",
"0.46437424",
"0.46345195"
] | 0.6496274 | 9 |
check the completeness status of a framebuffer | func CheckNamedFramebufferStatus(framebuffer uint32, target uint32) uint32 {
ret := C.glowCheckNamedFramebufferStatus(gpCheckNamedFramebufferStatus, (C.GLuint)(framebuffer), (C.GLenum)(target))
return (uint32)(ret)
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (debugging *debuggingOpenGL) CheckFramebufferStatus(target uint32) uint32 {\n\tdebugging.recordEntry(\"CheckFramebufferStatus\", target)\n\tresult := debugging.gl.CheckFramebufferStatus(target)\n\tdebugging.recordExit(\"CheckFramebufferStatus\")\n\treturn result\n}",
"func (native *OpenGL) CheckFramebufferStatus(target uint32) uint32 {\n\treturn gl.CheckFramebufferStatus(target)\n}",
"func (f *Framebuffer) Status() error {\n\tf.useState()\n\te := f.ctx.O.Call(\"checkFramebufferStatus\", f.ctx.FRAMEBUFFER).Int()\n\n\t// Avoid the larger switch statement below, as no error is the most likely\n\t// case.\n\tif e == f.ctx.FRAMEBUFFER_COMPLETE {\n\t\treturn nil\n\t}\n\n\tswitch e {\n\tcase f.ctx.FRAMEBUFFER_INCOMPLETE_ATTACHMENT:\n\t\treturn gfx.ErrFramebufferIncompleteAttachment\n\tcase f.ctx.FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:\n\t\treturn gfx.ErrFramebufferIncompleteMissingAttachment\n\tcase f.ctx.FRAMEBUFFER_INCOMPLETE_DIMENSIONS:\n\t\treturn gfx.ErrFramebufferIncompleteDimensions\n\tcase f.ctx.FRAMEBUFFER_UNSUPPORTED:\n\t\treturn gfx.ErrFramebufferIncompleteDimensions\n\tdefault:\n\t\tpanic(fmt.Sprintf(\"webgl: unhandled framebuffer status 0x%X\\n\", e))\n\t}\n}",
"func CheckFramebufferStatus(target uint32) uint32 {\n ret := C.glowCheckFramebufferStatus(gpCheckFramebufferStatus, (C.GLenum)(target))\n return (uint32)(ret)\n}",
"func CheckFramebufferStatus(target uint32) uint32 {\n\tret, _, _ := syscall.Syscall(gpCheckFramebufferStatus, 1, uintptr(target), 0, 0)\n\treturn (uint32)(ret)\n}",
"func CheckNamedFramebufferStatus(framebuffer uint32, target uint32) uint32 {\n\tret, _, _ := syscall.Syscall(gpCheckNamedFramebufferStatus, 2, uintptr(framebuffer), uintptr(target), 0)\n\treturn (uint32)(ret)\n}",
"func CheckFramebufferStatus(target Enum) Enum {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\t__ret := C.glCheckFramebufferStatus(ctarget)\n\t__v := (Enum)(__ret)\n\treturn __v\n}",
"func CheckFramebufferStatus(target Enum) Enum {\n\treturn Enum(gl.CheckFramebufferStatus(uint32(target)))\n}",
"func CheckFramebufferStatus(target uint32) uint32 {\n\tret := C.glowCheckFramebufferStatus(gpCheckFramebufferStatus, (C.GLenum)(target))\n\treturn (uint32)(ret)\n}",
"func CheckFramebufferStatus(target uint32) uint32 {\n\tret := C.glowCheckFramebufferStatus(gpCheckFramebufferStatus, (C.GLenum)(target))\n\treturn (uint32)(ret)\n}",
"func IsFramebuffer(framebuffer uint32) bool {\n ret := C.glowIsFramebuffer(gpIsFramebuffer, (C.GLuint)(framebuffer))\n return ret == TRUE\n}",
"func IsFramebuffer(framebuffer uint32) bool {\n\tret, _, _ := syscall.Syscall(gpIsFramebuffer, 1, uintptr(framebuffer), 0, 0)\n\treturn ret != 0\n}",
"func IsFramebuffer(framebuffer Uint) Boolean {\n\tcframebuffer, _ := (C.GLuint)(framebuffer), cgoAllocsUnknown\n\t__ret := C.glIsFramebuffer(cframebuffer)\n\t__v := (Boolean)(__ret)\n\treturn __v\n}",
"func IsFramebuffer(framebuffer uint32) bool {\n\tret := C.glowIsFramebuffer(gpIsFramebuffer, (C.GLuint)(framebuffer))\n\treturn ret == TRUE\n}",
"func IsFramebuffer(framebuffer uint32) bool {\n\tret := C.glowIsFramebuffer(gpIsFramebuffer, (C.GLuint)(framebuffer))\n\treturn ret == TRUE\n}",
"func (ring *ringBuffer) isFull() bool {\n\tring.mutex.Lock()\n\tfull := !ring.spaceReady\n\tring.mutex.Unlock()\n\treturn full\n}",
"func IsFramebuffer(fb Framebuffer) bool {\n\treturn gl.IsFramebuffer(fb.Value)\n}",
"func (self *Geometry) IsDataBufferReady() bool {\n\treturn len(self.data_buffer_vpoints) > 0 || len(self.data_buffer_fpoints) > 0\n}",
"func (f *Frame) Full() bool {\n\treturn f.full == 1\n}",
"func check_complete(){\nif len(change_buffer)> 0{/* changing is false */\nbuffer= change_buffer\nchange_buffer= nil\nchanging= true\nchange_depth= include_depth\nloc= 0\nerr_print(\"! Change file entry did not match\")\n\n}\n}",
"func isUpdated() bool {\n\tupdated := true\n\tfor _, color := range rgb {\n\t\tif deviceTwinResult.Twin[color].Expected != nil &&\n\t\t\t((deviceTwinResult.Twin[color].Actual == nil && deviceTwinResult.Twin[color].Expected != nil) ||\n\t\t\t\tcompareValue(color)) {\n\t\t\tupdated = false\n\t\t\tbreak\n\t\t}\n\t}\n\treturn updated\n}",
"func IsRenderbuffer(renderbuffer uint32) bool {\n ret := C.glowIsRenderbuffer(gpIsRenderbuffer, (C.GLuint)(renderbuffer))\n return ret == TRUE\n}",
"func (API) GetFramebufferAttachmentInfo(state *api.State, thread uint64, attachment api.FramebufferAttachment) (width, height, index uint32, format *image.Format, err error) {\n\treturn 0, 0, 0, nil, fmt.Errorf(\"GVR does not support framebuffers\")\n}",
"func (s *frameStats) checkVideoHealth() error {\n\tif s.TotalFrames == 0 {\n\t\treturn errors.New(\"no frame was displayed\")\n\t}\n\n\t// If the test was running under QEMU, check the percentage of broken frames.\n\tif vm.IsRunningOnVM() {\n\t\t// Ratio of broken frames must be less than |threshold| %.\n\t\tconst threshold = 1.0\n\t\tblackPercentage := s.blackFramesPercentage()\n\t\tfrozenPercentage := s.frozenFramesPercentage()\n\t\tif threshold < blackPercentage+frozenPercentage {\n\t\t\treturn errors.Errorf(\"too many broken frames: black %.1f%%, frozen %.1f%% (total %d)\",\n\t\t\t\tblackPercentage, frozenPercentage, s.TotalFrames)\n\t\t}\n\t}\n\n\treturn nil\n}",
"func waitForStableReadings(ctx context.Context, backend Backend, width, height int, timeout, interval time.Duration, threshold float64) (reading int, err error) {\n\t// Keep the last numReadings for moving average purposes. Make it half the\n\t// size that the current timeout and interval would allow.\n\tnumReadings := int(math.Floor(float64(timeout / (2.0 * interval))))\n\n\tvar currentNumReadings int\n\tvar values = make([]int, numReadings)\n\n\terr = testing.Poll(ctx, func(ctx context.Context) error {\n\t\tvar e error\n\t\treading, e = backend.ReadFramebufferCount(ctx, width, height)\n\t\tif e != nil {\n\t\t\treturn testing.PollBreak(errors.Wrap(e, \"failed measuring\"))\n\t\t}\n\t\tvalues[currentNumReadings%numReadings] = reading\n\t\tcurrentNumReadings++\n\t\tif currentNumReadings < numReadings {\n\t\t\treturn errors.Errorf(\"need more values (got: %d and want: %d)\", currentNumReadings, numReadings)\n\t\t}\n\t\taverage := mean(values)\n\n\t\tif math.Abs(float64(reading)-average) > threshold {\n\t\t\treturn errors.Errorf(\"reading %d is not within %.1f of %.1f\", reading, threshold, average)\n\t\t}\n\t\treturn nil\n\t}, &testing.PollOptions{Timeout: timeout, Interval: interval})\n\treturn reading, err\n}",
"func (fm FinalModelEnumUInt32) Verify() (bool, int) {\n if (fm.buffer.Offset() + fm.FBEOffset() + fm.FBESize()) > fm.buffer.Size() {\n return false, 0\n }\n\n return true, fm.FBESize()\n}",
"func IsBuffer(buffer uint32) bool {\n ret := C.glowIsBuffer(gpIsBuffer, (C.GLuint)(buffer))\n return ret == TRUE\n}",
"func initFramebuffer(width, height int) {\n\tlog.Printf(\"[Video]: Initializing HW render (%v x %v).\\n\", width, height)\n\n\tgl.GenFramebuffers(1, &fboID)\n\tgl.BindFramebuffer(gl.FRAMEBUFFER, fboID)\n\n\t//gl.GenTextures(1, &video.texID)\n\tgl.BindTexture(gl.TEXTURE_2D, texID)\n\tgl.TexStorage2D(gl.TEXTURE_2D, 1, gl.RGBA8, int32(width), int32(height))\n\n\tgl.FramebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texID, 0)\n\n\thw := state.Global.Core.HWRenderCallback\n\n\tgl.BindRenderbuffer(gl.RENDERBUFFER, 0)\n\n\tif gl.CheckFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE {\n\t\tlog.Fatalln(\"[Video] Framebuffer is not complete.\")\n\t}\n\n\tgl.ClearColor(0, 0, 0, 1)\n\tif hw.Depth && hw.Stencil {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT | gl.STENCIL_BUFFER_BIT)\n\t} else if hw.Depth {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)\n\t} else {\n\t\tgl.Clear(gl.COLOR_BUFFER_BIT)\n\t}\n\n\tgl.BindFramebuffer(gl.FRAMEBUFFER, 0)\n}",
"func (fr *Frame) Valid() bool {\n\treturn false\n}",
"func (buf *ListBuffer) IsFull() bool {\n\treturn buf.Count >= MaxBufferCount\n}",
"func CheckScreenshot(ctx context.Context, tconn *chrome.TestConn, downloadsPath string, source CaptureModeSource) error {\n\timageConfig, err := retrieveCaptureImageConfig(downloadsPath)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to get image config\")\n\t}\n\n\tfullScreenBounds, err := calculateCaptureSurfaceFullScreenBounds(ctx, tconn)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to get full screen bounds\")\n\t}\n\n\tswitch source {\n\tcase FullScreen:\n\t\tcompareScreenshotDimensions(imageConfig, fullScreenBounds, source)\n\tcase PartialScreen:\n\t\texpectedScreenShotBounds := coords.NewSize(fullScreenBounds.Width/2, fullScreenBounds.Height/2)\n\t\tcompareScreenshotDimensions(imageConfig, expectedScreenShotBounds, source)\n\tcase Window:\n\t\tactiveWindow, err := ash.GetActiveWindow(ctx, tconn)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to find active window\")\n\t\t}\n\t\texpectedScreenShotBounds := coords.NewSize(activeWindow.BoundsInRoot.Width, activeWindow.BoundsInRoot.Height)\n\t\tcompareScreenshotDimensions(imageConfig, expectedScreenShotBounds, source)\n\tdefault:\n\t\treturn errors.New(\"unknown screenshot type\")\n\t}\n\n\treturn nil\n}",
"func CheckHepevtConsistency(f *os.File) bool {\n\t\n\tif f == nil {\n\t\tf = os.Stdout\n\t}\n\tc_fd := C.int(f.Fd())\n\t_ = f.Sync()\n\tc_mode := C.CString(\"a\")\n\tdefer C.free(unsafe.Pointer(c_mode))\n\tc_f := C.fdopen(c_fd, c_mode)\n\tC.fflush(c_f)\n\n\to := C.hepevt_check_hepevt_consistency(c_f)\n\tC.fflush(c_f)\n\t_ = f.Sync()\n\n\tif o != C.int(0) {\n\t\treturn true\n\t}\n\treturn false\n}",
"func (cca *cookedSyncCmdArgs) scanningComplete() bool {\n\treturn atomic.LoadUint32(&cca.atomicScanningStatus) > 0\n}",
"func (b *box) completed() bool {\n\treturn b.setValues == 9\n}",
"func (_e *MockCompactionPlanContext_Expecter) isFull() *MockCompactionPlanContext_isFull_Call {\n\treturn &MockCompactionPlanContext_isFull_Call{Call: _e.mock.On(\"isFull\")}\n}",
"func (o *HyperflexSnapshotStatus) HasPctComplete() bool {\n\tif o != nil && o.PctComplete != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func (e *ControlleeExpectations) Fulfilled() bool {\n\t// TODO: think about why this line being atomic doesn't matter\n\treturn atomic.LoadInt64(&e.add) <= 0 && atomic.LoadInt64(&e.del) <= 0\n}",
"func compareGraphicsMemoryBeforeAfter(ctx context.Context, payload func() error, backend Backend, roundedWidth, roundedHeight int) (err error) {\n\tvar before, during, after int\n\n\tif before, err = readStableObjectCount(ctx, backend, roundedWidth, roundedHeight); err != nil {\n\t\treturn errors.Wrap(err, \"failed to get the framebuffer object count\")\n\t}\n\n\ttesting.ContextLog(ctx, \"Running the payload() and measuring the number of graphics objects during its execution\")\n\tc := make(chan error)\n\tgo func(c chan error) {\n\t\tc <- payload()\n\t}(c)\n\t// Note: We don't wait for the ReadFramebufferCount() to finish, just keep\n\t// measuring until we get a non-zero value in during, for further comparison\n\t// below.\n\tgo func() {\n\t\tconst pollTimeout = 10 * time.Second\n\t\tconst pollInterval = 100 * time.Millisecond\n\t\t_ = testing.Poll(ctx, func(ctx context.Context) error {\n\t\t\t// TODO(crbug.com/1047514): instead of blindly sampling the amount of\n\t\t\t// objects during the test and comparing them further down, verify them\n\t\t\t// here directly.\n\t\t\tif during, _ = backend.ReadFramebufferCount(ctx, roundedWidth, roundedHeight); during == before {\n\t\t\t\treturn errors.New(\"Still waiting for graphics objects\")\n\t\t\t}\n\t\t\treturn nil\n\t\t}, &testing.PollOptions{Timeout: pollTimeout, Interval: pollInterval})\n\t}()\n\terr = <-c\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif after, err = readStableObjectCount(ctx, backend, roundedWidth, roundedHeight); err != nil {\n\t\treturn errors.Wrap(err, \"failed to get the framebuffer object count\")\n\t}\n\tif before != after {\n\t\treturn errors.Wrapf(err, \"graphics objects of size %d x %d do not coincide: before=%d, after=%d\", roundedWidth, roundedHeight, before, after)\n\t}\n\tif during == before {\n\t\treturn errors.Wrapf(err, \"graphics objects of size %d x %d did not increase during play back: before=%d, during=%d\", roundedWidth, roundedHeight, before, during)\n\t}\n\ttesting.ContextLogf(ctx, \"Graphics objects of size %d x %d before=%d, during=%d, after=%d\", roundedWidth, roundedHeight, before, during, after)\n\treturn nil\n}",
"func (f Frame) Valid() bool {\n\treturn f != InvalidFrame\n}",
"func ScreenshotIsTaken(ctx context.Context, s *testing.State) {\n\tcr := s.FixtValue().(*chrome.Chrome)\n\n\tcleanupCtx := ctx\n\tctx, cancel := ctxutil.Shorten(ctx, 5*time.Second)\n\tdefer cancel()\n\n\ttconn, err := cr.TestAPIConn(ctx)\n\tif err != nil {\n\t\ts.Fatal(\"Failed to connect to Test API: \", err)\n\t}\n\tdefer faillog.DumpUITreeWithScreenshotOnError(cleanupCtx, s.OutDir(), s.HasError, cr,\n\t\t\"ui_dump\")\n\n\tui := uiauto.New(tconn).WithTimeout(20 * time.Second)\n\n\t// Launch feedback app and go to share data page.\n\tfeedbackRootNode, err := feedbackapp.LaunchAndGoToShareDataPage(ctx, tconn)\n\tif err != nil {\n\t\ts.Fatal(\"Failed to launch feedback app and go to share data page: \", err)\n\t}\n\n\t// Verify screenshot checkbox and image exist.\n\t// Verify clicking screenshot will open screenshot diaglog.\n\tscreenshotCheckBox := nodewith.Name(\"Screenshot\").Role(role.CheckBox).Ancestor(\n\t\tfeedbackRootNode)\n\tscreenshotImg := nodewith.Role(role.Image).Ancestor(feedbackRootNode)\n\tscreenshotDialog := nodewith.Role(role.Dialog).Ancestor(feedbackRootNode).First()\n\n\tif err := uiauto.Combine(\"Verify screenshot exists\",\n\t\tui.WaitUntilExists(screenshotCheckBox),\n\t\tui.DoDefault(screenshotImg),\n\t\tui.WaitUntilExists(screenshotDialog),\n\t)(ctx); err != nil {\n\t\ts.Fatal(\"Failed to verify screenshot exists: \", err)\n\t}\n\n\t// Verify clicking screenshot button will close screenshot diaglog.\n\tscreenshotButton := nodewith.Name(\"Back\").Role(role.Button).Ancestor(feedbackRootNode)\n\n\tif err := uiauto.Combine(\"Verify clicking screenshot button closes dialog\",\n\t\tui.DoDefault(screenshotButton),\n\t\tui.WaitUntilGone(screenshotDialog),\n\t)(ctx); err != nil {\n\t\ts.Fatal(\"Failed to verify clicking screenshot button closes dialog: \", err)\n\t}\n}",
"func (bg *bufferedGroup) haveCompleteGroup() bool {\n if len(bg.images) == 0 {\n log.Panicf(\"a buffered group should never be empty\")\n }\n\n return bg.firstTimeKey != bg.lastTimeKey\n}",
"func qr_decoder_is_busy(p _QrDecoderHandle) int {\n\tv := C.qr_decoder_is_busy(C.QrDecoderHandle(p))\n\treturn int(v)\n}",
"func waitForCompletion(sensor SensorInterface, i2c *i2c.I2C) (timeout bool, err error) {\n\tfor i := 0; i < 10; i++ {\n\t\tflag, err := sensor.IsBusy(i2c)\n\t\tif err != nil {\n\t\t\treturn false, err\n\t\t}\n\t\tif flag == false {\n\t\t\treturn false, nil\n\t\t}\n\t\ttime.Sleep(5 * time.Millisecond)\n\t}\n\treturn true, nil\n}",
"func (d *Driver) fingerprintSuccessful() bool {\n\td.fingerprintLock.Lock()\n\tdefer d.fingerprintLock.Unlock()\n\treturn d.fingerprintSuccess == nil || *d.fingerprintSuccess\n}",
"func (event *mappingEvent) isFull() bool {\n\treturn event.pending > full\n}",
"func (receiver *Receiver) checkFrame() error {\n\tvar frame uint64\n\tif receiver.transaction != nil {\n\t\tframe = receiver.transaction.frame\n\t} else {\n\t\tvar err error\n\t\tframe, err = receiver.meta.GetUint64(rxFrame)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tframe++\n\tframePath, err := receiver.storage.Frame(frame)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif _, err := os.Stat(framePath); err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\treturn io.EOF\n\t\t}\n\t\treturn err\n\t}\n\treturn nil\n}",
"func (renderbuffer Renderbuffer) IsRenderbuffer() bool {\n\treturn gl.IsRenderbuffer(uint32(renderbuffer))\n}",
"func (o *HyperflexSnapshotStatus) GetPctCompleteOk() (*int64, bool) {\n\tif o == nil || o.PctComplete == nil {\n\t\treturn nil, false\n\t}\n\treturn o.PctComplete, true\n}",
"func (f *fragment) awaitSnapshot() {\n\tf.mu.Lock()\n\tdefer f.mu.Unlock()\n\tfor f.snapshotting {\n\t\tf.snapshotCond.Wait()\n\t}\n}",
"func (rgba *rgba) checker() error {\n\tif rgba.Hex == \"\" {\n\t\treturn fmt.Errorf(\"this is an uninitialised rgba\")\n\t}\n\treturn nil\n}",
"func (f *framer) messageReady() (int, bool) {\n\tif len(f.buffer) < 2 {\n\t\treturn 0, false\n\t}\n\tmsgSize := int(binary.LittleEndian.Uint16(f.buffer[:2]))\n\n\treturn msgSize, len(f.buffer) >= msgSize+2\n}",
"func (s *BaseEvent) BSuccess() bool {\n if !s.sysParamsExtracted { panic(\"!s.sysParamsExtracted\"); }\n return (s.sysParams.BSuccess == 1)\n}",
"func checkMagConnection() bool {\n\tsetSetting(0x25, 0x0C|0x80)\n\tsetSetting(0x26, 0x00)\n\tsetSetting(0x27, 0x81) // Read one byte.\n\n\ttime.Sleep(100 * time.Microsecond)\n\n\tr, err := i2cbus.ReadByteFromReg(0x68, 0x49)\n\tchkErr(err)\n\n\tret := r == 0x48\n\n\t// Read calibration data.\n\tsetSetting(0x25, 0x0C|0x80)\n\tsetSetting(0x26, 0x10)\n\tsetSetting(0x27, 0x83) // Read three bytes, (CalX, CalY, CalZ).\n\n\tmxcal, err := i2cbus.ReadByteFromReg(0x68, 0x49)\n\tchkErr(err)\n\tmycal, err := i2cbus.ReadByteFromReg(0x68, 0x4A)\n\tchkErr(err)\n\tmzcal, err := i2cbus.ReadByteFromReg(0x68, 0x4B)\n\tchkErr(err)\n\n\tmagXcal = (float64(mxcal)-128)/256.0 + 1.0\n\tmagYcal = (float64(mycal)-128)/256.0 + 1.0\n\tmagZcal = (float64(mzcal)-128)/256.0 + 1.0\n\n\treturn ret\n}",
"func (o *V0037JobProperties) GetGpuBindingOk() (*string, bool) {\n\tif o == nil || o.GpuBinding == nil {\n\t\treturn nil, false\n\t}\n\treturn o.GpuBinding, true\n}",
"func (f *Sink) HasReady() bool {\n\treturn f.readyList.Len() != 0\n}",
"func (d *decoder) finished() bool {\n\treturn d.br.finished() && d.dt[d.state].nbBits > 0\n}",
"func (p *Prober) isReady() bool {\n\tready := atomic.LoadUint32(&p.ready)\n\treturn ready > 0\n}",
"func (self VideoMode) IsValid() bool {\n\treturn int(C.sfVideoMode_isValid(*self.Cref)) == 1\n}",
"func IsRenderbuffer(renderbuffer uint32) bool {\n\tret, _, _ := syscall.Syscall(gpIsRenderbuffer, 1, uintptr(renderbuffer), 0, 0)\n\treturn ret != 0\n}",
"func (DrawTexture) IsDrawAction() {}",
"func (g GenericBackend) ReadFramebufferCount(ctx context.Context, width, height int) (framebuffers int, e error) {\n\tf, err := os.Open(fmt.Sprintf(genericFramebufferFilePattern, g.index))\n\tif err != nil {\n\t\treturn framebuffers, errors.Wrap(err, \"failed to open dri file\")\n\t}\n\n\ttext, err := ioutil.ReadAll(f)\n\tif err != nil {\n\t\treturn framebuffers, errors.Wrap(err, \"failed to read dri file\")\n\t}\n\tlines := strings.Split(string(text), \"\\n\")\n\tfor _, line := range lines {\n\t\t// The line we're looking for looks like \"...size=1920x1080\"\n\t\tvar fbWidth, fbHeight int\n\t\tif _, err := fmt.Sscanf(line, \" size=%dx%d\", &fbWidth, &fbHeight); err != nil {\n\t\t\tcontinue\n\t\t}\n\t\tif fbWidth == width && fbHeight == height {\n\t\t\tframebuffers++\n\t\t}\n\t}\n\treturn\n}",
"func IsRenderbuffer(renderbuffer uint32) bool {\n\tret := C.glowIsRenderbuffer(gpIsRenderbuffer, (C.GLuint)(renderbuffer))\n\treturn ret == TRUE\n}",
"func IsRenderbuffer(renderbuffer uint32) bool {\n\tret := C.glowIsRenderbuffer(gpIsRenderbuffer, (C.GLuint)(renderbuffer))\n\treturn ret == TRUE\n}",
"func CheckFsCreationInProgress(device model.Device) (inProgress bool, err error) {\n\treturn false, fmt.Errorf(\"FS progress check is not implemented for Mac\")\n}",
"func (boardService *BoardService) CheckBoardIsFull() bool {\n\tflag := false\n\tvar i uint8\n\t//Loops through all cells and checks for NoMark, if at the end of loop, all elements have been marked, then board is full\n\tfor i = 0; i < boardService.Board.Size; i++ {\n\t\tif boardService.Board.Cells[i].Mark != components.NoMark {\n\t\t\tflag = true\n\t\t} else {\n\t\t\tflag = false\n\t\t}\n\t}\n\treturn flag\n}",
"func (g I915Backend) ReadFramebufferCount(ctx context.Context, width, height int) (framebuffers int, e error) {\n\tf, err := os.Open(i915FramebufferFile)\n\tif err != nil {\n\t\treturn framebuffers, errors.Wrap(err, \"failed to open dri file\")\n\t}\n\ttext, err := ioutil.ReadAll(f)\n\tif err != nil {\n\t\treturn framebuffers, errors.Wrap(err, \"failed to read dri file\")\n\t}\n\tlines := strings.Split(string(text), \"\\n\")\n\tfor _, line := range lines {\n\t\t// The line we're looking for looks like \"user size: 1920 x 1080,...\"\n\t\tvar fbWidth, fbHeight int\n\t\tif _, err := fmt.Sscanf(line, \"user size: %d x %d\", &fbWidth, &fbHeight); err != nil {\n\t\t\tcontinue\n\t\t}\n\t\tif fbWidth == width && fbHeight == height {\n\t\t\tframebuffers++\n\t\t}\n\t}\n\treturn\n}",
"func (o *UcsdBackupInfoAllOf) HasPercentageCompletion() bool {\n\tif o != nil && o.PercentageCompletion != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func (i *ImageBuf) Initialized() bool {\n\tif i.ptr == nil {\n\t\treturn false\n\t}\n\tret := bool(C.ImageBuf_initialized(i.ptr))\n\truntime.KeepAlive(i)\n\treturn ret\n}",
"func (debugging *debuggingOpenGL) BindFramebuffer(target uint32, buffer uint32) {\n\tdebugging.recordEntry(\"BindFramebuffer\", target, buffer)\n\tdebugging.gl.BindFramebuffer(target, buffer)\n\tdebugging.recordExit(\"BindFramebuffer\")\n}",
"func processFrame(b *bytes.Buffer) (bool, int) {\n\tmagic := make([]byte, len(magicBytes))\n\tb.Read(magic)\n\n\tlenBytes := make([]byte, 4)\n\tb.Read(lenBytes)\n\n\treturn bytes.Equal(magic, magicBytes), int(binary.BigEndian.Uint32(lenBytes))\n}",
"func (cs *cpuState) Framebuffer() []byte {\n\treturn cs.LCD.framebuffer[:]\n}",
"func IsRenderbuffer(renderbuffer Uint) Boolean {\n\tcrenderbuffer, _ := (C.GLuint)(renderbuffer), cgoAllocsUnknown\n\t__ret := C.glIsRenderbuffer(crenderbuffer)\n\t__v := (Boolean)(__ret)\n\treturn __v\n}",
"func fileStatus(dasquery dasql.DASQuery) bool {\n\tspec := dasquery.Spec\n\tstatus := spec[\"status\"]\n\tif status != nil {\n\t\tval := status.(string)\n\t\tif strings.ToLower(val) == \"valid\" {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}",
"func (cb *Buffer) WriteAvailability() int {\n\tif cb.full {\n\t\treturn 0\n\t}\n\n\tif cb.wpos <= cb.rpos {\n\t\treturn len(cb.buffer) - cb.rpos + cb.wpos\n\t}\n\n\treturn cb.wpos - cb.rpos\n}",
"func IsBuffer(buffer uint32) bool {\n\tret := C.glowIsBuffer(gpIsBuffer, (C.GLuint)(buffer))\n\treturn ret == TRUE\n}",
"func IsBuffer(buffer uint32) bool {\n\tret := C.glowIsBuffer(gpIsBuffer, (C.GLuint)(buffer))\n\treturn ret == TRUE\n}",
"func flagsComplete() (allValid bool, err string) {\n\tallValid = true\n\tif srcAddress == \"\" {\n\t\terr = err + \"Invalid source address.\\n\"\n\t\tallValid = false\n\t}\n\n\treturn allValid, err\n}",
"func (display smallEpd) waitUntilIdle() (err error) {\n\tlog.Debug(\"EPD42 WaitUntilIdle\")\n\tfor {\n\t\tbusy, err := display.driver.DigitalRead(display.BUSY)\n\t\tif !busy {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"Error checking bust %s\\n\", err.Error())\n\t\t}\n\t\tfmt.Printf(\".\")\n\t\ttime.Sleep(200 * time.Millisecond)\n\t}\n\tlog.Debug(\"EPD42 WaitUntilIdle End\")\n\treturn\n}",
"func (r *RingBuffer) IsFull() bool {\n\tr.mu.Lock()\n\tdefer r.mu.Unlock()\n\n\treturn r.isFull\n}",
"func (r *RingBuffer) IsFull() bool {\n\tr.mu.Lock()\n\tdefer r.mu.Unlock()\n\n\treturn r.isFull\n}",
"func (px *Pex) full() bool {\n\treturn px.maxPeers > 0 && len(px.peers) >= px.maxPeers\n}",
"func (o *UcsdBackupInfoAllOf) GetPercentageCompletionOk() (*int64, bool) {\n\tif o == nil || o.PercentageCompletion == nil {\n\t\treturn nil, false\n\t}\n\treturn o.PercentageCompletion, true\n}",
"func (fail *failingState) HitOk() bool {\n\tif fail.toggle == 0 {\n\t\treturn true\n\t}\n\n\tfail.mtx.Lock()\n\tdefer fail.mtx.Unlock()\n\n\tif fail.toggle <= fail.count {\n\t\tfail.up = !fail.up\n\t\tfail.count = 0\n\t}\n\tfail.count++\n\tlog.Printf(\"up: %t count: %d next flip: %d\\n\", fail.up, fail.count, fail.toggle)\n\treturn fail.up\n}",
"func (r pciResource) valid() bool {\n\treturn r.flags != 0 && r.start != 0 && r.end != 0\n}",
"func waitForScanStatus(t *testing.T, f *framework.Framework, namespace, name string, targetStaus complianceoperatorv1alpha1.ComplianceScanStatusPhase) error {\n\texampleComplianceScan := &complianceoperatorv1alpha1.ComplianceScan{}\n\tvar lastErr error\n\t// retry and ignore errors until timeout\n\ttimeouterr := wait.Poll(retryInterval, timeout, func() (bool, error) {\n\t\tlastErr = f.Client.Get(goctx.TODO(), types.NamespacedName{Name: name, Namespace: namespace}, exampleComplianceScan)\n\t\tif lastErr != nil {\n\t\t\tif apierrors.IsNotFound(lastErr) {\n\t\t\t\tt.Logf(\"Waiting for availability of %s compliancescan\\n\", name)\n\t\t\t\treturn false, nil\n\t\t\t}\n\t\t\tt.Logf(\"Retrying. Got error: %v\\n\", lastErr)\n\t\t\treturn false, nil\n\t\t}\n\n\t\tif exampleComplianceScan.Status.Phase == targetStaus {\n\t\t\treturn true, nil\n\t\t}\n\t\tt.Logf(\"Waiting for run of %s compliancescan (%s)\\n\", name, exampleComplianceScan.Status.Phase)\n\t\treturn false, nil\n\t})\n\t// Error in function call\n\tif lastErr != nil {\n\t\treturn lastErr\n\t}\n\t// Timeout\n\tif timeouterr != nil {\n\t\treturn timeouterr\n\t}\n\tt.Logf(\"ComplianceScan ready (%s)\\n\", exampleComplianceScan.Status.Phase)\n\treturn nil\n}",
"func (e *Edge) isComplete() bool {\n\treturn len(e.ClientService) != 0 && len(e.ServerService) != 0\n}",
"func (n number) valid(img image.Image, offset int) float64 {\n\n\tmatching := 0.0\n\n\tfor _, point := range n.highlighted {\n\t\tr, g, b, _ := img.At(offset-n.width+point.X, point.Y).RGBA()\n\t\tif r == 0 && g == 0 && b == 0 {\n\t\t\tmatching += 1.0\n\t\t}\n\t}\n\n\tpercentMatching := matching / float64(len(n.highlighted))\n\n\t// log.Printf(\"%d matched %f percent\\n\", n.value, percentMatching*100)\n\n\treturn percentMatching\n}",
"func (batch *Batch) IsFull() bool {\n\treturn len(batch.messages) == batch.maxSize\n}",
"func cursorCompleted(cursor int, boundary int) bool {\n\treturn cursor >= boundary\n}",
"func (l *Level) IsFull() bool {\n\tif l.Balls.Length() >= l.MaxBalls {\n\t\treturn true\n\t}\n\treturn false\n}",
"func waitForCompletion(done chan bool) bool {\n\ttimer := time.NewTimer(totalWaitTime)\n\tdefer timer.Stop()\n\tselect {\n\tcase <-done:\n\t\treturn true\n\tcase <-timer.C:\n\t\treturn false\n\t}\n}",
"func (o *ApplianceImageBundleAllOf) GetFingerprintOk() (*string, bool) {\n\tif o == nil || o.Fingerprint == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Fingerprint, true\n}",
"func (b *BlockChecker) FullCheck(t *testing.T) {\n\tseqChecker := &functest.SeqRFuncChecker{\n\t\tF: &rnn.BlockSeqFunc{B: b.B},\n\t\tInput: b.Input,\n\t\tVars: b.Vars,\n\t\tRV: b.RV,\n\t\tDelta: b.Delta,\n\t\tPrec: b.Prec,\n\t}\n\tseqChecker.FullCheck(t)\n\tb.testNilUpstream(t)\n\tb.testNilUpstreamR(t)\n}",
"func checkNotActive() error {\n\toperationDetails := activeOperation\n\tif operationDetails != nil {\n\t\tselect {\n\t\tcase <-operationDetails.exportDone:\n\t\t\t// nil-out any stale operation\n\t\t\tactiveOperation = nil\n\t\tdefault:\n\t\t\tif operationDetails.isRestore {\n\t\t\t\treturn fmt.Errorf(\"restore operation already in progress for height %d\", operationDetails.blockHeight)\n\t\t\t} else {\n\t\t\t\treturn fmt.Errorf(\"export operation already in progress for height %d\", operationDetails.blockHeight)\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}",
"func checkBluetooth() {\n\t// init part: get the list of paired bluetooth devices\n\tresult, err := exec.Command(\"bluetoothctl\", \"devices\").Output()\n\tif err != nil {\n\t\tlogger.Error(err.Error())\n\t} else {\n\t\tarr := strings.Split(string(result), \"\\n\")\n\t\tlogger.Info(\"BT Devices paired:\")\n\t\tfor _, s := range arr {\n\t\t\tparts := strings.Split(s, \" \")\n\t\t\tif len(parts) > 1 {\n\t\t\t\tinfo, err2 := exec.Command(\"bluetoothctl\", \"info\", parts[1]).Output()\n\t\t\t\tif err2 == nil {\n\t\t\t\t\tif strings.Contains(string(info), \"Audio Sink\") {\n\t\t\t\t\t\tbtDevices = append(btDevices, parts[1])\n\t\t\t\t\t\tlogger.Info(parts[1])\n\t\t\t\t\t\tif strings.Contains(string(info), \"Connected: yes\") {\n\t\t\t\t\t\t\tlogger.Info(\"BT connected to \" + parts[1])\n\t\t\t\t\t\t\tbluetoothConnected = true\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treadyForMplayer = true\n}",
"func BindFramebuffer(target Enum, framebuffer Uint) {\n\tctarget, _ := (C.GLenum)(target), cgoAllocsUnknown\n\tcframebuffer, _ := (C.GLuint)(framebuffer), cgoAllocsUnknown\n\tC.glBindFramebuffer(ctarget, cframebuffer)\n}",
"func (f *Framebuffer) useState() {\n\t// Bind the framebuffer now.\n\tif f.ctx.fastBindFramebuffer(f.o) {\n\t\tf.GLCall(nil)\n\t}\n\tf.GLCall(f.Loaded)\n}",
"func (native *OpenGL) BlitFramebuffer(srcX0 int32, srcY0 int32, srcX1 int32, srcY1 int32, dstX0 int32, dstY0 int32, dstX1 int32, dstY1 int32, mask uint32, filter uint32) {\n\tgl.BlitFramebuffer(srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter)\n}",
"func (b *board) checkDraw() bool {\n\tfor r := 0; r < size; r++ {\n\t\tfor c := 0; c < size; c++ {\n\t\t\tif b.cells[c][r] == N {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t}\n\treturn true\n}"
] | [
"0.73902386",
"0.70568424",
"0.7014738",
"0.7010243",
"0.6902235",
"0.667687",
"0.66522145",
"0.6511212",
"0.64931464",
"0.64931464",
"0.63167614",
"0.5976161",
"0.5891553",
"0.5793179",
"0.5793179",
"0.5653093",
"0.5625014",
"0.5617161",
"0.54864055",
"0.5478664",
"0.5378006",
"0.53290814",
"0.52393013",
"0.51871127",
"0.5153975",
"0.5151512",
"0.5145303",
"0.51449996",
"0.51117486",
"0.5098514",
"0.50677943",
"0.5034877",
"0.50336725",
"0.50129783",
"0.5012211",
"0.50117946",
"0.50001514",
"0.49938452",
"0.4976653",
"0.49727038",
"0.49702755",
"0.49673215",
"0.4963245",
"0.49347624",
"0.49044496",
"0.4897185",
"0.48903227",
"0.48809195",
"0.4880019",
"0.48736337",
"0.48730546",
"0.4866454",
"0.48380953",
"0.48180422",
"0.48128802",
"0.48107538",
"0.48074397",
"0.48052686",
"0.48047888",
"0.4801357",
"0.479488",
"0.47917062",
"0.47917062",
"0.479096",
"0.47758576",
"0.47743586",
"0.47654048",
"0.476374",
"0.47612998",
"0.47595897",
"0.47473902",
"0.47456458",
"0.47416794",
"0.47415408",
"0.47305825",
"0.47305825",
"0.47290164",
"0.47286296",
"0.47275797",
"0.47275797",
"0.47262785",
"0.47237313",
"0.47179312",
"0.4717864",
"0.4717213",
"0.47159114",
"0.47135425",
"0.47052327",
"0.46939296",
"0.46746382",
"0.4654753",
"0.46509048",
"0.46498471",
"0.46489275",
"0.46435505",
"0.4642507",
"0.46419337",
"0.46404696",
"0.4634363"
] | 0.6420987 | 11 |
specify whether data read via glReadPixels should be clamped | func ClampColor(target uint32, clamp uint32) {
C.glowClampColor(gpClampColor, (C.GLenum)(target), (C.GLenum)(clamp))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func clamp(val float64) float64 {\n\treturn math.Max(-1, math.Min(1, val))\n}",
"func clamp(n, min, max int16) int16 {\n\tif n < min {\n\t\treturn min\n\t} else if n > max {\n\t\treturn max\n\t}\n\n\treturn n\n}",
"func clamp(min, max, v int) int {\n\tif v < min {\n\t\tmin = v\n\t} else if v > max {\n\t\tmax = v\n\t}\n\n\treturn v\n}",
"func clamp(x float64) float64 {\n\tif x > 1 {\n\t\treturn 1\n\t}\n\treturn x\n}",
"func (w *WidgetImplement) Clamp() [2]bool {\n\treturn w.clamp\n}",
"func clamp(v, min, max int) int {\n\tif v > max {\n\t\treturn max\n\t}\n\tif v < min {\n\t\treturn min\n\t}\n\treturn v\n}",
"func clamp(i int32) int32 {\n\tif i < 0 {\n\t\treturn 0\n\t}\n\tif i > 0xffff {\n\t\treturn 0xffff\n\t}\n\treturn i\n}",
"func clampRange(low, size, max int) (from, upto int) {\n\tfrom, upto = low, low+size\n\tif from > max {\n\t\tfrom = max\n\t}\n\tif upto > max {\n\t\tupto = max\n\t}\n\treturn\n}",
"func clampval(x float32) float32 {\n\tif math.Abs(float64(x)) < 0.01 {\n\t\treturn 0.0\n\t}\n\n\tif x < -0.99 {\n\t\treturn -1.0\n\t} else if x > 0.99 {\n\t\treturn 1.0\n\t}\n\treturn x\n}",
"func funcClamp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {\n\tvec := vals[0].(Vector)\n\tmin := vals[1].(Vector)[0].F\n\tmax := vals[2].(Vector)[0].F\n\tif max < min {\n\t\treturn enh.Out\n\t}\n\tfor _, el := range vec {\n\t\tenh.Out = append(enh.Out, Sample{\n\t\t\tMetric: enh.DropMetricName(el.Metric),\n\t\t\tF: math.Max(min, math.Min(max, el.F)),\n\t\t})\n\t}\n\treturn enh.Out\n}",
"func isLimitBandwidthChange() bool {\n\ttempBW, err := ReadBandwithLimitFile()\n\tif err == nil {\n\t\tif tempBW != throttle {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}",
"func convertibleToInt64(v model.SampleValue) bool {\n\treturn v <= maxInt64 && v >= minInt64\n}",
"func (w *Whisper) Overflow() bool {\n\tval, _ := w.settings.Load(overflowIdx)\n\treturn val.(bool)\n}",
"func (v ivec2) readingLess(v1 ivec2) bool {\n\tif v.y == v1.y {\n\t\treturn v.x < v1.x\n\t}\n\treturn v.y < v1.y\n}",
"func clamp(rs []intRange, low, high int) []intRange {\n\tresult := make([]intRange, 0, len(rs))\n\tfor _, r := range rs {\n\t\tl, h := max(low, r.low), min(high, r.high)\n\t\tif l > h { // wrap-around\n\t\t\tresult = append(result, intRange{low: l, high: high, step: r.step},\n\t\t\t\tintRange{low: low, high: h, step: r.step})\n\t\t} else {\n\t\t\tresult = append(result, intRange{low: l, high: h, step: r.step})\n\t\t}\n\t}\n\treturn result\n}",
"func (me TaltitudeModeEnumType) IsClampToGround() bool { return me == \"clampToGround\" }",
"func funcClampMin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {\n\tvec := vals[0].(Vector)\n\tmin := vals[1].(Vector)[0].F\n\tfor _, el := range vec {\n\t\tenh.Out = append(enh.Out, Sample{\n\t\t\tMetric: enh.DropMetricName(el.Metric),\n\t\t\tF: math.Max(min, el.F),\n\t\t})\n\t}\n\treturn enh.Out\n}",
"func readingLess(a, b *point) bool {\n\tswitch {\n\tcase a.y < b.y:\n\t\treturn true\n\tcase a.y == b.y && a.x < b.x:\n\t\treturn true\n\tdefault:\n\t\treturn false\n\t}\n}",
"func processSensorData() (sensorRead sensorReadType, pos int, hint int, cross bool, out bool) {\n\tread()\n\tf, l, r, b := trimSensor(cB), trimSensor(cL), trimSensor(cR), trimSensor(cB)\n\n\tsensorRead = sensorReadZero\n\tif isOnTrack(b) {\n\t\tsensorRead |= bitB\n\t}\n\tif isOnTrack(r) {\n\t\tsensorRead |= bitR\n\t}\n\tif isOnTrack(l) {\n\t\tsensorRead |= bitL\n\t}\n\tif isOnTrack(f) {\n\t\tsensorRead |= bitF\n\t}\n\n\tswitch sensorRead {\n\tcase sensorReadZero:\n\tcase sensorReadB:\n\tcase sensorReadF:\n\t\t// Out\n\t\tout = true\n\t\tpos, hint, cross = 0, 0, false\n\tcase sensorReadR:\n\t\tpos = conf.SensorRadius*2 + distanceFromSensor(r)\n\t\thint = 0\n\t\tcross, out = false, false\n\tcase sensorReadRB:\n\t\tpos = conf.SensorRadius + positionBetweenSensors(b, r)\n\t\thint = 1\n\t\tcross, out = false, false\n\tcase sensorReadL:\n\t\tpos = -conf.SensorRadius*2 - distanceFromSensor(l)\n\t\thint = 0\n\t\tcross, out = false, false\n\tcase sensorReadLB:\n\t\tpos = -conf.SensorRadius + positionBetweenSensors(l, b)\n\t\thint = 1\n\t\tcross, out = false, false\n\tcase sensorReadLR:\n\tcase sensorReadLRB:\n\tcase sensorReadFLRB:\n\tcase sensorReadFLR:\n\t\t// Cross\n\t\tcross = true\n\t\tpos, hint, out = 0, 0, false\n\tcase sensorReadFB:\n\t\tpos = 0\n\t\thint = 0\n\t\tcross, out = false, false\n\tcase sensorReadFR:\n\t\tpos = conf.SensorRadius + positionBetweenSensors(f, r)\n\t\thint = -1\n\t\tcross, out = false, false\n\tcase sensorReadFRB:\n\t\tpos = conf.SensorRadius + positionBetweenSensors((f+b)/2, r)\n\t\thint = 0\n\t\tcross, out = false, false\n\tcase sensorReadFL:\n\t\tpos = -conf.SensorRadius + positionBetweenSensors(l, f)\n\t\thint = 0\n\t\tcross, out = false, false\n\tcase sensorReadFLB:\n\t\tpos = -conf.SensorRadius + positionBetweenSensors(l, (f+b)/2)\n\t\thint = 0\n\t\tcross, out = false, false\n\tdefault:\n\t\tprint(\"Error: reading\", sensorRead)\n\t}\n\n\treturn\n}",
"func clampDeltaTime(in shared.MDuration) shared.MDuration {\n\tmaxDT := shared.MDuration{shared.MAX_DT}\n\n\tif in.Milliseconds() < 0 || in.Milliseconds() > maxDT.Milliseconds() {\n\t\treturn maxDT\n\t}\n\n\treturn in\n}",
"func (s *Stream) willRead(n uint64) error {\n\ts.kind = -1 // rearm / re-initialize Kind\n\tif len(s.stack) > 0 {\n\t\ttos := s.stack[len(s.stack)-1]\n\t\t// read size cannot greater than the size of the list\n\t\tif n > tos.size-tos.pos {\n\t\t\treturn ErrElemTooLarge\n\t\t}\n\t\t// change the list position\n\t\ts.stack[len(s.stack)-1].pos += n\n\t}\n\tif s.limited {\n\n\t\tif n > s.remaining {\n\t\t\treturn ErrValueTooLarge\n\t\t}\n\t\ts.remaining -= n\n\t}\n\treturn nil\n}",
"func funcClampMax(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {\n\tvec := vals[0].(Vector)\n\tmax := vals[1].(Vector)[0].F\n\tfor _, el := range vec {\n\t\tenh.Out = append(enh.Out, Sample{\n\t\t\tMetric: enh.DropMetricName(el.Metric),\n\t\t\tF: math.Min(max, el.F),\n\t\t})\n\t}\n\treturn enh.Out\n}",
"func (r *readerWithStats) MustReadFull(data []byte) {\n\tfs.MustReadData(r.r, data)\n\tr.bytesRead += uint64(len(data))\n}",
"func (p *Plane3D) isBounded() bool {\n\n\treturn false\n}",
"func TestPartitionReader__Range(t *testing.T) {\n\tengine, _ := open(nil)\n\tpart, _ := initPartition(\"test.partition\", engine)\n\n\tb := make([]byte, 100)\n\n\t// Emulate different time points.\n\tpart.Write(1, b)\n\tpart.Write(3, b)\n\tpart.Write(5, b)\n\n\tvar (\n\t\tn int\n\t\terr error\n\t\tr *partitionReader\n\t)\n\n\tbuf := make([]byte, 1000)\n\n\t// Upper bound\n\tr = part.Reader(0, 2)\n\n\tassert.Equal(t, r.index, 0)\n\tassert.Equal(t, r.stop, 1)\n\n\tn, err = r.Read(buf)\n\n\tassert.Equal(t, 100, n)\n\tassert.Equal(t, io.EOF, err)\n\n\t// Lower bound\n\tr = part.Reader(2, 0)\n\n\tassert.Equal(t, r.index, 1)\n\tassert.Equal(t, r.stop, 3)\n\n\tn, err = r.Read(buf)\n\n\tassert.Equal(t, 200, n)\n\tassert.Equal(t, io.EOF, err)\n\n\t// Slice\n\tr = part.Reader(2, 4)\n\n\tassert.Equal(t, r.index, 1)\n\tassert.Equal(t, r.stop, 2)\n\n\tn, err = r.Read(buf)\n\n\tassert.Equal(t, 100, n)\n\tassert.Equal(t, io.EOF, err)\n\n\t// Out of range\n\tr = part.Reader(6, 0)\n\n\tn, err = r.Read(buf)\n\n\tassert.Equal(t, 0, n)\n\tassert.Equal(t, io.EOF, err)\n}",
"func (r *Reader) setRate(lim rate.Limit) bool {\n\tmin := rate.Every(r.starvationLatency)\n\tmax := r.globalLimit.Limit()\n\n\tif lim < min {\n\t\tlim = min\n\t} else if lim > max {\n\t\tlim = max\n\t}\n\n\tprev := r.adaptiveLimit.Limit()\n\n\tif lim != prev {\n\t\tr.adaptiveLimit.SetLimit(lim)\n\t\treturn true\n\t}\n\n\treturn false\n}",
"func (s *ClampDirectionOffset) Min() float64 {\n\treturn s.min\n}",
"func constrain(input, min, max float64) float32 {\n\tvar value float32\n\tvalue = float32(math.Min(max, math.Max(min, input)))\n\treturn value\n}",
"func (r *Reader) Limit(lch chan int) <-chan bool {\n\tdone := make(chan bool, 1)\n\tready := make(chan struct{})\n\tr.newLimit <- &limit{\n\t\tlim: lch,\n\t\tdone: done,\n\t\tready: ready,\n\t}\n\t<-ready\n\treturn done\n}",
"func ReadNormal(r Reader) float32 {\n\t// sign bit\n\tif ReadBool(r) {\n\t\treturn float32(r.ReadBits(11)) * normal_divisor\n\t} else {\n\t\treturn -float32(r.ReadBits(11)) * normal_divisor\n\t}\n}",
"func (c Conn) LimitedRead(b []byte) (int, error) {\n\tr := io.LimitReader(c.Conn, c.maxReadBuffer)\n\treturn r.Read(b)\n}",
"func (r *Reader) adjustRate() bool {\n\tlatency := r.effectiveLatency()\n\n\t// headroom is the difference between the acceptable latency and the\n\t// effective latency. If the headroom is positive, we're doing 'better' than\n\t// the acceptable latency and can backoff the poll rate.\n\theadroom := r.acceptableLatency - latency\n\n\t// don't back off if our headroom is less than 25%\n\t// if headroom > 0 && headroom < r.acceptableLatency/25 {\n\t// \treturn false\n\t// }\n\n\t// Get the current rate in terms of an interval.\n\tcurrentInterval := metrics.RateToDuration(\n\t\tr.adaptiveLimit.Limit(),\n\t)\n\n\treturn r.setRate(\n\t\trate.Every(currentInterval + headroom),\n\t)\n}",
"func isReadLimitReached(bytesLoaded int64, linesLoaded int64, logFilePosition string) bool {\n\treturn (logFilePosition == logs.Beginning && bytesLoaded >= byteReadLimit) ||\n\t\t(logFilePosition == logs.End && linesLoaded >= lineReadLimit)\n}",
"func checkReadQuality(read *sam.Record) bool {\n\tif int(read.MapQ) < MinMapQuality || read.Len() < MinReadLength {\n\t\treturn false\n\t}\n\n\t//\t\tfor _, cigar := range read.Cigar {\n\t//\t\t\tif cigar.Type() != sam.CigarMatch && cigar.Type() != sam.CigarSoftClipped {\n\t//\t\t\t\treturn false\n\t//\t\t\t}\n\t//\t\t}\n\t\n return true\n}",
"func (f *inFlow) onRead(n uint32) (swu, cwu uint32) {\n\tif n == 0 {\n\t\treturn\n\t}\n\tf.lock.Lock()\n\tdefer f.lock.Unlock()\n\tif f.pendingData == 0 {\n\t\t// pendingData has been adjusted by restoreConn.\n\t\treturn\n\t}\n\tf.pendingData -= n\n\tf.pendingUpdate += n\n\tif f.pendingUpdate >= f.limit/4 {\n\t\tswu = f.pendingUpdate\n\t\tf.pendingUpdate = 0\n\t}\n\tcwu = f.connInFlow.connOnRead(n)\n\treturn\n}",
"func (self *IoRate) TakeMax(other *IoRate) {\n\tself.Read.TakeMax(other.Read)\n\tself.Write.TakeMax(other.Write)\n}",
"func ClipRange(in Res, min, max anyvec.Numeric) Res {\n\treturn Pool(in, func(in Res) Res {\n\t\thighEnough, lowEnough := in.Output().Copy(), in.Output().Copy()\n\t\tanyvec.GreaterThan(highEnough, min)\n\t\tanyvec.LessThan(lowEnough, max)\n\n\t\tmidRange := highEnough.Copy()\n\t\tmidRange.Mul(lowEnough)\n\t\tmiddlePart := Mul(in, NewConst(midRange))\n\n\t\tanyvec.Complement(lowEnough)\n\t\tlowEnough.Scale(max)\n\t\tanyvec.Complement(highEnough)\n\t\thighEnough.Scale(min)\n\t\treturn Add(\n\t\t\tmiddlePart,\n\t\t\tAdd(\n\t\t\t\tNewConst(lowEnough),\n\t\t\t\tNewConst(highEnough),\n\t\t\t),\n\t\t)\n\t})\n}",
"func checkValueInRange(valueType string, reading interface{}) bool {\n\tisValid := false\n\n\tif valueType == common.ValueTypeString || valueType == common.ValueTypeBool || valueType == common.ValueTypeObject {\n\t\treturn true\n\t}\n\n\tif valueType == common.ValueTypeInt8 || valueType == common.ValueTypeInt16 ||\n\t\tvalueType == common.ValueTypeInt32 || valueType == common.ValueTypeInt64 {\n\t\tval := cast.ToInt64(reading)\n\t\tisValid = checkIntValueRange(valueType, val)\n\t}\n\n\tif valueType == common.ValueTypeUint8 || valueType == common.ValueTypeUint16 ||\n\t\tvalueType == common.ValueTypeUint32 || valueType == common.ValueTypeUint64 {\n\t\tval := cast.ToUint64(reading)\n\t\tisValid = checkUintValueRange(valueType, val)\n\t}\n\n\tif valueType == common.ValueTypeFloat32 || valueType == common.ValueTypeFloat64 {\n\t\tval := cast.ToFloat64(reading)\n\t\tisValid = checkFloatValueRange(valueType, val)\n\t}\n\n\treturn isValid\n}",
"func (_c *calibrationData) Adjust(rawStick [2]uint16) [2]int16 {\n\tc := _c\n\tif c == nil {\n\t\tc = &fakeCalibrationData\n\t} else if (c.xMinOff == 0) || (c.xMaxOff == 0) || (c.yMinOff == 0) || (c.yMaxOff == 0) {\n\t\tc = &fakeCalibrationData\n\t} else if (c.xCenter == 0xFFF) || (c.yCenter == 0xFFF) {\n\t\tc = &fakeCalibrationData\n\t}\n\n\tvar out [2]int16\n\t// careful - need to upcast to int before multiplying\n\t// 1. convert to signed\n\t// 2. subtract center value\n\t// 3. widen to int (!)\n\t// 4. multiply by desiredRange\n\t// 5. divide by range-from-center\n\tif rawStick[0] < c.xCenter {\n\t\tout[0] = int16(int((int16(rawStick[0]) - int16(c.xCenter))) * desiredRange / int(c.xMinOff))\n\t} else {\n\t\tout[0] = int16(int((int16(rawStick[0]) - int16(c.xCenter))) * desiredRange / int(c.xMaxOff))\n\t}\n\tif rawStick[1] < c.yCenter {\n\t\tout[1] = int16(int((int16(rawStick[1]) - int16(c.yCenter))) * desiredRange / int(c.yMinOff))\n\t} else {\n\t\tout[1] = int16(int((int16(rawStick[1]) - int16(c.yCenter))) * desiredRange / int(c.yMaxOff))\n\t}\n\n\t// 6. clamp\n\tif out[0] > desiredRange || out[0] < -desiredRange || out[1] > desiredRange || out[1] < -desiredRange {\n\t\tvar modX, modY float64 = float64(out[0]), float64(out[1])\n\t\tif modX > desiredRange || modX < -desiredRange {\n\t\t\t// overFactor is slightly over 1 or slightly under -1\n\t\t\toverFactor := modX / desiredRange\n\t\t\toverFactor = math.Copysign(overFactor, 1.0)\n\t\t\tmodX /= overFactor\n\t\t\tmodY /= overFactor\n\t\t}\n\t\tif modY > desiredRange || modY < -desiredRange {\n\t\t\t// overFactor is slightly over 1 or slightly under -1\n\t\t\toverFactor := modY / desiredRange\n\t\t\toverFactor = math.Copysign(overFactor, 1.0)\n\t\t\tmodX /= overFactor\n\t\t\tmodY /= overFactor\n\t\t}\n\t\t// clamp again in case of fraction weirdness\n\t\tif modX > desiredRange {\n\t\t\tmodX = desiredRange\n\t\t}\n\t\tif modX < -desiredRange {\n\t\t\tmodX = -desiredRange\n\t\t}\n\t\tif modY > desiredRange {\n\t\t\tmodY = desiredRange\n\t\t}\n\t\tif modY < -desiredRange {\n\t\t\tmodY = -desiredRange\n\t\t}\n\t\tout[0], out[1] = int16(modX), int16(modY)\n\t}\n\n\treturn out\n}",
"func all_on (inp uint32, filter uint32) int8 {\n inp &= filter;\n\n if (inp == filter) {\n return 1;\n }\n\n if (inp == 0) {\n return -1;\n }\n\n return 0;\n}",
"func fillBuffer(stream io.Reader, buffer []byte) bool {\n n, _ := io.ReadFull(stream, buffer)\n if n < len(buffer) {\n return false\n }\n return true\n}",
"func (alr *adjustableLimitedReader) setLimit(n int64) {\n\talr.R.N = n\n}",
"func (s *ScalableTargetAdapter) CanRampTo(desiredPercent uint32) bool {\n\ttarget := s.Incarnation.target()\n\tstatus := s.Incarnation.status\n\tcontroller := s.Incarnation.controller\n\tlog := s.Incarnation.getLog()\n\n\tif target == nil || target.Scale.Min == nil {\n\t\treturn false\n\t}\n\n\ttotalReplicas := float64(controller.expectedTotalReplicas(*target.Scale.Min, int32(desiredPercent)))\n\texpectedReplicas := int32(math.Ceil(totalReplicas * Threshold))\n\n\tlog.Info(\n\t\t\"Computed expectedReplicas\",\n\t\t\"desiredPercent\", desiredPercent,\n\t\t\"expectedReplicas\", expectedReplicas,\n\t\t\"currentReplicas\", status.Scale.Current,\n\t)\n\tif status.Scale.Current < expectedReplicas {\n\t\tlog.Info(\n\t\t\t\"Deferring ramp-up; not all desired replicas are ready\",\n\t\t\t\"desiredPercent\", desiredPercent,\n\t\t\t\"expectedReplicas\", expectedReplicas,\n\t\t\t\"currentReplicas\", status.Scale.Current,\n\t\t)\n\t\treturn false\n\t}\n\n\treturn true\n}",
"func ContinuousReadCHK(connection spi.Conn, drdy gpio.PinIO) (int32, error) {\n\n\tif drdy.WaitForEdge(-1) {\n\n\t\tif err := connection.Tx(empty, conversionbytes); err != nil {\n\t\t\treturn 0, errors.New(\"SPI connection failed\")\n\t\t} else if conversionbytes[5] != (conversionbytes[1]+conversionbytes[2]+conversionbytes[3]+conversionbytes[4]+0x9B)&255 {\n\t\t\treturn 0, errors.New(\"Checksum Failed - data transmission error occurred\")\n\t\t} else {\n\t\t\trawdata := int(conversionbytes[1])<<24 | int(conversionbytes[2])<<16 | int(conversionbytes[3])<<8 | int(conversionbytes[4])\n\t\t\ttobeconverted := int32(rawdata)\n\t\t\treturn tobeconverted, nil\n\t\t}\n\t}\n\treturn 0, errors.New(\"Pin timeout\")\n}",
"func (f *Feedback) adjustFeedbackBoundaries(sc *stmtctx.StatementContext, min, max *types.Datum) (bool, error) {\n\tresult, err := outOfRange(sc, min, max, f.Lower)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\tif result > 0 {\n\t\treturn true, nil\n\t}\n\tif result < 0 {\n\t\tf.Lower = min\n\t}\n\tresult, err = outOfRange(sc, min, max, f.Upper)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\tif result < 0 {\n\t\treturn true, nil\n\t}\n\tif result > 0 {\n\t\tf.Upper = max\n\t}\n\treturn false, nil\n}",
"func IntClamp(x, min, max int) int {\n\tswitch {\n\tcase x < min:\n\t\treturn min\n\tcase x > max:\n\t\treturn max\n\tdefault:\n\t\treturn x\n\t}\n}",
"func Clamp(value, min, max float32) float32 {\n\tvar res float32\n\tif value < min {\n\t\tres = min\n\t} else {\n\t\tres = value\n\t}\n\n\tif res > max {\n\t\treturn max\n\t}\n\n\treturn res\n}",
"func (cb *Buffer) Omit(n int) error {\n\tif n < 1 {\n\t\treturn fmt.Errorf(\"Positive number required\")\n\t}\n\n\tif cb.ReadAvailability() < n {\n\t\treturn fmt.Errorf(\"Not enough unread data\")\n\t}\n\n\tif cb.rpos <= n {\n\t\tcb.rpos = len(cb.buffer) - n + cb.rpos\n\t} else {\n\t\tcb.rpos -= n\n\t}\n\n\tcb.full = false\n\treturn nil\n}",
"func (r *lockedSource) read(p []byte, readVal *int64, readPos *int8) (n int, err error) {\n\tr.lk.Lock()\n\tn, err = read(p, r.src.Int63, readVal, readPos)\n\tr.lk.Unlock()\n\treturn\n}",
"func clampToUint16(x float32) (y uint16) {\n\ty = uint16(x)\n\tif x < 0 {\n\t\ty = 0\n\t} else if x > float32(0xfffe) {\n\t\t// \"else if x > float32(0xffff)\" will cause overflows!\n\t\ty = 0xffff\n\t}\n\treturn\n}",
"func Clamp(val, low, high float64) float64 {\n\tif val < low {\n\t\treturn low\n\t}\n\n\tif val > high {\n\t\treturn high\n\t}\n\n\treturn val\n}",
"func lazyCopy(dst io.Writer, src io.Reader, hideFuncs ...func() bool) error {\n\n\tbuffer := make([]byte, 4096)\n\n\tbackOffDelay := BackOffReadInitialSleepDuration\n\n\tvar hide bool\n\n\tfor {\n\t\tsize, err := src.Read(buffer)\n\t\tif size > 0 {\n\t\t\thide = false\n\t\t\tfor _, f := range hideFuncs {\n\t\t\t\tif f() {\n\t\t\t\t\thide = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t\tif !hide {\n\t\t\t\tif _, err := dst.Write(buffer[:size]); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t\tbackOffDelay = BackOffReadInitialSleepDuration\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif size == 0 {\n\t\t\t// if there was no data to read, wait a little while before trying again\n\t\t\ttime.Sleep(backOffDelay)\n\t\t\tbackOffDelay = backOffDelay * 2\n\t\t\tif backOffDelay > BackOffReadMaxSleepDuration {\n\t\t\t\tbackOffDelay = BackOffReadMaxSleepDuration\n\t\t\t}\n\t\t}\n\t}\n}",
"func (q *queue) read(ctx context.Context, dst usermem.IOSequence, l *lineDiscipline) (int64, bool, error) {\n\tq.mu.Lock()\n\tdefer q.mu.Unlock()\n\tif !q.readable {\n\t\treturn 0, false, syserror.ErrWouldBlock\n\t}\n\n\t// Read out from the read buffer.\n\tn := canonMaxBytes\n\tif n > int(dst.NumBytes()) {\n\t\tn = int(dst.NumBytes())\n\t}\n\tif n > q.readBuf.Len() {\n\t\tn = q.readBuf.Len()\n\t}\n\tn, err := dst.Writer(ctx).Write(q.readBuf.Bytes()[:n])\n\tif err != nil {\n\t\treturn 0, false, err\n\t}\n\t// Discard bytes read out.\n\tq.readBuf.Next(n)\n\n\t// If we read everything, this queue is no longer readable.\n\tif q.readBuf.Len() == 0 {\n\t\tq.readable = false\n\t}\n\n\t// Move data from the queue's wait buffer to its read buffer.\n\tnPushed := q.pushWaitBufLocked(l)\n\n\treturn int64(n), nPushed > 0, nil\n}",
"func (e *T) readAvailable(dst []byte) (n int, err error) {\n\tn = copy(dst, e.buf[e.ridx:e.widx])\n\ti := e.ridx + n\n\tif e.secure {\n\t\tclearbytes(e.buf[e.ridx:i])\n\t}\n\te.ridx = i\n\tif e.ridx >= e.widx {\n\t\te.ridx = 0\n\t\te.widx = 0\n\t\te.available = false\n\t}\n\treturn n, nil\n}",
"func (f *inFlow) onData(n uint32) error {\n\tif n == 0 {\n\t\treturn nil\n\t}\n\tf.lock.Lock()\n\tdefer f.lock.Unlock()\n\tif f.pendingData+f.pendingUpdate+n > f.limit {\n\t\treturn fmt.Errorf(\"recieved %d-bytes data exceeding the limit %d bytes\", f.pendingData+f.pendingUpdate+n, f.limit)\n\t}\n\tif f.connInFlow != nil {\n\t\terr := f.connInFlow.onData(n)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tf.pendingData += n\n\treturn nil\n}",
"func (filter *Saturation) IsScalable() {\n}",
"func skipData(reader *bytes.Reader, numBytes uint64) error {\n\tmax := uint64(math.MaxInt32)\n\n\t/*\n\t * Check if we can seek this far.\n\t */\n\tif numBytes > max {\n\t\treturn fmt.Errorf(\"Cannot skip more than %d bytes.\", max)\n\t} else {\n\t\tsignedBytes := int64(numBytes)\n\t\tmode := io.SeekCurrent\n\t\treader.Seek(signedBytes, mode)\n\t\treturn nil\n\t}\n\n}",
"func (_this *StreamingReadBuffer) RefillIfNecessary(startOffset, position int) (positionOffset int) {\n\tif !_this.isEOF && _this.unreadByteCount(position) < _this.minFreeBytes {\n\t\treturn _this.Refill(startOffset)\n\t}\n\treturn 0\n}",
"func (img *FloatImage) ConvolveClamp(kernel *ConvKernel) *FloatImage {\n\treturn img.convolve(kernel, clampPlaneExtension)\n}",
"func (r reader) availableData() bool {\n\treturn r.at < r.sb.start+len(r.sb.buf)\n}",
"func discardInput(r io.Reader, n uint32) {\n\tmaxSize := uint32(10 * 1024) // 10k at a time\n\tnumReads := n / maxSize\n\tbytesRemaining := n % maxSize\n\tif n > 0 {\n\t\tbuf := make([]byte, maxSize)\n\t\tfor i := uint32(0); i < numReads; i++ {\n\t\t\tio.ReadFull(r, buf)\n\t\t}\n\t}\n\tif bytesRemaining > 0 {\n\t\tbuf := make([]byte, bytesRemaining)\n\t\tio.ReadFull(r, buf)\n\t}\n}",
"func discardInput(r io.Reader, n uint32) {\n\tmaxSize := uint32(10 * 1024) // 10k at a time\n\tnumReads := n / maxSize\n\tbytesRemaining := n % maxSize\n\tif n > 0 {\n\t\tbuf := make([]byte, maxSize)\n\t\tfor i := uint32(0); i < numReads; i++ {\n\t\t\tio.ReadFull(r, buf)\n\t\t}\n\t}\n\tif bytesRemaining > 0 {\n\t\tbuf := make([]byte, bytesRemaining)\n\t\tio.ReadFull(r, buf)\n\t}\n}",
"func (af *filtBase) checkFloatParam(p, low, high float64, name string) (float64, error) {\n\tif low <= p && p <= high {\n\t\treturn p, nil\n\t} else {\n\t\terr := fmt.Errorf(\"parameter %v is not in range <%v, %v>\", name, low, high)\n\t\treturn 0, err\n\t}\n}",
"func (s *Stream) readData(hdr header, flags uint16, conn io.Reader) error {\n\tif err := s.processFlags(flags); err != nil {\n\t\treturn err\n\t}\n\n\t// Check that our recv window is not exceeded\n\tlength := hdr.Length()\n\tif length == 0 {\n\t\treturn nil\n\t}\n\n\t// Wrap in a limited reader\n\tconn = &io.LimitedReader{R: conn, N: int64(length)}\n\n\t// Copy into buffer\n\ts.recvLock.Lock()\n\n\tif length > s.recvWindow {\n\t\ts.session.logger.Printf(\"[ERR] yamux: receive window exceeded (stream: %d, remain: %d, recv: %d)\", s.id, s.recvWindow, length)\n\t\treturn ErrRecvWindowExceeded\n\t}\n\n\tif s.recvBuf == nil {\n\t\t// Allocate the receive buffer just-in-time to fit the full data frame.\n\t\t// This way we can read in the whole packet without further allocations.\n\t\ts.recvBuf = bytes.NewBuffer(make([]byte, 0, length))\n\t}\n\tif _, err := io.Copy(s.recvBuf, conn); err != nil {\n\t\ts.session.logger.Printf(\"[ERR] yamux: Failed to read stream data: %v\", err)\n\t\ts.recvLock.Unlock()\n\t\treturn err\n\t}\n\n\t// Decrement the receive window\n\ts.recvWindow -= length\n\ts.recvLock.Unlock()\n\n\t// Unblock any readers\n\tasyncNotify(s.recvNotifyCh)\n\treturn nil\n}",
"func (s *Streaming) UncorrectedValue() float64 {\n\treturn s.c * s.normx * s.normy / s.n\n}",
"func isSilent(audio []int16) bool {\n\tvar max int16 = audio[0]\n\tfor _, value := range audio {\n\t\tif value > max {\n\t\t\tmax = value\n\t\t}\n\t}\n\treturn max < SilentThresh\n}",
"func (x *fastReflection_SendEnabled) Range(f func(protoreflect.FieldDescriptor, protoreflect.Value) bool) {\n\tif x.Denom != \"\" {\n\t\tvalue := protoreflect.ValueOfString(x.Denom)\n\t\tif !f(fd_SendEnabled_denom, value) {\n\t\t\treturn\n\t\t}\n\t}\n\tif x.Enabled != false {\n\t\tvalue := protoreflect.ValueOfBool(x.Enabled)\n\t\tif !f(fd_SendEnabled_enabled, value) {\n\t\t\treturn\n\t\t}\n\t}\n}",
"func (d Download) IsRangeable() bool {\n\treturn d.info.Rangeable\n}",
"func (s *Set) await(idx int, bof, eof int64) bool {\n\tif s.maxOffsets[idx][0] < 0 || (s.maxOffsets[idx][0] > 0 && int64(s.maxOffsets[idx][0]) >= bof) {\n\t\treturn true\n\t}\n\tif s.maxOffsets[idx][1] < 0 || (s.maxOffsets[idx][1] > 0 && int64(s.maxOffsets[idx][1]) >= eof) {\n\t\treturn true\n\t}\n\treturn false\n}",
"func (a *ArrayDataSlab) IsUnderflow() (uint32, bool) {\n\tif uint32(minThreshold) > a.header.size {\n\t\treturn uint32(minThreshold) - a.header.size, true\n\t}\n\treturn 0, false\n}",
"func (this *FeedableBuffer) Minimize() {\n\tthis.Data = this.Data[:this.minByteCount]\n}",
"func MustReadFloat(r io.Reader) float64 {\n\tvar res float64\n\t_, err := fmt.Fscanf(r, \"%f\", &res)\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"Failed to read float: %v\", err))\n\t}\n\treturn res\n}",
"func passLoadIfFlow(flow, inputLoad data.ND1Float64,\n\tscalingFactor float64,\n\toutputLoad data.ND1Float64) {\n\n\tif scalingFactor == 0.0 {\n\t\treturn\n\t}\n\n\tn := flow.Len1()\n\tidx := []int{0}\n\n\tfor day := 0; day < n; day++ {\n\t\tidx[0] = day\n\t\tf := flow.Get(idx)\n\t\tl := inputLoad.Get(idx)\n\n\t\tif f > EFFECTIVELY_ZERO {\n\t\t\toutputLoad.Set(idx, l*scalingFactor)\n\t\t} else {\n\t\t\toutputLoad.Set(idx, 0.0)\n\t\t}\n\t}\n}",
"func (a *ArrayDataSlab) CanLendToLeft(size uint32) bool {\n\tif len(a.elements) == 0 {\n\t\t// TODO return EmptyDataSlabError\n\t\tpanic(fmt.Sprintf(\"empty data slab %d\", a.header.id))\n\t}\n\tif len(a.elements) < 2 {\n\t\treturn false\n\t}\n\tif a.header.size-size < uint32(minThreshold) {\n\t\treturn false\n\t}\n\tlendSize := uint32(0)\n\tfor i := 0; i < len(a.elements); i++ {\n\t\tlendSize += a.elements[i].ByteSize()\n\t\tif a.header.size-lendSize < uint32(minThreshold) {\n\t\t\treturn false\n\t\t}\n\t\tif lendSize >= size {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}",
"func (v *Vec3i) Clamp(min, max Vec3i) {\n\tif v.X < min.X {\n\t\tv.X = min.X\n\t} else if v.X > max.X {\n\t\tv.X = max.X\n\t}\n\tif v.Y < min.Y {\n\t\tv.Y = min.Y\n\t} else if v.Y > max.Y {\n\t\tv.Y = max.Y\n\t}\n\tif v.Z < min.Z {\n\t\tv.Z = min.Z\n\t} else if v.Z > max.Z {\n\t\tv.Z = max.Z\n\t}\n}",
"func Trim(read bioutil.Read, mean int, window int, minLength int) (cutoff int, originalMean float32, finalMean float32) {\n\tif window > minLength {\n\t\tpanic(\"QTrim: window must be <= minimum read length\")\n\t}\n\n\tline := read.Quality()\n\tlength := read.Length()\n\n\ttotal := 0\n\twindowTotal := 0\n\tfor i, b := range line {\n\t\tquality := Score(b)\n\t\ttotal += quality\n\t\tif i > length-window {\n\t\t\twindowTotal += quality\n\t\t}\n\t}\n\n\toriginalMean = float32(total) / float32(length)\n\n\twindowCompare := mean * window\n\tcutoff = length\n\tif cutoff <= minLength || cutoff <= window { // Discard this sequence\n\t\treturn 0, originalMean, 0\n\t}\n\tfor !(total >= mean*cutoff && windowTotal >= windowCompare) {\n\t\tif cutoff <= minLength || cutoff <= window { // Discard this sequence\n\t\t\treturn 0, originalMean, 0\n\t\t}\n\t\tcutoff--\n\t\ttrimmedScore := Score(line[cutoff])\n\t\ttotal -= trimmedScore\n\t\twindowTotal = windowTotal - trimmedScore + Score(line[cutoff-window])\n\t}\n\tfor Score(line[cutoff-1]) < mean {\n\t\tif cutoff <= minLength || cutoff <= window { // Discard this sequence\n\t\t\treturn 0, originalMean, 0\n\t\t}\n\t\tcutoff--\n\t}\n\tfinalMean = float32(total) / float32(cutoff)\n\treturn\n}",
"func (b *logEventBuffer) normalRange(start, end int) (int, int) {\n\tif end < start || end == 0 {\n\t\t// invalid range\n\t\treturn -1, -1\n\t}\n\tsize := b.bufferSize()\n\tif start == 0 {\n\t\t// we reduce start by 1 to make it easier to calculate the index,\n\t\t// but we need to ensure we don't go below 0.\n\t\tstart++\n\t}\n\tif start == end {\n\t\t// ensure we have at least one block in range\n\t\tend++\n\t}\n\tif end-start > size {\n\t\t// ensure we don't have more than the buffer size\n\t\tstart = (end - size) + 1\n\t}\n\tstart = (start - 1) % size\n\tend = end % size\n\n\treturn start, end\n}",
"func (s *ClampDirectionOffset) Max() float64 {\n\treturn s.max\n}",
"func CTCLossIgnoreLongerOutputsThanInputs(value bool) CTCLossAttr {\n\treturn func(m optionalAttr) {\n\t\tm[\"ignore_longer_outputs_than_inputs\"] = value\n\t}\n}",
"func clampPlaneExtension(index, limit int) int {\n\tif index >= limit {\n\t\tindex = limit - 1\n\t} else if index < 0 {\n\t\tindex = 0\n\t}\n\treturn index\n}",
"func ReadBool(buffer []byte, offset int) bool {\n return buffer[offset] != 0\n}",
"func (r *Reader) Unlimit() {\n\tr.newLimit <- nil\n}",
"func (q *Question) _baseReadFloat(prompt string, defaultAnswer float64, def hasDefault) (answer float64, err error) {\n\tline := q.getLine(\n\t\tprompt,\n\t\tstrconv.FormatFloat(defaultAnswer, QuestionFloatFmt, QuestionFloatPrec, 64),\n\t\tdef,\n\t)\n\n\tfor {\n\t\tinput, err := line.Read()\n\t\tif err != nil {\n\t\t\treturn 0.0, err\n\t\t}\n\t\tif input == \"\" && def != _DEFAULT_NO {\n\t\t\treturn defaultAnswer, nil\n\t\t}\n\n\t\tanswer, err = strconv.ParseFloat(input, 64)\n\t\tif err != nil {\n\t\t\tfmt.Fprintf(output, \"%s%q: the value has to be a float\\r\\n\",\n\t\t\t\tQuestionErrPrefix, input)\n\t\t\tcontinue\n\t\t} else {\n\t\t\treturn answer, nil\n\t\t}\n\t}\n\treturn\n}",
"func CheckBoundsInt64IgnoreOverflows(lower, upper int64) error {\n\tif lower > upper {\n\t\treturn fmt.Errorf(\"Upper bound (%d) must be larger than lower bound (%d)\", upper, lower)\n\t}\n\tif lower == upper {\n\t\tlog.Warningf(\"Lower bound is equal to upper bound: all added elements will be clamped to %d\", upper)\n\t}\n\treturn nil\n}",
"func (self *IoRateValues) TakeMax(other IoRateValues) {\n\tif self.Syscalls < other.Syscalls {\n\t\tself.Syscalls = other.Syscalls\n\t}\n\tif self.BlockBytes < other.BlockBytes {\n\t\tself.BlockBytes = other.BlockBytes\n\t}\n}",
"func CheckGetRaw(raw *Raw, fileLength int64) error {\n\t// if raw.Length < 0 ,read All data\n\tif raw.Offset < 0 {\n\t\treturn errors.Wrapf(cdnerrors.ErrInvalidValue, \"the offset: %d is a negative integer\", raw.Offset)\n\t}\n\tif raw.Length < 0 {\n\t\treturn errors.Wrapf(cdnerrors.ErrInvalidValue, \"the length: %d is a negative integer\", raw.Length)\n\t}\n\tif fileLength < raw.Offset {\n\t\treturn errors.Wrapf(cdnerrors.ErrInvalidValue, \"the offset: %d is lager than the file length: %d\", raw.Offset, fileLength)\n\t}\n\n\tif fileLength < (raw.Offset + raw.Length) {\n\t\treturn errors.Wrapf(cdnerrors.ErrInvalidValue, \"the offset: %d and length: %d is lager than the file length: %d\", raw.Offset, raw.Length, fileLength)\n\t}\n\treturn nil\n}",
"func CheckTrunc(raw *Raw) error {\n\tif raw.Trunc && raw.TruncSize < 0 {\n\t\treturn errors.Wrapf(cdnerrors.ErrInvalidValue, \"the truncSize: %d is a negative integer\", raw.Length)\n\t}\n\treturn nil\n}",
"func (i *Number) Clamp(lower, upper Number) *Number {\n\tif i.value < lower.value {\n\t\treturn &lower\n\t}\n\tif i.value > upper.value {\n\t\treturn &upper\n\t}\n\treturn i\n}",
"func DataUnfit() bool {\n\n\treturn unfit\n\n}",
"func filterAbs(ctx stick.Context, val stick.Value, args ...stick.Value) stick.Value {\n\tn := stick.CoerceNumber(val)\n\tif 0 == n {\n\t\treturn n\n\t}\n\treturn math.Abs(n)\n}",
"func DiscardByRead(source Source, delta int64) error {\n\tbuf := make([]byte, 4096)\n\tfor delta > 0 {\n\t\ttoRead := delta\n\t\tif toRead > int64(len(buf)) {\n\t\t\ttoRead = int64(len(buf))\n\t\t}\n\n\t\tn, err := source.Read(buf[:toRead])\n\t\tif err != nil {\n\t\t\treturn errors.WithStack(err)\n\t\t}\n\n\t\tdelta -= int64(n)\n\t}\n\n\treturn nil\n}",
"func (r *Reader) SimpleLimit(n int, t time.Duration) <-chan bool {\n\tdone := make(chan bool, 1)\n\tready := make(chan struct{})\n\tr.newLimit <- &limit{\n\t\trate: rate{n, t},\n\t\tdone: done,\n\t\tready: ready,\n\t}\n\t<-ready\n\treturn done\n}",
"func checkReader(t *testing.T, r zbuf.Reader, checkReads bool) {\n\tfor expect := 3; expect <= 6; expect++ {\n\t\trec, err := r.Read()\n\t\trequire.NoError(t, err)\n\n\t\tv, err := rec.AccessInt(\"value\")\n\t\trequire.NoError(t, err)\n\n\t\trequire.Equal(t, int64(expect), v, \"Got expected record value\")\n\t}\n\n\trec, err := r.Read()\n\trequire.NoError(t, err)\n\trequire.Nil(t, rec, \"Reached eof after last record in time span\")\n\n\tif checkReads {\n\t\trr, ok := r.(*rangeReader)\n\t\trequire.True(t, ok, \"Can get read stats from index reader\")\n\t\trequire.LessOrEqual(t, rr.reads(), uint64(6), \"Indexed reader did not read the entire file\")\n\t}\n}",
"func (a *ArrayDataSlab) CanLendToRight(size uint32) bool {\n\tif len(a.elements) == 0 {\n\t\t// TODO return EmptyDataSlabError\n\t\tpanic(fmt.Sprintf(\"empty data slab %d\", a.header.id))\n\t}\n\tif len(a.elements) < 2 {\n\t\treturn false\n\t}\n\tif a.header.size-size < uint32(minThreshold) {\n\t\treturn false\n\t}\n\tlendSize := uint32(0)\n\tfor i := len(a.elements) - 1; i >= 0; i-- {\n\t\tlendSize += a.elements[i].ByteSize()\n\t\tif a.header.size-lendSize < uint32(minThreshold) {\n\t\t\treturn false\n\t\t}\n\t\tif lendSize >= size {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}",
"func (d *Decoder) ensure1(n int) bool {\n\tfor {\n\t\tif d.complete {\n\t\t\t// No possibility of more data.\n\t\t\treturn false\n\t\t}\n\t\td.readMore()\n\t\tif d.r1+n <= len(d.buf) {\n\t\t\t// There are enough bytes available.\n\t\t\treturn true\n\t\t}\n\t}\n}",
"func readDataOnly(ctx context.Context, conn *client.Conn) (uint64, time.Duration, error) {\n\tvar (\n\t\tbuf = make([]byte, 10240)\n\t\tbyteCount atomic.Uint64\n\t\tstartTime = time.Now()\n\t)\n\tfor {\n\t\tselect {\n\t\tcase <-ctx.Done():\n\t\t\treturn byteCount.Load(), time.Since(startTime), nil\n\t\tdefault:\n\t\t}\n\n\t\tn, err := conn.Conn.Conn.Read(buf)\n\t\tif err != nil {\n\t\t\treturn byteCount.Load(), time.Since(startTime), errors.Annotatef(err, \"read binary data\")\n\t\t}\n\t\tbyteCount.Add(uint64(n))\n\t}\n}",
"func ImplementsReaderAtOpts(t *testing.T, reader io.ReaderAt, length int64, opts ReaderAtOpts) bool {\n\tvar buf = make([]byte, opts.BufferSize)\n\tvar err error\n\tvar n int64\n\n\tif !noopRead(t, toReader(reader, 0)) {\n\t\treturn false\n\t}\n\n\tfor err == nil {\n\t\tvar a int\n\t\ta, err = reader.ReadAt(buf, n)\n\t\tn += int64(a)\n\t\tif !(assert.GreaterOrEqual(t, a, 0) && assert.LessOrEqual(t, int64(opts.BufferSize), n)) {\n\t\t\treturn false\n\t\t}\n\n\t\tif 0 < n && n < int64(opts.BufferSize) {\n\t\t\treturn assert.Error(t, err)\n\t\t}\n\t}\n\n\tgrp, _ := errgroup.WithContext(context.Background())\n\tfor i := int64(0); i < length && i < 50; i++ {\n\t\ti := i\n\t\tgrp.Go(func() error {\n\t\t\tvar buf = make([]byte, opts.BufferSize)\n\t\t\t_, err := reader.ReadAt(buf, i)\n\t\t\tassert.NoError(t, err)\n\t\t\treturn err\n\t\t})\n\t}\n\terr2 := grp.Wait()\n\n\treturn assert.EqualError(t, err, io.EOF.Error()) && assert.NoError(t, err2)\n}",
"func (p *Stream) CanRead() bool {\n\treturn p.readIndex < p.writeIndex || p.readSeg < p.writeSeg\n}",
"func (cb *Buffer) ReadAvailability() int {\n\treturn len(cb.buffer) - cb.WriteAvailability()\n}",
"func newBoundedReader(r io.ReaderAt, minOffset, maxOffset int64) io.Reader {\n\treturn &boundedReader{\n\t\treaderAt: r,\n\t\tminOffset: minOffset,\n\t\tmaxOffset: maxOffset,\n\t}\n}",
"func parseReadOnly(b interface{}) bool {\n\tswitch b.(type) {\n\tcase float64:\n\t\treturn b.(float64) > 0\n\tcase int64:\n\t\treturn b.(int64) > 0\n\tcase string:\n\t\ti, _ := strconv.Atoi(b.(string))\n\t\treturn i > 0\n\t}\n\t// default to ReadOnly == false\n\treturn false\n}"
] | [
"0.5568912",
"0.544267",
"0.5435445",
"0.54336435",
"0.54229486",
"0.5378128",
"0.5269297",
"0.5250991",
"0.5245359",
"0.5202621",
"0.50596756",
"0.49487576",
"0.49410132",
"0.4889552",
"0.48799032",
"0.48721945",
"0.4850817",
"0.4798648",
"0.47760805",
"0.47460827",
"0.47380048",
"0.47221872",
"0.46646792",
"0.46542412",
"0.46262687",
"0.4610322",
"0.46083212",
"0.4594526",
"0.45746464",
"0.45412847",
"0.45378268",
"0.4536934",
"0.45350957",
"0.4526396",
"0.45223063",
"0.45196933",
"0.45119315",
"0.45073986",
"0.45049977",
"0.44906938",
"0.44856298",
"0.44805822",
"0.44770962",
"0.44637564",
"0.44240084",
"0.44136116",
"0.4411545",
"0.44052956",
"0.4401592",
"0.43944573",
"0.43856704",
"0.43824717",
"0.4381426",
"0.43813902",
"0.43775305",
"0.43731317",
"0.43615454",
"0.43601444",
"0.43593255",
"0.43550482",
"0.4353383",
"0.4353383",
"0.43503836",
"0.43482098",
"0.43414664",
"0.4337218",
"0.43275848",
"0.43243977",
"0.43205583",
"0.43120906",
"0.43040937",
"0.4292973",
"0.4290253",
"0.42783177",
"0.42777693",
"0.42634746",
"0.4259622",
"0.42574668",
"0.42380536",
"0.423316",
"0.4226335",
"0.42214525",
"0.42145684",
"0.4205774",
"0.4195386",
"0.41947716",
"0.41914186",
"0.4184225",
"0.41825107",
"0.41722178",
"0.41700926",
"0.41678345",
"0.416638",
"0.41600046",
"0.4158769",
"0.41460663",
"0.41449675",
"0.4138085",
"0.41378844",
"0.41365302",
"0.41313836"
] | 0.0 | -1 |
clear buffers to preset values | func Clear(mask uint32) {
C.glowClear(gpClear, (C.GLbitfield)(mask))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (d *Display) resetBuffer() {\n\td.width = d.device.Width()\n\td.height = d.device.Height()\n\td.buffer = make([][]byte, d.height)\n\tfor y := range d.buffer {\n\t\td.buffer[y] = make([]byte, d.width)\n\t}\n}",
"func (d *Driver) Clear() {\n\tfor i := 0; i < len(d.buff); i++ {\n\t\td.buff[i] = 0\n\t}\n}",
"func (w *buffer) reset() {\n\tfor i := range (*w)[:cap(*w)] {\n\t\t(*w)[i] = 0\n\t}\n\t*w = (*w)[:0]\n}",
"func Clear() {\n\tfor i := 0; i < bufferLength; i++ {\n\t\tbuffer[i] = 0x00\n\t}\n}",
"func (b *Buffer) Clear() {\n\tfor o := range b.Tiles {\n\t\tb.Tiles[o] = nil\n\t}\n}",
"func (b *Buffer) Reset() {\n b.size = 0\n b.offset = 0\n}",
"func (rb *RingBuffer[T]) Clear() {\n\trb.mu.Lock()\n\tdefer rb.mu.Unlock()\n\trb.pos = 0\n\trb.buf = nil\n}",
"func (b *Buffer) Clear() {\n\tb.series = make(map[string]*influxdb.Series)\n\tb.size = 0\n}",
"func (speech *SpeechSync) ClearBuffer() {\n\tspeech.dataInfo.ClearBuffer()\n}",
"func (pb *PacketBuffer) Clear() {\n\tpb.numPackets = 0\n\tpb.offsets[0] = 0\n}",
"func (t *FileTarget) clearBuffer() {\n\tfor {\n\t\tselect {\n\t\tcase <-t.tick.C:\n\t\t\t_ = t.writer.Flush()\n\t\t}\n\t}\n}",
"func (buf *Buffer) reset() {\n\tbuf.DNCReports = nil\n\tbuf.DNCReports = make([]DNCReport, 0)\n\tbuf.CNIReports = nil\n\tbuf.CNIReports = make([]CNIReport, 0)\n\tbuf.NPMReports = nil\n\tbuf.NPMReports = make([]NPMReport, 0)\n\tbuf.CNSReports = nil\n\tbuf.CNSReports = make([]CNSReport, 0)\n\tpayloadSize = 0\n}",
"func (b *Buffer) Reset() {\n\tb.Line = b.Line[:0]\n\tb.Val = b.Val[:0]\n}",
"func (c *Canvas) Clear() error {\n\tb, err := buffer.New(c.Size())\n\tif err != nil {\n\t\treturn err\n\t}\n\tc.buffer = b\n\treturn nil\n}",
"func (self *nativePduBuffer) Clear() {\n\tself.start = 0\n\tself.count = 0\n}",
"func (e *ObservableEditableBuffer) ResetBuffer() {\n\te.filtertagobservers = false\n\te.seq = 0\n\te.f = NewTypeBuffer([]rune{}, e)\n}",
"func (b *Buffer) Clear() {\n\tb.currentSize = 0\n\tb.contents = map[entity.Key]inventoryapi.PostDeltaBody{}\n}",
"func (s *Streams) Clear() {\n\ts.Buffer.Clear()\n}",
"func (console *testConsole) Clear() {\n\tconsole.bufMx.Lock()\n\tconsole.buf = console.buf[:0]\n\tconsole.bufMx.Unlock()\n}",
"func (b *Buf) Reset() { b.b = b.b[:0] }",
"func (w *Window) Clear() {\n\tfor i, _ := range w.tex {\n\t\tw.tex[i] = 0\n\t}\n}",
"func (b *Buffer) Clear() {\n\tb.mu.Lock()\n\tdefer b.mu.Unlock()\n\tif err := b.flushAll(); err != nil {\n\t\tb.logger.ErrorContext(context.Background(), `k-stream.changelog.buffer`, err)\n\t}\n\n}",
"func (b *Buffer) Reset() {\n\tif len(b.bufs) > 0 {\n\t\tb.curBuf = b.bufs[0]\n\t\tb.curBufLen = len(b.bufs[0])\n\t\tb.curBufIdx = 0\n\t\tb.curIdx = 0\n\t}\n}",
"func (b *Buffer) Reset() {\n\tb.buf = b.buf[:0]\n}",
"func (spriteBatch *SpriteBatch) Clear() {\n\tspriteBatch.arrayBuf = newVertexBuffer(spriteBatch.size*4*8, []float32{}, spriteBatch.usage)\n\tspriteBatch.count = 0\n}",
"func (b *Buffer) ClearFrom(o int) {\n\tfor l := len(b.Tiles); o < l; o++ {\n\t\tb.Tiles[o] = nil\n\t}\n}",
"func (b *Buffer) Reset() {\n\tb.B = b.arena\n}",
"func (b *Buffer) Reset() {\n\tb.B = b.B[:0]\n}",
"func (i *Input) Clear() {\n\ti.Pos = 0\n\ti.Buffer = NewLine()\n}",
"func (b *FixedBuffer) Reset() {\n\tb.w = 0\n\tb.r = 0\n}",
"func (n *BufferView) UnfocusBuffers() {\n\t// clear focus from buffers\n\tfor _, buffPane := range n.buffers {\n\t\tbuffPane.SetFocus(false)\n\t}\n}",
"func (buffer *Buffer) Reset() {\n\tbuffer.B.Reset()\n}",
"func (b *Buffer) ClearMatches() {\n\tfor i := range b.lines {\n\t\tb.SetMatch(i, nil)\n\t\tb.SetState(i, nil)\n\t}\n}",
"func (b *Buffer) Reset() {\n\tb.start = 0\n\tb.used = 0\n}",
"func (this *channelStruct) Clear() {\n\tthis.samples = make([]float64, 0)\n}",
"func (cb *Buffer) Reset() {\n\tcb.wpos = len(cb.buffer)\n\tcb.rpos = cb.wpos\n\tcb.full = false\n}",
"func (debugging *debuggingOpenGL) Clear(mask uint32) {\n\tdebugging.recordEntry(\"Clear\", mask)\n\tdebugging.gl.Clear(mask)\n\tdebugging.recordExit(\"Clear\")\n}",
"func (tv *TextView) Clear() {\n\tif tv.Buf == nil {\n\t\treturn\n\t}\n\ttv.Buf.New(0)\n}",
"func (s *StackF64) clear() {\n\tfor i := 0; i < len(s.data); i++ {\n\t\ts.data[i] = 0\n\t}\n}",
"func (r *Render) clear(cursor int) {\n\tr.move(cursor, 0)\n\tr.out.EraseDown()\n}",
"func (b *Buffer) ClearTo(o int) {\n\tfor o = calc.MinInt(o, len(b.Tiles)); o >= 0; o-- {\n\t\tb.Tiles[o] = nil\n\t}\n}",
"func (b *Buffer) Reset() {\n\tb.writeCursor = 0\n\tb.written = 0\n}",
"func (p *Buffer) Reset() {\n\tp.buf = p.buf[0:0] // for reading/writing\n\tp.index = 0 // for reading\n\tp.err = nil\n\tp.array_indexes = nil\n}",
"func (tb *TransactionBuffer) Clear() {\n\ttb.mux.Lock()\n\ttb.Buffer = make([]TxPublish, 0)\n\ttb.mux.Unlock()\n}",
"func (v view) Clear() {\n\tfor i := 0; i < len(v.screen); i++ {\n\t\tv.screen[i].Clear()\n\t}\n}",
"func (c *Cipher) Reset() {\n\tfor i := range c.state {\n\t\tc.state[i] = 0\n\t}\n\tfor i := range c.buf {\n\t\tc.buf[i] = 0\n\t}\n}",
"func (r *Ring) Clear() {\n\tr.size, r.in, r.out = 0, 0, 0\n}",
"func ClearCommitBuffer() {\n\tvar commitBuffer RootCommitBuffer\n\tcWarning := color.New(color.FgYellow).SprintFunc()\n\n\tbufferFile, err := os.Open(SilkRoot() + \"/\" + RootDirectoryName + \"/commit/buffer\")\n\tif err != nil {\n\t\tfmt.Println(cWarning(\"\\n\\tError\") + \": unable to open commit buffer file\")\n\t\tfmt.Print(\"\\n\")\n\t}\n\tdefer bufferFile.Close()\n\n\t// Remove all bufferFile.Changes()\n\tbyteValue, err := ioutil.ReadAll(bufferFile)\n\tif err != nil {\n\t\tfmt.Println(cWarning(\"\\n\\tError\") + \": unable to read buffer file byte values\")\n\t\tfmt.Print(\"\\n\")\n\t}\n\n\terr = json.Unmarshal(byteValue, &commitBuffer)\n\tif err != nil {\n\t\tfmt.Println(cWarning(\"\\n\\tError\") + \": unable to create latest commit file\")\n\t\tfmt.Print(\"\\n\")\n\t}\n\n\tcommitBuffer.Changes = []FileChange{}\n\n\tcommitBufferJSON, err := json.MarshalIndent(commitBuffer, \" \", \"\")\n\tif err != nil {\n\t\tfmt.Println(cWarning(\"\\n\\tError\") + \": unable to marshal json to commit buffer\")\n\t\tfmt.Print(\"\\n\")\n\t}\n\n\terr = ioutil.WriteFile(SilkRoot()+\"/.silk/commit/buffer\", []byte(string(commitBufferJSON)+\"\\n\"), 0766)\n\tif err != nil {\n\t\tfmt.Println(cWarning(\"\\n\\tError\") + \": unable to write to commit buffer file\")\n\t\tfmt.Print(\"\\n\")\n\t}\n}",
"func (b *baseKVStoreBatch) Clear() {\n\tb.mutex.Lock()\n\tdefer b.mutex.Unlock()\n\tb.writeQueue = nil\n\n\tb.fillLock.Lock()\n\tdefer b.fillLock.Unlock()\n\tfor k := range b.fill {\n\t\tdelete(b.fill, k)\n\t}\n}",
"func (c *CmdBuff) Reset() {\n\tc.ClearText(true)\n\tc.SetActive(false)\n\tc.fireBufferCompleted(c.GetText(), c.GetSuggestion())\n}",
"func (display smallEpd) Clear() (err error) {\n\tlog.Debug(\"EPD42 Clear\")\n\n\tif err = display.sendCommand(DATA_START_TRANSMISSION_1); err != nil {\n\t\treturn\n\t}\n\n\t// TODO: Verify that this is enough bits\n\tfor i := 0; i < display.Width()*display.Height()/8; i++ {\n\t\tif err = display.sendData([]byte{0xFF}); err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\tif err = display.sendCommand(DATA_START_TRANSMISSION_2); err != nil {\n\t\treturn\n\t}\n\n\tfor i := 0; i < display.Width()*display.Height()/8; i++ {\n\t\tif err = display.sendData([]byte{0xFF}); err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\tif err = display.sendCommand(DISPLAY_REFRESH); err != nil {\n\t\treturn\n\t}\n\n\tif err = display.waitUntilIdle(); err != nil {\n\t\treturn\n\t}\n\n\tlog.Debug(\"EPD42 Clear End\")\n\treturn\n}",
"func (w *Window) Clear() {\n\tw.mx.Lock()\n\tw.start = 0\n\tw.Len = 0\n\tw.mx.Unlock()\n}",
"func Clear(mask Enum) {\n\tgl.Clear(uint32(mask))\n}",
"func (c *counter) reset() {\n\tc.messages, c.samples = 0, 0\n}",
"func (c *counter) reset() {\n\tc.messages, c.samples = 0, 0\n}",
"func (g *MyGame) Clear() {\n g.Points = []int{}\n g.Tables = []string{}\n g.States = []string{}\n g.CurTable = \"\"\n g.CurWords = []string{}\n g.CurRound = -1\n for _, _ = range g.Users {\n g.Points = append(g.Points, 0)\n }\n}",
"func (c *Canvas) Reset() {\n\tfor y := 0; uint8(y) < canvasHeight; y++ {\n\t\tfor x := 0; uint8(x) < canvasWidth; x++ {\n\t\t\t(*c)[y][x] = 0\n\t\t}\n\t}\n}",
"func (b *Buffer) ClearAt(o int) {\n\tif o < len(b.Tiles) {\n\t\tb.Tiles[o] = nil\n\t}\n}",
"func (r *RingBuffer) Reset() {\n\tr.mu.Lock()\n\tdefer r.mu.Unlock()\n\n\tr.rPos = 0\n\tr.wPos = 0\n\tr.isFull = false\n}",
"func (ctl *Controller) Clear() {\n\tfor i := 0; i < ctl.count; i++ {\n\t\tctl.SetColour(i, Off)\n\t}\n}",
"func (bb *ByteBuffer) Reset() {\n\tbb.B = bb.B[:0]\n}",
"func (m *Manager) clear() error {\n\tfor _, ch := range m.meter {\n\t\tif ch == '\\n' {\n\t\t\t_, err := os.Stdout.WriteString(\"\\x1b[1A\\x1b[2K\")\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}",
"func (ds *Dataset) Clear() {\n\tds.min = math.MaxFloat64\n\tds.max = math.SmallestNonzeroFloat64\n\tds.product = 1\n\tds.total = 0\n\tds.recipsum = 0\n\tds.values = ds.values[:0]\n}",
"func (b *batch) Reset() {\n\tb.batch.Clear()\n\tb.size = 0\n}",
"func Clear(secret []byte) {\n\tfor i := range secret {\n\t\tsecret[i] = 0 // Clear each byte\n\t}\n\tsecret = nil // Reset data slice\n}",
"func (r *RingBuffer) Reset() {\n\tr.mu.Lock()\n\tdefer r.mu.Unlock()\n\n\tr.r = 0\n\tr.w = 0\n\tr.isFull = false\n}",
"func (g *Grid) Clear() { g.rows = []ui.GridBufferer{} }",
"func Clear(mask Bitfield) {\n\tcmask, _ := (C.GLbitfield)(mask), cgoAllocsUnknown\n\tC.glClear(cmask)\n}",
"func Clear(mask uint32) {\n\tsyscall.Syscall(gpClear, 1, uintptr(mask), 0, 0)\n}",
"func (f *Flash) Clear() {\n\tfor k := range f.v {\n\t\tif !f.changed {\n\t\t\tf.changed = true\n\t\t}\n\t\tf.v.Del(k)\n\t}\n}",
"func (v *vncPlayer) Clear() {\n\tv.Lock()\n\tdefer v.Unlock()\n\n\tfor k, p := range v.m {\n\t\tlog.Debug(\"stopping kb playback for %v\", k)\n\t\tif err := p.Stop(); err != nil {\n\t\t\tlog.Error(\"%v\", err)\n\t\t}\n\n\t\tdelete(v.m, k)\n\t}\n}",
"func (b *baseKVStoreBatch) Clear() {\n\tb.mutex.Lock()\n\tdefer b.mutex.Unlock()\n\tb.writeQueue = nil\n}",
"func ClearBufferData(target uint32, internalformat uint32, format uint32, xtype uint32, data unsafe.Pointer) {\n\tsyscall.Syscall6(gpClearBufferData, 5, uintptr(target), uintptr(internalformat), uintptr(format), uintptr(xtype), uintptr(data), 0)\n}",
"func (batch *Batch) Clear() {\n\tbatch.mutex.Lock()\n\tdefer batch.mutex.Unlock()\n\tbatch.messages = batch.messages[0:0]\n}",
"func (native *OpenGL) Clear(mask uint32) {\n\tgl.Clear(mask)\n}",
"func clearbytes(bs []byte) {\n\tfor i := range bs {\n\t\tbs[i] = 0\n\t}\n}",
"func (bbw *Writer) Reset(buf []byte, extendable bool) {\n\tbbw.buf = buf\n\tif cap(bbw.buf) > 0 {\n\t\tbbw.buf = bbw.buf[:cap(bbw.buf)]\n\t}\n\tbbw.offs = 0\n\tbbw.clsdPos = -1\n\tbbw.ext = extendable\n}",
"func (r *RunningStats) Clear() {\n\tr.n = 0\n\tr.m1 = 0.0\n\tr.m2 = 0.0\n\tr.m3 = 0.0\n\tr.m4 = 0.0\n}",
"func (glx *Context) Clear() {\n\tglx.constants.Clear(glx.constants.COLOR_BUFFER_BIT)\n\tglx.constants.Clear(glx.constants.DEPTH_BUFFER_BIT)\n}",
"func (b *blockEnc) reset(prev *blockEnc) {\n\tb.extraLits = 0\n\tb.literals = b.literals[:0]\n\tb.size = 0\n\tb.sequences = b.sequences[:0]\n\tb.output = b.output[:0]\n\tb.last = false\n\tif prev != nil {\n\t\tb.recentOffsets = prev.prevRecentOffsets\n\t}\n\tb.dictLitEnc = nil\n}",
"func (b *ChangeBuffer) emptyBuffer() {\n\tlast := b.Back()\n\tfor last != nil {\n\t\tif last.Prev() != nil {\n\t\t\tlast = last.Prev()\n\t\t\tb.Remove(last.Next())\n\t\t} else {\n\t\t\tb.Remove(last)\n\t\t\tlast = nil\n\t\t}\n\t}\n}",
"func (x *Secp256k1N) Clear() {\n\tx.limbs[0] = 0\n\tx.limbs[1] = 0\n\tx.limbs[2] = 0\n\tx.limbs[3] = 0\n\tx.limbs[4] = 0\n}",
"func CleanActionBuffer(){\n\tActionBuffer=nil //throw to garbage collector\n\tInitiateActionBuffer()\n}",
"func (b *batch) Reset() {\n\tb.writes = b.writes[:0]\n\tb.size = 0\n}",
"func (b *Buffer) Reset() {\n\tb.mux.Lock()\n\tdefer b.mux.Unlock()\n\n\tb.data = make([]byte, len(b.data))\n\tb.dataSize = 0\n\tb.head, b.tail = 0, 0\n}",
"func (m *MsgBuffer) Reset() {\n\tm.Buffer.Reset()\n\tm.bs = m.bs[:0]\n\tm.err = nil\n}",
"func (bbw *Writer) Reset(buf []byte, extendable bool) {\n\tbbw.buf = buf\n\tif cap(bbw.buf) > 0 {\n\t\tbbw.buf = bbw.buf[:cap(bbw.buf)]\n\t}\n\tbbw.offs = 0\n\tbbw.clsdPos = 0\n\tbbw.noExt = !extendable\n}",
"func (d *Decoder) reset() {\n\tif unread := len(d.buf) - d.r1; unread == 0 {\n\t\t// No bytes in the buffer, so we can start from the beginning without\n\t\t// needing to copy anything (and get better cache behaviour too).\n\t\td.buf = d.buf[:0]\n\t\td.r1 = 0\n\t} else if !d.complete && unread <= maxSlide {\n\t\t// Slide the unread portion of the buffer to the\n\t\t// start so that when we read more data,\n\t\t// there's less chance that we'll need to grow the buffer.\n\t\tcopy(d.buf, d.buf[d.r1:])\n\t\td.r1 = 0\n\t\td.buf = d.buf[:unread]\n\t}\n\td.r0 = d.r1\n\td.escBuf = d.escBuf[:0]\n}",
"func (mc *MultiCursor) Clear() {\n\tmc.cursors = mc.cursors[0:1]\n}",
"func (s *System) clear() {\n\ts.vertex = 0\n\ts.indice = 0\n\n\tvar i int\n\tfor j := range s.particles {\n\t\tp := &s.particles[j]\n\t\tif p.progress >= 1 {\n\t\t\tcontinue\n\t\t}\n\n\t\tp.vertex = s.vertex\n\t\tp.indice = s.indice\n\n\t\ts.vertex += p.vertexes\n\t\ts.indice += p.indices\n\n\t\ts.particles[i] = *p\n\t\ti++\n\t}\n\n\ts.particles = s.particles[:i]\n\n\treturn\n}",
"func (b *messageBuffer) clear() {\n\tb.mu.Lock()\n\tbacklog := b.backlog\n\tb.backlog = nil\n\tb.mu.Unlock()\n\n\tselect {\n\tcase m := <-b.c:\n\t\tm.next()\n\tdefault:\n\t}\n\tfor _, m := range backlog {\n\t\tm.next()\n\t}\n}",
"func (s *SharedMemorySegment) Clear() {\n\tfor i := 0; i < len(s.data); i++ {\n\t\ts.data[i] = 0\n\t}\n}",
"func (ring *ringBuffer) destroy() {\n\t// do nothing in go\n}",
"func (e *Encoder) Reset(buf []byte) {\n\tfor k := range e.syms {\n\t\tdelete(e.syms, k)\n\t}\n\te.buf = buf[:0]\n}",
"func (b *ByteBuffer) Reset() {\n\tb.B = b.B[:0]\n}",
"func (be *Batch) Clear() {\n\tbe.errors = nil\n}",
"func (w *Writer) Reset() {\n\tw.buf = w.buf[:0]\n}",
"func (c *Chunk) Clear() {\n\tc.data = nil\n}",
"func (ex *Ex) Clear() {\n\tex.input.Clear()\n}",
"func (d *Display) Clear() {\n\td.sendCommand(0b00000001)\n\ttime.Sleep(10 * time.Millisecond) // long instruction, max 6.2ms\n}",
"func (b *RecordBuffer) Flush() {\n\tb.recordsInBuffer = b.recordsInBuffer[:0]\n\tb.sequencesInBuffer = b.sequencesInBuffer[:0]\n}"
] | [
"0.73622304",
"0.7274667",
"0.72177845",
"0.7180202",
"0.7105434",
"0.70081955",
"0.7007828",
"0.6998411",
"0.6987734",
"0.69397366",
"0.6896155",
"0.6880796",
"0.68779135",
"0.67908055",
"0.6777215",
"0.6763572",
"0.67466843",
"0.67050415",
"0.67035717",
"0.66774946",
"0.665214",
"0.66300035",
"0.6597652",
"0.6556426",
"0.6544201",
"0.6517131",
"0.64986295",
"0.6488991",
"0.6486305",
"0.64742374",
"0.64608574",
"0.64413863",
"0.64171964",
"0.6397872",
"0.6371868",
"0.6356182",
"0.63389665",
"0.63356173",
"0.6328066",
"0.6323698",
"0.63185537",
"0.63016075",
"0.62966126",
"0.6286802",
"0.62811863",
"0.627623",
"0.6275596",
"0.6244734",
"0.62227327",
"0.620539",
"0.6201757",
"0.6201198",
"0.61977845",
"0.6194031",
"0.6194031",
"0.6171421",
"0.61556315",
"0.6152827",
"0.6145092",
"0.6129979",
"0.61221904",
"0.611607",
"0.6100872",
"0.6086423",
"0.6075234",
"0.60672426",
"0.60654175",
"0.60639524",
"0.60588837",
"0.60577273",
"0.6053051",
"0.6051624",
"0.60495806",
"0.60462743",
"0.60430044",
"0.60400295",
"0.6038312",
"0.60251284",
"0.60230273",
"0.60220367",
"0.60177284",
"0.6017722",
"0.6012795",
"0.60110855",
"0.6010865",
"0.60074896",
"0.60067844",
"0.60010445",
"0.5993601",
"0.59767115",
"0.5974678",
"0.59718347",
"0.5964853",
"0.596351",
"0.5956956",
"0.5956893",
"0.5944529",
"0.5943594",
"0.59326154",
"0.59234643",
"0.59203225"
] | 0.0 | -1 |
specify clear values for the accumulation buffer | func ClearAccum(red float32, green float32, blue float32, alpha float32) {
C.glowClearAccum(gpClearAccum, (C.GLfloat)(red), (C.GLfloat)(green), (C.GLfloat)(blue), (C.GLfloat)(alpha))
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (s *UniformSample) Clear() {\n\ts.mutex.Lock()\n\tdefer s.mutex.Unlock()\n\ts.count = 0\n\ts.values = make([]int64, 0, s.reservoirSize)\n}",
"func (b *Buffer) Clear() {\n\tb.series = make(map[string]*influxdb.Series)\n\tb.size = 0\n}",
"func (this *channelStruct) Clear() {\n\tthis.samples = make([]float64, 0)\n}",
"func (rb *RingBuffer[T]) Clear() {\n\trb.mu.Lock()\n\tdefer rb.mu.Unlock()\n\trb.pos = 0\n\trb.buf = nil\n}",
"func (ds *Dataset) Clear() {\n\tds.min = math.MaxFloat64\n\tds.max = math.SmallestNonzeroFloat64\n\tds.product = 1\n\tds.total = 0\n\tds.recipsum = 0\n\tds.values = ds.values[:0]\n}",
"func (d *Driver) Clear() {\n\tfor i := 0; i < len(d.buff); i++ {\n\t\td.buff[i] = 0\n\t}\n}",
"func ClearAccum(red float32, green float32, blue float32, alpha float32) {\n\tsyscall.Syscall6(gpClearAccum, 4, uintptr(math.Float32bits(red)), uintptr(math.Float32bits(green)), uintptr(math.Float32bits(blue)), uintptr(math.Float32bits(alpha)), 0, 0)\n}",
"func (c *Concurrent) Clear() {\n\tfor {\n\t\tselect {\n\t\tcase <-c.values:\n\t\tdefault:\n\t\t\treturn\n\t\t}\n\t}\n}",
"func (s *StackF64) clear() {\n\tfor i := 0; i < len(s.data); i++ {\n\t\ts.data[i] = 0\n\t}\n}",
"func (NilUGauge) Clear() uint64 { return 0 }",
"func (b *baseKVStoreBatch) Clear() {\n\tb.mutex.Lock()\n\tdefer b.mutex.Unlock()\n\tb.writeQueue = nil\n\n\tb.fillLock.Lock()\n\tdefer b.fillLock.Unlock()\n\tfor k := range b.fill {\n\t\tdelete(b.fill, k)\n\t}\n}",
"func (self *nativePduBuffer) Clear() {\n\tself.start = 0\n\tself.count = 0\n}",
"func (tb *TransactionBuffer) Clear() {\n\ttb.mux.Lock()\n\ttb.Buffer = make([]TxPublish, 0)\n\ttb.mux.Unlock()\n}",
"func Clear() {\n\tfor i := 0; i < bufferLength; i++ {\n\t\tbuffer[i] = 0x00\n\t}\n}",
"func (r *Ring) Clear() {\n\tr.size, r.in, r.out = 0, 0, 0\n}",
"func (pb *PacketBuffer) Clear() {\n\tpb.numPackets = 0\n\tpb.offsets[0] = 0\n}",
"func (NilCounter) Clear() {}",
"func (NilCounter) Clear() {}",
"func (g *GaugeInt64) Clear() {\n\tatomic.StoreInt64(&g.val, 0)\n}",
"func (r *RunningStats) Clear() {\n\tr.n = 0\n\tr.m1 = 0.0\n\tr.m2 = 0.0\n\tr.m3 = 0.0\n\tr.m4 = 0.0\n}",
"func (c *standardResettingCounter) Clear() {\n\tatomic.StoreInt64(&c.count, 0)\n}",
"func (b *baseKVStoreBatch) Clear() {\n\tb.mutex.Lock()\n\tdefer b.mutex.Unlock()\n\tb.writeQueue = nil\n}",
"func (b *Buffer) Clear() {\n\tfor o := range b.Tiles {\n\t\tb.Tiles[o] = nil\n\t}\n}",
"func (n *aggregateFuncExpr) clear() {\n\tn.currentGroup = []byte{}\n\tn.contextPerGroupMap = nil\n}",
"func (w *buffer) reset() {\n\tfor i := range (*w)[:cap(*w)] {\n\t\t(*w)[i] = 0\n\t}\n\t*w = (*w)[:0]\n}",
"func (t *FileTarget) clearBuffer() {\n\tfor {\n\t\tselect {\n\t\tcase <-t.tick.C:\n\t\t\t_ = t.writer.Flush()\n\t\t}\n\t}\n}",
"func ClearAccum(red float32, green float32, blue float32, alpha float32) {\n C.glowClearAccum(gpClearAccum, (C.GLfloat)(red), (C.GLfloat)(green), (C.GLfloat)(blue), (C.GLfloat)(alpha))\n}",
"func (c *StandardCounter) Clear() {\n\tatomic.StoreInt64(&c.count, 0)\n}",
"func (c *StandardCounter) Clear() {\n\tatomic.StoreInt64(&c.count, 0)\n}",
"func (buf *Buffer) reset() {\n\tbuf.DNCReports = nil\n\tbuf.DNCReports = make([]DNCReport, 0)\n\tbuf.CNIReports = nil\n\tbuf.CNIReports = make([]CNIReport, 0)\n\tbuf.NPMReports = nil\n\tbuf.NPMReports = make([]NPMReport, 0)\n\tbuf.CNSReports = nil\n\tbuf.CNSReports = make([]CNSReport, 0)\n\tpayloadSize = 0\n}",
"func (v *Data) UClear() {\n\t*v = (*v)[:0]\n}",
"func (b *Buffer) Clear() {\n\tb.currentSize = 0\n\tb.contents = map[entity.Key]inventoryapi.PostDeltaBody{}\n}",
"func (b *messageBuffer) clear() {\n\tb.mu.Lock()\n\tbacklog := b.backlog\n\tb.backlog = nil\n\tb.mu.Unlock()\n\n\tselect {\n\tcase m := <-b.c:\n\t\tm.next()\n\tdefault:\n\t}\n\tfor _, m := range backlog {\n\t\tm.next()\n\t}\n}",
"func (queue *Queue) Clear() {\n\tqueue.data = queue.data[:0]\n}",
"func (z *Int) Clear() *Int {\n\tz[3], z[2], z[1], z[0] = 0, 0, 0, 0\n\treturn z\n}",
"func (b *Buf) Reset() { b.b = b.b[:0] }",
"func (b *Buffer) Clear() {\n\tb.mu.Lock()\n\tdefer b.mu.Unlock()\n\tif err := b.flushAll(); err != nil {\n\t\tb.logger.ErrorContext(context.Background(), `k-stream.changelog.buffer`, err)\n\t}\n\n}",
"func (speech *SpeechSync) ClearBuffer() {\n\tspeech.dataInfo.ClearBuffer()\n}",
"func (c *counter) reset() {\n\tc.messages, c.samples = 0, 0\n}",
"func (c *counter) reset() {\n\tc.messages, c.samples = 0, 0\n}",
"func (batch *Batch) Clear() {\n\tbatch.mutex.Lock()\n\tdefer batch.mutex.Unlock()\n\tbatch.messages = batch.messages[0:0]\n}",
"func (m *MetricUnion) Reset() { *m = emptyMetricUnion }",
"func (b *Buffer) Reset() {\n b.size = 0\n b.offset = 0\n}",
"func (debugging *debuggingOpenGL) Clear(mask uint32) {\n\tdebugging.recordEntry(\"Clear\", mask)\n\tdebugging.gl.Clear(mask)\n\tdebugging.recordExit(\"Clear\")\n}",
"func (q *Queue) Clear() {\n\tq.items = []Lit{}\n}",
"func (c *Canvas) Clear() error {\n\tb, err := buffer.New(c.Size())\n\tif err != nil {\n\t\treturn err\n\t}\n\tc.buffer = b\n\treturn nil\n}",
"func (fb *FlatBatch) Reset() {\n\tfb.lock.Lock()\n\tdefer fb.lock.Unlock()\n\n\tfb.keysize, fb.valsize = 0, 0\n\tfb.keys = fb.keys[:0]\n\tfb.vals = fb.vals[:0]\n}",
"func (it iterator) clear(b *ringBuf) {\n\tb.buf[it] = raftpb.Entry{}\n}",
"func (s *Statistics) reset() {\n\ts.cycles++\n\ts.totalMessagesCleared += s.messagesCleared\n\n\ts.memoryCleared = 0\n\ts.messagesCleared = 0\n}",
"func (e *eventsBatcher) clear() {\n\te.meta = map[string]int{}\n\te.evts = []*evtsapi.Event{}\n\te.expiredEvts = []*evtsapi.Event{}\n}",
"func (v *Data) Clear() {\n\tv.Truncate(0)\n}",
"func (arr *FloatArray) Clear() {\n\tnewArr := *arr\n\t*arr = newArr[:0]\n}",
"func (c *Counter) Clear() {\n\tc.mut.Lock()\n\tdefer c.mut.Unlock()\n\n\tc.count = make(map[interface{}]int)\n}",
"func (x *Secp256k1N) Clear() {\n\tx.limbs[0] = 0\n\tx.limbs[1] = 0\n\tx.limbs[2] = 0\n\tx.limbs[3] = 0\n\tx.limbs[4] = 0\n}",
"func (r *PendingPodsRecorder) Clear() {\n\tr.recorder.Set(float64(0))\n}",
"func (console *testConsole) Clear() {\n\tconsole.bufMx.Lock()\n\tconsole.buf = console.buf[:0]\n\tconsole.bufMx.Unlock()\n}",
"func (i *Input) Clear() {\n\ti.Pos = 0\n\ti.Buffer = NewLine()\n}",
"func (b Bitmask) ClearAll() {\n\tfor i := range b {\n\t\tb[i] = 0\n\t}\n}",
"func (d *Display) resetBuffer() {\n\td.width = d.device.Width()\n\td.height = d.device.Height()\n\td.buffer = make([][]byte, d.height)\n\tfor y := range d.buffer {\n\t\td.buffer[y] = make([]byte, d.width)\n\t}\n}",
"func (q *Queue) clear() ([500]*model.RawTweet, int) {\n\trts := q.rts\n\tsize := q.size\n\n\tq.rts = [500]*model.RawTweet{}\n\tq.size = 0\n\n\treturn rts, size\n}",
"func (g *StandardUGauge) Clear() uint64 {\n\treturn atomic.SwapUint64(&g.value, 0)\n}",
"func (c *TimerCond) Clear() {\n\titerOptionalFields(reflect.ValueOf(c).Elem(), nil, func(name string, val optionalVal) bool {\n\t\tval.Clear()\n\t\treturn true\n\t})\n\tc.KeyPrefix = false\n}",
"func (frac *Fractal) Clear() {\n\tfrac.R = histo.New(frac.Width, frac.Height)\n\tfrac.G = histo.New(frac.Width, frac.Height)\n\tfrac.B = histo.New(frac.Width, frac.Height)\n}",
"func (b *Buffer) Reset() {\n\tb.Line = b.Line[:0]\n\tb.Val = b.Val[:0]\n}",
"func (dc *DatadogCollector) Reset() {}",
"func (c *SyncCollector) Clear() {\n\tfor k := range c.c {\n\t\tc.rw.Lock()\n\t\tdelete(c.c, k)\n\t\tc.rw.Unlock()\n\t}\n}",
"func (m *Manager) clear() error {\n\tfor _, ch := range m.meter {\n\t\tif ch == '\\n' {\n\t\t\t_, err := os.Stdout.WriteString(\"\\x1b[1A\\x1b[2K\")\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}",
"func (k *MutableKey) Clear() {\n\tfor v := range k.vals {\n\t\tdelete(k.vals, v)\n\t\tk.synced = false\n\t}\n}",
"func (e *Timing) Reset() {\n\te.Min = 0\n\te.Max = 0\n\te.Value = 0\n\te.Values = make(float64Slice, 0)\n\te.Count = 0\n}",
"func (m BoolMemConcurrentMap) Clear() {\n\tfor item := range m.IterBuffered() {\n\t\tm.Remove(item.Key)\n\t}\n}",
"func (b *Buffer) ClearTo(o int) {\n\tfor o = calc.MinInt(o, len(b.Tiles)); o >= 0; o-- {\n\t\tb.Tiles[o] = nil\n\t}\n}",
"func (c ShmCounter) Clear() {\n\tatomic.StoreInt64((*int64)(unsafe.Pointer(c)), 0)\n}",
"func (r *BasicResampler) Reset() {\n\tr.sampleAggregates = r.sampleAggregates[:0]\n}",
"func (ss *SequenceStats) Clear() {\n\tss.Min.Clear()\n\tss.Max.Clear()\n\tss.Variance.Reset()\n}",
"func (s *System) clear() {\n\ts.vertex = 0\n\ts.indice = 0\n\n\tvar i int\n\tfor j := range s.particles {\n\t\tp := &s.particles[j]\n\t\tif p.progress >= 1 {\n\t\t\tcontinue\n\t\t}\n\n\t\tp.vertex = s.vertex\n\t\tp.indice = s.indice\n\n\t\ts.vertex += p.vertexes\n\t\ts.indice += p.indices\n\n\t\ts.particles[i] = *p\n\t\ti++\n\t}\n\n\ts.particles = s.particles[:i]\n\n\treturn\n}",
"func (c Collector) Clear() {\n\tfor k := range c {\n\t\tdelete(c, k)\n\t}\n}",
"func (s *Streams) Clear() {\n\ts.Buffer.Clear()\n}",
"func (c *PNGCopyCounters) Clear() {\n\tc.StoredBytes = 0\n\tif c.CopiedBytes >= c.ToCopy && c.Created {\n\t\tc.Image = nil\n\t\tc.Buffer = nil\n\t}\n}",
"func (s *SharedMemorySegment) Clear() {\n\tfor i := 0; i < len(s.data); i++ {\n\t\ts.data[i] = 0\n\t}\n}",
"func (c *TimeAvgAggregator) Reset(w Window) {\n\tc.integral = 0\n\tif c.initialized {\n\t\tc.startTime = w.StartTime\n\t\tc.startValue = c.endValue\n\t\tc.endTime = w.EndTime\n\t}\n}",
"func (i *IQR) Clear() {\n\ti.quantile.Clear()\n}",
"func (v *IntVec) Clear() {\n\tv.Truncate(0)\n}",
"func (c *Counter) flush() {\n\tatomic.StoreUint64(&c.value, 0)\n}",
"func (b *Buffer) Reset() {\n\tb.B = b.B[:0]\n}",
"func Clear(secret []byte) {\n\tfor i := range secret {\n\t\tsecret[i] = 0 // Clear each byte\n\t}\n\tsecret = nil // Reset data slice\n}",
"func (h *Histogram) Reset() {\n\th.Counter.Reset()\n\t// Leave Offset and Divider alone\n\tfor i := 0; i < len(h.Hdata); i++ {\n\t\th.Hdata[i] = 0\n\t}\n}",
"func (v *Int32Vec) Clear() {\n\tv.Truncate(0)\n}",
"func (this *ByteQueue) TokenClear() {\n\tthis.recordReset = true\n}",
"func (gdt *Array) Clear() {\n\targ0 := gdt.getBase()\n\n\tC.go_godot_array_clear(GDNative.api, arg0)\n}",
"func (b *batch) Reset() {\n\tb.batch.Clear()\n\tb.size = 0\n}",
"func (m *metricVec) Reset() { m.metricMap.Reset() }",
"func (b *ChangeBuffer) emptyBuffer() {\n\tlast := b.Back()\n\tfor last != nil {\n\t\tif last.Prev() != nil {\n\t\t\tlast = last.Prev()\n\t\t\tb.Remove(last.Next())\n\t\t} else {\n\t\t\tb.Remove(last)\n\t\t\tlast = nil\n\t\t}\n\t}\n}",
"func (hc *cmdCollector) Reset() {\n}",
"func TestAccumulatorCycle_NoValue(t *testing.T) {\n var targetSampleCount = uint32(100) // the sample count is larger than the number of values provided within this test\n \n var accumulator = NewAccumulator(\"stream\", 0, math.MaxInt64, OrdinalInterval, targetSampleCount)\n \n // this test covers a scenario in which no values are received within the interval\n // .. no values are provided to the accumulator ..\n \n // finalise the accumulator and gather statistics\n var statistics = accumulator.Finalise()\n \n assert.Equal(t, uint64(0), statistics.Count, \"Count should be 0 when no values where provided to the accumulator\")\n assert.Equal(t, statistics.Count, uint64(statistics.SampleCount), \"Sample count must count when all values are included in the sample set\")\n}",
"func (rra *RoundRobinArchive) clear() {\n\tif len(rra.dps) > 0 {\n\t\trra.dps = make(map[int64]float64)\n\t}\n}",
"func (r *PackageAggRow) ClearCount() { r.Data.Count = nil }",
"func (buffer *Buffer) Reset() {\n\tbuffer.B.Reset()\n}",
"func (v *Bitmap256) Clear(pos uint8) {\n\tv[pos>>6] &= ^(1 << (pos & 63))\n}",
"func (f *Flash) Clear() {\n\tfor k := range f.v {\n\t\tif !f.changed {\n\t\t\tf.changed = true\n\t\t}\n\t\tf.v.Del(k)\n\t}\n}",
"func (s *activeSeriesStripe) clear() {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\n\ts.oldestEntryTs.Store(0)\n\ts.refs = map[uint64][]activeSeriesEntry{}\n\ts.active = 0\n}"
] | [
"0.6964405",
"0.67491966",
"0.67166877",
"0.6640258",
"0.6579979",
"0.6576724",
"0.65722305",
"0.6554715",
"0.6526159",
"0.64833635",
"0.64789146",
"0.6452193",
"0.6428444",
"0.64101845",
"0.6362103",
"0.6338142",
"0.6329918",
"0.6329918",
"0.624533",
"0.6206225",
"0.62022775",
"0.61679703",
"0.6149324",
"0.6137383",
"0.6088605",
"0.60813355",
"0.60532635",
"0.6013449",
"0.6013449",
"0.60088235",
"0.5976666",
"0.59599364",
"0.5954747",
"0.59522253",
"0.5947834",
"0.59466565",
"0.5943304",
"0.5928917",
"0.5916799",
"0.5916799",
"0.59145814",
"0.58987105",
"0.58851427",
"0.5879721",
"0.58770996",
"0.58733606",
"0.58588177",
"0.58464307",
"0.5833797",
"0.5818301",
"0.5817861",
"0.58153415",
"0.5804885",
"0.57942766",
"0.57938033",
"0.5793222",
"0.5791569",
"0.5786207",
"0.57785004",
"0.5776698",
"0.57710236",
"0.5765485",
"0.5761803",
"0.5752139",
"0.5742882",
"0.57420915",
"0.5738719",
"0.57335037",
"0.5732501",
"0.5721843",
"0.57191557",
"0.57163745",
"0.571246",
"0.57104254",
"0.5709837",
"0.5701865",
"0.5701531",
"0.57010734",
"0.57006866",
"0.56992316",
"0.5689009",
"0.56889635",
"0.5688167",
"0.5668538",
"0.564894",
"0.5647449",
"0.5644387",
"0.56442034",
"0.5635936",
"0.5624063",
"0.5620053",
"0.5613774",
"0.55942225",
"0.55888927",
"0.55705667",
"0.5569868",
"0.5567351",
"0.5564559",
"0.55608594",
"0.5552454"
] | 0.6179983 | 21 |
fill a buffer object's data store with a fixed value | func ClearBufferData(target uint32, internalformat uint32, format uint32, xtype uint32, data unsafe.Pointer) {
C.glowClearBufferData(gpClearBufferData, (C.GLenum)(target), (C.GLenum)(internalformat), (C.GLenum)(format), (C.GLenum)(xtype), data)
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"func (dataBlock *DataBlock) fill(offset uint64, buf *bytes.Buffer) uint64 {\n\tbyteToFill := dataBlock.Capacity - offset\n\tbyteActualFill := buf.Next(int(byteToFill)) // API user...\n\tif len(byteActualFill) < int(byteToFill) {\n\t\t// Not sure if the byte at `offset` would be covered by new value...???????????????\n\t\tleftover := dataBlock.Data[(int(offset) + len(byteActualFill)):]\n\t\tdataBlock.Data = append(dataBlock.Data[:offset], append(byteActualFill, leftover...)...)\n\t} else {\n\t\tdataBlock.Data = append(dataBlock.Data[:offset], byteActualFill...)\n\t}\n\treturn uint64(len(byteActualFill))\n}",
"func (b *mpgBuff) fill(buff *concBuff) {\n\tbuff.Lock()\n\tn, err := b.fileDec.Read(buff.data)\n\tbuff.len = n\n\tbuff.pos = 0\n\tif err == mpg123.EOF {\n\t\tb.eof = true\n\t}\n\tbuff.Unlock()\n}",
"func (b base) Fill(value uint64) {\n\tbinary.PutUvarint(b.inputBuffer[b.seedLen:], value)\n\t// base64 encoding reduce byte size\n\t// from i to o\n\tb64encoder.Encode(b.outputBuffer, b.inputBuffer)\n}",
"func (r *BytesRecord) Fill(rid RID, version int, content []byte) error {\n\tr.RID = rid\n\tr.Vers = version\n\tr.Data = content\n\treturn nil\n}",
"func (b *Binding) Set(buf uint32) {\n\tgl.BindBufferBase(gl.SHADER_STORAGE_BUFFER, b.uint32, buf)\n}",
"func (al *AudioListener) setBuffer(size int) {\n\tal.Lock()\n\tdefer al.Unlock()\n\n\tal.buffer = make([]gumble.AudioPacket, 0, size)\n}",
"func (d *OneToOne) Set(data GenericDataType) {\n\tidx := d.writeIndex % uint64(len(d.buffer))\n\n\tnewBucket := &bucket{\n\t\tdata: data,\n\t\tseq: d.writeIndex,\n\t}\n\td.writeIndex++\n\n\tatomic.StorePointer(&d.buffer[idx], unsafe.Pointer(newBucket))\n}",
"func newBuffer(bits uint32) buffer {\n\tvar b buffer\n\tb.data = make([]unsafe.Pointer, 1<<bits)\n\tb.free = 1 << bits\n\tb.mask = 1<<bits - 1\n\tb.bits = bits\n\treturn b\n}",
"func (addr *Bytes) Store(val []byte) {\n\taddr.v.Store(val)\n}",
"func (self Source) SetBuffer(buffer Buffer) {\n\tself.Seti(AlBuffer, int32(buffer))\n}",
"func NewBuffer(aSlice interface{}) *Buffer {\n return &Buffer{buffer: sliceValue(aSlice, false), handler: valueHandler{}}\n}",
"func ringBufferInitBuffer(buflen uint32, rb *ringBuffer) {\n\tvar new_data []byte\n\tvar i uint\n\tsize := 2 + int(buflen) + int(kSlackForEightByteHashingEverywhere)\n\tif cap(rb.data_) < size {\n\t\tnew_data = make([]byte, size)\n\t} else {\n\t\tnew_data = rb.data_[:size]\n\t}\n\tif rb.data_ != nil {\n\t\tcopy(new_data, rb.data_[:2+rb.cur_size_+uint32(kSlackForEightByteHashingEverywhere)])\n\t}\n\n\trb.data_ = new_data\n\trb.cur_size_ = buflen\n\trb.buffer_ = rb.data_[2:]\n\trb.data_[1] = 0\n\trb.data_[0] = rb.data_[1]\n\tfor i = 0; i < kSlackForEightByteHashingEverywhere; i++ {\n\t\trb.buffer_[rb.cur_size_+uint32(i)] = 0\n\t}\n}",
"func (vm *VM) bufferVariable(variable Variable) {\n\tvm.bufferPush(variable.Value())\n}",
"func (vm *VM) bufferPush(data string) {\n\tnewBuffer := &buffer{\n\t\tprevious: vm.buffer,\n\t\tvalue: data,\n\t}\n\tvm.buffer = newBuffer\n}",
"func (rb *RingBuffer) Add(value stats.Record) {\n\trb.lock.Lock()\n\tdefer rb.lock.Unlock()\n\trb.data[rb.seq%uint64(len(rb.data))] = value\n\trb.seq++\n}",
"func (b *buffer) fill(need int) (err error) {\n\tb.idx = 0\n\tb.length = 0\n\n\tvar n int\n\tfor b.length < need {\n\t\tn, err = b.rd.Read(b.buf[b.length:])\n\t\tb.length += n\n\n\t\tif err == nil {\n\t\t\tcontinue\n\t\t}\n\t\treturn // err\n\t}\n\n\treturn\n}",
"func newBuffer(buf []byte) *Buffer {\n\treturn &Buffer{data: buf}\n}",
"func (geom Geometry) Buffer(distance float64, segments int) Geometry {\n\tnewGeom := C.OGR_G_Buffer(geom.cval, C.double(distance), C.int(segments))\n\treturn Geometry{newGeom}\n}",
"func WriteCount(buffer []byte, offset int, value byte, valueCount int) {\n for i := 0; i < valueCount; i++ {\n buffer[offset + i] = value\n }\n}",
"func BufferStorage(target uint32, size int, data unsafe.Pointer, flags uint32) {\n\tsyscall.Syscall6(gpBufferStorage, 4, uintptr(target), uintptr(size), uintptr(data), uintptr(flags), 0, 0)\n}",
"func (b *Buffer) AttachNew() {\n b.data = make([]byte, 0)\n b.size = 0\n b.offset = 0\n}",
"func (debugging *debuggingOpenGL) BufferData(target uint32, size int, data interface{}, usage uint32) {\n\tdebugging.recordEntry(\"BufferData\", target, size, data, usage)\n\tdebugging.gl.BufferData(target, size, data, usage)\n\tdebugging.recordExit(\"BufferData\")\n}",
"func (src *Source) SetNewBuffer() {\n\tsrc.buf = make([]byte, 64)\n}",
"func (w *Writer) SetBuffer(raw []byte) {\n\tif w.err != nil {\n\t\treturn\n\t}\n\tw.b = w.b[:0]\n\tw.b = append(w.b, raw...)\n}",
"func newBuffer(r io.Reader, offset int64) *buffer {\n\treturn &buffer{\n\t\tr: r,\n\t\toffset: offset,\n\t\tbuf: make([]byte, 0, 4096),\n\t\tallowObjptr: true,\n\t\tallowStream: true,\n\t}\n}",
"func (p *movingAverageProcessor) addBufferData(index int, data interface{}, namespace string) error {\n\tif _, ok := p.movingAverageMap[namespace]; ok {\n\t\tif index >= len(p.movingAverageMap[namespace].movingAverageBuf) {\n\t\t\treturn errors.New(\"Incorrect value of index, trying to access non-existing element of buffer\")\n\t\t}\n\t\tp.movingAverageMap[namespace].movingAverageBuf[index] = data\n\t\treturn nil\n\t} else {\n\t\treturn errors.New(\"Namespace is not present in the map\")\n\t}\n}",
"func (src *Source) SetBuffer(buf []byte) {\n\tsrc.buf = buf\n}",
"func BufferStorage(target uint32, size int, data unsafe.Pointer, flags uint32) {\n C.glowBufferStorage(gpBufferStorage, (C.GLenum)(target), (C.GLsizeiptr)(size), data, (C.GLbitfield)(flags))\n}",
"func (bs endecBytes) fill(offset int, fields ...interface{}) int {\n\tfor _, val := range fields {\n\t\tswitch val.(type) {\n\t\tcase byte:\n\t\t\tbs[offset] = val.(byte)\n\t\t\toffset++\n\t\tcase uint16:\n\t\t\tbinary.BigEndian.PutUint16(bs[offset:], val.(uint16))\n\t\t\toffset += 2\n\t\tcase uint32: // remaingLength\n\t\t\tn := binary.PutUvarint(bs[offset:], uint64(val.(uint32)))\n\t\t\toffset += n\n\t\tcase string:\n\t\t\tstr := val.(string)\n\t\t\tbinary.BigEndian.PutUint16(bs[offset:], uint16(len(str)))\n\t\t\toffset += 2\n\t\t\toffset += copy(bs[offset:], str)\n\t\tcase []byte:\n\t\t\toffset += copy(bs[offset:], val.([]byte))\n\t\tdefault: // unknown type\n\t\t\treturn -offset\n\t\t}\n\t}\n\treturn offset\n}",
"func (r *Ring) set(p int, v interface{}) {\n\tr.buff[r.mod(p)] = v\n}",
"func newBuffer(e []byte) *Buffer {\n\tp := buffer_pool.Get().(*Buffer)\n\tp.buf = e\n\treturn p\n}",
"func (b *buffer) grow() {\n\t// ugh all these atomics\n\tatomic.AddUint32(&b.free, uint32(len(b.data)))\n\tatomic.AddUint32(&b.mask, atomic.LoadUint32(&b.mask))\n\tatomic.AddUint32(&b.mask, 1)\n\tatomic.AddUint32(&b.bits, 1)\n\n\tnext := make([]unsafe.Pointer, 2*len(b.data))\n\tcopy(next, b.data)\n\n\t// UGH need to do this with atomics. one pointer + 2 uint64 calls?\n\tb.data = next\n}",
"func (s *Arena) putVal(v y.ValueStruct) uint32 {\n\tl := uint32(v.EncodedSize())\n\toffset := s.allocate(l) - l\n\tbuf := s.data[offset : offset+l]\n\tv.Encode(buf)\n\treturn offset\n}",
"func newBuffer() Buffer {\n\treturn &buffer{\n\t\tbytes: make([]byte, 0, 64),\n\t}\n}",
"func newBuffer() *buffer {\n\treturn &buffer{\n\t\tdata: make([]byte, 0),\n\t\tlen: 0,\n\t\tpkg: nil,\n\t\tconn: nil,\n\t\tpkgCh: make(chan *pkg),\n\t\tevCh: make(chan *pkg),\n\t\terrCh: make(chan error, 1),\n\t}\n}",
"func (g *GLTF) loadBuffer(bufIdx int) ([]byte, error) {\n\n\t// Check if provided buffer index is valid\n\tif bufIdx < 0 || bufIdx >= len(g.Buffers) {\n\t\treturn nil, fmt.Errorf(\"invalid buffer index\")\n\t}\n\tbufData := &g.Buffers[bufIdx]\n\t// Return cached if available\n\tif bufData.cache != nil {\n\t\tlog.Debug(\"Fetching Buffer %d (cached)\", bufIdx)\n\t\treturn bufData.cache, nil\n\t}\n\tlog.Debug(\"Loading Buffer %d\", bufIdx)\n\n\t// If buffer URI use the chunk data field\n\tif bufData.Uri == \"\" {\n\t\treturn g.data, nil\n\t}\n\n\t// Checks if buffer URI is a data URI\n\tvar data []byte\n\tvar err error\n\tif isDataURL(bufData.Uri) {\n\t\tdata, err = loadDataURL(bufData.Uri)\n\t} else {\n\t\t// Try to load buffer from file\n\t\tdata, err = g.loadFileBytes(bufData.Uri)\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Checks data length\n\tif len(data) != bufData.ByteLength {\n\t\treturn nil, fmt.Errorf(\"buffer:%d read data length:%d expected:%d\", bufIdx, len(data), bufData.ByteLength)\n\t}\n\t// Cache buffer data\n\tg.Buffers[bufIdx].cache = data\n\tlog.Debug(\"cache data:%v\", len(bufData.cache))\n\treturn data, nil\n}",
"func (d *decoder) buffer() []byte {\n\tif d.buf == nil {\n\t\td.buf = make([]byte, 8)\n\t}\n\treturn d.buf\n}",
"func (b *Buffer) Data() []byte { return b.data }",
"func (b *Ring) push(value interface{}) {\n\tb.lock.Lock()\n\tdefer b.lock.Unlock()\n\tif len(b.buf) == 0 || b.size == 0 { // nothing to do\n\t\treturn\n\t}\n\tnext := Next(1, b.head, len(b.buf))\n\tb.buf[next] = value\n\tb.head = next\n\t// note that the oldest is auto pruned, when size== capacity, but with the size attribute we know it has been discarded\n}",
"func (m *MsgMemoryBuffer) Insert(value interface{}) {\n\tm.buff.Value = value\n\tm.buff = m.buff.Next()\n}",
"func (b *CompactableBuffer) Update(address *EntryAddress, data []byte) error {\n\taddress.LockForWrite()\n\tdefer address.UnlockWrite()\n\theader, err := b.ReadHeader(address)\n\tif err != nil {\n\t\treturn err\n\t}\n\tbeforeUpdataDataSize := header.dataSize\n\tafterUpdateDataSize := len(data) + VarIntSize(len(data))\n\tdataSizeDelta := afterUpdateDataSize - int(beforeUpdataDataSize)\n\n\tremainingSpace := int(header.entrySize) - reservedSize - afterUpdateDataSize\n\theader.dataSize = int64(afterUpdateDataSize)\n\tif remainingSpace <= 0 {\n\t\tatomic.AddInt64(&b.dataSize, int64(-beforeUpdataDataSize))\n\t\tatomic.AddInt64(&b.entrySize, int64(-header.entrySize))\n\t\treturn b.expand(address, data)\n\t}\n\n\tatomic.AddInt64(&b.dataSize, int64(dataSizeDelta))\n\tvar target = make([]byte, 0)\n\tAppendToBytes(data, &target)\n\tif len(target) > int(header.dataSize) {\n\t\treturn io.EOF\n\t}\n\twritableBuffer := b.writableBuffer()\n\t_, err = writableBuffer.Write(address.Position()+reservedSize, target...)\n\treturn err\n}",
"func newBuffer(b []byte) *buffer {\n\treturn &buffer{proto.NewBuffer(b), 0}\n}",
"func (native *OpenGL) BufferData(target uint32, size int, data interface{}, usage uint32) {\n\tdataPtr, isPtr := data.(unsafe.Pointer)\n\tif isPtr {\n\t\tgl.BufferData(target, size, dataPtr, usage)\n\t} else {\n\t\tgl.BufferData(target, size, gl.Ptr(data), usage)\n\t}\n}",
"func putBuffer(buf *bytes.Buffer) {\n\tbuf.Reset()\n\tbufferPool.Put(buf)\n}",
"func (bp *bufferPool) putBuffer(b *buffer) {\n\tbp.lock.Lock()\n\tif bp.freeBufNum < 1000 {\n\t\tb.next = bp.freeList\n\t\tbp.freeList = b\n\t\tbp.freeBufNum++\n\t}\n\tbp.lock.Unlock()\n}",
"func (c webgl) BufferDataX(target Enum, d interface{}, usage Enum) {\n\tc.ctx.Call(\"bufferData\", target, conv(d), usage)\n}",
"func BufferData(target uint32, size int, data unsafe.Pointer, usage uint32) {\n\tsyscall.Syscall6(gpBufferData, 4, uintptr(target), uintptr(size), uintptr(data), uintptr(usage), 0, 0)\n}",
"func NewEmptyBuffer() *Buffer {\n return &Buffer{data: make([]byte, 0)}\n}",
"func new_buffer(conn *websocket.Conn, ctrl chan struct{}, txqueuelen int) *Buffer {\n\tbuf := Buffer{conn: conn}\n\tbuf.pending = make(chan []byte, txqueuelen)\n\tbuf.ctrl = ctrl\n\tbuf.cache = make([]byte, packet.PACKET_LIMIT+2)\n\treturn &buf\n}",
"func (storage *Storage) SetValue(key string, value []byte) {\n\tfullEntity := encodeRecord(key, string(value))\n\tstorage.mutex.Lock()\n\tbytesWritten := addBlock(storage.file, fullEntity)\n\toffset := int64(storage.size) + int64(len(key)) + 8\n\tatomic.AddInt64(&storage.size, int64(bytesWritten))\n\tstorage.mutex.Unlock()\n\tstorage.store.Store(key, Coords{offset: offset, len: len(value)})\n}",
"func BufferData(target Enum, src []byte, usage Enum) {\n\tgl.BufferData(uint32(target), int(len(src)), gl.Ptr(&src[0]), uint32(usage))\n}",
"func initBuffer(size int) {\n\tif len(buffer) == size {\n\t\treturn\n\t}\n\tbuffer = make([]uint8, size)\n}",
"func (d *Display) resetBuffer() {\n\td.width = d.device.Width()\n\td.height = d.device.Height()\n\td.buffer = make([][]byte, d.height)\n\tfor y := range d.buffer {\n\t\td.buffer[y] = make([]byte, d.width)\n\t}\n}",
"func (b *Ring) add(val interface{}) error {\n\tif b.size >= len(b.buf) {\n\t\treturn ErrFull\n\t}\n\tb.lock.Lock()\n\tdefer b.lock.Unlock()\n\n\tnext := Next(1, b.head, len(b.buf))\n\tb.buf[next] = val\n\tb.head = next\n\tb.size++ // increase the inner size\n\treturn nil\n}",
"func NewBuffer(capacity int, fn func(series []*influxdb.Series)) *Buffer {\n\tb := &Buffer{\n\t\tfn: fn,\n\t\tin: make(chan *influxdb.Series),\n\t\tseries: make(map[string]*influxdb.Series),\n\t\tcapacity: capacity,\n\t}\n\tif b.capacity > 0 {\n\t\tgo b.aggregate()\n\t}\n\n\treturn b\n}",
"func BufferInit(target Enum, size int, usage Enum) {\n\tgl.BufferData(uint32(target), size, nil, uint32(usage))\n}",
"func (c *fakeRedisConn) SetReadBuffer(bytes int) {}",
"func (b *Buffer) Store(record *data.Record) {\n\tb.mu.Lock()\n\tdefer b.mu.Unlock()\n\n\tb.records = append(b.records, record)\n\n\tif len(b.records) >= b.bufferSize {\n\t\tb.flush()\n\t}\n}",
"func (b *Builder) Buffer(count int) value.Pointer {\n\tpointerSize := b.memoryLayout.GetPointer().GetSize()\n\tdynamic := false\n\tsize := 0\n\n\t// Examine the stack to see where these values came from\n\tfor i := 0; i < count; i++ {\n\t\te := b.stack[len(b.stack)-i-1]\n\t\top := b.instructions[e.idx]\n\t\tty := e.ty\n\t\tif _, isPush := op.(asm.Push); !isPush {\n\t\t\t// Values that have made their way on to the stack from non-constant\n\t\t\t// sources cannot be put in the constant buffer.\n\t\t\tdynamic = true\n\t\t}\n\t\tswitch ty {\n\t\tcase protocol.Type_ConstantPointer, protocol.Type_VolatilePointer:\n\t\t\t// Pointers cannot be put into the constant buffer as they are remapped\n\t\t\t// by the VM\n\t\t\tdynamic = true\n\t\t}\n\n\t\tsize += ty.Size(pointerSize)\n\t}\n\n\tif dynamic {\n\t\t// At least one of the values was not from a Push()\n\t\t// Build the buffer in temporary memory at replay time.\n\t\tbuf := b.AllocateTemporaryMemory(uint64(size))\n\t\toffset := size\n\t\tfor i := 0; i < count; i++ {\n\t\t\te := b.stack[len(b.stack)-1]\n\t\t\toffset -= e.ty.Size(pointerSize)\n\t\t\tb.Store(buf.Offset(uint64(offset)))\n\t\t}\n\t\treturn buf\n\t}\n\t// All the values are constant.\n\t// Move the pushed values into a constant memory buffer.\n\tvalues := make([]value.Value, count)\n\tfor i := 0; i < count; i++ {\n\t\te := b.stack[len(b.stack)-1]\n\t\tvalues[count-i-1] = b.instructions[e.idx].(asm.Push).Value\n\t\tb.removeInstruction(e.idx)\n\t\tb.popStack()\n\t}\n\treturn b.constantMemory.writeValues(values...)\n}",
"func (self *ValueStore) Append(options WriteOptions, instanceId uint64, buffer []byte, fileIdStr *string) error {\n begin := util.NowTimeMs()\n\n self.mutex.Lock()\n defer self.mutex.Unlock()\n\n bufferLen := len(buffer)\n len := util.UINT64SIZE + bufferLen\n tmpBufLen := len + util.INT32SIZE\n\n var fileId int32\n var offset uint32\n err := self.getFileId(uint32(tmpBufLen), &fileId, &offset)\n if err != nil {\n return err\n }\n\n tmpBuf := make([]byte, tmpBufLen)\n util.EncodeInt32(tmpBuf, 0, int32(len))\n util.EncodeUint64(tmpBuf, util.INT32SIZE, instanceId)\n copy(tmpBuf[util.INT32SIZE+util.UINT64SIZE:], []byte(buffer))\n\n ret, err := self.file.Write(tmpBuf)\n if ret != tmpBufLen {\n err = fmt.Errorf(\"writelen %d not equal to %d,buffer size %d\",\n ret, tmpBufLen, bufferLen)\n return err\n }\n\n if options.Sync {\n self.file.Sync()\n }\n\n self.nowFileOffset += uint64(tmpBufLen)\n\n ckSum := util.Crc32(0, tmpBuf[util.INT32SIZE:], common.CRC32_SKIP)\n self.EncodeFileId(fileId, uint64(offset), ckSum, fileIdStr)\n\n useMs := util.NowTimeMs() - begin\n\n log.Info(\"ok, offset %d fileid %d cksum %d instanceid %d buffersize %d usetime %d ms sync %t\",\n offset, fileId, ckSum, instanceId, bufferLen, useMs, options.Sync)\n return nil\n}",
"func (p *Buffer) SetBuf(s []byte) {\n\tp.buf = s\n\tp.index = 0\n\tp.length = len(s)\n}",
"func BufferData(target uint32, size int, data unsafe.Pointer, usage uint32) {\n C.glowBufferData(gpBufferData, (C.GLenum)(target), (C.GLsizeiptr)(size), data, (C.GLenum)(usage))\n}",
"func (lvs *ValueStore) bufferChunk(v Value, c chunks.Chunk, height uint64, hints Hints) {\n\tlvs.pendingMu.Lock()\n\tdefer lvs.pendingMu.Unlock()\n\th := c.Hash()\n\td.Chk.NotZero(height)\n\tlvs.pendingPuts[h] = pendingChunk{c, height, hints}\n\tlvs.pendingPutSize += uint64(len(c.Data()))\n\n\tputChildren := func(parent hash.Hash) (dataPut int) {\n\t\tpc, present := lvs.pendingPuts[parent]\n\t\td.Chk.True(present)\n\t\tv := DecodeValue(pc.c, lvs)\n\t\tv.WalkRefs(func(grandchildRef Ref) {\n\t\t\tif pc, present := lvs.pendingPuts[grandchildRef.TargetHash()]; present {\n\t\t\t\tlvs.bs.SchedulePut(pc.c, pc.height, pc.hints)\n\t\t\t\tdataPut += len(pc.c.Data())\n\t\t\t\tdelete(lvs.pendingPuts, grandchildRef.TargetHash())\n\t\t\t}\n\t\t})\n\t\treturn\n\t}\n\n\t// Enforce invariant (1)\n\tif height > 1 {\n\t\tv.WalkRefs(func(childRef Ref) {\n\t\t\tchildHash := childRef.TargetHash()\n\t\t\tif _, present := lvs.pendingPuts[childHash]; present {\n\t\t\t\tlvs.pendingParents[h] = height\n\t\t\t} else {\n\t\t\t\t// Shouldn't be able to be in pendingParents without being in pendingPuts\n\t\t\t\t_, present := lvs.pendingParents[childHash]\n\t\t\t\td.Chk.False(present)\n\t\t\t}\n\n\t\t\tif _, present := lvs.pendingParents[childHash]; present {\n\t\t\t\tlvs.pendingPutSize -= uint64(putChildren(childHash))\n\t\t\t\tdelete(lvs.pendingParents, childHash)\n\t\t\t}\n\t\t})\n\t}\n\n\t// Enforce invariant (2)\n\tfor lvs.pendingPutSize > lvs.pendingPutMax {\n\t\tvar tallest hash.Hash\n\t\tvar height uint64 = 0\n\t\tfor parent, ht := range lvs.pendingParents {\n\t\t\tif ht > height {\n\t\t\t\ttallest = parent\n\t\t\t\theight = ht\n\t\t\t}\n\t\t}\n\t\tif height == 0 { // This can happen if there are no pending parents\n\t\t\tvar pc pendingChunk\n\t\t\tfor tallest, pc = range lvs.pendingPuts {\n\t\t\t\t// Any pendingPut is as good as another in this case, so take the first one\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tlvs.bs.SchedulePut(pc.c, pc.height, pc.hints)\n\t\t\tlvs.pendingPutSize -= uint64(len(pc.c.Data()))\n\t\t\tdelete(lvs.pendingPuts, tallest)\n\t\t\tcontinue\n\t\t}\n\n\t\tlvs.pendingPutSize -= uint64(putChildren(tallest))\n\t\tdelete(lvs.pendingParents, tallest)\n\t}\n}",
"func NewBuffer(capacity int) Buffer {\n\treturn Buffer{\n\t\tcapacity: capacity,\n\t\tcurrentSize: 0,\n\t\tcontents: map[entity.Key]inventoryapi.PostDeltaBody{},\n\t}\n}",
"func (ab *Buffer) Add(ctx context.Context, obj interface{}) {\n\tif ab.Tracer != nil {\n\t\tfinisher := ab.Tracer.StartAdd(ctx)\n\t\tdefer finisher.Finish(nil)\n\t}\n\tvar bufferLength int\n\tif ab.Stats != nil {\n\t\tab.maybeStatCount(ctx, MetricAdd, 1)\n\t\tstart := time.Now().UTC()\n\t\tdefer func() {\n\t\t\tab.maybeStatGauge(ctx, MetricBufferLength, float64(bufferLength))\n\t\t\tab.maybeStatElapsed(ctx, MetricAddElapsed, start)\n\t\t}()\n\t}\n\n\tvar flush []interface{}\n\tab.contentsMu.Lock()\n\tbufferLength = ab.contents.Len()\n\tab.contents.Enqueue(obj)\n\tif ab.contents.Len() >= ab.MaxLen {\n\t\tflush = ab.contents.Drain()\n\t}\n\tab.contentsMu.Unlock()\n\tab.unsafeFlushAsync(ctx, flush)\n}",
"func newDatabaseBuffer() databaseBuffer {\n\tb := &dbBuffer{\n\t\tbucketsMap: make(map[xtime.UnixNano]*BufferBucketVersions),\n\t\tinOrderBlockStarts: make([]xtime.UnixNano, 0, bucketsCacheSize),\n\t}\n\treturn b\n}",
"func (p *byteBufferPool) give(buf *[]byte) {\n\tif buf == nil {\n\t\treturn\n\t}\n\tsize := cap(*buf)\n\tslot := p.slot(size)\n\tif slot == errSlot {\n\t\treturn\n\t}\n\tif size != int(p.pool[slot].defaultSize) {\n\t\treturn\n\t}\n\tp.pool[slot].pool.Put(buf)\n}",
"func NewBuffer() *Buffer {\n\treturn &Buffer{Line: []byte{}, Val: make([]byte, 0, 32)}\n}",
"func (gl *WebGL) BufferData(target GLEnum, data interface{}, usage GLEnum) {\n\tvalues := sliceToTypedArray(data)\n\tgl.context.Call(\"bufferData\", target, values, usage)\n}",
"func (client *Client) addToBuffer(key string, metricValue string) {\n\t// build metric\n\tmetric := fmt.Sprintf(\"%s:%s\", key, metricValue)\n\n\t// flush\n\tif client.keyBuffer == nil {\n\t\t// send metric now\n\t\tgo client.send(metric)\n\t} else {\n\t\t// add metric to buffer for next manual flush\n\t\tclient.keyBufferLock.Lock()\n\t\tclient.keyBuffer = append(client.keyBuffer, metric)\n\t\tclient.keyBufferLock.Unlock()\n\t}\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n proto.NewProxyWithBuffer(buffer),\n NewStructSimpleModel(buffer),\n NewStructOptionalModel(buffer),\n NewStructNestedModel(buffer),\n NewStructBytesModel(buffer),\n NewStructArrayModel(buffer),\n NewStructVectorModel(buffer),\n NewStructListModel(buffer),\n NewStructSetModel(buffer),\n NewStructMapModel(buffer),\n NewStructHashModel(buffer),\n NewStructHashExModel(buffer),\n NewStructEmptyModel(buffer),\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyStructSimpleFunc(func(model *StructSimpleModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructOptionalFunc(func(model *StructOptionalModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructNestedFunc(func(model *StructNestedModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructBytesFunc(func(model *StructBytesModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructArrayFunc(func(model *StructArrayModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructVectorFunc(func(model *StructVectorModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructListFunc(func(model *StructListModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructSetFunc(func(model *StructSetModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructMapFunc(func(model *StructMapModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructHashFunc(func(model *StructHashModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructHashExFunc(func(model *StructHashExModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyStructEmptyFunc(func(model *StructEmptyModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func (b *Buffer) Sync() {\n\tb.SetArea(b.Bounds())\n}",
"func (j *JSendBuilderBuffer) Data(data interface{}) JSendBuilder {\n\treturn j.Set(FieldData, data)\n}",
"func (l *Logger) initLoggerBuffer() (err error) {\n\t// build\n\tl.LoggerBuffer = LoggerBuffer{}\n\n\t// get serial data\n\tlConfig, err := l.getSerialConfig()\n\tif err != nil {\n\t\treturn\n\t}\n\n\tport, err := serial.OpenPort(lConfig)\n\tif err != nil {\n\t\treturn\n\t}\n\tl.serialPort = *port\n\n\tgo func() {\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-l.stop:\n\t\t\t\treturn\n\t\t\tdefault:\n\t\t\t\t// get data\n\t\t\t\tbuf := make([]byte, bufSize)\n\n\t\t\t\tn, err := port.Read(buf)\n\t\t\t\ttime := time.Now().UTC()\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Print(err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\t//log.Printf(\"NEW DATA [%03d]: %02X %03d\", n, buf[:n], buf[:n]) // LOG\n\n\t\t\t\t// push to LoggerBuffer\n\t\t\t\tt := util.TimestampBuilder(time)\n\t\t\t\tdu := DataUnit{\n\t\t\t\t\tData: buf[:n],\n\t\t\t\t\tTime: &t,\n\t\t\t\t}\n\n\t\t\t\tl.DataUnit = append(l.DataUnit, &du)\n\n\t\t\t\t// feed consumers\n\t\t\t\tfor _, c := range l.consumers {\n\t\t\t\t\tc <- du\n\n\t\t\t\t\tif l.config.Debug {\n\t\t\t\t\t\tlog.Print(\"Data received: \", du.PrettyString())\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Flush every time new data is received\n\t\t\t\tl.flush()\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn\n}",
"func (s *scratch) add(c byte) {\n\tif s.fill+1 >= cap(s.data) {\n\t\ts.grow()\n\t}\n\n\ts.data[s.fill] = c\n\ts.fill++\n}",
"func (b *Buf) Reset() { b.b = b.b[:0] }",
"func (b *Buffer) AttachBytes(buffer []byte, offset int, size int) {\n if len(buffer) < size {\n panic(\"invalid buffer\")\n }\n if size <= 0 {\n panic(\"invalid size\")\n }\n if offset > size {\n panic(\"invalid offset\")\n }\n\n b.data = buffer\n b.size = size\n b.offset = offset\n}",
"func (u *Uintptr) Store(val uintptr) {\n\tatomic.StoreUintptr(&u.v, val)\n}",
"func NewCapacityBuffer(capacity int) *Buffer {\n return &Buffer{data: make([]byte, capacity)}\n}",
"func advanceBuffer(buff *bytes.Buffer, num int) {\n\tbuff.Next(num)\n\t// move buffer from num offset to 0\n\tbytearr := buff.Bytes()\n\tbuff.Reset()\n\tbuff.Write(bytearr)\n}",
"func (tb *TelemetryBuffer) BufferAndPushData(intervalms time.Duration) {\n\tdefer tb.close()\n\tif !tb.FdExists {\n\t\ttelemetryLogger.Printf(\"[Telemetry] Buffer telemetry data and send it to host\")\n\t\tif intervalms < DefaultInterval {\n\t\t\tintervalms = DefaultInterval\n\t\t}\n\n\t\tinterval := time.NewTicker(intervalms).C\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-interval:\n\t\t\t\t// Send payload to host and clear cache when sent successfully\n\t\t\t\t// To-do : if we hit max slice size in payload, write to disk and process the logs on disk on future sends\n\t\t\t\ttelemetryLogger.Printf(\"[Telemetry] send data to host\")\n\t\t\t\tif err := tb.sendToHost(); err == nil {\n\t\t\t\t\ttb.payload.reset()\n\t\t\t\t} else {\n\t\t\t\t\ttelemetryLogger.Printf(\"[Telemetry] sending to host failed with error %+v\", err)\n\t\t\t\t}\n\t\t\tcase report := <-tb.data:\n\t\t\t\ttelemetryLogger.Printf(\"[Telemetry] Got data..Append it to buffer\")\n\t\t\t\ttb.payload.push(report)\n\t\t\tcase <-tb.cancel:\n\t\t\t\tgoto EXIT\n\t\t\t}\n\t\t}\n\t} else {\n\t\t<-tb.cancel\n\t}\n\nEXIT:\n}",
"func (g *GrowingBuffer) Values() interface{} {\n return g.bufferPtr.Elem().Interface()\n}",
"func (_e *MockTestTransportInstance_Expecter) FillBuffer(until interface{}) *MockTestTransportInstance_FillBuffer_Call {\n\treturn &MockTestTransportInstance_FillBuffer_Call{Call: _e.mock.On(\"FillBuffer\", until)}\n}",
"func NewProxyWithBuffer(buffer *fbe.Buffer) *Proxy {\n proxy := &Proxy{\n fbe.NewReceiver(buffer, false),\n NewOrderModel(buffer),\n NewBalanceModel(buffer),\n NewAccountModel(buffer),\n nil,\n nil,\n nil,\n }\n proxy.SetupHandlerOnReceive(proxy)\n proxy.SetupHandlerOnProxyOrderFunc(func(model *OrderModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyBalanceFunc(func(model *BalanceModel, fbeType int, buffer []byte) {})\n proxy.SetupHandlerOnProxyAccountFunc(func(model *AccountModel, fbeType int, buffer []byte) {})\n return proxy\n}",
"func Put(buffer *Buffer) {\n\t// Prohibit uninitialized buffers from being added to the pool\n\tif buffer.arena == nil {\n\t\tpanic(\"invalid Buffer object\")\n\t}\n\t// Resetting happens inside put\n\tfreeList.put(buffer)\n}",
"func (b *BufferPool) Flush(interface{}) {\n\tb.Lock()\n\tb.Data = make([]byte, b.Size)\n\tb.Unlock()\n}",
"func (b *BufferManager) SetBuffer(peer *PeerSession) {\n\tb.lock.Lock()\n\tdefer b.lock.Unlock()\n\toffset, ok := b.freeIndex.TryDequeue()\n\tif ok {\n\t\tpeer.bufferOffst = offset.(int64)\n\t\tpeer.buffers = b.buffers[peer.bufferOffst : peer.bufferOffst+int64(b.bufferSize)]\n\t} else {\n\t\tif b.totalBytes-int64(b.bufferSize) < b.currentIndex {\n\t\t\tpeer.buffers = make([]byte, b.bufferSize)\n\t\t\tpeer.bufferOffst = -1\n\t\t\t//The buffer pool is empty.\n\t\t\t//return false\n\t\t} else {\n\t\t\tpeer.bufferOffst = b.currentIndex\n\t\t\tpeer.buffers = b.buffers[peer.bufferOffst : peer.bufferOffst+int64(b.bufferSize)]\n\t\t\tb.currentIndex += int64(b.bufferSize)\n\t\t}\n\t}\n\t//return true\n}",
"func (dc *FixedLenByteArrayDictConverter) FillZero(out interface{}) {\n\to := out.([]parquet.FixedLenByteArray)\n\to[0] = dc.zeroVal\n\tfor i := 1; i < len(o); i *= 2 {\n\t\tcopy(o[i:], o[:i])\n\t}\n}",
"func (gen *DataGen) Init(m *pktmbuf.Packet, args ...any) {\n\tdata := ndn.MakeData(args...)\n\twire, e := tlv.EncodeValueOnly(data)\n\tif e != nil {\n\t\tlogger.Panic(\"encode Data error\", zap.Error(e))\n\t}\n\n\tm.SetHeadroom(0)\n\tif e := m.Append(wire); e != nil {\n\t\tlogger.Panic(\"insufficient dataroom\", zap.Error(e))\n\t}\n\tbufBegin := unsafe.Pointer(unsafe.SliceData(m.SegmentBytes()[0]))\n\tbufEnd := unsafe.Add(bufBegin, len(wire))\n\t*gen = DataGen{\n\t\ttpl: (*C.struct_rte_mbuf)(m.Ptr()),\n\t\tmeta: unsafe.SliceData(C.DataEnc_NoMetaInfo[:]),\n\t\tcontentIov: [1]C.struct_iovec{{\n\t\t\tiov_base: bufEnd,\n\t\t}},\n\t}\n\n\td := tlv.DecodingBuffer(wire)\n\tfor _, de := range d.Elements() {\n\t\tswitch de.Type {\n\t\tcase an.TtName:\n\t\t\tgen.suffix = C.LName{\n\t\t\t\tvalue: (*C.uint8_t)(unsafe.Add(bufEnd, -len(de.After)-de.Length())),\n\t\t\t\tlength: C.uint16_t(de.Length()),\n\t\t\t}\n\t\tcase an.TtMetaInfo:\n\t\t\tgen.meta = (*C.uint8_t)(unsafe.Add(bufEnd, -len(de.WireAfter())))\n\t\tcase an.TtContent:\n\t\t\tgen.contentIov[0] = C.struct_iovec{\n\t\t\t\tiov_base: unsafe.Add(bufEnd, -len(de.After)-de.Length()),\n\t\t\t\tiov_len: C.size_t(de.Length()),\n\t\t\t}\n\t\t}\n\t}\n\n\tC.rte_pktmbuf_adj(gen.tpl, C.uint16_t(uintptr(gen.contentIov[0].iov_base)-uintptr(bufBegin)))\n\tC.rte_pktmbuf_trim(gen.tpl, C.uint16_t(C.size_t(gen.tpl.pkt_len)-gen.contentIov[0].iov_len))\n}",
"func New(i int) *Buffer {\n\treturn &Buffer{\n\t\tsize: i,\n\t}\n}",
"func (c *SimpleMemoryCache) Set(ctx *Context, next Next) {\n\tc.data[ctx.Key] = ctx.Value.(int64)\n\n\t// call next to make sure all memory cache ready\n\t// maybe you should check next is nil or not\n\tnext(ctx)\n}",
"func BufferStorage(target uint32, size int, data unsafe.Pointer, flags uint32) {\n\tC.glowBufferStorage(gpBufferStorage, (C.GLenum)(target), (C.GLsizeiptr)(size), data, (C.GLbitfield)(flags))\n}",
"func BufferStorage(target uint32, size int, data unsafe.Pointer, flags uint32) {\n\tC.glowBufferStorage(gpBufferStorage, (C.GLenum)(target), (C.GLsizeiptr)(size), data, (C.GLbitfield)(flags))\n}",
"func NewBuffer() *Buffer { return globalPool.NewBuffer() }",
"func (b *BufferPool) Put(data interface{}) (n int64, err error) {\n\tvar putData []byte\n\tvar putDataLength int64\n\tvar blackspaceLength int64\n\tswitch info := data.(type) {\n\tcase string:\n\t\tputData = []byte(info)\n\t\tputDataLength = int64(len(putData))\n\tcase []byte:\n\t\tputData = info\n\t\tputDataLength = int64(len(putData))\n\tdefault:\n\t\treturn 0, TYPEERROR\n\t}\n\n\tb.Lock()\n\t// free buffer size smaller than data size which will be written to.\n\tif b.Free < int64(len(putData)) {\n\t\taddRate := math.Ceil(float64(putDataLength) / float64(b.Size))\n\t\tif addRate <= 1 {\n\t\t\taddRate = 2\n\t\t}\n\t\tif b.AutoIncrement == true {\n\t\t\tblackspaceLength = b.Size*int64(addRate) - b.Used - putDataLength\n\t\t} else {\n\t\t\treturn 0, BUFFERNOTENOUGH\n\t\t}\n\t} else {\n\t\tblackspaceLength = b.Free - putDataLength\n\t}\n\tb.Data = append(b.Data[:b.Used], putData...)\n\tb.Data = append(b.Data, make([]byte, blackspaceLength)...)\n\tb.Used = b.Used + putDataLength\n\tb.Free = blackspaceLength\n\tb.Size = b.Used + b.Free\n\tb.Unlock()\n\treturn putDataLength, nil\n}",
"func (c *ChannelData) grow(v int) {\n\tn := len(c.Raw) + v\n\tfor cap(c.Raw) < n {\n\t\tc.Raw = append(c.Raw, 0)\n\t}\n\tc.Raw = c.Raw[:n]\n}",
"func Memset(data []byte, value byte) {\n\tif value == 0 {\n\t\tfor i := range data {\n\t\t\tdata[i] = 0\n\t\t}\n\t} else if len(data) != 0 {\n\t\tdata[0] = value\n\n\t\tfor i := 1; i < len(data); i *= 2 {\n\t\t\tcopy(data[i:], data[:i])\n\t\t}\n\t}\n}",
"func (_this *StreamingReadBuffer) Init(reader io.Reader, bufferSize int, minFreeBytes int) {\n\tif cap(_this.Buffer) < bufferSize {\n\t\t_this.Buffer = make([]byte, 0, bufferSize)\n\t} else {\n\t\t_this.Buffer = _this.Buffer[:0]\n\t}\n\t_this.reader = reader\n\t_this.minFreeBytes = minFreeBytes\n}",
"func (d *Object) flush() {\n\n\td.buf.Pos = offsetFieldCount\n\td.buf.WriteUint16(d.fieldCount)\n\n\td.buf.Pos = offsetSize\n\td.buf.WriteUint24(d.size)\n}",
"func NewBuffer(p producer.Producer, size int, flushInterval time.Duration, logger log.Logger) *Buffer {\n\tflush := 1 * time.Second\n\tif flushInterval != 0 {\n\t\tflush = flushInterval\n\t}\n\n\tb := &Buffer{\n\t\trecords: make([]*data.Record, 0, size),\n\t\tmu: new(sync.Mutex),\n\t\tproducer: p,\n\t\tbufferSize: size,\n\t\tlogger: logger,\n\t\tshouldFlush: make(chan bool, 1),\n\t\tflushInterval: flush,\n\t\tlastFlushed: time.Now(),\n\t}\n\n\tgo b.runFlusher()\n\n\treturn b\n}",
"func (r *DBReader) SetBuffer(buffer io.Reader) {\n\tr.buffer = buffer\n}"
] | [
"0.5932752",
"0.5734838",
"0.5678285",
"0.56575704",
"0.565682",
"0.5643374",
"0.5642318",
"0.55881345",
"0.5571034",
"0.55051994",
"0.5500698",
"0.5496386",
"0.5485012",
"0.5478254",
"0.54011256",
"0.53939354",
"0.53479874",
"0.5335982",
"0.5331352",
"0.532966",
"0.5309759",
"0.5296057",
"0.5286058",
"0.52831966",
"0.5281891",
"0.52748287",
"0.5247775",
"0.52266586",
"0.5222271",
"0.52146494",
"0.5214304",
"0.5211984",
"0.5196486",
"0.5192103",
"0.518555",
"0.5175499",
"0.5173198",
"0.5169991",
"0.5167336",
"0.51523554",
"0.51521516",
"0.515018",
"0.5145603",
"0.51449215",
"0.5137784",
"0.51350534",
"0.5117907",
"0.5108699",
"0.51039183",
"0.508503",
"0.508456",
"0.50689125",
"0.5051397",
"0.5045432",
"0.5045167",
"0.5038706",
"0.5033315",
"0.50119543",
"0.50049376",
"0.5002351",
"0.50016403",
"0.49979174",
"0.49967128",
"0.4985536",
"0.49722958",
"0.4970573",
"0.49704558",
"0.49689072",
"0.4968269",
"0.49679992",
"0.49672273",
"0.4965999",
"0.49625775",
"0.4960239",
"0.4955156",
"0.49476492",
"0.49473548",
"0.49403834",
"0.49390844",
"0.49358124",
"0.49352425",
"0.49278685",
"0.49226865",
"0.49095052",
"0.48937458",
"0.48934686",
"0.48890376",
"0.48757586",
"0.4874286",
"0.48737428",
"0.48647708",
"0.48567122",
"0.48567122",
"0.48552528",
"0.4849615",
"0.48482677",
"0.48477465",
"0.48437017",
"0.4843111",
"0.4842041",
"0.48395348"
] | 0.0 | -1 |