query
stringlengths
7
3.85k
document
stringlengths
11
430k
metadata
dict
negatives
sequencelengths
0
101
negative_scores
sequencelengths
0
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
CompareAllocatableResources compares the `expected` and `got` maps of zone name to allocatable resources (see: AllocatableResourceListFromNodeResourceTopology) and informs the caller whether the maps are equal. Here `equal` means the maps contain the same zone names with equal resource lists, where two resource lists are equal if they contain the same resource names with the same quantities. Returns the name of the first differing zone, the name of the differing resource within that zone, the comparison result (same semantics as strings.Compare) and a boolean that reports whether the resource lists are consistent. See `CompareResourceList`.
func CompareAllocatableResources(expected, got map[string]corev1.ResourceList) (string, string, int, bool) { if len(got) != len(expected) { framework.Logf("-> expected=%v (len=%d) got=%v (len=%d)", expected, len(expected), got, len(got)) return "", "", 0, false } for expZoneName, expResList := range expected { gotResList, ok := got[expZoneName] if !ok { return expZoneName, "", 0, false } if resName, cmp, ok := CompareResourceList(expResList, gotResList); !ok || cmp != 0 { return expZoneName, resName, cmp, ok } } return "", "", 0, true }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func CompareResourceList(expected, got corev1.ResourceList) (string, int, bool) {\n\tif len(got) != len(expected) {\n\t\tframework.Logf(\"-> expected=%v (len=%d) got=%v (len=%d)\", expected, len(expected), got, len(got))\n\t\treturn \"\", 0, false\n\t}\n\tfor expResName, expResQty := range expected {\n\t\tgotResQty, ok := got[expResName]\n\t\tif !ok {\n\t\t\treturn string(expResName), 0, false\n\t\t}\n\t\tif cmp := gotResQty.Cmp(expResQty); cmp != 0 {\n\t\t\tframework.Logf(\"-> resource=%q cmp=%d expected=%v got=%v\", expResName, cmp, expResQty, gotResQty)\n\t\t\treturn string(expResName), cmp, true\n\t\t}\n\t}\n\treturn \"\", 0, true\n}", "func CompareResources(resA, resB types.Resource) int {\n\tequal := cmp.Equal(resA, resB,\n\t\tignoreProtoXXXFields(),\n\t\tcmpopts.IgnoreFields(types.Metadata{}, \"ID\"),\n\t\tcmpopts.IgnoreFields(types.DatabaseV3{}, \"Status\"),\n\t\tcmpopts.EquateEmpty(),\n\t)\n\tif equal {\n\t\treturn Equal\n\t}\n\treturn Different\n}", "func ResourcesEqual(a, b map[string]envoy.Resource) bool {\n\n\tif len(a) != len(b) {\n\t\treturn false\n\t}\n\n\tfor name, resource := range a {\n\t\tif !proto.Equal(resource, b[name]) {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func (r *Compare) Compare() (map[string][]schema.GroupVersionResource, error) {\n\tpreferredSrcResourceList, err := collectPreferredResources(r.SrcDiscovery)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsrcCRDResource, err := collectPreferredCRDResource(r.SrcDiscovery)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdstResourceList, err := collectNamespacedResources(r.DstDiscovery)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpreferredSrcResourceList, err = r.excludeCRDs(preferredSrcResourceList, srcCRDResource, r.SrcClient)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresourcesDiff := r.compareResources(preferredSrcResourceList, dstResourceList)\n\tincompatibleGVKs, err := convertToGVRList(resourcesDiff)\n\tif err != nil {\n\t\treturn nil, 
err\n\t}\n\n\t// Don't report an incompatibleGVK if user settings will skip resource anyways\n\texcludedResources := toStringSlice(settings.ExcludedInitialResources.Union(toSet(r.Plan.Status.ExcludedResources)))\n\tfilteredGVKs := []schema.GroupVersionResource{}\n\tfor _, gvr := range incompatibleGVKs {\n\t\tskip := false\n\t\tfor _, resource := range excludedResources {\n\t\t\tif strings.EqualFold(gvr.Resource, resource) {\n\t\t\t\tskip = true\n\t\t\t}\n\t\t}\n\t\tif !skip {\n\t\t\tfilteredGVKs = append(filteredGVKs, gvr)\n\t\t}\n\t}\n\n\treturn r.collectIncompatibleMapping(filteredGVKs)\n}", "func (r *Compare) CompareCRDs() (map[string][]schema.GroupVersionResource, error) {\n\tsrcCRDResource, err := collectPreferredCRDResource(r.SrcDiscovery)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdstCRDResourceList, err := collectCRDResources(r.DstDiscovery)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcrdGVDiff := r.compareResources(srcCRDResource, dstCRDResourceList)\n\t// if len(crdGVDiff)>0, then CRD APIVersion is incompatible between src and dest\n\tif len(crdGVDiff) > 0 {\n\t\tsrcCRDs, err := collectPreferredResources(r.SrcDiscovery)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tsrcCRDs, err = r.includeCRDsOnly(srcCRDs, srcCRDResource, r.SrcClient)\n\n\t\tdstCRDs, err := collectNamespacedResources(r.DstDiscovery)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tdstCRDs, err = r.includeCRDsOnly(dstCRDs, dstCRDResourceList, r.DstClient)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tcrdsDiff := r.compareResources(srcCRDs, dstCRDs)\n\t\tincompatibleGVKs, err := convertToGVRList(crdsDiff)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\t// Don't report an incompatibleGVK if user settings will skip resource anyways\n\t\texcludedResources := toStringSlice(settings.ExcludedInitialResources.Union(toSet(r.Plan.Status.ExcludedResources)))\n\t\tfilteredGVKs := []schema.GroupVersionResource{}\n\t\tfor _, gvr := range incompatibleGVKs 
{\n\t\t\tskip := false\n\t\t\tfor _, resource := range excludedResources {\n\t\t\t\tif strings.EqualFold(gvr.Resource, resource) {\n\t\t\t\t\tskip = true\n\t\t\t\t}\n\t\t\t}\n\t\t\tif !skip {\n\t\t\t\tfilteredGVKs = append(filteredGVKs, gvr)\n\t\t\t}\n\t\t}\n\n\t\treturn r.collectIncompatibleMapping(filteredGVKs)\n\t}\n\treturn nil, nil\n}", "func (s *SchedulerSpec) ExpectedResources(rsrc interface{}) []ResourceInfo {\n\tr := rsrc.(*AirflowCluster)\n\trsrcInfos := []ResourceInfo{\n\t\tResourceInfo{LifecycleManaged, s.serviceaccount(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.rb(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.sts(r), \"\"},\n\t}\n\n\tif r.Spec.DAGs != nil {\n\t\tgit := r.Spec.DAGs.Git\n\t\tif git != nil && git.CredSecretRef != nil {\n\t\t\trsrcInfos = append(rsrcInfos,\n\t\t\t\tResourceInfo{LifecycleReferred,\n\t\t\t\t\t&resources.Secret{\n\t\t\t\t\t\tSecret: &corev1.Secret{\n\t\t\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\t\t\tNamespace: r.Namespace,\n\t\t\t\t\t\t\t\tName: git.CredSecretRef.Name,\n\t\t\t\t\t\t\t}}},\n\t\t\t\t\t\"\"})\n\t\t}\n\t}\n\n\treturn rsrcInfos\n}", "func (s *AirflowUISpec) ExpectedResources(rsrc interface{}) []ResourceInfo {\n\tr := rsrc.(*AirflowCluster)\n\treturn []ResourceInfo{\n\t\tResourceInfo{LifecycleManaged, s.sts(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.secret(r), \"\"},\n\t}\n}", "func (s *NFSStoreSpec) ExpectedResources(rsrc interface{}) []ResourceInfo {\n\tr := rsrc.(*AirflowBase)\n\treturn []ResourceInfo{\n\t\tResourceInfo{LifecycleManaged, s.sts(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.service(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.podDisruption(r), \"\"},\n\t}\n}", "func (s *FlowerSpec) ExpectedResources(rsrc interface{}) []ResourceInfo {\n\tr := rsrc.(*AirflowCluster)\n\treturn []ResourceInfo{\n\t\tResourceInfo{LifecycleManaged, s.sts(r), \"\"},\n\t}\n}", "func (p *Plan) CompareResultSets(regressDir string, expectedDir string, t *tap.T) {\n\tfor i, rs := range 
p.ResultSets {\n\t\ttestName := strings.TrimPrefix(rs.Filename, regressDir+\"/out/\")\n\t\texpectedFilename := filepath.Join(expectedDir,\n\t\t\tfilepath.Base(rs.Filename))\n\t\tdiff, err := DiffFiles(expectedFilename, rs.Filename, 3)\n\n\t\tif err != nil {\n\t\t\tt.Diagnostic(\n\t\t\t\tfmt.Sprintf(`Query File: '%s'\nBindings File: '%s'\nBindings Name: '%s'\nQuery Parameters: '%v'\nExpected Result File: '%s'\nActual Result File: '%s'\n\nFailed to compare results: %s`,\n\t\t\t\t\tp.Query.Path,\n\t\t\t\t\tp.Path,\n\t\t\t\t\tp.Names[i],\n\t\t\t\t\tp.Bindings[i],\n\t\t\t\t\texpectedFilename,\n\t\t\t\t\trs.Filename,\n\t\t\t\t\terr.Error()))\n\t\t}\n\n\t\tif diff != \"\" {\n\t\t\tt.Diagnostic(\n\t\t\t\tfmt.Sprintf(`Query File: '%s'\nBindings File: '%s'\nBindings Name: '%s'\nQuery Parameters: '%v'\nExpected Result File: '%s'\nActual Result File: '%s'\n\n%s`,\n\t\t\t\t\tp.Query.Path,\n\t\t\t\t\tp.Path,\n\t\t\t\t\tp.Names[i],\n\t\t\t\t\tp.Bindings[i],\n\t\t\t\t\texpectedFilename,\n\t\t\t\t\trs.Filename,\n\t\t\t\t\tdiff))\n\t\t}\n\t\tt.Ok(diff == \"\", testName)\n\t}\n}", "func (rm *RsrcManager) SpecDiffers(expected, observed *reconciler.Object) bool {\n\te := expected.Obj.(*Object).Bucket\n\to := observed.Obj.(*Object).Bucket\n\treturn !reflect.DeepEqual(e.Acl, o.Acl) ||\n\t\t!reflect.DeepEqual(e.Billing, o.Billing) ||\n\t\t!reflect.DeepEqual(e.Cors, o.Cors) ||\n\t\t!reflect.DeepEqual(e.DefaultEventBasedHold, o.DefaultEventBasedHold) ||\n\t\t!reflect.DeepEqual(e.Encryption, o.Encryption) ||\n\t\t!reflect.DeepEqual(e.Labels, o.Labels) ||\n\t\t!reflect.DeepEqual(e.Lifecycle, o.Lifecycle) ||\n\t\t!strings.EqualFold(e.Location, o.Location) ||\n\t\t!reflect.DeepEqual(e.Logging, o.Logging) ||\n\t\t!reflect.DeepEqual(e.Name, o.Name) ||\n\t\t!reflect.DeepEqual(e.Owner, o.Owner) ||\n\t\t!reflect.DeepEqual(e.StorageClass, o.StorageClass) ||\n\t\t!reflect.DeepEqual(e.Versioning, o.Versioning) ||\n\t\t!reflect.DeepEqual(e.Website, o.Website)\n}", "func assertZonesMatch(t *testing.T, 
expected, actual time.Time) {\n\tt.Helper()\n\texpectedName, expectedOffset := expected.Zone()\n\tactualName, actualOffset := actual.Zone()\n\tif expectedOffset != actualOffset {\n\t\tt.Errorf(\"Expected Zone '%s' with offset %d. Got Zone '%s' with offset %d\", expectedName, expectedOffset, actualName, actualOffset)\n\t}\n}", "func (s *SQLProxySpec) ExpectedResources(rsrc interface{}) []ResourceInfo {\n\tr := rsrc.(*AirflowBase)\n\tname, _, _ := nameAndLabels(r, ValueAirflowComponentSQL, \"\", false)\n\treturn []ResourceInfo{\n\t\tResourceInfo{LifecycleManaged, s.service(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.sts(r), \"\"},\n\t\tResourceInfo{LifecycleReferred,\n\t\t\t&resources.Secret{\n\t\t\t\tSecret: &corev1.Secret{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tNamespace: r.Namespace,\n\t\t\t\t\t\tName: name,\n\t\t\t\t\t}}},\n\t\t\t\"\"},\n\t}\n}", "func (s *RedisSpec) ExpectedResources(rsrc interface{}) []ResourceInfo {\n\tr := rsrc.(*AirflowCluster)\n\trsrcInfos := []ResourceInfo{\n\t\tResourceInfo{LifecycleManaged, s.secret(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.service(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.sts(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.podDisruption(r), \"\"},\n\t}\n\t//if s.VolumeClaimTemplate != nil {\n\t//\t\trsrcInfos = append(rsrcInfos, ResourceInfo{LifecycleReferred, s.VolumeClaimTemplate, \"\"})\n\t//\t}\n\treturn rsrcInfos\n}", "func Compare(expected, actual io.Reader) error {\n\texpScan := bufio.NewScanner(expected)\n\tactScan := bufio.NewScanner(actual)\n\n\tfor line := 1; ; line++ {\n\t\texp, hasExp := scanTrimRight(expScan)\n\t\tact, hasAct := scanTrimRight(actScan)\n\n\t\t// EOF at the same time\n\t\tif !hasExp && !hasAct {\n\t\t\treturn nil\n\t\t}\n\t\t// they are not equal\n\t\tif exp != act {\n\t\t\treturn newErr(line, exp, act)\n\t\t}\n\t\t// they are all exists and equal\n\t\tif hasExp && hasAct {\n\t\t\tcontinue\n\t\t}\n\t\t// verify all empty line lefts\n\t\tif err := 
verifyEOFSpace(\"actual\", actScan); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err := verifyEOFSpace(\"expected\", expScan); err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// at this point, they should all be same\n\t\treturn nil\n\t}\n}", "func (s *WorkerSpec) ExpectedResources(rsrc interface{}) []ResourceInfo {\n\tr := rsrc.(*AirflowCluster)\n\trsrcInfos := []ResourceInfo{\n\t\tResourceInfo{LifecycleManaged, s.sts(r), \"\"},\n\t}\n\t// TODO storage spec ?\n\treturn rsrcInfos\n}", "func (s *MySQLSpec) ExpectedResources(rsrc interface{}) []ResourceInfo {\n\tr := rsrc.(*AirflowBase)\n\tif s.Operator {\n\t\treturn nil\n\t}\n\trsrcInfos := []ResourceInfo{\n\t\tResourceInfo{LifecycleManaged, s.secret(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.service(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.sts(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.podDisruption(r), \"\"},\n\t}\n\t//if s.VolumeClaimTemplate != nil {\n\t//\trsrcInfos = append(rsrcInfos, ResourceInfo{LifecycleReferred, s.VolumeClaimTemplate, \"\"})\n\t//}\n\treturn rsrcInfos\n}", "func (r Resources) Equal(o Resources) bool {\n\tLog.Vomit.Printf(\"Comparing resources: %+ v ?= %+ v\", r, o)\n\tif len(r) != len(o) {\n\t\tLog.Vomit.Println(\"Lengths differ\")\n\t\treturn false\n\t}\n\n\tif r.Ports() != o.Ports() {\n\t\tLog.Vomit.Println(\"Ports differ\")\n\t\treturn false\n\t}\n\n\tif math.Abs(r.Cpus()-o.Cpus()) > 0.001 {\n\t\tLog.Vomit.Println(\"Cpus differ\")\n\t\treturn false\n\t}\n\n\tif math.Abs(r.Memory()-o.Memory()) > 0.001 {\n\t\tLog.Vomit.Println(\"Memory differ\")\n\t\treturn false\n\t}\n\n\treturn true\n}", "func (r *Resources) Equal(other *Resources) bool {\n\treturn equal(r.CPU, other.CPU) &&\n\t\tequal(r.MEMORY, other.MEMORY) &&\n\t\tequal(r.DISK, other.DISK) &&\n\t\tequal(r.GPU, other.GPU)\n}", "func (s *SQLProxySpec) Differs(expected ResourceInfo, observed ResourceInfo) bool {\n\tswitch expected.Obj.(type) {\n\tcase 
*resources.Service:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t\texpected.Obj.(*resources.Service).Spec.ClusterIP = observed.Obj.(*resources.Service).Spec.ClusterIP\n\t}\n\treturn true\n}", "func compareRes(a, b []byte) error {\n\tvar am, bm interface{}\n\tif err := json.Unmarshal(a, &am); err != nil {\n\t\treturn fmt.Errorf(\"%s: %v\", a, err)\n\t}\n\tif err := json.Unmarshal(b, &bm); err != nil {\n\t\treturn fmt.Errorf(\"%s: %v\", b, err)\n\t}\n\n\treturn cmp(am, bm)\n}", "func VerifyResources(resources string) error {\n\tif resources != \"\" {\n\t\tvar r map[string]interface{}\n\t\tif e := json.Unmarshal([]byte(resources), &r); e != nil {\n\t\t\treturn fmt.Errorf(\"%s: %s should be JSON format\", envResource, resources)\n\t\t}\n\t}\n\treturn nil\n}", "func (s *PostgresSpec) ExpectedResources(rsrc interface{}) []ResourceInfo {\n\tr := rsrc.(*AirflowBase)\n\tif s.Operator {\n\t\treturn nil\n\t}\n\trsrcInfos := []ResourceInfo{\n\t\tResourceInfo{LifecycleManaged, s.secret(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.service(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.sts(r), \"\"},\n\t\tResourceInfo{LifecycleManaged, s.podDisruption(r), \"\"},\n\t}\n\t//if s.VolumeClaimTemplate != nil {\n\t//\trsrcInfos = append(rsrcInfos, ResourceInfo{LifecycleReferred, s.VolumeClaimTemplate, \"\"})\n\t//}\n\treturn rsrcInfos\n}", "func compareDevices(expected *model.Device, actual *model.Device, t *testing.T) {\n\tassert.Equal(t, expected.Id, actual.Id)\n\tassert.Equal(t, expected.PubKey, actual.PubKey)\n\tassert.Equal(t, expected.IdData, actual.IdData)\n\tassert.Equal(t, expected.IdDataStruct, actual.IdDataStruct)\n\tassert.Equal(t, expected.IdDataSha256, actual.IdDataSha256)\n\tassert.Equal(t, expected.Status, actual.Status)\n\tassert.Equal(t, expected.ApiLimits, actual.ApiLimits)\n\tcompareTime(expected.CreatedTs, actual.CreatedTs, t)\n\tcompareTime(expected.UpdatedTs, actual.UpdatedTs, t)\n}", "func cmp(a, b planv1.Plan) bool {\n\tif 
a.Namespace != b.Namespace {\n\t\treturn false\n\t}\n\n\tif a.Spec.Version != b.Spec.Version {\n\t\treturn false\n\t}\n\n\tif a.Spec.Concurrency != b.Spec.Concurrency {\n\t\treturn false\n\t}\n\n\t//TODO Refactor to not use reflection\n\tif !reflect.DeepEqual(a.Spec, b.Spec) {\n\t\treturn false\n\t}\n\tif !reflect.DeepEqual(a.ObjectMeta, b.ObjectMeta) {\n\t\treturn false\n\t}\n\tif !reflect.DeepEqual(a.TypeMeta, b.TypeMeta) {\n\t\treturn false\n\t}\n\treturn true\n}", "func newResourceDelta(\n\ta *resource,\n\tb *resource,\n) *ackcompare.Delta {\n\tdelta := ackcompare.NewDelta()\n\tif (a == nil && b != nil) ||\n\t\t(a != nil && b == nil) {\n\t\tdelta.Add(\"\", a, b)\n\t\treturn delta\n\t}\n\n\tif ackcompare.HasNilDifference(a.ko.Spec.Constraints, b.ko.Spec.Constraints) {\n\t\tdelta.Add(\"Spec.Constraints\", a.ko.Spec.Constraints, b.ko.Spec.Constraints)\n\t} else if a.ko.Spec.Constraints != nil && b.ko.Spec.Constraints != nil {\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.Constraints.EncryptionContextEquals, b.ko.Spec.Constraints.EncryptionContextEquals) {\n\t\t\tdelta.Add(\"Spec.Constraints.EncryptionContextEquals\", a.ko.Spec.Constraints.EncryptionContextEquals, b.ko.Spec.Constraints.EncryptionContextEquals)\n\t\t} else if a.ko.Spec.Constraints.EncryptionContextEquals != nil && b.ko.Spec.Constraints.EncryptionContextEquals != nil {\n\t\t\tif !ackcompare.MapStringStringPEqual(a.ko.Spec.Constraints.EncryptionContextEquals, b.ko.Spec.Constraints.EncryptionContextEquals) {\n\t\t\t\tdelta.Add(\"Spec.Constraints.EncryptionContextEquals\", a.ko.Spec.Constraints.EncryptionContextEquals, b.ko.Spec.Constraints.EncryptionContextEquals)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.Constraints.EncryptionContextSubset, b.ko.Spec.Constraints.EncryptionContextSubset) {\n\t\t\tdelta.Add(\"Spec.Constraints.EncryptionContextSubset\", a.ko.Spec.Constraints.EncryptionContextSubset, b.ko.Spec.Constraints.EncryptionContextSubset)\n\t\t} else if 
a.ko.Spec.Constraints.EncryptionContextSubset != nil && b.ko.Spec.Constraints.EncryptionContextSubset != nil {\n\t\t\tif !ackcompare.MapStringStringPEqual(a.ko.Spec.Constraints.EncryptionContextSubset, b.ko.Spec.Constraints.EncryptionContextSubset) {\n\t\t\t\tdelta.Add(\"Spec.Constraints.EncryptionContextSubset\", a.ko.Spec.Constraints.EncryptionContextSubset, b.ko.Spec.Constraints.EncryptionContextSubset)\n\t\t\t}\n\t\t}\n\t}\n\tif !ackcompare.SliceStringPEqual(a.ko.Spec.GrantTokens, b.ko.Spec.GrantTokens) {\n\t\tdelta.Add(\"Spec.GrantTokens\", a.ko.Spec.GrantTokens, b.ko.Spec.GrantTokens)\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.GranteePrincipal, b.ko.Spec.GranteePrincipal) {\n\t\tdelta.Add(\"Spec.GranteePrincipal\", a.ko.Spec.GranteePrincipal, b.ko.Spec.GranteePrincipal)\n\t} else if a.ko.Spec.GranteePrincipal != nil && b.ko.Spec.GranteePrincipal != nil {\n\t\tif *a.ko.Spec.GranteePrincipal != *b.ko.Spec.GranteePrincipal {\n\t\t\tdelta.Add(\"Spec.GranteePrincipal\", a.ko.Spec.GranteePrincipal, b.ko.Spec.GranteePrincipal)\n\t\t}\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.KeyID, b.ko.Spec.KeyID) {\n\t\tdelta.Add(\"Spec.KeyID\", a.ko.Spec.KeyID, b.ko.Spec.KeyID)\n\t} else if a.ko.Spec.KeyID != nil && b.ko.Spec.KeyID != nil {\n\t\tif *a.ko.Spec.KeyID != *b.ko.Spec.KeyID {\n\t\t\tdelta.Add(\"Spec.KeyID\", a.ko.Spec.KeyID, b.ko.Spec.KeyID)\n\t\t}\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.Name, b.ko.Spec.Name) {\n\t\tdelta.Add(\"Spec.Name\", a.ko.Spec.Name, b.ko.Spec.Name)\n\t} else if a.ko.Spec.Name != nil && b.ko.Spec.Name != nil {\n\t\tif *a.ko.Spec.Name != *b.ko.Spec.Name {\n\t\t\tdelta.Add(\"Spec.Name\", a.ko.Spec.Name, b.ko.Spec.Name)\n\t\t}\n\t}\n\tif !ackcompare.SliceStringPEqual(a.ko.Spec.Operations, b.ko.Spec.Operations) {\n\t\tdelta.Add(\"Spec.Operations\", a.ko.Spec.Operations, b.ko.Spec.Operations)\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.RetiringPrincipal, b.ko.Spec.RetiringPrincipal) 
{\n\t\tdelta.Add(\"Spec.RetiringPrincipal\", a.ko.Spec.RetiringPrincipal, b.ko.Spec.RetiringPrincipal)\n\t} else if a.ko.Spec.RetiringPrincipal != nil && b.ko.Spec.RetiringPrincipal != nil {\n\t\tif *a.ko.Spec.RetiringPrincipal != *b.ko.Spec.RetiringPrincipal {\n\t\t\tdelta.Add(\"Spec.RetiringPrincipal\", a.ko.Spec.RetiringPrincipal, b.ko.Spec.RetiringPrincipal)\n\t\t}\n\t}\n\n\treturn delta\n}", "func (s *RedisSpec) Differs(expected ResourceInfo, observed ResourceInfo) bool {\n\tswitch expected.Obj.(type) {\n\tcase *resources.Secret:\n\t\t// Dont update a secret\n\t\treturn false\n\tcase *resources.Service:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t\texpected.Obj.(*resources.Service).Spec.ClusterIP = observed.Obj.(*resources.Service).Spec.ClusterIP\n\tcase *resources.PodDisruptionBudget:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t}\n\treturn true\n}", "func (s *PostgresSpec) Differs(expected ResourceInfo, observed ResourceInfo) bool {\n\tswitch expected.Obj.(type) {\n\tcase *resources.Secret:\n\t\t// Dont update a secret\n\t\treturn false\n\tcase *resources.Service:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t\texpected.Obj.(*resources.Service).Spec.ClusterIP = observed.Obj.(*resources.Service).Spec.ClusterIP\n\tcase *resources.PodDisruptionBudget:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t}\n\treturn true\n}", "func compareMaps(t *testing.T, a, b map[string]string) {\n\tif len(a) != len(b) {\n\t\tt.Error(\"Maps different sizes\", a, b)\n\t}\n\tfor ka, va := range a {\n\t\tif vb, ok := b[ka]; !ok || va != vb {\n\t\t\tt.Error(\"Difference in key\", ka, va, b[ka])\n\t\t}\n\t}\n\tfor kb, vb := range b {\n\t\tif va, ok := a[kb]; !ok || vb != va {\n\t\t\tt.Error(\"Difference in key\", kb, vb, a[kb])\n\t\t}\n\t}\n}", "func (s *WorkerSpec) Differs(expected ResourceInfo, observed ResourceInfo) bool {\n\t// TODO\n\treturn true\n}", "func 
CompareJSON(expected string, actual string, topDir string) (string, error) {\n\n\tvar fActual, fExpected *os.File\n\tvar err error\n\tif fActual, err = writeTempFile(\"tmp_actual\", actual); err != nil {\n\t\treturn \"\", err\n\t}\n\tif fExpected, err = writeTempFile(\"tmp_expected\", expected); err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer func() {\n\t\t_ = os.Remove(fActual.Name())\n\t\t_ = os.Remove(fExpected.Name())\n\t}()\n\n\tscript := path.Join(topDir, diffExecutable)\n\tcompareCmd := exec.Command(\n\t\t\"python\",\n\t\tscript,\n\t\tfActual.Name(),\n\t\tfExpected.Name())\n\tdiffCmd := exec.Command(\n\t\t\"python\",\n\t\tscript,\n\t\t\"--diff\",\n\t\tfActual.Name(),\n\t\tfExpected.Name())\n\treturn runJSONDiff(compareCmd, diffCmd)\n}", "func CompareStorageRequests(initial corev1.ResourceRequirements, updated corev1.ResourceRequirements) StorageComparison {\n\tinitialSize := initial.Requests.Storage()\n\tupdatedSize := updated.Requests.Storage()\n\tif initialSize.IsZero() || updatedSize.IsZero() {\n\t\treturn StorageComparison{}\n\t}\n\tswitch updatedSize.Cmp(*initialSize) {\n\tcase -1: // decrease\n\t\treturn StorageComparison{Decrease: true}\n\tcase 1: // increase\n\t\treturn StorageComparison{Increase: true}\n\tdefault: // same size\n\t\treturn StorageComparison{}\n\t}\n}", "func compareRequests(want []interface{}, got []interface{}) error {\n\tif len(got) != len(want) {\n\t\tvar gotMsg string\n\t\tfor _, r := range got {\n\t\t\tgotMsg += fmt.Sprintf(\"%v: %+v]\\n\", reflect.TypeOf(r), r)\n\t\t}\n\n\t\tvar wantMsg string\n\t\tfor _, r := range want {\n\t\t\twantMsg += fmt.Sprintf(\"%v: %+v]\\n\", reflect.TypeOf(r), r)\n\t\t}\n\n\t\treturn fmt.Errorf(\"got %d requests, want %d requests:\\ngot:\\n%s\\nwant:\\n%s\", len(got), len(want), gotMsg, wantMsg)\n\t}\n\n\tfor i, want := range want {\n\t\tif reflect.TypeOf(got[i]) != reflect.TypeOf(want) {\n\t\t\treturn fmt.Errorf(\"request %d: got %+v, want %+v\", i, reflect.TypeOf(got[i]), 
reflect.TypeOf(want))\n\t\t}\n\t}\n\treturn nil\n}", "func Compare(t *testing.T, expected string, out string) {\n\tif out != expected {\n\t\tt.Error(Warn(`FAIL!`), \"\\n\\n\"+\n\t\t\t`Got: `+quoted(out)+\"\\n\"+\n\t\t\t`Expected: `+quoted(expected)+\"\\n\",\n\t\t)\n\t} else {\n\t\tt.Log(Okay(`PASS:`), quoted(strings.TrimSuffix(out, \"\\n\")))\n\t}\n}", "func (s *NFSStoreSpec) Differs(expected ResourceInfo, observed ResourceInfo) bool {\n\t// TODO\n\tswitch expected.Obj.(type) {\n\tcase *resources.Service:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t\texpected.Obj.(*resources.Service).Spec.ClusterIP = observed.Obj.(*resources.Service).Spec.ClusterIP\n\tcase *resources.PodDisruptionBudget:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t}\n\treturn true\n}", "func routerCAConfigMapsEqual(a, b *corev1.ConfigMap) bool {\n\tif a.Data[\"ca-bundle.crt\"] != b.Data[\"ca-bundle.crt\"] {\n\t\treturn false\n\t}\n\treturn true\n}", "func AssertEqualStringMap(expected map[string]string, result map[string]string) {\n\tAssertType(expected, result)\n\tif expected == nil && result == nil {\n\t\treturn\n\t}\n\tif len(expected) != len(result) {\n\t\tpanic(fmt.Sprintf(\"Error: [] Different count of items\\nExpected Value: %v\\nResult: %v\", expected, result))\n\t}\n\tfor expectedKey := range expected {\n\t\tif result[expectedKey] != expected[expectedKey] {\n\t\t\tpanic(fmt.Sprintf(\"Error: [] Item missing: %v.\\nExpected Value: %v\\nResult: %v\", expected[expectedKey], expected, result))\n\t\t}\n\t}\n}", "func compare_clocks(new_clock ClockVector, last_clock ClockVector) bool {\n\treturn new_clock.X >= last_clock.X && new_clock.Y >= last_clock.Y && new_clock.Z >= last_clock.Z\n}", "func compareObservations(expected v1alpha1.NodeResult, actual v1alpha1.NodeResult, t *testing.T) error {\n\tif expected.Node != actual.Node {\n\t\treturn fmt.Errorf(\"NodeResult should be on %s, but is on %s\", expected.Node, 
actual.Node)\n\t}\n\texObs := expected.Observations\n\tacObs := actual.Observations\n\tif len(exObs) != len(acObs) {\n\t\treturn fmt.Errorf(\"Observations should be %v, but got %v\", exObs, acObs)\n\t}\n\tfor i := 0; i < len(exObs); i++ {\n\t\tif exObs[i].Component != acObs[i].Component ||\n\t\t\texObs[i].ComponentInfo != acObs[i].ComponentInfo ||\n\t\t\texObs[i].Action != acObs[i].Action {\n\t\t\treturn fmt.Errorf(\"Observations should be %v, but got %v\", exObs, acObs)\n\t\t}\n\t}\n\treturn nil\n}", "func Compare(resultPaths []string) {\n\t// m maps names like \"baseline\" and \"instrumented\" to textual data in\n\t// the Go Benchmark Data Format. See https://golang.org/issue/14313.\n\tm := make(map[string][]byte)\n\n\t// Walk each directory from resultPaths and collect benchmark data from\n\t// all result.json files.\n\tfor _, root := range resultPaths {\n\t\t_ = filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {\n\t\t\tif err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\t\t\tif !d.IsDir() {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tdefer func() {\n\t\t\t\t// ignore panics from readTestResult\n\t\t\t\t_ = recover()\n\t\t\t}()\n\t\t\ttr := readTestResult(filepath.Join(path, \"result.json\"))\n\t\t\tm[d.Name()] = append(m[d.Name()], toGoBenchFormat(tr)...)\n\t\t\treturn nil\n\t\t})\n\t}\n\n\t// Sort names to make output deterministic.\n\tvar names []string\n\tfor name := range m {\n\t\tnames = append(names, name)\n\t}\n\tsort.Strings(names)\n\tvar c benchstat.Collection\n\tfor _, name := range names {\n\t\tc.AddConfig(name, m[name])\n\t}\n\n\t// Print comparison.\n\tbenchstat.FormatText(os.Stdout, c.Tables())\n}", "func VerifyResourcesProvisionedForSpace(t *testing.T, awaitilities wait.Awaitilities, spaceName string, additionalCriteria ...wait.SpaceWaitCriterion) (*toolchainv1alpha1.Space, *toolchainv1alpha1.NSTemplateSet) {\n\tspace, err := awaitilities.Host().WaitForSpace(t, 
spaceName,\n\t\tappend(additionalCriteria,\n\t\t\twait.UntilSpaceHasAnyTargetClusterSet(),\n\t\t\twait.UntilSpaceHasAnyTierNameSet())...)\n\trequire.NoError(t, err)\n\ttargetCluster := getSpaceTargetMember(t, awaitilities, space)\n\ttier, err := awaitilities.Host().WaitForNSTemplateTier(t, space.Spec.TierName)\n\trequire.NoError(t, err)\n\tchecks, err := tiers.NewChecksForTier(tier)\n\trequire.NoError(t, err)\n\n\tt.Logf(\"verifying resources provisioned for space '%s' with tier '%s'\", space.Name, space.Spec.TierName)\n\treturn verifyResourcesProvisionedForSpace(t, awaitilities.Host(), targetCluster, spaceName, tier, checks)\n}", "func compareDiagrams(t *testing.T, result string, expected string) {\n\tdec := json.NewDecoder(strings.NewReader(result))\n\tvar resData, expData diagramData\n\tif err := dec.Decode(&resData); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdec = json.NewDecoder(strings.NewReader(expected))\n\tif err := dec.Decode(&expData); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif !reflect.DeepEqual(resData, expData) {\n\t\tt.Errorf(\"\\ngot:\\n%s\\nwant:\\n%s\", result, expected)\n\t}\n}", "func (s *SchedulerSpec) Differs(expected ResourceInfo, observed ResourceInfo) bool {\n\tswitch expected.Obj.(type) {\n\tcase *resources.ServiceAccount:\n\t\t// Dont update a SA\n\t\treturn false\n\t}\n\treturn true\n}", "func checkNodeAllocatableTest(f *framework.Framework) {\n\n\tnodeMem := getNodeMemory(f)\n\tframework.Logf(\"nodeMem says: %+v\", nodeMem)\n\n\t// calculate the allocatable mem based on capacity - reserved amounts\n\tcalculatedNodeAlloc := nodeMem.capacity.Copy()\n\tcalculatedNodeAlloc.Sub(nodeMem.systemReserve)\n\tcalculatedNodeAlloc.Sub(nodeMem.kubeReserve)\n\tcalculatedNodeAlloc.Sub(nodeMem.softEviction)\n\tcalculatedNodeAlloc.Sub(nodeMem.hardEviction)\n\n\tginkgo.By(fmt.Sprintf(\"Checking stated allocatable memory %v against calculated allocatable memory %v\", &nodeMem.allocatable, calculatedNodeAlloc))\n\n\t// sanity check against stated 
allocatable\n\tgomega.Expect(calculatedNodeAlloc.Cmp(nodeMem.allocatable)).To(gomega.Equal(0))\n}", "func CompareArgv(expected, got []string) error {\n\tif len(expected) != len(got) {\n\t\treturn fmt.Errorf(\"expected %d entries but got %d (%+v)\", len(expected), len(got), got)\n\t}\n\truntimex.Assert(len(got) >= 1, \"too few entries\")\n\tif !strings.HasSuffix(got[0], expected[0]) {\n\t\treturn fmt.Errorf(\"expected %s suffix but got %s\", expected[0], got[0])\n\t}\n\tfor idx := 1; idx < len(got); idx++ {\n\t\tif got[idx] != expected[idx] {\n\t\t\treturn fmt.Errorf(\"entry %d of %+v: expected %s, but got %s\", idx, expected, expected[idx], got[idx])\n\t\t}\n\t}\n\treturn nil\n}", "func TestCompare(t *testing.T, first Block, second Block) {\n\tjs, err := json.Marshal(first)\n\tif err != nil {\n\t\tt.Errorf(\"the error was expected to be nil, error returned: %s\", err.Error())\n\t\treturn\n\t}\n\n\terr = json.Unmarshal(js, second)\n\tif err != nil {\n\t\tt.Errorf(\"the error was expected to be nil, error returned: %s\", err.Error())\n\t\treturn\n\t}\n\n\treJS, err := json.Marshal(second)\n\tif err != nil {\n\t\tt.Errorf(\"the error was expected to be nil, error returned: %s\", err.Error())\n\t\treturn\n\t}\n\n\tif bytes.Compare(js, reJS) != 0 {\n\t\tt.Errorf(\"the transformed javascript is different.\\n%s\\n%s\", js, reJS)\n\t\treturn\n\t}\n\n\tif !first.Hash().Compare(second.Hash()) {\n\t\tt.Errorf(\"the instance conversion failed\")\n\t\treturn\n\t}\n}", "func Compare(t *testing.T, got, expected string) {\n\tCompareCallDepth(t, got, expected, 2)\n}", "func newResourceDelta(\n\ta *resource,\n\tb *resource,\n) *ackcompare.Delta {\n\tdelta := ackcompare.NewDelta()\n\tif (a == nil && b != nil) ||\n\t\t(a != nil && b == nil) {\n\t\tdelta.Add(\"\", a, b)\n\t\treturn delta\n\t}\n\n\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig, b.ko.Spec.MonitoringScheduleConfig) {\n\t\tdelta.Add(\"Spec.MonitoringScheduleConfig\", a.ko.Spec.MonitoringScheduleConfig, 
b.ko.Spec.MonitoringScheduleConfig)\n\t} else if a.ko.Spec.MonitoringScheduleConfig != nil && b.ko.Spec.MonitoringScheduleConfig != nil {\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition) {\n\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition)\n\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig != nil {\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName != nil && 
b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource != nil {\n\t\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI)\n\t\t\t\t\t} else if 
a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI != nil {\n\t\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI {\n\t\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource != nil {\n\t\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI\", 
a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI)\n\t\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI != nil {\n\t\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI {\n\t\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment != nil {\n\t\t\t\tif !ackcompare.MapStringStringPEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment, 
b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification != nil {\n\t\t\t\tif !ackcompare.SliceStringPEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerArguments, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerArguments) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerArguments\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerArguments, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerArguments)\n\t\t\t\t}\n\t\t\t\tif !ackcompare.SliceStringPEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerEntrypoint, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerEntrypoint) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerEntrypoint\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerEntrypoint, 
b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerEntrypoint)\n\t\t\t\t}\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI, 
b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI != nil {\n\t\t\t\t\tif 
*a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringInputs, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringInputs) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringInputs\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringInputs, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringInputs)\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig != nil {\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID) 
{\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif !reflect.DeepEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.MonitoringOutputs, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.MonitoringOutputs) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.MonitoringOutputs\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.MonitoringOutputs, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.MonitoringOutputs)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources, 
b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources != nil {\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig != nil {\n\t\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount)\n\t\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount != nil {\n\t\t\t\t\t\tif 
*a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount {\n\t\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType)\n\t\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType != nil {\n\t\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType {\n\t\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType, 
b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID)\n\t\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID != nil {\n\t\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID {\n\t\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB) 
{\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB)\n\t\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB != nil {\n\t\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB {\n\t\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig != nil {\n\t\t\t\tif 
ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation, 
b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig != nil {\n\t\t\t\t\tif !ackcompare.SliceStringPEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.SecurityGroupIDs, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.SecurityGroupIDs) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.SecurityGroupIDs\", 
a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.SecurityGroupIDs, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.SecurityGroupIDs)\n\t\t\t\t\t}\n\t\t\t\t\tif !ackcompare.SliceStringPEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.Subnets, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.Subnets) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.Subnets\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.Subnets, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.Subnets)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN != nil {\n\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition) 
{\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition != nil {\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName) 
{\n\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName)\n\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName != nil {\n\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringType, b.ko.Spec.MonitoringScheduleConfig.MonitoringType) {\n\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringType\", a.ko.Spec.MonitoringScheduleConfig.MonitoringType, b.ko.Spec.MonitoringScheduleConfig.MonitoringType)\n\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringType != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringType != nil {\n\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringType != *b.ko.Spec.MonitoringScheduleConfig.MonitoringType {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringType\", a.ko.Spec.MonitoringScheduleConfig.MonitoringType, b.ko.Spec.MonitoringScheduleConfig.MonitoringType)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig, b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig) {\n\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.ScheduleConfig\", a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig, b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig)\n\t\t} else if a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig != nil && b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig != nil {\n\t\t\tif 
ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression, b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression\", a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression, b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression != nil && b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression != nil {\n\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression != *b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression\", a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression, b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleName, b.ko.Spec.MonitoringScheduleName) {\n\t\tdelta.Add(\"Spec.MonitoringScheduleName\", a.ko.Spec.MonitoringScheduleName, b.ko.Spec.MonitoringScheduleName)\n\t} else if a.ko.Spec.MonitoringScheduleName != nil && b.ko.Spec.MonitoringScheduleName != nil {\n\t\tif *a.ko.Spec.MonitoringScheduleName != *b.ko.Spec.MonitoringScheduleName {\n\t\t\tdelta.Add(\"Spec.MonitoringScheduleName\", a.ko.Spec.MonitoringScheduleName, b.ko.Spec.MonitoringScheduleName)\n\t\t}\n\t}\n\n\treturn delta\n}", "func (s *MySQLSpec) Differs(expected ResourceInfo, observed ResourceInfo) bool {\n\tswitch expected.Obj.(type) {\n\tcase *resources.Secret:\n\t\t// Dont update a secret\n\t\treturn false\n\tcase *resources.Service:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t\texpected.Obj.(*resources.Service).Spec.ClusterIP = observed.Obj.(*resources.Service).Spec.ClusterIP\n\tcase 
*resources.PodDisruptionBudget:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t}\n\treturn true\n}", "func TestRunDiff(t *testing.T) {\n\tcases := map[string]struct {\n\t\tcomparers compare.ComparerSet\n\t\tresourceChange []plan.ResourcePlan\n\t\tpreHook func()\n\t\texpected int\n\t\texpectedOutput []string\n\t}{\n\t\t\"create returns false with create resource\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: false,\n\t\t\t\t\tDiffOutput: \"comparer fail\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: []string{\"comparer fail\"},\n\t\t},\n\t\t\"create returns true with create resource\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: true,\n\t\t\t\t\tDiffOutput: \"comparer ok\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: []string{\"comparer ok\"},\n\t\t},\n\t\t\"no matching comparer\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: false,\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tAddressReturns: \"address\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: []string{\"\"},\n\t\t},\n\t\t\"no matching comparer with strict enabled\": {\n\t\t\tcomparers: 
compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: false,\n\t\t\t\t\tDiffOutput: \"comparer fail\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tAddressReturns: \"address\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tpreHook: func() {\n\t\t\t\tstrict = true\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: []string{\"?\", \"address (no matching comparer)\"},\n\t\t},\n\t\t\"create returns true with multiple resources\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: true,\n\t\t\t\t\tDiffOutput: \"comparer ok\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address1\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address2\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: []string{\"comparer ok\\ncomparer ok\"},\n\t\t},\n\t\t\"fails if there is at least 1 failure\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: false,\n\t\t\t\t\tDiffOutput: \"comparer fail\",\n\t\t\t\t},\n\t\t\t\tDestroyComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: true,\n\t\t\t\t\tDiffOutput: \"comparer ok\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address1\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tDeleteReturns: 
true,\n\t\t\t\t\tAddressReturns: \"address2\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: []string{\"comparer fail\", \"comparer ok\"},\n\t\t},\n\t\t\"returns 1 if there is at least 1 failure and errorOnFail is set\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: false,\n\t\t\t\t\tDiffOutput: \"comparer fail\",\n\t\t\t\t},\n\t\t\t\tDestroyComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: true,\n\t\t\t\t\tDiffOutput: \"comparer ok\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address1\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tDeleteReturns: true,\n\t\t\t\t\tAddressReturns: \"address2\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tpreHook: func() {\n\t\t\t\terrorOnFail = true\n\t\t\t},\n\t\t\texpected: 1,\n\t\t\texpectedOutput: []string{\"comparer fail\", \"comparer ok\"},\n\t\t},\n\t\t\"only outputs failed with failedOnly\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: false,\n\t\t\t\t\tDiffOutput: \"comparer fail\",\n\t\t\t\t},\n\t\t\t\tDestroyComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: true,\n\t\t\t\t\tDiffOutput: \"comparer ok\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address1\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tDeleteReturns: true,\n\t\t\t\t\tAddressReturns: \"address2\",\n\t\t\t\t\tNameReturns: 
\"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tpreHook: func() {\n\t\t\t\tfailedOnly = true\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: []string{\"comparer fail\"},\n\t\t},\n\t\t// TODO: test case to ensure comparers are called correctly(matching type and number of calls)\n\t}\n\n\tfor name, tc := range cases {\n\t\tt.Run(name, func(t *testing.T) {\n\t\t\t// set default vars\n\t\t\terrorOnFail = false\n\t\t\tstrict = false\n\t\t\tfailedOnly = false\n\n\t\t\tif tc.preHook != nil {\n\t\t\t\ttc.preHook()\n\t\t\t}\n\n\t\t\tvar output bytes.Buffer\n\t\t\tif got := runDiff(&output, tc.resourceChange, tc.comparers); got != tc.expected {\n\t\t\t\tt.Errorf(\"Expected: %v but got %v\", tc.expected, got)\n\t\t\t}\n\n\t\t\tfor _, s := range tc.expectedOutput {\n\t\t\t\tif !strings.Contains(output.String(), s) {\n\t\t\t\t\tt.Errorf(\"Result string did not contain %v\", s)\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t}\n}", "func verifyExpectedRequests(ctx context.Context, fc *fakeclient.Client, resourceNames ...string) error {\n\tfor _, name := range resourceNames {\n\t\tif name == \"\" {\n\t\t\t// ResourceName empty string indicates a cancel.\n\t\t\tif _, err := fc.WaitForCancelEDSWatch(ctx); err != nil {\n\t\t\t\treturn fmt.Errorf(\"timed out when expecting resource %q\", name)\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\n\t\tresName, err := fc.WaitForWatchEDS(ctx)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"timed out when expecting resource %q, %p\", name, fc)\n\t\t}\n\t\tif resName != name {\n\t\t\treturn fmt.Errorf(\"got EDS request for resource %q, expected: %q\", resName, name)\n\t\t}\n\t}\n\treturn nil\n}", "func (d *portworx) comparePoolsAndDisks(srcNode *api.StorageNode,\n\tdstNode *api.StorageNode) bool {\n\tsrcPools := srcNode.Pools\n\tdstPools := dstNode.Pools\n\n\t// Comparing pool ids\n\tif len(srcPools) != len(dstPools) {\n\t\treturn false\n\t}\n\n\tfor x, pool := range srcPools {\n\t\tif pool.Uuid != dstPools[x].Uuid {\n\t\t\tlog.Errorf(\"Source 
pools: [%v] not macthing with Destination pools: [%v]\", srcPools, dstPools)\n\t\t\treturn false\n\t\t}\n\t}\n\n\t// Comparing disks\n\tsrcDisks := srcNode.Disks\n\tdstDisks := dstNode.Disks\n\n\tfor disk, value := range srcDisks {\n\t\tif !srcDisks[disk].Metadata && !dstDisks[disk].Metadata {\n\t\t\tif value.Id != dstDisks[disk].Id {\n\t\t\t\treturn false\n\t\t\t}\n\t\t} else if srcDisks[disk].Metadata && dstDisks[disk].Metadata {\n\t\t\tif value.Id != dstDisks[disk].Id {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t}\n\treturn true\n}", "func TestResourceListSorting(t *testing.T) {\n\tsortedResourceList := make([]string, len(resourceList))\n\tcopy(sortedResourceList, resourceList)\n\tsort.Strings(sortedResourceList)\n\tfor i := 0; i < len(resourceList); i++ {\n\t\tif resourceList[i] != sortedResourceList[i] {\n\t\t\tt.Errorf(\"Expected resourceList[%d] = \\\"%s\\\", resourceList is not correctly sorted.\", i, sortedResourceList[i])\n\t\t\tbreak\n\t\t}\n\t}\n}", "func compare(a, b *Record) bool {\n\tif a == nil || b == nil {\n\t\treturn false\n\t}\n\tif a.Name != b.Name {\n\t\treturn false\n\t}\n\tif a.Type != b.Type {\n\t\treturn false\n\t}\n\tif a.TTL != b.TTL {\n\t\treturn false\n\t}\n\tif len(a.Data) != len(b.Data) {\n\t\treturn false\n\t}\n\tfor i := 0; i < len(a.Data); i++ {\n\t\tif a.Data[i] != b.Data[i] {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func compareTeamMaps(a, b Teams) bool {\n\tfor teamIndex, team := range a.Teams {\n\t\tif team.Name != b.Teams[teamIndex].Name {\n\t\t\treturn false\n\t\t}\n\t\tfor acctIndex, acct := range team.Accounts {\n\t\t\tif acct != b.Teams[teamIndex].Accounts[acctIndex] {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t}\n\n\treturn true\n}", "func AssertStringsAreEqualJSON(t testing.TB, expected, got string) {\n\treplacer := strings.NewReplacer(\"\\n\", \"\", \"\\t\", \"\")\n\n\texpected = replacer.Replace(expected)\n\tgot = replacer.Replace(got)\n\n\tvar obj1, obj2 interface{}\n\n\terr := 
json.Unmarshal([]byte(expected), &obj1)\n\tif err != nil {\n\t\terr = fmt.Errorf(`expected is not JSON. value = \"%v\", err = \"%v\"`, expected, err)\n\t}\n\tFailOnError(t, err)\n\n\terr = json.Unmarshal([]byte(got), &obj2)\n\tif err != nil {\n\t\terr = fmt.Errorf(`got is not JSON. value = \"%v\", err = \"%v\"`, got, err)\n\t}\n\tFailOnError(t, err)\n\n\tassert.Equal(\n\t\tt,\n\t\tobj1,\n\t\tobj2,\n\t\tfmt.Sprintf(`%v\nand\n%v\nshould represent the same json`, expected, got),\n\t)\n}", "func Compare[T any](t testing.TB, x, y T) bool {\n\treturn objectsAreEqual(x, y)\n}", "func compareMaps(info string, a, b map[int]bool) {\n\tif len(a) != len(b) {\n\t\tpanic(errors.Bug(\"%s - a.len:%d != b.len:%d\", info, len(a), len(b)))\n\t}\n\tfor k, v := range a {\n\t\tif _, ok := b[k]; !ok {\n\t\t\tpanic(fmt.Sprintf(\"%s - k:%d (%t)\\n\", info, k, v))\n\t\t}\n\t}\n}", "func gatherAndCompare(c prometheus.Collector, expected string, metricNames []string) error {\n\texpected = removeUnusedWhitespace(expected)\n\n\treg := prometheus.NewPedanticRegistry()\n\tif err := reg.Register(c); err != nil {\n\t\treturn fmt.Errorf(\"registering collector failed: %s\", err)\n\t}\n\tmetrics, err := reg.Gather()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"gathering metrics failed: %s\", err)\n\t}\n\tif metricNames != nil {\n\t\tmetrics = filterMetrics(metrics, metricNames)\n\t}\n\tvar tp expfmt.TextParser\n\texpectedMetrics, err := tp.TextToMetricFamilies(bytes.NewReader([]byte(expected)))\n\tif err != nil {\n\t\treturn fmt.Errorf(\"parsing expected metrics failed: %s\", err)\n\t}\n\n\tif !reflect.DeepEqual(metrics, normalizeMetricFamilies(expectedMetrics)) {\n\t\t// Encode the gathered output to the readbale text format for comparison.\n\t\tvar buf1 bytes.Buffer\n\t\tenc := expfmt.NewEncoder(&buf1, expfmt.FmtText)\n\t\tfor _, mf := range metrics {\n\t\t\tif err := enc.Encode(mf); err != nil {\n\t\t\t\treturn fmt.Errorf(\"encoding result failed: %s\", err)\n\t\t\t}\n\t\t}\n\t\t// Encode 
normalized expected metrics again to generate them in the same ordering\n\t\t// the registry does to spot differences more easily.\n\t\tvar buf2 bytes.Buffer\n\t\tenc = expfmt.NewEncoder(&buf2, expfmt.FmtText)\n\t\tfor _, mf := range normalizeMetricFamilies(expectedMetrics) {\n\t\t\tif err := enc.Encode(mf); err != nil {\n\t\t\t\treturn fmt.Errorf(\"encoding result failed: %s\", err)\n\t\t\t}\n\t\t}\n\n\t\treturn fmt.Errorf(`\nmetric output does not match expectation; want:\n\n%s\n\ngot:\n\n%s\n`, buf2.String(), buf1.String())\n\t}\n\treturn nil\n}", "func CmpResources(a, b *v1.ResourceRequirements) bool {\n\treturn CmpResourceList(&a.Limits, &b.Limits) && CmpResourceList(&a.Requests, &b.Requests)\n}", "func AssertPlannedReparentShardResponsesEqual(t *testing.T, expected *vtctldatapb.PlannedReparentShardResponse, actual *vtctldatapb.PlannedReparentShardResponse) {\n\tt.Helper()\n\n\texpected = proto.Clone(expected).(*vtctldatapb.PlannedReparentShardResponse)\n\texpected.Events = nil\n\n\tactual = proto.Clone(actual).(*vtctldatapb.PlannedReparentShardResponse)\n\tactual.Events = nil\n\n\tutils.MustMatch(t, expected, actual)\n}", "func AssertPlannedReparentShardResponsesEqual(t *testing.T, expected *vtctldatapb.PlannedReparentShardResponse, actual *vtctldatapb.PlannedReparentShardResponse) {\n\tt.Helper()\n\n\texpected = proto.Clone(expected).(*vtctldatapb.PlannedReparentShardResponse)\n\texpected.Events = nil\n\n\tactual = proto.Clone(actual).(*vtctldatapb.PlannedReparentShardResponse)\n\tactual.Events = nil\n\n\tutils.MustMatch(t, expected, actual)\n}", "func shouldResembleByteMap(actual interface{}, expected ...interface{}) string {\n\tact, ok := actual.(map[string]*bytes.Buffer)\n\tif !ok {\n\t\treturn \"actual is not a map[string]*bytes.Buffer\"\n\t}\n\tif len(expected) != 1 {\n\t\treturn \"expected is not a map[string][]byte\"\n\t}\n\texp, ok := expected[0].(map[string][]byte)\n\tif !ok {\n\t\treturn \"expected is not a map[string][]byte\"\n\t}\n\n\tif len(act) 
!= len(exp) {\n\t\treturn fmt.Sprintf(\"len(actual) != len(expected): %v != %v\", len(act), len(exp))\n\t}\n\n\tfor k, v := range act {\n\t\tif got, want := v.Bytes(), exp[k]; !reflect.DeepEqual(got, want) {\n\t\t\treturn fmt.Sprintf(\"actual[%q] != expected[%q]: %q != %q\", k, k, got, want)\n\t\t}\n\t}\n\treturn \"\"\n}", "func diffUrlMap(c *Client, desired, actual *UrlMap, opts ...dcl.ApplyOption) ([]urlMapDiff, error) {\n\tif desired == nil || actual == nil {\n\t\treturn nil, fmt.Errorf(\"nil resource passed to diff - always a programming error: %#v, %#v\", desired, actual)\n\t}\n\n\tvar diffs []urlMapDiff\n\tif compareUrlMapDefaultRouteAction(c, desired.DefaultRouteAction, actual.DefaultRouteAction) {\n\t\tc.Config.Logger.Infof(\"Detected diff in DefaultRouteAction.\\nDESIRED: %v\\nACTUAL: %v\", desired.DefaultRouteAction, actual.DefaultRouteAction)\n\n\t\tdiffs = append(diffs, urlMapDiff{\n\t\t\tUpdateOp: &updateUrlMapUpdateOperation{},\n\t\t\tFieldName: \"DefaultRouteAction\",\n\t\t})\n\n\t}\n\tif !dcl.IsZeroValue(desired.DefaultService) && !dcl.StringCanonicalize(desired.DefaultService, actual.DefaultService) {\n\t\tc.Config.Logger.Infof(\"Detected diff in DefaultService.\\nDESIRED: %v\\nACTUAL: %v\", desired.DefaultService, actual.DefaultService)\n\n\t\tdiffs = append(diffs, urlMapDiff{\n\t\t\tUpdateOp: &updateUrlMapUpdateOperation{},\n\t\t\tFieldName: \"DefaultService\",\n\t\t})\n\n\t}\n\tif compareUrlMapDefaultUrlRedirect(c, desired.DefaultUrlRedirect, actual.DefaultUrlRedirect) {\n\t\tc.Config.Logger.Infof(\"Detected diff in DefaultUrlRedirect.\\nDESIRED: %v\\nACTUAL: %v\", desired.DefaultUrlRedirect, actual.DefaultUrlRedirect)\n\n\t\tdiffs = append(diffs, urlMapDiff{\n\t\t\tUpdateOp: &updateUrlMapUpdateOperation{},\n\t\t\tFieldName: \"DefaultUrlRedirect\",\n\t\t})\n\n\t}\n\tif !dcl.IsZeroValue(desired.Description) && !dcl.StringCanonicalize(desired.Description, actual.Description) {\n\t\tc.Config.Logger.Infof(\"Detected diff in Description.\\nDESIRED: 
%v\\nACTUAL: %v\", desired.Description, actual.Description)\n\n\t\tdiffs = append(diffs, urlMapDiff{\n\t\t\tUpdateOp: &updateUrlMapUpdateOperation{},\n\t\t\tFieldName: \"Description\",\n\t\t})\n\n\t}\n\tif compareUrlMapHeaderAction(c, desired.HeaderAction, actual.HeaderAction) {\n\t\tc.Config.Logger.Infof(\"Detected diff in HeaderAction.\\nDESIRED: %v\\nACTUAL: %v\", desired.HeaderAction, actual.HeaderAction)\n\n\t\tdiffs = append(diffs, urlMapDiff{\n\t\t\tUpdateOp: &updateUrlMapUpdateOperation{},\n\t\t\tFieldName: \"HeaderAction\",\n\t\t})\n\n\t}\n\tif compareUrlMapHostRuleSlice(c, desired.HostRule, actual.HostRule) {\n\t\tc.Config.Logger.Infof(\"Detected diff in HostRule.\\nDESIRED: %v\\nACTUAL: %v\", desired.HostRule, actual.HostRule)\n\n\t\ttoAdd, toRemove := compareUrlMapHostRuleSets(c, desired.HostRule, actual.HostRule)\n\t\tc.Config.Logger.Infof(\"diff in HostRule is a set field - recomparing with set logic. \\nto add: %#v\\nto remove: %#v\", toAdd, toRemove)\n\t\tif len(toAdd) != 0 || len(toRemove) != 0 {\n\t\t\tc.Config.Logger.Info(\"diff in HostRule persists after set logic analysis.\")\n\t\t\tdiffs = append(diffs, urlMapDiff{\n\t\t\t\tUpdateOp: &updateUrlMapUpdateOperation{},\n\t\t\t\tFieldName: \"HostRule\",\n\t\t\t})\n\t\t}\n\n\t}\n\tif !dcl.IsZeroValue(desired.Name) && !dcl.StringCanonicalize(desired.Name, actual.Name) {\n\t\tc.Config.Logger.Infof(\"Detected diff in Name.\\nDESIRED: %v\\nACTUAL: %v\", desired.Name, actual.Name)\n\n\t\tdiffs = append(diffs, urlMapDiff{\n\t\t\tUpdateOp: &updateUrlMapUpdateOperation{},\n\t\t\tFieldName: \"Name\",\n\t\t})\n\n\t}\n\tif compareUrlMapPathMatcherSlice(c, desired.PathMatcher, actual.PathMatcher) {\n\t\tc.Config.Logger.Infof(\"Detected diff in PathMatcher.\\nDESIRED: %v\\nACTUAL: %v\", desired.PathMatcher, actual.PathMatcher)\n\n\t\ttoAdd, toRemove := compareUrlMapPathMatcherSets(c, desired.PathMatcher, actual.PathMatcher)\n\t\tc.Config.Logger.Infof(\"diff in PathMatcher is a set field - recomparing with set 
logic. \\nto add: %#v\\nto remove: %#v\", toAdd, toRemove)\n\t\tif len(toAdd) != 0 || len(toRemove) != 0 {\n\t\t\tc.Config.Logger.Info(\"diff in PathMatcher persists after set logic analysis.\")\n\t\t\tdiffs = append(diffs, urlMapDiff{\n\t\t\t\tUpdateOp: &updateUrlMapUpdateOperation{},\n\t\t\t\tFieldName: \"PathMatcher\",\n\t\t\t})\n\t\t}\n\n\t}\n\tif !dcl.IsZeroValue(desired.Region) && !dcl.StringCanonicalize(desired.Region, actual.Region) {\n\t\tc.Config.Logger.Infof(\"Detected diff in Region.\\nDESIRED: %v\\nACTUAL: %v\", desired.Region, actual.Region)\n\t\tdiffs = append(diffs, urlMapDiff{\n\t\t\tRequiresRecreate: true,\n\t\t\tFieldName: \"Region\",\n\t\t})\n\t}\n\tif compareUrlMapTestSlice(c, desired.Test, actual.Test) {\n\t\tc.Config.Logger.Infof(\"Detected diff in Test.\\nDESIRED: %v\\nACTUAL: %v\", desired.Test, actual.Test)\n\n\t\tdiffs = append(diffs, urlMapDiff{\n\t\t\tUpdateOp: &updateUrlMapUpdateOperation{},\n\t\t\tFieldName: \"Test\",\n\t\t})\n\n\t}\n\t// We need to ensure that this list does not contain identical operations *most of the time*.\n\t// There may be some cases where we will need multiple copies of the same operation - for instance,\n\t// if a resource has multiple prerequisite-containing fields. 
For now, we don't know of any\n\t// such examples and so we deduplicate unconditionally.\n\n\t// The best way for us to do this is to iterate through the list\n\t// and remove any copies of operations which are identical to a previous operation.\n\t// This is O(n^2) in the number of operations, but n will always be very small,\n\t// even 10 would be an extremely high number.\n\tvar opTypes []string\n\tvar deduped []urlMapDiff\n\tfor _, d := range diffs {\n\t\t// Two operations are considered identical if they have the same type.\n\t\t// The type of an operation is derived from the name of the update method.\n\t\tif !dcl.StringSliceContains(fmt.Sprintf(\"%T\", d.UpdateOp), opTypes) {\n\t\t\tdeduped = append(deduped, d)\n\t\t\topTypes = append(opTypes, fmt.Sprintf(\"%T\", d.UpdateOp))\n\t\t} else {\n\t\t\tc.Config.Logger.Infof(\"Omitting planned operation of type %T since once is already scheduled.\", d.UpdateOp)\n\t\t}\n\t}\n\n\treturn deduped, nil\n}", "func CmpResourceList(a, b *v1.ResourceList) bool {\n\treturn a.Cpu().Cmp(*b.Cpu()) == 0 &&\n\t\ta.Memory().Cmp(*b.Memory()) == 0 &&\n\t\tb.Pods().Cmp(*b.Pods()) == 0 &&\n\t\tb.StorageEphemeral().Cmp(*b.StorageEphemeral()) == 0\n}", "func RunResourceConversionTestForDnsZonesCAARecord(subject DnsZonesCAARecord) string {\n\t// Copy subject to make sure conversion doesn't modify it\n\tcopied := subject.DeepCopy()\n\n\t// Convert to our hub version\n\tvar hub v20180501s.DnsZonesCAARecord\n\terr := copied.ConvertTo(&hub)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\t// Convert from our hub version\n\tvar actual DnsZonesCAARecord\n\terr = actual.ConvertFrom(&hub)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\t// Compare actual with what we started with\n\tmatch := cmp.Equal(subject, actual, cmpopts.EquateEmpty())\n\tif !match {\n\t\tactualFmt := pretty.Sprint(actual)\n\t\tsubjectFmt := pretty.Sprint(subject)\n\t\tresult := diff.Diff(subjectFmt, actualFmt)\n\t\treturn result\n\t}\n\n\treturn \"\"\n}", "func (s 
*AirflowUISpec) Differs(expected ResourceInfo, observed ResourceInfo) bool {\n\t// TODO\n\tswitch expected.Obj.(type) {\n\tcase *resources.Secret:\n\t\t// Dont update a secret\n\t\treturn false\n\t}\n\treturn true\n}", "func SortResources(resources []*metav1.APIResourceList) {\n\tsort.SliceStable(resources, func(i, j int) bool {\n\t\tleft := resources[i]\n\t\tleftGV, _ := schema.ParseGroupVersion(left.GroupVersion)\n\t\t// not checking error because it should be impossible to fail to parse data coming from the\n\t\t// apiserver\n\t\tif leftGV.Group == \"extensions\" {\n\t\t\t// always sort extensions at the bottom by saying left is \"greater\"\n\t\t\treturn false\n\t\t}\n\n\t\tright := resources[j]\n\t\trightGV, _ := schema.ParseGroupVersion(right.GroupVersion)\n\t\t// not checking error because it should be impossible to fail to parse data coming from the\n\t\t// apiserver\n\t\tif rightGV.Group == \"extensions\" {\n\t\t\t// always sort extensions at the bottom by saying left is \"less\"\n\t\t\treturn true\n\t\t}\n\n\t\treturn i < j\n\t})\n}", "func TestRedpandaResourceRequirements(t *testing.T) {\n\ttype test struct {\n\t\tname string\n\t\tsetRequestsCPU resource.Quantity\n\t\tsetRequestsMem resource.Quantity\n\t\tsetRedpandaCPU resource.Quantity\n\t\tsetRedpandaMem resource.Quantity\n\t\texpectedRedpandaCPU resource.Quantity\n\t\texpectedRedpandaMem resource.Quantity\n\t}\n\tmakeResources := func(t test) v1alpha1.RedpandaResourceRequirements {\n\t\treturn v1alpha1.RedpandaResourceRequirements{\n\t\t\tResourceRequirements: corev1.ResourceRequirements{\n\t\t\t\tRequests: corev1.ResourceList{\n\t\t\t\t\tcorev1.ResourceMemory: t.setRequestsMem,\n\t\t\t\t\tcorev1.ResourceCPU: t.setRequestsCPU,\n\t\t\t\t},\n\t\t\t},\n\t\t\tRedpanda: corev1.ResourceList{\n\t\t\t\tcorev1.ResourceMemory: t.setRedpandaMem,\n\t\t\t\tcorev1.ResourceCPU: t.setRedpandaCPU,\n\t\t\t},\n\t\t}\n\t}\n\n\tt.Run(\"Memory\", func(t *testing.T) {\n\t\ttests := []test{\n\t\t\t{\n\t\t\t\tname: 
\"RedpandaMemory is set from requests.memory\",\n\t\t\t\tsetRequestsMem: resource.MustParse(\"3000Mi\"),\n\t\t\t\texpectedRedpandaMem: resource.MustParse(\"2700Mi\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaMemory is set from lower redpanda.memory\",\n\t\t\t\tsetRequestsMem: resource.MustParse(\"4000Mi\"),\n\t\t\t\tsetRedpandaMem: resource.MustParse(\"3000Mi\"),\n\t\t\t\texpectedRedpandaMem: resource.MustParse(\"3000Mi\"),\n\t\t\t},\n\t\t}\n\t\tfor _, tt := range tests {\n\t\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\t\trrr := makeResources(tt)\n\t\t\t\tassert.Equal(t, tt.expectedRedpandaMem.Value(), rrr.RedpandaMemory().Value())\n\t\t\t})\n\t\t}\n\t})\n\n\tt.Run(\"CPU\", func(t *testing.T) {\n\t\ttests := []test{\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is set from integer requests.cpu\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"1\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"20Gi\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"1\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is set from milli requests.cpu\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"1000m\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"20Gi\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"1\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is rounded up from milli requests.cpu\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"1001m\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"20Gi\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"2\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is set from lower redpanda.cpu\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"2\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"20Gi\"),\n\t\t\t\tsetRedpandaCPU: resource.MustParse(\"1\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"1\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is set from higher redpanda.cpu\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"1\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"20Gi\"),\n\t\t\t\tsetRedpandaCPU: 
resource.MustParse(\"2\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"2\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is rounded up from milli redpanda.cpu\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"1\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"20Gi\"),\n\t\t\t\tsetRedpandaCPU: resource.MustParse(\"1001m\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"2\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is limited by 2GiB/core\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"10\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"4Gi\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"2\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU has a minimum if requests >0\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"100m\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"100Mi\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"1\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU not set if no request\",\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"0\"),\n\t\t\t},\n\t\t}\n\t\tfor _, tt := range tests {\n\t\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\t\trrr := makeResources(tt)\n\t\t\t\tassert.Equal(t, tt.expectedRedpandaCPU.Value(), rrr.RedpandaCPU().Value())\n\t\t\t})\n\t\t}\n\t})\n}", "func equalNetworks(net1, net2 string) bool {\n\tif strings.HasPrefix(net1, netalloc_api.AllocRefPrefix) ||\n\t\tstrings.HasPrefix(net2, netalloc_api.AllocRefPrefix) {\n\t\treturn net1 == net2\n\t}\n\t_, n1, err1 := net.ParseCIDR(net1)\n\t_, n2, err2 := net.ParseCIDR(net2)\n\tif err1 != nil || err2 != nil {\n\t\t// if parsing fails, compare as strings\n\t\treturn strings.EqualFold(net1, net2)\n\t}\n\treturn n1.IP.Equal(n2.IP) && bytes.Equal(n1.Mask, n2.Mask)\n}", "func TestEqual(t *testing.T) {\n\ttables := []struct {\n\t\tx []string\n\t\ty []string\n\t\texpected bool\n\t}{\n\t\t{[]string{}, []string{}, true},\n\t\t{[]string{}, []string{\"\"}, false},\n\t\t{[]string{\"\"}, []string{\"\"}, true},\n\t\t{[]string{\"\"}, []string{\"a\"}, 
false},\n\t\t{[]string{\"a\"}, []string{\"a\", \"a\"}, false},\n\t\t{[]string{\"b\"}, []string{\"a\"}, false},\n\t\t{[]string{\"\", \"\", \"\"}, []string{\"\", \"\", \"\"}, true},\n\t\t{[]string{\"a\", \"b\", \"c\"}, []string{\"a\", \"b\", \"e\"}, false},\n\t}\n\n\tfor _, table := range tables {\n\t\tresult := Equal(table.x, table.y)\n\t\tif result != table.expected {\n\t\t\tt.Errorf(\"Match failed for (%s, %s). Expected %t, got %t\",\n\t\t\t\ttable.x, table.y, table.expected, result)\n\t\t}\n\t}\n}", "func compareAuthSet(expected *model.AuthSet, actual *model.AuthSet, t *testing.T) {\n\tassert.Equal(t, expected.IdData, actual.IdData)\n\tassert.Equal(t, expected.PubKey, actual.PubKey)\n\tassert.Equal(t, expected.DeviceId, actual.DeviceId)\n\tassert.Equal(t, expected.IdDataStruct, actual.IdDataStruct)\n\tassert.Equal(t, expected.IdDataSha256, actual.IdDataSha256)\n\tassert.Equal(t, expected.Status, actual.Status)\n\tcompareTime(uto.Time(expected.Timestamp), uto.Time(actual.Timestamp), t)\n}", "func (s *StorageSuite) TestServersEquality(c *check.C) {\n\tservers := Servers{{\n\t\tAdvertiseIP: \"192.168.1.1\",\n\t\tHostname: \"node-1\",\n\t\tRole: \"worker\",\n\t}}\n\ttestCases := []struct {\n\t\tservers Servers\n\t\tresult bool\n\t\tcomment string\n\t}{\n\t\t{\n\t\t\tservers: Servers{{\n\t\t\t\tAdvertiseIP: \"192.168.1.1\",\n\t\t\t\tHostname: \"node-1\",\n\t\t\t\tRole: \"worker\",\n\t\t\t}},\n\t\t\tresult: true,\n\t\t\tcomment: \"Servers should be equal\",\n\t\t},\n\t\t{\n\t\t\tservers: Servers{\n\t\t\t\t{\n\t\t\t\t\tAdvertiseIP: \"192.168.1.1\",\n\t\t\t\t\tHostname: \"node-1\",\n\t\t\t\t\tRole: \"worker\",\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAdvertiseIP: \"192.168.1.2\",\n\t\t\t\t\tHostname: \"node-2\",\n\t\t\t\t\tRole: \"worker\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: false,\n\t\t\tcomment: \"Servers should not be equal: different number of servers\",\n\t\t},\n\t\t{\n\t\t\tservers: Servers{{\n\t\t\t\tAdvertiseIP: \"192.168.1.2\",\n\t\t\t\tHostname: 
\"node-1\",\n\t\t\t\tRole: \"worker\",\n\t\t\t}},\n\t\t\tresult: false,\n\t\t\tcomment: \"Servers should not be equal: different IPs\",\n\t\t},\n\t\t{\n\t\t\tservers: Servers{{\n\t\t\t\tAdvertiseIP: \"192.168.1.1\",\n\t\t\t\tHostname: \"node-2\",\n\t\t\t\tRole: \"worker\",\n\t\t\t}},\n\t\t\tresult: false,\n\t\t\tcomment: \"Servers should not be equal: different hostnames\",\n\t\t},\n\t\t{\n\t\t\tservers: Servers{{\n\t\t\t\tAdvertiseIP: \"192.168.1.1\",\n\t\t\t\tHostname: \"node-1\",\n\t\t\t\tRole: \"db\",\n\t\t\t}},\n\t\t\tresult: false,\n\t\t\tcomment: \"Servers should not be equal: different roles\",\n\t\t},\n\t}\n\tfor _, tc := range testCases {\n\t\tc.Assert(servers.IsEqualTo(tc.servers), check.Equals, tc.result,\n\t\t\tcheck.Commentf(tc.comment))\n\t}\n}", "func (p *Profile) Compare(a, b string) bool {\n\tvar buf buffers\n\n\takey, err := buf.enforce(p, []byte(a), true)\n\tif err != nil {\n\t\treturn false\n\t}\n\n\tbuf = buffers{}\n\tbkey, err := buf.enforce(p, []byte(b), true)\n\tif err != nil {\n\t\treturn false\n\t}\n\n\treturn bytes.Equal(akey, bkey)\n}", "func AllocatableResourceListFromNodeResourceTopology(nodeTopo *v1alpha2.NodeResourceTopology) map[string]corev1.ResourceList {\n\tallocRes := make(map[string]corev1.ResourceList)\n\tfor _, zone := range nodeTopo.Zones {\n\t\tif zone.Type != \"Node\" {\n\t\t\tcontinue\n\t\t}\n\t\tresList := make(corev1.ResourceList)\n\t\tfor _, res := range zone.Resources {\n\t\t\tresList[corev1.ResourceName(res.Name)] = res.Allocatable.DeepCopy()\n\t\t}\n\t\tif len(resList) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\tallocRes[zone.Name] = resList\n\t}\n\treturn allocRes\n}", "func CompareJSON(a, b map[string]interface{}, skip []string) ([]string, []string, []string, bool) {\n\tvar missKeys []string\n\tvar leftKeys []string\n\tvar diffKeys []string\n\tisSame := true\n\n\tfor k, v := range a {\n\t\tvb, ok := b[k]\n\t\tif ok {\n\t\t\tdelete(b, k)\n\t\t}\n\t\tif contains(skip, k) {\n\t\t\tcontinue\n\t\t}\n\t\tif !ok 
{\n\t\t\tmissKeys = append(missKeys, k)\n\t\t\tisSame = false\n\t\t\tcontinue\n\t\t}\n\t\tif reflect.TypeOf(v) != reflect.TypeOf(vb) {\n\t\t\tif reflect.TypeOf(v) == nil && reflect.ValueOf(vb).Len() == 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif reflect.TypeOf(vb) == nil && reflect.ValueOf(v).Len() == 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tdiffKeys = append(diffKeys, diffDebug(k, v, vb))\n\t\t\tisSame = false\n\t\t\tcontinue\n\t\t}\n\t\tswitch v.(type) {\n\t\tcase map[string]interface{}:\n\t\t\tnextSkip := skip\n\t\t\tfor _, s := range skip {\n\t\t\t\tif strings.Contains(s, \":\") {\n\t\t\t\t\ttmp := strings.Split(s, \":\")\n\t\t\t\t\tif tmp[0] == k {\n\t\t\t\t\t\tnextSkip = append(nextSkip, strings.Join(tmp[1:], \":\"))\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tsubmiss, subleft, subdiff, ok := CompareJSON(v.(map[string]interface{}),\n\t\t\t\tvb.(map[string]interface{}), nextSkip)\n\t\t\tmissKeys = append(missKeys, addPrefix(submiss, k)...)\n\t\t\tleftKeys = append(leftKeys, addPrefix(subleft, k)...)\n\t\t\tdiffKeys = append(diffKeys, addPrefix(subdiff, k)...)\n\t\t\tif !ok {\n\t\t\t\tisSame = false\n\t\t\t}\n\t\tcase []interface{}:\n\t\t\ttmpa := v.([]interface{})\n\t\t\ttmpb := vb.([]interface{})\n\t\t\tif len(tmpa) != len(tmpb) {\n\t\t\t\tdiffKeys = append(diffKeys, diffDebug(k, v, vb))\n\t\t\t\tisSame = false\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tm := make(map[interface{}]bool)\n\t\t\tfor i := 0; i < len(tmpb); i++ {\n\t\t\t\tm[tmpb[i]] = true\n\t\t\t}\n\n\t\t\tfor i := 0; i < len(tmpa); i++ {\n\t\t\t\tif _, ok := m[tmpa[i]]; !ok {\n\t\t\t\t\tdiffKeys = append(diffKeys, diffDebug(k, v, vb))\n\t\t\t\t\tisSame = false\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\tdefault:\n\t\t\tif !reflect.DeepEqual(v, vb) {\n\t\t\t\tdiffKeys = append(diffKeys, diffDebug(k, v, vb))\n\t\t\t\tisSame = false\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(b) > 0 {\n\t\tfor k := range b {\n\t\t\tleftKeys = append(leftKeys, k)\n\t\t}\n\t}\n\n\treturn missKeys, leftKeys, diffKeys, isSame\n}", "func 
EqualContainers(expected corev1.Container, found corev1.Container) bool {\n\tlogger := log.WithValues(\"func\", \"EqualContainers\")\n\tif !reflect.DeepEqual(found.Name, expected.Name) {\n\t\tlogger.Info(\"Container name not equal\", \"Found\", found.Name, \"Expected\", expected.Name)\n\t\treturn false\n\t}\n\tif !reflect.DeepEqual(found.Image, expected.Image) {\n\t\tlogger.Info(\"Image not equal\", \"Found\", found.Image, \"Expected\", expected.Image)\n\t\treturn false\n\t}\n\tif !reflect.DeepEqual(found.ImagePullPolicy, expected.ImagePullPolicy) {\n\t\tlogger.Info(\"ImagePullPolicy not equal\", \"Found\", found.ImagePullPolicy, \"Expected\", expected.ImagePullPolicy)\n\t\treturn false\n\t}\n\tif !reflect.DeepEqual(found.VolumeMounts, expected.VolumeMounts) {\n\t\tlogger.Info(\"VolumeMounts not equal\", \"Found\", found.VolumeMounts, \"Expected\", expected.VolumeMounts)\n\t\treturn false\n\t}\n\tif !reflect.DeepEqual(found.SecurityContext, expected.SecurityContext) {\n\t\tlogger.Info(\"SecurityContext not equal\", \"Found\", found.SecurityContext, \"Expected\", expected.SecurityContext)\n\t\treturn false\n\t}\n\tif !equalResources(found.Resources, expected.Resources) {\n\t\tlogger.Info(\"Resources not equal\", \"Found\", found.Resources, \"Expected\", expected.Resources)\n\t\treturn false\n\t}\n\treturn true\n}", "func newResourceDelta(\n\ta *resource,\n\tb *resource,\n) *ackcompare.Delta {\n\tdelta := ackcompare.NewDelta()\n\tif (a == nil && b != nil) ||\n\t\t(a != nil && b == nil) {\n\t\tdelta.Add(\"\", a, b)\n\t\treturn delta\n\t}\n\tcustomSetDefaults(a, b)\n\n\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig, b.ko.Spec.HyperParameterTuningJobConfig) {\n\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig\", a.ko.Spec.HyperParameterTuningJobConfig, b.ko.Spec.HyperParameterTuningJobConfig)\n\t} else if a.ko.Spec.HyperParameterTuningJobConfig != nil && b.ko.Spec.HyperParameterTuningJobConfig != nil {\n\t\tif 
ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective != nil && b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName != nil && b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName != nil {\n\t\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName != *b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type) 
{\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type != nil && b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type != nil {\n\t\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type != *b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ParameterRanges\", a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges != nil && b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges != nil {\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges\", a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges)\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges, 
b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges\", a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges)\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges\", a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits != nil && b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs != nil && b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs != nil {\n\t\t\t\tif 
*a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs != *b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs != nil && b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs != nil {\n\t\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs != *b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.Strategy, b.ko.Spec.HyperParameterTuningJobConfig.Strategy) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.Strategy\", a.ko.Spec.HyperParameterTuningJobConfig.Strategy, b.ko.Spec.HyperParameterTuningJobConfig.Strategy)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.Strategy != nil && b.ko.Spec.HyperParameterTuningJobConfig.Strategy != nil 
{\n\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.Strategy != *b.ko.Spec.HyperParameterTuningJobConfig.Strategy {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.Strategy\", a.ko.Spec.HyperParameterTuningJobConfig.Strategy, b.ko.Spec.HyperParameterTuningJobConfig.Strategy)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType, b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType\", a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType, b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType != nil && b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType != nil {\n\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType != *b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType\", a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType, b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria\", a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria != nil && b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue, 
b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue\", a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue != nil && b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue != nil {\n\t\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue != *b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue\", a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobName, b.ko.Spec.HyperParameterTuningJobName) {\n\t\tdelta.Add(\"Spec.HyperParameterTuningJobName\", a.ko.Spec.HyperParameterTuningJobName, b.ko.Spec.HyperParameterTuningJobName)\n\t} else if a.ko.Spec.HyperParameterTuningJobName != nil && b.ko.Spec.HyperParameterTuningJobName != nil {\n\t\tif *a.ko.Spec.HyperParameterTuningJobName != *b.ko.Spec.HyperParameterTuningJobName {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobName\", a.ko.Spec.HyperParameterTuningJobName, b.ko.Spec.HyperParameterTuningJobName)\n\t\t}\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition, b.ko.Spec.TrainingJobDefinition) {\n\t\tdelta.Add(\"Spec.TrainingJobDefinition\", a.ko.Spec.TrainingJobDefinition, b.ko.Spec.TrainingJobDefinition)\n\t} else if 
a.ko.Spec.TrainingJobDefinition != nil && b.ko.Spec.TrainingJobDefinition != nil {\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification != nil && b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName != nil && b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName != *b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions)\n\t\t\t}\n\t\t\tif 
ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage != nil && b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage != *b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode != nil && b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode != *b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode, 
b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.CheckpointConfig, b.ko.Spec.TrainingJobDefinition.CheckpointConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig, b.ko.Spec.TrainingJobDefinition.CheckpointConfig)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.CheckpointConfig != nil && b.ko.Spec.TrainingJobDefinition.CheckpointConfig != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig.LocalPath\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath != nil && b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath != *b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig.LocalPath\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig.S3URI\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI != nil && b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI != 
*b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig.S3URI\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.DefinitionName, b.ko.Spec.TrainingJobDefinition.DefinitionName) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.DefinitionName\", a.ko.Spec.TrainingJobDefinition.DefinitionName, b.ko.Spec.TrainingJobDefinition.DefinitionName)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.DefinitionName != nil && b.ko.Spec.TrainingJobDefinition.DefinitionName != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.DefinitionName != *b.ko.Spec.TrainingJobDefinition.DefinitionName {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.DefinitionName\", a.ko.Spec.TrainingJobDefinition.DefinitionName, b.ko.Spec.TrainingJobDefinition.DefinitionName)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption, b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption\", a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption, b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption != nil && b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption != *b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption\", a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption, b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption)\n\t\t\t}\n\t\t}\n\t\tif 
ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining, b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableManagedSpotTraining\", a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining, b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining != nil && b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining != *b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableManagedSpotTraining\", a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining, b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation, b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableNetworkIsolation\", a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation, b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation != nil && b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation != *b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableNetworkIsolation\", a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation, b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.HyperParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.HyperParameterRanges\", a.ko.Spec.TrainingJobDefinition.HyperParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges)\n\t\t} else if 
a.ko.Spec.TrainingJobDefinition.HyperParameterRanges != nil && b.ko.Spec.TrainingJobDefinition.HyperParameterRanges != nil {\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges\", a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges)\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges\", a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges)\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges\", a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges)\n\t\t\t}\n\t\t}\n\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.InputDataConfig, b.ko.Spec.TrainingJobDefinition.InputDataConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.InputDataConfig\", a.ko.Spec.TrainingJobDefinition.InputDataConfig, b.ko.Spec.TrainingJobDefinition.InputDataConfig)\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.OutputDataConfig, b.ko.Spec.TrainingJobDefinition.OutputDataConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig\", 
a.ko.Spec.TrainingJobDefinition.OutputDataConfig, b.ko.Spec.TrainingJobDefinition.OutputDataConfig)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.OutputDataConfig != nil && b.ko.Spec.TrainingJobDefinition.OutputDataConfig != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID != nil && b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID != *b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath != nil && b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath != *b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath, 
b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig, b.ko.Spec.TrainingJobDefinition.ResourceConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig\", a.ko.Spec.TrainingJobDefinition.ResourceConfig, b.ko.Spec.TrainingJobDefinition.ResourceConfig)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.InstanceCount\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount != *b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.InstanceCount\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.InstanceType\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType != 
*b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.InstanceType\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID != *b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB != *b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB\", 
a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.RoleARN, b.ko.Spec.TrainingJobDefinition.RoleARN) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.RoleARN\", a.ko.Spec.TrainingJobDefinition.RoleARN, b.ko.Spec.TrainingJobDefinition.RoleARN)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.RoleARN != nil && b.ko.Spec.TrainingJobDefinition.RoleARN != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.RoleARN != *b.ko.Spec.TrainingJobDefinition.RoleARN {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.RoleARN\", a.ko.Spec.TrainingJobDefinition.RoleARN, b.ko.Spec.TrainingJobDefinition.RoleARN)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.StaticHyperParameters, b.ko.Spec.TrainingJobDefinition.StaticHyperParameters) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StaticHyperParameters\", a.ko.Spec.TrainingJobDefinition.StaticHyperParameters, b.ko.Spec.TrainingJobDefinition.StaticHyperParameters)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.StaticHyperParameters != nil && b.ko.Spec.TrainingJobDefinition.StaticHyperParameters != nil {\n\t\t\tif !ackcompare.MapStringStringPEqual(a.ko.Spec.TrainingJobDefinition.StaticHyperParameters, b.ko.Spec.TrainingJobDefinition.StaticHyperParameters) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StaticHyperParameters\", a.ko.Spec.TrainingJobDefinition.StaticHyperParameters, b.ko.Spec.TrainingJobDefinition.StaticHyperParameters)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.StoppingCondition, b.ko.Spec.TrainingJobDefinition.StoppingCondition) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition\", a.ko.Spec.TrainingJobDefinition.StoppingCondition, b.ko.Spec.TrainingJobDefinition.StoppingCondition)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.StoppingCondition != nil && 
b.ko.Spec.TrainingJobDefinition.StoppingCondition != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds\", a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds != nil && b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds != *b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds\", a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds\", a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds != nil && b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds != *b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds\", a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds, 
b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.TuningObjective, b.ko.Spec.TrainingJobDefinition.TuningObjective) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective\", a.ko.Spec.TrainingJobDefinition.TuningObjective, b.ko.Spec.TrainingJobDefinition.TuningObjective)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.TuningObjective != nil && b.ko.Spec.TrainingJobDefinition.TuningObjective != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName, b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective.MetricName\", a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName, b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName != nil && b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName != *b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective.MetricName\", a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName, b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.TuningObjective.Type, b.ko.Spec.TrainingJobDefinition.TuningObjective.Type) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective.Type\", a.ko.Spec.TrainingJobDefinition.TuningObjective.Type, b.ko.Spec.TrainingJobDefinition.TuningObjective.Type)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.TuningObjective.Type != nil && b.ko.Spec.TrainingJobDefinition.TuningObjective.Type != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.TuningObjective.Type != *b.ko.Spec.TrainingJobDefinition.TuningObjective.Type 
{\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective.Type\", a.ko.Spec.TrainingJobDefinition.TuningObjective.Type, b.ko.Spec.TrainingJobDefinition.TuningObjective.Type)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.VPCConfig, b.ko.Spec.TrainingJobDefinition.VPCConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.VPCConfig\", a.ko.Spec.TrainingJobDefinition.VPCConfig, b.ko.Spec.TrainingJobDefinition.VPCConfig)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.VPCConfig != nil && b.ko.Spec.TrainingJobDefinition.VPCConfig != nil {\n\t\t\tif !ackcompare.SliceStringPEqual(a.ko.Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs, b.ko.Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs\", a.ko.Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs, b.ko.Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs)\n\t\t\t}\n\t\t\tif !ackcompare.SliceStringPEqual(a.ko.Spec.TrainingJobDefinition.VPCConfig.Subnets, b.ko.Spec.TrainingJobDefinition.VPCConfig.Subnets) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.VPCConfig.Subnets\", a.ko.Spec.TrainingJobDefinition.VPCConfig.Subnets, b.ko.Spec.TrainingJobDefinition.VPCConfig.Subnets)\n\t\t\t}\n\t\t}\n\t}\n\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinitions, b.ko.Spec.TrainingJobDefinitions) {\n\t\tdelta.Add(\"Spec.TrainingJobDefinitions\", a.ko.Spec.TrainingJobDefinitions, b.ko.Spec.TrainingJobDefinitions)\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.WarmStartConfig, b.ko.Spec.WarmStartConfig) {\n\t\tdelta.Add(\"Spec.WarmStartConfig\", a.ko.Spec.WarmStartConfig, b.ko.Spec.WarmStartConfig)\n\t} else if a.ko.Spec.WarmStartConfig != nil && b.ko.Spec.WarmStartConfig != nil {\n\t\tif !reflect.DeepEqual(a.ko.Spec.WarmStartConfig.ParentHyperParameterTuningJobs, b.ko.Spec.WarmStartConfig.ParentHyperParameterTuningJobs) 
{\n\t\t\tdelta.Add(\"Spec.WarmStartConfig.ParentHyperParameterTuningJobs\", a.ko.Spec.WarmStartConfig.ParentHyperParameterTuningJobs, b.ko.Spec.WarmStartConfig.ParentHyperParameterTuningJobs)\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.WarmStartConfig.WarmStartType, b.ko.Spec.WarmStartConfig.WarmStartType) {\n\t\t\tdelta.Add(\"Spec.WarmStartConfig.WarmStartType\", a.ko.Spec.WarmStartConfig.WarmStartType, b.ko.Spec.WarmStartConfig.WarmStartType)\n\t\t} else if a.ko.Spec.WarmStartConfig.WarmStartType != nil && b.ko.Spec.WarmStartConfig.WarmStartType != nil {\n\t\t\tif *a.ko.Spec.WarmStartConfig.WarmStartType != *b.ko.Spec.WarmStartConfig.WarmStartType {\n\t\t\t\tdelta.Add(\"Spec.WarmStartConfig.WarmStartType\", a.ko.Spec.WarmStartConfig.WarmStartType, b.ko.Spec.WarmStartConfig.WarmStartType)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn delta\n}", "func ArePodSpecDifferent(lhs, rhs v1.PodSpec, strictTolerations bool) bool {\n\tchanged := false\n\n\tif len(lhs.Containers) != len(rhs.Containers) {\n\t\tchanged = true\n\t}\n\n\t// check nodeselectors\n\tif !areSelectorsSame(lhs.NodeSelector, rhs.NodeSelector) {\n\t\tchanged = true\n\t}\n\n\t// strictTolerations are for when we compare from the deployments or statefulsets\n\t// if we are seeing if rolled out pods contain changes we don't want strictTolerations\n\t// since k8s may add additional tolerations to pods\n\tif strictTolerations {\n\t\t// check tolerations\n\t\tif !areTolerationsSame(lhs.Tolerations, rhs.Tolerations) {\n\t\t\tchanged = true\n\t\t}\n\t} else {\n\t\t// check tolerations\n\t\tif !containsSameTolerations(lhs.Tolerations, rhs.Tolerations) {\n\t\t\tchanged = true\n\t\t}\n\t}\n\n\t// check container fields\n\tfor _, lContainer := range lhs.Containers {\n\t\tfound := false\n\n\t\tfor _, rContainer := range rhs.Containers {\n\t\t\t// Only compare the images of containers with the same name\n\t\t\tif lContainer.Name != rContainer.Name {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tfound = true\n\n\t\t\t// can't use 
reflect.DeepEqual here, due to k8s adding token mounts\n\t\t\t// check that rContainer is all found within lContainer and that they match by name\n\t\t\tif !containsSameVolumeMounts(lContainer.VolumeMounts, rContainer.VolumeMounts) {\n\t\t\t\tchanged = true\n\t\t\t}\n\n\t\t\tif lContainer.Image != rContainer.Image {\n\t\t\t\tchanged = true\n\t\t\t}\n\n\t\t\tif !comparators.EnvValueEqual(lContainer.Env, rContainer.Env) {\n\t\t\t\tchanged = true\n\t\t\t}\n\n\t\t\tif !reflect.DeepEqual(lContainer.Args, rContainer.Args) {\n\t\t\t\tchanged = true\n\t\t\t}\n\n\t\t\tif !reflect.DeepEqual(lContainer.Ports, rContainer.Ports) {\n\t\t\t\tchanged = true\n\t\t\t}\n\n\t\t\tif different, _ := utils.CompareResources(lContainer.Resources, rContainer.Resources); different {\n\t\t\t\tchanged = true\n\t\t\t}\n\t\t}\n\n\t\tif !found {\n\t\t\tchanged = true\n\t\t}\n\t}\n\treturn changed\n}", "func TestCompareStrings(t *testing.T) {\n\tlt := CompareStrings(\"a\", \"b\")\n\teq := CompareStrings(\"b\", \"b\")\n\tgt := CompareStrings(\"b\", \"a\")\n\n\tif err := checkResult(lt, eq, gt); err != nil {\n\t\tt.Fatalf(\"%s\", err)\n\t}\n}", "func ValidateResources(resources *corev1.ResourceRequirements, defaults corev1.ResourceRequirements) {\n\t// check for nil maps\n\tif resources.Requests == nil {\n\t\tresources.Requests = make(corev1.ResourceList)\n\t}\n\tif resources.Limits == nil {\n\t\tresources.Limits = make(corev1.ResourceList)\n\t}\n\n\t// if not given, use default cpu requests\n\t_, ok := resources.Requests[corev1.ResourceCPU]\n\tif !ok {\n\t\tresources.Requests[corev1.ResourceCPU] = defaults.Requests[corev1.ResourceCPU]\n\t}\n\n\t// if not given, use default memory requests\n\t_, ok = resources.Requests[corev1.ResourceMemory]\n\tif !ok {\n\t\tresources.Requests[corev1.ResourceMemory] = defaults.Requests[corev1.ResourceMemory]\n\t}\n\n\t// if not given, use default cpu limits\n\t_, ok = resources.Limits[corev1.ResourceCPU]\n\tif !ok {\n\t\tresources.Limits[corev1.ResourceCPU] = 
defaults.Limits[corev1.ResourceCPU]\n\t}\n\n\t// if not given, use default memory limits\n\t_, ok = resources.Limits[corev1.ResourceMemory]\n\tif !ok {\n\t\tresources.Limits[corev1.ResourceMemory] = defaults.Limits[corev1.ResourceMemory]\n\t}\n}", "func compareConfigs(config1 Configuration.Config, config2 Configuration.Config, t *testing.T) {\n\tfor _, t1 := range config2.Tool {\n\t\tfor _, t2 := range config2.Tool {\n\t\t\tif( (t1.Name != t2.Name) ||\n\t\t\t\t(t1.Enabled != t2.Enabled) ||\n\t\t\t\t(t1.Path != t2.Path) ||\n\t\t\t\t(t1.Args != t2.Args)){\n\t\t\t\t\tt.Fail()\n\t\t\t}\n\t\t}\n\t}\n\tif(config1.RelativePath != config2.RelativePath){\n\t\tt.Fail()\n\t}\n}", "func (c *Comparator) ComparesAsJSON(compares map[string][]FilePath) string {\n\tjsonString, _ := json.MarshalIndent(compares, \"\", \" \")\n\n\treturn string(jsonString)\n}", "func (s *FlowerSpec) Differs(expected ResourceInfo, observed ResourceInfo) bool {\n\t// TODO\n\tswitch expected.Obj.(type) {\n\tcase *resources.Secret:\n\t\t// Dont update a secret\n\t\treturn false\n\t}\n\treturn true\n}", "func getRequestedResources(container corev1.Container) (map[string]int64, error) {\n\tfor _, v := range container.Env {\n\t\tif strings.HasPrefix(v.Name, \"FPGA_REGION\") || strings.HasPrefix(v.Name, \"FPGA_AFU\") {\n\t\t\treturn nil, errors.Errorf(\"environment variable '%s' is not allowed\", v.Name)\n\t\t}\n\t}\n\n\t// Container may happen to have Requests, but not Limits. 
Check Requests first,\n\t// then in the next loop iterate over Limits.\n\tfor resourceName, resourceQuantity := range container.Resources.Requests {\n\t\trname := strings.ToLower(string(resourceName))\n\t\tif !strings.HasPrefix(rname, namespace) {\n\t\t\t// Skip non-FPGA resources in Requests.\n\t\t\tcontinue\n\t\t}\n\n\t\tif container.Resources.Limits[resourceName] != resourceQuantity {\n\t\t\treturn nil, errors.Errorf(\n\t\t\t\t\"'limits' and 'requests' for %q must be equal as extended resources cannot be overcommitted\",\n\t\t\t\trname)\n\t\t}\n\t}\n\n\tresources := make(map[string]int64)\n\tfor resourceName, resourceQuantity := range container.Resources.Limits {\n\t\trname := strings.ToLower(string(resourceName))\n\t\tif !strings.HasPrefix(rname, namespace) {\n\t\t\t// Skip non-FPGA resources in Limits.\n\t\t\tcontinue\n\t\t}\n\n\t\tif container.Resources.Requests[resourceName] != resourceQuantity {\n\t\t\treturn nil, errors.Errorf(\n\t\t\t\t\"'limits' and 'requests' for %q must be equal as extended resources cannot be overcommitted\",\n\t\t\t\trname)\n\t\t}\n\n\t\tquantity, ok := resourceQuantity.AsInt64()\n\t\tif !ok {\n\t\t\treturn nil, errors.Errorf(\"resource quantity isn't of integral type for %q\", rname)\n\t\t}\n\n\t\tresources[rname] = quantity\n\t}\n\n\treturn resources, nil\n}", "func TestDiff_srcDestContentsDiffer(t *testing.T) {\n\ts := t.TempDir()\n\td := t.TempDir()\n\n\terr := os.Mkdir(filepath.Join(s, \"a1\"), 0700)\n\tassert.NoError(t, err)\n\terr = os.WriteFile(\n\t\tfilepath.Join(s, \"a1\", \"f.yaml\"), []byte(`a`), 0600)\n\tassert.NoError(t, err)\n\n\terr = os.Mkdir(filepath.Join(d, \"a1\"), 0700)\n\tassert.NoError(t, err)\n\terr = os.WriteFile(\n\t\tfilepath.Join(d, \"a1\", \"f.yaml\"), []byte(`b`), 0600)\n\tassert.NoError(t, err)\n\n\tdiff, err := Diff(s, d)\n\tassert.NoError(t, err)\n\tassert.ElementsMatch(t, diff.List(), []string{\n\t\tfmt.Sprintf(\"a1%sf.yaml\", string(filepath.Separator)),\n\t})\n}", "func 
RunJSONSerializationTestForResourceReference_ARM(subject ResourceReference_ARM) string {\n\t// Serialize to JSON\n\tbin, err := json.Marshal(subject)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\t// Deserialize back into memory\n\tvar actual ResourceReference_ARM\n\terr = json.Unmarshal(bin, &actual)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\t// Check for outcome\n\tmatch := cmp.Equal(subject, actual, cmpopts.EquateEmpty())\n\tif !match {\n\t\tactualFmt := pretty.Sprint(actual)\n\t\tsubjectFmt := pretty.Sprint(subject)\n\t\tresult := diff.Diff(subjectFmt, actualFmt)\n\t\treturn result\n\t}\n\n\treturn \"\"\n}", "func AssertJSONEqualsBytes(expected []byte, given []byte, t *testing.T) {\n\teq, err := jsonBytesEqual(expected, given)\n\n\tif err != nil {\n\t\tt.Fatalf(\"Internal compare failure:%s\", err)\n\t}\n\tif !eq {\n\n\t\tt.Fatalf(\"JSON differs:\\nExpected:\\n-----\\n%s\\n-----\\nGot :\\n-----\\n%s\\n-----\\n\", string(expected), string(given))\n\t}\n}", "func AssertJSONEquals(expected string, given string, t *testing.T) {\n\ta := []byte(expected)\n\tb := []byte(given)\n\tAssertJSONEqualsBytes(a, b, t)\n}", "func (o ValidatingAdmissionPolicyBindingSpecPatchOutput) MatchResources() MatchResourcesPatchPtrOutput {\n\treturn o.ApplyT(func(v ValidatingAdmissionPolicyBindingSpecPatch) *MatchResourcesPatch { return v.MatchResources }).(MatchResourcesPatchPtrOutput)\n}", "func (resource *ResourceType) Equals(other Type, override EqualityOverrides) bool {\n\tif resource == other {\n\t\t// Same reference\n\t\treturn true\n\t}\n\n\totherResource, ok := other.(*ResourceType)\n\tif !ok {\n\t\treturn false\n\t}\n\n\t// Do cheap tests earlier\n\tif resource.isStorageVersion != otherResource.isStorageVersion ||\n\t\tlen(resource.testcases) != len(otherResource.testcases) ||\n\t\tlen(resource.functions) != len(otherResource.functions) ||\n\t\t!TypeEquals(resource.spec, otherResource.spec, override) ||\n\t\t!TypeEquals(resource.status, otherResource.status, 
override) ||\n\t\tlen(resource.annotations) != len(otherResource.annotations) ||\n\t\tresource.scope != otherResource.scope ||\n\t\tresource.armType != otherResource.armType ||\n\t\t!TypeEquals(resource.apiVersionTypeName, otherResource.apiVersionTypeName) ||\n\t\t!resource.apiVersionEnumValue.Equals(&otherResource.apiVersionEnumValue) ||\n\t\t!resource.InterfaceImplementer.Equals(otherResource.InterfaceImplementer, override) {\n\t\treturn false\n\t}\n\n\t// Check same functions present\n\tfor name, fn := range otherResource.functions {\n\t\tourFn, ok := resource.functions[name]\n\t\tif !ok {\n\t\t\treturn false\n\t\t}\n\n\t\tif !ourFn.Equals(fn, override) {\n\t\t\treturn false\n\t\t}\n\t}\n\n\t// Check same test cases present\n\tfor name, testcase := range otherResource.testcases {\n\t\tourCase, ok := resource.testcases[name]\n\t\tif !ok {\n\t\t\t// Didn't find the func, not equal\n\t\t\treturn false\n\t\t}\n\n\t\tif !ourCase.Equals(testcase, override) {\n\t\t\t// Different testcase, even though same name; not-equal\n\t\t\treturn false\n\t\t}\n\t}\n\n\t// Check same annotations present in the same order\n\tfor i, ourAnnotation := range resource.annotations {\n\t\totherAnnotation := otherResource.annotations[i]\n\t\tif ourAnnotation != otherAnnotation {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func CompareTokens(expected, actual []*Token) (bool, error) {\n\tif len(expected) != len(actual) {\n\t\treturn false, fmt.Errorf(\"Different lengths. Expected %d, Got %d\", len(expected), len(actual))\n\t}\n\tfor i := range expected {\n\t\tif expected[i].Type != actual[i].Type {\n\t\t\treturn false, fmt.Errorf(\"Different token types at index %d. Expected %v, Got %v. Value: %v\",\n\t\t\t\ti, expected[i].Type, actual[i].Type, expected[i].Value)\n\t\t}\n\t\tif expected[i].Value != actual[i].Value {\n\t\t\treturn false, fmt.Errorf(\"Different token values at index %d. 
Expected %v, Got %v\",\n\t\t\t\ti, expected[i].Value, actual[i].Value)\n\t\t}\n\t}\n\treturn true, nil\n}", "func TestCompareStrings(t *testing.T) {\n\tstrings1 := []string{\"one\", \"two\", \"three\"}\n\tstrings2 := []string{\"one\", \"two\"}\n\tstrings3 := []string{\"one\", \"two\", \"THREE\"}\n\n\tif !compareStrings(strings1, strings1) {\n\t\tt.Error(\"Equal slices fail check!\")\n\t}\n\n\tif compareStrings(strings1, strings2) {\n\t\tt.Error(\"Different size slices are OK!\")\n\t}\n\n\tif compareStrings(strings1, strings3) {\n\t\tt.Error(\"Slice with different strings are OK!\")\n\t}\n}", "func compareDefaultIDSetMaps(x, y *DefaultIDSetMap) bool {\n\tif x == nil && y == nil {\n\t\treturn true\n\t}\n\n\tif x == nil || y == nil {\n\t\treturn false\n\t}\n\n\tm1 := toMap(x)\n\tm2 := toMap(y)\n\n\tif len(m1) != len(m2) {\n\t\treturn false\n\t}\n\n\tfor k, v := range m1 {\n\t\tif !compareIDSets(v, m2[k]) {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func TablesAreEqual(t1, t2 Table) bool {\n\tif len(t1) != len(t2) {\n\t\treturn false\n\t}\n\n\tfor i, s := range t1 {\n\t\tif !StacksAreEqual(s, t2[i]) {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func CompareDatasetRef(a, b *DatasetRef) error {\n\tif a == nil && b != nil || a != nil && b == nil {\n\t\treturn fmt.Errorf(\"nil mismatch: %v != %v\", a, b)\n\t}\n\tif a == nil && b == nil {\n\t\treturn nil\n\t}\n\tif a.Peername != b.Peername {\n\t\treturn fmt.Errorf(\"peername mismatch. %s != %s\", a.Name, b.Name)\n\t}\n\tif a.Name != b.Name {\n\t\treturn fmt.Errorf(\"name mismatch. %s != %s\", a.Name, b.Name)\n\t}\n\tif a.Path != b.Path {\n\t\treturn fmt.Errorf(\"path mismatch. 
%s != %s\", a.Path, b.Path)\n\t}\n\treturn nil\n}", "func TestBucketPolicyResourceMatch(t *testing.T) {\n\n\t// generates statement with given resource..\n\tgenerateStatement := func(resource string) policy.Statement {\n\t\tstatement := policy.Statement{}\n\t\tstatement.Resources = set.CreateStringSet([]string{resource}...)\n\t\treturn statement\n\t}\n\n\t// generates resource prefix.\n\tgenerateResource := func(bucketName, objectName string) string {\n\t\treturn bucketARNPrefix + bucketName + \"/\" + objectName\n\t}\n\n\ttestCases := []struct {\n\t\tresourceToMatch string\n\t\tstatement policy.Statement\n\t\texpectedResourceMatch bool\n\t}{\n\t\t// Test case 1-4.\n\t\t// Policy with resource ending with bucket/* allows access to all objects inside the given bucket.\n\t\t{generateResource(\"minio-bucket\", \"\"), generateStatement(fmt.Sprintf(\"%s%s\", bucketARNPrefix, \"minio-bucket\"+\"/*\")), true},\n\t\t{generateResource(\"minio-bucket\", \"\"), generateStatement(fmt.Sprintf(\"%s%s\", bucketARNPrefix, \"minio-bucket\"+\"/*\")), true},\n\t\t{generateResource(\"minio-bucket\", \"\"), generateStatement(fmt.Sprintf(\"%s%s\", bucketARNPrefix, \"minio-bucket\"+\"/*\")), true},\n\t\t{generateResource(\"minio-bucket\", \"\"), generateStatement(fmt.Sprintf(\"%s%s\", bucketARNPrefix, \"minio-bucket\"+\"/*\")), true},\n\t\t// Test case - 5.\n\t\t// Policy with resource ending with bucket/oo* should not allow access to bucket/output.txt.\n\t\t{generateResource(\"minio-bucket\", \"output.txt\"), generateStatement(fmt.Sprintf(\"%s%s\", bucketARNPrefix, \"minio-bucket\"+\"/oo*\")), false},\n\t\t// Test case - 6.\n\t\t// Policy with resource ending with bucket/oo* should allow access to bucket/ootput.txt.\n\t\t{generateResource(\"minio-bucket\", \"ootput.txt\"), generateStatement(fmt.Sprintf(\"%s%s\", bucketARNPrefix, \"minio-bucket\"+\"/oo*\")), true},\n\t\t// Test case - 7.\n\t\t// Policy with resource ending with bucket/oo* allows access to all sub-dirs starting with \"oo\" 
inside given bucket.\n\t\t{generateResource(\"minio-bucket\", \"oop-bucket/my-file\"), generateStatement(fmt.Sprintf(\"%s%s\", bucketARNPrefix, \"minio-bucket\"+\"/oo*\")), true},\n\t\t// Test case - 8.\n\t\t{generateResource(\"minio-bucket\", \"Asia/India/1.pjg\"), generateStatement(fmt.Sprintf(\"%s%s\", bucketARNPrefix, \"minio-bucket\"+\"/Asia/Japan/*\")), false},\n\t\t// Test case - 9.\n\t\t{generateResource(\"minio-bucket\", \"Asia/India/1.pjg\"), generateStatement(fmt.Sprintf(\"%s%s\", bucketARNPrefix, \"minio-bucket\"+\"/Asia/Japan/*\")), false},\n\t\t// Test case - 10.\n\t\t// Proves that the name space is flat.\n\t\t{generateResource(\"minio-bucket\", \"Africa/Bihar/India/design_info.doc/Bihar\"), generateStatement(fmt.Sprintf(\"%s%s\", bucketARNPrefix,\n\t\t\t\"minio-bucket\"+\"/*/India/*/Bihar\")), true},\n\t\t// Test case - 11.\n\t\t// Proves that the name space is flat.\n\t\t{generateResource(\"minio-bucket\", \"Asia/China/India/States/Bihar/output.txt\"), generateStatement(fmt.Sprintf(\"%s%s\", bucketARNPrefix,\n\t\t\t\"minio-bucket\"+\"/*/India/*/Bihar/*\")), true},\n\t}\n\tfor i, testCase := range testCases {\n\t\tactualResourceMatch := bucketPolicyResourceMatch(testCase.resourceToMatch, testCase.statement)\n\t\tif testCase.expectedResourceMatch != actualResourceMatch {\n\t\t\tt.Errorf(\"Test %d: Expected Resource match to be `%v`, but instead found it to be `%v`\", i+1, testCase.expectedResourceMatch, actualResourceMatch)\n\t\t}\n\t}\n}", "func CompareRegexJSON(expected string, actual string, topDir string) (string, error) {\n\n\tvar fActual, fExpected *os.File\n\tvar err error\n\tif fActual, err = writeTempFile(\"tmp_actual\", actual); err != nil {\n\t\treturn \"\", err\n\t}\n\tif fExpected, err = writeTempFile(\"tmp_expected\", expected); err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer func() {\n\t\t_ = os.Remove(fActual.Name())\n\t\t_ = os.Remove(fExpected.Name())\n\t}()\n\n\tscript := path.Join(topDir, diffExecutable)\n\tcompareCmd := 
exec.Command(\n\t\t\"python\",\n\t\tscript,\n\t\t\"--use_model\",\n\t\tfActual.Name(),\n\t\tfExpected.Name())\n\tdiffCmd := exec.Command(\n\t\t\"python\",\n\t\tscript,\n\t\t\"--diff\",\n\t\t\"--use_model\",\n\t\tfActual.Name(),\n\t\tfExpected.Name())\n\treturn runJSONDiff(compareCmd, diffCmd)\n}", "func compare(a []string, b []string) bool {\n\tif len(a) != len(b) { // if their length is not equal\n\t\treturn false // then they are obviously not the same\n\t}\n\tfor i := 0; i < len(a); i++ {\n\t\tif a[i] != b[i] { // if there is one element unequal\n\t\t\treturn false // then it is false\n\t\t}\n\t}\n\treturn true // otherwise true\n}", "func RunJSONSerializationTestForRequestUriMatchConditionParameters_ARM(subject RequestUriMatchConditionParameters_ARM) string {\n\t// Serialize to JSON\n\tbin, err := json.Marshal(subject)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\t// Deserialize back into memory\n\tvar actual RequestUriMatchConditionParameters_ARM\n\terr = json.Unmarshal(bin, &actual)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\t// Check for outcome\n\tmatch := cmp.Equal(subject, actual, cmpopts.EquateEmpty())\n\tif !match {\n\t\tactualFmt := pretty.Sprint(actual)\n\t\tsubjectFmt := pretty.Sprint(subject)\n\t\tresult := diff.Diff(subjectFmt, actualFmt)\n\t\treturn result\n\t}\n\n\treturn \"\"\n}" ]
[ "0.64290345", "0.5865311", "0.58325607", "0.5697454", "0.54446197", "0.5403544", "0.5374727", "0.53742474", "0.53078616", "0.52964586", "0.52862316", "0.51846445", "0.51759404", "0.5174776", "0.51174635", "0.5074527", "0.50719815", "0.50290644", "0.5021602", "0.5009109", "0.4984351", "0.49673533", "0.4951663", "0.49247012", "0.49116075", "0.48933783", "0.48583674", "0.48566896", "0.4851615", "0.48136693", "0.4798348", "0.47883686", "0.4770763", "0.47585484", "0.47533947", "0.47496495", "0.4724863", "0.47245175", "0.4718809", "0.4707709", "0.47015542", "0.46861923", "0.46825275", "0.4641478", "0.46294597", "0.46259698", "0.46214232", "0.4619345", "0.46152043", "0.4594025", "0.45823094", "0.45621532", "0.45569864", "0.45544282", "0.4550648", "0.4537797", "0.4525119", "0.4512455", "0.44956303", "0.44807518", "0.44755837", "0.44755837", "0.447459", "0.4469794", "0.44597653", "0.44446808", "0.44412678", "0.44397366", "0.44346705", "0.4429205", "0.44279018", "0.4423948", "0.44208175", "0.44196934", "0.4417696", "0.4405454", "0.44027242", "0.439934", "0.43981555", "0.43967795", "0.43849435", "0.43665984", "0.43613273", "0.43564573", "0.43533754", "0.43516463", "0.43324196", "0.43316162", "0.43283725", "0.43276703", "0.43261275", "0.4324415", "0.43228087", "0.43096384", "0.42994088", "0.4295694", "0.42904356", "0.42877686", "0.42832467", "0.42704114" ]
0.84132695
0
CompareResourceList compares `expected` and `got` ResourceList respectively, and informs the caller if the two ResourceList are equal. Here `equal` means the same resources with the same quantities. Returns the different resource, the comparison result (same semantic as strings.Compare) and a boolean that reports if the resourceLists are consistent. The ResourceLists are consistent only if the represent the same resource set (all the resources listed in one are also present in the another; no ResourceList is a superset nor a subset of the other)
func CompareResourceList(expected, got corev1.ResourceList) (string, int, bool) { if len(got) != len(expected) { framework.Logf("-> expected=%v (len=%d) got=%v (len=%d)", expected, len(expected), got, len(got)) return "", 0, false } for expResName, expResQty := range expected { gotResQty, ok := got[expResName] if !ok { return string(expResName), 0, false } if cmp := gotResQty.Cmp(expResQty); cmp != 0 { framework.Logf("-> resource=%q cmp=%d expected=%v got=%v", expResName, cmp, expResQty, gotResQty) return string(expResName), cmp, true } } return "", 0, true }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func TestResourceListSorting(t *testing.T) {\n\tsortedResourceList := make([]string, len(resourceList))\n\tcopy(sortedResourceList, resourceList)\n\tsort.Strings(sortedResourceList)\n\tfor i := 0; i < len(resourceList); i++ {\n\t\tif resourceList[i] != sortedResourceList[i] {\n\t\t\tt.Errorf(\"Expected resourceList[%d] = \\\"%s\\\", resourceList is not correctly sorted.\", i, sortedResourceList[i])\n\t\t\tbreak\n\t\t}\n\t}\n}", "func CmpResourceList(a, b *v1.ResourceList) bool {\n\treturn a.Cpu().Cmp(*b.Cpu()) == 0 &&\n\t\ta.Memory().Cmp(*b.Memory()) == 0 &&\n\t\tb.Pods().Cmp(*b.Pods()) == 0 &&\n\t\tb.StorageEphemeral().Cmp(*b.StorageEphemeral()) == 0\n}", "func CompareResources(resA, resB types.Resource) int {\n\tequal := cmp.Equal(resA, resB,\n\t\tignoreProtoXXXFields(),\n\t\tcmpopts.IgnoreFields(types.Metadata{}, \"ID\"),\n\t\tcmpopts.IgnoreFields(types.DatabaseV3{}, \"Status\"),\n\t\tcmpopts.EquateEmpty(),\n\t)\n\tif equal {\n\t\treturn Equal\n\t}\n\treturn Different\n}", "func CompareAllocatableResources(expected, got map[string]corev1.ResourceList) (string, string, int, bool) {\n\tif len(got) != len(expected) {\n\t\tframework.Logf(\"-> expected=%v (len=%d) got=%v (len=%d)\", expected, len(expected), got, len(got))\n\t\treturn \"\", \"\", 0, false\n\t}\n\tfor expZoneName, expResList := range expected {\n\t\tgotResList, ok := got[expZoneName]\n\t\tif !ok {\n\t\t\treturn expZoneName, \"\", 0, false\n\t\t}\n\t\tif resName, cmp, ok := CompareResourceList(expResList, gotResList); !ok || cmp != 0 {\n\t\t\treturn expZoneName, resName, cmp, ok\n\t\t}\n\t}\n\treturn \"\", \"\", 0, true\n}", "func ResourcesEqual(a, b map[string]envoy.Resource) bool {\n\n\tif len(a) != len(b) {\n\t\treturn false\n\t}\n\n\tfor name, resource := range a {\n\t\tif !proto.Equal(resource, b[name]) {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func (r Resources) Equal(o Resources) bool {\n\tLog.Vomit.Printf(\"Comparing resources: %+ v ?= %+ v\", r, o)\n\tif len(r) != len(o) 
{\n\t\tLog.Vomit.Println(\"Lengths differ\")\n\t\treturn false\n\t}\n\n\tif r.Ports() != o.Ports() {\n\t\tLog.Vomit.Println(\"Ports differ\")\n\t\treturn false\n\t}\n\n\tif math.Abs(r.Cpus()-o.Cpus()) > 0.001 {\n\t\tLog.Vomit.Println(\"Cpus differ\")\n\t\treturn false\n\t}\n\n\tif math.Abs(r.Memory()-o.Memory()) > 0.001 {\n\t\tLog.Vomit.Println(\"Memory differ\")\n\t\treturn false\n\t}\n\n\treturn true\n}", "func (p *Plan) CompareResultSets(regressDir string, expectedDir string, t *tap.T) {\n\tfor i, rs := range p.ResultSets {\n\t\ttestName := strings.TrimPrefix(rs.Filename, regressDir+\"/out/\")\n\t\texpectedFilename := filepath.Join(expectedDir,\n\t\t\tfilepath.Base(rs.Filename))\n\t\tdiff, err := DiffFiles(expectedFilename, rs.Filename, 3)\n\n\t\tif err != nil {\n\t\t\tt.Diagnostic(\n\t\t\t\tfmt.Sprintf(`Query File: '%s'\nBindings File: '%s'\nBindings Name: '%s'\nQuery Parameters: '%v'\nExpected Result File: '%s'\nActual Result File: '%s'\n\nFailed to compare results: %s`,\n\t\t\t\t\tp.Query.Path,\n\t\t\t\t\tp.Path,\n\t\t\t\t\tp.Names[i],\n\t\t\t\t\tp.Bindings[i],\n\t\t\t\t\texpectedFilename,\n\t\t\t\t\trs.Filename,\n\t\t\t\t\terr.Error()))\n\t\t}\n\n\t\tif diff != \"\" {\n\t\t\tt.Diagnostic(\n\t\t\t\tfmt.Sprintf(`Query File: '%s'\nBindings File: '%s'\nBindings Name: '%s'\nQuery Parameters: '%v'\nExpected Result File: '%s'\nActual Result File: '%s'\n\n%s`,\n\t\t\t\t\tp.Query.Path,\n\t\t\t\t\tp.Path,\n\t\t\t\t\tp.Names[i],\n\t\t\t\t\tp.Bindings[i],\n\t\t\t\t\texpectedFilename,\n\t\t\t\t\trs.Filename,\n\t\t\t\t\tdiff))\n\t\t}\n\t\tt.Ok(diff == \"\", testName)\n\t}\n}", "func (r *Resources) Equal(other *Resources) bool {\n\treturn equal(r.CPU, other.CPU) &&\n\t\tequal(r.MEMORY, other.MEMORY) &&\n\t\tequal(r.DISK, other.DISK) &&\n\t\tequal(r.GPU, other.GPU)\n}", "func validateResourceList(resourceList core.ResourceList, upperBound core.ResourceList, fldPath *field.Path) field.ErrorList {\n\tallErrs := field.ErrorList{}\n\tfor resourceName, quantity := range 
resourceList {\n\t\tresPath := fldPath.Key(string(resourceName))\n\t\t// Validate resource name.\n\t\tallErrs = append(allErrs, validateResourceName(&resourceName, resPath)...)\n\t\t// Validate resource quantity.\n\t\tallErrs = append(allErrs, corevalidation.ValidateResourceQuantityValue(string(resourceName), quantity, resPath)...)\n\t\tif upperBound != nil {\n\t\t\t// Check that request <= limit.\n\t\t\tupperBoundQuantity, exists := upperBound[resourceName]\n\t\t\tif exists && quantity.Cmp(upperBoundQuantity) > 0 {\n\t\t\t\tallErrs = append(allErrs, field.Invalid(fldPath, quantity.String(),\n\t\t\t\t\t\"must be less than or equal to the upper bound\"))\n\t\t\t}\n\t\t}\n\t}\n\treturn allErrs\n}", "func ExpectedResourceListFor(expectedISCount int64) kapi.ResourceList {\n\treturn kapi.ResourceList{\n\t\timageapi.ResourceImageStreams: *resource.NewQuantity(expectedISCount, resource.DecimalSI),\n\t}\n}", "func testCheckDDCloudAddressListMatches(name string, expected compute.IPAddressList) resource.TestCheckFunc {\n\tname = ensureResourceTypePrefix(name, \"ddcloud_address_list\")\n\n\treturn func(state *terraform.State) error {\n\t\tres, ok := state.RootModule().Resources[name]\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"Not found: %s\", name)\n\t\t}\n\n\t\taddressListID := res.Primary.ID\n\n\t\tclient := testAccProvider.Meta().(*providerState).Client()\n\t\taddressList, err := client.GetIPAddressList(addressListID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"bad: Get address list: %s\", err)\n\t\t}\n\t\tif addressList == nil {\n\t\t\treturn fmt.Errorf(\"bad: address list not found with Id '%s'\", addressListID)\n\t\t}\n\n\t\tif addressList.Name != expected.Name {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has name '%s' (expected '%s')\", addressListID, addressList.Name, expected.Name)\n\t\t}\n\n\t\tif addressList.Description != expected.Description {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has description '%s' (expected '%s')\", addressListID, 
addressList.Description, expected.Description)\n\t\t}\n\n\t\tif len(addressList.Addresses) != len(expected.Addresses) {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has %d addresses or address-ranges (expected '%d')\", addressListID, len(addressList.Addresses), len(expected.Addresses))\n\t\t}\n\n\t\terr = compareAddressListEntries(expected, *addressList)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif len(addressList.ChildLists) != len(expected.ChildLists) {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has %d child lists (expected '%d')\", addressListID, len(addressList.ChildLists), len(expected.ChildLists))\n\t\t}\n\n\t\tfor index := range addressList.ChildLists {\n\t\t\texpectedChildListID := expected.ChildLists[index].ID\n\t\t\tactualChildListID := addressList.ChildLists[index].ID\n\n\t\t\tif actualChildListID != expectedChildListID {\n\t\t\t\treturn fmt.Errorf(\"bad: address list '%s' has child list at index %d with Id %s (expected '%s')\",\n\t\t\t\t\taddressListID, index, actualChildListID, expectedChildListID,\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t}\n}", "func assertEqualEndpointLists(t *testing.T, expected, actual []*Endpoint) {\n\texpectedSet := map[string]*Endpoint{}\n\tfor _, ep := range expected {\n\t\tuid, found := ep.getSingleValuedAttrs()[DestinationUID.AttrName()]\n\t\tif !found {\n\t\t\tt.Fatalf(\"expected ep found with no UID is an indication of bad test data: '%v'\", ep)\n\t\t}\n\t\texpectedSet[uid] = ep\n\t}\n\tactualSet := map[string]*Endpoint{}\n\tfor _, ep := range actual {\n\t\tuid, found := ep.getSingleValuedAttrs()[DestinationUID.AttrName()]\n\t\tif !found {\n\t\t\tt.Errorf(\"actual ep found with no UID '%s'\", epDebugInfo(ep))\n\t\t\tcontinue\n\t\t}\n\t\tactualSet[uid] = ep\n\t}\n\tfor uid, expectedEp := range expectedSet {\n\t\tactualEp, found := actualSet[uid]\n\t\tif !found {\n\t\t\tt.Errorf(\"expecting endpoint\\nShortForm: %s\\nLongForm : %s\\nfound none\", epDebugInfo(expectedEp), 
*expectedEp)\n\t\t\tcontinue\n\t\t}\n\t\tassertEqualEndpoints(t, expectedEp, actualEp)\n\t\tdelete(actualSet, uid)\n\t}\n\tfor _, ep := range actualSet {\n\t\tt.Errorf(\"unexpected endpoint found: %s\", epDebugInfo(ep))\n\t}\n\tif len(expected) != len(actual) {\n\t\tt.Errorf(\"expected endpoint count: %d do not tally with actual count: %d\", len(expected), len(actual))\n\t}\n}", "func CompareIPLists(a []string, b []string) bool {\n\tif len(a) != len(b) {\n\t\treturn true\n\t}\n\n\tsort.Strings(a)\n\tsort.Strings(b)\n\n\tif !reflect.DeepEqual(a, b) {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (r *Compare) Compare() (map[string][]schema.GroupVersionResource, error) {\n\tpreferredSrcResourceList, err := collectPreferredResources(r.SrcDiscovery)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsrcCRDResource, err := collectPreferredCRDResource(r.SrcDiscovery)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdstResourceList, err := collectNamespacedResources(r.DstDiscovery)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpreferredSrcResourceList, err = r.excludeCRDs(preferredSrcResourceList, srcCRDResource, r.SrcClient)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresourcesDiff := r.compareResources(preferredSrcResourceList, dstResourceList)\n\tincompatibleGVKs, err := convertToGVRList(resourcesDiff)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Don't report an incompatibleGVK if user settings will skip resource anyways\n\texcludedResources := toStringSlice(settings.ExcludedInitialResources.Union(toSet(r.Plan.Status.ExcludedResources)))\n\tfilteredGVKs := []schema.GroupVersionResource{}\n\tfor _, gvr := range incompatibleGVKs {\n\t\tskip := false\n\t\tfor _, resource := range excludedResources {\n\t\t\tif strings.EqualFold(gvr.Resource, resource) {\n\t\t\t\tskip = true\n\t\t\t}\n\t\t}\n\t\tif !skip {\n\t\t\tfilteredGVKs = append(filteredGVKs, gvr)\n\t\t}\n\t}\n\n\treturn r.collectIncompatibleMapping(filteredGVKs)\n}", "func maxResourceList(list, 
new corev1.ResourceList) {\n\tfor name, quantity := range new {\n\t\tif value, ok := list[name]; !ok {\n\t\t\tlist[name] = quantity.DeepCopy()\n\t\t\tcontinue\n\t\t} else {\n\t\t\tif quantity.Cmp(value) > 0 {\n\t\t\t\tlist[name] = quantity.DeepCopy()\n\t\t\t}\n\t\t}\n\t}\n}", "func testCheckDDCloudPortListMatches(name string, expected compute.PortList) resource.TestCheckFunc {\n\tname = ensureResourceTypePrefix(name, \"ddcloud_port_list\")\n\n\treturn func(state *terraform.State) error {\n\t\tres, ok := state.RootModule().Resources[name]\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"not found: %s\", name)\n\t\t}\n\n\t\tportListID := res.Primary.ID\n\n\t\tclient := testAccProvider.Meta().(*providerState).Client()\n\t\tportList, err := client.GetPortList(portListID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"bad: get port list: %s\", err)\n\t\t}\n\t\tif portList == nil {\n\t\t\treturn fmt.Errorf(\"bad: port list not found with Id '%s'\", portListID)\n\t\t}\n\n\t\tif portList.Name != expected.Name {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has name '%s' (expected '%s')\", portListID, portList.Name, expected.Name)\n\t\t}\n\n\t\tif portList.Description != expected.Description {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has description '%s' (expected '%s')\", portListID, portList.Description, expected.Description)\n\t\t}\n\n\t\tif len(portList.Ports) != len(expected.Ports) {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has %d ports or port ranges (expected '%d')\", portListID, len(portList.Ports), len(expected.Ports))\n\t\t}\n\n\t\terr = comparePortListEntries(expected, *portList)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif len(portList.ChildLists) != len(expected.ChildLists) {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has %d child lists (expected '%d')\", portListID, len(portList.ChildLists), len(expected.ChildLists))\n\t\t}\n\n\t\tfor index := range portList.ChildLists {\n\t\t\texpectedChildListID := 
expected.ChildLists[index].ID\n\t\t\tactualChildListID := portList.ChildLists[index].ID\n\n\t\t\tif actualChildListID != expectedChildListID {\n\t\t\t\treturn fmt.Errorf(\"bad: port list '%s' has child list at index %d with Id %s (expected '%s')\",\n\t\t\t\t\tportListID, index, actualChildListID, expectedChildListID,\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t}\n}", "func maxResourceList(list, new v1.ResourceList) {\n\tfor name, quantity := range new {\n\t\tif value, ok := list[name]; !ok {\n\t\t\tlist[name] = quantity.DeepCopy()\n\t\t\tcontinue\n\t\t} else {\n\t\t\tif quantity.Cmp(value) > 0 {\n\t\t\t\tlist[name] = quantity.DeepCopy()\n\t\t\t}\n\t\t}\n\t}\n}", "func parseResourceRequirementsList(rsr *v1alpha1.ResourceSpecRequirements) (corev1.ResourceList, error) {\n\trl := corev1.ResourceList{}\n\n\tif rsr.Cpu != \"\" {\n\t\tcpu := rsr.Cpu\n\t\tif !strings.HasSuffix(cpu, \"m\") {\n\t\t\tcpuFloat64, err := strconv.ParseFloat(cpu, 64)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tcpu = fmt.Sprintf(\"%.1f\", cpuFloat64)\n\t\t}\n\t\tcpuQuantity, err := resource.ParseQuantity(cpu)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\trl[corev1.ResourceCPU] = cpuQuantity\n\t}\n\n\tif rsr.Memory != \"\" {\n\t\tmemoryQuantity, err := resource.ParseQuantity(rsr.Memory)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\trl[corev1.ResourceMemory] = memoryQuantity\n\t}\n\n\tif rsr.Storage != \"\" {\n\t\tstorageQuantity, err := resource.ParseQuantity(rsr.Storage)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\trl[corev1.ResourceStorage] = storageQuantity\n\t}\n\n\treturn rl, nil\n}", "func (in *ResourceList) DeepCopy() *ResourceList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceList) DeepCopy() *ResourceList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func List[T comparable](t 
*testing.T, actuals []T, expecteds []T) {\n\tt.Helper()\n\tEqual(t, len(actuals), len(expecteds))\n\n\tfor i, actual := range actuals {\n\t\tEqual(t, actual, expecteds[i])\n\t}\n}", "func CmpResources(a, b *v1.ResourceRequirements) bool {\n\treturn CmpResourceList(&a.Limits, &b.Limits) && CmpResourceList(&a.Requests, &b.Requests)\n}", "func (v NetworkListResponse) Equal(o NetworkListResponse) bool {\n\treturn len(v.NetworkIdentifiers) == len(o.NetworkIdentifiers) &&\n\t\tnetworkIdentifierSliceEqual(v.NetworkIdentifiers, o.NetworkIdentifiers)\n}", "func (m *MockMetaDataMgmtService) ListResource() ([]*entity.ResourceType, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ListResource\")\n\tret0, _ := ret[0].([]*entity.ResourceType)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func SortResources(resources []*metav1.APIResourceList) {\n\tsort.SliceStable(resources, func(i, j int) bool {\n\t\tleft := resources[i]\n\t\tleftGV, _ := schema.ParseGroupVersion(left.GroupVersion)\n\t\t// not checking error because it should be impossible to fail to parse data coming from the\n\t\t// apiserver\n\t\tif leftGV.Group == \"extensions\" {\n\t\t\t// always sort extensions at the bottom by saying left is \"greater\"\n\t\t\treturn false\n\t\t}\n\n\t\tright := resources[j]\n\t\trightGV, _ := schema.ParseGroupVersion(right.GroupVersion)\n\t\t// not checking error because it should be impossible to fail to parse data coming from the\n\t\t// apiserver\n\t\tif rightGV.Group == \"extensions\" {\n\t\t\t// always sort extensions at the bottom by saying left is \"less\"\n\t\t\treturn true\n\t\t}\n\n\t\treturn i < j\n\t})\n}", "func TestCompareStrings(t *testing.T) {\n\tstrings1 := []string{\"one\", \"two\", \"three\"}\n\tstrings2 := []string{\"one\", \"two\"}\n\tstrings3 := []string{\"one\", \"two\", \"THREE\"}\n\n\tif !compareStrings(strings1, strings1) {\n\t\tt.Error(\"Equal slices fail check!\")\n\t}\n\n\tif compareStrings(strings1, strings2) {\n\t\tt.Error(\"Different 
size slices are OK!\")\n\t}\n\n\tif compareStrings(strings1, strings3) {\n\t\tt.Error(\"Slice with different strings are OK!\")\n\t}\n}", "func (r *Compare) CompareCRDs() (map[string][]schema.GroupVersionResource, error) {\n\tsrcCRDResource, err := collectPreferredCRDResource(r.SrcDiscovery)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdstCRDResourceList, err := collectCRDResources(r.DstDiscovery)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcrdGVDiff := r.compareResources(srcCRDResource, dstCRDResourceList)\n\t// if len(crdGVDiff)>0, then CRD APIVersion is incompatible between src and dest\n\tif len(crdGVDiff) > 0 {\n\t\tsrcCRDs, err := collectPreferredResources(r.SrcDiscovery)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tsrcCRDs, err = r.includeCRDsOnly(srcCRDs, srcCRDResource, r.SrcClient)\n\n\t\tdstCRDs, err := collectNamespacedResources(r.DstDiscovery)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tdstCRDs, err = r.includeCRDsOnly(dstCRDs, dstCRDResourceList, r.DstClient)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tcrdsDiff := r.compareResources(srcCRDs, dstCRDs)\n\t\tincompatibleGVKs, err := convertToGVRList(crdsDiff)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\t// Don't report an incompatibleGVK if user settings will skip resource anyways\n\t\texcludedResources := toStringSlice(settings.ExcludedInitialResources.Union(toSet(r.Plan.Status.ExcludedResources)))\n\t\tfilteredGVKs := []schema.GroupVersionResource{}\n\t\tfor _, gvr := range incompatibleGVKs {\n\t\t\tskip := false\n\t\t\tfor _, resource := range excludedResources {\n\t\t\t\tif strings.EqualFold(gvr.Resource, resource) {\n\t\t\t\t\tskip = true\n\t\t\t\t}\n\t\t\t}\n\t\t\tif !skip {\n\t\t\t\tfilteredGVKs = append(filteredGVKs, gvr)\n\t\t\t}\n\t\t}\n\n\t\treturn r.collectIncompatibleMapping(filteredGVKs)\n\t}\n\treturn nil, nil\n}", "func (in ResourceList) DeepCopy() ResourceList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := 
new(ResourceList)\n\tin.DeepCopyInto(out)\n\treturn *out\n}", "func TestCompareStrings(t *testing.T) {\n\tlt := CompareStrings(\"a\", \"b\")\n\teq := CompareStrings(\"b\", \"b\")\n\tgt := CompareStrings(\"b\", \"a\")\n\n\tif err := checkResult(lt, eq, gt); err != nil {\n\t\tt.Fatalf(\"%s\", err)\n\t}\n}", "func (m *MockList) Equal(other List) bool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Equal\", other)\n\tret0, _ := ret[0].(bool)\n\treturn ret0\n}", "func AssertEqualStringArray(expected []string, result []string) {\n\tAssertType(expected, result)\n\tif expected == nil && result == nil {\n\t\treturn\n\t}\n\tif len(expected) != len(result) {\n\t\tpanic(fmt.Sprintf(\"Error: [] Different count of items\\nExpected Value: %v\\nResult: %v\", expected, result))\n\t}\n\tfor expectedIdx := range expected {\n\t\telementExists := false\n\t\tfor resultIdx := range result {\n\t\t\tif result[resultIdx] == expected[expectedIdx] {\n\t\t\t\telementExists = true\n\t\t\t}\n\t\t}\n\t\tif !elementExists {\n\t\t\tpanic(fmt.Sprintf(\"Error: [] Item missing: %v.\\nExpected Value: %v\\nResult: %v\", expected[expectedIdx], expected, result))\n\t\t}\n\t}\n}", "func compareRequests(want []interface{}, got []interface{}) error {\n\tif len(got) != len(want) {\n\t\tvar gotMsg string\n\t\tfor _, r := range got {\n\t\t\tgotMsg += fmt.Sprintf(\"%v: %+v]\\n\", reflect.TypeOf(r), r)\n\t\t}\n\n\t\tvar wantMsg string\n\t\tfor _, r := range want {\n\t\t\twantMsg += fmt.Sprintf(\"%v: %+v]\\n\", reflect.TypeOf(r), r)\n\t\t}\n\n\t\treturn fmt.Errorf(\"got %d requests, want %d requests:\\ngot:\\n%s\\nwant:\\n%s\", len(got), len(want), gotMsg, wantMsg)\n\t}\n\n\tfor i, want := range want {\n\t\tif reflect.TypeOf(got[i]) != reflect.TypeOf(want) {\n\t\t\treturn fmt.Errorf(\"request %d: got %+v, want %+v\", i, reflect.TypeOf(got[i]), reflect.TypeOf(want))\n\t\t}\n\t}\n\treturn nil\n}", "func compareRes(a, b []byte) error {\n\tvar am, bm interface{}\n\tif err := json.Unmarshal(a, &am); err != nil 
{\n\t\treturn fmt.Errorf(\"%s: %v\", a, err)\n\t}\n\tif err := json.Unmarshal(b, &bm); err != nil {\n\t\treturn fmt.Errorf(\"%s: %v\", b, err)\n\t}\n\n\treturn cmp(am, bm)\n}", "func (m *MockMutableList) Equal(other List) bool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Equal\", other)\n\tret0, _ := ret[0].(bool)\n\treturn ret0\n}", "func TestListEqual(t *T) {\n\t// Degenerate case\n\tl1, l2 := NewList(), NewList()\n\tassert.Equal(t, true, l1.Equal(l2))\n\tassert.Equal(t, true, l2.Equal(l1))\n\n\t// False with different sizes\n\tl1 = l1.Prepend(1)\n\tassert.Equal(t, false, l1.Equal(l2))\n\tassert.Equal(t, false, l2.Equal(l1))\n\n\t// False with same sizes\n\tl2 = l2.Prepend(2)\n\tassert.Equal(t, false, l1.Equal(l2))\n\tassert.Equal(t, false, l2.Equal(l1))\n\n\t// Now true\n\tl1 = l1.Prepend(2)\n\tl2 = l2.Append(1)\n\tassert.Equal(t, true, l1.Equal(l2))\n\tassert.Equal(t, true, l2.Equal(l1))\n\n\t// False with embedded list\n\tl1 = l1.Prepend(NewList(3))\n\tassert.Equal(t, false, l1.Equal(l2))\n\tassert.Equal(t, false, l2.Equal(l1))\n\n\t// True with embedded set\n\tl2 = l2.Prepend(NewList(3))\n\tassert.Equal(t, true, l1.Equal(l2))\n\tassert.Equal(t, true, l2.Equal(l1))\n}", "func ListEquals(a, b []string) bool {\n\tif len(a) != len(b) {\n\t\treturn false\n\t}\n\tfor i := range a {\n\t\tif a[i] != b[i] {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func TestRedpandaResourceRequirements(t *testing.T) {\n\ttype test struct {\n\t\tname string\n\t\tsetRequestsCPU resource.Quantity\n\t\tsetRequestsMem resource.Quantity\n\t\tsetRedpandaCPU resource.Quantity\n\t\tsetRedpandaMem resource.Quantity\n\t\texpectedRedpandaCPU resource.Quantity\n\t\texpectedRedpandaMem resource.Quantity\n\t}\n\tmakeResources := func(t test) v1alpha1.RedpandaResourceRequirements {\n\t\treturn v1alpha1.RedpandaResourceRequirements{\n\t\t\tResourceRequirements: corev1.ResourceRequirements{\n\t\t\t\tRequests: corev1.ResourceList{\n\t\t\t\t\tcorev1.ResourceMemory: 
t.setRequestsMem,\n\t\t\t\t\tcorev1.ResourceCPU: t.setRequestsCPU,\n\t\t\t\t},\n\t\t\t},\n\t\t\tRedpanda: corev1.ResourceList{\n\t\t\t\tcorev1.ResourceMemory: t.setRedpandaMem,\n\t\t\t\tcorev1.ResourceCPU: t.setRedpandaCPU,\n\t\t\t},\n\t\t}\n\t}\n\n\tt.Run(\"Memory\", func(t *testing.T) {\n\t\ttests := []test{\n\t\t\t{\n\t\t\t\tname: \"RedpandaMemory is set from requests.memory\",\n\t\t\t\tsetRequestsMem: resource.MustParse(\"3000Mi\"),\n\t\t\t\texpectedRedpandaMem: resource.MustParse(\"2700Mi\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaMemory is set from lower redpanda.memory\",\n\t\t\t\tsetRequestsMem: resource.MustParse(\"4000Mi\"),\n\t\t\t\tsetRedpandaMem: resource.MustParse(\"3000Mi\"),\n\t\t\t\texpectedRedpandaMem: resource.MustParse(\"3000Mi\"),\n\t\t\t},\n\t\t}\n\t\tfor _, tt := range tests {\n\t\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\t\trrr := makeResources(tt)\n\t\t\t\tassert.Equal(t, tt.expectedRedpandaMem.Value(), rrr.RedpandaMemory().Value())\n\t\t\t})\n\t\t}\n\t})\n\n\tt.Run(\"CPU\", func(t *testing.T) {\n\t\ttests := []test{\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is set from integer requests.cpu\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"1\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"20Gi\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"1\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is set from milli requests.cpu\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"1000m\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"20Gi\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"1\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is rounded up from milli requests.cpu\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"1001m\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"20Gi\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"2\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is set from lower redpanda.cpu\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"2\"),\n\t\t\t\tsetRequestsMem: 
resource.MustParse(\"20Gi\"),\n\t\t\t\tsetRedpandaCPU: resource.MustParse(\"1\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"1\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is set from higher redpanda.cpu\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"1\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"20Gi\"),\n\t\t\t\tsetRedpandaCPU: resource.MustParse(\"2\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"2\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is rounded up from milli redpanda.cpu\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"1\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"20Gi\"),\n\t\t\t\tsetRedpandaCPU: resource.MustParse(\"1001m\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"2\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU is limited by 2GiB/core\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"10\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"4Gi\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"2\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU has a minimum if requests >0\",\n\t\t\t\tsetRequestsCPU: resource.MustParse(\"100m\"),\n\t\t\t\tsetRequestsMem: resource.MustParse(\"100Mi\"),\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"1\"),\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: \"RedpandaCPU not set if no request\",\n\t\t\t\texpectedRedpandaCPU: resource.MustParse(\"0\"),\n\t\t\t},\n\t\t}\n\t\tfor _, tt := range tests {\n\t\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\t\trrr := makeResources(tt)\n\t\t\t\tassert.Equal(t, tt.expectedRedpandaCPU.Value(), rrr.RedpandaCPU().Value())\n\t\t\t})\n\t\t}\n\t})\n}", "func CompareStorageRequests(initial corev1.ResourceRequirements, updated corev1.ResourceRequirements) StorageComparison {\n\tinitialSize := initial.Requests.Storage()\n\tupdatedSize := updated.Requests.Storage()\n\tif initialSize.IsZero() || updatedSize.IsZero() {\n\t\treturn StorageComparison{}\n\t}\n\tswitch updatedSize.Cmp(*initialSize) {\n\tcase -1: // decrease\n\t\treturn 
StorageComparison{Decrease: true}\n\tcase 1: // increase\n\t\treturn StorageComparison{Increase: true}\n\tdefault: // same size\n\t\treturn StorageComparison{}\n\t}\n}", "func (s *StorageSuite) TestServersEquality(c *check.C) {\n\tservers := Servers{{\n\t\tAdvertiseIP: \"192.168.1.1\",\n\t\tHostname: \"node-1\",\n\t\tRole: \"worker\",\n\t}}\n\ttestCases := []struct {\n\t\tservers Servers\n\t\tresult bool\n\t\tcomment string\n\t}{\n\t\t{\n\t\t\tservers: Servers{{\n\t\t\t\tAdvertiseIP: \"192.168.1.1\",\n\t\t\t\tHostname: \"node-1\",\n\t\t\t\tRole: \"worker\",\n\t\t\t}},\n\t\t\tresult: true,\n\t\t\tcomment: \"Servers should be equal\",\n\t\t},\n\t\t{\n\t\t\tservers: Servers{\n\t\t\t\t{\n\t\t\t\t\tAdvertiseIP: \"192.168.1.1\",\n\t\t\t\t\tHostname: \"node-1\",\n\t\t\t\t\tRole: \"worker\",\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAdvertiseIP: \"192.168.1.2\",\n\t\t\t\t\tHostname: \"node-2\",\n\t\t\t\t\tRole: \"worker\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: false,\n\t\t\tcomment: \"Servers should not be equal: different number of servers\",\n\t\t},\n\t\t{\n\t\t\tservers: Servers{{\n\t\t\t\tAdvertiseIP: \"192.168.1.2\",\n\t\t\t\tHostname: \"node-1\",\n\t\t\t\tRole: \"worker\",\n\t\t\t}},\n\t\t\tresult: false,\n\t\t\tcomment: \"Servers should not be equal: different IPs\",\n\t\t},\n\t\t{\n\t\t\tservers: Servers{{\n\t\t\t\tAdvertiseIP: \"192.168.1.1\",\n\t\t\t\tHostname: \"node-2\",\n\t\t\t\tRole: \"worker\",\n\t\t\t}},\n\t\t\tresult: false,\n\t\t\tcomment: \"Servers should not be equal: different hostnames\",\n\t\t},\n\t\t{\n\t\t\tservers: Servers{{\n\t\t\t\tAdvertiseIP: \"192.168.1.1\",\n\t\t\t\tHostname: \"node-1\",\n\t\t\t\tRole: \"db\",\n\t\t\t}},\n\t\t\tresult: false,\n\t\t\tcomment: \"Servers should not be equal: different roles\",\n\t\t},\n\t}\n\tfor _, tc := range testCases {\n\t\tc.Assert(servers.IsEqualTo(tc.servers), check.Equals, tc.result,\n\t\t\tcheck.Commentf(tc.comment))\n\t}\n}", "func (l *List) Equal(m *List) bool {\n\treturn reflect.DeepEqual(l.entries, 
m.entries)\n}", "func (s *RedisSpec) Differs(expected ResourceInfo, observed ResourceInfo) bool {\n\tswitch expected.Obj.(type) {\n\tcase *resources.Secret:\n\t\t// Dont update a secret\n\t\treturn false\n\tcase *resources.Service:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t\texpected.Obj.(*resources.Service).Spec.ClusterIP = observed.Obj.(*resources.Service).Spec.ClusterIP\n\tcase *resources.PodDisruptionBudget:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t}\n\treturn true\n}", "func ValidateResources(resources *corev1.ResourceRequirements, defaults corev1.ResourceRequirements) {\n\t// check for nil maps\n\tif resources.Requests == nil {\n\t\tresources.Requests = make(corev1.ResourceList)\n\t}\n\tif resources.Limits == nil {\n\t\tresources.Limits = make(corev1.ResourceList)\n\t}\n\n\t// if not given, use default cpu requests\n\t_, ok := resources.Requests[corev1.ResourceCPU]\n\tif !ok {\n\t\tresources.Requests[corev1.ResourceCPU] = defaults.Requests[corev1.ResourceCPU]\n\t}\n\n\t// if not given, use default memory requests\n\t_, ok = resources.Requests[corev1.ResourceMemory]\n\tif !ok {\n\t\tresources.Requests[corev1.ResourceMemory] = defaults.Requests[corev1.ResourceMemory]\n\t}\n\n\t// if not given, use default cpu limits\n\t_, ok = resources.Limits[corev1.ResourceCPU]\n\tif !ok {\n\t\tresources.Limits[corev1.ResourceCPU] = defaults.Limits[corev1.ResourceCPU]\n\t}\n\n\t// if not given, use default memory limits\n\t_, ok = resources.Limits[corev1.ResourceMemory]\n\tif !ok {\n\t\tresources.Limits[corev1.ResourceMemory] = defaults.Limits[corev1.ResourceMemory]\n\t}\n}", "func Compare(expected, actual io.Reader) error {\n\texpScan := bufio.NewScanner(expected)\n\tactScan := bufio.NewScanner(actual)\n\n\tfor line := 1; ; line++ {\n\t\texp, hasExp := scanTrimRight(expScan)\n\t\tact, hasAct := scanTrimRight(actScan)\n\n\t\t// EOF at the same time\n\t\tif !hasExp && !hasAct {\n\t\t\treturn nil\n\t\t}\n\t\t// 
they are not equal\n\t\tif exp != act {\n\t\t\treturn newErr(line, exp, act)\n\t\t}\n\t\t// they are all exists and equal\n\t\tif hasExp && hasAct {\n\t\t\tcontinue\n\t\t}\n\t\t// verify all empty line lefts\n\t\tif err := verifyEOFSpace(\"actual\", actScan); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err := verifyEOFSpace(\"expected\", expScan); err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// at this point, they should all be same\n\t\treturn nil\n\t}\n}", "func (resource *ResourceType) Equals(other Type, override EqualityOverrides) bool {\n\tif resource == other {\n\t\t// Same reference\n\t\treturn true\n\t}\n\n\totherResource, ok := other.(*ResourceType)\n\tif !ok {\n\t\treturn false\n\t}\n\n\t// Do cheap tests earlier\n\tif resource.isStorageVersion != otherResource.isStorageVersion ||\n\t\tlen(resource.testcases) != len(otherResource.testcases) ||\n\t\tlen(resource.functions) != len(otherResource.functions) ||\n\t\t!TypeEquals(resource.spec, otherResource.spec, override) ||\n\t\t!TypeEquals(resource.status, otherResource.status, override) ||\n\t\tlen(resource.annotations) != len(otherResource.annotations) ||\n\t\tresource.scope != otherResource.scope ||\n\t\tresource.armType != otherResource.armType ||\n\t\t!TypeEquals(resource.apiVersionTypeName, otherResource.apiVersionTypeName) ||\n\t\t!resource.apiVersionEnumValue.Equals(&otherResource.apiVersionEnumValue) ||\n\t\t!resource.InterfaceImplementer.Equals(otherResource.InterfaceImplementer, override) {\n\t\treturn false\n\t}\n\n\t// Check same functions present\n\tfor name, fn := range otherResource.functions {\n\t\tourFn, ok := resource.functions[name]\n\t\tif !ok {\n\t\t\treturn false\n\t\t}\n\n\t\tif !ourFn.Equals(fn, override) {\n\t\t\treturn false\n\t\t}\n\t}\n\n\t// Check same test cases present\n\tfor name, testcase := range otherResource.testcases {\n\t\tourCase, ok := resource.testcases[name]\n\t\tif !ok {\n\t\t\t// Didn't find the func, not equal\n\t\t\treturn false\n\t\t}\n\n\t\tif 
!ourCase.Equals(testcase, override) {\n\t\t\t// Different testcase, even though same name; not-equal\n\t\t\treturn false\n\t\t}\n\t}\n\n\t// Check same annotations present in the same order\n\tfor i, ourAnnotation := range resource.annotations {\n\t\totherAnnotation := otherResource.annotations[i]\n\t\tif ourAnnotation != otherAnnotation {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func isInRes(expected string, res []string) bool {\n\tfor j := 0; j < len(res); j++ {\n\t\tif res[j] == expected {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func TestListResources_DuplicateResourceFilterByLabel(t *testing.T) {\n\tt.Parallel()\n\tctx := context.Background()\n\n\tbackend, err := lite.NewWithConfig(ctx, lite.Config{\n\t\tPath: t.TempDir(),\n\t\tClock: clockwork.NewFakeClock(),\n\t})\n\trequire.NoError(t, err)\n\n\tpresence := NewPresenceService(backend)\n\n\t// Same resource name, but have different labels.\n\tnames := []string{\"a\", \"a\", \"a\", \"a\"}\n\tlabels := []map[string]string{\n\t\t{\"env\": \"prod\"},\n\t\t{\"env\": \"dev\"},\n\t\t{\"env\": \"qa\"},\n\t\t{\"env\": \"dev\"},\n\t}\n\n\ttests := []struct {\n\t\tname string\n\t\tkind string\n\t\tinsertResources func()\n\t\twantNames []string\n\t}{\n\t\t{\n\t\t\tname: \"KindDatabaseServer\",\n\t\t\tkind: types.KindDatabaseServer,\n\t\t\tinsertResources: func() {\n\t\t\t\tfor i := 0; i < len(names); i++ {\n\t\t\t\t\tdb, err := types.NewDatabaseServerV3(types.Metadata{\n\t\t\t\t\t\tName: fmt.Sprintf(\"name-%v\", i),\n\t\t\t\t\t}, types.DatabaseServerSpecV3{\n\t\t\t\t\t\tHostID: \"_\",\n\t\t\t\t\t\tHostname: \"_\",\n\t\t\t\t\t\tDatabase: &types.DatabaseV3{\n\t\t\t\t\t\t\tMetadata: types.Metadata{\n\t\t\t\t\t\t\t\tName: names[i],\n\t\t\t\t\t\t\t\tLabels: labels[i],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tSpec: types.DatabaseSpecV3{\n\t\t\t\t\t\t\t\tProtocol: \"_\",\n\t\t\t\t\t\t\t\tURI: \"_\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t})\n\t\t\t\t\trequire.NoError(t, err)\n\t\t\t\t\t_, err = 
presence.UpsertDatabaseServer(ctx, db)\n\t\t\t\t\trequire.NoError(t, err)\n\t\t\t\t}\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"KindAppServer\",\n\t\t\tkind: types.KindAppServer,\n\t\t\tinsertResources: func() {\n\t\t\t\tfor i := 0; i < len(names); i++ {\n\t\t\t\t\tserver, err := types.NewAppServerV3(types.Metadata{\n\t\t\t\t\t\tName: fmt.Sprintf(\"name-%v\", i),\n\t\t\t\t\t}, types.AppServerSpecV3{\n\t\t\t\t\t\tHostID: \"_\",\n\t\t\t\t\t\tApp: &types.AppV3{\n\t\t\t\t\t\t\tMetadata: types.Metadata{\n\t\t\t\t\t\t\t\tName: names[i],\n\t\t\t\t\t\t\t\tLabels: labels[i],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tSpec: types.AppSpecV3{URI: \"_\"},\n\t\t\t\t\t\t},\n\t\t\t\t\t})\n\t\t\t\t\trequire.NoError(t, err)\n\t\t\t\t\t_, err = presence.UpsertApplicationServer(ctx, server)\n\t\t\t\t\trequire.NoError(t, err)\n\t\t\t\t}\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"KindKubernetesCluster\",\n\t\t\tkind: types.KindKubernetesCluster,\n\t\t\tinsertResources: func() {\n\t\t\t\tfor i := 0; i < len(names); i++ {\n\n\t\t\t\t\tkube, err := types.NewKubernetesClusterV3(\n\t\t\t\t\t\ttypes.Metadata{\n\t\t\t\t\t\t\tName: names[i],\n\t\t\t\t\t\t\tLabels: labels[i],\n\t\t\t\t\t\t},\n\t\t\t\t\t\ttypes.KubernetesClusterSpecV3{},\n\t\t\t\t\t)\n\t\t\t\t\trequire.NoError(t, err)\n\t\t\t\t\tkubeServer, err := types.NewKubernetesServerV3FromCluster(\n\t\t\t\t\t\tkube,\n\t\t\t\t\t\tfmt.Sprintf(\"host-%v\", i),\n\t\t\t\t\t\tfmt.Sprintf(\"hostID-%v\", i),\n\t\t\t\t\t)\n\t\t\t\t\trequire.NoError(t, err)\n\t\t\t\t\t// Upsert server.\n\t\t\t\t\t_, err = presence.UpsertKubernetesServer(ctx, kubeServer)\n\t\t\t\t\trequire.NoError(t, err)\n\n\t\t\t\t}\n\t\t\t},\n\t\t},\n\t}\n\n\tfor _, tc := range tests {\n\t\tt.Run(tc.name, func(t *testing.T) {\n\t\t\ttc.insertResources()\n\n\t\t\t// Look among the duplicated resource by label\n\t\t\tresp, err := presence.ListResources(ctx, proto.ListResourcesRequest{\n\t\t\t\tResourceType: tc.kind,\n\t\t\t\tNeedTotalCount: true,\n\t\t\t\tLimit: 5,\n\t\t\t\tSearchKeywords: 
[]string{\"dev\"},\n\t\t\t})\n\t\t\trequire.NoError(t, err)\n\t\t\trequire.Len(t, resp.Resources, 1)\n\t\t\trequire.Equal(t, 1, resp.TotalCount)\n\t\t\trequire.Equal(t, map[string]string{\"env\": \"dev\"}, resp.Resources[0].GetAllLabels())\n\t\t})\n\t}\n}", "func (p *serviceEvaluator) MatchingResources(input []api.ResourceName) []api.ResourceName {\n\treturn quota.Intersection(input, serviceResources)\n}", "func (list *BlackWhiteList) Equal(newList *BlackWhiteList) bool {\n\tlist.m.RLock()\n\tif len(list.whitelist) != len(newList.whitelist) || len(list.Blacklist) != len(newList.Blacklist) {\n\t\tlist.m.RUnlock()\n\t\treturn false\n\t}\n\tfor i, iprange := range list.whitelist {\n\t\tif !iprange.Equal(newList.whitelist[i]) {\n\t\t\tlist.m.RUnlock()\n\t\t\treturn false\n\t\t}\n\t}\n\tfor i, iprange := range list.Blacklist {\n\t\tif !iprange.Equal(newList.Blacklist[i]) {\n\t\t\tlist.m.RUnlock()\n\t\t\treturn false\n\t\t}\n\t}\n\tlist.m.RUnlock()\n\treturn true\n}", "func Merge(resources ...v1.ResourceList) v1.ResourceList {\n\tresult := v1.ResourceList{}\n\tfor _, resourceList := range resources {\n\t\tfor resourceName, quantity := range resourceList {\n\t\t\tcurrent := result[resourceName]\n\t\t\tcurrent.Add(quantity)\n\t\t\tresult[resourceName] = current\n\t\t}\n\t}\n\treturn result\n}", "func ArePodSpecDifferent(lhs, rhs v1.PodSpec, strictTolerations bool) bool {\n\tchanged := false\n\n\tif len(lhs.Containers) != len(rhs.Containers) {\n\t\tchanged = true\n\t}\n\n\t// check nodeselectors\n\tif !areSelectorsSame(lhs.NodeSelector, rhs.NodeSelector) {\n\t\tchanged = true\n\t}\n\n\t// strictTolerations are for when we compare from the deployments or statefulsets\n\t// if we are seeing if rolled out pods contain changes we don't want strictTolerations\n\t// since k8s may add additional tolerations to pods\n\tif strictTolerations {\n\t\t// check tolerations\n\t\tif !areTolerationsSame(lhs.Tolerations, rhs.Tolerations) {\n\t\t\tchanged = true\n\t\t}\n\t} else {\n\t\t// 
check tolerations\n\t\tif !containsSameTolerations(lhs.Tolerations, rhs.Tolerations) {\n\t\t\tchanged = true\n\t\t}\n\t}\n\n\t// check container fields\n\tfor _, lContainer := range lhs.Containers {\n\t\tfound := false\n\n\t\tfor _, rContainer := range rhs.Containers {\n\t\t\t// Only compare the images of containers with the same name\n\t\t\tif lContainer.Name != rContainer.Name {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tfound = true\n\n\t\t\t// can't use reflect.DeepEqual here, due to k8s adding token mounts\n\t\t\t// check that rContainer is all found within lContainer and that they match by name\n\t\t\tif !containsSameVolumeMounts(lContainer.VolumeMounts, rContainer.VolumeMounts) {\n\t\t\t\tchanged = true\n\t\t\t}\n\n\t\t\tif lContainer.Image != rContainer.Image {\n\t\t\t\tchanged = true\n\t\t\t}\n\n\t\t\tif !comparators.EnvValueEqual(lContainer.Env, rContainer.Env) {\n\t\t\t\tchanged = true\n\t\t\t}\n\n\t\t\tif !reflect.DeepEqual(lContainer.Args, rContainer.Args) {\n\t\t\t\tchanged = true\n\t\t\t}\n\n\t\t\tif !reflect.DeepEqual(lContainer.Ports, rContainer.Ports) {\n\t\t\t\tchanged = true\n\t\t\t}\n\n\t\t\tif different, _ := utils.CompareResources(lContainer.Resources, rContainer.Resources); different {\n\t\t\t\tchanged = true\n\t\t\t}\n\t\t}\n\n\t\tif !found {\n\t\t\tchanged = true\n\t\t}\n\t}\n\treturn changed\n}", "func PrintResourceList(resources interface{}, output string, single bool) error {\n\tkt := reflect.ValueOf(resources)\n\t// Sometimes, we want to marshal the first resource of a slice or array as single item\n\tif kt.Kind() == reflect.Slice || kt.Kind() == reflect.Array {\n\t\tif single && kt.Len() == 1 {\n\t\t\treturn PrintResource(kt.Index(0).Interface(), output)\n\t\t}\n\n\t\t// If we have a zero len list, prevent printing \"null\"\n\t\tif kt.Len() == 0 {\n\t\t\treturn PrintResource([]string{}, output)\n\t\t}\n\t}\n\n\tswitch output {\n\tcase \"json\":\n\t\tjsonBytes, err := json.MarshalIndent(resources, \"\", \" \")\n\t\tif err != nil 
{\n\t\t\treturn fmt.Errorf(\"unable to marshal resources to json: %w\", err)\n\t\t}\n\t\tfmt.Println(string(jsonBytes))\n\tcase \"yaml\":\n\t\tyamlBytes, err := yaml.Marshal(resources)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unable to marshal resources to yaml: %w\", err)\n\t\t}\n\t\tfmt.Print(string(yamlBytes))\n\tdefault:\n\t\treturn fmt.Errorf(\"unknown output format: %s\", output)\n\t}\n\treturn nil\n}", "func (a *Client) ListResources(ctx context.Context, params *ListResourcesParams) (*ListResourcesOK, error) {\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"listResources\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/approval_system/resource\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\"},\n\t\tParams: params,\n\t\tReader: &ListResourcesReader{formats: a.formats},\n\t\tAuthInfo: a.authInfo,\n\t\tContext: ctx,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*ListResourcesOK), nil\n\n}", "func (r Resource) Equal(other Resource) bool {\n\tswitch {\n\tcase r.ID != other.ID,\n\t\tr.Status != other.Status,\n\t\t!r.Since.Equal(other.Since):\n\t\treturn false\n\tdefault:\n\t\treturn true\n\t}\n}", "func resourceVersionsMatch(o1 interface{}, o2 interface{}) bool {\n\tr1, ok1 := o1.(metav1.Object)\n\tr2, ok2 := o2.(metav1.Object)\n\tif !ok1 || !ok2 {\n\t\tmsg := fmt.Sprintf(\"error decoding kube objects during update, o1 type: %v, o2 type: %v\",\n\t\t\treflect.TypeOf(o1),\n\t\t\treflect.TypeOf(o2))\n\t\tlog.Scope.Error(msg)\n\t\tstats.RecordEventError(msg)\n\t\treturn false\n\t}\n\treturn r1.GetResourceVersion() == r2.GetResourceVersion()\n}", "func TestListArrayResHumanReadable(t *testing.T) {\n\tvar command = \"LISTARRAY\"\n\tvar resJSON = `{\"command\":\"LISTARRAY\",\"rid\":\"fromCLI\",\n\t\"result\":{\"status\":{\"code\":0,\"description\":\"DONE\"},\n\t\"data\":{\"arrayList\": 
[{\"createDatetime\": \"2021-04-16 15:52:14 +0900\",\n\t\"devicelist\": [{\"name\": \"uram0\",\"type\": \"BUFFER\"},\n\t{\"name\": \"S4H2NE0M600736 \",\"type\": \"DATA\"},\n\t{\"name\": \"S4H2NE0M600745\",\"type\": \"DATA\"},\n\t{\"name\": \"S4H2NE0M600763 \",\"type\": \"DATA\"}],\n\t\"name\": \"ARRAY0\",\"status\":\"Mounted\",\"updateDatetime\": \"2021-04-16 15:52:14 +0900\"},\n\t{\"createDatetime\": \"2021-04-16 15:52:14 +0900\",\"devicelist\": [{\"name\": \"uram1\",\"type\": \"BUFFER\"},\n\t{\"name\": \"S4H2NE0M600744\",\"type\": \"DATA\"},{\"name\": \"S4H2NE0M600743 \",\"type\": \"DATA\"},\n\t{\"name\": \"S4H2NE0M600746 \",\"type\": \"DATA\"}],\"name\": \"ARRAY1\",\"status\":\"Unmounted\",\n\t\"updateDatetime\": \"2021-04-16 15:52:14 +0900\"}]}}}`\n\n\texpected := `Name: ARRAY0\n---------------------------\nDatetime Created: 2021-04-16 15:52:14 +0900\nDatetime Updated: 2021-04-16 15:52:14 +0900\nStatus: Mounted\n\nDevices\n-------------\nName: uram0\nType: BUFFER\n\nName: S4H2NE0M600736 \nType: DATA\n\nName: S4H2NE0M600745\nType: DATA\n\nName: S4H2NE0M600763 \nType: DATA\n\n\nName: ARRAY1\n---------------------------\nDatetime Created: 2021-04-16 15:52:14 +0900\nDatetime Updated: 2021-04-16 15:52:14 +0900\nStatus: Unmounted\n\nDevices\n-------------\nName: uram1\nType: BUFFER\n\nName: S4H2NE0M600744\nType: DATA\n\nName: S4H2NE0M600743 \nType: DATA\n\nName: S4H2NE0M600746 \nType: DATA`\n\n\t// mj: For testing, I temporarily redirect log output to buffer.\n\tvar buff bytes.Buffer\n\tlog.SetOutput(&buff)\n\tlog.SetFlags(0)\n\n\tdisplaymgr.PrintResponse(command, resJSON, false, false)\n\n\toutput := buff.String()\n\toutput = output[:len(output)-1] // Remove the last \\n from output string\n\n\tdist := testmgr.Levenshtein([]rune(expected), []rune(output))\n\n\t// TODO(mj): Two long texts can be different slightly.\n\t// dist > thresholdDist should be reivsed once we find a better way to test long texts.\n\tthresholdDist := 5\n\tif dist > thresholdDist 
{\n\t\tt.Errorf(\"Expected: %q Output: %q\", expected, output)\n\t}\n}", "func (list List) Equal(other List) bool {\n\tif other.Length() != list.Length() {\n\t\treturn false\n\t}\n\tfor idx, val := range list {\n\t\tif other[idx] != val {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func compareAuthSet(expected *model.AuthSet, actual *model.AuthSet, t *testing.T) {\n\tassert.Equal(t, expected.IdData, actual.IdData)\n\tassert.Equal(t, expected.PubKey, actual.PubKey)\n\tassert.Equal(t, expected.DeviceId, actual.DeviceId)\n\tassert.Equal(t, expected.IdDataStruct, actual.IdDataStruct)\n\tassert.Equal(t, expected.IdDataSha256, actual.IdDataSha256)\n\tassert.Equal(t, expected.Status, actual.Status)\n\tcompareTime(uto.Time(expected.Timestamp), uto.Time(actual.Timestamp), t)\n}", "func AssertEqualStringMap(expected map[string]string, result map[string]string) {\n\tAssertType(expected, result)\n\tif expected == nil && result == nil {\n\t\treturn\n\t}\n\tif len(expected) != len(result) {\n\t\tpanic(fmt.Sprintf(\"Error: [] Different count of items\\nExpected Value: %v\\nResult: %v\", expected, result))\n\t}\n\tfor expectedKey := range expected {\n\t\tif result[expectedKey] != expected[expectedKey] {\n\t\t\tpanic(fmt.Sprintf(\"Error: [] Item missing: %v.\\nExpected Value: %v\\nResult: %v\", expected[expectedKey], expected, result))\n\t\t}\n\t}\n}", "func LinkedListsAreEqual(list1, list2 btll.NodeList) bool {\n\tif list1.Length != list2.Length || (list1.Head.Data != list2.Head.Data || list1.Tail.Data != list2.Tail.Data) {\n\t\treturn false\n\t}\n\tfor list2.Head != nil || list1.Head != nil {\n\t\tif list1.Head.Data != list2.Head.Data || list1.Head.Freq != list2.Head.Freq {\n\t\t\treturn false\n\t\t}\n\t\tlist2.Head = list2.Head.Next\n\t\tlist1.Head = list1.Head.Next\n\t}\n\tif list1.Head == nil && list2.Head == nil {\n\t\treturn true\n\t}\n\treturn false\n}", "func ListResource(baseURLs []string, URI string) ([]string, error) {\n\tif strings.HasPrefix(URI, \"@\") 
{\n\t\tURI = string(URI[1:])\n\t}\n\tfor _, baseURL := range baseURLs {\n\t\tparent, matchingExpr := path.Split(URI)\n\t\tif parent != \"\" {\n\t\t\tbaseURL = toolbox.URLPathJoin(baseURL, parent)\n\t\t} else {\n\t\t\tmatchingExpr = URI\n\t\t}\n\t\tvar exprSuffix = \"\"\n\t\tif path.Ext(matchingExpr) == \"\" {\n\t\t\texprSuffix = \".+\"\n\t\t}\n\t\tregExprText := strings.Replace(matchingExpr, \"*\", \".+\", strings.Count(matchingExpr, \"*\"))\n\t\tregExprText = regExprText + exprSuffix\n\t\tif !strings.HasPrefix(regExprText, \".+\") {\n\t\t\tregExprText = \".+\" + regExprText\n\t\t}\n\t\tregExpression := regexp.MustCompile(regExprText)\n\t\tresource := url.NewResource(baseURL)\n\t\tstorageService, err := storage.NewServiceForURL(resource.URL, \"\")\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tobjects, err := storageService.List(resource.URL)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\n\t\tvar result = make([]string, 0)\n\t\tfor _, candidate := range objects {\n\t\t\tif !candidate.IsContent() {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif regExpression.MatchString(candidate.URL()) {\n\t\t\t\tresult = append(result, candidate.URL())\n\t\t\t}\n\t\t}\n\t\tif len(result) > 0 {\n\t\t\tsort.Strings(result)\n\t\t\treturn result, nil\n\t\t}\n\t}\n\treturn nil, nil\n}", "func (o *ObjectCountEvaluator) MatchingResources(input []api.ResourceName) []api.ResourceName {\n\treturn quota.Intersection(input, []api.ResourceName{o.ResourceName})\n}", "func (m *MockClientInterface) ListCustomResource(apiGroup, version, namespace, resourceKind string) (*operatorclient.CustomResourceList, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ListCustomResource\", apiGroup, version, namespace, resourceKind)\n\tret0, _ := ret[0].(*operatorclient.CustomResourceList)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (a *MemberAwaitility) WaitForExpectedNumberOfResources(kind string, expected int, list func() (int, error)) error {\n\terr := wait.Poll(a.RetryInterval, 
a.Timeout, func() (done bool, err error) {\n\t\tactual, err := list()\n\t\tif err != nil {\n\t\t\treturn false, err\n\t\t}\n\t\tif actual == expected {\n\t\t\ta.T.Logf(\"The number of %s resources matches\", kind)\n\t\t\treturn true, nil\n\t\t}\n\t\ta.T.Logf(\"Waiting for the expected number of %s resources. Actual: %d, Expected: %d\", kind, actual, expected)\n\t\treturn false, nil\n\t})\n\treturn err\n}", "func (m *MockDestinationRuleSet) List(filterResource ...func(*v1alpha3.DestinationRule) bool) []*v1alpha3.DestinationRule {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{}\n\tfor _, a := range filterResource {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"List\", varargs...)\n\tret0, _ := ret[0].([]*v1alpha3.DestinationRule)\n\treturn ret0\n}", "func Compare(t *testing.T, expected string, out string) {\n\tif out != expected {\n\t\tt.Error(Warn(`FAIL!`), \"\\n\\n\"+\n\t\t\t`Got: `+quoted(out)+\"\\n\"+\n\t\t\t`Expected: `+quoted(expected)+\"\\n\",\n\t\t)\n\t} else {\n\t\tt.Log(Okay(`PASS:`), quoted(strings.TrimSuffix(out, \"\\n\")))\n\t}\n}", "func diffRemediations(old, new []*compv1alpha1.ComplianceRemediation) bool {\n\tif old == nil {\n\t\treturn new == nil\n\t}\n\n\tif len(old) != len(new) {\n\t\treturn false\n\t}\n\n\tfor idx := range old {\n\t\toldRem, newRem := old[idx], new[idx]\n\t\tif oldRem.Spec.Current.Object.GetKind() != newRem.Spec.Current.Object.GetKind() {\n\t\t\treturn false\n\t\t}\n\n\t\t// should we be more picky and just compare what can be set with the remediations? e.g. 
OSImageURL can't\n\t\t// be set with a remediation..\n\t\tif !cmp.Equal(oldRem.Spec.Current.Object, newRem.Spec.Current.Object) {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func (m *MockKubernetesService) ListAssociatedResourcesForDeletion(clusterID string) (*do.KubernetesAssociatedResources, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ListAssociatedResourcesForDeletion\", clusterID)\n\tret0, _ := ret[0].(*do.KubernetesAssociatedResources)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func TestEqual(t *testing.T) {\n\ttables := []struct {\n\t\tx []string\n\t\ty []string\n\t\texpected bool\n\t}{\n\t\t{[]string{}, []string{}, true},\n\t\t{[]string{}, []string{\"\"}, false},\n\t\t{[]string{\"\"}, []string{\"\"}, true},\n\t\t{[]string{\"\"}, []string{\"a\"}, false},\n\t\t{[]string{\"a\"}, []string{\"a\", \"a\"}, false},\n\t\t{[]string{\"b\"}, []string{\"a\"}, false},\n\t\t{[]string{\"\", \"\", \"\"}, []string{\"\", \"\", \"\"}, true},\n\t\t{[]string{\"a\", \"b\", \"c\"}, []string{\"a\", \"b\", \"e\"}, false},\n\t}\n\n\tfor _, table := range tables {\n\t\tresult := Equal(table.x, table.y)\n\t\tif result != table.expected {\n\t\t\tt.Errorf(\"Match failed for (%s, %s). 
Expected %t, got %t\",\n\t\t\t\ttable.x, table.y, table.expected, result)\n\t\t}\n\t}\n}", "func (ro *ResourceOperations) List(parameters *ResourceListParameters) (*ResourceListResult, *AzureOperationResponse, error) {\n\tsubscriptionId := getSubscriptionId(ro.c, nil)\n\n\tpath := \"/subscriptions/\" + url.QueryEscape(subscriptionId)\n\n\tif parameters != nil {\n\t\tif parameters.ResourceGroupName != \"\" {\n\t\t\tpath += \"/resourcegroups/\" + url.QueryEscape(parameters.ResourceGroupName)\n\t\t}\n\t}\n\n\tpath += \"/resources?api-version=\" + url.QueryEscape(ro.c.apiVersion)\n\n\tif parameters != nil {\n\t\tif parameters.Top != 0 {\n\t\t\tpath += \"&$top=\" + strconv.Itoa(parameters.Top)\n\t\t}\n\n\t\tfilter := \"\"\n\n\t\tif parameters.ResourceType != \"\" {\n\t\t\tfilter += url.QueryEscape(\"resourceType eq '\" + parameters.ResourceType + \"'\")\n\t\t}\n\n\t\tif parameters.TagValue != \"\" {\n\t\t\tif filter != \"\" {\n\t\t\t\tfilter += url.QueryEscape(\" and \")\n\t\t\t}\n\t\t\tfilter += url.QueryEscape(\"tagValue eq '\" + parameters.ResourceType + \"'\")\n\t\t}\n\n\t\tif filter != \"\" {\n\t\t\tpath += \"&filter=\" + filter\n\t\t}\n\t}\n\n\tvar result ResourceListResult\n\tazureOperationResponse, err := ro.c.DoGet(path, &result)\n\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\treturn &result, azureOperationResponse, nil\n}", "func Compare[T any](t testing.TB, x, y T) bool {\n\treturn objectsAreEqual(x, y)\n}", "func (m *MockWasmDeploymentSet) List(filterResource ...func(*v1beta1.WasmDeployment) bool) []*v1beta1.WasmDeployment {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{}\n\tfor _, a := range filterResource {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"List\", varargs...)\n\tret0, _ := ret[0].([]*v1beta1.WasmDeployment)\n\treturn ret0\n}", "func (rm fakeRESTMapper) Equal(other fakeRESTMapper) bool {\n\treturn cmp.Equal(rm.defaultGroupVersions, other.defaultGroupVersions)\n}", "func TestAddResource(t *testing.T) {\n\ttestName 
:= \"TestAddResource\"\n\tbeforeTest()\n\t// kinds to check for status\n\tvar kindsToCheckStatus = map[string]bool{\n\t\tAPPLICATION: true,\n\t\t\"Ingress\": true,\n\t\t\"Service\": true,\n\t\t\"Deployment\": true,\n\t\t\"StatefulSet\": true,\n\t\t\"NetworkPolicy\": true,\n\t\t//\t\t\"Kappnav\": true,\n\t}\n\n\t// resources to pre-populate\n\tvar files = []string{\n\t\t/* 0 */ KappnavConfigFile,\n\t\t/* 1 */ CrdApplication,\n\t\t/* 2 */ appBookinfo,\n\t\t/* 3 */ appProductpage,\n\t\t/* 4 */ appDetails,\n\t\t/* 5 */ appReviews,\n\t\t/* 6 */ deploymentDetailsV1,\n\t\t/* 7 */ deploymentProcuctpageV1,\n\t\t/* 8 */ deploymentReviewsV1,\n\t\t/* 9 */ deploymentReviewsV2,\n\t\t/* 10 */ serviceDetails,\n\t\t/* 11 */ serviceProductpage,\n\t\t/* 12 */ serviceReview,\n\t\t/* 13 */ networkpolicyProductpage,\n\t\t/* 14 */ ingressBookinfo,\n\t\t/* 15 */ networkpolicyReviews,\n\t\t//\t\t/* 16 */ kappnavCRFile,\n\t}\n\n\titeration0IDs, err := readResourceIDs(files)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t/* Iteration 0: all normal */\n\ttestActions := newTestActions(testName, kindsToCheckStatus)\n\tvar emptyIDs = []resourceID{}\n\ttestActions.addIteration(iteration0IDs, emptyIDs)\n\n\t// iteration 1: Add deploymentReviewsV3\n\t// /* 16 */deploymentReviewsV3,\n\tres, err := readOneResourceID(deploymentReviewsV3)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tarrayLength := len(iteration0IDs)\n\tvar iteration1IDs = make([]resourceID, arrayLength, arrayLength)\n\tcopy(iteration1IDs, iteration0IDs)\n\titeration1IDs = append(iteration1IDs, res)\n\titeration1IDs[2].expectedStatus = warning // bookfino warning\n\titeration1IDs[5].expectedStatus = warning // review app warning\n\titeration1IDs[16].expectedStatus = warning // new deployment starts with warning status\n\ttestActions.addIteration(iteration1IDs, emptyIDs)\n\n\t// iteration 2: stabilize the new deployment to Normal\n\tarrayLength = len(iteration1IDs)\n\tvar iteration2IDs = make([]resourceID, arrayLength, 
arrayLength)\n\tcopy(iteration2IDs, iteration1IDs)\n\titeration2IDs[2].expectedStatus = Normal // bookfino Normal\n\titeration2IDs[5].expectedStatus = Normal // review app Normal\n\titeration2IDs[16].expectedStatus = Normal // new deployment Normal\n\ttestActions.addIteration(iteration2IDs, emptyIDs)\n\n\t/* iteration 3: add a new app */\n\tvar newFiles = []string{\n\t\t/* 17 */ appRatings,\n\t\t/* 18 */ deploymentRatingsV1,\n\t\t/* 19 */ serviceRatings,\n\t}\n\tnewResources, err := readResourceIDs(newFiles)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tarrayLength = len(iteration2IDs)\n\tvar iteration3IDs = make([]resourceID, arrayLength, arrayLength)\n\tcopy(iteration3IDs, iteration2IDs)\n\tfor _, newRes := range newResources {\n\t\titeration3IDs = append(iteration3IDs, newRes)\n\t}\n\titeration3IDs[2].expectedStatus = warning // bookfino now warning due to ratings app\n\titeration3IDs[17].expectedStatus = warning // ratings app warning\n\titeration3IDs[18].expectedStatus = warning // ratings deployment warning\n\ttestActions.addIteration(iteration3IDs, emptyIDs)\n\n\t/* iteration 4: everything back to normal */\n\tarrayLength = len(iteration3IDs)\n\tvar iteration4IDs = make([]resourceID, arrayLength, arrayLength)\n\tcopy(iteration4IDs, iteration3IDs)\n\titeration4IDs[2].expectedStatus = Normal // bookfino app Normal\n\titeration4IDs[17].expectedStatus = Normal // ratings app Normal\n\titeration4IDs[18].expectedStatus = Normal // ratings deployment Normal\n\ttestActions.addIteration(iteration4IDs, emptyIDs)\n\n\t/* iteration 7: clean up */\n\ttestActions.addIteration(emptyIDs, emptyIDs)\n\n\tclusterWatcher, err := createClusterWatcher(iteration0IDs, testActions, StatusFailureRate)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer clusterWatcher.shutDown()\n\n\t// make all trasition of testAction\n\terr = testActions.transitionAll()\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n}", "func getResourceList(cpu, memory string) v1.ResourceList {\n\tres := 
v1.ResourceList{}\n\tif cpu != \"\" {\n\t\tres[v1.ResourceCPU] = resource.MustParse(cpu)\n\t}\n\tif memory != \"\" {\n\t\tres[v1.ResourceMemory] = resource.MustParse(memory)\n\t}\n\treturn res\n}", "func TestRunDiff(t *testing.T) {\n\tcases := map[string]struct {\n\t\tcomparers compare.ComparerSet\n\t\tresourceChange []plan.ResourcePlan\n\t\tpreHook func()\n\t\texpected int\n\t\texpectedOutput []string\n\t}{\n\t\t\"create returns false with create resource\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: false,\n\t\t\t\t\tDiffOutput: \"comparer fail\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: []string{\"comparer fail\"},\n\t\t},\n\t\t\"create returns true with create resource\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: true,\n\t\t\t\t\tDiffOutput: \"comparer ok\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: []string{\"comparer ok\"},\n\t\t},\n\t\t\"no matching comparer\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: false,\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tAddressReturns: \"address\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: 
[]string{\"\"},\n\t\t},\n\t\t\"no matching comparer with strict enabled\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: false,\n\t\t\t\t\tDiffOutput: \"comparer fail\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tAddressReturns: \"address\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tpreHook: func() {\n\t\t\t\tstrict = true\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: []string{\"?\", \"address (no matching comparer)\"},\n\t\t},\n\t\t\"create returns true with multiple resources\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: true,\n\t\t\t\t\tDiffOutput: \"comparer ok\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address1\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address2\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: []string{\"comparer ok\\ncomparer ok\"},\n\t\t},\n\t\t\"fails if there is at least 1 failure\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: false,\n\t\t\t\t\tDiffOutput: \"comparer fail\",\n\t\t\t\t},\n\t\t\t\tDestroyComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: true,\n\t\t\t\t\tDiffOutput: \"comparer ok\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address1\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: 
\"type\",\n\t\t\t\t},\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tDeleteReturns: true,\n\t\t\t\t\tAddressReturns: \"address2\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: []string{\"comparer fail\", \"comparer ok\"},\n\t\t},\n\t\t\"returns 1 if there is at least 1 failure and errorOnFail is set\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: false,\n\t\t\t\t\tDiffOutput: \"comparer fail\",\n\t\t\t\t},\n\t\t\t\tDestroyComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: true,\n\t\t\t\t\tDiffOutput: \"comparer ok\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address1\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tDeleteReturns: true,\n\t\t\t\t\tAddressReturns: \"address2\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tpreHook: func() {\n\t\t\t\terrorOnFail = true\n\t\t\t},\n\t\t\texpected: 1,\n\t\t\texpectedOutput: []string{\"comparer fail\", \"comparer ok\"},\n\t\t},\n\t\t\"only outputs failed with failedOnly\": {\n\t\t\tcomparers: compare.ComparerSet{\n\t\t\t\tCreateComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: false,\n\t\t\t\t\tDiffOutput: \"comparer fail\",\n\t\t\t\t},\n\t\t\t\tDestroyComparer: &comparefakes.FakeComparer{\n\t\t\t\t\tDiffReturns: true,\n\t\t\t\t\tDiffOutput: \"comparer ok\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tresourceChange: []plan.ResourcePlan{\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tCreateReturns: true,\n\t\t\t\t\tAddressReturns: \"address1\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t\t&planfakes.FakeResourcePlan{\n\t\t\t\t\tDeleteReturns: 
true,\n\t\t\t\t\tAddressReturns: \"address2\",\n\t\t\t\t\tNameReturns: \"name\",\n\t\t\t\t\tTypeReturns: \"type\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tpreHook: func() {\n\t\t\t\tfailedOnly = true\n\t\t\t},\n\t\t\texpected: 0,\n\t\t\texpectedOutput: []string{\"comparer fail\"},\n\t\t},\n\t\t// TODO: test case to ensure comparers are called correctly(matching type and number of calls)\n\t}\n\n\tfor name, tc := range cases {\n\t\tt.Run(name, func(t *testing.T) {\n\t\t\t// set default vars\n\t\t\terrorOnFail = false\n\t\t\tstrict = false\n\t\t\tfailedOnly = false\n\n\t\t\tif tc.preHook != nil {\n\t\t\t\ttc.preHook()\n\t\t\t}\n\n\t\t\tvar output bytes.Buffer\n\t\t\tif got := runDiff(&output, tc.resourceChange, tc.comparers); got != tc.expected {\n\t\t\t\tt.Errorf(\"Expected: %v but got %v\", tc.expected, got)\n\t\t\t}\n\n\t\t\tfor _, s := range tc.expectedOutput {\n\t\t\t\tif !strings.Contains(output.String(), s) {\n\t\t\t\t\tt.Errorf(\"Result string did not contain %v\", s)\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t}\n}", "func compareFormulaArgList(lhs, rhs, matchMode formulaArg, caseSensitive bool) byte {\n\tif len(lhs.List) < len(rhs.List) {\n\t\treturn criteriaL\n\t}\n\tif len(lhs.List) > len(rhs.List) {\n\t\treturn criteriaG\n\t}\n\tfor arg := range lhs.List {\n\t\tcriteria := compareFormulaArg(lhs.List[arg], rhs.List[arg], matchMode, caseSensitive)\n\t\tif criteria != criteriaEq {\n\t\t\treturn criteria\n\t\t}\n\t}\n\treturn criteriaEq\n}", "func (rm *RsrcManager) SpecDiffers(expected, observed *reconciler.Object) bool {\n\te := expected.Obj.(*Object).Bucket\n\to := observed.Obj.(*Object).Bucket\n\treturn !reflect.DeepEqual(e.Acl, o.Acl) ||\n\t\t!reflect.DeepEqual(e.Billing, o.Billing) ||\n\t\t!reflect.DeepEqual(e.Cors, o.Cors) ||\n\t\t!reflect.DeepEqual(e.DefaultEventBasedHold, o.DefaultEventBasedHold) ||\n\t\t!reflect.DeepEqual(e.Encryption, o.Encryption) ||\n\t\t!reflect.DeepEqual(e.Labels, o.Labels) ||\n\t\t!reflect.DeepEqual(e.Lifecycle, o.Lifecycle) 
||\n\t\t!strings.EqualFold(e.Location, o.Location) ||\n\t\t!reflect.DeepEqual(e.Logging, o.Logging) ||\n\t\t!reflect.DeepEqual(e.Name, o.Name) ||\n\t\t!reflect.DeepEqual(e.Owner, o.Owner) ||\n\t\t!reflect.DeepEqual(e.StorageClass, o.StorageClass) ||\n\t\t!reflect.DeepEqual(e.Versioning, o.Versioning) ||\n\t\t!reflect.DeepEqual(e.Website, o.Website)\n}", "func TestCompare(t *testing.T, first Block, second Block) {\n\tjs, err := json.Marshal(first)\n\tif err != nil {\n\t\tt.Errorf(\"the error was expected to be nil, error returned: %s\", err.Error())\n\t\treturn\n\t}\n\n\terr = json.Unmarshal(js, second)\n\tif err != nil {\n\t\tt.Errorf(\"the error was expected to be nil, error returned: %s\", err.Error())\n\t\treturn\n\t}\n\n\treJS, err := json.Marshal(second)\n\tif err != nil {\n\t\tt.Errorf(\"the error was expected to be nil, error returned: %s\", err.Error())\n\t\treturn\n\t}\n\n\tif bytes.Compare(js, reJS) != 0 {\n\t\tt.Errorf(\"the transformed javascript is different.\\n%s\\n%s\", js, reJS)\n\t\treturn\n\t}\n\n\tif !first.Hash().Compare(second.Hash()) {\n\t\tt.Errorf(\"the instance conversion failed\")\n\t\treturn\n\t}\n}", "func CompareTokens(expected, actual []*Token) (bool, error) {\n\tif len(expected) != len(actual) {\n\t\treturn false, fmt.Errorf(\"Different lengths. Expected %d, Got %d\", len(expected), len(actual))\n\t}\n\tfor i := range expected {\n\t\tif expected[i].Type != actual[i].Type {\n\t\t\treturn false, fmt.Errorf(\"Different token types at index %d. Expected %v, Got %v. Value: %v\",\n\t\t\t\ti, expected[i].Type, actual[i].Type, expected[i].Value)\n\t\t}\n\t\tif expected[i].Value != actual[i].Value {\n\t\t\treturn false, fmt.Errorf(\"Different token values at index %d. 
Expected %v, Got %v\",\n\t\t\t\ti, expected[i].Value, actual[i].Value)\n\t\t}\n\t}\n\treturn true, nil\n}", "func listResources(clt resourcesAPIGetter, r *http.Request, resourceKind string) (*types.ListResourcesResponse, error) {\n\tvalues := r.URL.Query()\n\n\tlimit, err := queryLimitAsInt32(values, \"limit\", defaults.MaxIterationLimit)\n\tif err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\n\t// Sort is expected in format `<fieldName>:<asc|desc>` where\n\t// index 0 is fieldName and index 1 is direction.\n\t// If a direction is not set, or is not recognized, it defaults to ASC.\n\tvar sortBy types.SortBy\n\tsortParam := values.Get(\"sort\")\n\tif sortParam != \"\" {\n\t\tvals := strings.Split(sortParam, \":\")\n\t\tif vals[0] != \"\" {\n\t\t\tsortBy.Field = vals[0]\n\t\t\tif len(vals) > 1 && vals[1] == \"desc\" {\n\t\t\t\tsortBy.IsDesc = true\n\t\t\t}\n\t\t}\n\t}\n\n\tstartKey := values.Get(\"startKey\")\n\treq := proto.ListResourcesRequest{\n\t\tResourceType: resourceKind,\n\t\tLimit: limit,\n\t\tStartKey: startKey,\n\t\tSortBy: sortBy,\n\t\tPredicateExpression: values.Get(\"query\"),\n\t\tSearchKeywords: client.ParseSearchKeywords(values.Get(\"search\"), ' '),\n\t\tUseSearchAsRoles: values.Get(\"searchAsRoles\") == \"yes\",\n\t}\n\n\treturn clt.ListResources(r.Context(), req)\n}", "func (pList *LinkedListNumber) Compare(aList *LinkedListNumber) int {\n\ts1 := pList.String()\n\ts2 := aList.String()\n\n\tif len(s1) > len(s2) {\n\t\treturn 1\n\t}\n\tif len(s1) < len(s2) {\n\t\treturn -1\n\t}\n\n\treturn strings.Compare(s1, s2)\n}", "func (s *SQLProxySpec) Differs(expected ResourceInfo, observed ResourceInfo) bool {\n\tswitch expected.Obj.(type) {\n\tcase *resources.Service:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t\texpected.Obj.(*resources.Service).Spec.ClusterIP = observed.Obj.(*resources.Service).Spec.ClusterIP\n\t}\n\treturn true\n}", "func WhenSortedToEqual(actual, expected []string) (string, bool) 
{\n\tsort.Strings(actual)\n\tsort.Strings(expected)\n\n\tif reflect.DeepEqual(actual, expected) {\n\t\treturn \"\", true\n\t}\n\treturn fmt.Sprintf(\" expected: %#v\\nto deeply be: %#v\\n\", expected, actual), false\n}", "func (m *MockList) Difference(other List) (List, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Difference\", other)\n\tret0, _ := ret[0].(List)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func serverListEquals(a []dns.Server, b []dns.Server) bool {\n\tif len(a) != len(b) {\n\t\treturn false\n\t}\n\tfor i := range a {\n\t\tif a[i] != b[i] {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func TestResourceIDs(t *testing.T) {\n\ttestCases := []struct {\n\t\tdesc string\n\t\tin []ResourceID\n\t\texpected string\n\t\texpectParseError bool\n\t}{\n\t\t{\n\t\t\tdesc: \"single id\",\n\t\t\tin: []ResourceID{{\n\t\t\t\tClusterName: \"one\",\n\t\t\t\tKind: KindNode,\n\t\t\t\tName: \"uuid\",\n\t\t\t}},\n\t\t\texpected: `[\"/one/node/uuid\"]`,\n\t\t},\n\t\t{\n\t\t\tdesc: \"multiple ids\",\n\t\t\tin: []ResourceID{{\n\t\t\t\tClusterName: \"one\",\n\t\t\t\tKind: KindNode,\n\t\t\t\tName: \"uuid-1\",\n\t\t\t}, {\n\t\t\t\tClusterName: \"two\",\n\t\t\t\tKind: KindDatabase,\n\t\t\t\tName: \"uuid-2\",\n\t\t\t}},\n\t\t\texpected: `[\"/one/node/uuid-1\",\"/two/db/uuid-2\"]`,\n\t\t},\n\t\t{\n\t\t\tdesc: \"no cluster name\",\n\t\t\tin: []ResourceID{{\n\t\t\t\tClusterName: \"\",\n\t\t\t\tKind: KindNode,\n\t\t\t\tName: \"uuid\",\n\t\t\t}},\n\t\t\texpected: `[\"//node/uuid\"]`,\n\t\t\texpectParseError: true,\n\t\t},\n\t\t{\n\t\t\tdesc: \"bad cluster name\",\n\t\t\tin: []ResourceID{{\n\t\t\t\tClusterName: \"/,\",\n\t\t\t\tKind: KindNode,\n\t\t\t\tName: \"uuid\",\n\t\t\t}},\n\t\t\texpected: `[\"//,/node/uuid\"]`,\n\t\t\texpectParseError: true,\n\t\t},\n\t\t{\n\t\t\tdesc: \"bad resource kind\",\n\t\t\tin: []ResourceID{{\n\t\t\t\tClusterName: \"one\",\n\t\t\t\tKind: \"not,/a,/kind\",\n\t\t\t\tName: \"uuid\",\n\t\t\t}},\n\t\t\texpected: 
`[\"/one/not,/a,/kind/uuid\"]`,\n\t\t\texpectParseError: true,\n\t\t},\n\t\t{\n\t\t\t// Any resource name is actually fine, test that the list parsing\n\t\t\t// doesn't break.\n\t\t\tdesc: \"bad resource name\",\n\t\t\tin: []ResourceID{{\n\t\t\t\tClusterName: \"one\",\n\t\t\t\tKind: KindNode,\n\t\t\t\tName: `really\"--,bad resource\\\"\\\\\"name`,\n\t\t\t}},\n\t\t\texpected: `[\"/one/node/really\\\"--,bad resource\\\\\\\"\\\\\\\\\\\"name\"]`,\n\t\t},\n\t}\n\tfor _, tc := range testCases {\n\t\tt.Run(tc.desc, func(t *testing.T) {\n\t\t\tout, err := ResourceIDsToString(tc.in)\n\t\t\trequire.NoError(t, err)\n\t\t\trequire.Equal(t, tc.expected, out)\n\n\t\t\t// Parse the ids from the string and make sure they match the\n\t\t\t// original.\n\t\t\tparsed, err := ResourceIDsFromString(out)\n\t\t\tif tc.expectParseError {\n\t\t\t\trequire.Error(t, err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\trequire.NoError(t, err)\n\t\t\trequire.Equal(t, tc.in, parsed)\n\t\t})\n\t}\n}", "func includesEqual(configured []string, desired []string) bool {\n\tif len(configured) != len(desired) {\n\t\treturn false\n\t}\n\tsort.Strings(configured)\n\tsort.Strings(desired)\n\tfor i := 0; i < len(configured); i++ {\n\t\tif configured[i] != desired[i] {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func (m *MockMutableList) Difference(other List) (List, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Difference\", other)\n\tret0, _ := ret[0].(List)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func AreSimilar(file1 string, file2 string) (bool, error) {\n\tf1, err := ioutil.ReadFile(file1)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tf2, err := ioutil.ReadFile(file2)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tvar data1 []JSONObject\n\terr = json.Unmarshal(f1, &data1)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tvar data2 []JSONObject\n\terr = json.Unmarshal(f2, &data2)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tareSimilar := 
areUnorderedArraysSimilar(data1, data2)\n\treturn areSimilar, nil\n}", "func validateEntries(t *testing.T, entries, expected []endpoint.Endpoint) {\n\tif len(entries) != len(expected) {\n\t\tt.Fatalf(\"expected %q to match %q\", entries, expected)\n\t}\n\n\tfor i := range entries {\n\t\tif entries[i] != expected[i] {\n\t\t\tt.Fatalf(\"expected %q to match %q\", entries, expected)\n\t\t}\n\t}\n}", "func (m *MockClusterAdmin) ListPartitionReassignments(arg0 string, arg1 []int32) (map[string]map[int32]*sarama.PartitionReplicaReassignmentsStatus, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ListPartitionReassignments\", arg0, arg1)\n\tret0, _ := ret[0].(map[string]map[int32]*sarama.PartitionReplicaReassignmentsStatus)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (p *Peer) CompareContent(contentList []string) []interface{} {\n\t// Convert to an interface array\n\tcl := make([]interface{}, len(contentList))\n\tfor i, v := range contentList {\n\t\tcl[i] = v\n\t}\n\tcontentWeHaveSet := mapset.NewSetFromSlice(cl)\n\n\tcontentField := p.GetState().GetPoolField(\"RequiredContent\")\n\tif contentField == nil {\n\t\treturn make([]interface{}, 0)\n\t}\n\tcontentFromPool := contentField.(state.SignedList).Data\n\n\t// Convert to an interface array\n\ts := make([]interface{}, len(contentFromPool))\n\tfor i, v := range contentFromPool {\n\t\ts[i] = v\n\t}\n\n\t// Create a set\n\tcontentWeNeed := mapset.NewSetFromSlice(s)\n\n\t// Return the difference of the two\n\treturn contentWeNeed.Difference(contentWeHaveSet).ToSlice()\n}", "func (m *MockVirtualDestinationSet) List(filterResource ...func(*v1beta1.VirtualDestination) bool) []*v1beta1.VirtualDestination {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{}\n\tfor _, a := range filterResource {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"List\", varargs...)\n\tret0, _ := ret[0].([]*v1beta1.VirtualDestination)\n\treturn ret0\n}", "func (s *MySQLSpec) Differs(expected ResourceInfo, 
observed ResourceInfo) bool {\n\tswitch expected.Obj.(type) {\n\tcase *resources.Secret:\n\t\t// Dont update a secret\n\t\treturn false\n\tcase *resources.Service:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t\texpected.Obj.(*resources.Service).Spec.ClusterIP = observed.Obj.(*resources.Service).Spec.ClusterIP\n\tcase *resources.PodDisruptionBudget:\n\t\texpected.Obj.SetResourceVersion(observed.Obj.GetResourceVersion())\n\t}\n\treturn true\n}", "func (s *WorkerSpec) Differs(expected ResourceInfo, observed ResourceInfo) bool {\n\t// TODO\n\treturn true\n}", "func (m *MockAuthorizationPolicySet) List(filterResource ...func(*v1beta1.AuthorizationPolicy) bool) []*v1beta1.AuthorizationPolicy {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{}\n\tfor _, a := range filterResource {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"List\", varargs...)\n\tret0, _ := ret[0].([]*v1beta1.AuthorizationPolicy)\n\treturn ret0\n}", "func PolicyRuleResourceMatches(rule *rbacv1.PolicyRule, requestedResource string) bool {\n\tfor _, ruleResource := range rule.Resources {\n\t\tif ruleResource == rbacv1.ResourceAll {\n\t\t\treturn true\n\t\t}\n\n\t\tif ruleResource == requestedResource {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func (m *MockCustomResourceClient) ListCustomResource(apiGroup, version, namespace, resourceKind string) (*operatorclient.CustomResourceList, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ListCustomResource\", apiGroup, version, namespace, resourceKind)\n\tret0, _ := ret[0].(*operatorclient.CustomResourceList)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func EqualsListArg(a, b ListArg) bool {\n\tif len(a) != len(b) {\n\t\treturn false\n\t}\n\tfor i := 0; i < len(a); i++ {\n\t\tif a[i] != b[i] {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func Compare(oldMods, newMods []infrav1.ClusterModule) bool {\n\tif len(oldMods) != len(newMods) {\n\t\treturn 
false\n\t}\n\n\tsort.SliceStable(oldMods, func(i, j int) bool {\n\t\treturn oldMods[i].TargetObjectName < oldMods[j].TargetObjectName\n\t})\n\tsort.SliceStable(newMods, func(i, j int) bool {\n\t\treturn newMods[i].TargetObjectName < newMods[j].TargetObjectName\n\t})\n\n\tfor i := range oldMods {\n\t\tif oldMods[i].ControlPlane == newMods[i].ControlPlane &&\n\t\t\toldMods[i].TargetObjectName == newMods[i].TargetObjectName &&\n\t\t\toldMods[i].ModuleUUID == newMods[i].ModuleUUID {\n\t\t\tcontinue\n\t\t}\n\t\treturn false\n\t}\n\treturn true\n}" ]
[ "0.65800613", "0.6343115", "0.6236883", "0.59654737", "0.55586445", "0.5427846", "0.5257756", "0.5227804", "0.5226022", "0.51560676", "0.5124971", "0.5118968", "0.50719607", "0.5029155", "0.50274366", "0.50094444", "0.5001012", "0.49900916", "0.49872172", "0.49872172", "0.49754643", "0.49513274", "0.49506095", "0.49362636", "0.49262276", "0.49089256", "0.48589316", "0.48559687", "0.48246104", "0.48190248", "0.47746217", "0.47680724", "0.47256938", "0.47227013", "0.47133273", "0.46947894", "0.46928263", "0.4661206", "0.46507126", "0.4648887", "0.46438324", "0.46413442", "0.46391377", "0.4636019", "0.45992756", "0.45918798", "0.45772552", "0.45683217", "0.45570713", "0.45535433", "0.45403773", "0.45398378", "0.45308685", "0.45161417", "0.45139617", "0.45137954", "0.4510926", "0.45108625", "0.4501077", "0.4493612", "0.44919604", "0.44915006", "0.44811213", "0.4478043", "0.4477016", "0.44733208", "0.44630545", "0.44629794", "0.44609463", "0.44574437", "0.44492915", "0.44399437", "0.44376808", "0.44376287", "0.44361448", "0.44359022", "0.4435548", "0.4434005", "0.44323403", "0.44260305", "0.4420314", "0.44152015", "0.44110817", "0.44045657", "0.44024506", "0.43971506", "0.43946806", "0.43921405", "0.43913674", "0.43895894", "0.4389123", "0.43846107", "0.43834823", "0.4380988", "0.43806165", "0.4379894", "0.43797404", "0.43780166", "0.43721202", "0.43650937" ]
0.83638287
0
IsValidNodeTopology checks the provided NodeResourceTopology object if it is wellformad, internally consistent and consistent with the given kubelet config object. Returns true if the NodeResourceTopology object is consistent and well formet, false otherwise; if return false, logs the failure reason.
func IsValidNodeTopology(nodeTopology *v1alpha2.NodeResourceTopology, kubeletConfig *kubeletconfig.KubeletConfiguration) bool { if nodeTopology == nil || len(nodeTopology.TopologyPolicies) == 0 { framework.Logf("failed to get topology policy from the node topology resource") return false } tmPolicy := string(topologypolicy.DetectTopologyPolicy(kubeletConfig.TopologyManagerPolicy, kubeletConfig.TopologyManagerScope)) if nodeTopology.TopologyPolicies[0] != tmPolicy { framework.Logf("topology policy mismatch got %q expected %q", nodeTopology.TopologyPolicies[0], tmPolicy) return false } expectedPolicyAttribute := v1alpha2.AttributeInfo{ Name: nfdtopologyupdater.TopologyManagerPolicyAttributeName, Value: kubeletConfig.TopologyManagerPolicy, } if !containsAttribute(nodeTopology.Attributes, expectedPolicyAttribute) { framework.Logf("topology policy attributes don't have correct topologyManagerPolicy attribute expected %v attributeList %v", expectedPolicyAttribute, nodeTopology.Attributes) return false } expectedScopeAttribute := v1alpha2.AttributeInfo{ Name: nfdtopologyupdater.TopologyManagerScopeAttributeName, Value: kubeletConfig.TopologyManagerScope, } if !containsAttribute(nodeTopology.Attributes, expectedScopeAttribute) { framework.Logf("topology policy attributes don't have correct topologyManagerScope attribute expected %v attributeList %v", expectedScopeAttribute, nodeTopology.Attributes) return false } if nodeTopology.Zones == nil || len(nodeTopology.Zones) == 0 { framework.Logf("failed to get topology zones from the node topology resource") return false } foundNodes := 0 for _, zone := range nodeTopology.Zones { // TODO constant not in the APIs if !strings.HasPrefix(strings.ToUpper(zone.Type), "NODE") { continue } foundNodes++ if !isValidCostList(zone.Name, zone.Costs) { framework.Logf("invalid cost list for zone %q", zone.Name) return false } if !isValidResourceList(zone.Name, zone.Resources) { framework.Logf("invalid resource list for zone %q", zone.Name) 
return false } } return foundNodes > 0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func nodeIsValidForTopologyAwareHints(node *corev1.Node) bool {\n\treturn !node.Status.Allocatable.Cpu().IsZero() && node.Labels[corev1.LabelTopologyZone] != \"\"\n}", "func (t Topology) Validate() error {\n\terrs := []string{}\n\n\t// Check all node metadatas are valid, and the keys are parseable, i.e.\n\t// contain a scope.\n\tfor nodeID, nmd := range t.Nodes {\n\t\tif nmd.Metadata == nil {\n\t\t\terrs = append(errs, fmt.Sprintf(\"node ID %q has nil metadata\", nodeID))\n\t\t}\n\t\tif _, _, ok := ParseNodeID(nodeID); !ok {\n\t\t\terrs = append(errs, fmt.Sprintf(\"invalid node ID %q\", nodeID))\n\t\t}\n\n\t\t// Check all adjancency keys has entries in Node.\n\t\tfor _, dstNodeID := range nmd.Adjacency {\n\t\t\tif _, ok := t.Nodes[dstNodeID]; !ok {\n\t\t\t\terrs = append(errs, fmt.Sprintf(\"node metadata missing from adjacency %q -> %q\", nodeID, dstNodeID))\n\t\t\t}\n\t\t}\n\n\t\t// Check all the edge metadatas have entries in adjacencies\n\t\tfor dstNodeID := range nmd.Edges {\n\t\t\tif _, ok := t.Nodes[dstNodeID]; !ok {\n\t\t\t\terrs = append(errs, fmt.Sprintf(\"node %s metadatas missing for edge %q\", dstNodeID, nodeID))\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(errs) > 0 {\n\t\treturn fmt.Errorf(\"%d error(s): %s\", len(errs), strings.Join(errs, \"; \"))\n\t}\n\n\treturn nil\n}", "func (d *portworx) ValidateVolumeTopology(vol *api.Volume, params map[string]string) error {\n\tvar topoMatches bool\n\tvar err error\n\tzone := params[torpedok8s.TopologyZoneK8sNodeLabel]\n\tnodes := node.GetNodesByTopologyZoneLabel(zone)\n\tfor _, node := range nodes {\n\t\tif topoMatches, err = d.isVolumeAttachedOnNode(vol, node); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif topoMatches {\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn &ErrCsiTopologyMismatch{\n\t\tVolName: vol.Locator.Name,\n\t\tCause: fmt.Errorf(\"volume [%s] is not attched on nodes with topology label [%s]\", vol.Id, zone),\n\t}\n}", "func (m *NodeTopo) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif 
err := m.validateBrickTopo(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateCluster(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateClusterTopo(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateHost(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateLocalID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateName(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validatePosition(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (ns NodeSolver) Validate() bool {\n\treturn config.ValidateNodeSolver(&ns)\n}", "func ValidateNodeSolver(ns RextNodeSolver) (hasError bool) {\n\tif len(ns.GetNodePath()) == 0 {\n\t\thasError = true\n\t\tlog.Errorln(\"node path is required in node solver config\")\n\t}\n\treturn hasError\n}", "func GetNodeTopology(ctx context.Context, topologyClient *topologyclientset.Clientset, nodeName string) *v1alpha2.NodeResourceTopology {\n\tvar nodeTopology *v1alpha2.NodeResourceTopology\n\tvar err error\n\tgomega.EventuallyWithOffset(1, func() bool {\n\t\tnodeTopology, err = topologyClient.TopologyV1alpha2().NodeResourceTopologies().Get(ctx, nodeName, metav1.GetOptions{})\n\t\tif err != nil {\n\t\t\tframework.Logf(\"failed to get the node topology resource: %v\", err)\n\t\t\treturn false\n\t\t}\n\t\treturn true\n\t}, time.Minute, 5*time.Second).Should(gomega.BeTrue())\n\treturn nodeTopology\n}", "func (r *reconciler) shouldEnableTopologyAwareHints(dns *operatorv1.DNS) (bool, error) {\n\tvar nodesList corev1.NodeList\n\tif err := r.cache.List(context.TODO(), &nodesList); err != nil {\n\t\treturn false, err\n\t}\n\tnodes := 0\n\tfor i := range nodesList.Items {\n\t\tif 
ignoreNodeForTopologyAwareHints(&nodesList.Items[i]) {\n\t\t\tcontinue\n\t\t}\n\t\tif !nodeIsValidForTopologyAwareHints(&nodesList.Items[i]) {\n\t\t\treturn false, nil\n\t\t}\n\t\tnodes++\n\t}\n\n\treturn nodes >= 2, nil\n}", "func (m *NodeTopoHost) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateName(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (element *ElasticsearchTopologyElement) Validate() error {\n\tvar merr = multierror.NewPrefixed(\"elasticsearch topology\")\n\tif element.NodeType == \"\" {\n\t\tmerr = merr.Append(errors.New(\"node_type cannot be empty\"))\n\t}\n\n\tif element.Size == 0 {\n\t\tmerr = merr.Append(errors.New(\"size cannot be empty\"))\n\t}\n\n\treturn merr.ErrorOrNil()\n}", "func (t *Tainter) IsNodeTainted(ctx context.Context) (bool, error) {\n\tnode, err := t.client.CoreV1().Nodes().Get(ctx, t.nodeName, metav1.GetOptions{})\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tfor i := range node.Spec.Taints {\n\t\tif node.Spec.Taints[i].Key == TaintKey {\n\t\t\treturn true, nil\n\t\t}\n\t}\n\n\treturn false, nil\n}", "func ValidateHost(host string, config *Config) error {\n\t_, ok := config.Topology[host]\n\tif !ok {\n\t\treturn fmt.Errorf(\"unable to find info for host %q in config file\", host)\n\t}\n\treturn nil\n}", "func (g *GuidePost) validateNodeConfig(q *msg.Request) (bool, error) {\n\tif q.Node.Config == nil {\n\t\treturn false, fmt.Errorf(\"NodeConfig subobject missing\")\n\t}\n\treturn g.validateBucketInRepository(\n\t\tq.Node.Config.RepositoryID,\n\t\tq.Node.Config.BucketID,\n\t)\n}", "func (s *NodeSystem) IsValid() (bool, []error) {\n\terrors := make([]error, 0)\n\terrors = append(errors, checkForOrphanMultiBranchesNode(s)...)\n\terrors = append(errors, 
checkForCyclicRedundancyInNodeLinks(s)...)\n\terrors = append(errors, checkForUndeclaredNodeInNodeLink(s)...)\n\terrors = append(errors, checkForMultipleInstanceOfSameNode(s)...)\n\terrors = append(errors, checkForMultipleLinksToNodeWithoutJoinMode(s)...)\n\n\tif len(errors) == 0 {\n\t\treturn true, nil\n\t}\n\treturn false, errors\n}", "func CheckTopologyChanges(rcc *CassandraClusterReconciler, cc *api.CassandraCluster,\n\tstatus *api.CassandraClusterStatus, oldCRD *api.CassandraCluster) (bool, string) {\n\n\tchangelog, _ := diff.Diff(oldCRD.Spec.Topology, cc.Spec.Topology)\n\n\tif hasChange(changelog, diff.UPDATE) ||\n\t\thasChange(changelog, diff.DELETE, \"DC.Rack\", \"-DC\") ||\n\t\thasChange(changelog, diff.CREATE, \"DC.Rack\", \"-DC\") {\n\t\tlogrus.WithFields(logrus.Fields{\"cluster\": cc.Name}).Warningf(\n\t\t\ttopologyChangeRefused+\"No change other than adding/removing a DC can happen: %v restored to %v\",\n\t\t\tcc.Spec.Topology, oldCRD.Spec.Topology)\n\t\treturn true, api.ActionCorrectCRDConfig.Name\n\t}\n\n\tif cc.GetDCSize() < oldCRD.GetDCSize()-1 {\n\t\tlogrus.WithFields(logrus.Fields{\"cluster\": cc.Name}).Warningf(\n\t\t\ttopologyChangeRefused+\"You can only remove 1 DC at a time, \"+\n\t\t\t\t\"not only a Rack: %v restored to %v\", cc.Spec.Topology, oldCRD.Spec.Topology)\n\t\treturn true, api.ActionCorrectCRDConfig.Name\n\t}\n\n\tif cc.GetDCRackSize() < oldCRD.GetDCRackSize() {\n\n\t\tif cc.Status.LastClusterAction == api.ActionScaleDown.Name &&\n\t\t\tcc.Status.LastClusterActionStatus != api.StatusDone {\n\t\t\tlogrus.WithFields(logrus.Fields{\"cluster\": cc.Name}).\n\t\t\t\tWarningf(topologyChangeRefused +\n\t\t\t\t\t\"You must wait to the end of ScaleDown to 0 before deleting a DC\")\n\t\t\treturn true, api.ActionCorrectCRDConfig.Name\n\t\t}\n\n\t\tdcName := cc.GetRemovedDCName(oldCRD)\n\n\t\t//We need to check how many nodes were in the old CRD (before the user delete it)\n\t\tif found, nbNodes := oldCRD.GetDCNodesPerRacksFromName(dcName); 
found && nbNodes > 0 {\n\t\t\tlogrus.WithFields(logrus.Fields{\"cluster\": cc.Name}).\n\t\t\t\tWarningf(topologyChangeRefused+\n\t\t\t\t\t\"You must scale down the DC %s to 0 before deleting it\", dcName)\n\t\t\treturn true, api.ActionCorrectCRDConfig.Name\n\t\t}\n\n\t\tlogrus.WithFields(logrus.Fields{\"cluster\": cc.Name}).Warningf(\"Removing DC %s\", dcName)\n\n\t\t//We apply this change to the Cluster status\n\t\treturn rcc.deleteDCObjects(cc, status)\n\t}\n\n\treturn false, \"\"\n}", "func (m *NodeTopoBrickTopo) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateName(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func ExampleSurface_ValidateNode() {\n\tsurface := NewSurface(10)\n\tnode := NewNode(15, 5, true)\n\tif err := surface.ValidateNode(node); err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\t// Output: Invalid/unsupported x-axis value supplied!\n}", "func (s *Layer) IsFailedNode(nodeID byte) (failed bool, err error) {\n\n\tdone := make(chan *frame.Frame)\n\n\trequest := &session.Request{\n\t\tFunctionID: protocol.FnIsNodeFailed,\n\t\tPayload: []byte{nodeID},\n\t\tHasReturn: true,\n\t\tReturnCallback: func(err error, ret *frame.Frame) bool {\n\t\t\tdone <- ret\n\t\t\treturn false\n\t\t},\n\t}\n\n\ts.sessionLayer.MakeRequest(request)\n\tret := <-done\n\n\tif ret == nil {\n\t\terr = errors.New(\"Error checking failure status\")\n\t\treturn\n\t}\n\n\tfailed = ret.Payload[1] == 1\n\n\treturn\n}", "func ValidateTopologyKey(fldPath *field.Path, topologyKey string) *field.Error {\n\tif len(topologyKey) == 0 {\n\t\treturn field.Required(fldPath, \"can not be empty\")\n\t}\n\treturn nil\n}", "func (b *Block) ValidateMerkleRoot() bool {\n\tif b.TxHashes == nil {\n\t\treturn false\n\t}\n\tnumHashes := len(b.TxHashes)\n\thashes := make([][]byte, 
numHashes)\n\t// Reverse each item in b.TxHashses\n\tfor i, hash := range b.TxHashes {\n\t\thashes[i] = make([]byte, len(hash))\n\t\tcopy(hashes[i], hash)\n\t\tutil.ReverseByteArray(hashes[i])\n\t}\n\troot := util.MerkleRoot(hashes)\n\tutil.ReverseByteArray(root)\n\treturn bytes.Equal(root, b.MerkleRoot[:])\n}", "func (m *NodeTopoClusterTopo) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateName(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *NodeTopo) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateBrickTopo(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateCluster(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateClusterTopo(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateHost(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidatePosition(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *TelemetryV2StackDriverConfig) GetTopology() *types.BoolValue {\n\tif m != nil {\n\t\treturn m.Topology\n\t}\n\treturn nil\n}", "func (m *NodeTopoPosition) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (r *Cluster) IsSchemaRegistryTLSEnabled() bool {\n\treturn r != nil &&\n\t\tr.Spec.Configuration.SchemaRegistry != nil &&\n\t\tr.Spec.Configuration.SchemaRegistry.TLS != nil &&\n\t\tr.Spec.Configuration.SchemaRegistry.TLS.Enabled\n}", "func (m *NodeStateNode) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateReadonly(formats); err != nil 
{\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateServicelight(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateSmartfail(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (c *ClusterConfig) HasWindowsNodeGroup() bool {\n\tfor _, ng := range c.NodeGroups {\n\t\tif IsWindowsImage(ng.AMIFamily) {\n\t\t\treturn true\n\t\t}\n\t}\n\tfor _, ng := range c.ManagedNodeGroups {\n\t\tif IsWindowsImage(ng.AMIFamily) {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func (m *ClusterNodeStateSmartfail) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (pt MDTurbo) Validate() bool {\n\tif pt.Magic != 52426 {\n\t\treturn false\n\t}\n\tif pt.Partitions1[0].Start != 256 {\n\t\treturn false\n\t}\n\treturn true\n}", "func ValidateTopologySpreadConstraints(constraints []core.TopologySpreadConstraint, fldPath *field.Path) field.ErrorList {\n\tallErrs := field.ErrorList{}\n\n\tfor i, constraint := range constraints {\n\t\tsubFldPath := fldPath.Index(i)\n\t\tif err := ValidateMaxSkew(subFldPath.Child(\"maxSkew\"), constraint.MaxSkew); err != nil {\n\t\t\tallErrs = append(allErrs, err)\n\t\t}\n\t\tif err := ValidateTopologyKey(subFldPath.Child(\"topologyKey\"), constraint.TopologyKey); err != nil {\n\t\t\tallErrs = append(allErrs, err)\n\t\t}\n\t\tif err := ValidateWhenUnsatisfiable(subFldPath.Child(\"whenUnsatisfiable\"), constraint.WhenUnsatisfiable); err != nil {\n\t\t\tallErrs = append(allErrs, err)\n\t\t}\n\t\t// tuple {topologyKey, whenUnsatisfiable} denotes one kind of spread constraint\n\t\tif err := ValidateSpreadConstraintNotRepeat(subFldPath.Child(\"{topologyKey, whenUnsatisfiable}\"), constraint, constraints[i+1:]); err != nil {\n\t\t\tallErrs = append(allErrs, err)\n\t\t}\n\t\tallErrs = append(allErrs, validateMinDomains(subFldPath.Child(\"minDomains\"), constraint.MinDomains, 
constraint.WhenUnsatisfiable)...)\n\t}\n\n\treturn allErrs\n}", "func (n *Node) IsControlPlane() bool {\n\treturn n.Role() == constants.ControlPlaneNodeRoleValue\n}", "func (n *Node) IsValid() bool {\n\n\t// obviously a port number of zero won't work\n\tif n.TcpAddr.Port == 0 {\n\t\treturn false\n\t}\n\n\treturn true\n}", "func (ctl *Ctl) SpecIsValid() (bool, error) {\n\treturn true, nil\n}", "func (ctl *Ctl) SpecIsValid() (bool, error) {\n\treturn true, nil\n}", "func (m *NodeTopoCluster) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateName(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (n *Node) IsControlPlane() bool {\n\treturn n.Role == ControlPlaneRole\n}", "func (client Client) CheckRoute(domainGUID string, hostname string, path string, port int) (bool, Warnings, error) {\n\tvar query []Query\n\n\tif hostname != \"\" {\n\t\tquery = append(query, Query{Key: HostFilter, Values: []string{hostname}})\n\t}\n\n\tif path != \"\" {\n\t\tquery = append(query, Query{Key: PathFilter, Values: []string{path}})\n\t}\n\n\tif port != 0 {\n\t\tquery = append(query, Query{Key: PortFilter, Values: []string{fmt.Sprintf(\"%d\", port)}})\n\t}\n\n\tvar responseBody struct {\n\t\tMatchingRoute bool `json:\"matching_route\"`\n\t}\n\n\t_, warnings, err := client.MakeRequest(RequestParams{\n\t\tRequestName: internal.GetDomainRouteReservationsRequest,\n\t\tURIParams: internal.Params{\"domain_guid\": domainGUID},\n\t\tQuery: query,\n\t\tResponseBody: &responseBody,\n\t})\n\n\treturn responseBody.MatchingRoute, warnings, err\n}", "func (m *Mesh) CheckSanity() bool {\n\treturn m.faceStructure.checkSanity(uint32(len(m.Nodes))) && m.beamLattice.checkSanity(uint32(len(m.Nodes)))\n}", "func (c *LockReleaseController) verifyNodeExists(node *corev1.Node, 
expectedGCEInstanceID, expectedNodeInternalIP string) (bool, error) {\n\tif node == nil {\n\t\treturn false, nil\n\t}\n\tif node.Annotations == nil {\n\t\treturn false, fmt.Errorf(\"node %s annotations is nil\", node.Name)\n\t}\n\tinstanceID, ok := node.Annotations[gceInstanceIDKey]\n\tif !ok {\n\t\tklog.Warningf(\"Node %s missing key %s in node.annotations\", node.Name, gceInstanceIDKey)\n\t\treturn false, nil\n\t}\n\tif instanceID != expectedGCEInstanceID {\n\t\treturn false, nil\n\t}\n\tfor _, address := range node.Status.Addresses {\n\t\tif address.Type == corev1.NodeInternalIP && address.Address == expectedNodeInternalIP {\n\t\t\treturn true, nil\n\t\t}\n\t}\n\treturn false, nil\n}", "func (c GlobalConfig) IsNode() bool {\n\treturn RunMode(c.OBSMode).IsNode()\n}", "func IsValidKernelParams(s string) bool {\n\treturn reValidKernelParams.MatchString(s)\n}", "func (r *Cluster) IsSchemaRegistryMutualTLSEnabled() bool {\n\treturn r != nil &&\n\t\tr.IsSchemaRegistryTLSEnabled() &&\n\t\tr.Spec.Configuration.SchemaRegistry.TLS.RequireClientAuth\n}", "func (t *OnfSwitch_Switch_Port) ΛValidate(opts ...ygot.ValidationOption) error {\n\tif err := ytypes.Validate(SchemaTree[\"OnfSwitch_Switch_Port\"], t, opts...); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func getValidTopology(topologyMap map[string][]string) ([]string, []string) {\n\tvar regionValues []string\n\tvar zoneValues []string\n\tfor region, zones := range topologyMap {\n\t\tregionValues = append(regionValues, region)\n\t\tzoneValues = append(zoneValues, zones...)\n\t}\n\treturn regionValues, zoneValues\n}", "func (c Config) IsValid() bool {\n\treturn c.RandomTemperature > 0 && c.NumSimulation > 0\n}", "func (r *GlobalDNSZoneReconciler) IsValid(obj metav1.Object) (bool, error) {\n\tglobalDNSZone, ok := obj.(*redhatcopv1alpha1.GlobalDNSZone)\n\tif !ok {\n\t\treturn false, errs.New(\"unable to convert to GlobalDNSZone\")\n\t}\n\tfoundProviderDefinition := false\n\tif globalDNSZone.Spec.Provider.Route53 
!= nil {\n\t\tif foundProviderDefinition {\n\t\t\treturn false, errs.New(\"only one of the provider type field can be defined\")\n\t\t}\n\t\tfoundProviderDefinition = true\n\t\t// for route53 we just need to verify that the Zone is and that it controls the defined domain\n\t\troute53Client, err := tpdroute53.GetRoute53Client(context.TODO(), globalDNSZone, &r.ReconcilerBase)\n\t\tif err != nil {\n\t\t\tr.Log.Error(err, \"unable to retrieve route53 client for\", \"globalDNSZone\", globalDNSZone)\n\t\t\treturn false, err\n\t\t}\n\t\tinput := &route53.GetHostedZoneInput{\n\t\t\tId: aws.String(globalDNSZone.Spec.Provider.Route53.ZoneID),\n\t\t}\n\t\tresult, err := route53Client.GetHostedZone(input)\n\t\tif err != nil {\n\t\t\tr.Log.Error(err, \"unable to retrieve hosted zone with\", \"ZoneID\", globalDNSZone.Spec.Provider.Route53.ZoneID)\n\t\t\treturn false, err\n\t\t}\n\t\tif *result.HostedZone.Name != (globalDNSZone.Spec.Domain + \".\") {\n\t\t\terr := errs.New(\"route53 hosted zone name does not match global dns name\")\n\t\t\tr.Log.Error(err, \"aws route53 hosted zone\", \"name\", result.HostedZone.Name, \"does not match this global DNS zone name\", globalDNSZone.Spec.Domain+\".\")\n\t\t\treturn false, err\n\t\t}\n\t}\n\treturn true, nil\n}", "func (r *ResourceSpec) Valid() bool {\n\tif r.Name == \"\" {\n\t\tfmt.Println(\"no resource spec label\")\n\t\treturn false\n\t}\n\n\tfor _, c := range r.Credentials {\n\t\tif !c.Valid() {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func (s *SyncerClient) SupportsNodeResourceUpdates(timeout time.Duration) (bool, error) {\n\t// If a previous call has already marked the handshake as complete, then just return the value.\n\tif s.handshakeStatus.complete {\n\t\treturn s.supportsNodeResourceUpdates, nil\n\t}\n\n\tselect {\n\tcase <-s.handshakeStatus.helloReceivedChan:\n\t\ts.logCxt.Debug(\"Received MsgServerHello from server\")\n\t\ts.handshakeStatus.complete = true\n\t\treturn s.supportsNodeResourceUpdates, nil\n\tcase 
<-time.After(timeout):\n\t\t// fallthrough\n\t}\n\n\treturn false, fmt.Errorf(\"Timed out waiting for handshake to complete\")\n}", "func (tee *Tee) ValidateShard(ctx context.Context, keyspace, shard string) error {\n\terr := tee.primary.ValidateShard(ctx, keyspace, shard)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := tee.secondary.ValidateShard(ctx, keyspace, shard); err != nil {\n\t\t// not critical enough to fail\n\t\tlog.Warningf(\"secondary.ValidateShard(%v,%v) failed: %v\", keyspace, shard, err)\n\t}\n\treturn nil\n}", "func (r *Resource) Valid() bool {\n\tif r.Spec == nil {\n\t\tfmt.Println(\"no resource spec\")\n\t\treturn false\n\t}\n\n\treturn r.Spec.Valid()\n}", "func (m *HyperflexNode) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\t// validation for a type composition with MoBaseMo\n\tif err := m.MoBaseMo.Validate(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateCluster(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateClusterMember(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateIdentity(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateIP(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validatePhysicalServer(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateRole(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateStatus(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func IsValidPerformanceMode(performanceMode string, configType ConfigType) bool {\n\tif performanceMode == PerformanceModeEmpty {\n\t\treturn true\n\t}\n\n\tfor _, o := range ValidPerformanceModeOptions(configType) {\n\t\tif performanceMode == o {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}", "func WithTopologyNodeZone(nodeZone string) 
Option {\n\treturn makeOption(func(p *proxy) error {\n\t\tp.nodeZone = nodeZone\n\t\treturn nil\n\t})\n}", "func (m *NodeTopoHost) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *NodeInfo) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateBootTime(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateLastUpdatedDate(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validatePluginsInfo(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *AzureManagedControlPlane) validateMaxNodeProvisionTime() field.ErrorList {\n\tvar allErrs field.ErrorList\n\tif ptr.Deref(m.Spec.AutoScalerProfile.MaxNodeProvisionTime, \"\") != \"\" {\n\t\tif !rMaxNodeProvisionTime.MatchString(ptr.Deref(m.Spec.AutoScalerProfile.MaxNodeProvisionTime, \"\")) {\n\t\t\tallErrs = append(allErrs, field.Invalid(field.NewPath(\"Spec\", \"AutoscalerProfile\", \"MaxNodeProvisionTime\"), m.Spec.AutoScalerProfile.MaxNodeProvisionTime, \"invalid value\"))\n\t\t}\n\t}\n\treturn allErrs\n}", "func (proc *Processor) ValidateTimestamp(hornetTx *hornet.Transaction) (valid, broadcast bool) {\n\tsnapshotTimestamp := tangle.GetSnapshotInfo().Timestamp\n\ttxTimestamp := hornetTx.GetTimestamp()\n\n\tpastTime := time.Now().Add(-10 * time.Minute).Unix()\n\tfutureTime := time.Now().Add(10 * time.Minute).Unix()\n\n\t// we need to accept all txs since the snapshot timestamp for synchronization\n\tif txTimestamp >= snapshotTimestamp && txTimestamp < futureTime {\n\t\treturn true, txTimestamp >= pastTime\n\t}\n\n\t// ignore invalid timestamps for solid entry points\n\treturn tangle.SolidEntryPointsContain(hornetTx.GetTxHash()), false\n}", "func TestValidateRoute(t *testing.T) {\n\ttests := []struct {\n\t\tname string\n\t\thost string\n\t\tallowNonCompliant string\n\t\texpectedErrors 
int\n\t}{\n\t\t{\n\t\t\tname: \"Non-DNS-compliant host with non-compliance allowed\",\n\t\t\thost: \"host\",\n\t\t\tallowNonCompliant: \"true\",\n\t\t\texpectedErrors: 0,\n\t\t},\n\t\t{\n\t\t\tname: \"Non-DNS-compliant host with non-compliance not allowed\",\n\t\t\thost: \"host\",\n\t\t\tallowNonCompliant: \"false\",\n\t\t\texpectedErrors: 1,\n\t\t},\n\t\t{\n\t\t\tname: \"Non-DNS-compliant host without non-compliance annotation\",\n\t\t\thost: \"host\",\n\t\t\texpectedErrors: 1,\n\t\t},\n\t\t{\n\t\t\tname: \"Specified label too long\",\n\t\t\thost: \"1234567890-1234567890-1234567890-1234567890-1234567890-123456789.host.com\",\n\t\t\tallowNonCompliant: \"\",\n\t\t\texpectedErrors: 1,\n\t\t},\n\t\t{\n\t\t\tname: \"Specified label too long, is not an error with non-compliance allowed\",\n\t\t\thost: \"1234567890-1234567890-1234567890-1234567890-1234567890-123456789.host.com\",\n\t\t\tallowNonCompliant: \"true\",\n\t\t\texpectedErrors: 0,\n\t\t},\n\t\t{\n\t\t\tname: \"Specified host name too long\",\n\t\t\thost: \"1234567890-1234567890-1234567890-1234567890-1234567890.\" +\n\t\t\t\t\"1234567890-1234567890-1234567890-1234567890-1234567890.\" +\n\t\t\t\t\"1234567890-1234567890-1234567890-1234567890-1234567890.\" +\n\t\t\t\t\"1234567890-1234567890-1234567890-1234567890-1234567890.\" +\n\t\t\t\t\"1234567890-1234567890-1234567890-1\",\n\t\t\tallowNonCompliant: \"\",\n\t\t\texpectedErrors: 1,\n\t\t},\n\t\t{\n\t\t\tname: \"Specified host name too long, is still an error even with non-compliance allowed\",\n\t\t\thost: \"1234567890-1234567890-1234567890-1234567890-1234567890.\" +\n\t\t\t\t\"1234567890-1234567890-1234567890-1234567890-1234567890.\" +\n\t\t\t\t\"1234567890-1234567890-1234567890-1234567890-1234567890.\" +\n\t\t\t\t\"1234567890-1234567890-1234567890-1234567890-1234567890.\" +\n\t\t\t\t\"1234567890-1234567890-1234567890-1\",\n\t\t\tallowNonCompliant: \"true\",\n\t\t\texpectedErrors: 1,\n\t\t},\n\t\t{\n\t\t\tname: \"No host\",\n\t\t\thost: 
\"\",\n\t\t\tallowNonCompliant: \"\",\n\t\t\texpectedErrors: 1,\n\t\t},\n\t\t{\n\t\t\tname: \"Invalid DNS 952 host\",\n\t\t\thost: \"**\",\n\t\t\tallowNonCompliant: \"\",\n\t\t\texpectedErrors: 1,\n\t\t},\n\t}\n\n\tfor _, tc := range tests {\n\t\terrs := ValidateHost(tc.host, tc.allowNonCompliant, field.NewPath(\"spec.host\"))\n\t\tif len(errs) != tc.expectedErrors {\n\t\t\tt.Errorf(\"Test case %q expected %d error(s), got %d: %v\", tc.name, tc.expectedErrors, len(errs), errs)\n\t\t}\n\t}\n}", "func validateNtpOnCluster(ntpObj ntpTest) {\n\tBy(fmt.Sprintf(\"ts:%s Validating Cluster\", time.Now().String()))\n\n\tBy(fmt.Sprintf(\"Validates NTP config file on Quorum Nodes\"))\n\tfor _, qnode := range ts.tu.QuorumNodes {\n\t\tip := ts.tu.NameToIPMap[qnode]\n\t\tif ip == ntpObj.oldLeaderIP {\n\t\t\tcontinue // skip validation as cmd is paused on that node\n\t\t}\n\t\tvar ntpServers []string\n\t\tif ip == ntpObj.ntpLeaderIP {\n\t\t\tntpServers = ntpObj.externalNtpServers\n\t\t} else {\n\t\t\tntpServers = []string{ntpObj.ntpLeaderIP}\n\t\t}\n\n\t\tEventually(func() bool {\n\t\t\tntpConf := ts.tu.CommandOutput(ip, \"bash -c 'if [ -f /etc/pensando/pen-ntp/chrony.conf ] ; then cat /etc/pensando/pen-ntp/chrony.conf; fi' \")\n\t\t\tif strings.Count(ntpConf, \"server \") == len(ntpServers) {\n\t\t\t\tfor _, ntpServer := range ntpServers {\n\t\t\t\t\tif strings.Index(ntpConf, \"server \"+ntpServer+\" iburst\") == -1 {\n\t\t\t\t\t\tBy(fmt.Sprintf(\"%v not present in config. 
found %v\", ntpServer, ntpConf))\n\t\t\t\t\t\treturn false\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn true\n\t\t\t}\n\t\t\tBy(fmt.Sprintf(\"ntpserver: %v ntpconf: %v\", ntpServers, ntpConf))\n\t\t\treturn false\n\t\t}, 75, 5).Should(BeTrue(), \"NTP servers for %v quorum node should be %v\", qnode, ntpServers)\n\t}\n}", "func IsNodeCordoned(node *v1.Node) (bool, time.Time) {\n\tif node.Spec.Unschedulable {\n\t\tfor _, taint := range node.Spec.Taints {\n\t\t\tif taint.Key == v1.TaintNodeUnschedulable {\n\t\t\t\tif taint.TimeAdded != nil {\n\t\t\t\t\treturn true, taint.TimeAdded.Time\n\t\t\t\t}\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\treturn true, time.Time{}\n\t}\n\treturn false, time.Time{}\n}", "func ValidateClusterSpec(spec *kubermaticv1.ClusterSpec, dc *kubermaticv1.Datacenter, enabledFeatures features.FeatureGate, versionManager *version.Manager, currentVersion *semver.Semver, parentFieldPath *field.Path) field.ErrorList {\n\tallErrs := field.ErrorList{}\n\n\tif spec.HumanReadableName == \"\" {\n\t\tallErrs = append(allErrs, field.Required(parentFieldPath.Child(\"humanReadableName\"), \"no name specified\"))\n\t}\n\n\tif err := ValidateVersion(spec, versionManager, currentVersion, parentFieldPath.Child(\"version\")); err != nil {\n\t\tallErrs = append(allErrs, err)\n\t}\n\n\t// Validate if container runtime is valid for this cluster (in particular this checks for docker support).\n\tif err := ValidateContainerRuntime(spec); err != nil {\n\t\tallErrs = append(allErrs, field.Invalid(parentFieldPath.Child(\"containerRuntime\"), spec.ContainerRuntime, fmt.Sprintf(\"failed to validate container runtime: %s\", err)))\n\t}\n\n\tif !kubermaticv1.AllExposeStrategies.Has(spec.ExposeStrategy) {\n\t\tallErrs = append(allErrs, field.NotSupported(parentFieldPath.Child(\"exposeStrategy\"), spec.ExposeStrategy, kubermaticv1.AllExposeStrategies.Items()))\n\t}\n\n\t// Validate APIServerAllowedIPRanges for LoadBalancer expose strategy\n\tif spec.ExposeStrategy != 
kubermaticv1.ExposeStrategyLoadBalancer && spec.APIServerAllowedIPRanges != nil && len(spec.APIServerAllowedIPRanges.CIDRBlocks) > 0 {\n\t\tallErrs = append(allErrs, field.Forbidden(parentFieldPath.Child(\"APIServerAllowedIPRanges\"), \"Access control for API server is supported only for LoadBalancer expose strategy\"))\n\t}\n\n\t// Validate TunnelingAgentIP for Tunneling Expose strategy\n\tif spec.ExposeStrategy != kubermaticv1.ExposeStrategyTunneling && spec.ClusterNetwork.TunnelingAgentIP != \"\" {\n\t\tallErrs = append(allErrs, field.Forbidden(parentFieldPath.Child(\"TunnelingAgentIP\"), \"Tunneling agent IP can be configured only for Tunneling Expose strategy\"))\n\t}\n\n\t// External CCM is not supported for all providers and all Kubernetes versions.\n\tif spec.Features[kubermaticv1.ClusterFeatureExternalCloudProvider] {\n\t\tif !resources.ExternalCloudControllerFeatureSupported(dc, &spec.Cloud, spec.Version, versionManager.GetIncompatibilities()...) {\n\t\t\tallErrs = append(allErrs, field.Invalid(parentFieldPath.Child(\"features\").Key(kubermaticv1.ClusterFeatureExternalCloudProvider), true, \"external cloud-controller-manager is not supported for this cluster / provider combination\"))\n\t\t}\n\t}\n\n\tif spec.CNIPlugin != nil {\n\t\tif !cni.GetSupportedCNIPlugins().Has(spec.CNIPlugin.Type.String()) {\n\t\t\tallErrs = append(allErrs, field.NotSupported(parentFieldPath.Child(\"cniPlugin\", \"type\"), spec.CNIPlugin.Type.String(), sets.List(cni.GetSupportedCNIPlugins())))\n\t\t} else if versions, err := cni.GetAllowedCNIPluginVersions(spec.CNIPlugin.Type); err != nil || !versions.Has(spec.CNIPlugin.Version) {\n\t\t\tallErrs = append(allErrs, field.NotSupported(parentFieldPath.Child(\"cniPlugin\", \"version\"), spec.CNIPlugin.Version, sets.List(versions)))\n\t\t}\n\n\t\t// Dual-stack is not supported on Canal < v3.22\n\t\tif spec.ClusterNetwork.IPFamily == kubermaticv1.IPFamilyDualStack && spec.CNIPlugin.Type == kubermaticv1.CNIPluginTypeCanal 
{\n\t\t\tgte322Constraint, _ := semverlib.NewConstraint(\">= 3.22\")\n\t\t\tcniVer, _ := semverlib.NewVersion(spec.CNIPlugin.Version)\n\t\t\tif cniVer != nil && !gte322Constraint.Check(cniVer) {\n\t\t\t\tallErrs = append(allErrs, field.Forbidden(parentFieldPath.Child(\"cniPlugin\"), \"dual-stack not allowed on Canal CNI version lower than 3.22\"))\n\t\t\t}\n\t\t}\n\t}\n\n\tallErrs = append(allErrs, ValidateLeaderElectionSettings(&spec.ComponentsOverride.ControllerManager.LeaderElectionSettings, parentFieldPath.Child(\"componentsOverride\", \"controllerManager\", \"leaderElection\"))...)\n\tallErrs = append(allErrs, ValidateLeaderElectionSettings(&spec.ComponentsOverride.Scheduler.LeaderElectionSettings, parentFieldPath.Child(\"componentsOverride\", \"scheduler\", \"leaderElection\"))...)\n\n\texternalCCM := false\n\tif val, ok := spec.Features[kubermaticv1.ClusterFeatureExternalCloudProvider]; ok {\n\t\texternalCCM = val\n\t}\n\n\t// general cloud spec logic\n\tif errs := ValidateCloudSpec(spec.Cloud, dc, spec.ClusterNetwork.IPFamily, parentFieldPath.Child(\"cloud\"), externalCCM); len(errs) > 0 {\n\t\tallErrs = append(allErrs, errs...)\n\t}\n\n\tif errs := validateMachineNetworksFromClusterSpec(spec, parentFieldPath); len(errs) > 0 {\n\t\tallErrs = append(allErrs, errs...)\n\t}\n\n\tif errs := ValidateClusterNetworkConfig(&spec.ClusterNetwork, dc, spec.CNIPlugin, parentFieldPath.Child(\"networkConfig\")); len(errs) > 0 {\n\t\tallErrs = append(allErrs, errs...)\n\t}\n\n\tportRangeFld := field.NewPath(\"componentsOverride\", \"apiserver\", \"nodePortRange\")\n\tif err := ValidateNodePortRange(spec.ComponentsOverride.Apiserver.NodePortRange, portRangeFld); err != nil {\n\t\tallErrs = append(allErrs, err)\n\t}\n\n\tif errs := validateEncryptionConfiguration(spec, parentFieldPath.Child(\"encryptionConfiguration\")); len(errs) > 0 {\n\t\tallErrs = append(allErrs, errs...)\n\t}\n\n\treturn allErrs\n}", "func (manager *MetricsCacheManager) IsSystemInfoMetricsValid() bool 
{\n\tretention := manager.GetRetention()\n\n\tmanager.systemInfoMetricsMtx.RLock()\n\tdefer manager.systemInfoMetricsMtx.RUnlock()\n\n\treturn (!manager.systemInfoMetricsInvalid) &&\n\t\t(manager.systemInfoMetrics != nil) &&\n\t\t(time.Since(manager.systemInfoMetricsLastUpdatedTime) < retention)\n}", "func (r *Reservation) Validate() bool {\n\tr.Errors = make(map[string]string)\n\n\tif r.Start >= r.End {\n\t\tr.Errors[\"End\"] = \"End Time must be greater than Start Time\"\n\t}\n\n\tsession, err := mgo.Dial(os.Getenv(\"MONGODB_URI\"))\n\tutil.Check(err)\n\tdefer session.Close()\n\tc := session.DB(os.Getenv(\"MONGODB_DB\")).C(COLLECTION)\n\tvar results []Reservation\n\terr = c.Find(bson.M{\"month\": r.Month, \"day\": r.Day, \"year\": r.Year, \"location\": r.Location}).All(&results)\n\tutil.Check(err)\n\tfor _, reservation := range results {\n\t\tif r.End <= reservation.Start {\n\t\t\tcontinue\n\t\t}\n\t\tif r.Start >= reservation.End {\n\t\t\tcontinue\n\t\t}\n\t\ts := fmt.Sprintf(\"Reservation already booked for %s on %s from %s - %s\", reservation.Location.Name, reservation.Date(), reservation.StartTime(), reservation.EndTime())\n\t\tid := fmt.Sprintf(\"%d\", reservation.Id)\n\t\tr.Errors[id] = s\n\t}\n\n\treturn len(r.Errors) == 0\n}", "func IsNodeBeingDeleted(node *v1.Node, cl client.Client) (bool, error) {\n\t// check if node is managed by a cluster API machine and if the machine is marked for deletion\n\tif machineName, present := node.Annotations[constants.AnnotationClusterAPIMachine]; present && len(machineName) > 0 {\n\t\tmachine := &cluster_v1alpha1.Machine{}\n\t\terr := cl.Get(context.TODO(), client.ObjectKey{Name: machineName, Namespace: \"default\"}, machine)\n\t\tif err != nil {\n\t\t\treturn false, fmt.Errorf(\"failed to get machine: default/%s due to: %v\", machineName, err)\n\t\t}\n\n\t\tif machine.GetDeletionTimestamp() != nil {\n\t\t\tlogrus.Debugf(\"machine: %s is being deleted. 
timestamp set: %v.\",\n\t\t\t\tmachineName, machine.GetDeletionTimestamp())\n\t\t\treturn true, nil\n\t\t}\n\t}\n\treturn false, nil\n}", "func CheckRouteRule(routeRule IstioObject, namespace string, serviceName string, version string) bool {\n\tif routeRule == nil || routeRule.GetSpec() == nil {\n\t\treturn false\n\t}\n\tif FilterByDestination(routeRule.GetSpec(), namespace, serviceName, version) {\n\t\t// RouteRule defines a version in the DestinationWeight\n\t\tif routes, ok := routeRule.GetSpec()[\"route\"]; ok {\n\t\t\tif dRoutes, ok := routes.([]interface{}); ok {\n\t\t\t\tfor _, route := range dRoutes {\n\t\t\t\t\tif dRoute, ok := route.(map[string]interface{}); ok {\n\t\t\t\t\t\tif labels, ok := dRoute[\"labels\"]; ok {\n\t\t\t\t\t\t\tif dLabels, ok := labels.(map[string]interface{}); ok {\n\t\t\t\t\t\t\t\tif versionValue, ok := dLabels[\"version\"]; ok && versionValue == version {\n\t\t\t\t\t\t\t\t\treturn true\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn false\n}", "func validateNodeAffinity(na *core.NodeAffinity, fldPath *field.Path) field.ErrorList {\n\tallErrs := field.ErrorList{}\n\t// TODO: Uncomment the next three lines once RequiredDuringSchedulingRequiredDuringExecution is implemented.\n\t// if na.RequiredDuringSchedulingRequiredDuringExecution != nil {\n\t//\tallErrs = append(allErrs, ValidateNodeSelector(na.RequiredDuringSchedulingRequiredDuringExecution, fldPath.Child(\"requiredDuringSchedulingRequiredDuringExecution\"))...)\n\t// }\n\tif na.RequiredDuringSchedulingIgnoredDuringExecution != nil {\n\t\tallErrs = append(allErrs, ValidateNodeSelector(na.RequiredDuringSchedulingIgnoredDuringExecution, fldPath.Child(\"requiredDuringSchedulingIgnoredDuringExecution\"))...)\n\t}\n\tif len(na.PreferredDuringSchedulingIgnoredDuringExecution) > 0 {\n\t\tallErrs = append(allErrs, ValidatePreferredSchedulingTerms(na.PreferredDuringSchedulingIgnoredDuringExecution, 
fldPath.Child(\"preferredDuringSchedulingIgnoredDuringExecution\"))...)\n\t}\n\treturn allErrs\n}", "func (m *Node) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif !_Node_Host_Pattern.MatchString(m.GetHost()) {\n\t\treturn NodeValidationError{\n\t\t\tfield: \"Host\",\n\t\t\treason: \"value does not match regex pattern \\\"^\\\\\\\\*?[0-9a-zA-Z-._]+$\\\"\",\n\t\t}\n\t}\n\n\tif val := m.GetPort(); val < 1 || val > 65535 {\n\t\treturn NodeValidationError{\n\t\t\tfield: \"Port\",\n\t\t\treason: \"value must be inside range [1, 65535]\",\n\t\t}\n\t}\n\n\tif m.GetWeight() < 0 {\n\t\treturn NodeValidationError{\n\t\t\tfield: \"Weight\",\n\t\t\treason: \"value must be greater than or equal to 0\",\n\t\t}\n\t}\n\n\tfor key, val := range m.GetMetadata() {\n\t\t_ = val\n\n\t\t// no validation rules for Metadata[key]\n\n\t\tif v, ok := interface{}(val).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn NodeValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Metadata[%v]\", key),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (f *FilterRouter) ValidRouter(router string) (bool, map[string]string) {\n\tisok, params := f.tree.Match(router)\n\tif isok == nil {\n\t\treturn false, nil\n\t}\n\tif isok, ok := isok.(bool); ok {\n\t\treturn isok, params\n\t} else {\n\t\treturn false, nil\n\t}\n}", "func IsReplicaRebuildingFailed(reusableFailedReplica *longhorn.Replica) bool {\n\treplicaRebuildFailedCondition := types.GetCondition(reusableFailedReplica.Status.Conditions, longhorn.ReplicaConditionTypeRebuildFailed)\n\n\tif replicaRebuildFailedCondition.Status != longhorn.ConditionStatusTrue {\n\t\treturn true\n\t}\n\n\tswitch replicaRebuildFailedCondition.Reason {\n\tcase longhorn.ReplicaConditionReasonRebuildFailedDisconnection, longhorn.NodeConditionReasonManagerPodDown, longhorn.NodeConditionReasonKubernetesNodeGone, 
longhorn.NodeConditionReasonKubernetesNodeNotReady:\n\t\treturn false\n\tdefault:\n\t\treturn true\n\t}\n}", "func (o *V0037JobProperties) HasSocketsPerNode() bool {\n\tif o != nil && o.SocketsPerNode != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (m *socatManager) ValidHost(host string) bool {\n\t_, err := getSource(host)\n\treturn err == nil\n}", "func (m *DrivesDriveFirmwareNodeDrive) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *V0037JobProperties) GetSocketsPerNodeOk() (*int32, bool) {\n\tif o == nil || o.SocketsPerNode == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SocketsPerNode, true\n}", "func (tc *TestConfig) IsValid() bool {\n\treturn tc.APIConfig != nil && tc.err == nil\n}", "func (o *CreateTemplateRequestEntity) HasDisconnectedNodeAcknowledged() bool {\n\tif o != nil && o.DisconnectedNodeAcknowledged != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (c *Config) IsValid() bool {\n\tif c.DiffType == \"schema\" || c.DiffType == \"data\" {\n\t\treturn c.Source != \"\" && c.Target != \"\"\n\t} else if c.DiffType == \"md\" || c.DiffType == \"wiki\" || c.DiffType == \"sql\" {\n\t\treturn c.Source != \"\"\n\t}\n\treturn false\n}", "func (t *OnfSwitchModel_SwitchModel_Port) ΛValidate(opts ...ygot.ValidationOption) error {\n\tif err := ytypes.Validate(SchemaTree[\"OnfSwitchModel_SwitchModel_Port\"], t, opts...); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func isNodeValid(newNode, oldNode Node) bool {\n\t// if oldBlock.Index+1 != newBlock.Index {\n\t// \treturn false\n\t// }\n\n\t// Do a BFS and get the nodes attached to the newNode\n\tif oldNode.Hash != newNode.PrevLeftHash {\n\t\treturn false\n\t}\n\t// if PrevRightNode.Hash != newNode.PrevRightHash {\n\t// \treturn false\n\t// }\n\n\tif calculateHash(newNode) != newNode.Hash {\n\t\treturn false\n\t}\n\n\treturn true\n}", "func ignoreNodeForTopologyAwareHints(node *corev1.Node) bool {\n\treturn nodeHasExcludedLabels(node.Labels) || 
!nodeIsReady(node.Status)\n}", "func (conf *Config) IsSwarmCompatible() error {\n\tif conf.LiveRestoreEnabled {\n\t\treturn fmt.Errorf(\"--live-restore daemon configuration is incompatible with swarm mode\")\n\t}\n\treturn nil\n}", "func ensureTopologyRequirements(ctx context.Context, nodeSelection *e2epod.NodeSelection, cs clientset.Interface, driverInfo *storageframework.DriverInfo, minCount int) error {\n\tnodes, err := e2enode.GetReadySchedulableNodes(ctx, cs)\n\tframework.ExpectNoError(err)\n\tif len(nodes.Items) < minCount {\n\t\te2eskipper.Skipf(fmt.Sprintf(\"Number of available nodes is less than %d - skipping\", minCount))\n\t}\n\n\ttopologyKeys := driverInfo.TopologyKeys\n\tif len(topologyKeys) == 0 {\n\t\t// The driver does not have any topology restrictions\n\t\treturn nil\n\t}\n\n\ttopologyList, topologyCount, err := getCurrentTopologiesNumber(cs, nodes, topologyKeys)\n\tif err != nil {\n\t\treturn err\n\t}\n\tsuitableTopologies := []topology{}\n\tfor i, topo := range topologyList {\n\t\tif topologyCount[i] >= minCount {\n\t\t\tsuitableTopologies = append(suitableTopologies, topo)\n\t\t}\n\t}\n\tif len(suitableTopologies) == 0 {\n\t\te2eskipper.Skipf(\"No topology with at least %d nodes found - skipping\", minCount)\n\t}\n\t// Take the first suitable topology\n\te2epod.SetNodeAffinityTopologyRequirement(nodeSelection, suitableTopologies[0])\n\treturn nil\n}", "func CreateNodeResourceTopologies(ctx context.Context, extClient extclient.Interface) (*apiextensionsv1.CustomResourceDefinition, error) {\n\tcrd, err := NewNodeResourceTopologies()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Delete existing CRD (if any) with this we also get rid of stale objects\n\terr = extClient.ApiextensionsV1().CustomResourceDefinitions().Delete(ctx, crd.Name, metav1.DeleteOptions{})\n\tif err != nil && !errors.IsNotFound(err) {\n\t\treturn nil, fmt.Errorf(\"failed to delete NodeResourceTopology CRD: %w\", err)\n\t}\n\n\t// It takes time for the delete operation, 
wait until the CRD completely gone\n\tif err = wait.PollUntilContextTimeout(ctx, 5*time.Second, 1*time.Minute, true, func(ctx context.Context) (bool, error) {\n\t\t_, err = extClient.ApiextensionsV1().CustomResourceDefinitions().Get(ctx, crd.Name, metav1.GetOptions{})\n\t\tif err == nil {\n\t\t\treturn false, nil\n\t\t}\n\n\t\tif errors.IsNotFound(err) {\n\t\t\treturn true, nil\n\t\t}\n\t\treturn false, err\n\t}); err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to get NodeResourceTopology CRD: %w\", err)\n\t}\n\treturn extClient.ApiextensionsV1().CustomResourceDefinitions().Create(ctx, crd, metav1.CreateOptions{})\n}", "func (s *service) checkIfNodeIsConnected(ctx context.Context, arrayID string, nodeID string, rep *podmon.ValidateVolumeHostConnectivityResponse) error {\n\tctx, log, _ := GetRunidLog(ctx)\n\tlog.Infof(\"Checking if array %s is connected to node %s\", arrayID, nodeID)\n\tvar message string\n\trep.Connected = false\n\n\t// Initialize the Unity client to the 'arrayId' array\n\tctx, _ = setArrayIDContext(ctx, arrayID)\n\tunity, err := GetUnityClient(ctx, s, arrayID)\n\tif err != nil {\n\t\tmessage = fmt.Sprintf(\"Unable to get unity client for topology validation: %v\", err)\n\t\tlog.Info(message)\n\t\trep.Messages = append(rep.Messages, message)\n\t\treturn err\n\t}\n\n\t// Look up the 'nodeId' host on the array\n\thostnames := strings.Split(nodeID, \",\")\n\tshortName := hostnames[0]\n\tlongName := shortName\n\tif len(hostnames) > 1 {\n\t\tlongName = hostnames[1]\n\t}\n\thost, err := GetHostID(ctx, s, arrayID, shortName, longName)\n\tif err != nil {\n\t\tif status.Code(err) == codes.NotFound {\n\t\t\tmessage = fmt.Sprintf(\"Array %s does have any host with name '%s'\", arrayID, nodeID)\n\t\t} else {\n\t\t\tmessage = fmt.Sprintf(\"Host lookup failed. 
Error: %v\", err)\n\t\t}\n\t\tlog.Infof(message)\n\t\trep.Messages = append(rep.Messages, message)\n\t\trep.Connected = false\n\t\treturn nil\n\t}\n\n\t// Search in the list of FC initiators (if any)\n\tfcConnectivity := false\n\tif host != nil && len(host.HostContent.FcInitiators) != 0 {\n\t\tlog.Infof(\"Got FC Initiators, Checking health of initiators:%s\", host.HostContent.FcInitiators)\n\t\tfor _, initiator := range host.HostContent.FcInitiators {\n\t\t\tinitiatorID := initiator.ID\n\t\t\thostInitiator, err := FindHostInitiatorByID(ctx, unity, initiatorID)\n\t\t\tif err != nil {\n\t\t\t\tlog.Infof(\"Unable to get initiators: %s\", err)\n\t\t\t}\n\t\t\tif hostInitiator != nil {\n\t\t\t\thealthContent := hostInitiator.HostInitiatorContent.Health\n\t\t\t\tif healthContent.DescriptionIDs[0] == componentOkMessage {\n\t\t\t\t\tmessage = fmt.Sprintf(\"FC Health is good for array:%s, Health:%s\", arrayID, healthContent.DescriptionIDs[0])\n\t\t\t\t\tlog.Infof(message)\n\t\t\t\t\trep.Messages = append(rep.Messages, message)\n\t\t\t\t\trep.Connected = true\n\t\t\t\t\tfcConnectivity = true\n\t\t\t\t\tbreak\n\t\t\t\t} else {\n\t\t\t\t\tlog.Infof(\"FC Health is bad for array:%s, Health:%s\", arrayID, healthContent.DescriptionIDs[0])\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Search in the list of iSCSI initiators (if any) and there is no connectivity seen through FC\n\tif host != nil && len(host.HostContent.IscsiInitiators) != 0 && !fcConnectivity {\n\t\tlog.Infof(\"Got iSCSI Initiators, Checking health of initiators:%s\", host.HostContent.IscsiInitiators)\n\t\tfor _, initiator := range host.HostContent.IscsiInitiators {\n\t\t\tinitiatorID := initiator.ID\n\t\t\thostInitiator, err := FindHostInitiatorByID(ctx, unity, initiatorID)\n\t\t\tif err != nil {\n\t\t\t\tlog.Infof(\"Unable to get initiators: %s\", err)\n\t\t\t}\n\t\t\tif hostInitiator != nil {\n\t\t\t\thealthContent := hostInitiator.HostInitiatorContent.Health\n\t\t\t\tif healthContent.DescriptionIDs[0] == 
componentOkMessage {\n\t\t\t\t\tmessage = fmt.Sprintf(\"iSCSI Health is good for array:%s, Health:%s\", arrayID, healthContent.DescriptionIDs[0])\n\t\t\t\t\tlog.Infof(message)\n\t\t\t\t\trep.Messages = append(rep.Messages, message)\n\t\t\t\t\trep.Connected = true\n\t\t\t\t\tbreak\n\t\t\t\t} else {\n\t\t\t\t\tlog.Infof(\"iSCSI Health is bad for array:%s, Health:%s\", arrayID, healthContent.DescriptionIDs[0])\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (sc *StartupConfig) Validate() (bool, error) {\n\tif sc.Meta == nil {\n\t\treturn false, fmt.Errorf(\"meta object is nil\")\n\t}\n\n\tif sc.Server == nil {\n\t\t//return false, fmt.Errorf(\"Server is nil\")\n\t}\n\n\tif sc.Database == nil {\n\t\treturn false, fmt.Errorf(\"database object is nil\")\n\t}\n\n\tif sc.Session == nil {\n\t\treturn false, fmt.Errorf(\"session object is nil\")\n\t}\n\n\tif sc.Crypto == nil {\n\t\treturn false, fmt.Errorf(\"crypto object is nil\")\n\t}\n\n\tif sc.Secrets == nil {\n\t\treturn false, fmt.Errorf(\"secrets object is nil\")\n\t}\n\n\tif sc.Capsul == nil {\n\t\treturn false, fmt.Errorf(\"capsul object is nil\")\n\t}\n\n\tif sc.CustomCapsul == nil {\n\t\treturn false, fmt.Errorf(\"custom capsul object is nil\")\n\t}\n\n\tif sc.Create == nil {\n\t\treturn false, fmt.Errorf(\"create object is nil\")\n\t}\n\n\treturn true, nil\n}", "func (s *OpenconfigOfficeAp_System_TelnetServer) Validate() error {\n\tif err := ytypes.Validate(SchemaTree[\"OpenconfigOfficeAp_System_TelnetServer\"], s); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func IsComponentEnabledInSpec(componentName ComponentName, controlPlaneSpec *v1alpha1.IstioOperatorSpec) (bool, error) {\n\tif componentName == IngressComponentName {\n\t\treturn len(controlPlaneSpec.Components.IngressGateways) != 0, nil\n\t}\n\tif componentName == EgressComponentName {\n\t\treturn len(controlPlaneSpec.Components.EgressGateways) != 0, nil\n\t}\n\n\tcomponentNodeI, found, err := tpath.GetFromStructPath(controlPlaneSpec, 
\"Components.\"+string(componentName)+\".Enabled\")\n\tif err != nil {\n\t\treturn false, fmt.Errorf(\"error in IsComponentEnabledInSpec GetFromStructPath componentEnabled for component=%s: %s\",\n\t\t\tcomponentName, err)\n\t}\n\tif !found || componentNodeI == nil {\n\t\treturn false, nil\n\t}\n\tcomponentNode, ok := componentNodeI.(*v1alpha1.BoolValueForPB)\n\tif !ok {\n\t\treturn false, fmt.Errorf(\"component %s enabled has bad type %T, expect *v1alpha2.BoolValueForPB\", componentName, componentNodeI)\n\t}\n\tif componentNode == nil {\n\t\treturn false, nil\n\t}\n\treturn componentNode.Value, nil\n}", "func (t *OnfSwitch_Switch_State) ΛValidate(opts ...ygot.ValidationOption) error {\n\tif err := ytypes.Validate(SchemaTree[\"OnfSwitch_Switch_State\"], t, opts...); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (m *Maps) ValidCoordinate(c types.Coordinate) bool {\n\treturn c.X < width && c.X >= 0 && c.Y < height && c.Y >= 0\n}", "func IsNodeSchedulable(node *v1.Node) bool {\n\tif node == nil {\n\t\treturn false\n\t}\n\treturn !node.Spec.Unschedulable && IsNodeReady(node)\n}", "func (b *PodAffinityTermApplyConfiguration) WithTopologyKey(value string) *PodAffinityTermApplyConfiguration {\n\tb.TopologyKey = &value\n\treturn b\n}", "func (h *FriendlyHost) Valid() bool {\n\treturn svchost.IsValid(h.Raw)\n}", "func (conf *Config) IsSwarmCompatible() error {\n\tif conf.ClusterStore != \"\" || conf.ClusterAdvertise != \"\" {\n\t\treturn fmt.Errorf(\"--cluster-store and --cluster-advertise daemon configurations are incompatible with swarm mode\")\n\t}\n\tif conf.LiveRestoreEnabled {\n\t\treturn fmt.Errorf(\"--live-restore daemon configuration is incompatible with swarm mode\")\n\t}\n\treturn nil\n}", "func (o DrillSpecPodConfigPodSchedulingAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermOutput) TopologyKey() pulumi.StringOutput {\n\treturn o.ApplyT(func(v 
DrillSpecPodConfigPodSchedulingAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm) string {\n\t\treturn v.TopologyKey\n\t}).(pulumi.StringOutput)\n}", "func (l *LoginReqModel) IsOTTValid() error {\n\t// 1. Checking if it is even a number\n\t_, toIntErr := strconv.Atoi(l.OTT)\n\tif toIntErr != nil {\n\t\treturn errors.New(\"Cannot convert OTT to number\")\n\t}\n\t// 2. Cheking hour range\n\thourStr := l.OTT[0:2]\n\thourNum, _ := strconv.Atoi(hourStr)\n\tif hourNum < 0 || hourNum > 23 {\n\t\treturn errors.New(\"OTT hour not in range\")\n\t}\n\t// 3. Cheking minute range\n\tminuteStr := l.OTT[2:4]\n\tminuteNum, _ := strconv.Atoi(minuteStr)\n\tif hourNum < 0 || hourNum > 59 {\n\t\treturn errors.New(\"OTT minute not in range\")\n\t}\n\tl.Hour = uint8(hourNum)\n\tl.Minute = uint8(minuteNum)\n\treturn nil\n}", "func isNodeCapable(pod *v1.Pod, node v1.Node) (bool, []string, error) {\n\tfits := true\n\tfailReasons := []string{}\n\tfor _, predicateKey := range predicatesEvalOrderStore {\n\t\tfit, failures, err := predicatesFuncMap[predicateKey](pod, node)\n\t\tif err != nil {\n\t\t\treturn false, nil, err\n\t\t}\n\t\tfits = fits && fit\n\t\tfailReasons = append(failReasons, failures...)\n\t}\n\treturn fits, failReasons, nil\n}", "func (n *Node) Ready() bool {\n\t// Nodes that are not leaders will not have WireGuardIPs, so it is not required.\n\treturn n != nil && n.Endpoint != nil && !(n.Endpoint.IP == nil && n.Endpoint.DNS == \"\") && n.Endpoint.Port != 0 && n.Key != nil && n.InternalIP != nil && n.Subnet != nil && time.Now().Unix()-n.LastSeen < int64(resyncPeriod)*2/int64(time.Second)\n}", "func (d *portworx) IsNodeOutOfMaintenance(n node.Node) (bool, error) {\n\tstNode, err := d.GetDriverNode(&n)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tif stNode.Status == api.Status_STATUS_OK {\n\t\treturn true, nil\n\t}\n\n\treturn false, nil\n}", "func (o 
DrillSpecPodConfigPodSchedulingAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionOutput) TopologyKey() pulumi.StringOutput {\n\treturn o.ApplyT(func(v DrillSpecPodConfigPodSchedulingAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecution) string {\n\t\treturn v.TopologyKey\n\t}).(pulumi.StringOutput)\n}", "func (t *Tensor) IsValid() bool {\n\tif DimProduct(t.Dims) != int64(len(t.Buffer)/int(DtypeSize[t.Dtype])) {\n\t\treturn false\n\t}\n\treturn true\n}", "func (t *OpenconfigSystem_System_TelnetServer) Validate(opts ...ygot.ValidationOption) error {\n\tif err := ytypes.Validate(SchemaTree[\"OpenconfigSystem_System_TelnetServer\"], t, opts...); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}" ]
[ "0.61786824", "0.5564089", "0.5552909", "0.5381619", "0.50309676", "0.50297743", "0.5022979", "0.49563727", "0.49521244", "0.48063827", "0.47571453", "0.47314247", "0.4716992", "0.47108042", "0.4706679", "0.4685612", "0.4675897", "0.4675828", "0.46655697", "0.46429", "0.46191123", "0.4601381", "0.45890528", "0.45577574", "0.45546725", "0.44967118", "0.44962296", "0.44797787", "0.44695377", "0.44286942", "0.44273803", "0.442079", "0.44164336", "0.44164336", "0.43845984", "0.43741018", "0.43331048", "0.43275872", "0.43178424", "0.4317397", "0.4299714", "0.42965138", "0.4289519", "0.42862502", "0.42796874", "0.42642862", "0.42612374", "0.42604747", "0.42562532", "0.42334598", "0.42268735", "0.42250773", "0.4221504", "0.42167228", "0.42129126", "0.42072064", "0.4204843", "0.4198752", "0.41889718", "0.41889256", "0.4186641", "0.4181476", "0.4178169", "0.41747123", "0.41737473", "0.41733915", "0.41703606", "0.41622043", "0.41614306", "0.41595975", "0.41555503", "0.41547754", "0.4152608", "0.4146865", "0.4145915", "0.41440204", "0.41424534", "0.414032", "0.41228187", "0.4119236", "0.41184258", "0.41172996", "0.41127288", "0.41125068", "0.4112484", "0.4111", "0.41096413", "0.41057065", "0.41055194", "0.41045254", "0.4103285", "0.40980384", "0.40961263", "0.409173", "0.40894514", "0.40838", "0.40832978", "0.40806642", "0.4072926", "0.40712407" ]
0.8106002
0
ParseSoftwareAttribute parses the bytes into a SoftwareAttribute instance.
func ParseSoftwareAttribute(r *read.BigEndian, l uint16) (SoftwareAttribute, error) { sw, err := Read127CharString(r, l) return SoftwareAttribute{sw}, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func ParseAttribute(b []byte) (Attribute, error) {\n\tif len(b) < 22 {\n\t\treturn Attribute{}, fmt.Errorf(\"attribute data should be at least 22 bytes but is %d\", len(b))\n\t}\n\n\tr := binutil.NewLittleEndianReader(b)\n\n\tnameLength := r.Byte(0x09)\n\tnameOffset := r.Uint16(0x0A)\n\n\tname := \"\"\n\tif nameLength != 0 {\n\t\tnameBytes := r.Read(int(nameOffset), int(nameLength)*2)\n\t\tname = utf16.DecodeString(nameBytes, binary.LittleEndian)\n\t}\n\n\tresident := r.Byte(0x08) == 0x00\n\tvar attributeData []byte\n\tactualSize := uint64(0)\n\tallocatedSize := uint64(0)\n\tif resident {\n\t\tdataOffset := int(r.Uint16(0x14))\n\t\tuDataLength := r.Uint32(0x10)\n\t\tif int64(uDataLength) > maxInt {\n\t\t\treturn Attribute{}, fmt.Errorf(\"attribute data length %d overflows maximum int value %d\", uDataLength, maxInt)\n\t\t}\n\t\tdataLength := int(uDataLength)\n\t\texpectedDataLength := dataOffset + dataLength\n\n\t\tif len(b) < expectedDataLength {\n\t\t\treturn Attribute{}, fmt.Errorf(\"expected attribute data length to be at least %d but is %d\", expectedDataLength, len(b))\n\t\t}\n\n\t\tattributeData = r.Read(dataOffset, dataLength)\n\t} else {\n\t\tdataOffset := int(r.Uint16(0x20))\n\t\tif len(b) < dataOffset {\n\t\t\treturn Attribute{}, fmt.Errorf(\"expected attribute data length to be at least %d but is %d\", dataOffset, len(b))\n\t\t}\n\t\tallocatedSize = r.Uint64(0x28)\n\t\tactualSize = r.Uint64(0x30)\n\t\tattributeData = r.ReadFrom(int(dataOffset))\n\t}\n\n\treturn Attribute{\n\t\tType: AttributeType(r.Uint32(0)),\n\t\tResident: resident,\n\t\tName: name,\n\t\tFlags: AttributeFlags(r.Uint16(0x0C)),\n\t\tAttributeId: int(r.Uint16(0x0E)),\n\t\tAllocatedSize: allocatedSize,\n\t\tActualSize: actualSize,\n\t\tData: binutil.Duplicate(attributeData),\n\t}, nil\n}", "func parseAttribute(line string) Attribute {\n\tif !strings.HasPrefix(line, AttributeCommentPrefix) {\n\t\treturn nil\n\t}\n\tline = strings.TrimSpace(line[len(AttributeCommentPrefix):])\n\tcolon := 
strings.IndexRune(line, ':')\n\tvar key, value string\n\tif colon == -1 {\n\t\tkey = line\n\t} else {\n\t\tkey, value = line[:colon], line[colon+1:]\n\t}\n\tswitch key {\n\tcase \"linkage\":\n\t\treturn parseLinkageAttribute(value)\n\tcase \"name\":\n\t\treturn nameAttribute(strings.TrimSpace(value))\n\tcase \"thread_local\":\n\t\treturn tlsAttribute{}\n\tdefault:\n\t\t// FIXME decide what to do here. return error? log warning?\n\t\tpanic(\"unknown attribute key: \" + key)\n\t}\n\treturn nil\n}", "func (c *StickersCreateStickerSetRequest) SetSoftware(value string) {\n\tc.Flags.Set(3)\n\tc.Software = value\n}", "func ParseAttributes(b []byte) ([]Attribute, error) {\n\tif len(b) == 0 {\n\t\treturn []Attribute{}, nil\n\t}\n\tattributes := make([]Attribute, 0)\n\tfor len(b) > 0 {\n\t\tif len(b) < 4 {\n\t\t\treturn nil, fmt.Errorf(\"attribute header data should be at least 4 bytes but is %d\", len(b))\n\t\t}\n\n\t\tr := binutil.NewLittleEndianReader(b)\n\t\tattrType := r.Uint32(0)\n\t\tif attrType == uint32(AttributeTypeTerminator) {\n\t\t\tbreak\n\t\t}\n\n\t\tif len(b) < 8 {\n\t\t\treturn nil, fmt.Errorf(\"cannot read attribute header record length, data should be at least 8 bytes but is %d\", len(b))\n\t\t}\n\n\t\tuRecordLength := r.Uint32(0x04)\n\t\tif int64(uRecordLength) > maxInt {\n\t\t\treturn nil, fmt.Errorf(\"record length %d overflows maximum int value %d\", uRecordLength, maxInt)\n\t\t}\n\t\trecordLength := int(uRecordLength)\n\t\tif recordLength <= 0 {\n\t\t\treturn nil, fmt.Errorf(\"cannot handle attribute with zero or negative record length %d\", recordLength)\n\t\t}\n\n\t\tif recordLength > len(b) {\n\t\t\treturn nil, fmt.Errorf(\"attribute record length %d exceeds data length %d\", recordLength, len(b))\n\t\t}\n\n\t\trecordData := r.Read(0, recordLength)\n\t\tattribute, err := ParseAttribute(recordData)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tattributes = append(attributes, attribute)\n\t\tb = r.ReadFrom(recordLength)\n\t}\n\treturn 
attributes, nil\n}", "func parseFamily(b []byte) (Family, error) {\n\tad, err := netlink.NewAttributeDecoder(b)\n\tif err != nil {\n\t\treturn Family{}, err\n\t}\n\n\tvar f Family\n\tfor ad.Next() {\n\t\tswitch ad.Type() {\n\t\tcase unix.CTRL_ATTR_FAMILY_ID:\n\t\t\tf.ID = ad.Uint16()\n\t\tcase unix.CTRL_ATTR_FAMILY_NAME:\n\t\t\tf.Name = ad.String()\n\t\tcase unix.CTRL_ATTR_VERSION:\n\t\t\tv := ad.Uint32()\n\t\t\tif v > math.MaxUint8 {\n\t\t\t\treturn Family{}, errInvalidFamilyVersion\n\t\t\t}\n\n\t\t\tf.Version = uint8(v)\n\t\tcase unix.CTRL_ATTR_MCAST_GROUPS:\n\t\t\tad.Nested(parseMulticastGroups(&f.Groups))\n\t\t}\n\t}\n\n\tif err := ad.Err(); err != nil {\n\t\treturn Family{}, err\n\t}\n\n\treturn f, nil\n}", "func (setattr *FuseSetattrIn) ParseBinary(bcontent []byte) error {\n\terr := common.ParseBinary(bcontent, setattr)\n\n\treturn err\n}", "func parseStationInfo(b []byte) (*StationInfo, error) {\n\tattrs, err := netlink.UnmarshalAttributes(b)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar info StationInfo\n\tfor _, a := range attrs {\n\t\tswitch a.Type {\n\t\tcase unix.NL80211_ATTR_MAC:\n\t\t\tinfo.HardwareAddr = net.HardwareAddr(a.Data)\n\t\tcase unix.NL80211_ATTR_STA_INFO:\n\t\t\tnattrs, err := netlink.UnmarshalAttributes(a.Data)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tif err := (&info).parseAttributes(nattrs); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\t// Parsed the necessary data.\n\t\t\treturn &info, nil\n\t\t}\n\t}\n\n\t// No station info found\n\treturn nil, os.ErrNotExist\n}", "func parseSoftIRQStat(line string) (SoftIRQStat, uint64, error) {\n\tsoftIRQStat := SoftIRQStat{}\n\tvar total uint64\n\tvar prefix string\n\n\t_, err := fmt.Sscanf(line, \"%s %d %d %d %d %d %d %d %d %d %d %d\",\n\t\t&prefix, &total,\n\t\t&softIRQStat.Hi, &softIRQStat.Timer, &softIRQStat.NetTx, &softIRQStat.NetRx,\n\t\t&softIRQStat.Block, &softIRQStat.BlockIoPoll,\n\t\t&softIRQStat.Tasklet, 
&softIRQStat.Sched,\n\t\t&softIRQStat.Hrtimer, &softIRQStat.Rcu)\n\n\tif err != nil {\n\t\treturn SoftIRQStat{}, 0, fmt.Errorf(\"couldn't parse %q (softirq): %w\", line, err)\n\t}\n\n\treturn softIRQStat, total, nil\n}", "func NewSoftware(software string) Software {\n\treturn Software(software)\n}", "func (removexattr *FuseRemovexattrIn) ParseBinary(bcontent []byte) error {\n\n\tlength := len(bcontent)\n\tif length > 0 {\n\t\tremovexattr.Name = string(bcontent[:length-1])\n\t}\n\n\treturn nil\n}", "func (i SNSPlatformApplicationAttribute) ParseByName(s string) (SNSPlatformApplicationAttribute, error) {\n\tif val, ok := _SNSPlatformApplicationAttributeNameToValueMap[s]; ok {\n\t\t// parse ok\n\t\treturn val, nil\n\t}\n\n\t// error\n\treturn -1, fmt.Errorf(\"Enum Name of %s Not Expected In SNSPlatformApplicationAttribute Values List\", s)\n}", "func DecodeAttribute(b []byte) (*Attribute, []byte, error) {\n\tif len(b) < SizeofRtAttr {\n\t\treturn nil, nil, netlink.ErrNoData\n\t}\n\n\tlength := *(*uint16)(unsafe.Pointer(&b[0:2][0]))\n\tif uint16(len(b)) < length ||\n\t\tlength < SizeofRtAttr {\n\t\treturn nil, b, netlink.ErrNoData\n\t}\n\n\ta := &Attribute{}\n\ta.Type = *(*uint16)(unsafe.Pointer(&b[2:4][0]))\n\tdata_len := int(length) - RTA_STRUCT_ALEN\n\ta.data = make([]byte, data_len)\n\tcopy(a.data, b[RTA_STRUCT_ALEN:length])\n\n\tr := netlink.Align(int(length), RTA_ALIGNTO)\n\treturn a, b[r:], nil\n}", "func ParseAttribute(v []byte, c *Candidate) error {\n\tp := candidateParser{\n\t\tbuf: v,\n\t\tc: c,\n\t}\n\terr := p.parse()\n\treturn err\n}", "func (attr *Attribute) UnmarshalBinary(data []byte) error {\n\t// Read the common portion of the attribute record header\n\tif err := attr.Header.UnmarshalBinary(data); err != nil {\n\t\treturn err\n\t}\n\n\t// Sanity check the record length\n\tif int(attr.Header.RecordLength) > len(data) {\n\t\treturn ErrTruncatedData\n\t}\n\n\t// For the sake of simple bounds checking below, restrict our working\n\t// data set to this 
record\n\tif len(data) > int(attr.Header.RecordLength) {\n\t\tdata = data[:attr.Header.RecordLength]\n\t}\n\n\t// Read the form-specific portion of the attribute record header\n\tformHeader := data[AttributeRecordHeaderLength:]\n\tif attr.Header.Resident() {\n\t\tif err := attr.Resident.UnmarshalBinary(formHeader); err != nil {\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\tif err := attr.Nonresident.UnmarshalBinary(formHeader); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// Read the attribute name if it has one\n\tif attr.Header.NameLength > 0 {\n\t\tstart := int(attr.Header.NameOffset)\n\t\tlength := int(attr.Header.NameLength) * 2 // NameLength is in characters, assuming 16-bit unicode\n\t\tend := start + length\n\t\tif end > len(data) {\n\t\t\treturn ErrAttributeNameOutOfBounds\n\t\t}\n\t\tvar err error\n\t\tattr.Name, err = utf16ToString(data[start:end])\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// Read the attribute value if it's resident and nonzero\n\tif attr.Header.Resident() && attr.Resident.ValueLength > 0 {\n\t\tstart := int(attr.Resident.ValueOffset)\n\t\tlength := int(attr.Resident.ValueLength)\n\t\tend := start + length\n\t\tif end > len(data) {\n\t\t\treturn ErrAttributeValueOutOfBounds\n\t\t}\n\t\tattr.ResidentValue = make([]byte, length)\n\t\tcopy(attr.ResidentValue, data[start:end])\n\t}\n\n\treturn nil\n}", "func (getattr *FuseGetattrIn) ParseBinary(bcontent []byte) error {\n\terr := common.ParseBinary(bcontent, getattr)\n\n\treturn err\n}", "func (setxattr *FuseSetxattrIn) ParseBinary(bcontent []byte) error {\n\n\tlength := len(bcontent)\n\n\tif length < 8 {\n\t\treturn ErrDataLen\n\t}\n\n\tcommon.ParseBinary(bcontent[:4], &setxattr.Size)\n\tcommon.ParseBinary(bcontent[4:8], &setxattr.Flags)\n\n\tarray := bytes.Split(bcontent[8:], []byte{0})\n\n\tif len(array) < 2 {\n\t\treturn ErrDataLen\n\t}\n\n\tsetxattr.Name = string(array[0])\n\tsetxattr.Value = string(array[1])\n\n\treturn nil\n}", "func (parser *Parser) parseAttribute() 
*Attribute {\n\tattrNSIdx := parser.getLEWord(parser.ParserOffset)\n\tattrNameIdx := parser.getLEWord(parser.ParserOffset + (1 * WORD_SIZE))\n\tattrValueIdx := parser.getLEWord(parser.ParserOffset + (2 * WORD_SIZE))\n\tattrType := parser.getLEWord(parser.ParserOffset + (3 * WORD_SIZE))\n\tattrData := parser.getLEWord(parser.ParserOffset + (4 * WORD_SIZE))\n\n\tattr := new(Attribute)\n\tattr.Name = parser.getString(attrNameIdx)\n\n\tif uint32(attrNSIdx) == 0xFFFFFFFF {\n\t\tattr.Namespace = \"\"\n\t\tattr.Prefix = \"\"\n\t} else {\n\t\turi := parser.getString(attrNSIdx)\n\t\tif v, ok := parser.Namespaces[uri]; ok {\n\t\t\tattr.Namespace = uri\n\t\t\tattr.Prefix = v\n\t\t}\n\t}\n\n\tif uint32(attrValueIdx) == 0xFFFFFFFF {\n\t\tattr.Value = parser.getAttributeValue(attrType, attrData)\n\t} else {\n\t\tattr.Value = parser.getString(attrValueIdx)\n\t}\n\n\treturn attr\n}", "func ManifestUnpack([]byte) Manifest { panic(\"\") }", "func ParseGetaspecificPbxDeviceFirmwareResponse(rsp *http.Response) (*GetaspecificPbxDeviceFirmwareResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &GetaspecificPbxDeviceFirmwareResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest PbxDeviceFirmwares\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (p *Attribute) UnmarshalBinary(data []byte) error {\n\t// good old [length|field] encoding. 
length is an uint64\n\tif data[0] != MagicByte {\n\t\treturn errors.New(\"missing magic byte\")\n\t}\n\tif 8 >= (uint64)(len(data)) {\n\t\treturn errors.New(\"invalid length data\")\n\t}\n\tvar index uint64 = 1\n\tlengthName := binary.BigEndian.Uint64(data[index : index+8])\n\tindex += 8\n\tif index+lengthName >= (uint64)(len(data)) {\n\t\treturn errors.New(\"invalid name data\")\n\t}\n\tp.Name = string(data[index : index+lengthName])\n\tindex += lengthName\n\n\tlengthTypename := binary.BigEndian.Uint64(data[index : index+8])\n\tindex += 8\n\tif index+lengthTypename >= (uint64)(len(data)) {\n\t\treturn errors.New(\"invalid typename data\")\n\t}\n\tp.Typename = string(data[index : index+lengthTypename])\n\tindex += lengthTypename\n\n\tlengthValue := binary.BigEndian.Uint64(data[index : index+8])\n\tindex += 8\n\tif index+lengthValue > (uint64)(len(data)) {\n\t\treturn errors.New(\"invalid value data\")\n\t}\n\tp.Value = data[index : index+lengthValue]\n\treturn nil\n}", "func (release *FuseReleaseIn) ParseBinary(bcontent []byte) error {\n\n\tlength := len(bcontent)\n\n\tif length < 24 {\n\t\treturn ErrDataLen\n\t}\n\n\terr := common.ParseBinary(bcontent, release)\n\n\treturn err\n}", "func (_Distributor *DistributorFilterer) ParseBountyWasPaid(log types.Log) (*DistributorBountyWasPaid, error) {\n\tevent := new(DistributorBountyWasPaid)\n\tif err := _Distributor.contract.UnpackLog(event, \"BountyWasPaid\", log); err != nil {\n\t\treturn nil, err\n\t}\n\treturn event, nil\n}", "func parseMAC(s string) net.HardwareAddr {\n\tha, err := net.ParseMAC(s)\n\tpanicIfError(err)\n\treturn ha\n}", "func (s *Software) GetFrom(m *Message) error {\n\treturn (*TextAttribute)(s).GetFromAs(m, AttrSoftware)\n}", "func ParseMAC(s string) (m MAC, err error) {\n\tif len(s) == 12 {\n\t\ts = fmt.Sprintf(\"%s.%s.%s\", s[0:4], s[4:8], s[8:12])\n\t}\n\thw, err := net.ParseMAC(s)\n\tif len(hw) == 8 {\n\t\treturn nil, &net.AddrError{Err: \"EUI-64 not suported\", Addr: s}\n\t}\n\treturn MAC(hw), 
err\n}", "func (create *FuseCreateIn) ParseBinary(bcontent []byte) error {\n\n\tlength := len(bcontent)\n\n\tif length < 16 {\n\t\treturn ErrDataLen\n\t}\n\n\tcommon.ParseBinary(bcontent[:4], &create.Flags)\n\tcommon.ParseBinary(bcontent[4:8], &create.Mode)\n\tcommon.ParseBinary(bcontent[8:12], &create.Umask)\n\tcommon.ParseBinary(bcontent[12:16], &create.Padding)\n\n\t// length-1 是为了避开最后一个'\\0'字符\n\tcreate.Name = string(bcontent[16 : length-1])\n\n\treturn nil\n}", "func (i SNSSubscribeAttribute) ParseByName(s string) (SNSSubscribeAttribute, error) {\n\tif val, ok := _SNSSubscribeAttributeNameToValueMap[s]; ok {\n\t\t// parse ok\n\t\treturn val, nil\n\t}\n\n\t// error\n\treturn -1, fmt.Errorf(\"Enum Name of %s Not Expected In SNSSubscribeAttribute Values List\", s)\n}", "func ParseLine(line string) (Version, error) {\n\t// The line returned by go version for stable releases is:\n\t// \"go version go<version> <os>/<arch>\"\n\t// For unstable releases it is:\n\t// \"go version devel go<version> <timestamp> <os>/<arch>\"\n\tfields := strings.Fields(line)\n\tversion := fields[2] // field after \"go version\"\n\tif version == \"devel\" {\n\t\tversion = fields[3] // field after \"go version devel\"\n\t}\n\n\treturn Parse(version)\n}", "func ParseModemFirmwareManifest(ctx context.Context) (*mfwd.FirmwareManifestV2, error) {\n\tctx, st := timing.Start(ctx, \"ParseModemFirmwareManifest\")\n\tdefer st.End()\n\n\tmodemFirmwareProtoPath := GetModemFirmwareManifestPath()\n\toutput, err := testexec.CommandContext(ctx, \"cat\", modemFirmwareProtoPath).Output()\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"failed to access the firmware manifest: %s\", modemFirmwareProtoPath)\n\t}\n\n\ttesting.ContextLog(ctx, \"Parsing modem firmware proto\")\n\tmanifest := &mfwd.FirmwareManifestV2{}\n\tif err := proto.UnmarshalText(string(output), manifest); err != nil {\n\t\treturn nil, errors.Wrapf(err, \"failed to parse firmware manifest: %s\", 
modemFirmwareProtoPath)\n\t}\n\ttesting.ContextLog(ctx, \"Parsed successfully\")\n\n\treturn manifest, nil\n}", "func readAttributeSelection(bytes *Bytes) (attributeSelection AttributeSelection, err error) {\n\terr = bytes.ReadSubBytes(classUniversal, tagSequence, attributeSelection.readComponents)\n\tif err != nil {\n\t\terr = LdapError{fmt.Sprintf(\"readAttributeSelection:\\n%s\", err.Error())}\n\t\treturn\n\t}\n\treturn\n}", "func (i SNSPlatformApplicationAttribute) ParseByKey(s string) (SNSPlatformApplicationAttribute, error) {\n\tfor k, v := range _SNSPlatformApplicationAttributeValueToKeyMap {\n\t\tif v == s {\n\t\t\t// parse ok\n\t\t\treturn k, nil\n\t\t}\n\t}\n\n\t// error\n\treturn -1, fmt.Errorf(\"Enum Key of %s Not Expected In SNSPlatformApplicationAttribute Keys List\", s)\n}", "func (getxattr *FuseGetxattrIn) ParseBinary(bcontent []byte) error {\n\n\tlength := len(bcontent)\n\n\tif length < 8 {\n\t\treturn ErrDataLen\n\t}\n\n\tcommon.ParseBinary(bcontent[:4], &getxattr.Size)\n\tcommon.ParseBinary(bcontent[4:8], &getxattr.Padding)\n\n\tif length > 8 {\n\t\tgetxattr.Name = string(bcontent[8 : length-1])\n\t}\n\n\treturn nil\n}", "func (os *OS) UnmarshalText(b []byte) error {\n\tsplit := bytes.Split(b, []byte(\":\"))\n\tif len(split) != 2 {\n\t\treturn trace.BadParameter(\"OS should be in format vendor:version, got %q\", b)\n\t}\n\tos.Vendor = string(split[0])\n\tos.Version = string(split[1])\n\treturn nil\n}", "func (o *ConvergedinfraServerComplianceDetailsAllOf) SetFirmware(v string) {\n\to.Firmware = &v\n}", "func StructureDetailsFromBytes(markersBytes []byte) (markersPair *StructureDetails, consumedBytes int, err error) {\n\tmarshalUtil := marshalutil.New(markersBytes)\n\tif markersPair, err = StructureDetailsFromMarshalUtil(marshalUtil); err != nil {\n\t\terr = errors.Errorf(\"failed to parse StructureDetails from MarshalUtil: %w\", err)\n\t\treturn\n\t}\n\tconsumedBytes = marshalUtil.ReadOffset()\n\n\treturn\n}", "func meminfoFromBytes(buf []byte) 
(meminfomap, error) {\n\tret := make(meminfomap)\n\tfor _, line := range bytes.Split(buf, []byte{'\\n'}) {\n\t\tkv := bytes.SplitN(line, []byte{':'}, 2)\n\t\tif len(kv) != 2 {\n\t\t\t// invalid line?\n\t\t\tcontinue\n\t\t}\n\t\tkey := string(kv[0])\n\t\ttokens := bytes.SplitN(bytes.TrimSpace(kv[1]), []byte{' '}, 2)\n\t\tif len(tokens) > 0 {\n\t\t\tvalue, err := strconv.ParseUint(string(tokens[0]), 10, 64)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tret[key] = value\n\t\t}\n\t}\n\treturn ret, nil\n}", "func HexParse(signature string) (string, error) {\n\tsignature = preprocessHex(signature)\n\t// Validate the hexadecimal\n\t_, err := hex.DecodeString(signature)\n\treturn signature, err\n}", "func (o *PostPartsParams) SetFirmware(firmware *string) {\n\to.Firmware = firmware\n}", "func Parse(arg string) (Attr, error) {\n\targ = strings.TrimSpace(arg)\n\tattr := Attr{\n\t\tOriginal: arg,\n\t\tinputType: \"string\",\n\t}\n\tif len(arg) == 0 {\n\t\treturn attr, errors.New(\"argument can not be blank\")\n\t}\n\n\tparts := strings.Split(arg, \":\")\n\tattr.Name = name.New(parts[0])\n\tif len(parts) > 1 {\n\t\tattr.inputType = parts[1]\n\t}\n\n\tif len(parts) > 2 {\n\t\tattr.goType = parts[2]\n\t}\n\n\treturn attr, nil\n}", "func ParseGetaspecificPbxDeviceFirmwareBinaryResponse(rsp *http.Response) (*GetaspecificPbxDeviceFirmwareBinaryResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &GetaspecificPbxDeviceFirmwareBinaryResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest PbxDeviceFirmwareBinaries\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func Parse(arg string) (Attr, error) {\n\targ = 
strings.TrimSpace(arg)\n\tattr := Attr{\n\t\tOriginal: arg,\n\t\tcommonType: \"string\",\n\t}\n\tif len(arg) == 0 {\n\t\treturn attr, errors.New(\"argument can not be blank\")\n\t}\n\n\tparts := strings.Split(arg, \":\")\n\tattr.Name = name.New(parts[0])\n\tif len(parts) > 1 {\n\t\tattr.commonType = parts[1]\n\t}\n\n\tif len(parts) > 2 {\n\t\tattr.goType = parts[2]\n\t}\n\n\treturn attr, nil\n}", "func DDBAttributeFromKey(input string) (DDBAttribute, error) {\n\tattrs := regexpMatchAttribute.FindStringSubmatch(input)\n\tif len(attrs) == 0 {\n\t\treturn DDBAttribute{}, fmt.Errorf(\"parse attribute from key: %s\", input)\n\t}\n\tupperString := strings.ToUpper(attrs[2])\n\treturn DDBAttribute{\n\t\tName: &attrs[1],\n\t\tDataType: &upperString,\n\t}, nil\n}", "func (c *Capabilities) FromBytes(buf *bytes.Buffer) error {\n\tdec := gob.NewDecoder(buf)\n\tif err := dec.Decode(&c.Node); err != nil {\n\t\treturn err\n\t}\n\tif err := dec.Decode(&c.Ver); err != nil {\n\t\treturn err\n\t}\n\treturn dec.Decode(&c.Features)\n}", "func FromBytes(bs []byte) (*Event, error) {\n\t// TODO:\n\t// - consider 64 bit operating systems with 24 byte slices.\n\t// - is this the most efficient way to do this? 
Maybe just decode directly into a struct\n\t// - maybe check for input system requirements (ie, check that it is running on linux)\n\tif len(bs) != 16 {\n\t\treturn nil, ErrDecode{fmt.Sprintf(\"invalid event: should be %d bytes is %d bytes\", 16, len(bs))}\n\t}\n\n\tev := EVCode(binary.LittleEndian.Uint16(bs[8:10]))\n\tif ev >= EV_CNT {\n\t\treturn nil, ErrDecode{fmt.Sprintf(\"invalid event: %v\", ev)}\n\t}\n\n\tsec := binary.LittleEndian.Uint32(bs[0:4])\n\tusec := binary.LittleEndian.Uint32(bs[4:8])\n\n\tc := binary.LittleEndian.Uint16(bs[10:12])\n\n\treturn &Event{\n\t\tTime: time.Unix(int64(sec), int64(usec)),\n\t\tType: ev,\n\t\tCode: evCode(ev, c),\n\t\tValue: int32(binary.LittleEndian.Uint32(bs[12:16])),\n\t}, nil\n}", "func ParseUint16(strval string) (uint16, error) {\n\tval, err := parseUint(strval, 16)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn uint16(val), nil\n}", "func UnSerialiseFromString( packet []byte, parseAll bool ) (*BaseEvent,error) {\n var packetLen uint\n headerTemplate := fmt.Sprintf( \"%s%s:%%d\", FRAME_HEADER, PROTOCOL_VERSION_NUMBER )\n if numParsed,err:=fmt.Sscanf(string(packet), headerTemplate, &packetLen); numParsed != 1 {\n newErr := errors.New( \"UnSerialiseFromString error failed to parse header:\" + err.Error() + \" packet:\"+string(packet) )\n return nil,newErr\n } // if\n\n body := packet[FRAME_HEADER_LEN:]\n event := new( BaseEvent )\n err := event.ParseBody( body, parseAll )\n return event,err\n}", "func ParseProduct(name string) (Product, error) {\n\tif x, ok := _ProductValue[name]; ok {\n\t\treturn x, nil\n\t}\n\treturn Product(0), fmt.Errorf(\"%s is not a valid Product\", name)\n}", "func NewByteAttribute(attrName string) *SDK.AttributeDefinition {\n\treturn NewAttributeDefinition(attrName, \"B\")\n}", "func (fifi *FIAdditionalFIToFI) Parse(record string) error {\n\tif utf8.RuneCountInString(record) != 216 {\n\t\treturn NewTagWrongLengthErr(216, len(record))\n\t}\n\tfifi.tag = 
record[:6]\n\tfifi.AdditionalFIToFI.LineOne = fifi.parseStringField(record[6:41])\n\tfifi.AdditionalFIToFI.LineTwo = fifi.parseStringField(record[41:76])\n\tfifi.AdditionalFIToFI.LineThree = fifi.parseStringField(record[76:111])\n\tfifi.AdditionalFIToFI.LineFour = fifi.parseStringField(record[111:146])\n\tfifi.AdditionalFIToFI.LineFive = fifi.parseStringField(record[146:181])\n\tfifi.AdditionalFIToFI.LineSix = fifi.parseStringField(record[181:216])\n\treturn nil\n}", "func parseCPUStat(line string) (CPUStat, int64, error) {\n\tcpuStat := CPUStat{}\n\tvar cpu string\n\n\tcount, err := fmt.Sscanf(line, \"%s %f %f %f %f %f %f %f %f %f %f\",\n\t\t&cpu,\n\t\t&cpuStat.User, &cpuStat.Nice, &cpuStat.System, &cpuStat.Idle,\n\t\t&cpuStat.Iowait, &cpuStat.IRQ, &cpuStat.SoftIRQ, &cpuStat.Steal,\n\t\t&cpuStat.Guest, &cpuStat.GuestNice)\n\n\tif err != nil && err != io.EOF {\n\t\treturn CPUStat{}, -1, fmt.Errorf(\"couldn't parse %q (cpu): %w\", line, err)\n\t}\n\tif count == 0 {\n\t\treturn CPUStat{}, -1, fmt.Errorf(\"couldn't parse %q (cpu): 0 elements parsed\", line)\n\t}\n\n\tcpuStat.User /= userHZ\n\tcpuStat.Nice /= userHZ\n\tcpuStat.System /= userHZ\n\tcpuStat.Idle /= userHZ\n\tcpuStat.Iowait /= userHZ\n\tcpuStat.IRQ /= userHZ\n\tcpuStat.SoftIRQ /= userHZ\n\tcpuStat.Steal /= userHZ\n\tcpuStat.Guest /= userHZ\n\tcpuStat.GuestNice /= userHZ\n\n\tif cpu == \"cpu\" {\n\t\treturn cpuStat, -1, nil\n\t}\n\n\tcpuID, err := strconv.ParseInt(cpu[3:], 10, 64)\n\tif err != nil {\n\t\treturn CPUStat{}, -1, fmt.Errorf(\"couldn't parse %q (cpu/cpuid): %w\", line, err)\n\t}\n\n\treturn cpuStat, cpuID, nil\n}", "func (o *NetworkElementSummaryAllOf) SetFirmware(v string) {\n\to.Firmware = &v\n}", "func (b *BSS) parseAttributes(attrs []netlink.Attribute) error {\n\tfor _, a := range attrs {\n\t\tswitch a.Type {\n\t\tcase unix.NL80211_BSS_BSSID:\n\t\t\tb.BSSID = net.HardwareAddr(a.Data)\n\t\tcase unix.NL80211_BSS_FREQUENCY:\n\t\t\tb.Frequency = int(nlenc.Uint32(a.Data))\n\t\tcase 
unix.NL80211_BSS_BEACON_INTERVAL:\n\t\t\t// Raw value is in \"Time Units (TU)\". See:\n\t\t\t// https://en.wikipedia.org/wiki/Beacon_frame\n\t\t\tb.BeaconInterval = time.Duration(nlenc.Uint16(a.Data)) * 1024 * time.Microsecond\n\t\tcase unix.NL80211_BSS_SEEN_MS_AGO:\n\t\t\t// * @NL80211_BSS_SEEN_MS_AGO: age of this BSS entry in ms\n\t\t\tb.LastSeen = time.Duration(nlenc.Uint32(a.Data)) * time.Millisecond\n\t\tcase unix.NL80211_BSS_STATUS:\n\t\t\t// NOTE: BSSStatus copies the ordering of nl80211's BSS status\n\t\t\t// constants. This may not be the case on other operating systems.\n\t\t\tb.Status = BSSStatus(nlenc.Uint32(a.Data))\n\t\tcase unix.NL80211_BSS_INFORMATION_ELEMENTS:\n\t\t\ties, err := parseIEs(a.Data)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t// TODO(mdlayher): return more IEs if they end up being generally useful\n\t\t\tfor _, ie := range ies {\n\t\t\t\tswitch ie.ID {\n\t\t\t\tcase ieSSID:\n\t\t\t\t\tb.SSID = decodeSSID(ie.Data)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func ParseMachineSpec(file string) (*MachineSpec, error) {\n\tvar p []byte\n\tvar err error\n\tif file == \"-\" {\n\t\tp, err = ioutil.ReadAll(os.Stdin)\n\t} else {\n\t\tp, err = ioutil.ReadFile(file)\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar spec MachineSpec\n\tdec := json.NewDecoder(bytes.NewReader(p))\n\tdec.UseNumber()\n\tif err := dec.Decode(&spec); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &spec, nil\n}", "func (f *FSEIDFields) UnmarshalBinary(b []byte) error {\n\tl := len(b)\n\tif l < 2 {\n\t\treturn io.ErrUnexpectedEOF\n\t}\n\n\tf.Flags = b[0]\n\toffset := 1\n\n\tif f.HasChID() || f.HasCh() {\n\t\tif f.HasIPv4() {\n\t\t\tif l < offset+4 {\n\t\t\t\treturn io.ErrUnexpectedEOF\n\t\t\t}\n\t\t\tf.IPv4Address = net.IP(b[offset : offset+4])\n\t\t\toffset += 4\n\t\t}\n\t\tif f.HasIPv6() {\n\t\t\tif l < offset+16 {\n\t\t\t\treturn io.ErrUnexpectedEOF\n\t\t\t}\n\t\t\tf.IPv6Address = net.IP(b[offset : offset+16])\n\t\t\toffset += 
16\n\t\t}\n\n\t\tif l <= offset {\n\t\t\treturn nil\n\t\t}\n\n\t\tf.ChooseID = b[offset:]\n\t\treturn nil\n\t}\n\n\tif l < offset+4 {\n\t\treturn nil\n\t}\n\tf.SEID = binary.BigEndian.Uint64(b[offset : offset+8])\n\toffset += 8\n\n\tif f.HasIPv4() {\n\t\tif l < offset+4 {\n\t\t\treturn io.ErrUnexpectedEOF\n\t\t}\n\t\tf.IPv4Address = net.IP(b[offset : offset+4])\n\t\toffset += 4\n\t}\n\tif f.HasIPv6() {\n\t\tif l < offset+16 {\n\t\t\treturn io.ErrUnexpectedEOF\n\t\t}\n\t\tf.IPv6Address = net.IP(b[offset : offset+16])\n\t}\n\n\treturn nil\n}", "func (r *Runtime) ParseModule(wasmBytes []byte) (*Module, error) {\n\treturn r.cfg.Environment.ParseModule(wasmBytes)\n}", "func (r *CAA) Parse(i string) error {\n\ts := strings.SplitN(i, \" \", 3)\n\tif len(s) != 3 {\n\t\treturn errors.New(\"CAA records need to have the format \\\"Flags Tag Value\\\"\")\n\t}\n\tf, err := strconv.ParseUint(s[0], 10, 8)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif f != 128 {\n\t\treturn errors.New(\"only first bit of flag may be set\")\n\t}\n\tr.Flags = uint8(f)\n\tif len(s[1]) > 255 {\n\t\treturn errors.New(\"tag length may not exceed 255 characters\")\n\t}\n\tr.Tag = s[1]\n\tr.Value = s[2]\n\treturn nil\n}", "func (d *Decoder) parseLine(line []byte) ([]pair, error) {\n\tvar add pair\n\tvar beg, offset int64\n\tvar esc bool\n\n\tstate := make(scanState, 0, 3)\n\tbuf := bytes.NewReader(line)\n\n\tfor r, sz, err := buf.ReadRune(); err == nil; r, sz, err = buf.ReadRune() {\n\t\tif r == 0xFFFD && sz == 1 {\n\t\t\treturn nil, errBadUnicode(line, offset)\n\t\t}\n\t\tswitch state.top() {\n\t\tcase scanNone:\n\t\t\tif unicode.IsSpace(r) {\n\t\t\t\t// skip\n\t\t\t} else if unicode.IsLetter(r) || unicode.IsNumber(r) {\n\t\t\t\tstate.push(scanAttr)\n\t\t\t\tbeg = offset\n\t\t\t} else {\n\t\t\t\treturn nil, errBadAttr(line, offset)\n\t\t\t}\n\t\tcase scanAttr:\n\t\t\tif unicode.IsSpace(r) {\n\t\t\t\tadd.attr = line[beg:offset]\n\t\t\t\td.pairbuf = append(d.pairbuf, add)\n\t\t\t\tif _, ok := 
d.attrs[string(add.attr)]; ok {\n\t\t\t\t\td.havemulti = true\n\t\t\t\t\td.multi[string(add.attr)] = struct{}{}\n\t\t\t\t} else {\n\t\t\t\t\td.attrs[string(add.attr)] = struct{}{}\n\t\t\t\t}\n\t\t\t\tadd.attr, add.val, esc = nil, nil, false\n\t\t\t\tstate.pop()\n\t\t\t} else if r == '=' {\n\t\t\t\tadd.attr = line[beg:offset]\n\t\t\t\tif _, ok := d.attrs[string(add.attr)]; ok {\n\t\t\t\t\td.havemulti = true\n\t\t\t\t\td.multi[string(add.attr)] = struct{}{}\n\t\t\t\t} else {\n\t\t\t\t\td.attrs[string(add.attr)] = struct{}{}\n\t\t\t\t}\n\t\t\t\tstate.pop()\n\t\t\t\tstate.push(scanValueStart)\n\t\t\t} else if !(r == '-' || unicode.IsLetter(r) || unicode.IsNumber(r)) {\n\t\t\t\treturn nil, errBadAttr(line, offset)\n\t\t\t}\n\t\tcase scanValueStart:\n\t\t\tbeg = offset\n\t\t\tstate.pop()\n\t\t\tstate.push(scanValue)\n\n\t\t\tif r == '\\'' {\n\t\t\t\tstate.push(scanQuoteStart)\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tfallthrough\n\t\tcase scanValue:\n\t\t\tif unicode.IsSpace(r) {\n\t\t\t\tstate.pop()\n\t\t\t\tadd.val = line[beg:offset]\n\t\t\t\tif esc {\n\t\t\t\t\tadd.val = bytes.Replace(add.val, []byte(\"''\"), []byte(\"'\"), -1)\n\t\t\t\t}\n\t\t\t\td.pairbuf = append(d.pairbuf, add)\n\t\t\t\tadd.attr, add.val = nil, nil\n\t\t\t}\n\t\tcase scanQuoteClose:\n\t\t\tstate.pop()\n\t\t\tif r == '\\'' {\n\t\t\t\tesc = true\n\t\t\t\tstate.push(scanQuoteValue)\n\t\t\t} else if unicode.IsSpace(r) {\n\t\t\t\tstate.pop()\n\t\t\t\tadd.val = line[beg : offset-1]\n\t\t\t\tif esc {\n\t\t\t\t\tadd.val = bytes.Replace(add.val, []byte(\"''\"), []byte(\"'\"), -1)\n\t\t\t\t}\n\t\t\t\td.pairbuf = append(d.pairbuf, add)\n\t\t\t\tadd.attr, add.val, esc = nil, nil, false\n\t\t\t} else {\n\t\t\t\treturn nil, errMissingSpace(line, offset)\n\t\t\t}\n\t\tcase scanQuoteStart:\n\t\t\tstate.pop()\n\t\t\tif r != '\\'' {\n\t\t\t\tbeg++\n\t\t\t\tstate.pop()\n\t\t\t\tstate.push(scanQuoteValue)\n\t\t\t} else {\n\t\t\t\tesc = true\n\t\t\t}\n\t\tcase scanQuoteValue:\n\t\t\tif r == '\\'' 
{\n\t\t\t\tstate.pop()\n\t\t\t\tstate.push(scanQuoteClose)\n\t\t\t} else if r == '\\n' {\n\t\t\t\treturn nil, errUnterminated(line, offset)\n\t\t\t}\n\t\t}\n\t\toffset += int64(sz)\n\t}\n\tswitch state.top() {\n\tcase scanQuoteValue, scanQuoteStart:\n\t\treturn nil, errUnterminated(line, offset)\n\tcase scanAttr:\n\t\tadd.attr = line[beg:offset]\n\t\tif _, ok := d.attrs[string(add.attr)]; ok {\n\t\t\td.havemulti = true\n\t\t\td.multi[string(add.attr)] = struct{}{}\n\t\t} else {\n\t\t\td.attrs[string(add.attr)] = struct{}{}\n\t\t}\n\t\td.pairbuf = append(d.pairbuf, add)\n\tcase scanValueStart:\n\t\tbeg = offset\n\t\tfallthrough\n\tcase scanQuoteClose:\n\t\toffset--\n\t\tfallthrough\n\tcase scanValue:\n\t\tadd.val = line[beg:offset]\n\t\tif esc {\n\t\t\tadd.val = bytes.Replace(add.val, []byte(\"''\"), []byte(\"'\"), -1)\n\t\t}\n\t\td.pairbuf = append(d.pairbuf, add)\n\t}\n\treturn d.pairbuf, nil\n}", "func Parse(b []byte) (UUID, error) {\n\tswitch len(b) {\n\tcase 16:\n\t\tvar u UUID\n\t\tcopy(u[:], b)\n\t\treturn u, nil\n\tcase 32:\n\t\tvar u UUID\n\t\t_, err := hex.Decode(u[:], b)\n\t\tif err != nil {\n\t\t\treturn u, ErrInvalidUUID\n\t\t}\n\t\treturn u, nil\n\tcase 36:\n\t\treturn parseFormatted(b)\n\tdefault:\n\t\treturn UUID{}, ErrInvalidUUID\n\t}\n}", "func (op *OptVendorOpts) FromBytes(data []byte) error {\n\tbuf := uio.NewBigEndianBuffer(data)\n\top.EnterpriseNumber = buf.Read32()\n\tif err := op.VendorOpts.FromBytesWithParser(buf.ReadAll(), vendParseOption); err != nil {\n\t\treturn err\n\t}\n\treturn buf.FinError()\n}", "func (ioctl *FuseIoctlIn) ParseBinary(bcontent []byte) error {\n\n\tlength := len(bcontent)\n\n\tif length < 32 {\n\t\treturn ErrDataLen\n\t}\n\n\tcommon.ParseBinary(bcontent[:8], &ioctl.Fh)\n\tcommon.ParseBinary(bcontent[8:12], &ioctl.Flags)\n\tcommon.ParseBinary(bcontent[12:16], &ioctl.Cmd)\n\tcommon.ParseBinary(bcontent[16:24], &ioctl.Arg)\n\tcommon.ParseBinary(bcontent[24:28], &ioctl.InSize)\n\tcommon.ParseBinary(bcontent[28:32], 
&ioctl.OutSize)\n\n\tioctl.InBuf = bcontent[32:]\n\n\treturn nil\n}", "func Parse(str string) (hash Hash, err error) {\n\terr = errHashFormat\n\tif len(str) != 2*Size {\n\t\treturn\n\t}\n\tfor i := range hash {\n\t\ta := unhex(str[2*i])\n\t\tb := unhex(str[2*i+1])\n\t\tif a == 255 || b == 255 {\n\t\t\treturn\n\t\t}\n\t\thash[i] = a<<4 | b\n\t}\n\terr = nil\n\treturn\n}", "func (a *Parser) parseBytes(b []byte) error {\n\tnvp := newNameValParser(a, b)\n\tvar name, value *symval\n\tvar err error\n\tfor {\n\t\tname, value, err = nvp.next()\n\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif name == nil && value == nil {\n\t\t\tbreak\n\t\t}\n\n\t\tif name == nil {\n\t\t\t// standalone name or value\n\t\t\tif a.isStandaloneBoolParameter(value) {\n\t\t\t\t// standalone name\n\t\t\t\tname, value = value, &symval{resolved: true, s: \"true\"}\n\t\t\t} else {\n\t\t\t\t// standalone value\n\t\t\t\tif _, ok := a.params[\"\"]; !ok {\n\t\t\t\t\treturn fmt.Errorf(`unexpected standalone value: \"%s\"`, value.s)\n\t\t\t\t}\n\t\t\t\t// the famous empty name\n\t\t\t\tname = &symval{resolved: true, s: \"\"}\n\t\t\t}\n\t\t}\n\n\t\t// assert name != nil && value != nil\n\n\t\toperator := a.operator(name.s)\n\t\tif operator != nil {\n\t\t\terr := operator.handle(value.s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else {\n\t\t\terr := a.setValue(name, value)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func (s Software) AddTo(m *Message) error {\n\treturn TextAttribute(s).AddToAs(m, AttrSoftware, softwareRawMaxB)\n}", "func (header *FuseInHeader) ParseBinary(bcontent []byte) error {\n\terr := common.ParseBinary(bcontent, header)\n\n\treturn err\n}", "func (c *Client) parseStationInfo(b []byte) (*StationInfo, error) {\n\tattrs, err := netlink.UnmarshalAttributes(b)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar info StationInfo\n\tfor _, a := range attrs {\n\t\tswitch a.Type {\n\t\tcase 
nl80211.AttrMac:\n\t\t\tcopy(info.HardwareAddr[:], a.Data)\n\n\t\tcase nl80211.AttrStaInfo:\n\t\t\tnattrs, err := netlink.UnmarshalAttributes(a.Data)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tif err := (&info).parseAttributes(nattrs); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\t// nl80211.AttrStaInfo is last attribute we are interested in\n\t\t\treturn &info, nil\n\n\t\tcase nl80211.AttrIfindex:\n\t\t\tifaceIndex := int(nlenc.Uint32(a.Data))\n\t\t\tiface, ok := c.interfaces[ifaceIndex]\n\t\t\tif !ok {\n\t\t\t\tiface.Index = ifaceIndex\n\t\t\t}\n\t\t\tinfo.Iface = *iface\n\n\t\tdefault:\n\t\t\t// The other attributes that are returned here: nl80211.AttrGeneration\n\t\t\t// No need to parse them for now.\n\t\t\tcontinue\n\t\t}\n\t}\n\n\t// No station info found\n\treturn nil, os.ErrNotExist\n}", "func (tag *CustomSegmentTag) Decode(line string) (m3u8.CustomTag, error) {\n\tvar err error\n\n\t// Since this is a Segment tag, we want to create a new tag every time it is decoded\n\t// as there can be one for each segment with\n\tnewTag := new(CustomSegmentTag)\n\n\tfor k, v := range m3u8.DecodeAttributeList(line[20:]) {\n\t\tswitch k {\n\t\tcase \"NAME\":\n\t\t\tnewTag.Name = v\n\t\tcase \"JEDI\":\n\t\t\tif v == \"YES\" {\n\t\t\t\tnewTag.Jedi = true\n\t\t\t} else if v == \"NO\" {\n\t\t\t\tnewTag.Jedi = false\n\t\t\t} else {\n\t\t\t\terr = errors.New(\"Valid strings for JEDI attribute are YES and NO.\")\n\t\t\t}\n\t\t}\n\t}\n\n\treturn newTag, err\n}", "func(r *Runtime) ParseModule(wasmBytes []byte) (*Module, error) {\n\treturn r.cfg.Environment.ParseModule(wasmBytes)\n}", "func (rename *FuseRename2In) ParseBinary(bcontent []byte) error {\n\n\tlength := len(bcontent)\n\n\tif length < 16 {\n\t\treturn ErrDataLen\n\t}\n\n\tcommon.ParseBinary(bcontent[:8], &rename.NewDir)\n\tcommon.ParseBinary(bcontent[8:12], &rename.Flags)\n\tcommon.ParseBinary(bcontent[12:16], &rename.Padding)\n\n\tarray := bytes.Split(bcontent[16:], []byte{0})\n\n\tif 
len(array) < 2 {\n\t\treturn ErrDataLen\n\t}\n\n\trename.OldName = string(array[0])\n\trename.NewName = string(array[1])\n\n\treturn nil\n}", "func ManufacturerData(id uint16, b []byte) Field {\n\treturn func(p *Packet) error {\n\t\td := append([]byte{uint8(id), uint8(id >> 8)}, b...)\n\t\treturn p.append(manufacturerData, d)\n\t}\n}", "func (e *Extras) FromString(str string) (err error) {\n\tattrs := strings.Split(str, \":\")\n\n\tss, err := strconv.Atoi(attrs[0])\n\tif err != nil {\n\t\treturn err\n\t}\n\te.SampleSet = SampleSet(ss)\n\n\tss, err = strconv.Atoi(attrs[1])\n\tif err != nil {\n\t\treturn err\n\t}\n\te.AdditionalSet = SampleSet(ss)\n\n\te.CustomIndex, err = strconv.Atoi(attrs[2])\n\tif err != nil {\n\t\treturn err\n\t}\n\n\te.SampleVolume, err = strconv.Atoi(attrs[3])\n\te.Filename = attrs[4]\n\treturn err\n}", "func parseDevstat(buf []byte) (Devstat, error) {\n\tvar ds Devstat\n\tbr := bytes.NewReader(buf)\n\t//\terr := binary.Read(br, binary.LittleEndian, &ds)\n\terr := common.Read(br, binary.LittleEndian, &ds)\n\tif err != nil {\n\t\treturn ds, err\n\t}\n\n\treturn ds, nil\n}", "func (c *StickersCreateStickerSetRequest) GetSoftware() (value string, ok bool) {\n\tif c == nil {\n\t\treturn\n\t}\n\tif !c.Flags.Has(3) {\n\t\treturn value, false\n\t}\n\treturn c.Software, true\n}", "func ParseBinary(binary string) (int, error) {\n\tvar value int\n\tfor p, bit := range reverse(binary) {\n\t\tsgn := 0\n\t\tif bit != '0' && bit != '1' {\n\t\t\treturn 0, ErrInvalidCharacter\n\t\t} else if bit == '1' {\n\t\t\tsgn = 1\n\t\t}\n\n\t\tvalue += sgn * (1 << uint(p))\n\t}\n\treturn value, nil\n}", "func ExampleAttributeDecoder_decode() {\n\t// Create a netlink.AttributeDecoder using some example attribute bytes\n\t// that are prepared for this example.\n\tad, err := netlink.NewAttributeDecoder(exampleAttributes())\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to create attribute decoder: %v\", err)\n\t}\n\n\t// Iterate attributes until completion, checking the type 
of each and\n\t// decoding them as appropriate.\n\tvar out decodeOut\n\tfor ad.Next() {\n\t\t// Check the type of the current attribute with ad.Type. Typically you\n\t\t// will find netlink attribute types and data values in C headers as\n\t\t// constants.\n\t\tswitch ad.Type() {\n\t\tcase 1:\n\t\t\t// Number is a uint16.\n\t\t\tout.Number = ad.Uint16()\n\t\tcase 2:\n\t\t\t// String is a string.\n\t\t\tout.String = ad.String()\n\t\tcase 3:\n\t\t\t// Nested is a nested structure, so we will use a method on the\n\t\t\t// nested type along with ad.Do to decode it in a concise way.\n\t\t\tad.Nested(out.Nested.decode)\n\t\t}\n\t}\n\n\t// Any errors encountered during decoding (including any errors from\n\t// decoding the nested attributes) will be returned here.\n\tif err := ad.Err(); err != nil {\n\t\tlog.Fatalf(\"failed to decode attributes: %v\", err)\n\t}\n\n\tfmt.Printf(`Number: %d\nString: %q\nNested:\n - A: %d\n - B: %d`,\n\t\tout.Number, out.String, out.Nested.A, out.Nested.B,\n\t)\n\t// Output:\n\t// Number: 1\n\t// String: \"hello world\"\n\t// Nested:\n\t// - A: 2\n\t// - B: 3\n}", "func extractMTI(str string) (spec Spec, mti, rest string, err error) {\n\tmti, rest = str[0:4], str[4:len(str)]\n\n\tspecName := \"\"\n\tswitch string(mti[0]) {\n\tcase \"0\":\n\t\tspecName = \"1987\"\n\tcase \"1\":\n\t\tspecName = \"1993\"\n\tcase \"2\":\n\t\tspecName = \"2003\"\n\tdefault:\n\t\treturn Spec{}, \"\", \"\", fmt.Errorf(\"Invalid mti version %v\", string(mti[0]))\n\t}\n\n\tspec, ok := specs[specName]\n\tif !ok {\n\t\treturn Spec{}, \"\", \"\", fmt.Errorf(\"iso8583:%s is not supported\", specName)\n\t}\n\n\treturn\n}", "func (ut *AddFirmwarePayload) Validate() (err error) {\n\tif ut.Etag == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"etag\"))\n\t}\n\tif ut.Module == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"module\"))\n\t}\n\tif ut.Profile == \"\" {\n\t\terr = goa.MergeErrors(err, 
goa.MissingAttributeError(`type`, \"profile\"))\n\t}\n\tif ut.URL == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"url\"))\n\t}\n\tif ut.Meta == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"meta\"))\n\t}\n\treturn\n}", "func ParseRemoteSecretBytes(secretBytes []byte) (*RemoteSecret, error) {\n\tsecret := &RemoteSecret{}\n\terr := yaml.Unmarshal(secretBytes, &secret)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn secret, nil\n}", "func NewAttributePair(attributeVals []string, attrOwner *AttributeOwner) (*AttributePair, error) {\n\tif len(attributeVals) < 6 {\n\t\treturn nil, errors.New(\"Invalid attribute entry\")\n\t}\n\tvar attrPair = *new(AttributePair)\n\tif attrOwner != nil {\n\t\tattrPair.SetOwner(attrOwner)\n\t} else {\n\t\tattrPair.SetOwner(&AttributeOwner{strings.TrimSpace(attributeVals[0]), strings.TrimSpace(attributeVals[1])})\n\t}\n\tattrPair.SetAttributeName(strings.TrimSpace(attributeVals[2]))\n\tattrPair.SetAttributeValue([]byte(strings.TrimSpace(attributeVals[3])))\n\t//Reading validFrom date\n\tdateStr := strings.TrimSpace(attributeVals[4])\n\tif dateStr != \"\" {\n\t\tvar t time.Time\n\t\tvar err error\n\t\tif t, err = time.Parse(time.RFC3339, dateStr); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tattrPair.SetValidFrom(t)\n\t}\n\t//Reading validTo date\n\tdateStr = strings.TrimSpace(attributeVals[5])\n\tif dateStr != \"\" {\n\t\tvar t time.Time\n\t\tvar err error\n\t\tif t, err = time.Parse(time.RFC3339, dateStr); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tattrPair.SetValidTo(t)\n\t}\n\treturn &attrPair, nil\n}", "func (p *parser) parseAttributeSelector() (Selector, error) {\n\tif p.i >= len(p.s) {\n\t\treturn nil, fmt.Errorf(\"expected attribute selector ([attribute]), found EOF instead\")\n\t}\n\tif p.s[p.i] != '[' {\n\t\treturn nil, fmt.Errorf(\"expected attribute selector ([attribute]), found '%c' instead\", p.s[p.i])\n\t}\n\n\tp.i++\n\tp.skipWhitespace()\n\tkey, err := 
p.parseIdentifier()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tp.skipWhitespace()\n\tif p.i >= len(p.s) {\n\t\treturn nil, errors.New(\"unexpected EOF in attribute selector\")\n\t}\n\n\tif p.s[p.i] == ']' {\n\t\tp.i++\n\t\treturn attributeExistsSelector(key), nil\n\t}\n\n\tif p.i+2 >= len(p.s) {\n\t\treturn nil, errors.New(\"unexpected EOF in attribute selector\")\n\t}\n\n\top := p.s[p.i : p.i+2]\n\tif op[0] == '=' {\n\t\top = \"=\"\n\t} else if op[1] != '=' {\n\t\treturn nil, fmt.Errorf(`expected equality operator, found \"%s\" instead`, op)\n\t}\n\tp.i += len(op)\n\n\tp.skipWhitespace()\n\tif p.i >= len(p.s) {\n\t\treturn nil, errors.New(\"unexpected EOF in attribute selector\")\n\t}\n\tvar val string\n\tvar rx *regexp.Regexp\n\tif op == \"#=\" {\n\t\trx, err = p.parseRegex()\n\t} else {\n\t\tswitch p.s[p.i] {\n\t\tcase '\\'', '\"':\n\t\t\tval, err = p.parseString()\n\t\tdefault:\n\t\t\tval, err = p.parseIdentifier()\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tp.skipWhitespace()\n\tif p.i >= len(p.s) {\n\t\treturn nil, errors.New(\"unexpected EOF in attribute selector\")\n\t}\n\tif p.s[p.i] != ']' {\n\t\treturn nil, fmt.Errorf(\"expected ']', found '%c' instead\", p.s[p.i])\n\t}\n\tp.i++\n\n\tswitch op {\n\tcase \"=\":\n\t\treturn attributeEqualsSelector(key, val), nil\n\tcase \"!=\":\n\t\treturn attributeNotEqualSelector(key, val), nil\n\tcase \"~=\":\n\t\treturn attributeIncludesSelector(key, val), nil\n\tcase \"|=\":\n\t\treturn attributeDashmatchSelector(key, val), nil\n\tcase \"^=\":\n\t\treturn attributePrefixSelector(key, val), nil\n\tcase \"$=\":\n\t\treturn attributeSuffixSelector(key, val), nil\n\tcase \"*=\":\n\t\treturn attributeSubstringSelector(key, val), nil\n\tcase \"#=\":\n\t\treturn attributeRegexSelector(key, rx), nil\n\t}\n\n\treturn nil, fmt.Errorf(\"attribute operator %q is not supported\", op)\n}", "func (p *parser) parseAttributeSelector() (Selector, error) {\n\tif p.i >= len(p.s) {\n\t\treturn nil, 
fmt.Errorf(\"expected attribute selector ([attribute]), found EOF instead\")\n\t}\n\tif p.s[p.i] != '[' {\n\t\treturn nil, fmt.Errorf(\"expected attribute selector ([attribute]), found '%c' instead\", p.s[p.i])\n\t}\n\n\tp.i++\n\tp.skipWhitespace()\n\tkey, err := p.parseIdentifier()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tp.skipWhitespace()\n\tif p.i >= len(p.s) {\n\t\treturn nil, errors.New(\"unexpected EOF in attribute selector\")\n\t}\n\n\tif p.s[p.i] == ']' {\n\t\tp.i++\n\t\treturn attributeExistsSelector(key), nil\n\t}\n\n\tif p.i+2 >= len(p.s) {\n\t\treturn nil, errors.New(\"unexpected EOF in attribute selector\")\n\t}\n\n\top := p.s[p.i : p.i+2]\n\tif op[0] == '=' {\n\t\top = \"=\"\n\t} else if op[1] != '=' {\n\t\treturn nil, fmt.Errorf(`expected equality operator, found \"%s\" instead`, op)\n\t}\n\tp.i += len(op)\n\n\tp.skipWhitespace()\n\tif p.i >= len(p.s) {\n\t\treturn nil, errors.New(\"unexpected EOF in attribute selector\")\n\t}\n\tvar val string\n\tvar rx *regexp.Regexp\n\tif op == \"#=\" {\n\t\trx, err = p.parseRegex()\n\t} else {\n\t\tswitch p.s[p.i] {\n\t\tcase '\\'', '\"':\n\t\t\tval, err = p.parseString()\n\t\tdefault:\n\t\t\tval, err = p.parseIdentifier()\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tp.skipWhitespace()\n\tif p.i >= len(p.s) {\n\t\treturn nil, errors.New(\"unexpected EOF in attribute selector\")\n\t}\n\tif p.s[p.i] != ']' {\n\t\treturn nil, fmt.Errorf(\"expected ']', found '%c' instead\", p.s[p.i])\n\t}\n\tp.i++\n\n\tswitch op {\n\tcase \"=\":\n\t\treturn attributeEqualsSelector(key, val), nil\n\tcase \"!=\":\n\t\treturn attributeNotEqualSelector(key, val), nil\n\tcase \"~=\":\n\t\treturn attributeIncludesSelector(key, val), nil\n\tcase \"|=\":\n\t\treturn attributeDashmatchSelector(key, val), nil\n\tcase \"^=\":\n\t\treturn attributePrefixSelector(key, val), nil\n\tcase \"$=\":\n\t\treturn attributeSuffixSelector(key, val), nil\n\tcase \"*=\":\n\t\treturn attributeSubstringSelector(key, val), 
nil\n\tcase \"#=\":\n\t\treturn attributeRegexSelector(key, rx), nil\n\t}\n\n\treturn nil, fmt.Errorf(\"attribute operator %q is not supported\", op)\n}", "func (interrupt *FuseInterruptIn) ParseBinary(bcontent []byte) error {\n\n\tlength := len(bcontent)\n\n\tif length < 8 {\n\t\treturn ErrDataLen\n\t}\n\n\terr := common.ParseBinary(bcontent, interrupt)\n\n\treturn err\n}", "func (platform *Platform) ParseOSXVersion(versionData string) {\n\tlines := strings.Split(versionData, \"\\n\")\n\tfor _, l := range lines {\n\t\ts := strings.Split(l, \":\\t\")\n\t\tif len(s) == 2 {\n\t\t\tswitch s[0] {\n\t\t\tcase \"ProductName\":\n\t\t\t\tplatform.Name = s[1]\n\t\t\tcase \"ProductVersion\":\n\t\t\t\tplatform.Version = s[1]\n\t\t\tcase \"BuildVersion\":\n\t\t\t\tplatform.Build = s[1]\n\t\t\t}\n\t\t}\n\t}\n}", "func (i SNSSubscribeAttribute) ParseByKey(s string) (SNSSubscribeAttribute, error) {\n\tfor k, v := range _SNSSubscribeAttributeValueToKeyMap {\n\t\tif v == s {\n\t\t\t// parse ok\n\t\t\treturn k, nil\n\t\t}\n\t}\n\n\t// error\n\treturn -1, fmt.Errorf(\"Enum Key of %s Not Expected In SNSSubscribeAttribute Keys List\", s)\n}", "func (f *STAGFields) UnmarshalBinary(b []byte) error {\n\tl := len(b)\n\tif l < 3 {\n\t\treturn io.ErrUnexpectedEOF\n\t}\n\n\tf.Flags = b[0]\n\toffset := 1\n\n\tif f.HasPCP() {\n\t\tf.PCP = b[offset] & 0x07\n\t}\n\n\tif f.HasDEI() {\n\t\tf.DEIFlag = (b[offset] >> 3) & 0x01\n\t}\n\n\tif f.HasVID() {\n\t\tf.CVID = binary.BigEndian.Uint16(b[offset:offset+2]) & 0xf0ff\n\t}\n\n\treturn nil\n}", "func ParseBytes(data []byte) (Config, error) {\n\tc, err := parseConfig(data)\n\tif err != nil {\n\t\treturn Config{}, err\n\t}\n\treturn c, nil\n}", "func ParseSchemaLine(line string) (key string, t *SchemaType) {\n\tkeyValArr := strings.SplitN(line, \"=\", 2) // Split between key and value\n\n\tif len(keyValArr) != 2 { // Not a key=val\n\t\treturn\n\t}\n\n\tkey = keyValArr[0] // Set key to first position in array\n\trawVal := keyValArr[1] // Set our raw 
value\n\n\tparsedSt, parseErr := NewSchemaType(rawVal) // Attempt to parse our \"raw\" value to a SchemaType\n\n\tif parseErr == nil {\n\t\tt = parsedSt\n\t}\n\n\treturn\n}", "func DecodeAttributeList(b []byte) (AttributeList, []byte, error) {\n\tam := make(map[int]*Attribute)\n\n\tfor {\n\t\tatt, br, err := DecodeAttribute(b)\n\t\tif err == netlink.ErrNoData {\n\t\t\tbreak\n\t\t} else if err != nil {\n\t\t\treturn nil, b, err\n\t\t}\n\t\tam[int(att.Type)] = att\n\t\tb = br\n\t}\n\n\treturn AttributeList(am), b, nil\n}", "func ParseBytes(b []byte) ([]byte, error) {\n\treturn ParseString(string(b))\n}", "func Parse(val string) (info Info, err error) {\n\tvar ival int64\n\n\tswitch val {\n\tcase \"b\", \"bool\":\n\t\tinfo = Bool\n\t\treturn\n\n\tcase \"byte\":\n\t\tinfo = Byte\n\t\treturn\n\n\tcase \"rune\":\n\t\tinfo = Rune\n\t\treturn\n\t}\n\n\tm := reSized.FindStringSubmatch(val)\n\tif m != nil {\n\t\tswitch m[1] {\n\t\tcase \"i\", \"int\":\n\t\t\tinfo.Type = TInt\n\n\t\tcase \"u\", \"uint\":\n\t\t\tinfo.Type = TUint\n\n\t\tcase \"s\", \"string\":\n\t\t\tinfo.Type = TString\n\n\t\tdefault:\n\t\t\treturn info, fmt.Errorf(\"unknown type: %s\", val)\n\t\t}\n\t\tvar bits int64\n\t\tif len(m[2]) > 0 {\n\t\t\tbits, err = strconv.ParseInt(m[2], 10, 32)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tinfo.Bits = Size(bits)\n\t\tinfo.MinBits = info.Bits\n\t\treturn\n\t}\n\n\tm = reArr.FindStringSubmatch(val)\n\tif m == nil {\n\t\treturn info, fmt.Errorf(\"unknown type: %s\", val)\n\t}\n\tvar elType Info\n\telType, err = Parse(m[2])\n\tif err != nil {\n\t\treturn\n\t}\n\tival, err = strconv.ParseInt(m[1], 10, 32)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tinfo.Type = TArray\n\tinfo.Bits = Size(ival) * elType.Bits\n\tinfo.MinBits = info.Bits\n\tinfo.ElementType = &elType\n\tinfo.ArraySize = Size(ival)\n\n\treturn\n}", "func ParseBytes(content []byte) (Metalink, error) {\n\tmetafile := Metalink{}\n\n\terr := xml.Unmarshal(content, &metafile)\n\tif err != nil 
{\n\t\treturn metafile, err\n\t}\n\treturn metafile, nil\n}", "func ParseGetPbxDeviceFirmwareitemsResponse(rsp *http.Response) (*GetPbxDeviceFirmwareitemsResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &GetPbxDeviceFirmwareitemsResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest []PbxDeviceFirmwares2\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (pd *ProductData) UnmarshalBinary(buf []byte) error {\n\tif len(buf) < 14 {\n\t\treturn newBufError(ErrBufferTooShort, 14, len(buf))\n\t}\n\n\tcopy(pd.Key[:], buf[1:4])\n\tcopy(pd.DevCat[:], buf[4:6])\n\treturn nil\n}", "func (forget *FuseForgetIn) ParseBinary(bcontent []byte) error {\n\terr := common.ParseBinary(bcontent, forget)\n\n\treturn err\n}", "func FromBytes(bytes []byte) (Manifest, error) {\n\tmanifest := &manifest{}\n\n\t// Preserve the raw manifest so that manifest.Bytes() returns bytes in\n\t// the same order that they were passed to this function\n\tmanifest.raw = make([]byte, len(bytes))\n\tcopy(manifest.raw, bytes)\n\n\tsigned, _ := clearsign.Decode(bytes)\n\tif signed != nil {\n\t\tsignature, err := ioutil.ReadAll(signed.ArmoredSignature.Body)\n\t\tif err != nil {\n\t\t\treturn nil, util.Errorf(\"Could not read signature from pod manifest: %s\", err)\n\t\t}\n\t\tmanifest.signature = signature\n\n\t\t// the original plaintext is in signed.Plaintext, but the signature\n\t\t// corresponds to signed.Bytes, so that's what we need to save\n\t\tmanifest.plaintext = signed.Bytes\n\n\t\t// parse YAML from the message's plaintext instead\n\t\tbytes = signed.Plaintext\n\t}\n\n\tif err := yaml.Unmarshal(bytes, manifest); err != nil {\n\t\treturn nil, 
util.Errorf(\"Could not read pod manifest: %s\", err)\n\t}\n\tif err := ValidManifest(manifest); err != nil {\n\t\treturn nil, util.Errorf(\"invalid manifest: %s\", err)\n\t}\n\treturn manifest, nil\n}", "func ParseRemoveAddressFrame(r *bytes.Reader, version protocol.VersionNumber) (*RemoveAddressFrame, error) {\n\tframe := &RemoveAddressFrame{}\n\n\t// read the TypeByte\n\tif _, err := r.ReadByte(); err != nil {\n\t\treturn nil, err\n\t}\n\taid, err := r.ReadByte()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tframe.AddrID = protocol.AddressID(aid)\n\treturn frame, nil\n}", "func (cuseInit *CuseInitIn) ParseBinary(bcontent []byte) error {\n\n\tlength := len(bcontent)\n\n\tif length < 16 {\n\t\treturn ErrDataLen\n\t}\n\n\terr := common.ParseBinary(bcontent, cuseInit)\n\n\treturn err\n}", "func (p *MaxiiotPayload) Unmarshal(b []byte) (err error) {\n\tdefer func() {\n\t\tif res := recover(); res != nil {\n\t\t\terr = fmt.Errorf(\"panic: %v\", res)\n\t\t}\n\t}()\n\n\tlength := len(b)\n\tif length < 9 {\n\t\treturn errors.New(\"unspoorts maxiiot device protocol\")\n\t}\n\tflag := 0\n\tp.Header = b[flag]\n\tflag++\n\ttc := &TransCode{}\n\ttc.Unmarshal(b[flag])\n\tp.TransCode = tc\n\tflag++\n\tcopy(p.DeviceID[:], b[flag:flag+2])\n\tflag += 2\n\tswitch p.DeviceID {\n\tcase [2]byte{0x00, 0x06}:\n\t\tsmoke := &Smoke{}\n\t\tif err := smoke.Unmarshal(b[flag : length-2]); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tp.SensorData = smoke\n\tdefault:\n\t\treturn errors.New(\"unspoorts maxiiot device protocol\")\n\t}\n\n\treturn nil\n}", "func DescriptorFromBytes(data []byte) (*Descriptor, error) {\n\tvar d Descriptor\n\tif err := json.Unmarshal(data, &d); err != nil {\n\t\treturn nil, fmt.Errorf(\"descriptor: parsing failed: %v\", err)\n\t}\n\treturn &d, nil\n}", "func ParseReplacechangeaspecificPbxDeviceFirmwareResponse(rsp *http.Response) (*ReplacechangeaspecificPbxDeviceFirmwareResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif 
err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &ReplacechangeaspecificPbxDeviceFirmwareResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest PbxDeviceFirmwares\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func parseLine(lineStr string) (Check, error) {\n\tcolumns := strings.Split(lineStr, \"|\")\n\tts, _ := strconv.ParseInt(columns[0], 10, 32)\n\n\treturn Check{ts, parseMeasurements(columns[1])}, nil\n}" ]
[ "0.6094672", "0.49585745", "0.49561727", "0.47174284", "0.4682577", "0.4675515", "0.4654408", "0.46499464", "0.45818946", "0.4525342", "0.45146385", "0.44944", "0.44848514", "0.44841257", "0.44420215", "0.44036722", "0.4384886", "0.43354794", "0.42904556", "0.42374355", "0.42350456", "0.4214246", "0.4190772", "0.41788664", "0.41753167", "0.4086966", "0.4084982", "0.4077412", "0.40750042", "0.40656254", "0.40528527", "0.4050024", "0.40395787", "0.4031215", "0.40243012", "0.40047845", "0.40037388", "0.40010795", "0.3997427", "0.3989618", "0.39889324", "0.398796", "0.39780492", "0.39762127", "0.39610803", "0.39489335", "0.39471012", "0.39451477", "0.3943798", "0.39232725", "0.3923177", "0.39174002", "0.39102903", "0.39004886", "0.38908502", "0.3887958", "0.38854954", "0.38836464", "0.38829175", "0.3880359", "0.38734293", "0.38718042", "0.38571918", "0.3854778", "0.38524354", "0.3850718", "0.38441285", "0.38416713", "0.38329434", "0.38159278", "0.38040376", "0.38013247", "0.3799827", "0.37985405", "0.37918264", "0.37884173", "0.37851453", "0.3784139", "0.37730485", "0.37730485", "0.37702316", "0.37689126", "0.37609148", "0.37605262", "0.37482718", "0.3743022", "0.3742555", "0.37422183", "0.37360024", "0.37341127", "0.37321505", "0.37319857", "0.37258038", "0.3716615", "0.37157127", "0.3711359", "0.37103945", "0.37047586", "0.37016013", "0.3699217" ]
0.82919925
0
false: false, "", 0, "false", "off", empty slice/map
func Bool(i interface{}) bool { if i == nil { return false } if v, ok := i.(bool); ok { return v } if s, ok := i.(string); ok { if _, ok := emptyStringMap[s]; ok { return false } return true } rv := reflect.ValueOf(i) switch rv.Kind() { case reflect.Ptr: return !rv.IsNil() case reflect.Map: fallthrough case reflect.Array: fallthrough case reflect.Slice: return rv.Len() != 0 case reflect.Struct: return true default: s := String(i) if _, ok := emptyStringMap[s]; ok { return false } return true } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func slice_literals() {\n tmp := []bool{true, true, false}\n fmt.Println(tmp)\n tmp[0] = false\n fmt.Println(tmp)\n}", "func init() {\n\tSliceOfString = make([]string, 0, 10)\n\tMapOfString = make(map[string]string)\n\tMapOfBool = make(map[string]bool)\n}", "func empty(thing []uint8) bool {\n\tfor _, item := range thing {\n\t\tif item != 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func newStripedBoolSlice(size int, choice bool) []bool {\n\tstriped := make([]bool, size)\n\tfor i := range striped {\n\t\tstriped[i] = choice\n\t\tchoice = !choice\n\t}\n\treturn striped\n}", "func equalNilAndEmpty(key, value string, params Parameter) bool {\n if v, ok := params[key]; ok {\n if v == nil {\n return true\n }\n vs := fmt.Sprintf(\"%v\", v)\n return vs == \"\" ||\n vs == \"0\" ||\n (reflect.TypeOf(v).Kind() == reflect.Bool && v.(bool) == false) ||\n vs == \"[]\" ||\n vs == \"map[]\"\n } else {\n return true\n }\n}", "func Test0() []bool {\n\tvar res0 []bool\n\treturn res0\n}", "func (c *ColBool) Reset() {\n\t*c = (*c)[:0]\n}", "func (t *Dense) bools() []bool { return *(*[]bool)(unsafe.Pointer(t.hdr)) }", "func (mm Uint64Uint64Map) NonEmpty() bool {\n\treturn mm.Size() > 0\n}", "func fnFalse(ctx Context, doc *JDoc, params []string) interface{} {\n\tstats := ctx.Value(EelTotalStats).(*ServiceStats)\n\tif params == nil || params[0] != \"\" {\n\t\tctx.Log().Error(\"error_type\", \"func_false\", \"op\", \"false\", \"cause\", \"wrong_number_of_parameters\", \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"no parameters expected in call to false function\"), \"false\", params})\n\t\treturn \"\"\n\t}\n\treturn false\n}", "func (p *SliceOfMap) RefSlice() bool {\n\treturn false\n}", "func (np *vpoint) zeroed() bool {\n\treturn np.elems == nil\n}", "func (p *SliceOfMap) Nil() bool {\n\treturn p == nil\n}", "func MustToStringMapBool(i interface{}) map[string]bool {\n\tv, _ := ToStringMapBool(i)\n\treturn v\n}", "func (p *SliceOfMap) 
InterSlice() bool {\n\treturn false\n}", "func (t T) Zero() bool { return t.cb == nil }", "func (b *Builder) Empty() bool { return b.sz == 0 }", "func newUniformBoolSlice(size int, choice bool) []bool {\n\tuniform := make([]bool, size)\n\tfor i := range uniform {\n\t\tuniform[i] = choice\n\t}\n\treturn uniform\n}", "func (p *SliceOfMap) Empty() bool {\n\tif p == nil || len(*p) == 0 {\n\t\treturn true\n\t}\n\treturn false\n}", "func encodeMetricBool(item bool) float64 {\n\tif item {\n\t\treturn 1\n\t}\n\treturn 0\n}", "func BenchmarkNoop(b *testing.B) {\n\ttcs := []struct {\n\t\tvaluePtrs bool\n\t\ttopLevel bool\n\t}{\n\t\t{false, false},\n\t\t{false, true},\n\t\t{true, false},\n\t\t{true, true},\n\t}\n\n\tfor _, tc := range tcs {\n\t\tb.Run(fmt.Sprintf(\"%+v\", tc), func(b *testing.B) {\n\t\t\tx, _ := demo.NewContainer(tc.valuePtrs)\n\t\t\tbench(b, x, tc.topLevel)\n\t\t})\n\t}\n}", "func (s SliceType) Null() bool {\n\treturn len(s) == 0\n}", "func (s SliceType) Null() bool {\n\treturn len(s) == 0\n}", "func (m *metricAerospikeNamespaceGeojsonRegionQueryFalsePositive) init() {\n\tm.data.SetName(\"aerospike.namespace.geojson.region_query_false_positive\")\n\tm.data.SetDescription(\"Number of points outside the region.\")\n\tm.data.SetUnit(\"{points}\")\n\tm.data.SetEmptySum()\n\tm.data.Sum().SetIsMonotonic(true)\n\tm.data.Sum().SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)\n}", "func BoolMap(src map[string]*bool) map[string]bool {\n\tdst := make(map[string]bool)\n\tfor k, val := range src {\n\t\tif val != nil {\n\t\t\tdst[k] = *val\n\t\t}\n\t}\n\treturn dst\n}", "func zeroSlice(b *[]byte) {\n\tfor i := range *b {\n\t\t(*b)[i] = 0\n\t}\n}", "func (b Bool) Length() int {\n\treturn 0\n}", "func init() {\n\tdutyTable = [4][8]bool{\n\t\t{false, false, false, false, false, false, false, true},\n\t\t{true, false, false, false, false, false, false, true},\n\t\t{true, false, false, false, false, true, true, true},\n\t\t{false, true, true, true, true, 
true, true, false},\n\t}\n}", "func False(fieldPtr interface{}) Filter {\n\treturn Not(True(fieldPtr))\n}", "func init() {\n\tif unsafe.Sizeof(false) != 1 {\n\t\tpanic(\"nsd20463/atomicbool assumes bools fit in 1 byte\")\n\t}\n}", "func suppressEquivalentTypeStringBoolean(k, old, new string, d *schema.ResourceData) bool {\n\tif old == \"false\" && new == \"0\" {\n\t\treturn true\n\t}\n\tif old == \"true\" && new == \"1\" {\n\t\treturn true\n\t}\n\treturn false\n}", "func WriteBool(buffer []byte, offset int, value bool) {\n if value {\n buffer[offset] = 1\n } else {\n buffer[offset] = 0\n }\n}", "func MaybeAllocateLimitedBoolArray(array []bool, length int) []bool {\n\tif cap(array) < length {\n\t\treturn make([]bool, length)\n\t}\n\tarray = array[:length]\n\tcopy(array, ZeroBoolColumn)\n\treturn array\n}", "func f1() {\n\tvar lmap map[string]bool\n\tfmt.Printf(\"%d\\n\", len(gmap)+1) // 1\n\tfmt.Printf(\"%d\\n\", len(lmap)+2) // 2\n}", "func False(Right) bool {\n\treturn false\n}", "func (sp SynProxy) filled() bool {\n\treturn sp.ISN != 0 || sp.ITS != 0 || sp.TSOff != 0\n}", "func BoolSlice(k string, v []bool) KeyValue {\n\treturn Key(k).BoolSlice(v)\n}", "func Test0(par0 bool) {\n}", "func ToStringMapBool(i interface{}) map[string]bool {\n\treturn cast.ToStringMapBool(i)\n}", "func (n Name) Zero() bool {\n\treturn len(n) == 0\n}", "func (m *Merge) isSettable(v reflect.Value) bool {\n\tswitch v.Kind() {\n\tcase reflect.Array, reflect.String:\n\t\treturn v.Len() != 0\n\tcase reflect.Bool:\n\t\treturn !v.Bool()\n\tcase reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:\n\t\treturn v.Int() != 0\n\tcase reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:\n\t\treturn v.Uint() != 0\n\tcase reflect.Float32, reflect.Float64:\n\t\treturn v.Float() != 0\n\tcase reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:\n\t\treturn !v.IsNil()\n\t}\n\n\treturn true\n}", "func (n Nil) True() bool { return false }", 
"func newAlmostUniformBoolSlice(indexChosen int, size int, choice bool) []bool {\n\talmostUniform := make([]bool, size)\n\tfor i := range almostUniform {\n\t\tif i == indexChosen {\n\t\t\talmostUniform[i] = choice\n\t\t} else {\n\t\t\talmostUniform[i] = !choice\n\t\t}\n\t}\n\treturn almostUniform\n}", "func zeroEffects(effects map[shared.ClientID]shared.Magnitude) bool {\n\tallZero := true\n\tfor _, mag := range effects {\n\t\tallZero = allZero && (mag == 0)\n\t}\n\treturn allZero\n}", "func emptyObjectDiffSuppressFunc(k, old, new string, d *schema.ResourceData) bool {\n\t// When a map inside a list contains only default values without explicit values set by\n\t// the user Terraform inteprets the map as not being present and the array length being\n\t// zero, resulting in bogus update that does nothing. Allow ignoring those.\n\tif old == \"1\" && new == \"0\" && strings.HasSuffix(k, \".#\") {\n\t\treturn true\n\t}\n\n\t// When a field is not set to any value and consequently is null (empty string) but had\n\t// a non-empty parameter before. 
Allow ignoring those.\n\tif new == \"\" && old != \"\" {\n\t\treturn true\n\t}\n\n\t// There is a bug in Terraform 0.11 which interprets \"true\" as \"0\" and \"false\" as \"1\"\n\tif (new == \"0\" && old == \"false\") || (new == \"1\" && old == \"true\") {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func nomatch(arr []int16, ints []int16) []string {\n\n\tdmap := make(map[int16]bool)\n\tfor _, v := range arr {\n\t\tif _, ok := dmap[v]; !ok {\n\t\t\tdmap[v] = true\n\t\t}\n\t}\n\n\tfor _, v := range ints {\n\t\tdelete(dmap, v)\n\t}\n\n\tretval := []string{}\n\n\tfor k := range dmap {\n\t\tretval = append(retval, fmt.Sprintf(\"%d\", k))\n\t}\n\n\treturn retval\n}", "func (s spanGroupSlowest) empty() bool {\n\treturn s == spanGroupSlowest{}\n}", "func EmptyMap[K comparable, V any]() *gocrest.Matcher[map[K]V] {\n\tmatcher := new(gocrest.Matcher[map[K]V])\n\tmatcher.Describe = \"empty value\"\n\tmatcher.Matches = func(actual map[K]V) bool {\n\t\treturn len(actual) == 0\n\t}\n\treturn matcher\n}", "func SliceMethodZero() {\n\tfmt.Println(\"Dilimler\")\n\n\t// Slices\n\tslicesScores := []int{1, 4, 293, 4, 9}\n\tfmt.Println(slicesScores)\n\n\t// Slices Make ile uzunluğu (dilmin boyutu) 10 olan dilim oluşturulur.\n\tslicesScoresMake := make([]int, 10)\n\tfmt.Println(slicesScoresMake)\n\n\t// Uzunluğu 0, Kapasitesi 10 olan bir dilim oluşturmu oluruz.\n\tslicesScoresMakeLength := make([]int, 0, 10)\n\tfmt.Println(slicesScoresMakeLength)\n\n\tslicesScoresMakeExOne := make([]int, 1, 10)\n\tslicesScoresMakeExOne[0] = 9033\n\tfmt.Println(slicesScoresMakeExOne)\n\n\tslicesScoresMakeExTwo := make([]int, 0, 10)\n\tslicesScoresMakeExTwo = append(slicesScoresMakeExTwo, 5)\n\tfmt.Println(slicesScoresMakeExTwo)\n\tslicesScoresMakeExTwo = slicesScoresMakeExTwo[0:8]\n\tslicesScoresMakeExTwo[7] = 9033\n\tfmt.Println(slicesScoresMakeExTwo)\n\n}", "func Bool(i interface{}) bool {\n\tif i == nil {\n\t\treturn false\n\t}\n\tif v, ok := i.(bool); ok {\n\t\treturn v\n\t}\n\tif s := String(i); s 
!= \"\" && s != \"0\" && s != \"false\" && s != \"off\" {\n\t\treturn true\n\t}\n\treturn false\n}", "func (p *SliceOfMap) Single() bool {\n\treturn p.Len() == 1\n}", "func newBoolVector(f *fragment) *boolVector {\n\treturn &boolVector{\n\t\tf: f,\n\t}\n}", "func (f Factory) TestGetMapIDsEmpty(t *testing.T) {\n\ta := f.initAdapter(t)\n\tdefer f.free(a)\n\n\tslice, err := a.GetMapIDs(&store.MapFilter{Pagination: store.Pagination{Offset: 100000, Limit: 5}})\n\tif err != nil {\n\t\tt.Fatalf(\"a.GetMapIDs(): err: %s\", err)\n\t}\n\n\tif got, want := len(slice), 0; got != want {\n\t\tt.Errorf(\"len(slice) = %d want %d\", got, want)\n\t}\n}", "func nop(p *inlineItemParser, from, to inlineParserState, ch rune, w int) bool {\n\treturn true\n}", "func (b *ChangeBuffer) isEmpty() bool { return b.Len() == 0 }", "func BenchmarkGetBoolFromMap(b *testing.B) {\n\tm := make(map[string]bool)\n\tkey := \"BenchmarkGetBool\"\n\tm[key] = true\n\n\tfor i := 0; i < b.N; i++ {\n\t\tif !m[key] {\n\t\t\tb.Fatal(\"Map value was false\")\n\t\t}\n\t}\n}", "func nilSlices() {\n\tvar s []int\n\tfmt.Println(s, len(s), cap(s))\n\tif s == nil {\n\t\tfmt.Println(\"nil!\")\n\t}\n}", "func setSkipMirrorTrue(e *corev1.Endpoints) bool {\n\tskipMirrorVal, ok := e.Labels[discovery.LabelSkipMirror]\n\tif !ok || skipMirrorVal != \"true\" {\n\t\tif e.Labels == nil {\n\t\t\te.Labels = map[string]string{}\n\t\t}\n\t\te.Labels[discovery.LabelSkipMirror] = \"true\"\n\t\treturn true\n\t}\n\treturn false\n}", "func (m OrderedMap[K, V]) IsZero() bool {\n\treturn m.Len() == 0\n}", "func (s settableBool) String() string { return strconv.FormatBool(s.val) }", "func (options *Options) isIncludableZero() bool {\n\tb, ok := options.HashProp(\"includeZero\").(bool)\n\tif ok && b {\n\t\tnb, ok := options.Param(0).(int)\n\t\tif ok && nb == 0 {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}", "func (me TClipFillRuleType) IsNonzero() bool { return me.String() == \"nonzero\" }", "func ToBoolMap[S ~[]E, E comparable](slice 
S) map[E]bool {\n\tif len(slice) == 0 {\n\t\treturn nil\n\t}\n\tout := make(map[E]bool, len(slice))\n\tfor _, elem := range slice {\n\t\tout[elem] = true\n\t}\n\treturn out\n}", "func (node BoolVal) formatFast(buf *TrackedBuffer) {\n\tif node {\n\t\tbuf.WriteString(\"true\")\n\t} else {\n\t\tbuf.WriteString(\"false\")\n\t}\n}", "func (me TxsdFeTurbulenceTypeStitchTiles) IsNoStitch() bool { return me.String() == \"noStitch\" }", "func TestSectionsFalsey(t *testing.T) {\n\ttemplate := \"\\\"{{#boolean}}This should not be rendered.{{/boolean}}\\\"\"\n\tdata := map[string]interface{}{\"boolean\": false}\n\texpected := \"\\\"\\\"\"\n\tactual := Render(template, data)\n\n\tif actual != expected {\n\t\tt.Errorf(\"returned %#v, expected %#v\", actual, expected)\n\t}\n}", "func loadBool(i Instruction, ls *LuaState) {\n\ta, b, c := i.ABC()\n\ta += 1\n\n\tls.Push(LuaBool(b != 0))\n\tluaReplace(ls, a)\n\n\tif c != 0 {\n\t\tls.addPC(1)\n\t}\n}", "func (m *metricAerospikeNamespaceGeojsonRegionQueryFalsePositive) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}", "func (formatter) fBool(v *types.RecordValue) *types.RecordValue {\n\tif v.Value != strBoolTrue {\n\t\tv.Value = \"\"\n\t}\n\n\treturn v\n}", "func isEmpty() bool{\n\tif size == 0 {\n\t\treturn true\n\t} else {\n\t\treturn false\n\t}\n}", "func ExampleMap_fixingInitialValue() {\n\tps := newPSetForTesting() // use paramset.NewOrPanic()\n\n\tm := map[string]bool{\"x\": true}\n\tkeys := []string{\"x\", \"y\"}\n\n\tps.Add(\"my-map\", psetter.Map{Value: &m}, \"help text\")\n\n\tfmt.Println(\"Before parsing\")\n\tfor _, k := range keys {\n\t\tif v, ok := m[k]; ok {\n\t\t\tfmt.Printf(\"\\tm[%s] = %v\\n\", k, v)\n\t\t}\n\t}\n\tps.Parse([]string{\"-my-map\", \"x=false,y\"})\n\tfmt.Println(\"After parsing\")\n\tfor _, k := range keys {\n\t\tif v, ok := m[k]; ok {\n\t\t\tfmt.Printf(\"\\tm[%s] = 
%v\\n\", k, v)\n\t\t}\n\t}\n\t// Output:\n\t// Before parsing\n\t//\tm[x] = true\n\t// After parsing\n\t//\tm[x] = false\n\t//\tm[y] = true\n}", "func packBool(Data bool, pad *scratchpad) {\n\tif Data {\n\t\tpad.endAppend('s', 4, 't', 'r', 'u', 'e')\n\t} else {\n\t\tpad.endAppend('s', 5, 'f', 'a', 'l', 's', 'e')\n\t}\n}", "func boolArrayCapUp (old []bool)(new []bool) {\n new = make([]bool, cap(old)+1)\n copy(new, old)\n old = new\n return new\n}", "func (e StartElement) isZero() bool {\n\treturn len(e.Name.Local) == 0\n}", "func TestSectionsNestedFalsey(t *testing.T) {\n\ttemplate := \"| A {{#bool}}B {{#bool}}C{{/bool}} D{{/bool}} E |\"\n\tdata := map[string]interface{}{\"bool\": false}\n\texpected := \"| A E |\"\n\tactual := Render(template, data)\n\n\tif actual != expected {\n\t\tt.Errorf(\"returned %#v, expected %#v\", actual, expected)\n\t}\n}", "func False() TermT {\n\treturn TermT(C.yices_false())\n}", "func tryValueAsSlice0(v reflect.Value, err error) (reflect.Value, bool) {\n\tif v.Kind() != reflect.Slice || !strings.Contains(err.Error(), \"cannot unmarshal !!\") {\n\t\treturn v, false\n\t}\n\tif v.Len() == 0 {\n\t\tv.Set(reflect.Append(v, reflect.Indirect(reflect.New(v.Type().Elem()))))\n\t}\n\t// We are setting an entire array with one item; we always clear what\n\t// existed previously.\n\tv.Set(v.Slice(0, 1))\n\treturn v.Index(0), true\n}", "func (d desc) sliceMap() {\n\tl := []byte(d.letter)\n\tfmt.Println(l)\n}", "func (v View) Slice() []bool {\n\t// TODO: This forces an alloc, as an alternative a slice could be taken\n\t// as input that can be (re)used by the client. 
Are there use cases\n\t// where this would actually make sense?\n\tresult := make([]bool, v.Len())\n\tfor i, j := range v.index {\n\t\tresult[i] = v.data[j]\n\t}\n\treturn result\n}", "func (this *cyclingActivityStruct) Zero() bool {\n\tduration := this.duration\n\tdistanceKM := this.distanceKM\n\tdistanceKMZero := distanceKM.zero()\n\tenergyKJ := this.energyKJ\n\tresult := (duration == 0) && (distanceKMZero) && (energyKJ == 0)\n\treturn result\n}", "func ZeroSlice[T any](s []T) {\n\tfor i := range s {\n\t\ts[i] = *new(T)\n\t}\n}", "func Fill(value bool) *SimpleElement { return newSEBool(\"fill\", value) }", "func (s *StateObject) empty() bool {\n\treturn len(s.data.UTXOs) <= 0\n}", "func Bool(b bool) Cell {\n\tif b {\n\t\treturn True\n\t}\n\treturn Nil\n}", "func MaybeAllocateBoolArray(array []bool, length int) []bool {\n\tif cap(array) < length {\n\t\treturn make([]bool, length)\n\t}\n\tarray = array[:length]\n\tfor n := 0; n < length; n += copy(array[n:], ZeroBoolColumn) {\n\t}\n\treturn array\n}", "func isAllZero(resources *corev1.ResourceList) bool {\n\tfor _, value := range *resources {\n\t\tif !value.IsZero() {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func stringifyBool(b bool) string {\n\tif b {\n\t\treturn \"True\"\n\t}\n\treturn \"False\"\n}", "func (t *Dense) slice(start, end int) {\n\tswitch t.t.Kind() {\n\tcase reflect.Bool:\n\t\tdata := t.bools()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Int:\n\t\tdata := t.ints()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Int8:\n\t\tdata := t.int8s()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Int16:\n\t\tdata := t.int16s()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Int32:\n\t\tdata := t.int32s()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Int64:\n\t\tdata := t.int64s()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Uint:\n\t\tdata := t.uints()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Uint8:\n\t\tdata := t.uint8s()[start:end]\n\t\tt.fromSlice(data)\n\tcase 
reflect.Uint16:\n\t\tdata := t.uint16s()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Uint32:\n\t\tdata := t.uint32s()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Uint64:\n\t\tdata := t.uint64s()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Uintptr:\n\t\tdata := t.uintptrs()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Float32:\n\t\tdata := t.float32s()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Float64:\n\t\tdata := t.float64s()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Complex64:\n\t\tdata := t.complex64s()[start:end]\n\t\tt.fromSlice(data)\n\tcase reflect.Complex128:\n\t\tdata := t.complex128s()[start:end]\n\t\tt.fromSlice(data)\n\n\tcase reflect.String:\n\t\tdata := t.strings()[start:end]\n\t\tt.fromSlice(data)\n\n\tcase reflect.UnsafePointer:\n\t\tdata := t.unsafePointers()[start:end]\n\t\tt.fromSlice(data)\n\tdefault:\n\t\tv := reflect.ValueOf(t.v)\n\t\tv = v.Slice(start, end)\n\t\tt.fromSlice(v.Interface())\n\t}\n}", "func (x *ID) Zero() bool {\n\treturn len(x.GetData()) == 0\n}", "func (k QKEKey) mask() {\n\tfor i := 0; i < len(k); i++ {\n\t\tk[i].SetValue(0)\n\t}\n}", "func ToStringMapBoolE(i interface{}) (map[string]bool, error) {\n\treturn cast.ToStringMapBoolE(i)\n}", "func isZero(buffer []byte) bool {\n\tfor i := range buffer {\n\t\tif buffer[i] != 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func stringTrueFalse(source bool) string {\n\tif source {\n\t\treturn \"true\"\n\t}\n\n\treturn \"false\"\n}", "func (that *StrAnyMap) IsEmpty() bool {\n\treturn that.Size() == 0\n}", "func (am ArgMap) NonFlags() []string {\n\treturn am[\"_\"].([]string)\n}", "func (o Op) IsSlice3() bool", "func (p *StringMapBool) Len() int {\n\tif p == nil {\n\t\treturn 0\n\t}\n\treturn len(*p)\n}", "func (d *Decoder) ZeroEmpty(z bool) {\n\td.zeroEmpty = z\n}", "func Empty(i interface{}) bool {\n\tif i == nil {\n\t\treturn true\n\t}\n\n\tv := reflect.ValueOf(i)\n\n\tswitch v.Kind() {\n\tcase reflect.Map, reflect.Array, 
reflect.Slice, reflect.Struct:\n\t\tdata, _ := json.Marshal(i)\n\t\treturn string(data) == \"[]\" || string(data) == \"{}\"\n\tdefault:\n\t\treturn reflect.DeepEqual(i, reflect.Zero(reflect.TypeOf(i)).Interface())\n\t}\n}", "func (l *sampleList) Empty() bool { return len(l.samples) == 0 }" ]
[ "0.5626376", "0.5512267", "0.5465469", "0.53896105", "0.5348433", "0.5338991", "0.5324013", "0.52970034", "0.5254964", "0.5250206", "0.52483", "0.5214961", "0.5191665", "0.51714694", "0.5155821", "0.51013535", "0.50277764", "0.50219125", "0.5010307", "0.49857587", "0.49795234", "0.4974324", "0.4974324", "0.4965161", "0.4955051", "0.49498478", "0.49326065", "0.4920778", "0.49156025", "0.49134043", "0.49087998", "0.49087286", "0.489475", "0.48905376", "0.48854816", "0.4885237", "0.48800248", "0.4863706", "0.4847898", "0.48421633", "0.48188046", "0.4816894", "0.4814106", "0.4804287", "0.48006406", "0.48000368", "0.47971323", "0.47855335", "0.4784469", "0.47712946", "0.4770902", "0.47704864", "0.47658712", "0.47645566", "0.47566864", "0.47531328", "0.4752923", "0.47528437", "0.47492632", "0.47453284", "0.474287", "0.47397062", "0.47388673", "0.47372574", "0.47342226", "0.47340575", "0.47338685", "0.4733578", "0.4729679", "0.47286466", "0.47225308", "0.4721942", "0.47089103", "0.47015733", "0.46996826", "0.46979427", "0.4696713", "0.46928605", "0.46926636", "0.46925732", "0.46920595", "0.46910372", "0.4689993", "0.4685098", "0.46829402", "0.46823388", "0.46786827", "0.46761513", "0.46719593", "0.46714136", "0.46703774", "0.46677932", "0.46666032", "0.4662769", "0.46580806", "0.46578383", "0.46573654", "0.46527144", "0.46510366", "0.46475834" ]
0.5007939
19
SetGlobalBalancer set grpc balancer with scheme.
func SetGlobalBalancer(scheme string, builder selector.Builder) { mu.Lock() defer mu.Unlock() b := base.NewBalancerBuilder( scheme, &Builder{builder: builder}, base.Config{HealthCheck: true}, ) gBalancer.Register(b) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (p *ioThrottlerPool) SetGlobalLimit(r rate.Limit, b int) {\n\tp.mu.Lock()\n\tdefer p.mu.Unlock()\n\tp.globalLimiter.SetBurst(b)\n\tp.globalLimiter.SetLimit(r)\n\tp.updateBufferSize()\n}", "func startGrpcBalancer(host string, addresses []string) (error) {\n\t// Start tcp listening port\n\tlis, err := net.Listen(\"tcp\", host)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// Create server service\n\tgrpcServer := grpc.NewServer()\n\ts := &server.LoadBalancer{}\n\t// Init servers for routing\n\ts.InitServers(context.Background(), addresses)\n\tcalculate.RegisterCalculateMatrixServer(grpcServer, s)\n\t// Attach listener to server\n\tgrpcServer.Serve(lis)\n\treturn nil\n}", "func NewGRPCLBBalancer(r naming.Resolver) Balancer {\n\treturn &balancer{\n\t\tr: r,\n\t}\n}", "func (d *discovery) SetLoadBalancer(b LoadBalancer) {\n\td.loadBalancerLock.Lock()\n\tdefer d.loadBalancerLock.Unlock()\n\td.loadBalancer = b\n}", "func (ctl *Controller) SetGlobalBrightness(brightness uint8) {\n\tctl.brightness = brightness\n\n\t// Update the buffer to reflect this.\n\tfor i, clr := range ctl.ledColours {\n\t\tctl.updateBuffer(i, clr)\n\t}\n}", "func NewSetBalancer(ctx context.Context, enabled bool) (*SetBalancer, error) {\n\treturn &SetBalancer{\n\t\tbase: base{\n\t\t\tctx: ctx,\n\t\t\tresultch: make(chan RPCResult, 1),\n\t\t},\n\t\treq: &pb.SetBalancerRunningRequest{On: &enabled},\n\t}, nil\n}", "func initGRPCGateway(db *gorm.DB, roomUC usecase.RoomUseCase, addressMux string, addressRPCServer string) error {\n\n\tctx := context.Background()\n\tctx, cancel := context.WithCancel(ctx)\n\tdefer cancel()\n\n\tmux := runtime.NewServeMux()\n\tdialOptions := []grpc.DialOption{grpc.WithInsecure()}\n\n\terr := roompb.RegisterRoomServiceHandlerFromEndpoint(ctx, mux, addressRPCServer, dialOptions)\n\tif err != nil {\n\t\tfmt.Println(\"Error when register Room GRPC Gateway, cause: \", err)\n\t\treturn err\n\t}\n\n\t//Starting GRPC Gateway\n\tfmt.Println(\"Room GRPC Server started, using GRPC 
Gateway mode, on :\", addressMux)\n\tlis, err := net.Listen(\"tcp\", addressMux)\n\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to listen %v \", err)\n\t}\n\n\treturn http.Serve(lis, mux)\n\n}", "func InitRoundRobin(name string, endpoints []string) {\n\tif len(lb) == 0 {\n\t\tlb = make(map[string]*roundrobin.Balancer)\n\t}\n\n\tlb[name] = roundrobin.New(endpoints)\n}", "func SetBusiness(biz *types.Business) {\n\tif biz == nil {\n\t\tbiz = types.DefaultBusiness()\n\t} else {\n\t\tbiz.Init()\n\t}\n\tglobalBusiness = biz\n}", "func NewBalancer(opts []Options) (*Balancer, error) {\n\tif len(opts) == 0 {\n\t\treturn nil, errors.New(\"invalid options\")\n\t}\n\n\t// create balancer base on given options.\n\tbalancer := &Balancer{pool: make(pool, len(opts))}\n\n\tfor i := 0; i < len(opts); i++ {\n\t\tbalancer.pool[i] = newBackend(&opts[i])\n\t}\n\n\treturn balancer, nil\n}", "func BindGlobalFlags(v *viper.Viper, flags *pflag.FlagSet) {\n}", "func init() {\n\tbalancer.Register(orcaLBBuilder{})\n}", "func (reb *Manager) RunGlobalReb(smap *cluster.Smap, globRebID int64, buckets ...string) {\n\tmd := &globArgs{\n\t\tsmap: smap,\n\t\tconfig: cmn.GCO.Get(),\n\t}\n\tif len(buckets) == 0 || buckets[0] == \"\" {\n\t\tmd.ecUsed = reb.t.GetBowner().Get().IsECUsed()\n\t} else {\n\t\t// single bucket rebalance is AIS case only\n\t\tbck := cluster.Bck{Name: buckets[0], Provider: cmn.AIS}\n\t\tprops, ok := reb.t.GetBowner().Get().Get(&bck)\n\t\tif !ok {\n\t\t\tglog.Errorf(\"Bucket %q not found\", bck.Name)\n\t\t\treturn\n\t\t}\n\t\tmd.ecUsed = props.EC.Enabled\n\t}\n\n\tif !reb.globalRebPrecheck(md, globRebID) {\n\t\treturn\n\t}\n\tif !reb.globalRebInit(md, globRebID, buckets...) 
{\n\t\treturn\n\t}\n\n\t// At this point only one rebalance is running so we can safely enable regular GFN.\n\tgfn := reb.t.GetGFN(cluster.GFNGlobal)\n\tgfn.Activate()\n\tdefer gfn.Deactivate()\n\n\terrCnt := 0\n\tif err := reb.globalRebSyncAndRun(md); err == nil {\n\t\terrCnt = reb.globalRebWaitAck(md)\n\t} else {\n\t\tglog.Warning(err)\n\t}\n\treb.stage.Store(rebStageFin)\n\tfor errCnt != 0 && !reb.xreb.Aborted() {\n\t\terrCnt = reb.bcast(md, reb.waitFinExtended)\n\t}\n\treb.globalRebFini(md)\n\t// clean up all collected data\n\tif md.ecUsed {\n\t\treb.ecReb.cleanup()\n\t}\n\n\treb.nodeStages = make(map[string]uint32) // cleanup after run\n}", "func setSBRBackendGVK(\n\tsbr *v1alpha1.ServiceBindingRequest,\n\tresourceRef string,\n\tbackendGVK schema.GroupVersionKind,\n\tenvVarPrefix string,\n) {\n\tsbr.Spec.BackingServiceSelector = &v1alpha1.BackingServiceSelector{\n\t\tGroupVersionKind: metav1.GroupVersionKind{Group: backendGVK.Group, Version: backendGVK.Version, Kind: backendGVK.Kind},\n\t\tResourceRef: resourceRef,\n\t\tEnvVarPrefix: &envVarPrefix,\n\t}\n}", "func (c *ConfigRequest) SetGlobalConfig(g *ac.GlobalConfig) {\n\tc.V1.Sys.Mlsa = g.V1.Mlsa\n\n\tif logLevel := g.GetV1().GetLog().GetLevel().GetValue(); logLevel != \"\" {\n\t\tc.V1.Sys.Log.Level.Value = logLevel\n\t}\n\n\tif logFormat := g.GetV1().GetLog().GetFormat().GetValue(); logFormat != \"\" {\n\t\tc.V1.Sys.Log.Format.Value = logFormat\n\t}\n}", "func (_obj *Apilangpack) TarsSetProtocol(p m.Protocol) {\n\t_obj.s.TarsSetProtocol(p)\n}", "func NewMultiAddrBalancer(addrs []string) grpc.Balancer {\n\tr := NewHelpResolver(addrs)\n\treturn grpc.RoundRobin(r)\n}", "func NewGlobalLoadBalancerMonitorV1(options *GlobalLoadBalancerMonitorV1Options) (service *GlobalLoadBalancerMonitorV1, err error) {\n\tserviceOptions := &core.ServiceOptions{\n\t\tURL: DefaultServiceURL,\n\t\tAuthenticator: options.Authenticator,\n\t}\n\n\terr = core.ValidateStruct(options, \"options\")\n\tif err != nil 
{\n\t\treturn\n\t}\n\n\tbaseService, err := core.NewBaseService(serviceOptions)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tif options.URL != \"\" {\n\t\terr = baseService.SetServiceURL(options.URL)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\tservice = &GlobalLoadBalancerMonitorV1{\n\t\tService: baseService,\n\t\tCrn: options.Crn,\n\t}\n\n\treturn\n}", "func initGlobalConfig(secCfg tikvconfig.Security) {\n\tif secCfg.ClusterSSLCA != \"\" || secCfg.ClusterSSLCert != \"\" {\n\t\tconf := tidbconfig.GetGlobalConfig()\n\t\tconf.Security.ClusterSSLCA = secCfg.ClusterSSLCA\n\t\tconf.Security.ClusterSSLCert = secCfg.ClusterSSLCert\n\t\tconf.Security.ClusterSSLKey = secCfg.ClusterSSLKey\n\t\ttidbconfig.StoreGlobalConfig(conf)\n\t}\n}", "func (g *GRPC) SetHTTPServer(serv *http.Server) {\n\tg.httpServer = serv\n}", "func (m *MockLoadBalancerServiceIface) AssignToGlobalLoadBalancerRule(p *AssignToGlobalLoadBalancerRuleParams) (*AssignToGlobalLoadBalancerRuleResponse, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AssignToGlobalLoadBalancerRule\", p)\n\tret0, _ := ret[0].(*AssignToGlobalLoadBalancerRuleResponse)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func SetGlobalSelector(builder Builder) {\n\tglobalSelector = builder\n}", "func LoadBalancer_Classic(loadBalancer awselasticloadbalancing.LoadBalancer) LoadBalancer {\n\t_init_.Initialize()\n\n\tvar returns LoadBalancer\n\n\t_jsii_.StaticInvoke(\n\t\t\"monocdk.aws_codedeploy.LoadBalancer\",\n\t\t\"classic\",\n\t\t[]interface{}{loadBalancer},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func SetGlobalLogger(cores ...zapcore.Core) {\n\tcore := zapcore.NewTee(cores...)\n\tzap.ReplaceGlobals(zap.New(core))\n\n\tlogger = zap.L()\n}", "func (globalLoadBalancerMonitor *GlobalLoadBalancerMonitorV1) SetServiceURL(url string) error {\n\treturn globalLoadBalancerMonitor.Service.SetServiceURL(url)\n}", "func ConfigureLoadBalancer(clusterName string) error {\n\tallNodes, err := 
nodes.List(fmt.Sprintf(\"label=%s=%s\", constants.ClusterLabelKey, clusterName))\n\tif err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\n\t// identify external load balancer node\n\tloadBalancerNode, err := nodes.ExternalLoadBalancerNode(allNodes)\n\tif err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\n\t// collect info about the existing controlplane nodes\n\tvar backendServers = map[string]string{}\n\tcontrolPlaneNodes, err := nodes.SelectNodesByRole(\n\t\tallNodes,\n\t\tconstants.ControlPlaneNodeRoleValue,\n\t)\n\tif err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\tfor _, n := range controlPlaneNodes {\n\t\tcontrolPlaneIP, err := n.IP()\n\t\tif err != nil {\n\t\t\treturn errors.Wrapf(err, \"failed to get IP for node %s\", n.Name())\n\t\t}\n\t\tbackendServers[n.Name()] = fmt.Sprintf(\"%s:%d\", controlPlaneIP, 6443)\n\t}\n\n\t// create loadbalancer config data\n\tloadbalancerConfig, err := loadbalancer.Config(&loadbalancer.ConfigData{\n\t\tControlPlanePort: loadbalancer.ControlPlanePort,\n\t\tBackendServers: backendServers,\n\t})\n\tif err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\n\tif err := loadBalancerNode.WriteFile(loadbalancer.ConfigPath, loadbalancerConfig); err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\n\treturn errors.WithStack(docker.Kill(\"SIGHUP\", loadBalancerNode.Name()))\n}", "func InitGlobalFlags(flags *pflag.FlagSet) {\n\tcliconfig.InitLoggingLevel(flags)\n\tcliconfig.InitClientConfigFile(flags)\n\tcliconfig.InitChannelID(flags)\n\tcliconfig.InitUserName(flags)\n\tcliconfig.InitUserPassword(flags)\n\tcliconfig.InitOrgID(flags)\n\tcliconfig.InitMspID(flags)\n\tcliconfig.InitKeyType(flags)\n\tcliconfig.InitEphemeralFlag(flags)\n\tcliconfig.InitSigAlg(flags)\n\tcliconfig.InitTimeout(flags)\n\n}", "func InitGrpcResolver(namespace string, mgr ctrl.Manager) (*KubeResolver, error) {\n\tkr := &KubeResolver{\n\t\tnamespace: namespace, Client: mgr.GetClient(),\n\t\tresolvers: make(map[string][]*serviceResolver, 
2),\n\t\tlogger: ctrl.Log.WithName(\"KubeResolver\"),\n\t}\n\terr := ctrl.NewControllerManagedBy(mgr).For(&corev1.Endpoints{}).Complete(kr)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tresolver.Register(kr)\n\tkr.logger.Info(\"Registered KubeResolver with kubebuilder and gRPC\")\n\treturn kr, nil\n}", "func ConfigureGlobalBBSCache(ctx context.Context, bbsURL, cafile, certfile, keyfile string, pollInterval time.Duration, testing bbs.Client) (*BBSCache, error) {\n\tglobalBBSCacheLock.Lock()\n\tdefer globalBBSCacheLock.Unlock()\n\n\tif globalBBSCache.configured {\n\t\treturn globalBBSCache, nil\n\t}\n\n\tglobalBBSCache.configured = true\n\tif testing != nil {\n\t\tglobalBBSCache.bbsAPIClient = testing\n\t} else {\n\t\tclientConfig := bbs.ClientConfig{\n\t\t\tURL: bbsURL,\n\t\t\tIsTLS: true,\n\t\t\tCAFile: cafile,\n\t\t\tCertFile: certfile,\n\t\t\tKeyFile: keyfile,\n\t\t\tClientSessionCacheSize: 0,\n\t\t\tMaxIdleConnsPerHost: 0,\n\t\t\tInsecureSkipVerify: false,\n\t\t\tRetries: 10,\n\t\t\tRequestTimeout: 5 * time.Second,\n\t\t}\n\t\tvar err error\n\t\tglobalBBSCache.bbsAPIClient, err = bbs.NewClientWithConfig(clientConfig)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tglobalBBSCache.bbsAPIClientLogger = lager.NewLogger(\"bbs\")\n\tglobalBBSCache.pollInterval = pollInterval\n\tglobalBBSCache.lastUpdated = time.Time{} // zero time\n\tglobalBBSCache.cancelContext = ctx\n\n\tgo globalBBSCache.start()\n\n\treturn globalBBSCache, nil\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s 
*graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func SetGlobalBus(bus EventBus) {\n\tglobalBus = bus\n}", "func (_obj *Apichannels) TarsSetProtocol(p m.Protocol) {\n\t_obj.s.TarsSetProtocol(p)\n}", "func SetGlobalGroup(iq IQ, roleName, group string) error {\n\treturn globalAuth(iq, http.MethodPut, roleName, MemberTypeGroup, group)\n}", "func SetGlobal(filename string) error {\n\turls, err := LoadURLs(filename)\n\tif err != nil {\n\t\treturn err\n\t}\n\tEnv = *urls\n\treturn nil\n}", "func configureServer(s *http.Server, scheme, addr string) {\n\n}", "func SetGlobalConfigMap(key string, value interface{}) {\n\tglobalConfig.Lock()\n\tglobalConfig.gMap[key] = value\n\tglobalConfig.Unlock()\n}", "func NewRandomLoadBalancer(ctx context.Context, frontend NetAddr, backends ...NetAddr) (*LoadBalancer, error) {\n\treturn newLoadBalancer(ctx, frontend, randomPolicy(), backends...)\n}", "func configureServer(s *http.Server, scheme, addr string) {\n}", "func configureServer(s *http.Server, scheme, addr string) {\n}", "func configureServer(s *http.Server, scheme, addr string) {\n}", "func configureServer(s *http.Server, scheme, addr string) {\n}", "func configureServer(s *http.Server, scheme, addr string) {\n}", "func configureServer(s *http.Server, scheme, addr string) {\n}", "func NewBalancer(name string, handler EventHandler, configure *conf.BalancersConfiguration) (balancer *Balancer) {\n\n\tbalancer = new(Balancer)\n\tbalancer.name = name\n\tbalancer.handler = handler\n\tconfiguration := configure.GetBalancerConfiguration(name)\n\tbalancer.balancerType = configuration.BalancerType\n\tbalancer.initQueue(configuration)\n\tbalancer.aggregatorManager = initAggregatorManager(configuration.AggreagatorName, balancer, configuration.WorkAggregatorConfiguration)\n\tbalancer.poolManager = initRoutinePoolManager(name, balancer.balancerType, 
balancer, handler, configuration)\n\treturn balancer\n}", "func (_obj *DataService) TarsSetProtocol(p m.Protocol) {\n\t_obj.s.TarsSetProtocol(p)\n}", "func (w *World) SetEnumGlobal() {\n\tif w.Desc == nil {\n\t\treturn\n\t}\n\trfl := w.Reflector(rtypes.T_Enums)\n\tif rfl.Name == \"\" {\n\t\treturn\n\t}\n\tw.Desc.GenerateEnumTypes()\n\tstate := w.State()\n\tenums, err := rfl.PushTo(state.Context(), w.Desc.EnumTypes)\n\tif err != nil {\n\t\treturn\n\t}\n\tstate.L.SetGlobal(\"Enum\", enums)\n}", "func (s) TestBalancer_AddressesChanging(t *testing.T) {\n\tctx, cancel := context.WithTimeout(context.Background(), defaultTestTimeout)\n\tdefer cancel()\n\n\tsrv1 := startServer(t, reportBoth)\n\tsrv2 := startServer(t, reportBoth)\n\tsrv3 := startServer(t, reportBoth)\n\tsrv4 := startServer(t, reportBoth)\n\n\t// srv1: weight 10\n\tsrv1.oobMetrics.SetQPS(10.0)\n\tsrv1.oobMetrics.SetApplicationUtilization(1.0)\n\t// srv2: weight 100\n\tsrv2.oobMetrics.SetQPS(10.0)\n\tsrv2.oobMetrics.SetApplicationUtilization(.1)\n\t// srv3: weight 20\n\tsrv3.oobMetrics.SetQPS(20.0)\n\tsrv3.oobMetrics.SetApplicationUtilization(1.0)\n\t// srv4: weight 200\n\tsrv4.oobMetrics.SetQPS(20.0)\n\tsrv4.oobMetrics.SetApplicationUtilization(.1)\n\n\tsc := svcConfig(t, oobConfig)\n\tif err := srv1.StartClient(grpc.WithDefaultServiceConfig(sc)); err != nil {\n\t\tt.Fatalf(\"Error starting client: %v\", err)\n\t}\n\tsrv2.Client = srv1.Client\n\taddrs := []resolver.Address{{Addr: srv1.Address}, {Addr: srv2.Address}, {Addr: srv3.Address}}\n\tsrv1.R.UpdateState(resolver.State{Addresses: addrs})\n\n\t// Call each backend once to ensure the weights have been received.\n\tensureReached(ctx, t, srv1.Client, 3)\n\ttime.Sleep(weightUpdatePeriod)\n\tcheckWeights(ctx, t, srvWeight{srv1, 1}, srvWeight{srv2, 10}, srvWeight{srv3, 2})\n\n\t// Add backend 4\n\taddrs = append(addrs, resolver.Address{Addr: srv4.Address})\n\tsrv1.R.UpdateState(resolver.State{Addresses: 
addrs})\n\ttime.Sleep(weightUpdatePeriod)\n\tcheckWeights(ctx, t, srvWeight{srv1, 1}, srvWeight{srv2, 10}, srvWeight{srv3, 2}, srvWeight{srv4, 20})\n\n\t// Shutdown backend 3. RPCs will no longer be routed to it.\n\tsrv3.Stop()\n\ttime.Sleep(weightUpdatePeriod)\n\tcheckWeights(ctx, t, srvWeight{srv1, 1}, srvWeight{srv2, 10}, srvWeight{srv4, 20})\n\n\t// Remove addresses 2 and 3. RPCs will no longer be routed to 2 either.\n\taddrs = []resolver.Address{{Addr: srv1.Address}, {Addr: srv4.Address}}\n\tsrv1.R.UpdateState(resolver.State{Addresses: addrs})\n\ttime.Sleep(weightUpdatePeriod)\n\tcheckWeights(ctx, t, srvWeight{srv1, 1}, srvWeight{srv4, 20})\n\n\t// Re-add 2 and remove the rest.\n\taddrs = []resolver.Address{{Addr: srv2.Address}}\n\tsrv1.R.UpdateState(resolver.State{Addresses: addrs})\n\ttime.Sleep(weightUpdatePeriod)\n\tcheckWeights(ctx, t, srvWeight{srv2, 10})\n\n\t// Re-add 4.\n\taddrs = append(addrs, resolver.Address{Addr: srv4.Address})\n\tsrv1.R.UpdateState(resolver.State{Addresses: addrs})\n\ttime.Sleep(weightUpdatePeriod)\n\tcheckWeights(ctx, t, srvWeight{srv2, 10}, srvWeight{srv4, 20})\n}", "func (_obj *WebApiAuth) TarsSetProtocol(p m.Protocol) {\n\t_obj.s.TarsSetProtocol(p)\n}", "func (r *GslbReconciler) SetupWithManager(mgr ctrl.Manager) error {\n\t// Figure out Gslb resource name to Reconcile when non controlled Endpoint is updated\n\n\tendpointMapFn := handler.ToRequestsFunc(\n\t\tfunc(a handler.MapObject) []reconcile.Request {\n\t\t\tgslbList := &k8gbv1beta1.GslbList{}\n\t\t\topts := []client.ListOption{\n\t\t\t\tclient.InNamespace(a.Meta.GetNamespace()),\n\t\t\t}\n\t\t\tc := mgr.GetClient()\n\t\t\terr := c.List(context.TODO(), gslbList, opts...)\n\t\t\tif err != nil {\n\t\t\t\tlog.Info().Msg(\"Can't fetch gslb objects\")\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tgslbName := \"\"\n\t\t\tfor _, gslb := range gslbList.Items {\n\t\t\t\tfor _, rule := range gslb.Spec.Ingress.Rules {\n\t\t\t\t\tfor _, path := range rule.HTTP.Paths {\n\t\t\t\t\t\tif 
path.Backend.ServiceName == a.Meta.GetName() {\n\t\t\t\t\t\t\tgslbName = gslb.Name\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif len(gslbName) > 0 {\n\t\t\t\treturn []reconcile.Request{\n\t\t\t\t\t{NamespacedName: types.NamespacedName{\n\t\t\t\t\t\tName: gslbName,\n\t\t\t\t\t\tNamespace: a.Meta.GetNamespace(),\n\t\t\t\t\t}},\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn nil\n\t\t})\n\n\tcreateGslbFromIngress := func(annotationKey string, annotationValue string, a handler.MapObject, strategy string) {\n\t\tlog.Info().Msgf(\"Detected strategy annotation(%s:%s) on Ingress(%s)\",\n\t\t\tannotationKey, annotationValue, a.Meta.GetName())\n\t\tc := mgr.GetClient()\n\t\tingressToReuse := &v1beta1.Ingress{}\n\t\terr := c.Get(context.Background(), client.ObjectKey{\n\t\t\tNamespace: a.Meta.GetNamespace(),\n\t\t\tName: a.Meta.GetName(),\n\t\t}, ingressToReuse)\n\t\tif err != nil {\n\t\t\tlog.Info().Msgf(\"Ingress(%s) does not exist anymore. Skipping Glsb creation...\", a.Meta.GetName())\n\t\t\treturn\n\t\t}\n\t\tgslbExist := &k8gbv1beta1.Gslb{}\n\t\terr = c.Get(context.Background(), client.ObjectKey{\n\t\t\tNamespace: a.Meta.GetNamespace(),\n\t\t\tName: a.Meta.GetName(),\n\t\t}, gslbExist)\n\t\tif err == nil {\n\t\t\tlog.Info().Msgf(\"Gslb(%s) already exists. 
Skipping Gslb creation...\", gslbExist.Name)\n\t\t\treturn\n\t\t}\n\t\tgslb := &k8gbv1beta1.Gslb{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tNamespace: a.Meta.GetNamespace(),\n\t\t\t\tName: a.Meta.GetName(),\n\t\t\t\tAnnotations: a.Meta.GetAnnotations(),\n\t\t\t},\n\t\t\tSpec: k8gbv1beta1.GslbSpec{\n\t\t\t\tIngress: k8gbv1beta1.FromV1Beta1IngressSpec(ingressToReuse.Spec),\n\t\t\t\tStrategy: k8gbv1beta1.Strategy{\n\t\t\t\t\tType: strategy,\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\n\t\tif strategy == failoverStrategy {\n\t\t\tfor annotationKey, annotationValue := range a.Meta.GetAnnotations() {\n\t\t\t\tif annotationKey == primaryGeoTagAnnotation {\n\t\t\t\t\tgslb.Spec.Strategy.PrimaryGeoTag = annotationValue\n\t\t\t\t}\n\t\t\t}\n\t\t\tif gslb.Spec.Strategy.PrimaryGeoTag == \"\" {\n\t\t\t\tlog.Info().Msgf(\"%s annotation is missing, skipping Gslb creation...\", primaryGeoTagAnnotation)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\tlog.Info().Msgf(\"Creating new Gslb(%s) out of Ingress annotation\", gslb.Name)\n\t\terr = c.Create(context.Background(), gslb)\n\t\tif err != nil {\n\t\t\tlog.Err(err).Msg(\"Glsb creation failed\")\n\t\t}\n\t}\n\tingressMapFn := handler.ToRequestsFunc(\n\t\tfunc(a handler.MapObject) []reconcile.Request {\n\t\t\tfor annotationKey, annotationValue := range a.Meta.GetAnnotations() {\n\t\t\t\tif annotationKey == strategyAnnotation {\n\t\t\t\t\tswitch annotationValue {\n\t\t\t\t\tcase roundRobinStrategy:\n\t\t\t\t\t\tcreateGslbFromIngress(annotationKey, annotationKey, a, roundRobinStrategy)\n\t\t\t\t\tcase failoverStrategy:\n\t\t\t\t\t\tcreateGslbFromIngress(annotationKey, annotationKey, a, failoverStrategy)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn nil\n\t\t})\n\n\treturn ctrl.NewControllerManagedBy(mgr).\n\t\tFor(&k8gbv1beta1.Gslb{}).\n\t\tOwns(&v1beta1.Ingress{}).\n\t\tOwns(&externaldns.DNSEndpoint{}).\n\t\tWatches(&source.Kind{Type: &corev1.Endpoints{}},\n\t\t\t&handler.EnqueueRequestsFromMapFunc{\n\t\t\t\tToRequests: 
endpointMapFn}).\n\t\tWatches(&source.Kind{Type: &v1beta1.Ingress{}},\n\t\t\t&handler.EnqueueRequestsFromMapFunc{\n\t\t\t\tToRequests: ingressMapFn}).\n\t\tComplete(r)\n}", "func Init(c *conf.Config) {\n\t// service\n\tinitService(c)\n\t// init grpc\n\tgrpcSvr = grpc.New(nil, arcSvc, newcomerSvc)\n\tengineOuter := bm.DefaultServer(c.BM.Outer)\n\t// init outer router\n\touterRouter(engineOuter)\n\tif err := engineOuter.Start(); err != nil {\n\t\tlog.Error(\"engineOuter.Start() error(%v) | config(%v)\", err, c)\n\t\tpanic(err)\n\t}\n}", "func (m *MockLoadBalancerServiceIface) UpdateGlobalLoadBalancerRule(p *UpdateGlobalLoadBalancerRuleParams) (*UpdateGlobalLoadBalancerRuleResponse, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"UpdateGlobalLoadBalancerRule\", p)\n\tret0, _ := ret[0].(*UpdateGlobalLoadBalancerRuleResponse)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func CreateBalancer(numWorker int) *LoadBalancer {\n\tdone := make(chan *Worker, numWorker)\n\tb := &LoadBalancer{make(Pool, 0, numWorker), done}\n\tfor i := 0; i < numWorker; i++ {\n\t\tw := &Worker{make(chan Job, JOB_BUFFER_LENGTH), 0, i}\n\t\theap.Push(&b.workerPool, w)\n\t\tgo w.work(done)\n\t}\n\treturn b\n}", "func (c Config) SetGlobal(option, value string) {\n\tif globals, ok := c[Globals]; ok {\n\t\tif settings, ok := globals.(map[string]string); ok {\n\t\t\tsettings[option] = value\n\t\t}\n\t}\n}", "func (gm GlobalManager) InitGlobalManager(ctx sdk.Context, totalLino types.Coin) sdk.Error {\n\treturn gm.storage.InitGlobalState(ctx, totalLino)\n}", "func NewRandom(res sd.Resolver) Balancer {\n\treturn &randomLB{\n\t\tres: res,\n\t}\n}", "func (broadcast *Broadcast) ChangeGlobalAllocationParam(ctx context.Context, creator string,\n\tparameter model.GlobalAllocationParam, reason string, privKeyHex string, seq int64) (*model.BroadcastResponse, error) {\n\tmsg := model.ChangeGlobalAllocationParamMsg{\n\t\tCreator: creator,\n\t\tParameter: parameter,\n\t\tReason: 
reason,\n\t}\n\treturn broadcast.broadcastTransaction(ctx, msg, privKeyHex, seq, \"\", false)\n}", "func NewGlobalLoadBalancerMonitorV1UsingExternalConfig(options *GlobalLoadBalancerMonitorV1Options) (globalLoadBalancerMonitor *GlobalLoadBalancerMonitorV1, err error) {\n\tif options.ServiceName == \"\" {\n\t\toptions.ServiceName = DefaultServiceName\n\t}\n\n\tif options.Authenticator == nil {\n\t\toptions.Authenticator, err = core.GetAuthenticatorFromEnvironment(options.ServiceName)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\tglobalLoadBalancerMonitor, err = NewGlobalLoadBalancerMonitorV1(options)\n\tif err != nil {\n\t\treturn\n\t}\n\n\terr = globalLoadBalancerMonitor.Service.ConfigureService(options.ServiceName)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tif options.URL != \"\" {\n\t\terr = globalLoadBalancerMonitor.Service.SetServiceURL(options.URL)\n\t}\n\treturn\n}", "func NewGRPCServer(logger log.Logger, protoAddr string, app types.Application) service.Service {\n\tproto, addr := tmnet.ProtocolAndAddress(protoAddr)\n\ts := &GRPCServer{\n\t\tlogger: logger,\n\t\tproto: proto,\n\t\taddr: addr,\n\t\tapp: app,\n\t}\n\ts.BaseService = *service.NewBaseService(logger, \"ABCIServer\", s)\n\treturn s\n}", "func (sv *globalSystemVariables) SetGlobal(name string, val interface{}) error {\n\tsv.mutex.Lock()\n\tdefer sv.mutex.Unlock()\n\tname = strings.ToLower(name)\n\tsysVar, ok := systemVars[name]\n\tif !ok {\n\t\treturn sql.ErrUnknownSystemVariable.New(name)\n\t}\n\tif sysVar.Scope == sql.SystemVariableScope_Session {\n\t\treturn sql.ErrSystemVariableSessionOnly.New(name)\n\t}\n\tif !sysVar.Dynamic || sysVar.ValueFunction != nil {\n\t\treturn sql.ErrSystemVariableReadOnly.New(name)\n\t}\n\tconvertedVal, _, err := sysVar.Type.Convert(val)\n\tif err != nil {\n\t\treturn err\n\t}\n\tsvv := sql.SystemVarValue{Var: sysVar, Val: convertedVal}\n\tsv.sysVarVals[name] = svv\n\tif sysVar.NotifyChanged != nil {\n\t\tsysVar.NotifyChanged(sql.SystemVariableScope_Global, 
svv)\n\t}\n\treturn nil\n}", "func Global(v *viper.Viper) {\n\tAppConf = v\n}", "func SetGlobalHttpClient(client *http.Client) {\n\tdefaultHttpClient = client\n}", "func (service *Service) initGRPC(ctx context.Context) error {\n\t// ============================= Initialize runtime mux =============================\n\tif service.runtimeMuxEndpoint == \"\" {\n\t\tservice.runtimeMuxEndpoint = \"/\"\n\t}\n\n\t// Apply servemux options to runtime muxer\n\tservice.runtimeMux = runtime.NewServeMux(service.serveMuxOptions...)\n\n\t// ============================= Initialize grpc proxy client =============================\n\tvar (\n\t\tgPort int\n\t\terr error\n\t)\n\n\tif service.cfg.ServiceTLSEnabled() {\n\t\tcreds, err := credentials.NewClientTLSFromFile(\n\t\t\tservice.cfg.ServiceTLSCertFile(), service.cfg.ServiceTLSServerName())\n\t\tif err != nil {\n\t\t\treturn errors.Wrapf(err,\n\t\t\t\t\"failed to create tls config for %s service\", service.cfg.ServiceTLSServerName())\n\t\t}\n\t\tservice.dialOptions = append(service.dialOptions, grpc.WithTransportCredentials(creds))\n\t\tgPort = service.cfg.HTTPort()\n\t} else {\n\t\tservice.dialOptions = append(service.dialOptions, grpc.WithInsecure())\n\t\tgPort = service.cfg.GRPCPort()\n\t}\n\n\t// Enable wait for ready RPCs\n\twaitForReadyUnaryInterceptor := func(\n\t\tctx context.Context,\n\t\tmethod string,\n\t\treq, reply interface{},\n\t\tcc *grpc.ClientConn,\n\t\tinvoker grpc.UnaryInvoker,\n\t\topts ...grpc.CallOption,\n\t) error {\n\t\treturn invoker(ctx, method, req, reply, cc, append(opts, grpc.WaitForReady(true))...)\n\t}\n\n\t// Add client unary interceptos\n\tunaryClientInterceptors := []grpc.UnaryClientInterceptor{waitForReadyUnaryInterceptor}\n\tfor _, unaryInterceptor := range service.unaryClientInterceptors {\n\t\tunaryClientInterceptors = append(unaryClientInterceptors, unaryInterceptor)\n\t}\n\n\t// Add client streaming interceptos\n\tstreamClientInterceptors := make([]grpc.StreamClientInterceptor, 0)\n\tfor _, 
streamInterceptor := range service.streamClientInterceptors {\n\t\tstreamClientInterceptors = append(streamClientInterceptors, streamInterceptor)\n\t}\n\n\t// Add inteceptors as dial option\n\tservice.dialOptions = append(service.dialOptions, []grpc.DialOption{\n\t\tgrpc.WithUnaryInterceptor(\n\t\t\tgrpc_middleware.ChainUnaryClient(unaryClientInterceptors...),\n\t\t),\n\t\tgrpc.WithStreamInterceptor(\n\t\t\tgrpc_middleware.ChainStreamClient(streamClientInterceptors...),\n\t\t),\n\t}...)\n\n\t// client connection to the reverse gateway\n\tservice.clientConn, err = conn.DialService(context.Background(), &conn.GRPCDialOptions{\n\t\tServiceName: \"self\",\n\t\tAddress: fmt.Sprintf(\"localhost:%d\", gPort),\n\t\tDialOptions: service.dialOptions,\n\t\tK8Service: false,\n\t})\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"client failed to dial to gRPC server\")\n\t}\n\n\t// ============================= Initialize grpc server =============================\n\t// Add transport credentials if secure option is passed\n\tif service.cfg.ServiceTLSEnabled() {\n\t\tcreds, err := credentials.NewServerTLSFromFile(\n\t\t\tservice.cfg.ServiceTLSCertFile(), service.cfg.ServiceTLSKeyFile())\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to create grpc server tls credentials: %v\", err)\n\t\t}\n\t\tservice.serverOptions = append(\n\t\t\tservice.serverOptions, grpc.Creds(creds),\n\t\t)\n\t}\n\n\t// Append interceptors as server options\n\tservice.serverOptions = append(\n\t\tservice.serverOptions, grpc_middleware.WithUnaryServerChain(service.unaryInterceptors...))\n\tservice.serverOptions = append(\n\t\tservice.serverOptions, grpc_middleware.WithStreamServerChain(service.streamInterceptors...))\n\n\tservice.gRPCServer = grpc.NewServer(service.serverOptions...)\n\n\t// register reflection on the gRPC server\n\treflection.Register(service.gRPCServer)\n\n\treturn nil\n}", "func (m *Modules) SetIngressGlobalRouteConfig(ctx context.Context, projectID string, c 
*config.GlobalRoutesConfig) error {\n\tmodule, err := m.loadModule(projectID)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn module.SetIngressGlobalRouteConfig(ctx, projectID, c)\n}", "func TestServiceRegistryExternalTrafficGlobalBeta(t *testing.T) {\n\tctx := genericapirequest.NewDefaultContext()\n\tstorage, _, server := NewTestREST(t, nil)\n\tdefer server.Terminate(t)\n\tsvc := &api.Service{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: \"external-lb-esipp\",\n\t\t\tAnnotations: map[string]string{\n\t\t\t\tapi.BetaAnnotationExternalTraffic: api.AnnotationValueExternalTrafficGlobal,\n\t\t\t},\n\t\t},\n\t\tSpec: api.ServiceSpec{\n\t\t\tSelector: map[string]string{\"bar\": \"baz\"},\n\t\t\tSessionAffinity: api.ServiceAffinityNone,\n\t\t\tType: api.ServiceTypeLoadBalancer,\n\t\t\tPorts: []api.ServicePort{{\n\t\t\t\tPort: 6502,\n\t\t\t\tProtocol: api.ProtocolTCP,\n\t\t\t\tTargetPort: intstr.FromInt(6502),\n\t\t\t}},\n\t\t},\n\t}\n\tcreated_svc, err := storage.Create(ctx, svc, false)\n\tif created_svc == nil || err != nil {\n\t\tt.Errorf(\"Unexpected failure creating service %v\", err)\n\t}\n\tcreated_service := created_svc.(*api.Service)\n\tif service.NeedsHealthCheck(created_service) {\n\t\tt.Errorf(\"Expecting health check not needed, returned health check needed instead\")\n\t}\n\t// Make sure the service does not have the health check node port allocated\n\tport := service.GetServiceHealthCheckNodePort(created_service)\n\tif port != 0 {\n\t\tt.Errorf(\"Unexpected allocation of health check node port: %v\", port)\n\t}\n}", "func rcSetGlobal(p *TCompiler, code *TCode) (*value.Value, error) {\n\tg := p.sys.Scopes.GetGlobal()\n\tvarV := g.GetByIndex(code.A)\n\tvarV.SetValue(p.regGet(code.B))\n\tp.moveNext()\n\treturn varV, nil\n}", "func (m *ApiMeta) GlobalFlagSet(cmd string) *flag.FlagSet {\n\tf := flag.NewFlagSet(cmd, flag.ContinueOnError)\n\n\tf.StringVar(&m.gateEndpoint, \"gate-endpoint\", \"http://localhost:8084\",\n\t\t\"Gate (API server) 
endpoint\")\n\n\tf.Usage = func() {}\n\n\treturn f\n}", "func SetWithBackends(cf kubernetes.ClientFactory, prom prometheus.ClientInterface) {\n\tclientFactory = cf\n\tprometheusClient = prom\n}", "func Init(c *conf.Config, s *service.Service) {\n\tsrv = s\n\t// init inner router\n\teng := bm.DefaultServer(c.BM)\n\tinitRouter(eng)\n\t// init inner server\n\tif err := eng.Start(); err != nil {\n\t\tlog.Error(\"bm.DefaultServer error(%v)\", err)\n\t\tpanic(err)\n\t}\n}", "func (s) TestBalancer_TwoAddresses_OOBThenPerCall(t *testing.T) {\n\tctx, cancel := context.WithTimeout(context.Background(), defaultTestTimeout)\n\tdefer cancel()\n\n\tsrv1 := startServer(t, reportBoth)\n\tsrv2 := startServer(t, reportBoth)\n\n\t// srv1 starts loaded and srv2 starts without load; ensure RPCs are routed\n\t// disproportionately to srv2 (10:1).\n\tsrv1.oobMetrics.SetQPS(10.0)\n\tsrv1.oobMetrics.SetApplicationUtilization(1.0)\n\n\tsrv2.oobMetrics.SetQPS(10.0)\n\tsrv2.oobMetrics.SetApplicationUtilization(.1)\n\n\t// For per-call metrics (not used initially), srv2 reports that it is\n\t// loaded and srv1 reports low load. 
After confirming OOB works, switch to\n\t// per-call and confirm the new routing weights are applied.\n\tsrv1.callMetrics.SetQPS(10.0)\n\tsrv1.callMetrics.SetApplicationUtilization(.1)\n\n\tsrv2.callMetrics.SetQPS(10.0)\n\tsrv2.callMetrics.SetApplicationUtilization(1.0)\n\n\tsc := svcConfig(t, oobConfig)\n\tif err := srv1.StartClient(grpc.WithDefaultServiceConfig(sc)); err != nil {\n\t\tt.Fatalf(\"Error starting client: %v\", err)\n\t}\n\taddrs := []resolver.Address{{Addr: srv1.Address}, {Addr: srv2.Address}}\n\tsrv1.R.UpdateState(resolver.State{Addresses: addrs})\n\n\t// Call each backend once to ensure the weights have been received.\n\tensureReached(ctx, t, srv1.Client, 2)\n\n\t// Wait for the weight update period to allow the new weights to be processed.\n\ttime.Sleep(weightUpdatePeriod)\n\tcheckWeights(ctx, t, srvWeight{srv1, 1}, srvWeight{srv2, 10})\n\n\t// Update to per-call weights.\n\tc := svcConfig(t, perCallConfig)\n\tparsedCfg := srv1.R.CC.ParseServiceConfig(c)\n\tif parsedCfg.Err != nil {\n\t\tpanic(fmt.Sprintf(\"Error parsing config %q: %v\", c, parsedCfg.Err))\n\t}\n\tsrv1.R.UpdateState(resolver.State{Addresses: addrs, ServiceConfig: parsedCfg})\n\n\t// Wait for the weight update period to allow the new weights to be processed.\n\ttime.Sleep(weightUpdatePeriod)\n\tcheckWeights(ctx, t, srvWeight{srv1, 10}, srvWeight{srv2, 1})\n}", "func (_obj *Apipayments) TarsSetProtocol(p m.Protocol) {\n\t_obj.s.TarsSetProtocol(p)\n}", "func (bs *BusinessServer) initBCSControllerClient() {\n\tctx := &grpclb.Context{\n\t\tTarget: bs.viper.GetString(\"bcscontroller.servicename\"),\n\t\tEtcdConfig: bs.etcdCfg,\n\t}\n\n\t// gRPC dial options, with insecure and timeout.\n\topts := []grpc.DialOption{\n\t\tgrpc.WithInsecure(),\n\t\tgrpc.WithTimeout(bs.viper.GetDuration(\"bcscontroller.calltimeout\")),\n\t}\n\n\t// build gRPC client of bcscontroller.\n\tconn, err := grpclb.NewGRPCConn(ctx, opts...)\n\tif err != nil {\n\t\tlogger.Fatal(\"can't create bcscontroller gRPC 
client, %+v\", err)\n\t}\n\tbs.bcsControllerConn = conn\n\tbs.bcsControllerCli = pbbcscontroller.NewBCSControllerClient(conn.Conn())\n\tlogger.Info(\"create bcs-controller gRPC client success.\")\n}", "func (s *Serverus) InitGRPC() {\n\tlog.Println(\"Initializing gRPC\")\n\ts.server = grpc.NewServer(\n\t\tgrpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(s.interceptors...)), // register the interceptors injected and the base\n\t)\n}", "func checkGlobalSet(t *testing.T, expError bool, fabMode, vlans, vxlans string) {\n\tgl := client.Global{\n\t\tName: \"global\",\n\t\tNetworkInfraType: fabMode,\n\t\tVlans: vlans,\n\t\tVxlans: vxlans,\n\t}\n\terr := contivClient.GlobalPost(&gl)\n\tif err != nil && !expError {\n\t\tt.Fatalf(\"Error setting global {%+v}. Err: %v\", gl, err)\n\t} else if err == nil && expError {\n\t\tt.Fatalf(\"Set global {%+v} succeded while expecing error\", gl)\n\t} else if err == nil {\n\t\t// verify global state\n\t\tgotGl, err := contivClient.GlobalGet(\"global\")\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Error getting global object. Err: %v\", err)\n\t\t}\n\n\t\t// verify expected values\n\t\tif gotGl.NetworkInfraType != fabMode || gotGl.Vlans != vlans || gotGl.Vxlans != vxlans {\n\t\t\tt.Fatalf(\"Error Got global state {%+v} does not match expected %s, %s, %s\", gotGl, fabMode, vlans, vxlans)\n\t\t}\n\n\t\t// verify the state created\n\t\tgCfg := &gstate.Cfg{}\n\t\tgCfg.StateDriver = stateStore\n\t\terr = gCfg.Read(\"\")\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Error reading global cfg state. Err: %v\", err)\n\t\t}\n\n\t\tif gCfg.Auto.VLANs != vlans || gCfg.Auto.VXLANs != vxlans {\n\t\t\tt.Fatalf(\"global config Vlan/Vxlan ranges %s/%s are not same as %s/%s\",\n\t\t\t\tgCfg.Auto.VLANs, gCfg.Auto.VXLANs, vlans, vxlans)\n\t\t}\n\n\t\t// verify global oper state\n\t\tgOper := &gstate.Oper{}\n\t\tgOper.StateDriver = stateStore\n\t\terr = gOper.Read(\"\")\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Error reading global oper state. 
Err: %v\", err)\n\t\t}\n\n\t\t// verify vxlan resources\n\t\tvxlanRsrc := &resources.AutoVXLANCfgResource{}\n\t\tvxlanRsrc.StateDriver = stateStore\n\t\tif err := vxlanRsrc.Read(\"global\"); err != nil {\n\t\t\tt.Fatalf(\"Error reading vxlan resource. Err: %v\", err)\n\t\t}\n\n\t\t// verify vlan resource\n\t\tvlanRsrc := &resources.AutoVLANCfgResource{}\n\t\tvlanRsrc.StateDriver = stateStore\n\t\tif err := vlanRsrc.Read(\"global\"); err != nil {\n\t\t\tt.Fatalf(\"Error reading vlan resource. Err: %v\", err)\n\t\t}\n\t}\n}", "func (h *Host) initBackends() {\n\n\t//check if we have any backends to proxy requests to\n\tif h.backends != nil {\n\n\t\t//loop over configs\n\t\tfor _, v := range h.backends.Map {\n\t\t\t//grab the server url and parse it\n\t\t\tserverUrl, err := url.Parse(v.(string))\n\n\t\t\t//if its not a valid url log the error\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(err)\n\t\t\t} else {\n\t\t\t\t//create a new proxy for our backend and add it to our proxy slice\n\t\t\t\th.proxies = append(h.proxies, httputil.NewSingleHostReverseProxy(serverUrl))\n\t\t\t}\n\t\t}\n\n\t\t//loop over proxies and block till one is needed\n\t\tgo func() {\n\t\t\tfor {\n\t\t\t\tfor _, p := range h.proxies {\n\t\t\t\t\th.proxyChannel <- p\n\t\t\t\t}\n\t\t\t}\n\t\t}()\n\t}\n}", "func SetBackend(backend SupportedBackend, b Backend) {\n\tswitch backend {\n\tcase YFinBackend:\n\t\tbackends.YFin = b\n\tcase BATSBackend:\n\t\tbackends.Bats = b\n\t}\n}", "func (gb *gcpBalancer) newSubConn() {\n\tgb.mu.Lock()\n\tdefer gb.mu.Unlock()\n\n\t// there are chances the newly created subconns are still connecting,\n\t// we can wait on those new subconns.\n\tfor _, scState := range gb.scStates {\n\t\tif scState == connectivity.Connecting {\n\t\t\treturn\n\t\t}\n\t}\n\n\tsc, err := gb.cc.NewSubConn(\n\t\tgb.addrs,\n\t\tbalancer.NewSubConnOptions{HealthCheckEnabled: healthCheckEnabled},\n\t)\n\tif err != nil {\n\t\tgrpclog.Errorf(\"grpcgcp.gcpBalancer: failed to NewSubConn: %v\", 
err)\n\t\treturn\n\t}\n\tgb.scRefs[sc] = &subConnRef{\n\t\tsubConn: sc,\n\t}\n\tgb.scStates[sc] = connectivity.Idle\n\tsc.Connect()\n}", "func (h *HTTPTransport) SetScheme(req *http.Request) {\n\tif req.URL.Scheme != \"\" {\n\t\treturn\n\t}\n\tif h.shouldUseTLS(req) {\n\t\treq.URL.Scheme = \"https\"\n\t} else {\n\t\treq.URL.Scheme = \"http\"\n\t}\n}", "func (f5 *BigIP) handleGlobalPolicyUpdate(msg comm.Message) comm.Message {\n\n\tif err := f5.upsertPool(msg); err != nil {\n\t\tmsg.Error = err.Error()\n\t\treturn msg\n\t}\n\n\t//create a draftPath policy\n\tglobalPolicy, draftName, draftPath := f5.getGlobalPolicyInfo(msg.Service.TLS)\n\n\tpolicyNeedsUpdate, policyRuleExist, err := f5.policyNeedsUpdate(f5.addPartitionToName(globalPolicy), msg)\n\tif err != nil {\n\t\tmsg.Error = err.Error()\n\t\treturn msg\n\t}\n\n\tif policyRuleExist && !policyNeedsUpdate {\n\t\tlog.Debugf(\"no policy update for %v\", msg.Service.Name)\n\t\treturn msg\n\t}\n\n\tif err := f5.cli.CreateDraftFromPolicy(f5.addPartitionToName(globalPolicy)); err != nil {\n\t\tmsg.Error = fmt.Sprintf(\"error creating %v policy %v\", draftPath, err.Error())\n\t\treturn msg\n\t}\n\tdefer func() {\n\t\tif err := f5.cli.DeletePolicy(draftName); err != nil {\n\t\t\tlog.Warnf(\"Error deleting draftPath policy %v %v\", globalPolicy, err)\n\t\t}\n\t}()\n\n\tif policyNeedsUpdate {\n\n\t\tlog.Debugf(\"updating policy %v\", globalPolicy)\n\n\t\tif err := f5.cli.ModifyPolicyRule(draftName, msg.Service.Name, f5.buildPolicyRuleFromMsg(msg)); err != nil {\n\t\t\tmsg.Error = fmt.Sprintf(\"could not modify policy rule %v %v\", msg.Service.Name, err.Error())\n\t\t\treturn msg\n\t\t}\n\n\t\tif err := f5.cli.PublishDraftPolicy(draftPath); err != nil {\n\t\t\tmsg.Error = fmt.Sprintf(\"could not publish draft %v %v\", globalPolicy, err.Error())\n\t\t\treturn msg\n\t\t}\n\n\t\treturn msg\n\t}\n\n\tif !policyRuleExist {\n\n\t\tlog.Debugf(\"updating policy %v with new rule for %v\", globalPolicy, msg.Service.Name)\n\n\t\tif err 
:= f5.cli.AddRuleToPolicy(draftName, f5.buildPolicyRuleFromMsg(msg)); err != nil {\n\t\t\tmsg.Error = fmt.Sprintf(\"error adding rule %v to draftPath policy %v\", msg.Service.Name, err.Error())\n\t\t\treturn msg\n\t\t}\n\n\t\tif err := f5.cli.PublishDraftPolicy(draftPath); err != nil {\n\t\t\tmsg.Error = fmt.Sprintf(\"could not publish draft %v %v\", draftPath, err.Error())\n\t\t\treturn msg\n\t\t}\n\n\t\treturn msg\n\t}\n\n\treturn msg\n}", "func main() {\n\tvar (\n\t\t//port = flag.Int(\"port\", 7472, \"HTTP listening port for Prometheus metrics\")\n\t\t//name = flag.String(\"name\", \"lb-ippool\", \"configmap name in default namespace\")\n\t\tpath = flag.String(\"config\", \"\", \"config file\")\n\t\tkubeconfig = flag.String(\"kubeconfig\", \"\", \"absolute path to the kubeconfig file (only needed when running outside of k8s)\")\n\t)\n\n\tflag.Parse()\n\tif len(*path) == 0 {\n\t\tklog.Fatalf(fmt.Sprintf(\"config file is required\"))\n\t}\n\n\trestConfig, err := clientcmd.BuildConfigFromFlags(\"\", *kubeconfig)\n\tif err != nil {\n\t\tklog.Fatal(err)\n\t}\n\tclientset, err := kubernetes.NewForConfig(restConfig)\n\tif err != nil {\n\t\tklog.Fatal(err)\n\t}\n\tbroadcaster := record.NewBroadcaster()\n\tbroadcaster.StartRecordingToSink(&corev1.EventSinkImpl{Interface: corev1.New(clientset.CoreV1().RESTClient()).Events(\"\")})\n\trecorder := broadcaster.NewRecorder(scheme.Scheme, v1.EventSource{Component: \"lb-controller\"})\n\tqueue := workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter())\n\n\t// INFO: (1) 与 router server 建立 bgp session\n\ts := getSpeaker(*path)\n\n\tsvcWatcher := cache.NewListWatchFromClient(clientset.CoreV1().RESTClient(), \"services\",\n\t\tmetav1.NamespaceAll, fields.Everything())\n\tsvcIndexer, svcInformer := cache.NewIndexerInformer(svcWatcher, &v1.Service{}, 0, cache.ResourceEventHandlerFuncs{\n\t\tAddFunc: func(obj interface{}) {\n\t\t\tkey, err := cache.MetaNamespaceKeyFunc(obj)\n\t\t\tif err == nil 
{\n\t\t\t\tqueue.Add(svcKey(key))\n\t\t\t}\n\t\t},\n\t\tUpdateFunc: func(old interface{}, new interface{}) {\n\t\t\t//key, err := cache.MetaNamespaceKeyFunc(new)\n\t\t\t//if err == nil {\n\t\t\t//\t//queue.Add(svcKey(key))\n\t\t\t//\tklog.Infof(fmt.Sprintf(\"update %s\", key))\n\t\t\t//}\n\t\t},\n\t\tDeleteFunc: func(obj interface{}) {\n\t\t\t//key, err := cache.DeletionHandlingMetaNamespaceKeyFunc(obj)\n\t\t\t//if err == nil {\n\t\t\t//\t//queue.Add(svcKey(key))\n\t\t\t//\tklog.Infof(fmt.Sprintf(\"delete %s\", key))\n\t\t\t//}\n\t\t},\n\t}, cache.Indexers{})\n\n\tepWatcher := cache.NewListWatchFromClient(clientset.CoreV1().RESTClient(), \"endpoints\",\n\t\tmetav1.NamespaceAll, fields.Everything())\n\tepIndexer, epInformer := cache.NewIndexerInformer(epWatcher, &v1.Endpoints{}, 0, cache.ResourceEventHandlerFuncs{\n\t\tAddFunc: func(obj interface{}) {\n\t\t\t//key, err := cache.MetaNamespaceKeyFunc(obj)\n\t\t\t//if err == nil {\n\t\t\t//\t//queue.Add(svcKey(key))\n\t\t\t//\tklog.Info(key)\n\t\t\t//}\n\t\t},\n\t\tUpdateFunc: func(old interface{}, new interface{}) {\n\t\t\t//key, err := cache.MetaNamespaceKeyFunc(new)\n\t\t\t//if err == nil {\n\t\t\t//\tklog.Info(key)\n\t\t\t//\t//queue.Add(svcKey(key))\n\t\t\t//}\n\t\t},\n\t\tDeleteFunc: func(obj interface{}) {\n\t\t\t//key, err := cache.DeletionHandlingMetaNamespaceKeyFunc(obj)\n\t\t\t//if err == nil {\n\t\t\t//\t//queue.Add(svcKey(key))\n\t\t\t//\tklog.Info(key)\n\t\t\t//}\n\t\t},\n\t}, cache.Indexers{})\n\n\tstopCh := make(chan struct{})\n\tgo svcInformer.Run(stopCh)\n\tgo epInformer.Run(stopCh)\n\tif !cache.WaitForCacheSync(stopCh, svcInformer.HasSynced, epInformer.HasSynced) {\n\t\tklog.Fatalf(fmt.Sprintf(\"time out waiting for cache sync\"))\n\t}\n\n\tsync := func(key interface{}, queue workqueue.RateLimitingInterface) error {\n\t\tdefer queue.Done(key)\n\n\t\tswitch k := key.(type) {\n\t\tcase svcKey:\n\t\t\tsvc, exists, err := svcIndexer.GetByKey(string(k))\n\t\t\tif err != nil {\n\t\t\t\treturn 
err\n\t\t\t}\n\t\t\tif !exists {\n\t\t\t\treturn fmt.Errorf(\"not exist\")\n\t\t\t}\n\t\t\tendpoints, exists, err := epIndexer.GetByKey(string(k))\n\t\t\tif err != nil {\n\t\t\t\tklog.Errorf(\"failed to get endpoints\")\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif !exists {\n\t\t\t\treturn fmt.Errorf(\"not exist\")\n\t\t\t}\n\n\t\t\tif svc.(*v1.Service).Spec.Type != v1.ServiceTypeLoadBalancer {\n\t\t\t\treturn nil\n\t\t\t}\n\n\t\t\trecorder.Eventf(svc.(*v1.Service), v1.EventTypeNormal, \"SetBalancer\", \"advertise svc ip\")\n\t\t\ts.SetBalancer(string(k), svc.(*v1.Service), endpoints.(*v1.Endpoints))\n\t\t\treturn nil\n\t\tdefault:\n\t\t\tpanic(fmt.Sprintf(\"unknown key type for %s %T\", key, key))\n\t\t}\n\t}\n\n\tfor {\n\t\tkey, quit := queue.Get()\n\t\tif quit {\n\t\t\treturn\n\t\t}\n\n\t\terr := sync(key, queue)\n\t\tif err != nil {\n\t\t\tklog.Error(err)\n\t\t} else {\n\t\t\tqueue.Forget(key)\n\t\t}\n\t}\n}", "func SetGlobalOptions(opts ...Option) {\n\tglobalOptions = opts\n}", "func (c *Client) SetBackoff(backoff retryablehttp.Backoff) {\n\tc.modifyLock.RLock()\n\tdefer c.modifyLock.RUnlock()\n\tc.config.modifyLock.Lock()\n\tdefer c.config.modifyLock.Unlock()\n\n\tc.config.Backoff = backoff\n}", "func (s) TestBalancer_OneAddress(t *testing.T) {\n\ttestCases := []struct {\n\t\trt reportType\n\t\tcfg iwrr.LBConfig\n\t}{\n\t\t{rt: reportNone, cfg: perCallConfig},\n\t\t{rt: reportCall, cfg: perCallConfig},\n\t\t{rt: reportOOB, cfg: oobConfig},\n\t}\n\n\tfor _, tc := range testCases {\n\t\tt.Run(fmt.Sprintf(\"reportType:%v\", tc.rt), func(t *testing.T) {\n\t\t\tctx, cancel := context.WithTimeout(context.Background(), defaultTestTimeout)\n\t\t\tdefer cancel()\n\n\t\t\tsrv := startServer(t, tc.rt)\n\n\t\t\tsc := svcConfig(t, tc.cfg)\n\t\t\tif err := srv.StartClient(grpc.WithDefaultServiceConfig(sc)); err != nil {\n\t\t\t\tt.Fatalf(\"Error starting client: %v\", err)\n\t\t\t}\n\n\t\t\t// Perform many RPCs to ensure the LB policy works with 1 address.\n\t\t\tfor i := 0; i < 
100; i++ {\n\t\t\t\tsrv.callMetrics.SetQPS(float64(i))\n\t\t\t\tsrv.oobMetrics.SetQPS(float64(i))\n\t\t\t\tif _, err := srv.Client.EmptyCall(ctx, &testpb.Empty{}); err != nil {\n\t\t\t\t\tt.Fatalf(\"Error from EmptyCall: %v\", err)\n\t\t\t\t}\n\t\t\t\ttime.Sleep(time.Millisecond) // Delay; test will run 100ms and should perform ~10 weight updates\n\t\t\t}\n\t\t})\n\t}\n}", "func (puo *PendingloanbindingUpdateOne) SetPointLB(i int) *PendingloanbindingUpdateOne {\n\tpuo.mutation.ResetPointLB()\n\tpuo.mutation.SetPointLB(i)\n\treturn puo\n}", "func NewSimpleBalancer(addr string) (sb *SimpleBalancer, err error) {\n\tsb = &SimpleBalancer{}\n\tsb.url, err = url.Parse(addr)\n\n\treturn\n}", "func desiredLoadBalancerService(ci *operatorv1.IngressController, deploymentRef metav1.OwnerReference, platform *configv1.PlatformStatus) (bool, *corev1.Service, error) {\n\tif ci.Status.EndpointPublishingStrategy.Type != operatorv1.LoadBalancerServiceStrategyType {\n\t\treturn false, nil, nil\n\t}\n\tservice := manifests.LoadBalancerService()\n\n\tname := controller.LoadBalancerServiceName(ci)\n\n\tservice.Namespace = name.Namespace\n\tservice.Name = name.Name\n\n\tif service.Labels == nil {\n\t\tservice.Labels = map[string]string{}\n\t}\n\tservice.Labels[\"router\"] = name.Name\n\tservice.Labels[manifests.OwningIngressControllerLabel] = ci.Name\n\n\tservice.Spec.Selector = controller.IngressControllerDeploymentPodSelector(ci).MatchLabels\n\n\tlb := ci.Status.EndpointPublishingStrategy.LoadBalancer\n\tisInternal := lb != nil && lb.Scope == operatorv1.InternalLoadBalancer\n\n\tif service.Annotations == nil {\n\t\tservice.Annotations = map[string]string{}\n\t}\n\n\tproxyNeeded, err := IsProxyProtocolNeeded(ci, platform)\n\tif err != nil {\n\t\treturn false, nil, fmt.Errorf(\"failed to determine if proxy protocol is proxyNeeded for ingresscontroller %q: %v\", ci.Name, err)\n\t}\n\n\tif platform != nil {\n\t\tif isInternal {\n\t\t\tannotation := 
InternalLBAnnotations[platform.Type]\n\t\t\tfor name, value := range annotation {\n\t\t\t\tservice.Annotations[name] = value\n\t\t\t}\n\n\t\t\t// Set the GCP Global Access annotation for internal load balancers on GCP only\n\t\t\tif platform.Type == configv1.GCPPlatformType {\n\t\t\t\tif lb != nil && lb.ProviderParameters != nil &&\n\t\t\t\t\tlb.ProviderParameters.Type == operatorv1.GCPLoadBalancerProvider &&\n\t\t\t\t\tlb.ProviderParameters.GCP != nil {\n\t\t\t\t\tglobalAccessEnabled := lb.ProviderParameters.GCP.ClientAccess == operatorv1.GCPGlobalAccess\n\t\t\t\t\tservice.Annotations[GCPGlobalAccessAnnotation] = strconv.FormatBool(globalAccessEnabled)\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tannotation := externalLBAnnotations[platform.Type]\n\t\t\tfor name, value := range annotation {\n\t\t\t\tservice.Annotations[name] = value\n\t\t\t}\n\t\t}\n\t\tswitch platform.Type {\n\t\tcase configv1.AWSPlatformType:\n\t\t\tservice.Annotations[awsLBHealthCheckIntervalAnnotation] = awsLBHealthCheckIntervalDefault\n\t\t\tif proxyNeeded {\n\t\t\t\tservice.Annotations[awsLBProxyProtocolAnnotation] = \"*\"\n\t\t\t}\n\t\t\tif lb != nil && lb.ProviderParameters != nil {\n\t\t\t\tif aws := lb.ProviderParameters.AWS; aws != nil && lb.ProviderParameters.Type == operatorv1.AWSLoadBalancerProvider {\n\t\t\t\t\tswitch aws.Type {\n\t\t\t\t\tcase operatorv1.AWSNetworkLoadBalancer:\n\t\t\t\t\t\tservice.Annotations[AWSLBTypeAnnotation] = AWSNLBAnnotation\n\t\t\t\t\t\t// NLBs require a different health check interval than CLBs.\n\t\t\t\t\t\t// See <https://bugzilla.redhat.com/show_bug.cgi?id=1908758>.\n\t\t\t\t\t\tservice.Annotations[awsLBHealthCheckIntervalAnnotation] = awsLBHealthCheckIntervalNLB\n\t\t\t\t\tcase operatorv1.AWSClassicLoadBalancer:\n\t\t\t\t\t\tif aws.ClassicLoadBalancerParameters != nil {\n\t\t\t\t\t\t\tif v := aws.ClassicLoadBalancerParameters.ConnectionIdleTimeout; v.Duration > 0 {\n\t\t\t\t\t\t\t\tservice.Annotations[awsELBConnectionIdleTimeoutAnnotation] = 
strconv.FormatUint(uint64(v.Round(time.Second).Seconds()), 10)\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif platform.AWS != nil && len(platform.AWS.ResourceTags) > 0 {\n\t\t\t\tvar additionalTags []string\n\t\t\t\tfor _, userTag := range platform.AWS.ResourceTags {\n\t\t\t\t\tif len(userTag.Key) > 0 {\n\t\t\t\t\t\tadditionalTags = append(additionalTags, userTag.Key+\"=\"+userTag.Value)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif len(additionalTags) > 0 {\n\t\t\t\t\tservice.Annotations[awsLBAdditionalResourceTags] = strings.Join(additionalTags, \",\")\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Set the load balancer for AWS to be as aggressive as Azure (2 fail @ 5s interval, 2 healthy)\n\t\t\tservice.Annotations[awsLBHealthCheckTimeoutAnnotation] = awsLBHealthCheckTimeoutDefault\n\t\t\tservice.Annotations[awsLBHealthCheckUnhealthyThresholdAnnotation] = awsLBHealthCheckUnhealthyThresholdDefault\n\t\t\tservice.Annotations[awsLBHealthCheckHealthyThresholdAnnotation] = awsLBHealthCheckHealthyThresholdDefault\n\t\tcase configv1.IBMCloudPlatformType, configv1.PowerVSPlatformType:\n\t\t\t// Set ExternalTrafficPolicy to type Cluster - IBM's LoadBalancer impl is created within the cluster.\n\t\t\t// LB places VIP on one of the worker nodes, using keepalived to maintain the VIP and ensuring redundancy\n\t\t\t// LB relies on iptable rules kube-proxy puts in to send traffic from the VIP node to the cluster\n\t\t\t// If policy is local, traffic is only sent to pods on the local node, as such Cluster enables traffic to flow to all the pods in the cluster\n\t\t\tservice.Spec.ExternalTrafficPolicy = corev1.ServiceExternalTrafficPolicyTypeCluster\n\t\t\tif proxyNeeded {\n\t\t\t\tservice.Annotations[iksLBEnableFeaturesAnnotation] = iksLBEnableFeaturesProxyProtocol\n\t\t\t}\n\n\t\tcase configv1.AlibabaCloudPlatformType:\n\t\t\tif !isInternal {\n\t\t\t\tservice.Annotations[alibabaCloudLBAddressTypeAnnotation] = alibabaCloudLBAddressTypeInternet\n\t\t\t}\n\t\t}\n\t\t// Azure 
load balancers are not customizable and are set to (2 fail @ 5s interval, 2 healthy)\n\t\t// GCP load balancers are not customizable and are set to (3 fail @ 8s interval, 1 healthy)\n\n\t\tif v, err := shouldUseLocalWithFallback(ci, service); err != nil {\n\t\t\treturn true, service, err\n\t\t} else if v {\n\t\t\tservice.Annotations[localWithFallbackAnnotation] = \"\"\n\t\t}\n\t}\n\n\tif ci.Spec.EndpointPublishingStrategy != nil {\n\t\tlb := ci.Spec.EndpointPublishingStrategy.LoadBalancer\n\t\tif lb != nil && len(lb.AllowedSourceRanges) > 0 {\n\t\t\tcidrs := make([]string, len(lb.AllowedSourceRanges))\n\t\t\tfor i, cidr := range lb.AllowedSourceRanges {\n\t\t\t\tcidrs[i] = string(cidr)\n\t\t\t}\n\t\t\tservice.Spec.LoadBalancerSourceRanges = cidrs\n\t\t}\n\t}\n\n\tservice.SetOwnerReferences([]metav1.OwnerReference{deploymentRef})\n\treturn true, service, nil\n}", "func (s *Servers) initiateGRPCServer(endpoint string, config RuntimeConfig) error {\n\taddr := fmt.Sprintf(\"%s:%d\", endpoint, config.port)\n\tln, err := net.Listen(\"tcp\", addr)\n\tif err != nil {\n\t\tlog.Fatalf(\"gRPC server: failed to listen: %v\", err)\n\t\tos.Exit(2)\n\t}\n\ts.gRPCListener = ln\n\n\topts := []grpc.ServerOption{\n\t\ts.getUnaryInterceptors(),\n\t\t// grpc.ChainUnaryInterceptor(\n\t\t// \ts.Backend.AuthInterceptor.Unary(),\n\t\t// \ts.Backend.ObserverRegistry.UnaryInterceptor,\n\t\t// ),\n\t\t// MaxConnectionAge is just to avoid long connection, to facilitate load balancing\n\t\t// MaxConnectionAgeGrace will torn them, default to infinity\n\t\tgrpc.KeepaliveParams(keepalive.ServerParameters{MaxConnectionAge: 2 * time.Minute}),\n\t}\n\n\t// load mutual TLS cert/key and root CA cert\n\tif config.tlsCaCert != \"\" && config.tlsCert != \"\" && config.tlsKey != \"\" {\n\t\tkeyPair, err := tls.LoadX509KeyPair(config.tlsCert, config.tlsKey)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Failed to load server TLS cert/key with error:%v\", err)\n\t\t}\n\n\t\tcert, err := 
ioutil.ReadFile(config.tlsCaCert)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Failed to load root CA cert file with error:%v\", err)\n\t\t}\n\n\t\tpool := x509.NewCertPool()\n\t\tpool.AppendCertsFromPEM(cert)\n\n\t\tta := credentials.NewTLS(&tls.Config{\n\t\t\tCertificates: []tls.Certificate{keyPair},\n\t\t\tClientCAs: pool,\n\t\t\tClientAuth: tls.RequireAndVerifyClientCert,\n\t\t})\n\n\t\topts = append(opts, grpc.Creds(ta))\n\t}\n\n\ts.gRPCServer = grpc.NewServer(opts...)\n\n\ts.registerGRPCService()\n\tlog.Printf(\"gRPC server serving at %s\", addr)\n\n\tfb := fallback.NewServer(fmt.Sprintf(\"%s:%d\", endpoint, config.fallbackPort), fmt.Sprintf(\"%s:%d\", endpoint, config.port))\n\ts.fallbackServer = fb\n\n\t// Register reflection service on gRPC server.\n\treflection.Register(s.gRPCServer)\n\n\treturn s.gRPCServer.Serve(ln)\n}", "func (gb *gcpBalancer) bindSubConn(bindKey string, sc balancer.SubConn) {\n\tgb.mu.Lock()\n\tdefer gb.mu.Unlock()\n\t_, ok := gb.affinityMap[bindKey]\n\tif !ok {\n\t\tgb.affinityMap[bindKey] = sc\n\t}\n\tgb.scRefs[sc].affinityIncr()\n}", "func (obj *ShopSys) TarsSetProtocol(p model.Protocol) {\n\tobj.s.TarsSetProtocol(p)\n}", "func (obj *ShopSys) TarsSetProtocol(p model.Protocol) {\n\tobj.s.TarsSetProtocol(p)\n}", "func (c *ClusterManager) GC(lbNames []string, nodePorts []backends.ServicePort) error {\n\t// On GC:\n\t// * Loadbalancers need to get deleted before backends.\n\t// * Backends are refcounted in a shared pool.\n\t// * We always want to GC backends even if there was an error in GCing\n\t// loadbalancers, because the next Sync could rely on the GC for quota.\n\t// * There are at least 2 cases for backend GC:\n\t// 1. The loadbalancer has been deleted.\n\t// 2. An update to the url map drops the refcount of a backend. 
This can\n\t// happen when an Ingress is updated, if we don't GC after the update\n\t// we'll leak the backend.\n\tlbErr := c.l7Pool.GC(lbNames)\n\tbeErr := c.backendPool.GC(nodePorts)\n\tif lbErr != nil {\n\t\treturn lbErr\n\t}\n\tif beErr != nil {\n\t\treturn beErr\n\t}\n\n\t// TODO(ingress#120): Move this to the backend pool so it mirrors creation\n\tif len(lbNames) == 0 {\n\t\tigName := c.ClusterNamer.InstanceGroup()\n\t\tglog.Infof(\"Deleting instance group %v\", igName)\n\t\tif err := c.instancePool.DeleteInstanceGroup(igName); err != err {\n\t\t\treturn err\n\t\t}\n\t\tglog.V(2).Infof(\"Shutting down firewall as there are no loadbalancers\")\n\t\tc.firewallPool.Shutdown()\n\t}\n\n\treturn nil\n}", "func (pu *PendingloanbindingUpdate) SetPointLB(i int) *PendingloanbindingUpdate {\n\tpu.mutation.ResetPointLB()\n\tpu.mutation.SetPointLB(i)\n\treturn pu\n}" ]
[ "0.5183199", "0.5077026", "0.49607387", "0.4854912", "0.47470388", "0.47414953", "0.4728902", "0.47090948", "0.46896395", "0.46880862", "0.46415812", "0.4627773", "0.46094668", "0.4597579", "0.45945796", "0.45730206", "0.45671153", "0.45562032", "0.4491078", "0.44766483", "0.44675103", "0.44379058", "0.44354638", "0.4430052", "0.44250864", "0.4391749", "0.43600962", "0.43537486", "0.4351377", "0.43476364", "0.43476364", "0.43476364", "0.43476364", "0.43476364", "0.43476364", "0.43476364", "0.43476364", "0.43476364", "0.43476364", "0.43302608", "0.43301943", "0.43276632", "0.43117958", "0.42937022", "0.42924556", "0.42709368", "0.42598256", "0.42598256", "0.42598256", "0.42598256", "0.42598256", "0.42598256", "0.42597863", "0.4244447", "0.42418763", "0.4218989", "0.42121693", "0.42104122", "0.42090714", "0.41916865", "0.41907904", "0.41881534", "0.4165824", "0.41566923", "0.41457132", "0.41419724", "0.41411185", "0.41395548", "0.41377342", "0.41221341", "0.4110992", "0.41102713", "0.41094357", "0.41088367", "0.41013786", "0.40728894", "0.40590194", "0.40504748", "0.404797", "0.404669", "0.40284777", "0.40105048", "0.39962938", "0.39939883", "0.39863002", "0.39760065", "0.3971908", "0.39705622", "0.3967872", "0.39446658", "0.39444727", "0.39419335", "0.3940914", "0.39405763", "0.3920878", "0.39152166", "0.3913234", "0.3913234", "0.39130405", "0.39129585" ]
0.8345059
0
Build creates a grpc Picker.
func (b *Builder) Build(info base.PickerBuildInfo) gBalancer.Picker { if len(info.ReadySCs) == 0 { // Block the RPC until a new picker is available via UpdateState(). return base.NewErrPicker(gBalancer.ErrNoSubConnAvailable) } nodes := make([]selector.Node, 0) for conn, info := range info.ReadySCs { ins, _ := info.Address.Attributes.Value("rawServiceInstance").(*registry.ServiceInstance) nodes = append(nodes, &grpcNode{ Node: selector.NewNode(info.Address.Addr, ins), subConn: conn, }) } p := &Picker{ selector: b.builder.Build(), } p.selector.Apply(nodes) return p }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*nodePickerBuilder) Build(info base.PickerBuildInfo) balancer.V2Picker {\n\tif len(info.ReadySCs) == 0 {\n\t\treturn base.NewErrPickerV2(balancer.ErrNoSubConnAvailable)\n\t}\n\n\tvar scs []balancer.SubConn\n\tfor sc := range info.ReadySCs {\n\t\tscs = append(scs, sc)\n\t}\n\n\treturn &nodePicker{\n\t\tsubConns: scs,\n\t}\n}", "func (gp Provider) Build(config config.Credentials) provider.Provider {\n\tclient := NewClient()\n\n\treturn &Provider{\n\t\tVerifier: provider.NewVerifierBasket(\n\t\t\tNewTeamVerifier(teamConfigsToTeam(config.Github.Teams), client),\n\t\t\tNewOrganizationVerifier(config.Github.Organizations, client),\n\t\t),\n\t}\n}", "func (cupBuilder *CupBuilder) Build() *Cup {\n return cupBuilder.cup\n}", "func (gb *gcpBalancer) regeneratePicker() {\n\tgb.mu.RLock()\n\tdefer gb.mu.RUnlock()\n\n\tif gb.state == connectivity.TransientFailure {\n\t\tgb.picker = newErrPicker(balancer.ErrTransientFailure)\n\t\treturn\n\t}\n\treadyRefs := []*subConnRef{}\n\n\t// Select ready subConns from subConn map.\n\tfor sc, scState := range gb.scStates {\n\t\tif scState == connectivity.Ready {\n\t\t\treadyRefs = append(readyRefs, gb.scRefs[sc])\n\t\t}\n\t}\n\tgb.picker = newGCPPicker(readyRefs, gb)\n}", "func NewPicker(store MediumSourceStorer) *Picker {\n\treturn &Picker{\n\t\tstore: store,\n\t}\n}", "func NewPickUp() Pickup {\n p := Pickup{name: \"Pickup\", vehicle: \"Pickup\", speed: 60, capacity: 2, isPrivate: true}\n return p\n}", "func (bb *gcpBalancerBuilder) Build(\n\tcc balancer.ClientConn,\n\topt balancer.BuildOptions,\n) balancer.Balancer {\n\treturn &gcpBalancer{\n\t\tcc: cc,\n\t\taffinityMap: make(map[string]balancer.SubConn),\n\t\tscRefs: make(map[balancer.SubConn]*subConnRef),\n\t\tscStates: make(map[balancer.SubConn]connectivity.State),\n\t\tcsEvltr: &connectivityStateEvaluator{},\n\t\t// Initialize picker to a picker that always return\n\t\t// ErrNoSubConnAvailable, because when state of a SubConn changes, we\n\t\t// may call UpdateState with 
this picker.\n\t\tpicker: newErrPicker(balancer.ErrNoSubConnAvailable),\n\t}\n}", "func (p *PopupWidget) Build() {\n\tif imgui.BeginPopup(p.name, int(p.flags)) {\n\t\tp.layout.Build()\n\t\timgui.EndPopup()\n\t}\n}", "func NewPeerPicker(tracker *Tracker, list *memberlist.Memberlist) PeerPicker {\n\treturn PeerPicker{\n\t\ttracker: tracker,\n\t\tourName: list.LocalNode().Name,\n\t}\n}", "func (opts BuilderOptions) Build(b Builder) (desc.Descriptor, error) {\n\treturn doBuild(b, opts)\n}", "func (rb *DataframeAnalyticsFieldSelectionBuilder) Build() DataframeAnalyticsFieldSelection {\n\treturn *rb.v\n}", "func (p *PopupModalWidget) Build() {\n\tif imgui.BeginPopupModalV(p.name, p.open, int(p.flags)) {\n\t\tp.layout.Build()\n\t\timgui.EndPopup()\n\t}\n}", "func (b *Builder) Build() (*corev1.PersistentVolumeClaim, error) {\n\tif len(b.errs) > 0 {\n\t\treturn nil, errors.Errorf(\"%+v\", b.errs)\n\t}\n\treturn b.pvc.object, nil\n}", "func (b *ChaincodeDataBuilder) Build() *ccprovider.ChaincodeData {\n\tcd := &ccprovider.ChaincodeData{\n\t\tName: b.name,\n\t\tVersion: b.version,\n\t\tVscc: b.vscc,\n\t}\n\n\tif b.policy != \"\" {\n\t\tpolicyEnv, err := policydsl.FromString(b.policy)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tpolicyBytes, err := proto.Marshal(policyEnv)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tcd.Policy = policyBytes\n\t}\n\treturn cd\n}", "func (qb QueryBuilder) Build() (*stackdriver.ProjectsTimeSeriesListCall, error) {\n\tif err := qb.validate(); err != nil {\n\t\treturn nil, err\n\t}\n\n\tfilter := qb.composeFilter()\n\n\tif qb.metricSelector.Empty() {\n\t\treturn qb.translator.createListTimeseriesRequest(filter, qb.metricKind, qb.metricValueType, \"\"), nil\n\t}\n\n\tfilterForSelector, reducer, err := qb.translator.filterForSelector(qb.metricSelector, allowedCustomMetricsLabelPrefixes, allowedCustomMetricsFullLabelNames)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn 
qb.translator.createListTimeseriesRequest(joinFilters(filterForSelector, filter), qb.metricKind, qb.metricValueType, reducer), nil\n}", "func BuildFrom(pvc *corev1.PersistentVolumeClaim) *Builder {\n\tif pvc == nil {\n\t\tb := NewBuilder()\n\t\tb.errs = append(\n\t\t\tb.errs,\n\t\t\terrors.New(\"failed to build pvc object: nil pvc\"),\n\t\t)\n\t\treturn b\n\t}\n\treturn &Builder{\n\t\tpvc: &PVC{\n\t\t\tobject: pvc,\n\t\t},\n\t}\n}", "func (b *CapabilityBuilder) Build() (object *Capability, err error) {\n\tobject = new(Capability)\n\tobject.bitmap_ = b.bitmap_\n\tobject.inherited = b.inherited\n\tobject.name = b.name\n\tobject.value = b.value\n\treturn\n}", "func (Codegen) Buildbox() error {\n\tfmt.Println(\"\\n=====> Building GRPC Buildbox...\\n\")\n\treturn trace.Wrap(sh.RunV(\n\t\t\"docker\", \"build\",\n\t\t\"--tag\", fmt.Sprint(\"satellite-grpc-buildbox:\", version()),\n\t\t\"--build-arg\", fmt.Sprint(\"GRPC_PROTOC_VER=\", grpcProtocVersion),\n\t\t\"--build-arg\", fmt.Sprint(\"GRPC_GOGO_PROTO_TAG=\", grpcGogoProtoTag),\n\t\t\"-f\", \"build.assets/grpc/Dockerfile\",\n\t\t\".\",\n\t))\n}", "func (b *corelibBalancer) rebuildpicker(OnOff bool) {\n\ttmp := make([]picker.ServerForPick, 0, len(b.servers))\n\tfor _, server := range b.servers {\n\t\tif server.Pickable() {\n\t\t\ttmp = append(tmp, server)\n\t\t}\n\t}\n\tb.picker.UpdateServers(tmp)\n\tif OnOff {\n\t\tb.c.resolver.Wake(resolver.CALL)\n\t}\n\treturn\n}", "func (b *PeerDependencyListBuilder) Build() (list *PeerDependencyList, err error) {\n\titems := make([]*PeerDependency, len(b.items))\n\tfor i, item := range b.items {\n\t\titems[i], err = item.Build()\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\tlist = new(PeerDependencyList)\n\tlist.items = items\n\treturn\n}", "func (c *Client) Build(params map[string]interface{}) (api.ClientAPI, error) {\n\tUsername, _ := params[\"Username\"].(string)\n\tPassword, _ := params[\"Password\"].(string)\n\tTenantName, _ := params[\"TenantName\"].(string)\n\tRegion, _ 
:= params[\"Region\"].(string)\n\treturn AuthenticatedClient(AuthOptions{\n\t\tUsername: Username,\n\t\tPassword: Password,\n\t\tTenantName: TenantName,\n\t\tRegion: Region,\n\t})\n}", "func (b *balancerDiscovery) regeneratePicker() {\n\tif b.state == connectivity.TransientFailure {\n\t\tb.picker = b.newPicker(nil, balancer.ErrTransientFailure)\n\t\treturn\n\t}\n\tb.picker = b.newPicker(b.subConns, nil)\n}", "func (c *Client) Build(params map[string]interface{}) (api.ClientAPI, error) {\n\t// tenantName, _ := params[\"name\"].(string)\n\n\tidentity, _ := params[\"identity\"].(map[string]interface{})\n\tcompute, _ := params[\"compute\"].(map[string]interface{})\n\t// network, _ := params[\"network\"].(map[string]interface{})\n\n\tusername, _ := identity[\"Username\"].(string)\n\tpassword, _ := identity[\"Password\"].(string)\n\tdomainName, _ := identity[\"UserDomainName\"].(string)\n\n\tregion, _ := compute[\"Region\"].(string)\n\tprojectName, _ := compute[\"ProjectName\"].(string)\n\tprojectID, _ := compute[\"ProjectID\"].(string)\n\tdefaultImage, _ := compute[\"DefaultImage\"].(string)\n\n\treturn AuthenticatedClient(\n\t\tAuthOptions{\n\t\t\tUsername: username,\n\t\t\tPassword: password,\n\t\t\tRegion: region,\n\t\t\tDomainName: domainName,\n\t\t\tProjectName: projectName,\n\t\t\tProjectID: projectID,\n\t\t},\n\t\topenstack.CfgOptions{\n\t\t\tDefaultImage: defaultImage,\n\t\t},\n\t)\n}", "func New() *Bricker {\n\treturn &Bricker{\n\t\tconnection: make(map[string]connector.Connector),\n\t\tfirst: \"\",\n\t\tuids: make(map[uint32]string),\n\t\tsubscriber: make(map[hash.Hash]map[string]Subscriber),\n\t\tchoosers: make([]uint8, 0)}\n}", "func (b *CCSBuilder) Build() (object *CCS, err error) {\n\tobject = new(CCS)\n\tobject.id = b.id\n\tobject.href = b.href\n\tobject.bitmap_ = b.bitmap_\n\tobject.disableSCPChecks = b.disableSCPChecks\n\tobject.enabled = b.enabled\n\treturn\n}", "func (p *googleCloudProvider) Construct(_ context.Context, _ *rpc.ConstructRequest) 
(*rpc.ConstructResponse, error) {\n\treturn nil, status.Error(codes.Unimplemented, \"Construct is not yet implemented\")\n}", "func (builder *TsQueryCommandBuilder) Build() (Command, error) {\n\tif builder.protobuf == nil {\n\t\tpanic(\"builder.protobuf must not be nil\")\n\t}\n\n\tif len(builder.protobuf.GetQuery().GetBase()) == 0 {\n\t\treturn nil, ErrQueryRequired\n\t}\n\n\tif builder.protobuf.GetStream() && builder.callback == nil {\n\t\treturn nil, newClientError(\"TsQueryCommand requires a callback when streaming.\", nil)\n\t}\n\n\treturn &TsQueryCommand{\n\t\tprotobuf: builder.protobuf,\n\t\tcallback: builder.callback,\n\t}, nil\n}", "func (p *googleCloudProvider) Construct(_ context.Context, _ *rpc.ConstructRequest) (*rpc.ConstructResponse, error) {\n\n\treturn nil, status.Error(codes.Unimplemented, \"Construct is not yet implemented\")\n}", "func (c *configuration) BuildGrpcClients(cc ClientConnector) {\n\tclientProps := c.Properties.Client\n\tfor _, cli := range grpcClients {\n\t\tprop := new(ClientProperties)\n\t\tif err := mapstruct.Decode(prop, clientProps[cli.name]); err != nil {\n\t\t\tlog.Error(err)\n\t\t\tbreak\n\t\t}\n\t\tcc.Connect(cli.name, cli.cb, prop)\n\t}\n}", "func newChoiceBuilder(choiceDef *ChoiceDef) ChoiceBuilder {\n\treturn &chosenBuilder{\n\t\tchoiceDef: choiceDef,\n\t}\n}", "func (b *Builder) Build() (corev1.Container, error) {\n\terr := b.validate()\n\tif err != nil {\n\t\treturn corev1.Container{}, err\n\t}\n\treturn b.con.asContainer(), nil\n}", "func (b Builder) Build(name string) *CommandProcessor {\n\tcp := new(CommandProcessor)\n\tcp.TickingComponent = sim.NewTickingComponent(name, b.engine, b.freq, cp)\n\n\tunlimited := math.MaxInt32\n\tcp.ToDriver = sim.NewLimitNumMsgPort(cp, 1, name+\".ToDriver\")\n\tcp.toDriverSender = akitaext.NewBufferedSender(\n\t\tcp.ToDriver, buffering.NewBuffer(unlimited))\n\tcp.ToDMA = sim.NewLimitNumMsgPort(cp, 1, name+\".ToDispatcher\")\n\tcp.toDMASender = 
akitaext.NewBufferedSender(\n\t\tcp.ToDMA, buffering.NewBuffer(unlimited))\n\tcp.ToCUs = sim.NewLimitNumMsgPort(cp, 1, name+\".ToCUs\")\n\tcp.toCUsSender = akitaext.NewBufferedSender(\n\t\tcp.ToDMA, buffering.NewBuffer(unlimited))\n\tcp.ToTLBs = sim.NewLimitNumMsgPort(cp, 1, name+\".ToTLBs\")\n\tcp.toTLBsSender = akitaext.NewBufferedSender(\n\t\tcp.ToDMA, buffering.NewBuffer(unlimited))\n\tcp.ToRDMA = sim.NewLimitNumMsgPort(cp, 1, name+\".ToRDMA\")\n\tcp.toRDMASender = akitaext.NewBufferedSender(\n\t\tcp.ToDMA, buffering.NewBuffer(unlimited))\n\tcp.ToPMC = sim.NewLimitNumMsgPort(cp, 1, name+\".ToPMC\")\n\tcp.toPMCSender = akitaext.NewBufferedSender(\n\t\tcp.ToDMA, buffering.NewBuffer(unlimited))\n\tcp.ToAddressTranslators = sim.NewLimitNumMsgPort(cp, 1,\n\t\tname+\".ToAddressTranslators\")\n\tcp.toAddressTranslatorsSender = akitaext.NewBufferedSender(\n\t\tcp.ToDMA, buffering.NewBuffer(unlimited))\n\tcp.ToCaches = sim.NewLimitNumMsgPort(cp, 1, name+\".ToCaches\")\n\tcp.toCachesSender = akitaext.NewBufferedSender(\n\t\tcp.ToDMA, buffering.NewBuffer(unlimited))\n\n\tcp.bottomKernelLaunchReqIDToTopReqMap =\n\t\tmake(map[string]*protocol.LaunchKernelReq)\n\tcp.bottomMemCopyH2DReqIDToTopReqMap =\n\t\tmake(map[string]*protocol.MemCopyH2DReq)\n\tcp.bottomMemCopyD2HReqIDToTopReqMap =\n\t\tmake(map[string]*protocol.MemCopyD2HReq)\n\n\tb.buildDispatchers(cp)\n\n\tif b.visTracer != nil {\n\t\ttracing.CollectTrace(cp, b.visTracer)\n\t}\n\n\treturn cp\n}", "func NewClient(uri string, opts ...grpc.DialOption) (services.Project, error) {\n\t// Initialize connection pool\n\tpool, err := grpcpool.New(func() (*grpc.ClientConn, error) {\n\t\treturn grpc.Dial(uri, opts...)\n\t}, PoolInitial, PoolMax, PoolTimeout)\n\n\t// Close on destruction\n\truntime.SetFinalizer(pool, func(p *grpcpool.Pool) {\n\t\tif !p.IsClosed() {\n\t\t\tp.Close()\n\t\t}\n\t})\n\n\t// Return wrapper\n\treturn &client{\n\t\tpool: pool,\n\t}, err\n}", "func New(b builder.Builder, tracker, hosted string) *Builder 
{\n\t//create our new builder\n\tn := &Builder{\n\t\tb: b,\n\t\tbase: hosted,\n\t\trpc: gorpc.NewServer(),\n\t\ttcl: client.New(tracker, http.DefaultClient, client.JsonCodec),\n\t\tbq: rpc.NewBuilderQueue(),\n\t\tmux: http.NewServeMux(),\n\t\tdler: newDownloader(),\n\t}\n\n\t//register the build service in the rpc\n\tif err := n.rpc.RegisterService(n.bq, \"\"); err != nil {\n\t\tpanic(err)\n\t}\n\n\t//make sure we respond to pings\n\tif err := n.rpc.RegisterService(pinger.Pinger{}, \"\"); err != nil {\n\t\tpanic(err)\n\t}\n\n\t//register the codec\n\tn.rpc.RegisterCodec(json.NewCodec(), \"application/json\")\n\n\t//add the handlers to our mux\n\tn.mux.Handle(\"/\", n.rpc)\n\tn.mux.Handle(\"/download/\", http.StripPrefix(\"/download/\", n.dler))\n\n\t//start processing tasks\n\tgo n.run()\n\n\treturn n\n}", "func (b fileInputBuilder) Build(ctx context.Context, cfg task.ExecutorInputBuilderConfig, deps task.ExecutorInputBuilderDependencies, target *task.ExecutorInputBuilderTarget) error {\n\turi := cfg.AnnotatedValue.GetData()\n\tdeps.Log.Debug().\n\t\tInt(\"sequence-index\", cfg.AnnotatedValueIndex).\n\t\tStr(\"uri\", uri).\n\t\tStr(\"input\", cfg.InputSpec.Name).\n\t\tStr(\"task\", cfg.TaskSpec.Name).\n\t\tMsg(\"Preparing file input value\")\n\n\t// Prepare readonly volume for URI\n\tresp, err := deps.FileSystem.CreateVolumeForRead(ctx, &fs.CreateVolumeForReadRequest{\n\t\tURI: uri,\n\t\tOwner: &cfg.OwnerRef,\n\t\tNamespace: cfg.Pipeline.GetNamespace(),\n\t})\n\tif err != nil {\n\t\treturn maskAny(err)\n\t}\n\t// TODO handle case where node is different\n\tif nodeName := resp.GetNodeName(); nodeName != \"\" {\n\t\ttarget.NodeName = &nodeName\n\t}\n\n\t// Mount PVC or HostPath, depending on result\n\tvolName := util.FixupKubernetesName(fmt.Sprintf(\"input-%s-%d\", cfg.InputSpec.Name, cfg.AnnotatedValueIndex))\n\tif resp.GetVolumeName() != \"\" {\n\t\t// Get created PersistentVolume\n\t\tvar pv corev1.PersistentVolume\n\t\tpvKey := client.ObjectKey{\n\t\t\tName: 
resp.GetVolumeName(),\n\t\t}\n\t\tif err := deps.Client.Get(ctx, pvKey, &pv); err != nil {\n\t\t\tdeps.Log.Warn().Err(err).Msg(\"Failed to get PersistentVolume\")\n\t\t\treturn maskAny(err)\n\t\t}\n\n\t\t// Add PV to resources for deletion list (if needed)\n\t\tif resp.DeleteAfterUse {\n\t\t\ttarget.Resources = append(target.Resources, &pv)\n\t\t}\n\n\t\t// Create PVC\n\t\tpvcName := util.FixupKubernetesName(fmt.Sprintf(\"%s-%s-%s-%d-%s\", cfg.Pipeline.GetName(), cfg.TaskSpec.Name, cfg.InputSpec.Name, cfg.AnnotatedValueIndex, uniuri.NewLen(6)))\n\t\tstorageClassName := pv.Spec.StorageClassName\n\t\tpvc := corev1.PersistentVolumeClaim{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: pvcName,\n\t\t\t\tNamespace: cfg.Pipeline.GetNamespace(),\n\t\t\t\tOwnerReferences: []metav1.OwnerReference{cfg.OwnerRef},\n\t\t\t},\n\t\t\tSpec: corev1.PersistentVolumeClaimSpec{\n\t\t\t\tAccessModes: pv.Spec.AccessModes,\n\t\t\t\tVolumeName: resp.GetVolumeName(),\n\t\t\t\tResources: corev1.ResourceRequirements{\n\t\t\t\t\tRequests: pv.Spec.Capacity,\n\t\t\t\t},\n\t\t\t\tStorageClassName: &storageClassName,\n\t\t\t},\n\t\t}\n\t\tif err := deps.Client.Create(ctx, &pvc); err != nil {\n\t\t\treturn maskAny(err)\n\t\t}\n\t\ttarget.Resources = append(target.Resources, &pvc)\n\n\t\t// Add volume for the pod\n\t\tvol := corev1.Volume{\n\t\t\tName: volName,\n\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\tPersistentVolumeClaim: &corev1.PersistentVolumeClaimVolumeSource{\n\t\t\t\t\tClaimName: pvcName,\n\t\t\t\t\tReadOnly: true,\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\t\ttarget.Pod.Spec.Volumes = append(target.Pod.Spec.Volumes, vol)\n\t} else if resp.GetVolumeClaimName() != \"\" {\n\t\t// Add PVC to resources for deletion list (if needed)\n\t\tif resp.DeleteAfterUse {\n\t\t\t// Get created PersistentVolume\n\t\t\tvar pvc corev1.PersistentVolumeClaim\n\t\t\tpvcKey := client.ObjectKey{\n\t\t\t\tName: resp.GetVolumeClaimName(),\n\t\t\t\tNamespace: cfg.Pipeline.GetNamespace(),\n\t\t\t}\n\t\t\tif err := 
deps.Client.Get(ctx, pvcKey, &pvc); err != nil {\n\t\t\t\tdeps.Log.Warn().Err(err).Msg(\"Failed to get PersistentVolumeClaim\")\n\t\t\t\treturn maskAny(err)\n\t\t\t}\n\t\t\ttarget.Resources = append(target.Resources, &pvc)\n\t\t}\n\n\t\t// Add volume for the pod, unless such a volume already exists\n\t\tif vol, found := util.GetVolumeWithForPVC(&target.Pod.Spec, resp.GetVolumeClaimName()); !found {\n\t\t\tvol := corev1.Volume{\n\t\t\t\tName: volName,\n\t\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\t\tPersistentVolumeClaim: &corev1.PersistentVolumeClaimVolumeSource{\n\t\t\t\t\t\tClaimName: resp.GetVolumeClaimName(),\n\t\t\t\t\t\tReadOnly: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}\n\t\t\ttarget.Pod.Spec.Volumes = append(target.Pod.Spec.Volumes, vol)\n\t\t} else {\n\t\t\tvolName = vol.Name\n\t\t}\n\t} else if resp.GetVolumePath() != \"\" {\n\t\t// Mount VolumePath as HostPath volume\n\t\tdirType := corev1.HostPathDirectoryOrCreate\n\t\t// Add volume for the pod\n\t\tvol := corev1.Volume{\n\t\t\tName: volName,\n\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\tHostPath: &corev1.HostPathVolumeSource{\n\t\t\t\t\tPath: resp.GetVolumePath(),\n\t\t\t\t\tType: &dirType,\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\t\ttarget.Pod.Spec.Volumes = append(target.Pod.Spec.Volumes, vol)\n\t\t// Ensure pod is schedule on node\n\t\tif nodeName := resp.GetNodeName(); nodeName != \"\" {\n\t\t\tif target.Pod.Spec.NodeName == \"\" {\n\t\t\t\ttarget.Pod.Spec.NodeName = nodeName\n\t\t\t} else if target.Pod.Spec.NodeName != nodeName {\n\t\t\t\t// Found conflict\n\t\t\t\tdeps.Log.Error().\n\t\t\t\t\tStr(\"pod-nodeName\", target.Pod.Spec.NodeName).\n\t\t\t\t\tStr(\"pod-nodeNameRequest\", nodeName).\n\t\t\t\t\tMsg(\"Conflicting pod node spec\")\n\t\t\t\treturn maskAny(fmt.Errorf(\"Conflicting Node assignment\"))\n\t\t\t}\n\t\t}\n\t} else {\n\t\t// No valid respond\n\t\treturn maskAny(fmt.Errorf(\"FileSystem service return invalid response\"))\n\t}\n\n\t// Map volume in container fs namespace\n\tmountPath 
:= filepath.Join(\"/koalja\", \"inputs\", cfg.InputSpec.Name, strconv.Itoa(cfg.AnnotatedValueIndex))\n\ttarget.Container.VolumeMounts = append(target.Container.VolumeMounts, corev1.VolumeMount{\n\t\tName: volName,\n\t\tReadOnly: true,\n\t\tMountPath: mountPath,\n\t\tSubPath: resp.GetSubPath(),\n\t})\n\n\t// Create template data\n\ttarget.TemplateData = append(target.TemplateData, map[string]interface{}{\n\t\t\"volumeName\": resp.GetVolumeName(),\n\t\t\"volumeClaimName\": resp.GetVolumeClaimName(),\n\t\t\"volumePath\": resp.GetVolumePath(),\n\t\t\"mountPath\": mountPath,\n\t\t\"subPath\": resp.GetSubPath(),\n\t\t\"nodeName\": resp.GetNodeName(),\n\t\t\"path\": filepath.Join(mountPath, resp.GetLocalPath()),\n\t\t\"base\": filepath.Base(resp.GetLocalPath()),\n\t\t\"dir\": filepath.Dir(resp.GetLocalPath()),\n\t})\n\n\treturn nil\n}", "func New() (grpc_fpl.FPLClient, func(), error) {\n\tport := viper.GetString(\"port\")\n\tconn, err := grpc.Dial(fmt.Sprintf(\"localhost:%v\", port), grpc.WithInsecure())\n\tif err != nil {\n\t\treturn nil, nil, fmt.Errorf(\"error while connecting to gRPC server at port %v: %v\", port, err)\n\t}\n\n\tclient := grpc_fpl.NewFPLClient(conn)\n\n\tcleanup := func() {\n\t\tif conn != nil {\n\t\t\tconn.Close()\n\t\t}\n\t}\n\treturn client, cleanup, nil\n}", "func (c Config) Build() (*Gaffer, error) {\n\n\t// Initialise Gaffer object\n\tg := &Gaffer{\n\t\tConfig: c,\n\t\tedge_buffer: map[EdgeKey]*Edge{},\n\t\tentity_buffer: map[EntityKey]*Entity{},\n\t\tbufferq: make(chan *Update, 5000),\n\t\tloadq: make(chan *map[string]interface{}, 50),\n\t}\n\n\t// Start loader and buffer manager goroutines\n\tgo g.Loader()\n\tgo g.BufferManager()\n\n\treturn g, nil\n}", "func newBuilder() balancer.Builder {\n\treturn base.NewBalancerBuilder(Name, &rrPickerBuilder{})\n}", "func (c *lscChoco) Build(thing sdk.Thing, sensors []Sensor, chanIDs []string) {\n\tc.thing = thing\n\tc.thingToken = c.thing.Key\n\tc.status = Status{State: state.CREATED}\n\tc.sensors = 
sensors\n\tc.channelIDs = chanIDs\n\tfor i := range c.sensors {\n\t\tc.sensors[i].SetState(state.CREATED)\n\t}\n\n}", "func (rb *CompositeAggregationBuilder) Build() CompositeAggregation {\n\treturn *rb.v\n}", "func BuildRPCToken(client, service, method string) ([]byte, error) {\n\ttok := RPCToken{\n\t\tClient: client,\n\t\tKind: TokenKindRPC,\n\t\tService: service,\n\t\tMethod: method,\n\t}\n\n\treturn json.Marshal(tok)\n}", "func createObjectPicker(n int64, distrib string) (ObjectSelector, error) {\n\n\tswitch distrib {\n\tcase ZIPFIAN_OBJECT_PICK:\n\t\tvar x = new(Zipf)\n\t\tx.SetParams(n, 0.8, 99)\n\t\treturn x, nil\n\tcase UNIFORM_OBJECT_PICK:\n\t\tvar y = new(Uniform)\n\t\ty.SetParams(n, 99)\n\t\treturn y, nil\n\t}\n\treturn nil, errors.New(\"Not a valid distribution for object selection\")\n}", "func BuildFactory(appearance int) GUIFactory {\n\tswitch appearance {\n\tcase React:\n\t\treturn new(ReactFactory)\n\tcase Vue:\n\t\treturn new(VueFactory)\n\tcase Angular:\n\t\treturn new(AngularFactory)\n\t}\n\treturn nil\n}", "func (pb PlannerBuilder) Build() Planner {\n\treturn &planner{\n\t\tlp: NewLogicalPlanner(pb.lopts...),\n\t\tpp: NewPhysicalPlanner(pb.popts...),\n\t}\n}", "func (x *fastReflection_RpcCommandOptions) New() protoreflect.Message {\n\treturn new(fastReflection_RpcCommandOptions)\n}", "func newBuilder() balancer.Builder {\n\treturn &gcpBalancerBuilder{\n\t\tname: Name,\n\t}\n}", "func (tmpl *APIClientTemplate) Build() APIClient {\n\treturn tmpl.BuildWithAuthorization(tmpl.Authorization)\n}", "func (b *Builder) Build(ctx context.Context, app *AppContext) error {\n\tif err := buildComponents(ctx, app); err != nil {\n\t\treturn fmt.Errorf(\"error building components: %v\", err)\n\t}\n\treturn nil\n}", "func (p PhoneCallAccepted) construct() PhoneCallClass { return &p }", "func (*SpecFactory) Build(resource string) runtime.Object {\n\n\tswitch resource {\n\tcase \"services\":\n\t\treturn &v1.Service{}\n\tcase \"configmaps\":\n\t\treturn 
&v1.ConfigMap{}\n\t}\n\n\tpanic(fmt.Errorf(\"no resource mapped for %s\", resource))\n}", "func BuildPbsbService(serviceEmail string, key []byte) (*pbsb.Service, error) {\n\tconf := &jwt.Config{\n\t\tEmail: serviceEmail,\n\t\tPrivateKey: key,\n\t\tScopes: []string{\n\t\t\tpbsb.CloudPlatformScope,\n\t\t\tpbsb.PubsubScope,\n\t\t},\n\t\tTokenURL: google.JWTTokenURL,\n\t}\n\n\tif service, err := pbsb.New(conf.Client(oauth2.NoContext)); err != nil {\n\t\treturn nil, err\n\t} else {\n\t\treturn service, nil\n\t}\n}", "func NewRPC(ctx context.Context, lgr log.Logger, addr string, opts ...RPCOption) (RPC, error) {\n\tvar cfg rpcConfig\n\tfor i, opt := range opts {\n\t\tif err := opt(&cfg); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"rpc option %d failed to apply to RPC config: %w\", i, err)\n\t\t}\n\t}\n\tif cfg.backoffAttempts < 1 { // default to at least 1 attempt, or it always fails to dial.\n\t\tcfg.backoffAttempts = 1\n\t}\n\tunderlying, err := dialRPCClientWithBackoff(ctx, lgr, addr, cfg.backoffAttempts, cfg.gethRPCOptions...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar wrapped RPC = &BaseRPCClient{c: underlying}\n\n\tif cfg.limit != 0 {\n\t\twrapped = NewRateLimitingClient(wrapped, rate.Limit(cfg.limit), cfg.burst)\n\t}\n\n\tif httpRegex.MatchString(addr) {\n\t\twrapped = NewPollingClient(ctx, lgr, wrapped, WithPollRate(cfg.httpPollInterval))\n\t}\n\n\treturn wrapped, nil\n}", "func (c *Config) Build() weather.Provider {\n\t// Build the OWM URL.\n\twURL := url.URL{\n\t\tScheme: \"http\",\n\t\tHost: \"api.wunderground.com\",\n\t\tPath: fmt.Sprintf(\"/api/%s/conditions/q/%s.json\", c.apiKey, c.query),\n\t}\n\treturn Provider(wURL.String())\n}", "func (b *builder) Build(url resolver.Target, cc resolver.ClientConn, _ resolver.BuildOptions) (resolver.Resolver, error) {\n\ttgt, err := parseURL(url.URL.String())\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"invalid consul URL: %w\", err)\n\t}\n\n\tcli, err := api.NewClient(tgt.consulConfig())\n\tif err != nil 
{\n\t\treturn nil, fmt.Errorf(\"failed to connect to the ConsulAPI: %w\", err)\n\t}\n\n\tplan, err := b.targetToPlan(tgt)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tr := NewResolver(cc, plan, WithLogger(b.logger.WithName(tgt.Service)), WithStrategies(b.strategies))\n\n\tgo func() {\n\t\t_ = plan.RunWithClientAndHclog(cli, nil)\n\t}()\n\tgo func() {\n\t\tr.resolveWithPeriod(cli, tgt)\n\t}()\n\n\treturn r, nil\n}", "func (rb *TotalFeatureImportanceClassBuilder) Build() TotalFeatureImportanceClass {\n\treturn *rb.v\n}", "func Create(ctx context.Context, client *selvpcclient.ServiceClient, createOpts CreateOpts) (*Project, *selvpcclient.ResponseResult, error) {\n\t// Nest create options into the parent \"project\" JSON structure.\n\ttype createProject struct {\n\t\tOptions CreateOpts `json:\"project\"`\n\t}\n\tcreateProjectOpts := &createProject{Options: createOpts}\n\trequestBody, err := json.Marshal(createProjectOpts)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\turl := strings.Join([]string{client.Endpoint, resourceURL}, \"/\")\n\tresponseResult, err := client.DoRequest(ctx, http.MethodPost, url, bytes.NewReader(requestBody))\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tif responseResult.Err != nil {\n\t\treturn nil, responseResult, responseResult.Err\n\t}\n\n\t// Extract a project from the response body.\n\tvar result struct {\n\t\tProject *Project `json:\"project\"`\n\t}\n\terr = responseResult.ExtractResult(&result)\n\tif err != nil {\n\t\treturn nil, responseResult, err\n\t}\n\n\treturn result.Project, responseResult, nil\n}", "func createObjectPicker(n int64, distrib string) (ObjectSelector, error) {\n\n\tswitch distrib {\n\tcase ZIPFIAN_OBJECT_PICK:\n\t\tvar x = new(Zipf)\n\t\tx.SetParams(n, 0.8, 99)\n\t\treturn x, nil\n\tcase UNIFORM_OBJECT_PICK:\n\t\tvar y = new(Uniform)\n\t\ty.SetParams(n, 99)\n\t\treturn y, nil\n\t}\n\n\tfmt.Println(\"what is the distribution :\", distrib)\n\treturn new(Uniform), errors.New(\"Not a valid 
distribution for object selection\")\n}", "func (rb *PercolateQueryBuilder) Build() PercolateQuery {\n\treturn *rb.v\n}", "func NewFromGRPC(conn *grpc.ClientConn, log *zap.Logger, timeout time.Duration) (api.Peer, error) {\n\tl := log.Named(`NewFromGRPC`)\n\tp := &peer{conn: conn, log: log.Named(`peer`), timeout: timeout}\n\tif err := p.initEndorserClient(); err != nil {\n\t\tl.Error(`Failed to initialize endorser client`, zap.Error(err))\n\t\treturn nil, errors.Wrap(err, `failed to initialize EndorserClient`)\n\t}\n\treturn p, nil\n}", "func NewGRPC(port string, options ...grpc.ServerOption) *GRPC {\n\tsrv := grpc.NewServer(options...)\n\treturn &GRPC{\n\t\tServer: srv,\n\t\tport: port,\n\t}\n}", "func (rb *PhraseSuggestOptionBuilder) Build() PhraseSuggestOption {\n\treturn *rb.v\n}", "func (rb *PluginStatsBuilder) Build() PluginStats {\n\treturn *rb.v\n}", "func newRPCClientService() (*rpcClientService, error) {\n return &rpcClientService{rpcCh: make(chan *sendRPCState)}, nil\n}", "func (p *googleCloudProvider) Create(ctx context.Context, req *rpc.CreateRequest) (*rpc.CreateResponse, error) {\n\turn := resource.URN(req.GetUrn())\n\tlabel := fmt.Sprintf(\"%s.Create(%s)\", p.name, urn)\n\tlogging.V(9).Infof(\"%s executing\", label)\n\n\t// Deserialize RPC inputs\n\tinputs, err := plugin.UnmarshalProperties(req.GetProperties(), plugin.MarshalOptions{\n\t\tLabel: fmt.Sprintf(\"%s.properties\", label), SkipNulls: true,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tinputsMap := inputs.Mappable()\n\n\tresourceKey := string(urn.Type())\n\tres, ok := p.resourceMap.Resources[resourceKey]\n\tif !ok {\n\t\treturn nil, errors.Errorf(\"resource %q not found\", resourceKey)\n\t}\n\n\turi, err := buildCreateUrl(res, inputs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tbody := p.prepareAPIInputs(inputs, nil, res.CreateProperties)\n\n\tvar op map[string]interface{}\n\tif res.AssetUpload {\n\t\tvar content []byte\n\t\tsource := inputs[\"source\"]\n\t\tif source.IsAsset() 
{\n\t\t\tcontent, err = source.AssetValue().Bytes()\n\t\t} else if source.IsArchive() {\n\t\t\tcontent, err = source.ArchiveValue().Bytes(resource.ZIPArchive)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\top, err = p.client.UploadWithTimeout(res.CreateVerb, uri, body, content, 0)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"error sending upload request: %s: %q %+v %d\", err, uri, inputs.Mappable(), len(content))\n\t\t}\n\t} else {\n\t\top, err = p.client.RequestWithTimeout(res.CreateVerb, uri, body, 0)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"error sending request: %s: %q %+v\", err, uri, inputs.Mappable())\n\t\t}\n\t}\n\n\tresp, err := p.waitForResourceOpCompletion(res.BaseUrl, op)\n\tif err != nil {\n\t\tif resp == nil {\n\t\t\treturn nil, errors.Wrapf(err, \"waiting for completion\")\n\t\t}\n\t\t// A partial failure may have occurred because we got an error and a response.\n\t\t// Try reading the resource state and return a partial error if there is some.\n\t\tid, idErr := calculateResourceId(res, inputsMap, resp)\n\t\tif idErr != nil {\n\t\t\treturn nil, errors.Wrapf(err, \"waiting for completion / calculate ID %s\", idErr)\n\t\t}\n\t\treadResp, getErr := p.client.RequestWithTimeout(\"GET\", id, nil, 0)\n\t\tif getErr != nil {\n\t\t\treturn nil, errors.Wrapf(err, \"waiting for completion / read state %s\", getErr)\n\t\t}\n\t\tcheckpoint, cpErr := plugin.MarshalProperties(\n\t\t\tcheckpointObject(inputs, readResp),\n\t\t\tplugin.MarshalOptions{Label: fmt.Sprintf(\"%s.partialCheckpoint\", label), KeepSecrets: true, SkipNulls: true},\n\t\t)\n\t\tif cpErr != nil {\n\t\t\treturn nil, errors.Wrapf(err, \"waiting for completion / checkpoint %s\", cpErr)\n\t\t}\n\t\treturn nil, partialError(id, err, checkpoint, req.GetProperties())\n\t}\n\n\t// Store both outputs and inputs into the state.\n\tcheckpoint, err := plugin.MarshalProperties(\n\t\tcheckpointObject(inputs, resp),\n\t\tplugin.MarshalOptions{Label: 
fmt.Sprintf(\"%s.checkpoint\", label), KeepSecrets: true, SkipNulls: true},\n\t)\n\n\tid, err := calculateResourceId(res, inputsMap, resp)\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"calculating resource ID\")\n\t}\n\n\treturn &rpc.CreateResponse{\n\t\tId: id,\n\t\tProperties: checkpoint,\n\t}, nil\n}", "func (c ChannelsChannelParticipants) construct() ChannelsChannelParticipantsClass { return &c }", "func newBuilder() balancer.Builder {\n\treturn base.NewBalancerBuilderV2(Name, &rrPickerBuilder{}, base.Config{HealthCheck: true})\n}", "func (m builder) build() (oci.SpecModifier, error) {\n\tif len(m.devices) == 0 && m.cdiSpec == nil {\n\t\treturn nil, nil\n\t}\n\n\tif m.cdiSpec != nil {\n\t\tmodifier := fromCDISpec{\n\t\t\tcdiSpec: &cdi.Spec{Spec: m.cdiSpec},\n\t\t}\n\t\treturn modifier, nil\n\t}\n\n\tregistry, err := cdi.NewCache(\n\t\tcdi.WithAutoRefresh(false),\n\t\tcdi.WithSpecDirs(m.specDirs...),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create CDI registry: %v\", err)\n\t}\n\n\tmodifier := fromRegistry{\n\t\tlogger: m.logger,\n\t\tregistry: registry,\n\t\tdevices: m.devices,\n\t}\n\n\treturn modifier, nil\n}", "func Build(ns string, app *parser.Appfile) (*v1alpha2.ApplicationConfiguration, []*v1alpha2.Component, error) {\n\tb := &builder{app}\n\treturn b.CompleteWithContext(ns)\n}", "func (rb *PagerDutyEventProxyBuilder) Build() PagerDutyEventProxy {\n\treturn *rb.v\n}", "func newComposeBuilder(appMan Manifest) Builder {\n return &ComposeBuilder{manifest: appMan}\n}", "func (p PhoneCallWaiting) construct() PhoneCallClass { return &p }", "func newBuilder() balancer.Builder {\n\treturn base.NewBalancerBuilderWithConfig(Name, &rrPickerBuilder{}, base.Config{HealthCheck: true})\n}", "func (s *Service) Build(ctx context.Context, buildOptions options.Build) error {\n\treturn s.build(ctx, buildOptions)\n}", "func (b *Builder) Build() (*RollDPoS, error) {\n\tif b.chain == nil {\n\t\treturn nil, errors.Wrap(ErrNewRollDPoS, 
\"blockchain APIs is nil\")\n\t}\n\tif b.broadcastHandler == nil {\n\t\treturn nil, errors.Wrap(ErrNewRollDPoS, \"broadcast callback is nil\")\n\t}\n\tif b.clock == nil {\n\t\tb.clock = clock.New()\n\t}\n\tb.cfg.DB.DbPath = b.cfg.Consensus.ConsensusDBPath\n\tctx, err := NewRollDPoSCtx(\n\t\tconsensusfsm.NewConsensusConfig(b.cfg.Consensus.FSM, b.cfg.DardanellesUpgrade, b.cfg.Genesis, b.cfg.Consensus.Delay),\n\t\tb.cfg.DB,\n\t\tb.cfg.SystemActive,\n\t\tb.cfg.Consensus.ToleratedOvertime,\n\t\tb.cfg.Genesis.TimeBasedRotation,\n\t\tb.chain,\n\t\tb.blockDeserializer,\n\t\tb.rp,\n\t\tb.broadcastHandler,\n\t\tb.delegatesByEpochFunc,\n\t\tb.proposersByEpochFunc,\n\t\tb.encodedAddr,\n\t\tb.priKey,\n\t\tb.clock,\n\t\tb.cfg.Genesis.BeringBlockHeight,\n\t)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"error when constructing consensus context\")\n\t}\n\tcfsm, err := consensusfsm.NewConsensusFSM(ctx, b.clock)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"error when constructing the consensus FSM\")\n\t}\n\treturn &RollDPoS{\n\t\tcfsm: cfsm,\n\t\tctx: ctx,\n\t\tstartDelay: b.cfg.Consensus.Delay,\n\t\tready: make(chan interface{}),\n\t}, nil\n}", "func New(target string, opts ...grpc.DialOption) (*Client, error) {\n\tconn, err := grpc.Dial(target, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Client{conn, quotaservice.NewQuotaServiceClient(conn)}, nil\n}", "func (cb *ClientBuilder) Build() *Client {\n\treturn cb.client\n}", "func (cb *ClientBuilder) Build() *Client {\n\treturn cb.client\n}", "func NewColorPicker() ColorPicker {\n\timageColors := make(map[string]Color)\n\n\treturn &colorPicker{\n\t\timageColors: imageColors,\n\t}\n}", "func Build(namespace, name, strategyType, fromKind, fromNamespace, fromName string) buildapi.Build {\n\treturn buildapi.Build{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tNamespace: namespace,\n\t\t\tName: name,\n\t\t\tSelfLink: \"/build/\" + name,\n\t\t},\n\t\tSpec: buildapi.BuildSpec{\n\t\t\tCommonSpec: 
CommonSpec(strategyType, fromKind, fromNamespace, fromName),\n\t\t},\n\t}\n}", "func (Codegen) Grpc() error {\n\tmg.Deps(Codegen.Buildbox)\n\tfmt.Println(\"\\n=====> Running GRPC Codegen inside docker...\\n\")\n\treturn trace.Wrap(sh.RunV(\n\t\t\"docker\", \"run\",\n\t\thostVolume(\"delegated\"),\n\t\tfmt.Sprint(\"satellite-grpc-buildbox:\", version()),\n\t\t\"sh\", \"-c\",\n\t\t\"cd /go/src/github.com/gravitational/satellite/ && go run mage.go internal:grpcAgent internal:grpcDebug\",\n\t))\n}", "func NewBuilder() *Builder {\n\treturn &Builder{pvc: &PVC{object: &corev1.PersistentVolumeClaim{}}}\n}", "func (b *Buddy) Build() Builder {\n\treturn Builder{b}\n}", "func (p PhoneCallRequested) construct() PhoneCallClass { return &p }", "func (builder *BitVectorBuilderData) Build(enableFasterSelect1 bool, enableFasterSelect0 bool) (SuccinctBitVector, error) {\n\tbuilder.vec.build(enableFasterSelect1, enableFasterSelect0)\n\tvec := builder.vec\n\tbuilder.vec = new(BitVectorData)\n\treturn vec, nil\n}", "func New(file string) ComposeFile {\n\tresult := ComposeFile{\n\t\tFile: []string{file},\n\t\tData: DockerCompose{\n\t\t\tVersion: \"3.7\",\n\t\t\tServices: make(map[string]*Service),\n\t\t},\n\t}\n\treturn result\n}", "func (x *fastReflection_ModuleOptions) New() protoreflect.Message {\n\treturn new(fastReflection_ModuleOptions)\n}", "func (a *ArgoCDRepoServer) CreateGRPC(gitClient git.Client) *grpc.Server {\n\tserver := grpc.NewServer(\n\t\tgrpc.StreamInterceptor(grpc_middleware.ChainStreamServer(\n\t\t\tgrpc_logrus.StreamServerInterceptor(a.log),\n\t\t\tgrpc_util.PanicLoggerStreamServerInterceptor(a.log),\n\t\t)),\n\t\tgrpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(\n\t\t\tgrpc_logrus.UnaryServerInterceptor(a.log),\n\t\t\tgrpc_util.PanicLoggerUnaryServerInterceptor(a.log),\n\t\t)),\n\t)\n\tversion.RegisterVersionServiceServer(server, &version.Server{})\n\tmanifestService := repository.NewService(a.ns, a.kubeclientset, 
gitClient)\n\trepository.RegisterRepositoryServiceServer(server, manifestService)\n\n\t// Register reflection service on gRPC server.\n\treflection.Register(server)\n\n\treturn server\n}", "func (p PhoneCall) construct() PhoneCallClass { return &p }", "func (b Builder) Build() (*VerifiableCredential, error) {\n\tif err := validator.New().Struct(b); err != nil {\n\t\treturn nil, err\n\t}\n\n\tif b.ProofVersion == 0 {\n\t\tb.ProofVersion = proof.V2\n\t}\n\n\tsuite, err := proof.SignatureSuites().GetSuiteForCredentials(b.SignatureType, b.ProofVersion)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// The \"id\" attribute is added if missing from the claim data.\n\tvar credSubjects = map[string]interface{}{SubjectIDAttribute: b.SubjectDID.String()}\n\tfor k, v := range b.Data {\n\t\tcredSubjects[k] = v\n\t}\n\n\toptions := &proof.ProofOptions{ProofPurpose: proof.AssertionMethodPurpose}\n\t// Compute the claim proofs for selective disclosure.\n\tvar claimProofs = make(map[string]proof.Proof, len(credSubjects))\n\tfor k, v := range credSubjects {\n\t\tcredential := &VerifiableCredential{\n\t\t\tMetadata: *b.Metadata,\n\t\t\tCredentialSubject: map[string]interface{}{k: v},\n\t\t}\n\t\tif err := suite.Sign(credential, b.Signer, options); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tclaimProofs[k] = *credential.Proof\n\t}\n\n\tcred := &VerifiableCredential{\n\t\tMetadata: *b.Metadata,\n\t\tCredentialSubject: credSubjects,\n\t\tClaimProofs: claimProofs,\n\t}\n\treturn cred, suite.Sign(cred, b.Signer, options)\n}", "func (tmpb *PoolBuilder) Build() pool.TaskPool {\n\n\tvar aTaskPool pool.TaskPool\n\n\tif tmpb.poolType == FIXED {\n\n\t\taTaskPool = pool.NewFixedTaskPool()\n\n\t} else if tmpb.poolType == CACHED {\n\n\t\taTaskPool = pool.NewCachedTaskPool(tmpb.maxCachePeriodInMillis)\n\n\t} else {\n\n\t\taTaskPool = pool.NewElasticTaskPool(tmpb.maxCachePeriodInMillis, 
tmpb.minWorkerCount)\n\n\t}\n\n\taTaskPool.SetMaxQueueCount(tmpb.maxQueueCount)\n\taTaskPool.SetMaxWorkerCount(tmpb.maxWorkerCount)\n\taTaskPool.SetCustomErrorFunction(tmpb.customErrorFunction)\n\n\treturn aTaskPool\n}", "func (r *Rest) Build(name string) *Rest {\n\tr.endpoints[name] = r.tmp\n\tr.tmp = RestEndPoint{}\n\treturn r\n}", "func CredentialCreationFromProto(cc *wantypes.CredentialCreation) *CredentialCreation {\n\tif cc == nil {\n\t\treturn nil\n\t}\n\treturn &CredentialCreation{\n\t\tResponse: publicKeyCredentialCreationOptionsFromProto(cc.PublicKey),\n\t}\n}", "func (client *BuildServiceClient) getSupportedBuildpackCreateRequest(ctx context.Context, resourceGroupName string, serviceName string, buildServiceName string, buildpackName string, options *BuildServiceClientGetSupportedBuildpackOptions) (*policy.Request, error) {\n\turlPath := \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedBuildpacks/{buildpackName}\"\n\tif client.subscriptionID == \"\" {\n\t\treturn nil, errors.New(\"parameter client.subscriptionID cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{subscriptionId}\", url.PathEscape(client.subscriptionID))\n\tif resourceGroupName == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceGroupName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceGroupName}\", url.PathEscape(resourceGroupName))\n\tif serviceName == \"\" {\n\t\treturn nil, errors.New(\"parameter serviceName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{serviceName}\", url.PathEscape(serviceName))\n\tif buildServiceName == \"\" {\n\t\treturn nil, errors.New(\"parameter buildServiceName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{buildServiceName}\", url.PathEscape(buildServiceName))\n\tif buildpackName == \"\" {\n\t\treturn nil, errors.New(\"parameter buildpackName cannot be 
empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{buildpackName}\", url.PathEscape(buildpackName))\n\treq, err := runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.internal.Endpoint(), urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2023-01-01-preview\")\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header[\"Accept\"] = []string{\"application/json\"}\n\treturn req, nil\n}", "func newBuilder() balancer.Builder {\n\treturn base.NewBalancerBuilder(Name, &rrPickerBuilder{}, base.Config{HealthCheck: true})\n}", "func (d DialogPeer) construct() DialogPeerClass { return &d }", "func (client *BuildServiceClient) listSupportedBuildpacksCreateRequest(ctx context.Context, resourceGroupName string, serviceName string, buildServiceName string, options *BuildServiceClientListSupportedBuildpacksOptions) (*policy.Request, error) {\n\turlPath := \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedBuildpacks\"\n\tif client.subscriptionID == \"\" {\n\t\treturn nil, errors.New(\"parameter client.subscriptionID cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{subscriptionId}\", url.PathEscape(client.subscriptionID))\n\tif resourceGroupName == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceGroupName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceGroupName}\", url.PathEscape(resourceGroupName))\n\tif serviceName == \"\" {\n\t\treturn nil, errors.New(\"parameter serviceName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{serviceName}\", url.PathEscape(serviceName))\n\tif buildServiceName == \"\" {\n\t\treturn nil, errors.New(\"parameter buildServiceName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{buildServiceName}\", url.PathEscape(buildServiceName))\n\treq, err := 
runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.internal.Endpoint(), urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2023-01-01-preview\")\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header[\"Accept\"] = []string{\"application/json\"}\n\treturn req, nil\n}", "func Build(q interface{}, opts ...Option) ([]byte, error) {\n\tb := &builder{\n\t\tquery: q,\n\t\toperationType: \"query\",\n\t}\n\n\tfor _, opt := range opts {\n\t\topt(b)\n\t}\n\n\treturn b.build()\n}", "func (r *Resolver) Build(target resolver.Target, cc resolver.ClientConn, opts resolver.BuildOptions) (resolver.Resolver, error) {\n\tr.cc = cc\n\n\t// Update conn states if proactively updates already work\n\tinstances, err := r.discoverer.Instances()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tr.updateAddresses(instances)\n\tr.closing.Add(1)\n\tgo r.watcher()\n\treturn r, nil\n}", "func (p *googleCloudProvider) Create(ctx context.Context, req *rpc.CreateRequest) (*rpc.CreateResponse, error) {\n\turn := resource.URN(req.GetUrn())\n\tlabel := fmt.Sprintf(\"%s.Create(%s)\", p.name, urn)\n\tlogging.V(9).Infof(\"%s executing\", label)\n\n\t// Deserialize RPC inputs\n\tinputs, err := plugin.UnmarshalProperties(req.GetProperties(), plugin.MarshalOptions{\n\t\tLabel: fmt.Sprintf(\"%s.properties\", label), SkipNulls: true,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tunmodifiedInputs := deepcopy.Copy(inputs).(resource.PropertyMap)\n\tresourceKey := string(urn.Type())\n\n\tres, ok := p.resourceMap.Resources[resourceKey]\n\tif !ok {\n\t\treturn nil, errors.Errorf(\"resource %q not found\", resourceKey)\n\t}\n\tlogging.V(9).Infof(\"Looked up metadata for %q: %+v\", resourceKey, res)\n\n\t// Handle IamMember, IamBinding resources.\n\tif isIAMOverlay(urn) {\n\t\tinputs, err = inputsForIAMOverlayCreate(res, urn, inputs, p.client)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\turi, err := 
buildCreateURL(res, inputs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tinputsMap := inputs.Mappable()\n\n\tbody := p.prepareAPIInputs(inputs, nil, res.Create.SDKProperties)\n\n\tvar op map[string]interface{}\n\tvar contentType string\n\tif val, hasContentType := inputs[\"contentType\"]; hasContentType {\n\t\tcontentType = val.StringValue()\n\t}\n\n\tif res.AssetUpload {\n\t\top, err = p.handleAssetUpload(uri, &res, inputs, body)\n\t} else if needsMultiPartFormdataContentType(contentType, res) {\n\t\top, err = p.handleFormDataUpload(uri, &res, inputs)\n\t} else {\n\t\top, err = retryRequest(p.client, res.Create.Verb, uri, contentType, body)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"error sending request: %s: %q %+v\", err, uri, inputs.Mappable())\n\t\t}\n\t}\n\n\tresp, err := p.waitForResourceOpCompletion(urn, res.Create.CloudAPIOperation, op, nil)\n\tif err != nil {\n\t\tif resp == nil {\n\t\t\treturn nil, errors.Wrapf(err, \"waiting for completion\")\n\t\t}\n\t\t// A partial failure may have occurred because we got an error and a response.\n\t\t// Try reading the resource state and return a partial error if there is some.\n\t\tid, idErr := calculateResourceID(res, inputsMap, resp)\n\t\tif idErr != nil {\n\t\t\treturn nil, errors.Wrapf(err, \"waiting for completion / calculate ID %s\", idErr)\n\t\t}\n\t\treadResp, getErr := p.client.RequestWithTimeout(res.Read.Verb, resources.AssembleURL(res.RootURL, id), \"\", nil, 0)\n\t\tif getErr != nil {\n\t\t\treturn nil, errors.Wrapf(err, \"waiting for completion / read state %s\", getErr)\n\t\t}\n\t\tdefaults, defErr := extractDefaultsFromResponse(res, inputs, readResp)\n\t\tif defErr != nil {\n\t\t\treturn nil, errors.Wrapf(err, \"failed to extract defaults from response: %s\", defErr)\n\t\t}\n\t\tcheckpoint, cpErr := plugin.MarshalProperties(\n\t\t\tcheckpointObject(inputs, defaults, resp),\n\t\t\tplugin.MarshalOptions{Label: fmt.Sprintf(\"%s.partialCheckpoint\", label), KeepSecrets: true, SkipNulls: 
true},\n\t\t)\n\t\tif cpErr != nil {\n\t\t\treturn nil, errors.Wrapf(err, \"waiting for completion / checkpoint %s\", cpErr)\n\t\t}\n\t\treturn nil, partialError(id, err, checkpoint, req.GetProperties())\n\t}\n\n\t// There are several APIs where the response from the create/operation is non-standard or contains\n\t// stale data. When it comes to checkpointing, we want to store information that strictly matches what we\n\t// get from a subsequent read. As a result, we do an additional read call here and use that to checkpoint state.\n\t// This is likely superfluous/duplicative in many cases but erring on the side of correctness here instead.\n\tid, err := calculateResourceID(res, inputsMap, resp)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"object retrieval failure after successful create / calculate ID %w\", err)\n\t}\n\turl := id\n\tif !strings.HasPrefix(url, \"http\") {\n\t\turl = resources.AssembleURL(res.RootURL, url)\n\t}\n\tresp, err = p.client.RequestWithTimeout(res.Read.Verb, url, \"\", nil, 0)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"object retrieval failure after successful create / read state: %w\", err)\n\t}\n\tdefaults, err := extractDefaultsFromResponse(res, inputs, resp)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to extract defaults from response: %w\", err)\n\t}\n\t// Checkpoint defaults, outputs and inputs into the state.\n\tif isIAMOverlay(urn) {\n\t\tresp = unmodifiedInputs.Mappable()\n\t}\n\tcheckpoint, err := plugin.MarshalProperties(\n\t\tcheckpointObject(unmodifiedInputs, defaults, resp),\n\t\tplugin.MarshalOptions{Label: fmt.Sprintf(\"%s.checkpoint\", label), KeepSecrets: true, SkipNulls: true},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to marshal checkpoint: %w\", err)\n\t}\n\n\treturn &rpc.CreateResponse{\n\t\tId: id,\n\t\tProperties: checkpoint,\n\t}, nil\n}", "func NewProviderGRPC(cfg ProviderGRPCConfig) (*ProviderGRPC, error) {\n\tvar (\n\t\tport = cfg.Port\n\t)\n\tif port == 0 {\n\t\treturn 
nil, errors.New(\"Port must be specified\")\n\t}\n\n\tclient, err := gstorage.NewClient(context.Background())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tserver := grpc.NewServer()\n\ts := &ProviderGRPC{client, server, port}\n\tpb.RegisterStorageServer(server, s)\n\n\treturn s, nil\n}" ]
[ "0.64123386", "0.54491264", "0.5207384", "0.519233", "0.51873875", "0.5060041", "0.500911", "0.49661744", "0.49641725", "0.49387065", "0.4873675", "0.48730406", "0.48674744", "0.48580194", "0.48238623", "0.48182973", "0.4813414", "0.48090446", "0.47689024", "0.47326857", "0.472298", "0.47173777", "0.4714252", "0.47028232", "0.4657974", "0.4646954", "0.464248", "0.4640698", "0.4627448", "0.46177128", "0.46067518", "0.46032315", "0.45969844", "0.4575776", "0.45622355", "0.45620275", "0.45553702", "0.45458055", "0.454211", "0.4523647", "0.45215964", "0.4520909", "0.45207787", "0.45046082", "0.4486489", "0.44824702", "0.44690093", "0.4456611", "0.4425975", "0.4425165", "0.44249907", "0.44241765", "0.44193524", "0.4408623", "0.44069913", "0.44063196", "0.43991753", "0.43969265", "0.4394562", "0.4389627", "0.4378603", "0.43784538", "0.43737596", "0.43610385", "0.43590707", "0.43556008", "0.43553787", "0.43514198", "0.434673", "0.43427888", "0.4337698", "0.43356222", "0.4335578", "0.4329329", "0.43278053", "0.43198928", "0.43198928", "0.43146753", "0.43107393", "0.4309643", "0.43050283", "0.43048543", "0.42909476", "0.42800912", "0.4276427", "0.42745587", "0.42705044", "0.42670897", "0.42619225", "0.4257408", "0.42559502", "0.42554557", "0.42500275", "0.42477682", "0.4246343", "0.42430097", "0.42423904", "0.42402583", "0.423995", "0.42380658" ]
0.75783914
0
Get get a grpc trailer value.
func (t Trailer) Get(k string) string { v := metadata.MD(t).Get(k) if len(v) > 0 { return v[0] } return "" }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GrpcMetadataTrailer(ctx context.Context) *metadata.MD {\n\treturn ctx.Value(trailerKey{}).(*metadata.MD)\n}", "func (parser *PdfParser) GetTrailer() *PdfObjectDictionary {\n\treturn parser.trailer\n}", "func isTrailer(gRPCFrameByte byte) bool {\n\treturn gRPCFrameByte&(1<<7) == (1 << 7)\n}", "func (h *ResponseHeader) ReadTrailer(r *bufio.Reader) error {\n\tn := 1\n\tfor {\n\t\terr := h.tryReadTrailer(r, n)\n\t\tif err == nil {\n\t\t\treturn nil\n\t\t}\n\t\tif err != errNeedMore {\n\t\t\treturn err\n\t\t}\n\t\tn = r.Buffered() + 1\n\t}\n}", "func (h *ResponseHeader) Get(key string) string {\n\treturn string(h.Peek(key))\n}", "func (h *RequestHeader) ReadTrailer(r *bufio.Reader) error {\n\tn := 1\n\tfor {\n\t\terr := h.tryReadTrailer(r, n)\n\t\tif err == nil {\n\t\t\treturn nil\n\t\t}\n\t\tif err != errNeedMore {\n\t\t\treturn err\n\t\t}\n\t\tn = r.Buffered() + 1\n\t}\n}", "func (bgpr BlobsGetPropertiesResponse) CacheControl() string {\n\treturn bgpr.rawResponse.Header.Get(\"Cache-Control\")\n}", "func (cr CompileResponse) GetBody() string {\n\tif cr.Body == nil {\n\t\treturn \"\"\n\t}\n\n\treturn *cr.Body\n}", "func (h *ResponseHeader) TrailerHeader() []byte {\n\th.bufKV.value = h.bufKV.value[:0]\n\tfor _, t := range h.trailer {\n\t\tvalue := h.peek(t.key)\n\t\th.bufKV.value = appendHeaderLine(h.bufKV.value, t.key, value)\n\t}\n\th.bufKV.value = append(h.bufKV.value, strCRLF...)\n\treturn h.bufKV.value\n}", "func DecodeGRPCWebResponseBody(body io.Reader) ([]byte, GRPCWebTrailer, error) {\n\tcontent, err := ioutil.ReadAll(body)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tdecodedBytes := base64DecodeBytes(content)\n\tdata := bytes.NewReader(decodedBytes)\n\n\tvar msg []byte\n\n\t// Reading the first message.\n\tpayloadBytes, isTrailer, err := readPayloadBytes(data)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\t// Trailer must be the last message based on the protocol spec.\n\tif isTrailer {\n\t\treturn nil, makeTrailer(payloadBytes), 
nil\n\t}\n\n\tmsg = payloadBytes\n\n\t// Reading the second message.\n\tpayloadBytes, isTrailer, err = readPayloadBytes(data)\n\tif err != nil {\n\t\treturn msg, nil, err\n\t} else {\n\t\treturn msg, makeTrailer(payloadBytes), nil\n\t}\n}", "func (*LastTradeResponse) Descriptor() ([]byte, []int) {\n\treturn file_api_trading_proto_rawDescGZIP(), []int{58}\n}", "func (v *DCHttpResponse) GetProto() (proto string) {\n\treturn v.Raw.Proto\n}", "func (*TelemetryResponse) Descriptor() ([]byte, []int) {\n\treturn file_interservice_license_control_license_control_proto_rawDescGZIP(), []int{12}\n}", "func (dr downloadResponse) CacheControl() string {\n\treturn dr.rawResponse.Header.Get(\"Cache-Control\")\n}", "func (t *ProtocolHeaderHandlerImpl) WriteTripleFinalRspHeaderField(w http.ResponseWriter, grpcStatusCode int, grpcMessage string, traceProtoBin int) {\n}", "func (resp *BytesWatchDelResp) GetValue() []byte {\n\treturn nil\n}", "func (c WebCredential) GetEnd() string {\n\treturn c.End\n}", "func (rpr ReadPathResponse) CacheControl() string {\n\treturn rpr.rawResponse.Header.Get(\"Cache-Control\")\n}", "func (*CityResponse) Descriptor() ([]byte, []int) {\n\treturn file_pkg_apis_tra_v1alpha1_tra_proto_rawDescGZIP(), []int{1}\n}", "func (h Header) Get(key string) string {\n\treturn textproto.MIMEHeader(h).Get(key)\n}", "func (gppr GetPathPropertiesResponse) CacheControl() string {\n\treturn gppr.rawResponse.Header.Get(\"Cache-Control\")\n}", "func (h *ResponseHeader) writeTrailer(w *bufio.Writer) error {\n\t_, err := w.Write(h.TrailerHeader())\n\treturn err\n}", "func (c *webTransportClosedClient) GetStream() rpcc.Stream { return c.Stream }", "func (f *Future) Get() interface{} {\n\treturn <-f.response\n}", "func (f *Footer) Get(key []byte, readOptions ReadOptions) ([]byte, error) {\n\t_, ss := f.segmentLocs()\n\tif ss == nil {\n\t\tf.DecRef()\n\t\treturn nil, nil\n\t}\n\n\trv, err := ss.Get(key, readOptions)\n\tif err == nil && rv != nil && !readOptions.NoCopyValue 
{\n\t\trv = append(make([]byte, 0, len(rv)), rv...) // Copy.\n\t}\n\n\tf.DecRef()\n\n\treturn rv, err\n}", "func (*CBroadcast_WebRTCStopped_Response) Descriptor() ([]byte, []int) {\n\treturn file_steammessages_broadcast_steamclient_proto_rawDescGZIP(), []int{48}\n}", "func (*CMsgGetCookiesForURLResponse) Descriptor() ([]byte, []int) {\n\treturn file_steam_htmlmessages_proto_rawDescGZIP(), []int{101}\n}", "func (j *DSGit) ParseTrailer(ctx *Ctx, line string) {\n\tm := MatchGroups(GitTrailerPattern, line)\n\tif len(m) == 0 {\n\t\treturn\n\t}\n\toTrailer := m[\"name\"]\n\tlTrailer := strings.ToLower(oTrailer)\n\ttrailers, ok := GitAllowedTrailers[lTrailer]\n\tif !ok {\n\t\tif ctx.Debug > 1 {\n\t\t\tPrintf(\"Trailer %s/%s not in the allowed list %v, skipping\\n\", oTrailer, lTrailer, GitAllowedTrailers)\n\t\t}\n\t\treturn\n\t}\n\tfor _, trailer := range trailers {\n\t\tary, ok := j.Commit[trailer]\n\t\tif ok {\n\t\t\tif ctx.Debug > 1 {\n\t\t\t\tPrintf(\"trailer %s -> %s found in '%s'\\n\", oTrailer, trailer, line)\n\t\t\t}\n\t\t\t// Trailer can be the same as header value, we still want to have it - with \"-Trailer\" prefix added\n\t\t\t_, ok = ary.(string)\n\t\t\tif ok {\n\t\t\t\ttrailer += \"-Trailer\"\n\t\t\t\tary2, ok2 := j.Commit[trailer]\n\t\t\t\tif ok2 {\n\t\t\t\t\tif ctx.Debug > 1 {\n\t\t\t\t\t\tPrintf(\"renamed trailer %s -> %s found in '%s'\\n\", oTrailer, trailer, line)\n\t\t\t\t\t}\n\t\t\t\t\tj.Commit[trailer] = append(ary2.([]interface{}), m[\"value\"])\n\t\t\t\t} else {\n\t\t\t\t\tif ctx.Debug > 1 {\n\t\t\t\t\t\tPrintf(\"added renamed trailer %s\\n\", trailer)\n\t\t\t\t\t}\n\t\t\t\t\tj.Commit[trailer] = []interface{}{m[\"value\"]}\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tj.Commit[trailer] = j.UniqueStringArray(append(ary.([]interface{}), m[\"value\"]))\n\t\t\t\tif ctx.Debug > 1 {\n\t\t\t\t\tPrintf(\"appended trailer %s -> %s found in '%s'\\n\", oTrailer, trailer, line)\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tj.Commit[trailer] = 
[]interface{}{m[\"value\"]}\n\t\t}\n\t}\n}", "func (imr *InvokeMethodResponse) Trailers() DaprInternalMetadata {\n\tif imr.r == nil {\n\t\treturn nil\n\t}\n\treturn imr.r.Trailers\n}", "func DecodeGrpcRespLicense(ctx context.Context, response interface{}) (interface{}, error) {\n\treturn response, nil\n}", "func (*GetResponse) Descriptor() ([]byte, []int) {\n\treturn file_grpc_exercicio_proto_rawDescGZIP(), []int{4}\n}", "func (resp *BytesWatchDelResp) GetPrevValue() []byte {\n\treturn nil\n}", "func (dr DownloadResponse) CacheControl() string {\n\treturn dr.dr.CacheControl()\n}", "func (v *ControlPlaneVersion) Get(obj *unstructured.Unstructured) (*string, error) {\n\tvalue, ok, err := unstructured.NestedString(obj.UnstructuredContent(), v.Path()...)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to retrieve control plane version\")\n\t}\n\tif !ok {\n\t\treturn nil, errors.Errorf(\"%s not found\", \".\"+strings.Join(v.Path(), \".\"))\n\t}\n\treturn &value, nil\n}", "func (x *fastReflection_ValidatorCurrentRewardsRecord) Get(descriptor protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch descriptor.FullName() {\n\tcase \"cosmos.distribution.v1beta1.ValidatorCurrentRewardsRecord.validator_address\":\n\t\tvalue := x.ValidatorAddress\n\t\treturn protoreflect.ValueOfString(value)\n\tcase \"cosmos.distribution.v1beta1.ValidatorCurrentRewardsRecord.rewards\":\n\t\tvalue := x.Rewards\n\t\treturn protoreflect.ValueOfMessage(value.ProtoReflect())\n\tdefault:\n\t\tif descriptor.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.distribution.v1beta1.ValidatorCurrentRewardsRecord\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.distribution.v1beta1.ValidatorCurrentRewardsRecord does not contain field %s\", descriptor.FullName()))\n\t}\n}", "func (r RPCHeader) Get(key string) string {\n\tif r == nil {\n\t\treturn \"\"\n\t}\n\tvalues, ok := r[key]\n\tif !ok {\n\t\treturn \"\"\n\t}\n\treturn 
strings.Join(values, \",\")\n}", "func (w *response) finalTrailers() Header {\n\tvar t Header\n\tfor k, vv := range w.handlerHeader {\n\t\tif strings.HasPrefix(k, TrailerPrefix) {\n\t\t\tif t == nil {\n\t\t\t\tt = make(Header)\n\t\t\t}\n\t\t\tt[strings.TrimPrefix(k, TrailerPrefix)] = vv\n\t\t}\n\t}\n\tfor _, k := range w.trailers {\n\t\tif t == nil {\n\t\t\tt = make(Header)\n\t\t}\n\t\tfor _, v := range w.handlerHeader[k] {\n\t\t\tt.Add(k, v)\n\t\t}\n\t}\n\treturn t\n}", "func (*GetTokenResponse) Descriptor() ([]byte, []int) {\n\treturn file_access_service_token_proto_rawDescGZIP(), []int{1}\n}", "func getRTVBody(c http.Client, url string) ([]byte, error) {\n\tlog.Printf(\"Fetching URL: %q\\n\", url)\n\tresp, err := c.Get(url)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != 200 {\n\t\treturn nil, errors.Errorf(\"Non-200 response fetching %s, %+v\", url, resp)\n\t}\n\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn body, nil\n}", "func (*GetResp) Descriptor() ([]byte, []int) {\n\treturn file_cache_cache_proto_rawDescGZIP(), []int{2}\n}", "func (*GetTripResponse) Descriptor() ([]byte, []int) {\n\treturn file_trip_proto_rawDescGZIP(), []int{3}\n}", "func (t *RPCCtx) Get(w http.ResponseWriter, r *http.Request) {\n\tt.Log.Handle(w, r, nil, \"begin\", \"RPCCtx\", \"Get\")\n\n\treqCtx := r.Context()\n\twhatever := reqCtx\n\n\tt.embed.Get(whatever)\n\n\tw.WriteHeader(200)\n\n\tt.Log.Handle(w, r, nil, \"end\", \"RPCCtx\", \"Get\")\n\n}", "func DecodeGrpcRespRouteTable(ctx context.Context, response interface{}) (interface{}, error) {\n\treturn response, nil\n}", "func (tl *TokenLimiter) Get() *Token {\n\treturn <-tl.ch\n}", "func (x *fastReflection_Bech32PrefixResponse) Get(descriptor protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch descriptor.FullName() {\n\tcase \"cosmos.auth.v1beta1.Bech32PrefixResponse.bech32_prefix\":\n\t\tvalue := x.Bech32Prefix\n\t\treturn 
protoreflect.ValueOfString(value)\n\tdefault:\n\t\tif descriptor.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.auth.v1beta1.Bech32PrefixResponse\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.auth.v1beta1.Bech32PrefixResponse does not contain field %s\", descriptor.FullName()))\n\t}\n}", "func (x *fastReflection_EventRetire) Get(descriptor protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch descriptor.FullName() {\n\tcase \"regen.ecocredit.v1alpha1.EventRetire.retirer\":\n\t\tvalue := x.Retirer\n\t\treturn protoreflect.ValueOfString(value)\n\tcase \"regen.ecocredit.v1alpha1.EventRetire.batch_denom\":\n\t\tvalue := x.BatchDenom\n\t\treturn protoreflect.ValueOfString(value)\n\tcase \"regen.ecocredit.v1alpha1.EventRetire.amount\":\n\t\tvalue := x.Amount\n\t\treturn protoreflect.ValueOfString(value)\n\tcase \"regen.ecocredit.v1alpha1.EventRetire.location\":\n\t\tvalue := x.Location\n\t\treturn protoreflect.ValueOfString(value)\n\tdefault:\n\t\tif descriptor.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: regen.ecocredit.v1alpha1.EventRetire\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message regen.ecocredit.v1alpha1.EventRetire does not contain field %s\", descriptor.FullName()))\n\t}\n}", "func (c *trustTokenOperationDoneClient) GetStream() rpcc.Stream { return c.Stream }", "func (*GetContractResponse) Descriptor() ([]byte, []int) {\n\treturn file_contract_proto_rawDescGZIP(), []int{8}\n}", "func (s FunctionResponse) GoString() string {\n\treturn s.String()\n}", "func (c *responseReceivedExtraInfoClient) GetStream() rpcc.Stream { return c.Stream }", "func (*StopTransactionResponse) Descriptor() ([]byte, []int) {\n\treturn file_transaction_proto_rawDescGZIP(), []int{3}\n}", "func (*TokenResponse) Descriptor() ([]byte, []int) {\n\treturn file_src_h4u_proto_rawDescGZIP(), []int{8}\n}", "func (bh *Header) Get(t Tag) string {\n\tswitch t {\n\tcase versionTag:\n\t\treturn 
bh.Version\n\tcase sortOrderTag:\n\t\treturn bh.SortOrder.String()\n\tcase groupOrderTag:\n\t\treturn bh.GroupOrder.String()\n\t}\n\tfor _, tp := range bh.otherTags {\n\t\tif t == tp.tag {\n\t\t\treturn tp.value\n\t\t}\n\t}\n\treturn \"\"\n}", "func (x *fastReflection_QueryParamsResponse) Get(descriptor protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch descriptor.FullName() {\n\tcase \"cosmos.auth.v1beta1.QueryParamsResponse.params\":\n\t\tvalue := x.Params\n\t\treturn protoreflect.ValueOfMessage(value.ProtoReflect())\n\tdefault:\n\t\tif descriptor.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.auth.v1beta1.QueryParamsResponse\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.auth.v1beta1.QueryParamsResponse does not contain field %s\", descriptor.FullName()))\n\t}\n}", "func (*GetLedgerEndResponse) Descriptor() ([]byte, []int) {\n\treturn file_com_daml_ledger_api_v1_transaction_service_proto_rawDescGZIP(), []int{8}\n}", "func DecodeGrpcRespRoute(ctx context.Context, response interface{}) (interface{}, error) {\n\treturn response, nil\n}", "func getMetadata(r *RTVCache) (*rtvData, error) {\n\t// Fetch the runtime metadata json\n\tb, err := getRTVBody(r.c, rtvHost+\"/rtv/metadata\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar d rtvData\n\terr = json.Unmarshal(b, &d)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t// Minimal validation of expected values.\n\tif d.RTV == \"\" {\n\t\treturn nil, errors.Errorf(\"Could not unmarshal RTV value from %s\", b)\n\t}\n\tif d.CSSURL == \"\" {\n\t\treturn nil, errors.Errorf(\"Could not unmarshal CSS URL value from %s\", b)\n\t}\n\tif _, err := url.Parse(d.CSSURL); err != nil {\n\t\treturn nil, errors.Wrapf(err, \"Error parsing CSS URL %s\", d.CSSURL)\n\t}\n\treturn &d, nil\n}", "func (*GetRsp) Descriptor() ([]byte, []int) {\n\treturn file_grpc_proto_rawDescGZIP(), []int{1}\n}", "func (*GetResponse) Descriptor() ([]byte, []int) {\n\treturn 
file_darkbase_darkbase_proto_rawDescGZIP(), []int{1}\n}", "func (x *fastReflection_QueryAccountResponse) Get(descriptor protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch descriptor.FullName() {\n\tcase \"cosmos.auth.v1beta1.QueryAccountResponse.account\":\n\t\tvalue := x.Account\n\t\treturn protoreflect.ValueOfMessage(value.ProtoReflect())\n\tdefault:\n\t\tif descriptor.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.auth.v1beta1.QueryAccountResponse\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.auth.v1beta1.QueryAccountResponse does not contain field %s\", descriptor.FullName()))\n\t}\n}", "func (mes *MarkerEncodingScheme) Tail(b byte, pos int) checked.Bytes { return mes.tails[int(b)][pos-1] }", "func (_e *MockDataReceiverService_PutMetricClient_Expecter) Trailer() *MockDataReceiverService_PutMetricClient_Trailer_Call {\n\treturn &MockDataReceiverService_PutMetricClient_Trailer_Call{Call: _e.mock.On(\"Trailer\")}\n}", "func (a API) GetCFilterHeaderGetRes() (out *string, e error) {\n\tout, _ = a.Result.(*string)\n\te, _ = a.Result.(error)\n\treturn \n}", "func (resp *BytesWatchPutResp) GetValue() []byte {\n\treturn resp.value\n}", "func (x *fastReflection_MsgDepositValidatorRewardsPoolResponse) Get(descriptor protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch descriptor.FullName() {\n\tdefault:\n\t\tif descriptor.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.distribution.v1beta1.MsgDepositValidatorRewardsPoolResponse\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.distribution.v1beta1.MsgDepositValidatorRewardsPoolResponse does not contain field %s\", descriptor.FullName()))\n\t}\n}", "func (c *requestInterceptedClient) GetStream() rpcc.Stream { return c.Stream }", "func (c *responseReceivedClient) GetStream() rpcc.Stream { return c.Stream }", "func (h *RequestHeader) Get(key string) string {\n\treturn string(h.Peek(key))\n}", "func 
(*ValueResponse) Descriptor() ([]byte, []int) {\n\treturn file_kvapi_proto_rawDescGZIP(), []int{2}\n}", "func (x *fastReflection_ValidatorHistoricalRewardsRecord) Get(descriptor protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch descriptor.FullName() {\n\tcase \"cosmos.distribution.v1beta1.ValidatorHistoricalRewardsRecord.validator_address\":\n\t\tvalue := x.ValidatorAddress\n\t\treturn protoreflect.ValueOfString(value)\n\tcase \"cosmos.distribution.v1beta1.ValidatorHistoricalRewardsRecord.period\":\n\t\tvalue := x.Period\n\t\treturn protoreflect.ValueOfUint64(value)\n\tcase \"cosmos.distribution.v1beta1.ValidatorHistoricalRewardsRecord.rewards\":\n\t\tvalue := x.Rewards\n\t\treturn protoreflect.ValueOfMessage(value.ProtoReflect())\n\tdefault:\n\t\tif descriptor.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.distribution.v1beta1.ValidatorHistoricalRewardsRecord\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.distribution.v1beta1.ValidatorHistoricalRewardsRecord does not contain field %s\", descriptor.FullName()))\n\t}\n}", "func (h *ResponseHeader) PeekTrailerKeys() [][]byte {\n\th.mulHeader = h.mulHeader[:0]\n\th.mulHeader = peekArgsKeys(h.mulHeader, h.trailer)\n\treturn h.mulHeader\n}", "func (f *fakeDiskUpdateWatchServer) SetTrailer(metadata.MD) {}", "func (x *fastReflection_QueryAccountInfoResponse) Get(descriptor protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch descriptor.FullName() {\n\tcase \"cosmos.auth.v1beta1.QueryAccountInfoResponse.info\":\n\t\tvalue := x.Info\n\t\treturn protoreflect.ValueOfMessage(value.ProtoReflect())\n\tdefault:\n\t\tif descriptor.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.auth.v1beta1.QueryAccountInfoResponse\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.auth.v1beta1.QueryAccountInfoResponse does not contain field %s\", descriptor.FullName()))\n\t}\n}", "func (h *RequestHeader) writeTrailer(w 
*bufio.Writer) error {\n\t_, err := w.Write(h.TrailerHeader())\n\treturn err\n}", "func (s *headerExtractor) Recv() (*flight.FlightData, error) {\n\tdata, err := s.stream.Recv()\n\ts.once.Do(func() {\n\t\ts.header, s.err = s.stream.Header()\n\t})\n\treturn data, err\n}", "func (*GetVegaTimeResponse) Descriptor() ([]byte, []int) {\n\treturn file_api_trading_proto_rawDescGZIP(), []int{100}\n}", "func (*CleartextChaincodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_fpc_fpc_proto_rawDescGZIP(), []int{8}\n}", "func (s ExportRevisionsToS3ResponseDetails) GoString() string {\n\treturn s.String()\n}", "func (*TradesByMarketResponse) Descriptor() ([]byte, []int) {\n\treturn file_api_trading_proto_rawDescGZIP(), []int{96}\n}", "func (*LogOutResponse) Descriptor() ([]byte, []int) {\n\treturn file_types_protobuf_authority_authority_proto_rawDescGZIP(), []int{3}\n}", "func (s UtteranceBotResponse) GoString() string {\n\treturn s.String()\n}", "func GetTimeout(header http.Header, defaultTimeout time.Duration) time.Duration {\n\tif timeout := header.Get(grpcTimeoutHeader); len(timeout) > 0 {\n\t\tif t, err := time.ParseDuration(timeout); err == nil {\n\t\t\treturn t\n\t\t}\n\t}\n\n\treturn defaultTimeout\n}", "func (*MemcacheGrabTailResponse) Descriptor() ([]byte, []int) {\n\treturn file_memcache_service_proto_rawDescGZIP(), []int{19}\n}", "func (c *Client) RenterDownloadHTTPResponseGet(siaPath string, offset, length uint64) (resp []byte, err error) {\n\tsiaPath = strings.TrimPrefix(siaPath, \"/\")\n\tquery := fmt.Sprintf(\"%s?offset=%d&length=%d&httpresp=true\", siaPath, offset, length)\n\tresp, err = c.getRawResponse(\"/renter/download/\" + query)\n\treturn\n}", "func (c APIClient) GetTag(tag string, writer io.Writer) error {\n\tgetTagClient, err := c.ObjectAPIClient.GetTag(\n\t\tc.Ctx(),\n\t\t&pfs.Tag{Name: tag},\n\t)\n\tif err != nil {\n\t\treturn grpcutil.ScrubGRPC(err)\n\t}\n\tif err := grpcutil.WriteFromStreamingBytesClient(getTagClient, writer); err != nil 
{\n\t\treturn grpcutil.ScrubGRPC(err)\n\t}\n\treturn nil\n}", "func (fctx *AvFormatContext) AvWriteTrailer() int {\n\treturn int(C.av_write_trailer((*C.struct_AVFormatContext)(fctx)))\n}", "func DecodeGrpcRespLicenseSpec(ctx context.Context, response interface{}) (interface{}, error) {\n\treturn response, nil\n}", "func (*TradesByPartyResponse) Descriptor() ([]byte, []int) {\n\treturn file_api_trading_proto_rawDescGZIP(), []int{66}\n}", "func (*ValueResponse) Descriptor() ([]byte, []int) {\n\treturn file_api_proto_rawDescGZIP(), []int{11}\n}", "func (o HttpRuleResponseOutput) Get() pulumi.StringOutput {\n\treturn o.ApplyT(func(v HttpRuleResponse) string { return v.Get }).(pulumi.StringOutput)\n}", "func (s StopContactStreamingOutput) GoString() string {\n\treturn s.String()\n}", "func (*CityResponse) Descriptor() ([]byte, []int) {\n\treturn file_api_proto_rawDescGZIP(), []int{6}\n}", "func (x *fastReflection_EventCancel) Get(descriptor protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch descriptor.FullName() {\n\tcase \"regen.ecocredit.v1alpha1.EventCancel.canceller\":\n\t\tvalue := x.Canceller\n\t\treturn protoreflect.ValueOfString(value)\n\tcase \"regen.ecocredit.v1alpha1.EventCancel.batch_denom\":\n\t\tvalue := x.BatchDenom\n\t\treturn protoreflect.ValueOfString(value)\n\tcase \"regen.ecocredit.v1alpha1.EventCancel.amount\":\n\t\tvalue := x.Amount\n\t\treturn protoreflect.ValueOfString(value)\n\tdefault:\n\t\tif descriptor.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: regen.ecocredit.v1alpha1.EventCancel\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message regen.ecocredit.v1alpha1.EventCancel does not contain field %s\", descriptor.FullName()))\n\t}\n}", "func (*TradeResponse) Descriptor() ([]byte, []int) {\n\treturn file_grpcoin_proto_rawDescGZIP(), []int{9}\n}", "func (h *RequestHeader) TrailerHeader() []byte {\n\th.bufKV.value = h.bufKV.value[:0]\n\tfor _, t := range h.trailer {\n\t\tvalue := 
h.peek(t.key)\n\t\th.bufKV.value = appendHeaderLine(h.bufKV.value, t.key, value)\n\t}\n\th.bufKV.value = append(h.bufKV.value, strCRLF...)\n\treturn h.bufKV.value\n}", "func (*CBroadcast_GetRTMPInfo_Response) Descriptor() ([]byte, []int) {\n\treturn file_steammessages_broadcast_steamclient_proto_rawDescGZIP(), []int{43}\n}", "func (x *fastReflection_MsgWithdrawDelegatorRewardResponse) Get(descriptor protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch descriptor.FullName() {\n\tcase \"cosmos.distribution.v1beta1.MsgWithdrawDelegatorRewardResponse.amount\":\n\t\tif len(x.Amount) == 0 {\n\t\t\treturn protoreflect.ValueOfList(&_MsgWithdrawDelegatorRewardResponse_1_list{})\n\t\t}\n\t\tlistValue := &_MsgWithdrawDelegatorRewardResponse_1_list{list: &x.Amount}\n\t\treturn protoreflect.ValueOfList(listValue)\n\tdefault:\n\t\tif descriptor.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.distribution.v1beta1.MsgWithdrawDelegatorRewardResponse\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.distribution.v1beta1.MsgWithdrawDelegatorRewardResponse does not contain field %s\", descriptor.FullName()))\n\t}\n}", "func (*StreamingResponse) Descriptor() ([]byte, []int) {\n\treturn file_api_protobuf_spec_example_example_proto_rawDescGZIP(), []int{4}\n}", "func (s PropertyResponse) GoString() string {\n\treturn s.String()\n}", "func (t *Triple) Get(dir string) string {\n\tif dir == \"s\" {\n\t\treturn t.Sub\n\t} else if dir == \"p\" {\n\t\treturn t.Pred\n\t} else if dir == \"prov\" || dir == \"c\" {\n\t\treturn t.Provenance\n\t} else if dir == \"o\" {\n\t\treturn t.Obj\n\t} else {\n\t\tpanic(fmt.Sprintf(\"No Such Triple Direction, %s\", dir))\n\t}\n}" ]
[ "0.6455265", "0.59738564", "0.54605633", "0.52981204", "0.52911794", "0.518575", "0.510424", "0.5035648", "0.5035331", "0.4973329", "0.4951593", "0.49217966", "0.4897951", "0.48978537", "0.4867257", "0.4833172", "0.48279515", "0.4811198", "0.47573626", "0.47364062", "0.47299218", "0.472903", "0.47224572", "0.47093958", "0.47006592", "0.46890047", "0.46869963", "0.46788388", "0.4674857", "0.46747333", "0.46631196", "0.46596113", "0.46585238", "0.46530086", "0.46480095", "0.46367475", "0.46306747", "0.46248114", "0.46219316", "0.46217683", "0.46121356", "0.46113676", "0.46103752", "0.4608323", "0.46061495", "0.46021488", "0.4599972", "0.4588963", "0.45822498", "0.45750755", "0.45665598", "0.45651498", "0.45629814", "0.45619652", "0.4557899", "0.45553038", "0.45512238", "0.45497847", "0.4547681", "0.45466748", "0.45450574", "0.45372728", "0.45367512", "0.45323542", "0.4528895", "0.45266637", "0.45194304", "0.45147222", "0.45129958", "0.4511363", "0.45092332", "0.45069084", "0.45058155", "0.44981617", "0.448962", "0.4489247", "0.4481712", "0.44808826", "0.44802895", "0.44791517", "0.4469017", "0.4465367", "0.44618613", "0.4461626", "0.4456738", "0.44555333", "0.44517958", "0.44505078", "0.44460863", "0.4441203", "0.44411212", "0.44394302", "0.44373932", "0.44339958", "0.44279", "0.4427772", "0.44267464", "0.44233048", "0.4412686", "0.44114274" ]
0.44370463
93
Given a list of args return an array of `ParamValue`s
func Params(params ...) []ParamValue { pStruct := reflect.NewValue(params).(*reflect.StructValue) par := make([]ParamValue, pStruct.NumField()) for n := 0; n < len(par); n++ { par[n] = param(pStruct.Field(n)) } return par }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func makeParams(args ...interface{}) []rpcValue {\n\tif len(args) == 0 {\n\t\treturn nil\n\t}\n\tarr := make([]rpcValue, 0, len(args))\n\tfor _, v := range args {\n\t\tarr = append(arr, makeValue(v))\n\t}\n\treturn arr\n}", "func (c ResolverGetUploadsByIDsFuncCall) Args() []interface{} {\n\ttrailing := []interface{}{}\n\tfor _, val := range c.Arg1 {\n\t\ttrailing = append(trailing, val)\n\t}\n\n\treturn append([]interface{}{c.Arg0}, trailing...)\n}", "func (c UploadServiceGetUploadsByIDsFuncCall) Args() []interface{} {\n\ttrailing := []interface{}{}\n\tfor _, val := range c.Arg1 {\n\t\ttrailing = append(trailing, val)\n\t}\n\n\treturn append([]interface{}{c.Arg0}, trailing...)\n}", "func argsFn(args ...OBJ) OBJ {\n\tl := len(os.Args[1:])\n\tresult := make([]OBJ, l)\n\tfor i, txt := range os.Args[1:] {\n\t\tresult[i] = &object.String{Value: txt}\n\t}\n\treturn &object.Array{Elements: result}\n}", "func (c UploadServiceGetListTagsFuncCall) Args() []interface{} {\n\ttrailing := []interface{}{}\n\tfor _, val := range c.Arg2 {\n\t\ttrailing = append(trailing, val)\n\t}\n\n\treturn append([]interface{}{c.Arg0, c.Arg1}, trailing...)\n}", "func GetParamValues(c echo.Context, paramNames ...string) ([]string, *echo.HTTPError) {\n\tret := make([]string, 0, len(paramNames))\n\tfor _, paramName := range paramNames {\n\t\tval := c.Param(paramName)\n\t\tif val == \"\" {\n\t\t\treturn []string{}, echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf(\"invalid/missing param %s\", paramName))\n\t\t}\n\t\tret = append(ret, val)\n\t}\n\treturn ret, nil\n}", "func ArgsList(args ...interface{}) Node {\n\tvar result Node\n\tfor _, arg := range args {\n\t\tif result.Node == nil {\n\t\t\tresult = ToNode(arg)\n\t\t} else {\n\t\t\tresult = Expr(\",\", result, arg)\n\t\t}\n\t}\n\treturn result\n}", "func ArgsArray() ([]ArgStruct, error) {\n\tvar argsArray []ArgStruct\n\targBytes := []byte(cliconfig.Config().Args())\n\tif strings.HasPrefix(cliconfig.Config().Args(), \"[\") {\n\t\tif err := 
json.Unmarshal(argBytes, &argsArray); err != nil {\n\t\t\treturn nil, errors.Errorf(\"Error unmarshaling JSON arg string: %v\", err)\n\t\t}\n\t} else {\n\t\targs := ArgStruct{}\n\t\tif err := json.Unmarshal(argBytes, &args); err != nil {\n\t\t\treturn nil, errors.Errorf(\"Error unmarshaling JSON arg string: %v\", err)\n\t\t}\n\t\targsArray = append(argsArray, args)\n\t}\n\treturn argsArray, nil\n}", "func Arr(args ...interface{}) []interface{} {\n\treturn args\n}", "func GetArgsArray(cList CommandList) (cmdArray []String){\n\n\t//var i uint32\n\tvar nArgs uint32\n\tnArgs = uint32(len(cList))\n\n\tcmdArray = make([]String, nArgs)\n\n\tfor i, arg := range cList {\n\n\t\tcmdArray[i] = String(arg)\n\n\t}\n\n\treturn\n\n}", "func (v *Value) Args() []string {\n\tif v.isBool {\n\t\treturn []string{}\n\t} else {\n\t\treturn v.args\n\t}\n}", "func (pvs ParamValues) Parse(*mcmp.Component) ([]ParamValue, error) {\n\treturn pvs, nil\n}", "func (c AutoIndexingServiceGetListTagsFuncCall) Args() []interface{} {\n\ttrailing := []interface{}{}\n\tfor _, val := range c.Arg2 {\n\t\ttrailing = append(trailing, val)\n\t}\n\n\treturn append([]interface{}{c.Arg0, c.Arg1}, trailing...)\n}", "func reflectArgs(fnType reflect.Type, args []Argument) []reflect.Value {\n\tin := make([]reflect.Value, len(args))\n\n\tfor k, arg := range args {\n\t\tif arg == nil {\n\t\t\t// Use the zero value of the function parameter type,\n\t\t\t// since \"reflect.Call\" doesn't accept \"nil\" parameters\n\t\t\tin[k] = reflect.New(fnType.In(k)).Elem()\n\t\t} else {\n\t\t\tin[k] = reflect.ValueOf(arg)\n\t\t}\n\t}\n\n\treturn in\n}", "func (c ResolverGetIndexesByIDsFuncCall) Args() []interface{} {\n\ttrailing := []interface{}{}\n\tfor _, val := range c.Arg1 {\n\t\ttrailing = append(trailing, val)\n\t}\n\n\treturn append([]interface{}{c.Arg0}, trailing...)\n}", "func parseParamList(list string) []string {\n\tlist = strings.TrimSpace(list)\n\tif list == \"\" {\n\t\treturn []string{}\n\t}\n\treturn 
regexp.MustCompile(`\\s*,\\s*`).Split(list, -1)\n}", "func getArgs(genericParams map[string]interface{}) ([]string, error) {\n\targs := []string{}\n\tval, found := genericParams[\"args\"]\n\tif found {\n\t\tvar isArray bool\n\t\targs, isArray = val.([]string)\n\t\tif !isArray {\n\t\t\treturn nil, fmt.Errorf(\"expected []string, found: %v\", val)\n\t\t}\n\t\tdelete(genericParams, \"args\")\n\t}\n\treturn args, nil\n}", "func parameteriseValues(args []string, valueMap map[string]string) []string {\n\tfor k, v := range valueMap {\n\t\tkey := strings.Replace(k, \"_\", \"-\", -1)\n\t\targs = append(args, \"--\"+key)\n\n\t\tif fmt.Sprintf(\"%v\", v) != \"\" {\n\t\t\targs = append(args, fmt.Sprintf(\"%v\", v))\n\t\t}\n\t}\n\n\treturn args\n}", "func (c PipelineRunFuncCall) Args() []interface{} {\n\treturn []interface{}{}\n}", "func (c AutoIndexingServiceGetIndexesByIDsFuncCall) Args() []interface{} {\n\ttrailing := []interface{}{}\n\tfor _, val := range c.Arg1 {\n\t\ttrailing = append(trailing, val)\n\t}\n\n\treturn append([]interface{}{c.Arg0}, trailing...)\n}", "func ParamSpecValueArray_(name string, nick string, blurb string, elementSpec *ParamSpec, flags ParamFlags) *ParamSpec {\n\tc_name := C.CString(name)\n\tdefer C.free(unsafe.Pointer(c_name))\n\n\tc_nick := C.CString(nick)\n\tdefer C.free(unsafe.Pointer(c_nick))\n\n\tc_blurb := C.CString(blurb)\n\tdefer C.free(unsafe.Pointer(c_blurb))\n\n\tc_element_spec := (*C.GParamSpec)(C.NULL)\n\tif elementSpec != nil {\n\t\tc_element_spec = (*C.GParamSpec)(elementSpec.ToC())\n\t}\n\n\tc_flags := (C.GParamFlags)(flags)\n\n\tretC := C.g_param_spec_value_array(c_name, c_nick, c_blurb, c_element_spec, c_flags)\n\tretGo := ParamSpecNewFromC(unsafe.Pointer(retC))\n\n\treturn retGo\n}", "func (e *Encoder) Args(v []driver.Value) error {\n\t_, err := e.Int16(int16(len(v)))\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor i := 0; i < len(v); i++ {\n\t\t_, err = e.Marshal(v[i])\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn 
nil\n}", "func (c JobRunFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1, c.Arg2}\n}", "func (c UploadServiceGetUploadsFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func RunParamList(m map[string]interface{}) error {\n\tpl, err := NewParamList(m)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn pl.Run()\n}", "func GetGlobArgs(args []string) ([]string, error) {\n\n\tvar a []string\n\n\tfor _, v := range args {\n\t\tfiles, err := filepath.Glob(v)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\ta = append(a, files...)\n\t}\n\n\treturn a, nil\n}", "func (j *Env) createArgs(args []interface{}) (ptr unsafe.Pointer, refs []jobject, err error) {\n\tif len(args) == 0 {\n\t\treturn nil, nil, nil\n\t}\n\n\targList := make([]uint64, len(args))\n\trefs = make([]jobject, 0)\n\n\tfor i, arg := range args {\n\t\tswitch v := arg.(type) {\n\t\tcase *convertedArg:\n\t\t\targList[i] = uint64(v.ObjectRef.jobject)\n\t\t\trefs = append(refs, v.ObjectRef.jobject)\n\t\tcase jobj:\n\t\t\targList[i] = uint64(v.jobj())\n\t\tcase bool:\n\t\t\tif v {\n\t\t\t\targList[i] = uint64(jboolean(1))\n\t\t\t} else {\n\t\t\t\targList[i] = uint64(jboolean(0))\n\t\t\t}\n\t\tcase byte:\n\t\t\targList[i] = uint64(jbyte(v))\n\t\tcase uint16:\n\t\t\targList[i] = uint64(jchar(v))\n\t\tcase int16:\n\t\t\targList[i] = uint64(jshort(v))\n\t\tcase int32:\n\t\t\targList[i] = uint64(jint(v))\n\t\tcase int:\n\t\t\targList[i] = uint64(jint(int32(v)))\n\t\tcase int64:\n\t\t\targList[i] = uint64(jlong(v))\n\t\tcase float32:\n\t\t\targList[i] = uint64(jfloat(v))\n\t\tcase float64:\n\t\t\targList[i] = uint64(jdouble(v))\n\t\tcase []bool, []byte, []int16, []uint16, []int32, []int, []int64, []float32, []float64:\n\t\t\tif array, arrayErr := j.toJavaArray(v); arrayErr == nil {\n\t\t\t\targList[i] = uint64(array)\n\t\t\t\trefs = append(refs, array)\n\t\t\t} else {\n\t\t\t\terr = arrayErr\n\t\t\t}\n\t\tdefault:\n\t\t\terr = fmt.Errorf(\"JNIGI: argument not a 
valid value %T (%v)\", args[i], args[i])\n\t\t}\n\n\t\tif err != nil {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif err != nil {\n\t\tfor _, ref := range refs {\n\t\t\tdeleteLocalRef(j.jniEnv, ref)\n\t\t}\n\t\trefs = nil\n\t\treturn\n\t}\n\n\tif copyToC {\n\t\tptr = malloc(unsafe.Sizeof(uint64(0)) * uintptr(len(args)))\n\t\tdata := (*(*[big]uint64)(ptr))[:len(args)]\n\t\tcopy(data, argList)\n\t} else {\n\t\tptr = unsafe.Pointer(&argList[0])\n\t}\n\treturn\n}", "func (p Param) AsParams() []Param {\n\treturn []Param{p}\n}", "func (rs pipeline) Args() []Type {\n\trunnerArgs, ok := rs[0].(RunnerArgs)\n\tif ok {\n\t\treturn runnerArgs.Args()\n\t}\n\treturn []Type{Wildcard}\n}", "func TestMultipleArgs(t *testing.T) {\n\tflagSet := pflag.NewFlagSet(\"\", pflag.PanicOnError)\n\tregisterParamsFlags(flagSet)\n\tassert.NoError(t, flagSet.Parse([]string{\"-p\", \"key1=1\", \"-p\", \"key2=2\"}))\n\tvalues, errors := getParamsFromFlags(flagSet)\n\tassert.Len(t, values, 2)\n\tassert.Len(t, errors, 0)\n}", "func BuildArgs(s Servable, argsType reflect.Type, argsValue reflect.Value, req *http.Request, buildStructArg func(s Servable, typeName string, req *http.Request) (v reflect.Value, err error)) ([]reflect.Value, error) {\n\tfieldNum := argsType.NumField()\n\tparams := make([]reflect.Value, fieldNum)\n\tfor i := 0; i < fieldNum; i++ {\n\t\tfield := argsType.Field(i)\n\t\tfieldName := field.Name\n\t\tvalueType := argsValue.FieldByName(fieldName).Type()\n\t\tif field.Type.Kind() == reflect.Ptr && valueType.Elem().Kind() == reflect.Struct {\n\t\t\tconvertor := components(req).Convertor(valueType.Elem().Name())\n\t\t\tif convertor != nil {\n\t\t\t\tparams[i] = convertor(req)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tstructName := valueType.Elem().Name()\n\t\t\tv, err := buildStructArg(s, structName, req)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.New(fmt.Sprintf(\"turbo: failed to BuildArgs, error:%s\", err))\n\t\t\t}\n\t\t\tparams[i] = v\n\t\t\tcontinue\n\t\t}\n\t\tv, _ := findValue(fieldName, 
req)\n\t\tvalue, err := reflectValue(field.Type, argsValue.FieldByName(fieldName), v)\n\t\tlogErrorIf(err)\n\t\tparams[i] = value\n\t}\n\treturn params, nil\n}", "func Array(args ...interface{}) []interface{} {\n\treturn []interface{}{args}\n}", "func args(s *scanner, token string) []string {\n\ttoken = strings.TrimSpace(token)\n\n\tvar parts []string\n\tfor {\n\t\tif !strings.ContainsAny(token, \"[]\") {\n\t\t\tfor _, part := range strings.Split(token, \",\") {\n\t\t\t\tif part == \"\" || part == \" \" {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tparts = append(parts, part)\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\n\t\tstart := strings.Index(token, \"[\")\n\t\tend := strings.Index(token, \"]\")\n\t\tif end < start {\n\t\t\ts.Fatalf(\"malformed input: ']' appearing before '[' in %q\", token)\n\t\t}\n\n\t\tif len(token[:start]) > 0 { // capture elems before array\n\t\t\ttoken = strings.TrimSuffix(token, \",\")\n\t\t\tfor _, part := range strings.Split(token[:start], \",\") {\n\t\t\t\tif part == \"\" || part == \" \" {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tparts = append(parts, part)\n\t\t\t}\n\t\t}\n\t\tparts = append(parts, token[start:end+1]) // capture array\n\t\ttoken = token[end+1:] // trim processed prefix\n\t}\n\n\tfor i := range parts {\n\t\tparts[i] = strings.TrimSpace(parts[i])\n\t}\n\treturn parts\n}", "func (c DBStoreGetConfigurationPoliciesFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func (f *FlagSet) Args() []string { return f.args }", "func (c DBStoreGetUploadsFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func (c DBStoreGetUploadsFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func MultiBulkValue(commandName string, args ...interface{}) Value {\n\tvals := make([]Value, len(args)+1)\n\tvals[0] = StringValue(commandName)\n\tfor i, arg := range args {\n\t\tif rval, ok := arg.(Value); ok && rval.Type() == BulkString {\n\t\t\tvals[i+1] = 
rval\n\t\t\tcontinue\n\t\t}\n\t\tswitch arg := arg.(type) {\n\t\tdefault:\n\t\t\tvals[i+1] = StringValue(fmt.Sprintf(\"%v\", arg))\n\t\tcase []byte:\n\t\t\tvals[i+1] = StringValue(string(arg))\n\t\tcase string:\n\t\t\tvals[i+1] = StringValue(arg)\n\t\tcase nil:\n\t\t\tvals[i+1] = NullValue()\n\t\t}\n\t}\n\treturn ArrayValue(vals)\n}", "func argsToParams(args []string) map[string]string {\n\tif len(args) == 0 {\n\t\treturn nil\n\t}\n\tparams := make(map[string]string)\n\tfor _, a := range args {\n\t\tif !strings.Contains(a, \"=\") {\n\t\t\tcontinue\n\t\t}\n\t\tparts := strings.Split(a, \"=\")\n\t\t// Ignore any arguments that do not look like parameters\n\t\tif len(parts) != 2 {\n\t\t\tcontinue\n\t\t}\n\t\tparams[parts[0]] = parts[1]\n\t}\n\treturn params\n}", "func formatValuesFilesAsArgs(t *testing.T, valuesFiles []string) []string {\n\targs, err := formatValuesFilesAsArgsE(t, valuesFiles)\n\trequire.NoError(t, err)\n\treturn args\n}", "func (c PipelineAddFuncCall) Args() []interface{} {\n\treturn append([]interface{}{c.Arg0}, c.Arg1...)\n}", "func (c ExtensionStoreListPublishersFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func (parser *Parser) args() ([]*Expr, error) {\n\tparser.trace(\"ARGS\")\n\tdefer parser.untrace()\n\texpr, err := parser.expr()\n\tif err == ErrNoMatch {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tmoreExpr, err := parser.moreArgs()\n\tif err == ErrNoMatch {\n\t\treturn []*Expr{expr}, nil\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\texprs := append([]*Expr{expr}, moreExpr...)\n\treturn exprs, nil\n}", "func (c *Commands) parseArgs(args string) []string {\n\ttokens := strings.Split(args, c.ParamSep)\n\tfor index, token := range tokens {\n\t\ttokens[index] = strings.TrimSpace(token)\n\t}\n\treturn tokens\n}", "func (c AutoIndexingServiceGetIndexesFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func GetParamNames(c domain.CLIContext) []string 
{\n\treturn []string(c.Args())\n}", "func ArgumentsArray(args ...interface{}) *protocol.ArgumentArray {\n\tres, err := protocol.ArgumentArrayFromNatives(args)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\treturn res\n}", "func (c ExtensionStoreListFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func (c ResolverQueryResolverFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func (m MethodData) ArgList() string {\n\tparams := make([]string, len(m.Params))\n\tfor i, p := range m.Params {\n\t\tparams[i] = p.MethodArg()\n\t}\n\treturn strings.Join(params, \", \")\n}", "func (c ReleaseStoreGetArtifactsFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func callValuesI(b bindingInterface, inj Injections, args []interface{}) (ret []reflect.Value) {\n\ttargetArgCount := b.argCount()\n\tret = make([]reflect.Value, targetArgCount)\n\tic := 0 // count of found injections\n\tiai := 0\n\tfor ai := 0; ai < targetArgCount; ai++ {\n\t\tat := b.argType(ai)\n\t\tvar av reflect.Value\n\n\t\t// Check if this parameter needs to be injected\n\t\tif _, ok := b.base().injections[ai]; ok {\n\t\t\tif in, ok := inj[at]; ok { // a object of type at is provided by InvokeI call\n\t\t\t\tav = reflect.ValueOf(in).Convert(at)\n\t\t\t} else {\n\t\t\t\tpanic(fmt.Errorf(\"Injection for type \\\"%s\\\" not found.\", at))\n\t\t\t}\n\n\t\t\tic++ // skip one input param\n\t\t} else {\n\t\t\tif iai >= len(args) {\n\t\t\t\tpanic(fmt.Errorf(\"Invalid parameter count: %d/%d (%d injections applied)\", iai, len(args), ic))\n\t\t\t}\n\t\t\tav = reflect.ValueOf(args[iai]) // Value object of the current parameter\n\t\t\tiai++ //proceed to next input argument\n\t\t}\n\n\t\t// Assign final value to final call vector.\n\t\tret[ai] = b.base().container.convertParameterValue(av, at)\n\t}\n\n\tif targetArgCount != (iai + ic) {\n\t\tpanic(fmt.Errorf(\"Argument count does not match for method \\\"%s\\\". %d/%d. 
(%d injections applied)\", b.base().elemName, targetArgCount, (iai + ic), ic))\n\t}\n\n\treturn\n}", "func Args() []string { return CommandLine.args }", "func (c ResolverGetUploadByIDFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func GetParameters(p []string) []*cloudformation.Parameter {\n\tvar parameters []*cloudformation.Parameter\n\tfor _, val := range p {\n\t\tif strings.Contains(val, \"=\") {\n\t\t\tstrKeyPair := strings.Split(val, \"=\")\n\t\t\tif strings.Compare(strKeyPair[1], \"\") == 0 || strings.Compare(strKeyPair[1], \"nil\") == 0 {\n\t\t\t\tlog.Warn(\"Skipping blank parameter [%v]\", strKeyPair[0])\n\t\t\t\tcontinue\n\t\t\t} else {\n\t\t\t\tparameters = append(parameters, &cloudformation.Parameter{\n\t\t\t\t\tParameterKey: &strKeyPair[0],\n\t\t\t\t\tParameterValue: &strKeyPair[1],\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\t}\n\treturn parameters\n}", "func Args() []string\t{ return os.Args[flags.first_arg:len(os.Args)] }", "func (c JobNameFuncCall) Args() []interface{} {\n\treturn []interface{}{}\n}", "func ConvertToGoFuncArgs(args []Object) ([]interface{}, error) {\n\tfuncArgs := []interface{}{}\n\n\tfor _, arg := range args {\n\t\tswitch v := arg.(type) {\n\t\tcase *IntegerObject:\n\t\t\tswitch v.flag {\n\t\t\tcase f64:\n\t\t\t\tfuncArgs = append(funcArgs, float64(v.value))\n\t\t\t\tcontinue\n\t\t\tcase f32:\n\t\t\t\tfuncArgs = append(funcArgs, float32(v.value))\n\t\t\t\tcontinue\n\t\t\tcase ui64:\n\t\t\t\tfuncArgs = append(funcArgs, uint64(v.value))\n\t\t\t\tcontinue\n\t\t\tcase ui32:\n\t\t\t\tfuncArgs = append(funcArgs, uint32(v.value))\n\t\t\t\tcontinue\n\t\t\tcase ui16:\n\t\t\t\tfuncArgs = append(funcArgs, uint16(v.value))\n\t\t\t\tcontinue\n\t\t\tcase ui8:\n\t\t\t\tfuncArgs = append(funcArgs, uint8(v.value))\n\t\t\t\tcontinue\n\t\t\tcase i64:\n\t\t\t\tfuncArgs = append(funcArgs, int64(v.value))\n\t\t\t\tcontinue\n\t\t\tcase i32:\n\t\t\t\tfuncArgs = append(funcArgs, int32(v.value))\n\t\t\t\tcontinue\n\t\t\tcase 
i16:\n\t\t\t\tfuncArgs = append(funcArgs, int16(v.value))\n\t\t\t\tcontinue\n\t\t\tcase i8:\n\t\t\t\tfuncArgs = append(funcArgs, int8(v.value))\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\n\t\tfuncArgs = append(funcArgs, arg.Value())\n\t}\n\n\treturn funcArgs, nil\n}", "func parseArgsForParams(args []string) ([]string, []string, error) {\n\tvar paramArgs []string\n\tvar err error\n\n\ti := 0\n\n\t// iterate over the list of input arguments\n\t// append key value pair of --param <parameter name> <parameter value> as a string\n\t// in case of --param-file, read parameter file, append each JSON \"key: value\" pair as a string\n\tfor i < len(args) {\n\t\t// when arg is -P or --param-file, open and read the specified JSON file\n\t\tif args[i] == SHORT_CMD+FLAG_PARAMFILE_SHORT || args[i] == LONG_CMD+FLAG_PARAMFILE {\n\t\t\t// Command line parser library Cobra, assigns value of --param-file to utils.Flags.ParamFile\n\t\t\t// but at this point of execution, we still don't have utils.Flags.ParamFile assigned any value\n\t\t\t// and that's the reason, we have to explicitly parse the argument list to get the file name\n\t\t\tparamArgs, args, err = getValueFromArgs(args, i, paramArgs)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tfilename := paramArgs[len(paramArgs)-1]\n\t\t\t// drop the argument (--param-file) and its value from the argument list after retrieving filename\n\t\t\t// read file content as a single string and append it to the list of params\n\t\t\tfile, readErr := ioutil.ReadFile(filename)\n\t\t\tif readErr != nil {\n\t\t\t\terr = wskderrors.NewCommandError(FLAG_PARAMFILE+\"/\"+FLAG_PARAMFILE_SHORT,\n\t\t\t\t\twski18n.T(wski18n.ID_ERR_INVALID_PARAM_FILE_X_file_X,\n\t\t\t\t\t\tmap[string]interface{}{\n\t\t\t\t\t\t\twski18n.KEY_PATH: filename,\n\t\t\t\t\t\t\twski18n.KEY_ARG: FLAG_PARAMFILE + \"/\" + FLAG_PARAMFILE_SHORT,\n\t\t\t\t\t\t\twski18n.KEY_ERR: readErr}))\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tparamArgs[len(paramArgs)-1] = 
string(file)\n\t\t\t// --param can appear multiple times in a single invocation of whisk deploy\n\t\t\t// for example, wskdeploy -m manifest.yaml --param key1 value1 --param key2 value2\n\t\t\t// parse key value map for each --param from the argument list\n\t\t\t// drop each --param and key value map from the argument list after reading the map\n\t\t\t// append key value as a string to the list of params\n\t\t} else if args[i] == LONG_CMD+FLAG_PARAM {\n\t\t\tparamArgs, args, err = getKeyValueArgs(args, i, paramArgs)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t} else {\n\t\t\ti++\n\t\t}\n\t}\n\treturn args, paramArgs, nil\n}", "func CmdlineArgs() ([]CmdlineArg, error) {\n\tlines, err := ReadFileIntoLines(cmdlineFilePath)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error reading the file %s, %v\", cmdlineFilePath, err)\n\t}\n\tif len(lines) < 1 {\n\t\treturn nil, fmt.Errorf(\"no lines are retured\")\n\t}\n\tcmdlineArgs := strings.FieldsFunc(lines[0], splitAfterSpace)\n\tvar result = make([]CmdlineArg, 0, len(cmdlineArgs))\n\t// for commandline only one line is returned.\n\tfor _, words := range cmdlineArgs {\n\t\t// Ignore the keys that start with double quotes\n\t\tif strings.Index(words, \"\\\"\") == 0 {\n\t\t\tcontinue\n\t\t}\n\t\ttokens := strings.Split(words, \"=\")\n\t\tif len(tokens) < 2 {\n\t\t\tvar stats = CmdlineArg{\n\t\t\t\tKey: tokens[0],\n\t\t\t}\n\t\t\tresult = append(result, stats)\n\t\t} else {\n\t\t\t//remove quotes in the values\n\t\t\ttrimmedValue := strings.Trim(tokens[1], \"\\\"'\")\n\t\t\tvar stats = CmdlineArg{\n\t\t\t\tKey: tokens[0],\n\t\t\t\tValue: trimmedValue,\n\t\t\t}\n\t\t\tresult = append(result, stats)\n\t\t}\n\t}\n\treturn result, nil\n}", "func getValuesArgsE(t *testing.T, options *helm.Options, args ...string) ([]string, error) {\n\targs = append(args, formatSetValuesAsArgs(options.SetValues, \"--set\")...)\n\targs = append(args, formatSetValuesAsArgs(options.SetStrValues, 
\"--set-string\")...)\n\n\tvaluesFilesArgs, err := formatValuesFilesAsArgsE(t, options.ValuesFiles)\n\tif err != nil {\n\t\treturn args, errors.WithStackTrace(err)\n\t}\n\targs = append(args, valuesFilesArgs...)\n\n\tsetFilesArgs, err := formatSetFilesAsArgsE(t, options.SetFiles)\n\tif err != nil {\n\t\treturn args, errors.WithStackTrace(err)\n\t}\n\targs = append(args, setFilesArgs...)\n\treturn args, nil\n}", "func Args(args string) []string {\n\tvar backtickMatch []string\n\n\tif strings.Contains(args, \"`\") {\n\t\tbacktickMatch = argsPattern.FindStringSubmatch(args)\n\t\targs = argsPattern.ReplaceAllString(args, \"---\")\n\t}\n\ts := strings.Split(args, \" \")\n\n\tif len(backtickMatch) > 0 {\n\t\tfor i, v := range s {\n\t\t\tif v == \"---\" {\n\t\t\t\ts[i] = backtickMatch[0]\n\t\t\t}\n\t\t}\n\t}\n\n\treturn s\n}", "func namedValueToValue(named []driver.NamedValue) ([]driver.Value, error) {\n\tdargs := make([]driver.Value, len(named))\n\tfor n, param := range named {\n\t\tif len(param.Name) > 0 {\n\t\t\treturn nil, errors.New(\"sql: driver does not support the use of Named Parameters\")\n\t\t}\n\t\tdargs[n] = param.Value\n\t}\n\treturn dargs, nil\n}", "func namedValueToValue(named []driver.NamedValue) ([]driver.Value, error) {\n\tdargs := make([]driver.Value, len(named))\n\tfor n, param := range named {\n\t\tif len(param.Name) > 0 {\n\t\t\treturn nil, errors.New(\"sql: driver does not support the use of Named Parameters\")\n\t\t}\n\t\tdargs[n] = param.Value\n\t}\n\treturn dargs, nil\n}", "func namedValueToValue(named []driver.NamedValue) ([]driver.Value, error) {\n\tdargs := make([]driver.Value, len(named))\n\tfor n, param := range named {\n\t\tif len(param.Name) > 0 {\n\t\t\treturn nil, errors.New(\"sql: driver does not support the use of Named Parameters\")\n\t\t}\n\t\tdargs[n] = param.Value\n\t}\n\treturn dargs, nil\n}", "func namedValueToValue(named []driver.NamedValue) ([]driver.Value, error) {\n\tdargs := make([]driver.Value, len(named))\n\tfor n, param 
:= range named {\n\t\tif len(param.Name) > 0 {\n\t\t\treturn nil, errors.New(\"sql: driver does not support the use of Named Parameters\")\n\t\t}\n\t\tdargs[n] = param.Value\n\t}\n\treturn dargs, nil\n}", "func (args *Args) list() []*Arg {\n\treturn args.items\n}", "func Valueize(values ...reflect.Value) []reflect.Value {\n\treturn values\n}", "func Args(cmd, prefix, sep string) []string {\n\targs := make([]string, 0)\n\tfor _, part := range strings.SplitN(cmd, sep, -1) {\n\t\tif part != prefix && strings.HasPrefix(part, prefix) {\n\t\t\targs = append(args, part)\n\t\t}\n\t}\n\treturn args\n}", "func (c ExtensionStoreGetPublisherFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func parseArgs(args2parse string) ([]*rspec.Arg, error) {\n\tvar Args []*rspec.Arg\n\targstrslice := strings.Split(args2parse, \",\")\n\tfor _, argstr := range argstrslice {\n\t\targs := strings.Split(argstr, \"/\")\n\t\tif len(args) == 4 {\n\t\t\tindex, err := strconv.Atoi(args[0])\n\t\t\tvalue, err := strconv.Atoi(args[1])\n\t\t\tvalue2, err := strconv.Atoi(args[2])\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tswitch args[3] {\n\t\t\tcase \"\":\n\t\t\tcase \"SCMP_CMP_NE\":\n\t\t\tcase \"SCMP_CMP_LT\":\n\t\t\tcase \"SCMP_CMP_LE\":\n\t\t\tcase \"SCMP_CMP_EQ\":\n\t\t\tcase \"SCMP_CMP_GE\":\n\t\t\tcase \"SCMP_CMP_GT\":\n\t\t\tcase \"SCMP_CMP_MASKED_EQ\":\n\t\t\tdefault:\n\t\t\t\treturn nil, fmt.Errorf(\"seccomp-sysctl args must be empty or one of SCMP_CMP_NE|SCMP_CMP_LT|SCMP_CMP_LE|SCMP_CMP_EQ|SCMP_CMP_GE|SCMP_CMP_GT|SCMP_CMP_MASKED_EQ\")\n\t\t\t}\n\t\t\top := rspec.Operator(args[3])\n\t\t\tArg := rspec.Arg{\n\t\t\t\tIndex: uint(index),\n\t\t\t\tValue: uint64(value),\n\t\t\t\tValueTwo: uint64(value2),\n\t\t\t\tOp: op,\n\t\t\t}\n\t\t\tArgs = append(Args, &Arg)\n\t\t} else {\n\t\t\treturn nil, fmt.Errorf(\"seccomp-sysctl args error: %s\", argstr)\n\t\t}\n\t}\n\treturn Args, nil\n}", "func tokenizeArgs(args string) []string {\n\treturn 
tokenizeRegex.FindAllString(args, -1)\n}", "func (j *Job) Args() []interface{} {\n\targs := make([]interface{}, len(j.args))\n\tfor i, arg := range j.args {\n\t\targs[i] = arg.Interface()\n\t}\n\treturn args\n}", "func varArgs(nums ...int) {\n for _, n := range nums {\n fmt.Println(n)\n }\n}", "func (sa StaticOption) OptionArgs() ([]string, error) { return []string{sa.value}, nil }", "func NewValues(kvs ...string) Values {\n\tif len(kvs)%2 == 1 {\n\t\tpanic(\"twister: even number args required for NewParam\")\n\t}\n\tm := make(Values)\n\tfor i := 0; i < len(kvs); i += 2 {\n\t\tm.Add(kvs[i], kvs[i+1])\n\t}\n\treturn m\n}", "func stringList(args ...interface{}) []string {\n\tvar x []string\n\tfor _, arg := range args {\n\t\tswitch arg := arg.(type) {\n\t\tcase []string:\n\t\t\tx = append(x, arg...)\n\t\tcase string:\n\t\t\tx = append(x, arg)\n\t\tdefault:\n\t\t\tpanic(\"stringList: invalid argument\")\n\t\t}\n\t}\n\treturn x\n}", "func getMultiValue(contentValueField string, as *args.Store) (result []string) {\n\t// No input? No result!\n\tif !utils.IsSet(contentValueField) {\n\t\treturn nil\n\t}\n\n\t// check whether a parameter reference was provided, i.e. 
something like \"param:<name>\"\n\tparamName := regexParamValue.FindStringSubmatch(contentValueField)\n\tif len(paramName) > 0 {\n\t\tutils.Assert(len(paramName) == 2, \"Should contain the matching text plus a single capturing group\")\n\n\t\targArray := as.GetArray(paramName[1])\n\t\tif len(argArray) > 0 {\n\t\t\treturn argArray\n\t\t}\n\t\treturn nil\n\t}\n\n\t// else assume that provided value was a static text\n\treturn []string{contentValueField}\n}", "func ParseLiteralArgumentList(\n\targumentList string,\n\tparameterTypes []sema.Type,\n\tinter *interpreter.Interpreter,\n) (\n\t[]cadence.Value,\n\terror,\n) {\n\tcode := []byte(argumentList)\n\targuments, errs := parser.ParseArgumentList(inter, code, parser.Config{})\n\tif len(errs) > 0 {\n\t\treturn nil, parser.Error{\n\t\t\tErrors: errs,\n\t\t}\n\t}\n\n\targumentCount := len(arguments)\n\tparameterCount := len(parameterTypes)\n\n\tif argumentCount != parameterCount {\n\t\treturn nil, parser.NewUnpositionedSyntaxError(\n\t\t\t\"invalid number of arguments: got %d, expected %d\",\n\t\t\targumentCount,\n\t\t\tparameterCount,\n\t\t)\n\t}\n\n\tresult := make([]cadence.Value, argumentCount)\n\n\tfor i, argument := range arguments {\n\t\tparameterType := parameterTypes[i]\n\t\tvalue, err := LiteralValue(inter, argument.Expression, parameterType)\n\t\tif err != nil {\n\t\t\treturn nil, parser.NewSyntaxError(\n\t\t\t\targument.Expression.StartPosition(),\n\t\t\t\t\"invalid argument at index %d: %v\", i, err,\n\t\t\t)\n\t\t}\n\t\tresult[i] = value\n\t}\n\n\treturn result, nil\n}", "func (ss Sources) Parse(cmp *mcmp.Component) ([]ParamValue, error) {\n\tvar pvs []ParamValue\n\tfor _, s := range ss {\n\t\tvar innerPVs []ParamValue\n\t\tvar err error\n\t\tif innerPVs, err = s.Parse(cmp); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpvs = append(pvs, innerPVs...)\n\t}\n\treturn pvs, nil\n}", "func (c UploadServiceGetAuditLogsForUploadFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func 
getValues(ctx context.Context) []string {\n\tmethod := rkgrpcctx.GetMethodName(ctx)\n\trpcType := rkgrpcctx.GetRpcType(ctx)\n\terr := rkgrpcctx.GetError(ctx)\n\n\tentryName, entryType, resCode := \"\", \"\", \"\"\n\tif set := getOptionSet(ctx); set != nil {\n\t\tentryName = set.EntryName\n\t\tentryType = set.EntryType\n\t\tresCode = status.Code(err).String()\n\t}\n\n\tgrpcService, grpcMethod := rkgrpcinter.GetGrpcInfo(method)\n\tgwMethod, gwPath, _, _ := rkgrpcinter.GetGwInfo(rkgrpcctx.GetIncomingHeaders(ctx))\n\n\tvalues := []string{\n\t\tentryName,\n\t\tentryType,\n\t\trkgrpcinter.Realm.String,\n\t\trkgrpcinter.Region.String,\n\t\trkgrpcinter.AZ.String,\n\t\trkgrpcinter.Domain.String,\n\t\trkgrpcinter.LocalHostname.String,\n\t\trkentry.GlobalAppCtx.GetAppInfoEntry().Version,\n\t\trkentry.GlobalAppCtx.GetAppInfoEntry().AppName,\n\t\tgrpcService,\n\t\tgrpcMethod,\n\t\tgwMethod,\n\t\tgwPath,\n\t\trpcType,\n\t\tresCode,\n\t}\n\n\treturn values\n}", "func (c ResolverIndexConfigurationFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func (n *FnInvNode) Args() []Expr { return n.args }", "func addValuesInArray(args []interface {})(float64){\n if args != nil {\n var totalVal float64 = 0\n for i := range args {\n val, err := strconv.ParseFloat(args[i].(string), 64)\n if err == nil {\n totalVal = totalVal + val\n }\n }\n return totalVal\n }\n return 0.0\n}", "func queryParamValuesToMap(values []string) []interface{} {\n\tv := make([]interface{}, len(values))\n\tfor i, value := range values {\n\t\tv[i] = value\n\t}\n\treturn v\n}", "func (c DBStoreSelectRepositoriesForRetentionScanFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1, c.Arg2}\n}", "func (c DBStoreUpdateUploadRetentionFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1, c.Arg2}\n}", "func (c ExtensionStoreGetByUUIDFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func ParseArgs(cfg any, args []string, allArgs 
map[string]reflect.Value, errNotFound bool) ([]string, error) {\n\tvar nonFlags []string\n\tvar err error\n\tfor len(args) > 0 {\n\t\ts := args[0]\n\t\targs = args[1:]\n\t\tif len(s) == 0 || s[0] != '-' || len(s) == 1 {\n\t\t\tnonFlags = append(nonFlags, s)\n\t\t\tcontinue\n\t\t}\n\n\t\tif s[1] == '-' && len(s) == 2 { // \"--\" terminates the flags\n\t\t\t// f.argsLenAtDash = len(f.args)\n\t\t\tnonFlags = append(nonFlags, args...)\n\t\t\tbreak\n\t\t}\n\t\targs, err = ParseArg(s, args, allArgs, errNotFound)\n\t\tif err != nil {\n\t\t\treturn nonFlags, err\n\t\t}\n\t}\n\treturn nonFlags, nil\n}", "func (c ReleaseStoreGetLatestBatchFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1, c.Arg2, c.Arg3}\n}", "func genArgs(optionMap map[string]string) []string {\n\toptions := []string{}\n\tfor k, v := range optionMap {\n\t\tif v != \"\" {\n\t\t\tk = fmt.Sprintf(\"%s=%s\", k, v)\n\t\t}\n\t\toptions = append(options, k)\n\t}\n\treturn options\n}", "func (c ExtensionStoreGetByExtensionIDFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func PluginArgsParse(rawArgs string) []string {\n\tss := strings.Split(rawArgs, \"\\\\,\")\n\n\tout := [][]string{}\n\tfor _, s := range ss {\n\t\tcleanArgs := []string{}\n\t\tfor _, arg := range strings.Split(s, \",\") {\n\t\t\targ = strings.Trim(arg, \" \")\n\t\t\targ = strings.Trim(arg, \"\\\"\")\n\t\t\targ = strings.Trim(arg, \"'\")\n\t\t\tcleanArgs = append(cleanArgs, arg)\n\t\t}\n\t\tout = append(out, cleanArgs)\n\t}\n\n\tret := []string{}\n\ttail := \"\"\n\n\tfor _, x := range out {\n\t\tfor j, y := range x {\n\t\t\tif j == 0 {\n\t\t\t\tif tail != \"\" {\n\t\t\t\t\tret = append(ret, tail+\",\"+y)\n\t\t\t\t\ttail = \"\"\n\t\t\t\t} else {\n\t\t\t\t\tret = append(ret, y)\n\t\t\t\t}\n\t\t\t} else if j == len(x)-1 {\n\t\t\t\ttail = y\n\t\t\t} else {\n\t\t\t\tret = append(ret, y)\n\t\t\t}\n\t\t}\n\t}\n\n\tif tail != \"\" {\n\t\tret = append(ret, tail)\n\t}\n\n\treturn ret\n}", "func 
formatSetValuesAsArgs(setValues map[string]string, flag string) []string {\n\targs := []string{}\n\n\t// To make it easier to test, go through the keys in sorted order\n\tkeys := collections.Keys(setValues)\n\tfor _, key := range keys {\n\t\tvalue := setValues[key]\n\t\targValue := fmt.Sprintf(\"%s=%s\", key, value)\n\t\targs = append(args, flag, argValue)\n\t}\n\n\treturn args\n}", "func (c ResolverQueueAutoIndexJobForRepoFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}", "func (c ConnDoFuncCall) Args() []interface{} {\n\treturn append([]interface{}{c.Arg0}, c.Arg1...)\n}", "func ParseParam(s string, inputs []abi.Argument) ([]interface{}, error) {\n\tvar args map[string]interface{}\n\tvar param []interface{}\n\terr := json.Unmarshal([]byte(s), &args)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfor _, v := range inputs {\n\t\tparam = util.ABIchangeType(param, args[v.Name], v.Type.String())\n\t\t//param = append(param, args[v.Name])\n\t}\n\n\treturn param, nil\n}", "func (c ConnSendFuncCall) Args() []interface{} {\n\treturn append([]interface{}{c.Arg0}, c.Arg1...)\n}", "func (p *parser) params() []Value {\n\tparams := make([]Value, 0, 4)\n\ttok := p.Scan()\n\tfor tok != scanner.EOF {\n\t\tswitch tok {\n\t\tcase '(':\n\t\t\tparams = append(params, p.command(p.Scan()))\n\t\t\tp.demands(')')\n\t\t\ttok = p.tok\n\t\t\tcontinue\n\t\tcase scanner.String, scanner.RawString:\n\t\t\tparams = append(params, strings.Trim(p.TokenText(), \"\\\"`\"))\n\t\tcase scanner.Int:\n\t\t\tif n, err := strconv.ParseInt(p.TokenText(), 0, 32); err != nil {\n\t\t\t\tp.parseError(\"integer\")\n\t\t\t\tparams = append(params, nil)\n\t\t\t} else {\n\t\t\t\tif p.negative {\n\t\t\t\t\tparams = append(params, -int(n))\n\t\t\t\t\tp.negative = false\n\t\t\t\t} else {\n\t\t\t\t\tparams = append(params, int(n))\n\t\t\t\t}\n\t\t\t}\n\t\tcase scanner.Float:\n\t\t\tif n, err := strconv.ParseFloat(p.TokenText(), 64); err != nil 
{\n\t\t\t\tp.parseError(\"float\")\n\t\t\t\tparams = append(params, nil)\n\t\t\t} else {\n\t\t\t\tif p.negative {\n\t\t\t\t\tparams = append(params, -float64(n))\n\t\t\t\t\tp.negative = false\n\t\t\t\t} else {\n\t\t\t\t\tparams = append(params, float64(n))\n\t\t\t\t}\n\t\t\t}\n\t\tcase ')':\n\t\t\treturn params\n\t\tcase '-':\n\t\tdefault:\n\t\t\tp.parseError(\"parameter\")\n\t\t}\n\t\ttok = p.Scan()\n\t}\n\treturn params\n}" ]
[ "0.66049415", "0.64075565", "0.63898826", "0.63461345", "0.6190019", "0.61842114", "0.61597705", "0.6145489", "0.61268765", "0.6112695", "0.60974354", "0.6086292", "0.6066323", "0.6032512", "0.6027523", "0.60196614", "0.60185295", "0.60125184", "0.59949327", "0.5989697", "0.58802485", "0.5818166", "0.5767701", "0.5759723", "0.574825", "0.57451147", "0.57417613", "0.57012", "0.5700795", "0.5697844", "0.5694399", "0.565833", "0.565098", "0.5649411", "0.5640805", "0.5637917", "0.5637917", "0.56373405", "0.56348896", "0.5616431", "0.559716", "0.55730563", "0.55662215", "0.55654573", "0.55549073", "0.5554621", "0.55531967", "0.5543823", "0.5539666", "0.55392754", "0.55362564", "0.55203485", "0.5519396", "0.5507869", "0.5486572", "0.5479104", "0.54775804", "0.54690576", "0.5465059", "0.54606897", "0.5460118", "0.5458888", "0.5452093", "0.5452093", "0.5452093", "0.5452093", "0.5422829", "0.5420443", "0.54200196", "0.5415837", "0.53984976", "0.53945565", "0.53900695", "0.5383165", "0.5355197", "0.5353844", "0.53488874", "0.53382015", "0.5322286", "0.5321916", "0.53190446", "0.5316436", "0.531612", "0.53139985", "0.5307032", "0.5303381", "0.52998227", "0.5295949", "0.529366", "0.5293218", "0.5292091", "0.52862924", "0.52860653", "0.52803123", "0.52800363", "0.52791536", "0.52651817", "0.5262993", "0.5252944", "0.52468514" ]
0.5878419
21
Deprecated: Use MetricSpec.ProtoReflect.Descriptor instead.
func (*MetricSpec) Descriptor() ([]byte, []int) { return file_api_adaptive_load_metric_spec_proto_rawDescGZIP(), []int{0} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*Metric) Descriptor() ([]byte, []int) {\n\treturn file_go_chromium_org_luci_analysis_proto_v1_metrics_proto_rawDescGZIP(), []int{2}\n}", "func (*TargetMetrics_Metric) Descriptor() ([]byte, []int) {\n\treturn file_asgt_type_target_metrics_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*Metrics) Descriptor() ([]byte, []int) {\n\treturn file_grpc_proto_rawDescGZIP(), []int{0}\n}", "func (*KafkaMeter) Descriptor() ([]byte, []int) {\n\treturn file_pkg_sinks_plugin_proto_metrics_proto_rawDescGZIP(), []int{0}\n}", "func (*CLRMetric) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{1}\n}", "func (*Metric) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{0}\n}", "func (*TargetMetrics) Descriptor() ([]byte, []int) {\n\treturn file_asgt_type_target_metrics_proto_rawDescGZIP(), []int{0}\n}", "func (*ListMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_go_chromium_org_luci_analysis_proto_v1_metrics_proto_rawDescGZIP(), []int{0}\n}", "func (*MetricImplementation) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{6}\n}", "func (*WriteKafkaMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_sinks_plugin_proto_metrics_proto_rawDescGZIP(), []int{2}\n}", "func (*Metric) Descriptor() ([]byte, []int) {\n\treturn file_tlogpb_tlog_proto_rawDescGZIP(), []int{4}\n}", "func (*OnTargetAudienceMetrics) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{19}\n}", "func (*FeedbackMetrics) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{12}\n}", "func (Metric_Scale) EnumDescriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*MetricInfo) Descriptor() ([]byte, []int) {\n\treturn file_msgType_proto_rawDescGZIP(), []int{15}\n}", "func 
(*RateLimitingSampler) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_external_envoy_config_trace_v3_opencensus_proto_rawDescGZIP(), []int{4}\n}", "func (*ListMetricsResponse) Descriptor() ([]byte, []int) {\n\treturn file_go_chromium_org_luci_analysis_proto_v1_metrics_proto_rawDescGZIP(), []int{1}\n}", "func (*MetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{11}\n}", "func (*MetricsReport) Descriptor() ([]byte, []int) {\n\treturn file_determined_trial_v1_trial_proto_rawDescGZIP(), []int{9}\n}", "func (*ClrGC) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{2}\n}", "func (*MetricData) Descriptor() ([]byte, []int) {\n\treturn file_alameda_api_v1alpha1_datahub_common_metrics_proto_rawDescGZIP(), []int{1}\n}", "func (*MetricsServiceRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{18}\n}", "func (*ClrThread) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{3}\n}", "func (*TrialProfilerMetricLabels) Descriptor() ([]byte, []int) {\n\treturn file_determined_trial_v1_trial_proto_rawDescGZIP(), []int{3}\n}", "func (*TimerFamilySpec) Descriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{17}\n}", "func (*GetMetricsInfoResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{44}\n}", "func (*BaseMetrics) Descriptor() ([]byte, []int) {\n\treturn file_mitre_cvss_v3_cvss_proto_rawDescGZIP(), []int{0}\n}", "func (ResourceName) EnumDescriptor() ([]byte, []int) {\n\treturn file_alameda_api_v1alpha1_datahub_common_metrics_proto_rawDescGZIP(), []int{1}\n}", "func (*CLRMetricCollection) Descriptor() ([]byte, []int) {\n\treturn 
file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{0}\n}", "func (*MetricSpecWithThreshold) Descriptor() ([]byte, []int) {\n\treturn file_api_adaptive_load_metric_spec_proto_rawDescGZIP(), []int{2}\n}", "func (*GetModelVersionMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{94}\n}", "func ProtoFromDescriptor(d protoreflect.Descriptor) proto.Message {\n\tswitch d := d.(type) {\n\tcase protoreflect.FileDescriptor:\n\t\treturn ProtoFromFileDescriptor(d)\n\tcase protoreflect.MessageDescriptor:\n\t\treturn ProtoFromMessageDescriptor(d)\n\tcase protoreflect.FieldDescriptor:\n\t\treturn ProtoFromFieldDescriptor(d)\n\tcase protoreflect.OneofDescriptor:\n\t\treturn ProtoFromOneofDescriptor(d)\n\tcase protoreflect.EnumDescriptor:\n\t\treturn ProtoFromEnumDescriptor(d)\n\tcase protoreflect.EnumValueDescriptor:\n\t\treturn ProtoFromEnumValueDescriptor(d)\n\tcase protoreflect.ServiceDescriptor:\n\t\treturn ProtoFromServiceDescriptor(d)\n\tcase protoreflect.MethodDescriptor:\n\t\treturn ProtoFromMethodDescriptor(d)\n\tdefault:\n\t\t// WTF??\n\t\tif res, ok := d.(DescriptorProtoWrapper); ok {\n\t\t\treturn res.AsProto()\n\t\t}\n\t\treturn nil\n\t}\n}", "func (*CMsgClientToGCPlayerStatsRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{143}\n}", "func (MetricType) EnumDescriptor() ([]byte, []int) {\n\treturn file_alameda_api_v1alpha1_datahub_common_metrics_proto_rawDescGZIP(), []int{0}\n}", "func (*MetricsD) Descriptor() ([]byte, []int) {\n\treturn file_orc8r_protos_mconfig_mconfigs_proto_rawDescGZIP(), []int{7}\n}", "func (*AllowedValues) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{3}\n}", "func (TrialProfilerMetricLabels_ProfilerMetricType) EnumDescriptor() ([]byte, []int) {\n\treturn file_determined_trial_v1_trial_proto_rawDescGZIP(), []int{3, 0}\n}", "func (*MetricsResponse) Descriptor() ([]byte, 
[]int) {\n\treturn file_protobuf_index_proto_rawDescGZIP(), []int{25}\n}", "func (ms Metric) MetricDescriptor() MetricDescriptor {\n\treturn newMetricDescriptor(&(*ms.orig).MetricDescriptor)\n}", "func (*CMatchTeamTimedStats) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{10}\n}", "func (*TrialMetrics) Descriptor() ([]byte, []int) {\n\treturn file_determined_trial_v1_trial_proto_rawDescGZIP(), []int{8}\n}", "func (*KafkaGauge) Descriptor() ([]byte, []int) {\n\treturn file_pkg_sinks_plugin_proto_metrics_proto_rawDescGZIP(), []int{1}\n}", "func (*MetricLimit) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_quota_quota_proto_rawDescGZIP(), []int{1}\n}", "func (*Deprecation) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{8}\n}", "func (*TrialSourceInfoMetric) Descriptor() ([]byte, []int) {\n\treturn file_determined_trial_v1_trial_proto_rawDescGZIP(), []int{11}\n}", "func (*RateLimitingSampler) Descriptor() ([]byte, []int) {\n\treturn file_opencensus_proto_trace_v1_trace_config_proto_rawDescGZIP(), []int{3}\n}", "func (*GC) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_JVMMetric_proto_rawDescGZIP(), []int{4}\n}", "func (StandardPTransforms_DeprecatedPrimitives) EnumDescriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{4, 1}\n}", "func (MetricsType) EnumDescriptor() ([]byte, []int) {\n\treturn file_proto_enums_provider_proto_rawDescGZIP(), []int{1}\n}", "func (*WatchLimitsRequest) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_limit_service_proto_rawDescGZIP(), []int{7}\n}", "func (*MonitoredResourceDescriptor) Descriptor() ([]byte, []int) {\n\treturn edgelq_monitoring_proto_v3_monitored_resource_descriptor_proto_rawDescGZIP(), []int{0}\n}", "func (*Performance) Descriptor() ([]byte, []int) {\n\treturn file_commissionService_proto_rawDescGZIP(), 
[]int{2}\n}", "func (*Message6024) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{26}\n}", "func (*TimerInfo) Descriptor() ([]byte, []int) {\n\treturn file_grpc_proto_rawDescGZIP(), []int{0}\n}", "func (*ConstantSampler) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_external_envoy_config_trace_v3_opencensus_proto_rawDescGZIP(), []int{3}\n}", "func (*PlanChange_Removed) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0, 3}\n}", "func (*MetricConfiguration) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{5}\n}", "func (*QuotaMetric) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_quota_quota_proto_rawDescGZIP(), []int{0}\n}", "func (*PostModelVersionMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{93}\n}", "func (*AddPeerRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*DatasetRequest) Descriptor() ([]byte, []int) {\n\treturn file_internal_proto_metric_proto_rawDescGZIP(), []int{0}\n}", "func (*TelemetryParams) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{62}\n}", "func (*Order) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{4}\n}", "func (*PlanChange) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0}\n}", "func (*Range) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{1}\n}", "func (*DurationConst) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{2}\n}", "func (*Sample) Descriptor() ([]byte, []int) 
{\n\treturn file_alameda_api_v1alpha1_datahub_common_metrics_proto_rawDescGZIP(), []int{0}\n}", "func ProtoFromMethodDescriptor(d protoreflect.MethodDescriptor) *descriptorpb.MethodDescriptorProto {\n\ttype canProto interface {\n\t\tMethodDescriptorProto() *descriptorpb.MethodDescriptorProto\n\t}\n\tif res, ok := d.(canProto); ok {\n\t\treturn res.MethodDescriptorProto()\n\t}\n\tif res, ok := d.(DescriptorProtoWrapper); ok {\n\t\tif md, ok := res.AsProto().(*descriptorpb.MethodDescriptorProto); ok {\n\t\t\treturn md\n\t\t}\n\t}\n\treturn protodesc.ToMethodDescriptorProto(d)\n}", "func (*CMatchPlayerTimedStats) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{9}\n}", "func (*MetricsRange) Descriptor() ([]byte, []int) {\n\treturn file_determined_project_v1_project_proto_rawDescGZIP(), []int{5}\n}", "func (*TelemetryExpectation) Descriptor() ([]byte, []int) {\n\treturn file_testvector_tv_proto_rawDescGZIP(), []int{8}\n}", "func (*WatchLimitRequest) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_limit_service_proto_rawDescGZIP(), []int{5}\n}", "func (*TrialProfilerMetricsBatch) Descriptor() ([]byte, []int) {\n\treturn file_determined_trial_v1_trial_proto_rawDescGZIP(), []int{4}\n}", "func (*DurationLT) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{5}\n}", "func (*ThresholdSpec) Descriptor() ([]byte, []int) {\n\treturn file_api_adaptive_load_metric_spec_proto_rawDescGZIP(), []int{1}\n}", "func (*BackfilledMetrics_Metadata) Descriptor() ([]byte, []int) {\n\treturn file_BackfillMetrics_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*DurationGT) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{7}\n}", "func (*Telemetry) Descriptor() ([]byte, []int) {\n\treturn file_huawei_telemetry_proto_rawDescGZIP(), []int{0}\n}", "func (*PatchCollectorsRequest) Descriptor() ([]byte, 
[]int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{161}\n}", "func (*Memory) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_JVMMetric_proto_rawDescGZIP(), []int{2}\n}", "func (*LivenessProbeCF) Descriptor() ([]byte, []int) {\n\treturn file_pkg_kascfg_kascfg_proto_rawDescGZIP(), []int{13}\n}", "func (*Message12796) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{1}\n}", "func (*UpdateTelemetryReportedRequest) Descriptor() ([]byte, []int) {\n\treturn file_external_applications_applications_proto_rawDescGZIP(), []int{29}\n}", "func (*Timing) Descriptor() ([]byte, []int) {\n\treturn file_proto_google_fhir_proto_stu3_datatypes_proto_rawDescGZIP(), []int{40}\n}", "func LegacyLoadMessageDesc(t reflect.Type) protoreflect.MessageDescriptor {\n\treturn legacyLoadMessageDesc(t, \"\")\n}", "func (*Dist) Descriptor() ([]byte, []int) {\n\treturn file_github_com_google_cloudprober_metrics_proto_dist_proto_rawDescGZIP(), []int{0}\n}", "func (*Empty) Descriptor() ([]byte, []int) {\n\treturn file_pkg_sinks_plugin_proto_metrics_proto_rawDescGZIP(), []int{3}\n}", "func (*GetPeerInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{6}\n}", "func (*Thread) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_JVMMetric_proto_rawDescGZIP(), []int{5}\n}", "func (*PlanChange_Added) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*WatchRequestTypeProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{25}\n}", "func (*BackfilledMetrics) Descriptor() ([]byte, []int) {\n\treturn file_BackfillMetrics_proto_rawDescGZIP(), []int{0}\n}", "func (*Telemetry) Descriptor() ([]byte, []int) {\n\treturn file_protocol_proto_rawDescGZIP(), []int{0}\n}", "func (GCPhrase) EnumDescriptor() ([]byte, 
[]int) {\n\treturn file_language_agent_JVMMetric_proto_rawDescGZIP(), []int{1}\n}", "func (*WatchRequest) Descriptor() ([]byte, []int) {\n\treturn file_authzed_api_v0_watch_service_proto_rawDescGZIP(), []int{0}\n}", "func (MetricImplementation_Language) EnumDescriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{6, 0}\n}", "func (TimeDomain_Enum) EnumDescriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{36, 0}\n}", "func (*FaultRateLimit) Descriptor() ([]byte, []int) {\n\treturn file_envoy_config_filter_fault_v2_fault_proto_rawDescGZIP(), []int{1}\n}", "func (*CountedDuration) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{11}\n}", "func (m *Metric) metricDescriptor(series ddapi.Series) *metricpb.MetricDescriptor {\n\td := &metricpb.MetricDescriptor{\n\t\tType: m.StackdriverName(),\n\t\t// Query results are gauges; there does not seem to be a way to get cumulative metrics from Datadog.\n\t\tMetricKind: metricpb.MetricDescriptor_GAUGE,\n\t\t// Datadog API does not declare value type, and the clinet library exposes all points as float64.\n\t\tValueType: metricpb.MetricDescriptor_DOUBLE,\n\t\tDescription: fmt.Sprintf(\"Datadog query: %s\", m.config.Query),\n\t\tDisplayName: *series.DisplayName,\n\t}\n\tif u, ok := series.GetUnitsOk(); ok {\n\t\t// Sometimes we get a slice of two pointers, but the second is nil.\n\t\tif len(u) == 2 && u[0] != nil && u[1] != nil {\n\t\t\t// Numerator and denominator.\n\t\t\td.Unit = fmt.Sprintf(\"%s/%s\", u[0].ShortName, u[1].ShortName)\n\t\t} else if len(u) > 0 && u[0] != nil {\n\t\t\td.Unit = u[0].ShortName\n\t\t}\n\t}\n\treturn d\n}" ]
[ "0.68842673", "0.68259895", "0.6754421", "0.66955364", "0.6689265", "0.6643883", "0.66222525", "0.65758026", "0.65588665", "0.6558328", "0.65321547", "0.6504689", "0.64963424", "0.64895487", "0.64671564", "0.64627165", "0.64118576", "0.640645", "0.6402437", "0.63927925", "0.63857055", "0.63838047", "0.63755524", "0.6375062", "0.6364738", "0.6344502", "0.6326873", "0.62970006", "0.6296076", "0.6282125", "0.62814546", "0.62762773", "0.6254236", "0.62446994", "0.6241785", "0.6240405", "0.6231912", "0.6219152", "0.6212177", "0.6207485", "0.61861545", "0.61841303", "0.61774045", "0.61756134", "0.6172337", "0.61403155", "0.6130686", "0.61302763", "0.6128156", "0.61163366", "0.61114854", "0.61073345", "0.60934126", "0.60924315", "0.60904217", "0.6079404", "0.6067346", "0.60643333", "0.6061143", "0.6058532", "0.605525", "0.60516375", "0.6041442", "0.6032486", "0.60319597", "0.6030676", "0.60280496", "0.6027291", "0.6027185", "0.6027019", "0.6026928", "0.60257334", "0.60246295", "0.6023129", "0.6016748", "0.5997877", "0.59961754", "0.5993665", "0.5993516", "0.5991539", "0.5990647", "0.5974846", "0.59692127", "0.59673035", "0.59665775", "0.5953907", "0.5953794", "0.59494406", "0.59489006", "0.59400326", "0.59382766", "0.59358793", "0.59331244", "0.5930013", "0.59291536", "0.5923956", "0.59239346", "0.59238034", "0.5921964", "0.59155774" ]
0.66302645
6
Deprecated: Use ThresholdSpec.ProtoReflect.Descriptor instead.
func (*ThresholdSpec) Descriptor() ([]byte, []int) { return file_api_adaptive_load_metric_spec_proto_rawDescGZIP(), []int{1} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*MetricSpecWithThreshold) Descriptor() ([]byte, []int) {\n\treturn file_api_adaptive_load_metric_spec_proto_rawDescGZIP(), []int{2}\n}", "func (*TelemetryThreshold) Descriptor() ([]byte, []int) {\n\treturn file_huawei_telemetry_proto_rawDescGZIP(), []int{5}\n}", "func (*NodeThreshold) Descriptor() ([]byte, []int) {\n\treturn file_models_model_node_threshold_proto_rawDescGZIP(), []int{0}\n}", "func (*AlertingCondition_Spec_TimeSeries_Threshold) Descriptor() ([]byte, []int) {\n\treturn edgelq_monitoring_proto_v3_alerting_condition_proto_rawDescGZIP(), []int{0, 0, 0, 1}\n}", "func (*CircuitBreakers_Thresholds) Descriptor() ([]byte, []int) {\n\treturn file_envoy_api_v2_cluster_circuit_breaker_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*RateLimitingSampler) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_external_envoy_config_trace_v3_opencensus_proto_rawDescGZIP(), []int{4}\n}", "func (*TargetMetrics) Descriptor() ([]byte, []int) {\n\treturn file_asgt_type_target_metrics_proto_rawDescGZIP(), []int{0}\n}", "func (*LivenessProbeCF) Descriptor() ([]byte, []int) {\n\treturn file_pkg_kascfg_kascfg_proto_rawDescGZIP(), []int{13}\n}", "func (*OperationTypeThreshold) Descriptor() ([]byte, []int) {\n\treturn file_chain_proto_rawDescGZIP(), []int{7}\n}", "func (*DurationLT) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{5}\n}", "func (*ConstantSampler) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_external_envoy_config_trace_v3_opencensus_proto_rawDescGZIP(), []int{3}\n}", "func (x *fastReflection_LightClientAttackEvidence) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_LightClientAttackEvidence\n}", "func (*TargetMetrics_Metric) Descriptor() ([]byte, []int) {\n\treturn file_asgt_type_target_metrics_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*KafkaMeter) Descriptor() ([]byte, []int) {\n\treturn 
file_pkg_sinks_plugin_proto_metrics_proto_rawDescGZIP(), []int{0}\n}", "func (TelemetryThreshold_ThresholdOpType) EnumDescriptor() ([]byte, []int) {\n\treturn file_huawei_telemetry_proto_rawDescGZIP(), []int{5, 0}\n}", "func (*RateLimitingSampler) Descriptor() ([]byte, []int) {\n\treturn file_opencensus_proto_trace_v1_trace_config_proto_rawDescGZIP(), []int{3}\n}", "func (*AccountThreshold) Descriptor() ([]byte, []int) {\n\treturn file_chain_proto_rawDescGZIP(), []int{9}\n}", "func (*ApiWarning) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_sql_v1_cloud_sql_resources_proto_rawDescGZIP(), []int{1}\n}", "func (TelemetrySelfDefinedEvent_ThresholdRelation) EnumDescriptor() ([]byte, []int) {\n\treturn file_huawei_telemetry_proto_rawDescGZIP(), []int{3, 0}\n}", "func (*FeaturestoreMonitoringConfig_ThresholdConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_featurestore_monitoring_proto_rawDescGZIP(), []int{0, 2}\n}", "func (*OnTargetAudienceMetrics) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{19}\n}", "func (*FaultRateLimit) Descriptor() ([]byte, []int) {\n\treturn file_envoy_config_filter_fault_v2_fault_proto_rawDescGZIP(), []int{1}\n}", "func (*FaultDelay) Descriptor() ([]byte, []int) {\n\treturn file_envoy_config_filter_fault_v2_fault_proto_rawDescGZIP(), []int{0}\n}", "func (*Range) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{1}\n}", "func (*DurationLTE) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{6}\n}", "func (*CLRMetric) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{1}\n}", "func (*MonitoredResourceDescriptor) Descriptor() ([]byte, []int) {\n\treturn edgelq_monitoring_proto_v3_monitored_resource_descriptor_proto_rawDescGZIP(), []int{0}\n}", "func 
(*OperationSetThreshold) Descriptor() ([]byte, []int) {\n\treturn file_chain_proto_rawDescGZIP(), []int{16}\n}", "func (*ApiWarning) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_sql_v1beta4_cloud_sql_resources_proto_rawDescGZIP(), []int{1}\n}", "func (SafetySetting_HarmBlockThreshold) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_ai_generativelanguage_v1beta2_safety_proto_rawDescGZIP(), []int{3, 0}\n}", "func (AlertingCondition_Spec_TimeSeries_Threshold_Compare) EnumDescriptor() ([]byte, []int) {\n\treturn edgelq_monitoring_proto_v3_alerting_condition_proto_rawDescGZIP(), []int{0, 0, 0, 1, 0}\n}", "func (*ConstantSampler) Descriptor() ([]byte, []int) {\n\treturn file_opencensus_proto_trace_v1_trace_config_proto_rawDescGZIP(), []int{2}\n}", "func (*TrialProfilerMetricLabels) Descriptor() ([]byte, []int) {\n\treturn file_determined_trial_v1_trial_proto_rawDescGZIP(), []int{3}\n}", "func (*TracingTagLiteral) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_v1_options_tracing_tracing_proto_rawDescGZIP(), []int{4}\n}", "func (*TracingTagLiteral) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_v1_options_tracing_tracing_proto_rawDescGZIP(), []int{4}\n}", "func (*ClrThread) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{3}\n}", "func (*DurationExLTGT) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{10}\n}", "func (*LabelledPayload) Descriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{59}\n}", "func (*CheckLiveRequest) Descriptor() ([]byte, []int) {\n\treturn file_health_proto_rawDescGZIP(), []int{2}\n}", "func (*PlanChange_Modified) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0, 1}\n}", "func (*ProbabilitySampler) 
Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_external_envoy_config_trace_v3_opencensus_proto_rawDescGZIP(), []int{2}\n}", "func (*Span) Descriptor() ([]byte, []int) {\n\treturn file_google_devtools_cloudtrace_v2_trace_proto_rawDescGZIP(), []int{0}\n}", "func (*TracePercentages) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_v1_options_tracing_tracing_proto_rawDescGZIP(), []int{2}\n}", "func (*TracePercentages) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_v1_options_tracing_tracing_proto_rawDescGZIP(), []int{2}\n}", "func (*LabelDescriptor) Descriptor() ([]byte, []int) {\n\treturn edgelq_logging_proto_v1alpha2_common_proto_rawDescGZIP(), []int{0}\n}", "func (*ScanRunWarningTrace) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_websecurityscanner_v1_scan_run_warning_trace_proto_rawDescGZIP(), []int{0}\n}", "func (*FeedbackMetrics) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{12}\n}", "func (*TraceProto) Descriptor() ([]byte, []int) {\n\treturn file_internal_tracing_extended_extended_trace_proto_rawDescGZIP(), []int{0}\n}", "func (ConstantSampler_ConstantDecision) EnumDescriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_external_envoy_config_trace_v3_opencensus_proto_rawDescGZIP(), []int{3, 0}\n}", "func (StandardPTransforms_DeprecatedPrimitives) EnumDescriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{4, 1}\n}", "func (*DurationGTELTE) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{11}\n}", "func (*GetBalancerBandwidthResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{19}\n}", "func (*Module) Descriptor() ([]byte, []int) {\n\treturn 
file_google_devtools_cloudtrace_v2_trace_proto_rawDescGZIP(), []int{3}\n}", "func (*LoadBalancerStatsRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_test_proto_rawDescGZIP(), []int{9}\n}", "func (*WatchRequestTypeProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{25}\n}", "func (*Metric) Descriptor() ([]byte, []int) {\n\treturn file_tlogpb_tlog_proto_rawDescGZIP(), []int{4}\n}", "func (*PlanChange) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0}\n}", "func (*Metrics) Descriptor() ([]byte, []int) {\n\treturn file_grpc_proto_rawDescGZIP(), []int{0}\n}", "func (*CMsgClientToGCPlayerStatsRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{143}\n}", "func (*MetricSpec) Descriptor() ([]byte, []int) {\n\treturn file_api_adaptive_load_metric_spec_proto_rawDescGZIP(), []int{0}\n}", "func (*PlanChange_Removed) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0, 3}\n}", "func (*Labels) Descriptor() ([]byte, []int) {\n\treturn file_tlogpb_tlog_proto_rawDescGZIP(), []int{1}\n}", "func (*CircuitBreakers_Thresholds_RetryBudget) Descriptor() ([]byte, []int) {\n\treturn file_envoy_api_v2_cluster_circuit_breaker_proto_rawDescGZIP(), []int{0, 0, 0}\n}", "func (*CheckerChartV1) Descriptor() ([]byte, []int) {\n\treturn file_checker_v1_proto_rawDescGZIP(), []int{20}\n}", "func (ConstantSampler_ConstantDecision) EnumDescriptor() ([]byte, []int) {\n\treturn file_opencensus_proto_trace_v1_trace_config_proto_rawDescGZIP(), []int{2, 0}\n}", "func (*ListenerTracingSettings) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_v1_options_tracing_tracing_proto_rawDescGZIP(), []int{0}\n}", "func (*ListenerTracingSettings) Descriptor() ([]byte, []int) {\n\treturn 
file_github_com_solo_io_gloo_projects_gloo_api_v1_options_tracing_tracing_proto_rawDescGZIP(), []int{0}\n}", "func (*Metric) Descriptor() ([]byte, []int) {\n\treturn file_go_chromium_org_luci_analysis_proto_v1_metrics_proto_rawDescGZIP(), []int{2}\n}", "func (*DurationGT) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{7}\n}", "func (*CheckRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_api_servicecontrol_v1_service_controller_proto_rawDescGZIP(), []int{0}\n}", "func (*WriteKafkaMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_sinks_plugin_proto_metrics_proto_rawDescGZIP(), []int{2}\n}", "func (*GetByTargetRequest) Descriptor() ([]byte, []int) {\n\treturn file_audit_proto_rawDescGZIP(), []int{2}\n}", "func (*MetricsRange) Descriptor() ([]byte, []int) {\n\treturn file_determined_project_v1_project_proto_rawDescGZIP(), []int{5}\n}", "func (*FaultDelay_HeaderDelay) Descriptor() ([]byte, []int) {\n\treturn file_envoy_config_filter_fault_v2_fault_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*TelemetryParams) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{62}\n}", "func (*TelemetryExpectation) Descriptor() ([]byte, []int) {\n\treturn file_testvector_tv_proto_rawDescGZIP(), []int{8}\n}", "func (*Span_Attributes) Descriptor() ([]byte, []int) {\n\treturn file_google_devtools_cloudtrace_v2_trace_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*AllowedValues) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{3}\n}", "func (*PatchCollectorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{161}\n}", "func (*Violation) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_expression_proto_rawDescGZIP(), []int{2}\n}", "func (*MetricInfo) Descriptor() ([]byte, []int) {\n\treturn file_msgType_proto_rawDescGZIP(), []int{15}\n}", "func 
(*HealthCheckRequest) Descriptor() ([]byte, []int) {\n\treturn file_internal_proto_files_domain_probes_proto_rawDescGZIP(), []int{0}\n}", "func (*CMatchTeamTimedStats) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{10}\n}", "func (*EvictWritersResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{11}\n}", "func (*DurationGTLT) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{9}\n}", "func (*Ping) Descriptor() ([]byte, []int) {\n\treturn file_api_protobuf_spec_example_example_proto_rawDescGZIP(), []int{5}\n}", "func (*PerformanceWhere) Descriptor() ([]byte, []int) {\n\treturn file_commissionService_proto_rawDescGZIP(), []int{3}\n}", "func (*ValidatorParams) Descriptor() ([]byte, []int) {\n\treturn file_tm_replay_proto_rawDescGZIP(), []int{17}\n}", "func (*ValidatorUpdate) Descriptor() ([]byte, []int) {\n\treturn file_tm_replay_proto_rawDescGZIP(), []int{9}\n}", "func (*APILevel) Descriptor() ([]byte, []int) {\n\treturn file_Notify_proto_rawDescGZIP(), []int{4}\n}", "func (*Validator) Descriptor() ([]byte, []int) {\n\treturn file_tm_replay_proto_rawDescGZIP(), []int{13}\n}", "func (*ApiListener) Descriptor() ([]byte, []int) {\n\treturn file_envoy_config_listener_v2_api_listener_proto_rawDescGZIP(), []int{0}\n}", "func (*FaultRateLimit_HeaderLimit) Descriptor() ([]byte, []int) {\n\treturn file_envoy_config_filter_fault_v2_fault_proto_rawDescGZIP(), []int{1, 1}\n}", "func (*TriggerBlockReportResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{17}\n}", "func (*TimerFamilySpec) Descriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{17}\n}", "func (*Span_TimeEvents) Descriptor() ([]byte, []int) {\n\treturn file_google_devtools_cloudtrace_v2_trace_proto_rawDescGZIP(), []int{0, 
2}\n}", "func (*PatchAnnotationsStatusRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{5}\n}", "func (*TargetValue) Descriptor() ([]byte, []int) {\n\treturn file_packetbroker_api_iam_v1_service_proto_rawDescGZIP(), []int{7}\n}", "func (*DurationExGTELTE) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{12}\n}", "func (*Deprecation) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{8}\n}" ]
[ "0.6747329", "0.66270244", "0.63288754", "0.6282511", "0.6232712", "0.61830735", "0.61650217", "0.6095416", "0.60849476", "0.6023062", "0.6021123", "0.59890085", "0.59753263", "0.59735835", "0.59627837", "0.59447163", "0.59056884", "0.5885518", "0.5869348", "0.5867261", "0.58615", "0.5844535", "0.5841566", "0.5828869", "0.5813876", "0.58071864", "0.58023286", "0.5800866", "0.5797112", "0.57960635", "0.57918936", "0.57890487", "0.57889557", "0.57855445", "0.57855445", "0.5780618", "0.5777835", "0.577759", "0.5770897", "0.57568574", "0.5754434", "0.5747833", "0.57386816", "0.57386816", "0.57374924", "0.57181823", "0.57143813", "0.5707645", "0.57024974", "0.5696843", "0.56960744", "0.569587", "0.569255", "0.5689868", "0.56888807", "0.5686459", "0.5685306", "0.5683332", "0.56815517", "0.56795853", "0.5676913", "0.5674636", "0.5674096", "0.5673701", "0.5659917", "0.56589186", "0.56589186", "0.5657928", "0.56543213", "0.5650947", "0.5650147", "0.5648275", "0.56463474", "0.5644906", "0.56442", "0.564167", "0.5634302", "0.56271917", "0.56263167", "0.5621974", "0.56151587", "0.5613921", "0.5611349", "0.5609583", "0.5608225", "0.5607284", "0.5606836", "0.5606204", "0.5598591", "0.5598411", "0.559547", "0.55949193", "0.5590357", "0.5588497", "0.55868596", "0.5586737", "0.5582912", "0.55813646", "0.5579169", "0.5578477" ]
0.69594944
0
Deprecated: Use MetricSpecWithThreshold.ProtoReflect.Descriptor instead.
func (*MetricSpecWithThreshold) Descriptor() ([]byte, []int) { return file_api_adaptive_load_metric_spec_proto_rawDescGZIP(), []int{2} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*ThresholdSpec) Descriptor() ([]byte, []int) {\n\treturn file_api_adaptive_load_metric_spec_proto_rawDescGZIP(), []int{1}\n}", "func (*TelemetryThreshold) Descriptor() ([]byte, []int) {\n\treturn file_huawei_telemetry_proto_rawDescGZIP(), []int{5}\n}", "func (*AlertingCondition_Spec_TimeSeries_Threshold) Descriptor() ([]byte, []int) {\n\treturn edgelq_monitoring_proto_v3_alerting_condition_proto_rawDescGZIP(), []int{0, 0, 0, 1}\n}", "func (*TargetMetrics_Metric) Descriptor() ([]byte, []int) {\n\treturn file_asgt_type_target_metrics_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*TargetMetrics) Descriptor() ([]byte, []int) {\n\treturn file_asgt_type_target_metrics_proto_rawDescGZIP(), []int{0}\n}", "func (*KafkaMeter) Descriptor() ([]byte, []int) {\n\treturn file_pkg_sinks_plugin_proto_metrics_proto_rawDescGZIP(), []int{0}\n}", "func (*MetricSpec) Descriptor() ([]byte, []int) {\n\treturn file_api_adaptive_load_metric_spec_proto_rawDescGZIP(), []int{0}\n}", "func (*CircuitBreakers_Thresholds) Descriptor() ([]byte, []int) {\n\treturn file_envoy_api_v2_cluster_circuit_breaker_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*FeaturestoreMonitoringConfig_ThresholdConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_featurestore_monitoring_proto_rawDescGZIP(), []int{0, 2}\n}", "func (*NodeThreshold) Descriptor() ([]byte, []int) {\n\treturn file_models_model_node_threshold_proto_rawDescGZIP(), []int{0}\n}", "func (*RateLimitingSampler) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_external_envoy_config_trace_v3_opencensus_proto_rawDescGZIP(), []int{4}\n}", "func (*OnTargetAudienceMetrics) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{19}\n}", "func (*WriteKafkaMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_sinks_plugin_proto_metrics_proto_rawDescGZIP(), []int{2}\n}", "func (*CLRMetric) Descriptor() 
([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{1}\n}", "func (TelemetryThreshold_ThresholdOpType) EnumDescriptor() ([]byte, []int) {\n\treturn file_huawei_telemetry_proto_rawDescGZIP(), []int{5, 0}\n}", "func (*Metric) Descriptor() ([]byte, []int) {\n\treturn file_go_chromium_org_luci_analysis_proto_v1_metrics_proto_rawDescGZIP(), []int{2}\n}", "func (*LivenessProbeCF) Descriptor() ([]byte, []int) {\n\treturn file_pkg_kascfg_kascfg_proto_rawDescGZIP(), []int{13}\n}", "func (*FeedbackMetrics) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{12}\n}", "func (*MonitoredResourceDescriptor) Descriptor() ([]byte, []int) {\n\treturn edgelq_monitoring_proto_v3_monitored_resource_descriptor_proto_rawDescGZIP(), []int{0}\n}", "func (*Metric) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{0}\n}", "func (*ClrThread) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{3}\n}", "func (*Metric) Descriptor() ([]byte, []int) {\n\treturn file_tlogpb_tlog_proto_rawDescGZIP(), []int{4}\n}", "func (*OperationTypeThreshold) Descriptor() ([]byte, []int) {\n\treturn file_chain_proto_rawDescGZIP(), []int{7}\n}", "func (*Metrics) Descriptor() ([]byte, []int) {\n\treturn file_grpc_proto_rawDescGZIP(), []int{0}\n}", "func (*MetricInfo) Descriptor() ([]byte, []int) {\n\treturn file_msgType_proto_rawDescGZIP(), []int{15}\n}", "func (*TrialProfilerMetricLabels) Descriptor() ([]byte, []int) {\n\treturn file_determined_trial_v1_trial_proto_rawDescGZIP(), []int{3}\n}", "func (*RateLimitingSampler) Descriptor() ([]byte, []int) {\n\treturn file_opencensus_proto_trace_v1_trace_config_proto_rawDescGZIP(), []int{3}\n}", "func (*MetricImplementation) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{6}\n}", "func (AlertingCondition_Spec_TimeSeries_Threshold_Compare) 
EnumDescriptor() ([]byte, []int) {\n\treturn edgelq_monitoring_proto_v3_alerting_condition_proto_rawDescGZIP(), []int{0, 0, 0, 1, 0}\n}", "func (*MetricsReport) Descriptor() ([]byte, []int) {\n\treturn file_determined_trial_v1_trial_proto_rawDescGZIP(), []int{9}\n}", "func (*ListMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_go_chromium_org_luci_analysis_proto_v1_metrics_proto_rawDescGZIP(), []int{0}\n}", "func (*ConstantSampler) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_external_envoy_config_trace_v3_opencensus_proto_rawDescGZIP(), []int{3}\n}", "func (*Range) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{1}\n}", "func (*KafkaGauge) Descriptor() ([]byte, []int) {\n\treturn file_pkg_sinks_plugin_proto_metrics_proto_rawDescGZIP(), []int{1}\n}", "func (*FaultRateLimit) Descriptor() ([]byte, []int) {\n\treturn file_envoy_config_filter_fault_v2_fault_proto_rawDescGZIP(), []int{1}\n}", "func (TelemetrySelfDefinedEvent_ThresholdRelation) EnumDescriptor() ([]byte, []int) {\n\treturn file_huawei_telemetry_proto_rawDescGZIP(), []int{3, 0}\n}", "func (*DurationLT) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{5}\n}", "func (*MetricsRange) Descriptor() ([]byte, []int) {\n\treturn file_determined_project_v1_project_proto_rawDescGZIP(), []int{5}\n}", "func (*WatchLimitsRequest) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_limit_service_proto_rawDescGZIP(), []int{7}\n}", "func (*CLRMetricCollection) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{0}\n}", "func (*OperationSetThreshold) Descriptor() ([]byte, []int) {\n\treturn file_chain_proto_rawDescGZIP(), []int{16}\n}", "func (*ClrGC) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{2}\n}", "func (x 
*fastReflection_LightClientAttackEvidence) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_LightClientAttackEvidence\n}", "func (*WatchLimitRequest) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_limit_service_proto_rawDescGZIP(), []int{5}\n}", "func (*CMsgClientToGCPlayerStatsRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{143}\n}", "func (*ApiWarning) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_sql_v1_cloud_sql_resources_proto_rawDescGZIP(), []int{1}\n}", "func (Metric_Scale) EnumDescriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*MetricsServiceRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{18}\n}", "func (*MetricData) Descriptor() ([]byte, []int) {\n\treturn file_alameda_api_v1alpha1_datahub_common_metrics_proto_rawDescGZIP(), []int{1}\n}", "func (*AccountThreshold) Descriptor() ([]byte, []int) {\n\treturn file_chain_proto_rawDescGZIP(), []int{9}\n}", "func (*TimerFamilySpec) Descriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{17}\n}", "func (*GetModelVersionMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{94}\n}", "func (*PlanChange) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0}\n}", "func (SafetySetting_HarmBlockThreshold) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_ai_generativelanguage_v1beta2_safety_proto_rawDescGZIP(), []int{3, 0}\n}", "func (*BaseMetrics) Descriptor() ([]byte, []int) {\n\treturn file_mitre_cvss_v3_cvss_proto_rawDescGZIP(), []int{0}\n}", "func (*Sample) Descriptor() ([]byte, []int) {\n\treturn file_alameda_api_v1alpha1_datahub_common_metrics_proto_rawDescGZIP(), 
[]int{0}\n}", "func (*MetricConfiguration) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{5}\n}", "func (*ApiWarning) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_sql_v1beta4_cloud_sql_resources_proto_rawDescGZIP(), []int{1}\n}", "func (*MetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{11}\n}", "func (*DurationLTE) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{6}\n}", "func (*LoadBalancerStatsRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_test_proto_rawDescGZIP(), []int{9}\n}", "func (*TrialSourceInfoMetric) Descriptor() ([]byte, []int) {\n\treturn file_determined_trial_v1_trial_proto_rawDescGZIP(), []int{11}\n}", "func (*MetricLimit) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_quota_quota_proto_rawDescGZIP(), []int{1}\n}", "func (*Message7511) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{16}\n}", "func (ms Metric) MetricDescriptor() MetricDescriptor {\n\treturn newMetricDescriptor(&(*ms.orig).MetricDescriptor)\n}", "func (*CMatchTeamTimedStats) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{10}\n}", "func (*AllowedValues) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{3}\n}", "func (*LabelledPayload) Descriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{59}\n}", "func (ConstantSampler_ConstantDecision) EnumDescriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_external_envoy_config_trace_v3_opencensus_proto_rawDescGZIP(), []int{3, 0}\n}", "func (*TelemetryExpectation) Descriptor() ([]byte, []int) {\n\treturn file_testvector_tv_proto_rawDescGZIP(), []int{8}\n}", "func 
(*PlanChange_Removed) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0, 3}\n}", "func (*Performance) Descriptor() ([]byte, []int) {\n\treturn file_commissionService_proto_rawDescGZIP(), []int{2}\n}", "func (*MinMax) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{2}\n}", "func (*TelemetryRequest) Descriptor() ([]byte, []int) {\n\treturn file_automate_gateway_api_telemetry_telemetry_proto_rawDescGZIP(), []int{0}\n}", "func (*DurationExLTGT) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{10}\n}", "func (*MetricsWorkload) Descriptor() ([]byte, []int) {\n\treturn file_determined_trial_v1_trial_proto_rawDescGZIP(), []int{1}\n}", "func (*XqosTmPerformanceAlarm) Descriptor() ([]byte, []int) {\n\treturn file_huaweiV8R12_qos_notification_proto_rawDescGZIP(), []int{30}\n}", "func (*LabelDescriptor) Descriptor() ([]byte, []int) {\n\treturn edgelq_logging_proto_v1alpha2_common_proto_rawDescGZIP(), []int{0}\n}", "func (*Telemetry) Descriptor() ([]byte, []int) {\n\treturn file_huawei_telemetry_proto_rawDescGZIP(), []int{0}\n}", "func (*ConstantSampler) Descriptor() ([]byte, []int) {\n\treturn file_opencensus_proto_trace_v1_trace_config_proto_rawDescGZIP(), []int{2}\n}", "func (*PlanChange_Added) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*Thread) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_JVMMetric_proto_rawDescGZIP(), []int{5}\n}", "func (*DatasetRequest) Descriptor() ([]byte, []int) {\n\treturn file_internal_proto_metric_proto_rawDescGZIP(), []int{0}\n}", "func (*PlanChange_Modified) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0, 1}\n}", "func (*Message6024) Descriptor() ([]byte, []int) {\n\treturn 
file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{26}\n}", "func (*DurationGT) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{7}\n}", "func (*DurationGTELTE) Descriptor() ([]byte, []int) {\n\treturn file_buf_validate_conformance_cases_wkt_duration_proto_rawDescGZIP(), []int{11}\n}", "func (*ThresholdReportingPluginConfig) Descriptor() ([]byte, []int) {\n\treturn file_core_services_ocr2_plugins_functions_config_config_types_proto_rawDescGZIP(), []int{0}\n}", "func (ResourceName) EnumDescriptor() ([]byte, []int) {\n\treturn file_alameda_api_v1alpha1_datahub_common_metrics_proto_rawDescGZIP(), []int{1}\n}", "func (*ClusterMonitorData) Descriptor() ([]byte, []int) {\n\treturn file_api_proto_rawDescGZIP(), []int{10}\n}", "func LegacyLoadMessageDesc(t reflect.Type) protoreflect.MessageDescriptor {\n\treturn legacyLoadMessageDesc(t, \"\")\n}", "func (*FaultRateLimit_HeaderLimit) Descriptor() ([]byte, []int) {\n\treturn file_envoy_config_filter_fault_v2_fault_proto_rawDescGZIP(), []int{1, 1}\n}", "func (*Empty) Descriptor() ([]byte, []int) {\n\treturn file_pkg_sinks_plugin_proto_metrics_proto_rawDescGZIP(), []int{3}\n}", "func (*Message12796) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{1}\n}", "func (*TelemetryParams) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{62}\n}", "func (*PerformanceWhere) Descriptor() ([]byte, []int) {\n\treturn file_commissionService_proto_rawDescGZIP(), []int{3}\n}", "func (*CircuitBreakers_Thresholds_RetryBudget) Descriptor() ([]byte, []int) {\n\treturn file_envoy_api_v2_cluster_circuit_breaker_proto_rawDescGZIP(), []int{0, 0, 0}\n}", "func ProtoFromDescriptor(d protoreflect.Descriptor) proto.Message {\n\tswitch d := d.(type) {\n\tcase protoreflect.FileDescriptor:\n\t\treturn ProtoFromFileDescriptor(d)\n\tcase 
protoreflect.MessageDescriptor:\n\t\treturn ProtoFromMessageDescriptor(d)\n\tcase protoreflect.FieldDescriptor:\n\t\treturn ProtoFromFieldDescriptor(d)\n\tcase protoreflect.OneofDescriptor:\n\t\treturn ProtoFromOneofDescriptor(d)\n\tcase protoreflect.EnumDescriptor:\n\t\treturn ProtoFromEnumDescriptor(d)\n\tcase protoreflect.EnumValueDescriptor:\n\t\treturn ProtoFromEnumValueDescriptor(d)\n\tcase protoreflect.ServiceDescriptor:\n\t\treturn ProtoFromServiceDescriptor(d)\n\tcase protoreflect.MethodDescriptor:\n\t\treturn ProtoFromMethodDescriptor(d)\n\tdefault:\n\t\t// WTF??\n\t\tif res, ok := d.(DescriptorProtoWrapper); ok {\n\t\t\treturn res.AsProto()\n\t\t}\n\t\treturn nil\n\t}\n}", "func (*ListMetricsResponse) Descriptor() ([]byte, []int) {\n\treturn file_go_chromium_org_luci_analysis_proto_v1_metrics_proto_rawDescGZIP(), []int{1}\n}", "func (ConstantSampler_ConstantDecision) EnumDescriptor() ([]byte, []int) {\n\treturn file_opencensus_proto_trace_v1_trace_config_proto_rawDescGZIP(), []int{2, 0}\n}" ]
[ "0.6960433", "0.65074533", "0.60811925", "0.6029226", "0.60165745", "0.6012746", "0.59527254", "0.5918514", "0.58969796", "0.5872111", "0.5858821", "0.5857826", "0.58539706", "0.58046436", "0.5786188", "0.5784575", "0.5756746", "0.572665", "0.56305903", "0.56272644", "0.5596084", "0.5594624", "0.5593038", "0.5589756", "0.55790037", "0.5573242", "0.5541749", "0.5540635", "0.55343306", "0.5524222", "0.5520098", "0.5517843", "0.55131066", "0.5477834", "0.5469404", "0.5464909", "0.5462445", "0.54600143", "0.54580456", "0.54419637", "0.5441328", "0.54411584", "0.5439449", "0.5432137", "0.542997", "0.54265684", "0.54093665", "0.5406215", "0.53925073", "0.53781223", "0.5373668", "0.5371227", "0.5369532", "0.5356953", "0.53562534", "0.5350958", "0.53457993", "0.5344443", "0.5341049", "0.5338952", "0.5320434", "0.53158295", "0.5310572", "0.53096735", "0.53084904", "0.53084666", "0.5302823", "0.5302126", "0.53013", "0.52908474", "0.52901345", "0.52897924", "0.5283903", "0.5279966", "0.5266287", "0.5249581", "0.5249454", "0.52469647", "0.5242142", "0.52403945", "0.5239539", "0.52353716", "0.52333915", "0.5229634", "0.52252746", "0.52202976", "0.52186376", "0.5214565", "0.5211602", "0.52113813", "0.52077305", "0.52017254", "0.5201628", "0.5200531", "0.5198861", "0.51978934", "0.5193695", "0.5192123", "0.51910126", "0.5185399" ]
0.7160946
0
You want to add the type of the input and also the type of the return
func greeting(name string) string { return "Hello" + name }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func add[Type IntOrString](items []Type) (result Type) {\n\tfor _, item := range items {\n\t\tresult = result + item\n\t}\n\treturn\n}", "func (fn *CXFunction) AddInput(prgrm *CXProgram, param *CXArgument) *CXFunction {\n\tfnInputs := fn.GetInputs(prgrm)\n\tfor _, inputIdx := range fnInputs {\n\t\tinput := prgrm.GetCXTypeSignatureFromArray(inputIdx)\n\t\tif input.Name == param.Name {\n\t\t\treturn fn\n\t\t}\n\t}\n\n\tparam.Package = fn.Package\n\tnewField := GetCXTypeSignatureRepresentationOfCXArg(prgrm, param)\n\n\tif fn.Inputs == nil {\n\t\tfn.Inputs = &CXStruct{}\n\t}\n\n\tnewFieldIdx := prgrm.AddCXTypeSignatureInArray(newField)\n\tfn.Inputs.AddField_TypeSignature(prgrm, newFieldIdx)\n\n\treturn fn\n}", "func DataType(){\n\tBool()\n\tFloat()\n\tComplex()\n\tStdInput()\n}", "func add(b, a interface{}) (interface{}, error) {\n\tav := reflect.ValueOf(a)\n\tbv := reflect.ValueOf(b)\n\n\tswitch av.Kind() {\n\tcase reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:\n\t\tswitch bv.Kind() {\n\t\tcase reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:\n\t\t\treturn av.Int() + bv.Int(), nil\n\t\tcase reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:\n\t\t\treturn av.Int() + int64(bv.Uint()), nil\n\t\tcase reflect.Float32, reflect.Float64:\n\t\t\treturn float64(av.Int()) + bv.Float(), nil\n\t\tdefault:\n\t\t\treturn nil, fmt.Errorf(\"add: unknown type for %q (%T)\", bv, b)\n\t\t}\n\tcase reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:\n\t\tswitch bv.Kind() {\n\t\tcase reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:\n\t\t\treturn int64(av.Uint()) + bv.Int(), nil\n\t\tcase reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:\n\t\t\treturn av.Uint() + bv.Uint(), nil\n\t\tcase reflect.Float32, reflect.Float64:\n\t\t\treturn float64(av.Uint()) + bv.Float(), nil\n\t\tdefault:\n\t\t\treturn nil, fmt.Errorf(\"add: unknown type for %q 
(%T)\", bv, b)\n\t\t}\n\tcase reflect.Float32, reflect.Float64:\n\t\tswitch bv.Kind() {\n\t\tcase reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:\n\t\t\treturn av.Float() + float64(bv.Int()), nil\n\t\tcase reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:\n\t\t\treturn av.Float() + float64(bv.Uint()), nil\n\t\tcase reflect.Float32, reflect.Float64:\n\t\t\treturn av.Float() + bv.Float(), nil\n\t\tdefault:\n\t\t\treturn nil, fmt.Errorf(\"add: unknown type for %q (%T)\", bv, b)\n\t\t}\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"add: unknown type for %q (%T)\", av, a)\n\t}\n}", "func checkandConvertType(args interface{})(string){\n switch v := args.(type) {\n case float64:\n //convert it to a string\n return strconv.FormatFloat(args.(float64), 'f', -1, 64)\n case string:\n return strings.Trim(args.(string), \" \")\n default:\n fmt.Printf(\"Other:%v\\n\", v)\n return \"Type mismatched\"\n }\n}", "func (this *DateAddStr) Type() value.Type { return value.STRING }", "func mainfu(myInt interface{} ) {\n \n// String\ns, ok := myInt.(string)\nif ok {\n fmt.Println(\"Yes string:\", s)\n}\n\n// Init\nk, ok := myInt.(int)\nif ok {\n fmt.Println(\"Yes this INT:\", k)\n}\n\n// Float 64\nv, ok := myInt.(float64)\nif ok {\nfmt.Println(\"Yes float64\", v)\n} else {\nfmt.Println(\"Failed without panicking!\")\n}\n\n}", "func FuncCustomType(_ C1) {}", "func wrap1(a any,f func(float64)float64)any{\n var t string = fmt.Sprintf(\"%T\", a)\n switch t {\n case \"int\": return f(float64(a.(int)))\n case \"int8\": return f(float64(a.(int8)))\n case \"int16\": return f(float64(a.(int16)))\n case \"int32\": return f(float64(a.(int32)))\n case \"int64\": return f(float64(a.(int64)))\n case \"uint\": return f(float64(a.(uint)))\n case \"uint8\": return f(float64(a.(uint8)))\n case \"uint16\": return f(float64(a.(uint16)))\n case \"uint32\": return f(float64(a.(uint32)))\n case \"uint64\": return f(float64(a.(uint64)))\n case \"float32\": return 
f(float64(a.(float32)))\n case \"float64\": return f(a.(float64))\n default: fmt.Println(\"Invalid type\")\n }\n return nil\n}", "func (rv *ReturnValue) Type() ObjectType { return RETURN_VALUE_OBJ }", "func (this *DateAddMillis) Type() value.Type { return value.NUMBER }", "func Add( a *context.Value, b *context.Value ) (*context.Value,error) {\n if a != nil && b != nil {\n switch a.OperationType( b ) {\n case context.VAR_BOOL:\n return context.IntValue( a.Int() + b.Int() ), nil\n case context.VAR_INT:\n return context.IntValue( a.Int() + b.Int() ), nil\n case context.VAR_FLOAT:\n return context.FloatValue( a.Float() + b.Float() ), nil\n case context.VAR_STRING:\n return context.StringValue( a.String() + b.String() ), nil\n case context.VAR_COMPLEX:\n return context.ComplexValue( a.Complex() + b.Complex() ), nil\n }\n }\n\n return nil, errors.New( \"Unsupported type for add\" )\n}", "func native(name string) []Type {}", "func (Output) typ() string { return \"output1\" }", "func wrap2(a,b any,f func(float64,float64)float64)any{\n var t string = fmt.Sprintf(\"%T\", a)\n var t2 string = fmt.Sprintf(\"%T\", b)\n var b2 float64\n switch t2 {\n case \"int\": b2 = float64(b.(int))\n case \"int8\": b2 = float64(b.(int8))\n case \"int16\": b2 = float64(b.(int16))\n case \"int32\": b2 = float64(b.(int32))\n case \"int64\": b2 = float64(b.(int64))\n case \"uint\": b2 = float64(b.(uint))\n case \"uint8\": b2 = float64(b.(uint8))\n case \"uint16\": b2 = float64(b.(uint16))\n case \"uint32\": b2 = float64(b.(uint32))\n case \"uint64\": b2 = float64(b.(uint64))\n case \"float32\": b2 = float64(b.(float32))\n case \"float64\": b2 = b.(float64)\n default: fmt.Println(\"Invalid type\")\n }\n switch t {\n case \"int\": return f(float64(a.(int)),b2)\n case \"int8\": return f(float64(a.(int8)),b2)\n case \"int16\": return f(float64(a.(int16)),b2)\n case \"int32\": return f(float64(a.(int32)),b2)\n case \"int64\": return f(float64(a.(int64)),b2)\n case \"uint\": return 
f(float64(a.(uint)),b2)\n case \"uint8\": return f(float64(a.(uint8)),b2)\n case \"uint16\": return f(float64(a.(uint16)),b2)\n case \"uint32\": return f(float64(a.(uint32)),b2)\n case \"uint64\": return f(float64(a.(uint64)),b2)\n case \"float32\": return f(float64(a.(float32)),b2)\n case \"float64\": return f(a.(float64),b2)\n default: fmt.Println(\"Invalid type\")\n }\n return nil\n}", "func makeTypedInput(numRows int, numCols int, t T) [][]TypedDatum {\n\tresult := make([][]TypedDatum, numRows)\n\tfor i := range result {\n\t\tresult[i] = make([]TypedDatum, numCols)\n\t}\n\tswitch t {\n\tcase Int64Type:\n\t\tfor i := 0; i < numRows; i++ {\n\t\t\tfor j := 0; j < numCols; j++ {\n\t\t\t\tresult[i][j] = TypedDatum{t: t, int64: int64(i)}\n\t\t\t}\n\t\t}\n\tcase Float64Type:\n\t\tfor i := 0; i < numRows; i++ {\n\t\t\tfor j := 0; j < numCols; j++ {\n\t\t\t\tresult[i][j] = TypedDatum{t: t, float64: float64(i)}\n\t\t\t}\n\t\t}\n\tdefault:\n\t\tpanic(\"unhandled type\")\n\t}\n\treturn result\n}", "func plusPlus(a, b, c int) int {\n\treturn a + b + c //return type is mandatory\n}", "func (fn *Function) TypeArgs() []types.Type { return fn.typeargs }", "func generalizeType(t1, t2 string) (string, bool) {\n\tif t1 == t2 {\n\t\treturn t1, true\n\t}\n\n\tif t1 == \"\" {\n\t\treturn t2, true\n\t}\n\n\tif t2 == \"\" {\n\t\treturn t1, true\n\t}\n\n\tswitch t1 {\n\tcase indexer.IntegerType:\n\t\tif t2 == indexer.FloatType {\n\t\t\treturn indexer.FloatType, true\n\t\t}\n\n\tcase indexer.FloatType:\n\t\tif t2 == indexer.IntegerType {\n\t\t\treturn indexer.FloatType, true\n\t\t}\n\n\tcase indexer.TextType:\n\t\tswitch t2 {\n\t\tcase indexer.StringType, indexer.DateType:\n\t\t\treturn indexer.TextType, true\n\t\t}\n\n\tcase indexer.StringType:\n\t\tswitch t2 {\n\t\tcase indexer.TextType, indexer.DateType:\n\t\t\treturn indexer.TextType, true\n\t\t}\n\n\tcase indexer.DateType:\n\t\tswitch t2 {\n\t\tcase indexer.StringType:\n\t\t\treturn indexer.StringType, true\n\t\tcase 
indexer.TextType:\n\t\t\treturn indexer.TextType, true\n\t\t}\n\t}\n\n\treturn \"\", false\n}", "func derive(datum, typ interface{}) (interface{}, error) {\n\tswitch t := typ.(type) {\n\t// Simple case. typ contains the primitive type name to set.\n\tcase string:\n\t\tif t == \"null\" && datum != nil {\n\t\t\treturn nil, errors.New(\"value cannot match null type\")\n\t\t}\n\t\treturn datum, nil\n\n\t// Assume record type for now\n\tcase map[string]interface{}:\n\t\tvar data map[string]interface{}\n\t\tvar fields []interface{}\n\t\tvar ok bool\n\t\tvar typ string\n\t\tvar out = map[string]interface{}{}\n\n\t\t// Short circuit to try and save non record types\n\t\tif typ, ok = t[\"type\"].(string); ok && typ != \"record\" {\n\t\t\treturn derive(datum, typ)\n\t\t}\n\n\t\tif data, ok = datum.(map[string]interface{}); !ok {\n\t\t\treturn nil, errors.New(\"invalid type provided for record field\")\n\t\t}\n\n\t\tif fields, ok = t[\"fields\"].([]interface{}); !ok {\n\t\t\treturn nil, errors.New(\"invalid format for fields\")\n\t\t}\n\n\t\t// Handle each field, constructing map\n\t\tfor _, f := range fields {\n\t\t\tvar field map[string]interface{}\n\t\t\tvar name string\n\t\t\tvar ok bool\n\n\t\t\tif field, ok = f.(map[string]interface{}); !ok {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif name, ok = field[\"name\"].(string); !ok {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// Recursively capture value\n\t\t\tif val, err := derive(data[name], field[\"type\"]); err == nil {\n\t\t\t\tout[name] = val\n\t\t\t}\n\t\t}\n\t\treturn out, nil\n\n\t// Union type\n\tcase []interface{}:\n\t\t// Nothing special if omitted / nil\n\t\tif datum == nil {\n\t\t\treturn nil, nil\n\t\t}\n\n\t\tout := map[string]interface{}{}\n\n\t\t// Check each potential type\n\t\tfor _, potential := range t {\n\t\t\t// Check if provided value is determined to be valid for potential type\n\t\t\tif val, err := derive(datum, potential); err == nil {\n\t\t\t\t// Value is valid, get correct name format and return\n\t\t\t\tif 
name, err := getTypeName(potential); err == nil {\n\t\t\t\t\tout[name] = val\n\t\t\t\t\treturn out, nil\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn nil, nil\n}", "func Whatami(i interface{}) string{\n switch i.(type) {\n case string:\n return \"string\"\n case int32,int64,int:\n return \"integer\"\n case bool:\n return \"Boolean\"\n default:\n return \"UnknownType\"\n }\n\n}", "func (this *Self) Type() value.Type { return value.JSON }", "func (p RProc) Type() Type { return p.Value().Type() }", "func expandTypes(api, need map[name]interface{}) map[name]interface{} {\n\tret := map[name]interface{}{}\n\n\tfor n, i := range need {\n\t\tret[n] = i\n\t\tswitch v := i.(type) {\n\t\tcase command:\n\t\t\taddName(api, ret, v.Name)\n\t\t\tif v.BoxedInput {\n\t\t\t\taddName(api, ret, v.Inputs.Ref)\n\t\t\t} else {\n\t\t\t\tfor _, f := range v.Inputs.Fields(api) {\n\t\t\t\t\taddName(api, ret, f.Type)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif v.Output.Type != \"\" {\n\t\t\t\taddName(api, ret, v.Output.Type)\n\t\t\t}\n\t\tcase event:\n\t\t\taddName(api, ret, v.Name)\n\t\t\tif v.BoxedData {\n\t\t\t\taddName(api, ret, v.Data.Ref)\n\t\t\t} else {\n\t\t\t\tfor _, f := range v.Data.Fields(api) {\n\t\t\t\t\taddName(api, ret, f.Type)\n\t\t\t\t}\n\t\t\t}\n\t\tcase structType:\n\t\t\taddName(api, ret, v.Name)\n\t\t\tfor _, f := range v.Fields {\n\t\t\t\taddName(api, ret, f.Type)\n\t\t\t}\n\t\t\tif v.Base != \"\" {\n\t\t\t\tfor _, f := range api[v.Base].(structType).Fields {\n\t\t\t\t\taddName(api, ret, f.Type)\n\t\t\t\t}\n\t\t\t}\n\t\tcase simpleUnion:\n\t\t\taddName(api, ret, v.Name)\n\t\t\tfor _, typ := range v.Options {\n\t\t\t\taddName(api, ret, typ)\n\t\t\t}\n\t\tcase flatUnion:\n\t\t\taddName(api, ret, v.Name)\n\t\t\tfor _, f := range v.Base.Fields(api) {\n\t\t\t\taddName(api, ret, f.Type)\n\t\t\t}\n\t\t\tfor _, typ := range v.Options {\n\t\t\t\tfor _, f := range api[typ].(structType).Fields {\n\t\t\t\t\taddName(api, ret, f.Type)\n\t\t\t\t}\n\t\t\t}\n\t\tcase alternate:\n\t\t\taddName(api, ret, 
v.Name)\n\t\t\tfor _, typ := range v.Options {\n\t\t\t\taddName(api, ret, typ)\n\t\t\t}\n\t\tcase enum:\n\t\t\taddName(api, ret, v.Name)\n\t\t}\n\t}\n\treturn ret\n}", "func (f Function) ReturnType(argTypes []cty.Type) (cty.Type, error) {\n\tvals := make([]cty.Value, len(argTypes))\n\tfor i, ty := range argTypes {\n\t\tvals[i] = cty.UnknownVal(ty)\n\t}\n\treturn f.ReturnTypeForValues(vals)\n}", "func ( /* receiver */ ) bmi() /* return type */ {\n}", "func switchWithDataTypes()(){\n getInterfaceDatatype := func(i interface{}) {\n switch t := i.(type) {\n case bool:\n fmt.Println(\"I'm a bool\")\n case int:\n fmt.Println(\"I'm an int\")\n default:\n fmt.Printf(\"Don't know type %T\\n\", t)\n }\n }\n getInterfaceDatatype(true)\n getInterfaceDatatype(1)\n getInterfaceDatatype(\"hey\")\n getInterfaceDatatype(2.0)\n}", "func add(this js.Value, i []js.Value) interface{} {\n\tin1, in2 := getInputValues(i)\n\tsetValueById(i[2].String(), in1+in2)\n\treturn nil\n}", "func generica(inter interface{}) {\n\tfmt.Println(inter)\n}", "func type2CR1S1(query1, query2, split1, split2 string, inputType int) {\n\tdb := dbConnec()\n\trows, err := db.Query(query1)\n\tdefer rows.Close()\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tvar sumCR1, sumS1, f, s float64\n\tfor rows.Next() {\n\t\tf++\n\t\tvar x string\n\t\terr := rows.Scan(&x)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tif inputType == 2 {\n\t\t\tlog.Println(split1, \":\", x)\n\t\t}\n\t\tif inputType == 3 {\n\t\t\tif sumC1, err := strconv.ParseFloat(x, 32); err == nil {\n\t\t\t\tsumCR1 += sumC1\n\t\t\t}\n\t\t}\n\t}\n\tif inputType == 3 {\n\t\tlog.Println(\"Average\", split1, \": \", sumCR1/f)\n\t}\n\n\trows1, err := db.Query(query2)\n\tdefer rows1.Close()\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tfor rows1.Next() {\n\t\ts++\n\t\tvar y string\n\t\terr := rows1.Scan(&y)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tif inputType == 2 {\n\t\t\tlog.Println(split2, \":\", y)\n\t\t}\n\t\tif 
inputType == 3 {\n\t\t\tif sumSens1, err := strconv.ParseFloat(y, 32); err == nil {\n\t\t\t\tsumS1 += sumSens1\n\t\t\t}\n\t\t}\n\t}\n\tif inputType == 3 {\n\t\tlog.Println(\"Average\", split2, \": \", sumS1/s)\n\t}\n}", "func createTypeCast(prog *Program, in value.Value, to types.Type) (value.Value, error) {\n\n\tinType := in.Type()\n\tfromInt := types.IsInt(inType)\n\tfromFloat := types.IsFloat(inType)\n\n\ttoInt := types.IsInt(to)\n\ttoFloat := types.IsFloat(to)\n\n\tinSize := typeSize(inType)\n\toutSize := typeSize(to)\n\n\t// If the cast would not change the type, just return the in value, nil\n\tif types.Equal(inType, to) {\n\t\treturn in, nil\n\t}\n\n\tif c, ok := in.(*constant.Int); ok && types.IsInt(to) {\n\t\tc.Typ = to.(*types.IntType)\n\t\treturn c, nil\n\t}\n\n\tif c, ok := in.(*constant.Float); ok && types.IsFloat(to) {\n\t\tc.Typ = to.(*types.FloatType)\n\t\treturn c, nil\n\t}\n\n\tif types.Equal(to, types.Void) {\n\t\treturn nil, nil\n\t}\n\n\tif types.IsPointer(inType) && types.IsPointer(to) {\n\t\treturn prog.Compiler.CurrentBlock().NewBitCast(in, to), nil\n\t}\n\n\tif fromFloat && toInt {\n\t\treturn prog.Compiler.CurrentBlock().NewFPToSI(in, to), nil\n\t}\n\n\tif fromInt && toFloat {\n\t\treturn prog.Compiler.CurrentBlock().NewSIToFP(in, to), nil\n\t}\n\n\tif fromInt && toInt {\n\t\tif inSize < outSize {\n\t\t\treturn prog.Compiler.CurrentBlock().NewSExt(in, to), nil\n\t\t}\n\t\tif inSize == outSize {\n\t\t\treturn in, nil\n\t\t}\n\t\treturn prog.Compiler.CurrentBlock().NewTrunc(in, to), nil\n\t}\n\n\tif fromFloat && toFloat {\n\t\tif inSize < outSize {\n\t\t\treturn prog.Compiler.CurrentBlock().NewFPExt(in, to), nil\n\t\t}\n\t\tif inSize == outSize {\n\t\t\treturn in, nil\n\t\t}\n\t\treturn prog.Compiler.CurrentBlock().NewFPTrunc(in, to), nil\n\t}\n\n\t// If the cast would not change the type, just return the in value, nil\n\tif types.Equal(inType, to) {\n\t\treturn in, nil\n\t}\n\n\tif types.IsPointer(inType) && types.IsInt(to) {\n\t\treturn 
prog.Compiler.CurrentBlock().NewPtrToInt(in, to), nil\n\t}\n\n\tif types.IsInt(inType) && types.IsPointer(to) {\n\t\treturn prog.Compiler.CurrentBlock().NewIntToPtr(in, to), nil\n\t}\n\n\treturn nil, fmt.Errorf(\"Failed to typecast type %s to %s\", inType.String(), to)\n}", "func (this *ObjectAdd) Type() value.Type { return value.OBJECT }", "func (arg1 *UConverter) GetType() UConverterType", "func MultitypeSum(input []interface{}) float64 {\n\treturn 0.0 // TODO implement\n}", "func GetType(args ...interface{}) {\n\tfor _, v := range args {\n\t\tswitch v.(type) {\n\t\tcase int32:\n\t\t\tfmt.Println(\"This is a int32\")\n\t\tcase int64:\n\t\t\tfmt.Println(\"This is a int64\")\n\t\tcase float32:\n\t\t\tfmt.Println(\"This is a float32\")\n\t\tcase float64:\n\t\t\tfmt.Println(\"This is a float64\")\n\t\tcase string:\n\t\t\tfmt.Println(\"This is a string\")\n\t\tdefault:\n\t\t\tfmt.Println(\"unknown type\")\n\t\t}\n\t}\n}", "func (q QueryFunc) ReturnType() string {\n\tnm := q.Output.Name\n\tif q.IsList {\n\t\treturn \"[]\" + nm\n\t}\n\treturn \"*\" + nm\n}", "func (fn NoArgFunc) Type() Type { return fn.SQLType }", "func replyDataType(reply interface{}, err error) (dt DataType, outputErr error) {\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tswitch reply := reply.(type) {\n\tcase string:\n\t\tswitch reply {\n\t\tcase \"FLOAT\":\n\t\t\tdt = TypeFloat\n\t\tcase \"DOUBLE\":\n\t\t\tdt = TypeDouble\n\t\tcase \"INT8\":\n\t\t\tdt = TypeInt8\n\t\tcase \"INT16\":\n\t\t\tdt = TypeInt16\n\t\tcase \"INT32\":\n\t\t\tdt = TypeInt32\n\t\tcase \"INT64\":\n\t\t\tdt = TypeInt64\n\t\tcase \"UINT8\":\n\t\t\tdt = TypeUint8\n\t\tcase \"UINT16\":\n\t\t\tdt = TypeUint16\n\t\t}\n\t\treturn dt, nil\n\tcase nil:\n\t\treturn \"\", ErrNil\n\n\t}\n\treturn \"\", fmt.Errorf(\"redisai-go: unexpected type for replyDataType, got type %T\", reply)\n}", "func Test_Addtyping(t *testing.T) {\n\tws := New()\n\tws.Move(2)\n\n\ttesthelpers.AssertBool(t, true, ws.Beforeslice(0))\n\ttesthelpers.AssertBool(t, 
true, ws.Beforeslice(1))\n\n\tws.Addtyping([]byte{'a'}, 2)\n\ttesthelpers.AssertString(t, \"a\", ws.String())\n\n\ttesthelpers.AssertBool(t, true, ws.Beforeslice(1))\n\n\ttesthelpers.AssertBool(t, true, ws.Inslice(2))\n\ttesthelpers.AssertBool(t, true, ws.Inslice(3))\n\ttesthelpers.AssertBool(t, false, ws.Inslice(4))\n\n\ttesthelpers.AssertInt(t, 2, ws.Offset)\n\ttesthelpers.AssertBool(t, true, ws.Afterslice(2, 0))\n\ttesthelpers.AssertBool(t, false, ws.Afterslice(1, 1))\n\n\tws.Addtyping([]byte{'b'}, 3)\n\ttesthelpers.AssertString(t, \"ab\", ws.String())\n\ttesthelpers.AssertBool(t, true, ws.Inslice(4))\n\ttesthelpers.AssertBool(t, false, ws.Inslice(5))\n\n\tp, q := ws.Extent()\n\ttesthelpers.AssertInt(t, 2, p)\n\ttesthelpers.AssertInt(t, 4, q)\n\n\tws.Addtyping([]byte{'c'}, 4)\n\ttesthelpers.AssertString(t, \"abc\", ws.String())\n\ttesthelpers.AssertInt(t, len(\"abc\"), ws.Ntyper())\n\n\ttesthelpers.AssertInt(t, 2, ws.Offset)\n\tws.Addtyping([]byte{'X'}, 2)\n\ttesthelpers.AssertString(t, \"Xabc\", ws.String())\n\ttesthelpers.AssertInt(t, len(\"Xabc\"), ws.Ntyper())\n\n\tws.Addtyping([]byte{'Y'}, 3)\n\ttesthelpers.AssertString(t, \"XYabc\", ws.String())\n\n}", "func main() {\n\t/* Main types \n\t string\n\t bool\n\t int\n\t int\t int8\t int16\t int32\t int64\n\t uint\t uint8\t uint16\t uint32 uint64 uintptr\n\t byte - alias for uint8\n\t rune - alias for int32\n\t float32 float64\n\t complex64 complex128\n\t*/\n\t\n\n\t// Using var\n\t// var name = \"Mike\"\n\tvar age int32 = 20\n\tconst isCool = true\n\n\t// Shorthand\n\t// name := \"Mike\"\n\t// email := \"[email protected]\"\n\tsize := 1.3\n\n\tname, email := \"Mike\", \"[email protected]\"\n\n\tfmt.Println(name, age, isCool, email)\n\n\t// get type\n\tfmt.Printf(\"%T\\n\", name)\n\tfmt.Printf(\"%T\\n\", age)\n\tfmt.Printf(\"%T\\n\", isCool)\n\tfmt.Printf(\"%T\\n\", size)\n\t\n}", "func (this *ObjectLength) Type() value.Type { return value.NUMBER }", "func (this *ObjectLength) Type() value.Type { return 
value.NUMBER }", "func oneType(records models.Records) interface{} {\n\t//fmt.Printf(\"yamlwrite:oneType len=%d type=%s\\n\", len(records), records[0].Type)\n\trtype := records[0].Type\n\tswitch rtype {\n\tcase \"A\", \"AAAA\", \"NS\":\n\t\tvv := complexVals{\n\t\t\tType: rtype,\n\t\t\tTTL: records[0].TTL,\n\t\t}\n\t\tif len(records) == 1 {\n\t\t\tvv.Value = records[0].GetTargetField()\n\t\t} else {\n\t\t\tfor _, rc := range records {\n\t\t\t\tvv.Values = append(vv.Values, rc.GetTargetCombined())\n\t\t\t}\n\t\t}\n\t\treturn vv\n\tcase \"MX\":\n\t\tvv := complexFields{\n\t\t\tType: rtype,\n\t\t\tTTL: records[0].TTL,\n\t\t}\n\t\tfor _, rc := range records {\n\t\t\tvv.Fields = append(vv.Fields, fields{\n\t\t\t\tValue: rc.GetTargetField(),\n\t\t\t\tPriority: rc.MxPreference,\n\t\t\t})\n\t\t}\n\t\treturn vv\n\tcase \"SRV\":\n\t\tvv := complexFields{\n\t\t\tType: rtype,\n\t\t\tTTL: records[0].TTL,\n\t\t}\n\t\tfor _, rc := range records {\n\t\t\tvv.Fields = append(vv.Fields, fields{\n\t\t\t\tValue: rc.GetTargetField(),\n\t\t\t\tPriority: rc.SrvPriority,\n\t\t\t\tSrvWeight: rc.SrvWeight,\n\t\t\t\tSrvPort: rc.SrvPort,\n\t\t\t})\n\t\t}\n\t\treturn vv\n\tcase \"TXT\":\n\t\tvv := complexVals{\n\t\t\tType: rtype,\n\t\t\tTTL: records[0].TTL,\n\t\t}\n\t\tif len(records) == 1 {\n\t\t\tvv.Value = strings.Replace(models.StripQuotes(records[0].GetTargetField()), `;`, `\\;`, -1)\n\t\t} else {\n\t\t\tfor _, rc := range records {\n\t\t\t\tvv.Values = append(vv.Values, models.StripQuotes(rc.GetTargetCombined()))\n\t\t\t}\n\t\t}\n\t\treturn vv\n\n\tdefault:\n\t\tpanic(fmt.Errorf(\"yamlwrite:oneType rtype=%s not implemented\", rtype))\n\t}\n}", "func primitiveConversion(inType, outType reflect.Type) (string, bool) {\n\tswitch inType.Kind() {\n\tcase reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,\n\t\treflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,\n\t\treflect.Float32, reflect.Float64:\n\t\tswitch outType.Kind() {\n\t\tcase reflect.Int, 
reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,\n\t\t\treflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,\n\t\t\treflect.Float32, reflect.Float64:\n\t\t\treturn outType.Name(), true\n\t\t}\n\t}\n\treturn \"\", false\n}", "func numberType(f func(...Expression) Expression) func(...Expression) (Expression, error) {\n\n\treturn func(args ...Expression) (Expression, error) {\n\t\tfor _, arg := range args {\n\t\t\tif _, ok := arg.(Number); !ok {\n\t\t\t\treturn nil, errors.New(fmt.Sprintf(\"Type Error: Recieved %T, Expected Number\", arg))\n\t\t\t}\n\t\t}\n\t\treturn f(args...), nil\n\t}\n}", "func matchFuncType(src, test reflect.Type) error {\n\tvar errorStr string\n\n\tsourceKind := src.Kind()\n\ttestKind := test.Kind()\n\tif sourceKind != reflect.Slice {\n\t\terrorStr = fmt.Sprintf(\"Expected type (slice) but received src of type (%v)\\n\", sourceKind)\n\t} else if testKind != reflect.Func {\n\t\terrorStr = fmt.Sprintf(\"Expected type (func) but received testFunc of type (%v)\\n\", testKind)\n\t}\n\n\ttestOutput := test.Out(0).Kind()\n\ttestInput := test.In(0)\n\telemType := src.Elem()\n\tif test.NumIn() != 1 {\n\t\terrorStr = \"Expected testFunc to have 1 input\\n\"\n\t} else if testInput != elemType {\n\t\terrorStr = fmt.Sprintf(\"Received slice of type (%v), but testFunc expects type (%v)\", elemType, testInput)\n\t} else if test.NumOut() != 1 || testOutput != reflect.Bool {\n\t\terrorStr = \"Expected func to have 1 return of type (bool)\\n\"\n\t}\n\n\tif errorStr != \"\" {\n\t\treturn errors.New(errorStr)\n\t}\n\n\treturn nil\n}", "func matchFuncType(src, test reflect.Type) error {\n\tvar errorStr string\n\n\tsourceKind := src.Kind()\n\ttestKind := test.Kind()\n\tif sourceKind != reflect.Slice {\n\t\terrorStr = fmt.Sprintf(\"Expected type (slice) but received src of type (%v)\\n\", sourceKind)\n\t} else if testKind != reflect.Func {\n\t\terrorStr = fmt.Sprintf(\"Expected type (func) but received testFunc of type (%v)\\n\", 
testKind)\n\t}\n\n\ttestOutput := test.Out(0).Kind()\n\ttestInput := test.In(0)\n\telemType := src.Elem()\n\tif test.NumIn() != 1 {\n\t\terrorStr = \"Expected testFunc to have 1 input\\n\"\n\t} else if testInput != elemType {\n\t\terrorStr = fmt.Sprintf(\"Received slice of type (%v), but testFunc expects type (%v)\", elemType, testInput)\n\t} else if test.NumOut() != 1 || testOutput != reflect.Bool {\n\t\terrorStr = \"Expected func to have 1 return of type (bool)\\n\"\n\t}\n\n\tif errorStr != \"\" {\n\t\treturn errors.New(errorStr)\n\t}\n\n\treturn nil\n}", "func FuncAddRet() error { return nil }", "func (this *DateDiffStr) Type() value.Type { return value.NUMBER }", "func (decode *decoder) ensureOutType(outType reflect.Type) error {\n\tswitch outType.Kind() {\n\tcase reflect.Slice:\n\t\tfallthrough\n\tcase reflect.Array:\n\t\treturn nil\n\t}\n\treturn fmt.Errorf(\"cannot use \" + outType.String() + \", only slice or array supported\")\n}", "func (this *Mod) Type() value.Type { return value.NUMBER }", "func (this *DatePartStr) Type() value.Type { return value.NUMBER }", "func (m *Module) typeDef(old *ast.NamedType) {\n\ttyp := m.getType(old.Name)\n\tdef := m.irType(old.Def)\n\tswitch typ := typ.(type) {\n\tcase *types.VoidType:\n\t\t_, ok := def.(*types.VoidType)\n\t\tif !ok {\n\t\t\tpanic(fmt.Errorf(\"invalid type; expected *types.VoidType, got %T\", def))\n\t\t}\n\t\t// nothing to do.\n\tcase *types.FuncType:\n\t\td, ok := def.(*types.FuncType)\n\t\tif !ok {\n\t\t\tpanic(fmt.Errorf(\"invalid type; expected *types.FuncType, got %T\", def))\n\t\t}\n\t\ttyp.Ret = d.Ret\n\t\ttyp.Params = d.Params\n\t\ttyp.Variadic = d.Variadic\n\tcase *types.IntType:\n\t\td, ok := def.(*types.IntType)\n\t\tif !ok {\n\t\t\tpanic(fmt.Errorf(\"invalid type; expected *types.IntType, got %T\", def))\n\t\t}\n\t\ttyp.Size = d.Size\n\tcase *types.FloatType:\n\t\td, ok := def.(*types.FloatType)\n\t\tif !ok {\n\t\t\tpanic(fmt.Errorf(\"invalid type; expected *types.FloatType, got %T\", 
def))\n\t\t}\n\t\ttyp.Kind = d.Kind\n\tcase *types.PointerType:\n\t\td, ok := def.(*types.PointerType)\n\t\tif !ok {\n\t\t\tpanic(fmt.Errorf(\"invalid type; expected *types.PointerType, got %T\", def))\n\t\t}\n\t\ttyp.Elem = d.Elem\n\t\ttyp.AddrSpace = d.AddrSpace\n\tcase *types.VectorType:\n\t\td, ok := def.(*types.VectorType)\n\t\tif !ok {\n\t\t\tpanic(fmt.Errorf(\"invalid type; expected *types.VectorType, got %T\", def))\n\t\t}\n\t\ttyp.Elem = d.Elem\n\t\ttyp.Len = d.Len\n\tcase *types.LabelType:\n\t\t_, ok := def.(*types.LabelType)\n\t\tif !ok {\n\t\t\tpanic(fmt.Errorf(\"invalid type; expected *types.LabelType, got %T\", def))\n\t\t}\n\t\t// nothing to do.\n\tcase *types.MetadataType:\n\t\t_, ok := def.(*types.MetadataType)\n\t\tif !ok {\n\t\t\tpanic(fmt.Errorf(\"invalid type; expected *types.MetadataType, got %T\", def))\n\t\t}\n\t\t// nothing to do.\n\tcase *types.ArrayType:\n\t\td, ok := def.(*types.ArrayType)\n\t\tif !ok {\n\t\t\tpanic(fmt.Errorf(\"invalid type; expected *types.ArrayType, got %T\", def))\n\t\t}\n\t\ttyp.Elem = d.Elem\n\t\ttyp.Len = d.Len\n\tcase *types.StructType:\n\t\td, ok := def.(*types.StructType)\n\t\tif !ok {\n\t\t\tpanic(fmt.Errorf(\"invalid type; expected *types.StructType, got %T\", def))\n\t\t}\n\t\ttyp.Fields = d.Fields\n\t\ttyp.Opaque = d.Opaque\n\tdefault:\n\t\tpanic(fmt.Errorf(\"support for type %T not yet implemented\", typ))\n\t}\n}", "func (s *BaseSyslParserListener) EnterTransform_return_type(ctx *Transform_return_typeContext) {}", "func appendParams(container []*factory.MetaData, params ...interface{}) (retVal []*factory.MetaData, err error) {\n\tretVal = container\n\tif len(params) == 0 || params[0] == nil {\n\t\terr = ErrInvalidObjectType\n\t\treturn\n\t}\n\n\tif len(params) > 1 && reflect.TypeOf(params[0]).Kind() != reflect.String {\n\t\tfor _, param := range params {\n\t\t\tretVal, err = appendParam(retVal, param)\n\t\t}\n\t} else {\n\t\tretVal, err = appendParam(retVal, params...)\n\t}\n\treturn\n}", "func (this 
*Element) Type() value.Type { return value.JSON }", "func type2CR1S2(query1, query2, split1, split2, split3 string, inputType int) {\n\tdb := dbConnec()\n\trows, err := db.Query(query1)\n\tdefer rows.Close()\n\tvar sumCR1, sumS1, sumS2, f, s float64\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tfor rows.Next() {\n\t\tf++\n\t\tvar x string\n\t\terr := rows.Scan(&x)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tif inputType == 2 {\n\t\t\tlog.Println(split1, \":\", x)\n\t\t}\n\t\tif inputType == 3 {\n\t\t\tif sumC1, err := strconv.ParseFloat(x, 32); err == nil {\n\t\t\t\tsumCR1 += sumC1\n\t\t\t}\n\t\t}\n\t}\n\tif inputType == 3 {\n\t\tlog.Println(\"Average\", split1, \": \", sumCR1/f)\n\t}\n\n\trows1, err := db.Query(query2)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tfor rows1.Next() {\n\t\ts++\n\t\tvar y, z string\n\t\terr := rows1.Scan(&y, &z)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tif inputType == 2 {\n\t\t\tlog.Println(split2, \":\", y, split3, \":\", z)\n\t\t}\n\t\tif inputType == 3 {\n\t\t\tif sumSens1, err := strconv.ParseFloat(y, 32); err == nil {\n\t\t\t\tsumS1 += sumSens1\n\t\t\t}\n\t\t\tif sumSens2, err := strconv.ParseFloat(z, 32); err == nil {\n\t\t\t\tsumS2 += sumSens2\n\t\t\t}\n\t\t}\n\t}\n\tif inputType == 3 {\n\t\tlog.Println(\"Average\", split2, \": \", sumS1/s, \" Average\", split3, \": \", sumS2/s)\n\t}\n}", "func add1(x, y int) int{\n return x + y\n}", "func type2CR1S3(query1, query2, split1, split2, split3, split4 string, inputType int) {\n\tdb := dbConnec()\n\trows, err := db.Query(query1)\n\tdefer rows.Close()\n\tvar sumCR1, sumS1, sumS2, sumS3, f, s float64\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tfor rows.Next() {\n\t\tf++\n\t\tvar x string\n\t\terr := rows.Scan(&x)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tif inputType == 2 {\n\t\t\tlog.Println(split1, \":\", x)\n\t\t}\n\t\tif inputType == 3 {\n\t\t\tif sumC1, err := strconv.ParseFloat(x, 32); err == nil 
{\n\t\t\t\tsumCR1 += sumC1\n\t\t\t}\n\t\t}\n\t}\n\tif inputType == 3 {\n\t\tlog.Println(\"Average\", split1, \": \", sumCR1/f)\n\t}\n\trows1, err := db.Query(query2)\n\tdefer rows1.Close()\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tfor rows1.Next() {\n\t\ts++\n\t\tvar y, z, w string\n\t\terr := rows1.Scan(&y, &z, &w)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tif inputType == 2 {\n\t\t\tlog.Println(split2, \":\", y, \" \", split3, \":\", z, \" \", split4, \":\", w)\n\t\t}\n\t\tif inputType == 3 {\n\t\t\tif sumSens1, err := strconv.ParseFloat(y, 32); err == nil {\n\t\t\t\tsumS1 += sumSens1\n\t\t\t}\n\t\t\tif sumSens2, err := strconv.ParseFloat(z, 32); err == nil {\n\t\t\t\tsumS2 += sumSens2\n\t\t\t}\n\t\t\tif sumSens3, err := strconv.ParseFloat(w, 32); err == nil {\n\t\t\t\tsumS3 += sumSens3\n\t\t\t}\n\t\t}\n\t}\n\tif inputType == 3 {\n\t\tlog.Println(\"Average\", split2, \": \", sumS1/s, \" Average\", split3, \": \", sumS2/s, \" Average\", split4, \": \", sumS3/s)\n\t}\n}", "func primitiveTypes() {\n\tvar i int = 1\n\tvar f float64 = 1.1\n\tvar b bool = false\n\ts := \"Hello Go\"\n\tvar stringVal, intVal, boolVal = \"String value\", 1, true\n\tprintln(i, f, b, s, stringVal, intVal, boolVal)\n}", "func (w *Writer) getType(obj interface{}, length int) []byte {\n\t// check length\n\tw.checkLength(length)\n\tvar tmp = make([]byte, 8)\n\n\tswitch objType := obj.(type) {\n\tcase int8:\n\t\ttmp[0] = byte(objType)\n\tcase uint8:\n\t\ttmp[0] = byte(objType)\n\tcase int16:\n\t\ttmp[0] = byte(objType)\n\t\ttmp[1] = byte(objType >> 8)\n\tcase uint16:\n\t\ttmp[0] = byte(objType)\n\t\ttmp[1] = byte(objType >> 8)\n\tcase int32:\n\t\ttmp[0] = byte(objType)\n\t\ttmp[1] = byte(objType >> 8)\n\t\ttmp[2] = byte(objType >> 16)\n\t\ttmp[3] = byte(objType >> 24)\n\tcase uint32:\n\t\ttmp[0] = byte(objType)\n\t\ttmp[1] = byte(objType >> 8)\n\t\ttmp[2] = byte(objType >> 16)\n\t\ttmp[3] = byte(objType >> 24)\n\tcase int:\n\t\ttmp[0] = byte(objType)\n\t\ttmp[1] = 
byte(objType >> 8)\n\t\ttmp[2] = byte(objType >> 16)\n\t\ttmp[3] = byte(objType >> 24)\n\tcase int64:\n\t\ttmp[0] = byte(objType)\n\t\ttmp[1] = byte(objType >> 8)\n\t\ttmp[2] = byte(objType >> 16)\n\t\ttmp[3] = byte(objType >> 24)\n\t\ttmp[4] = byte(objType >> 32)\n\t\ttmp[5] = byte(objType >> 40)\n\t\ttmp[6] = byte(objType >> 48)\n\t\ttmp[7] = byte(objType >> 56)\n\tcase uint64:\n\t\ttmp[0] = byte(objType)\n\t\ttmp[1] = byte(objType >> 8)\n\t\ttmp[2] = byte(objType >> 16)\n\t\ttmp[3] = byte(objType >> 24)\n\t\ttmp[4] = byte(objType >> 32)\n\t\ttmp[5] = byte(objType >> 40)\n\t\ttmp[6] = byte(objType >> 48)\n\t\ttmp[7] = byte(objType >> 56)\n\tdefault:\n\t\tlog.Error(\"Unknown data type:\", reflect.TypeOf(obj))\n\t\treturn nil\n\t}\n\n\treturn tmp[:length]\n}", "func (r *replayer) value(s *compiler.S, val *codegen.Value, ty semantic.Type) *codegen.Value {\n\tswitch ty := semantic.Underlying(ty).(type) {\n\tcase *semantic.Builtin:\n\t\tswitch ty {\n\t\tcase semantic.BoolType:\n\t\t\treturn r.asmBool(s, val)\n\n\t\tcase semantic.IntType, semantic.UintType, semantic.SizeType, semantic.CharType,\n\t\t\tsemantic.Int8Type, semantic.Uint8Type,\n\t\t\tsemantic.Int16Type, semantic.Uint16Type,\n\t\t\tsemantic.Int32Type, semantic.Uint32Type,\n\t\t\tsemantic.Int64Type, semantic.Uint64Type:\n\t\t\treturn r.asmValue(s, val.Cast(r.T.Uint64), r.asmType(ty))\n\n\t\tcase semantic.Float32Type:\n\t\t\treturn r.asmValue(s, val.Bitcast(r.T.Uint32).Cast(r.T.Uint64), r.asmType(ty))\n\n\t\tcase semantic.Float64Type:\n\t\t\treturn r.asmValue(s, val.Bitcast(r.T.Uint64), r.asmType(ty))\n\t\t}\n\tcase *semantic.Pointer:\n\t\treturn s.Select(s.LessThan(val, s.Scalar(observableAddressStart).Cast(val.Type())),\n\t\t\tr.asmAbsolutePtr(s, val),\n\t\t\tr.observedPtr(s, val, ty))\n\tcase *semantic.StaticArray:\n\t\tif isRemapped(ty.ValueType) {\n\t\t\tr.Fail(\"Static array parameters of remapped types currently not supported\")\n\t\t}\n\t\treturn r.addConstant(s, val, ty)\n\t}\n\tr.Fail(\"Unhandled 
type %v\", ty.Name())\n\treturn nil\n}", "func type2CR2S1(query1, query2, split1, split2, split3 string, inputType int) {\n\tdb := dbConnec()\n\trows, err := db.Query(query1)\n\tdefer rows.Close()\n\tvar sumCR1, sumCR2, sumS1, f, s float64\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tfor rows.Next() {\n\t\tf++\n\t\tvar x, y string\n\t\terr := rows.Scan(&x, &y)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tif inputType == 2 {\n\t\t\tlog.Println(split1, \":\", x, split2, \":\", y)\n\t\t}\n\t\tif inputType == 3 {\n\t\t\tif sumC1, err := strconv.ParseFloat(x, 32); err == nil {\n\t\t\t\tsumCR1 += sumC1\n\t\t\t}\n\t\t\tif sumC2, err := strconv.ParseFloat(y, 32); err == nil {\n\t\t\t\tsumCR2 += sumC2\n\t\t\t}\n\t\t}\n\t}\n\tif inputType == 3 {\n\t\tlog.Println(\"Average\", split1, \": \", sumCR1/f, \"Average\", split2, \": \", sumCR2/f)\n\t}\n\n\trows1, err := db.Query(query2)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tfor rows1.Next() {\n\t\ts++\n\t\tvar z string\n\t\terr := rows1.Scan(&z)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tif inputType == 2 {\n\t\t\tlog.Println(split3, \":\", z)\n\t\t}\n\t\tif inputType == 3 {\n\t\t\tif sumSens1, err := strconv.ParseFloat(z, 32); err == nil {\n\t\t\t\tsumS1 += sumSens1\n\t\t\t}\n\t\t}\n\t}\n\tif inputType == 3 {\n\t\tlog.Println(\"Average\", split3, \": \", sumS1/s)\n\t}\n}", "func (fn *formulaFuncs) TYPE(argsList *list.List) formulaArg {\n\tif argsList.Len() != 1 {\n\t\treturn newErrorFormulaArg(formulaErrorVALUE, \"TYPE requires 1 argument\")\n\t}\n\ttoken := argsList.Front().Value.(formulaArg)\n\tswitch token.Type {\n\tcase ArgError:\n\t\treturn newNumberFormulaArg(16)\n\tcase ArgMatrix:\n\t\treturn newNumberFormulaArg(64)\n\tcase ArgNumber, ArgEmpty:\n\t\tif token.Boolean {\n\t\t\treturn newNumberFormulaArg(4)\n\t\t}\n\t\treturn newNumberFormulaArg(1)\n\tdefault:\n\t\treturn newNumberFormulaArg(2)\n\t}\n}", "func add(this js.Value, args []js.Value) interface{} {\n\ta := 
args[0].Int()\n\tb := args[1].Int()\n\n\tthis.Set(\"result\", js.ValueOf(a+b))\n\treturn nil\n}", "func type1(){\n\tsum := 0\n\tvar n int\n\tfmt.Scanln(&n)\n\tfor i := 0; i < n; i++{\n\t\tsum+=i\n\t}\n\tfmt.Println(sum)\n}", "func genExample(schema *openapi3.Schema) interface{} {\n\tif schema.Example != nil {\n\t\treturn schema.Example\n\t}\n\n\tif schema.Default != nil {\n\t\treturn schema.Default\n\t}\n\n\tswitch schema.Type {\n\tcase \"null\":\n\t\treturn nil\n\tcase \"bool\":\n\t\treturn true\n\tcase \"integer\":\n\t\treturn 1\n\tcase \"number\":\n\t\treturn 1.0\n\tcase \"string\":\n\t\treturn \"string\"\n\tcase \"array\":\n\t\titem := genExample(schema.Items.Value)\n\t\tcount := 1\n\t\tif schema.MinItems > 0 {\n\t\t\tcount = int(schema.MinItems)\n\t\t}\n\n\t\tvalue := []interface{}{}\n\t\tfor i := 0; i < count; i++ {\n\t\t\tvalue = append(value, item)\n\t\t}\n\t\treturn value\n\tcase \"object\":\n\t\tvalue := map[string]interface{}{}\n\t\tfor k, s := range schema.Properties {\n\t\t\tvalue[k] = genExample(s.Value)\n\t\t}\n\t\treturn value\n\t}\n\n\treturn nil\n}", "func (a *Addition) Type(left querypb.Type) querypb.Type {\n\treturn left\n}", "func Add(out1 *LooseFieldElement, arg1 *TightFieldElement, arg2 *TightFieldElement) {\n\tx1 := (arg1[0] + arg2[0])\n\tx2 := (arg1[1] + arg2[1])\n\tx3 := (arg1[2] + arg2[2])\n\tout1[0] = x1\n\tout1[1] = x2\n\tout1[2] = x3\n}", "func (fieldsInfo *objectFieldsInfo) addType(v interface{}) *objectFieldsInfo {\n\n\tfis, err := getFieldsInfo(v, \"protobuf\", \"name\")\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\tt, err := getStructType(v)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\tif t == nil {\n\t\tpanic(\"addType: type must be not nil\")\n\t}\n\n\tfieldsInfo.fieldsReflect[t] = fis\n\n\tfor k := range fis {\n\t\tif l, ok := fieldsInfo.nameFieldsType[k]; ok {\n\t\t\tfieldsInfo.nameFieldsType[k] = append(l, t)\n\t\t} else {\n\t\t\tfieldsInfo.nameFieldsType[k] = []reflect.Type{t}\n\t\t}\n\t}\n\n\treturn 
fieldsInfo\n}", "func checkType(i interface{}) {\n\t// create instance of interface and check its type (note: type is a keyword here)\n\tswitch v := i.(type) {\n\t// check if it's an int\n\tcase int:\n\t\tfmt.Printf(\"Type: %T and Value: %v\\n\", v, v)\n\t// check if it's a float\n\tcase float64:\n\t\tfmt.Printf(\"Type: %T and Value: %v\\n\", v, v)\n\t// if neither of the above, default to below and return whatever it actually is\n\tdefault:\n\t\tfmt.Printf(\"Type: %T and Value: %v\\n\", v, v)\n\t}\n}", "func encodeForRequest(v interface{}) interface{} {\n\tif v == nil {\n\t\treturn nil\n\t}\n\n\trv := reflect.ValueOf(v)\n\trvi := reflect.Indirect(rv)\n\trt := rvi.Type()\n\tif rt.Kind() == reflect.Struct {\n\t\tswitch v.(type) {\n\t\tcase time.Time, *time.Time, Date, *Date:\n\t\t\tswitch v.(type) {\n\t\t\tcase time.Time:\n\t\t\t\treturn Date(v.(time.Time))\n\t\t\tcase *time.Time:\n\t\t\t\treturn Date(*v.(*time.Time))\n\t\t\tcase Date, *Date:\n\t\t\t\treturn v\n\t\t\tcase string:\n\t\t\t\treturn map[string]interface{}{\n\t\t\t\t\t\"__type\": \"Date\",\n\t\t\t\t\t\"iso\": v.(string),\n\t\t\t\t}\n\t\t\t}\n\t\tcase Pointer, *Pointer:\n\t\t\treturn v\n\t\tcase GeoPoint, *GeoPoint:\n\t\t\treturn v\n\t\tcase ACL, *ACL:\n\t\t\treturn v\n\t\tcase AuthData, *AuthData:\n\t\t\tb, _ := json.Marshal(v)\n\t\t\treturn string(b)\n\t\tdefault:\n\t\t\tvar cname string\n\n\t\t\tif tmp, ok := reflect.Zero(rvi.Type()).Interface().(iClassName); ok {\n\t\t\t\tcname = tmp.ClassName()\n\t\t\t} else if tmp, ok := reflect.New(rvi.Type()).Interface().(iClassName); ok {\n\t\t\t\tcname = tmp.ClassName()\n\t\t\t} else {\n\t\t\t\tcname = rt.Name()\n\t\t\t}\n\n\t\t\tif idf := rvi.FieldByName(\"Id\"); idf.IsValid() {\n\t\t\t\tid := idf.Interface().(string)\n\t\t\t\treturn Pointer{\n\t\t\t\t\tId: id,\n\t\t\t\t\tClassName: cname,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t} else if rt.Kind() == reflect.Slice {\n\t\tvals := make([]interface{}, 0, rv.Len())\n\t\tfor i := 0; i < rv.Len(); i++ {\n\t\t\tvals = 
append(vals, encodeForRequest(rv.Index(i).Interface()))\n\t\t}\n\t\treturn vals\n\t}\n\n\treturn v\n}", "func getType(arg Any) string {\n\treturn format(\"%T\", arg)\n}", "func typeValue(valueType string, value interface{}) interface{} {\n\tswitch valueType {\n\tdefault:\n\t\treturn 0\n\tcase \"int\":\n\t\treturn int(value.(float64))\n\tcase \"float64\":\n\t\treturn value.(float64)\n\t}\n}", "func typeOfReturnValue(value interface{}) (t Type, className string, err error) {\n\tif v, ok := value.(string); ok {\n\t\treturn typeOfValue(ObjectType(v))\n\t}\n\treturn typeOfValue(value)\n}", "func typeToInt(args ...DataType) (DataType, error) {\n\tif len(args) != 1 {\n\t\treturn nil, fmt.Errorf(\"Type.toInt expects exactly 1 argument\")\n\t}\n\n\tswitch object := args[0].(type) {\n\tcase *StringType:\n\t\ti, err := strconv.Atoi(object.Value)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"Type.toInt can't convert '%s' to Integer\", object.Value)\n\t\t}\n\t\treturn &IntegerType{Value: int64(i)}, nil\n\tcase *FloatType:\n\t\treturn &IntegerType{Value: int64(object.Value)}, nil\n\tcase *BooleanType:\n\t\tresult := 0\n\t\tif object.Value {\n\t\t\tresult = 1\n\t\t}\n\t\treturn &IntegerType{Value: int64(result)}, nil\n\tcase *IntegerType:\n\t\treturn object, nil\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Type.toInt can't convert '%s' to Integer\", object.Type())\n\t}\n}", "func isAcceptingInput(oldFn reflect.Value) bool {\n\treturn oldFn.Type().NumIn() == 1\n}", "func AutotypeValue(input interface{}) interface{} {\n\tif strValue, ok := input.(string); ok {\n\t\tif intVal, err := strconv.ParseInt(strValue, 10, 64); err == nil {\n\t\t\treturn intVal\n\t\t} else if floatVal, err := strconv.ParseFloat(strValue, 64); err == nil {\n\t\t\treturn floatVal\n\t\t} else if strValue == \"true\" {\n\t\t\treturn true\n\t\t} else if strValue == \"false\" {\n\t\t\treturn false\n\t\t} else if strValue == \"null\" {\n\t\t\treturn nil\n\t\t}\n\t}\n\n\treturn input\n}", "func Add(t Type) Type 
{\n\tTypes.Store(t.Extension, t)\n\treturn t\n}", "func toPattern(theType string) string {\n\tvar thePattern string\n\n\tswitch theType {\n\tcase \"string\":\n\t\tthePattern = \"\\\\w+\" //xmmm or [a-zA-Z0-9]+\n\n\tcase \"int\":\n\t\tthePattern = \"[0-9]+\"\n\tdefault:\n\n\t}\n\n\treturn thePattern\n}", "func (m *BinaryOperatorAdd) Type() Type {\n\treturn IntType{}\n}", "func typeAssert(value interface{}) {\n\tv, ok := value.(string)\n\n\tif ok {\n\t\tfmt.Println(\"It's string and value : \", v)\n\t\treturn\n\t}\n\n\ti, ok := value.(int)\n\tif ok {\n\t\tfmt.Println(\"It's int and value : \", i)\n\t\treturn\n\t}\n\n}", "func _() {\n\tX(Interface[*F /* ERROR got 1 arguments but 2 type parameters */ [string]](Impl{}))\n}", "func convertField(v interface{}) interface{} {\n\tswitch v := v.(type) {\n\tcase float64:\n\t\treturn v\n\tcase int64:\n\t\treturn v\n\tcase string:\n\t\treturn v\n\tcase bool:\n\t\treturn v\n\tcase int:\n\t\treturn int64(v)\n\tcase uint:\n\t\treturn uint64(v)\n\tcase uint64:\n\t\treturn uint64(v)\n\tcase []byte:\n\t\treturn string(v)\n\tcase int32:\n\t\treturn int64(v)\n\tcase int16:\n\t\treturn int64(v)\n\tcase int8:\n\t\treturn int64(v)\n\tcase uint32:\n\t\treturn uint64(v)\n\tcase uint16:\n\t\treturn uint64(v)\n\tcase uint8:\n\t\treturn uint64(v)\n\tcase float32:\n\t\treturn float64(v)\n\tcase *float64:\n\t\tif v != nil {\n\t\t\treturn *v\n\t\t}\n\tcase *int64:\n\t\tif v != nil {\n\t\t\treturn *v\n\t\t}\n\tcase *string:\n\t\tif v != nil {\n\t\t\treturn *v\n\t\t}\n\tcase *bool:\n\t\tif v != nil {\n\t\t\treturn *v\n\t\t}\n\tcase *int:\n\t\tif v != nil {\n\t\t\treturn int64(*v)\n\t\t}\n\tcase *uint:\n\t\tif v != nil {\n\t\t\treturn uint64(*v)\n\t\t}\n\tcase *uint64:\n\t\tif v != nil {\n\t\t\treturn uint64(*v)\n\t\t}\n\tcase *[]byte:\n\t\tif v != nil {\n\t\t\treturn string(*v)\n\t\t}\n\tcase *int32:\n\t\tif v != nil {\n\t\t\treturn int64(*v)\n\t\t}\n\tcase *int16:\n\t\tif v != nil {\n\t\t\treturn int64(*v)\n\t\t}\n\tcase *int8:\n\t\tif v != nil 
{\n\t\t\treturn int64(*v)\n\t\t}\n\tcase *uint32:\n\t\tif v != nil {\n\t\t\treturn uint64(*v)\n\t\t}\n\tcase *uint16:\n\t\tif v != nil {\n\t\t\treturn uint64(*v)\n\t\t}\n\tcase *uint8:\n\t\tif v != nil {\n\t\t\treturn uint64(*v)\n\t\t}\n\tcase *float32:\n\t\tif v != nil {\n\t\t\treturn float64(*v)\n\t\t}\n\tdefault:\n\t\treturn nil\n\t}\n\treturn nil\n}", "func (g *javaGen) javaType(T types.Type) string {\n\tif isErrorType(T) {\n\t\t// The error type is usually translated into an exception in\n\t\t// Java, however the type can be exposed in other ways, such\n\t\t// as an exported field.\n\t\treturn \"String\"\n\t}\n\tswitch T := T.(type) {\n\tcase *types.Basic:\n\t\tswitch T.Kind() {\n\t\tcase types.Bool, types.UntypedBool:\n\t\t\treturn \"boolean\"\n\t\tcase types.Int:\n\t\t\treturn \"long\"\n\t\tcase types.Int8:\n\t\t\treturn \"byte\"\n\t\tcase types.Int16:\n\t\t\treturn \"short\"\n\t\tcase types.Int32, types.UntypedRune: // types.Rune\n\t\t\treturn \"int\"\n\t\tcase types.Int64, types.UntypedInt:\n\t\t\treturn \"long\"\n\t\tcase types.Uint8: // types.Byte\n\t\t\t// TODO(crawshaw): Java bytes are signed, so this is\n\t\t\t// questionable, but vital.\n\t\t\treturn \"byte\"\n\t\t// TODO(crawshaw): case types.Uint, types.Uint16, types.Uint32, types.Uint64:\n\t\tcase types.Float32:\n\t\t\treturn \"float\"\n\t\tcase types.Float64, types.UntypedFloat:\n\t\t\treturn \"double\"\n\t\tcase types.String, types.UntypedString:\n\t\t\treturn \"String\"\n\t\tdefault:\n\t\t\tg.errorf(\"unsupported basic type: %s\", T)\n\t\t\treturn \"TODO\"\n\t\t}\n\tcase *types.Slice:\n\t\telem := g.javaType(T.Elem())\n\t\treturn elem + \"[]\"\n\n\tcase *types.Pointer:\n\t\tif _, ok := T.Elem().(*types.Named); ok {\n\t\t\treturn g.javaType(T.Elem())\n\t\t}\n\t\tpanic(fmt.Sprintf(\"unsupported pointer to type: %s\", T))\n\tcase *types.Named:\n\t\tn := T.Obj()\n\t\tif n.Pkg() != g.pkg {\n\t\t\tnPkgName := \"<nilpkg>\"\n\t\t\tif nPkg := n.Pkg(); nPkg != nil {\n\t\t\t\tnPkgName = 
nPkg.Name()\n\t\t\t}\n\t\t\tpanic(fmt.Sprintf(\"type %s is in package %s, must be defined in package %s\", n.Name(), nPkgName, g.pkg.Name()))\n\t\t}\n\t\t// TODO(crawshaw): more checking here\n\t\treturn n.Name()\n\tdefault:\n\t\tg.errorf(\"unsupported javaType: %#+v, %s\\n\", T, T)\n\t\treturn \"TODO\"\n\t}\n}", "func main() {\n\tfmt.Printf(\"Type of %s is %T\\n\", \"hello\", \"hello\")\n}", "func jsonifyType(typeName string) string {\n\tswitch typeName {\n\tcase stringType:\n\t\treturn stringJSONType\n\tcase boolType:\n\t\treturn booleanJSONType\n\tcase intType, int32Type, int64Type:\n\t\treturn integerJSONType\n\tcase float32Type, float64Type:\n\t\treturn floatJSONType\n\tcase byteType:\n\t\treturn stringJSONType\n\t}\n\tfmt.Println(\"jsonifyType called with a complex type \", typeName)\n\tpanic(\"jsonifyType called with a complex type\")\n}", "func (invoker *pluginInvoker) canonicalize() error {\n\n\tvar inputType0, outputType0, outputType1 reflect.Type = nil, nil, nil\n\tif invoker.fn.Type().NumIn() > 0 {\n\t\tinputType0 = invoker.fn.Type().In(0)\n\t}\n\tif invoker.fn.Type().NumOut() > 0 {\n\t\toutputType0 = invoker.fn.Type().Out(0)\n\t}\n\tif invoker.fn.Type().NumOut() > 1 {\n\t\toutputType1 = invoker.fn.Type().Out(1)\n\t}\n\tif invoker.fn.Type().NumOut() > 2 {\n\t\treturn fmt.Errorf(\"too many return values in %#v\", invoker.fn)\n\t}\n\n\t// Is the function working with channels?\n\tif inputType0 != nil && inputType0.Kind() == reflect.Chan &&\n\t\toutputType0 != nil && outputType0.Kind() == reflect.Chan {\n\t\tif !canReceive(inputType0) || !canReceive(outputType0) {\n\t\t\treturn fmt.Errorf(\"wrong direction of channels in function %#v\", invoker.fn)\n\t\t}\n\n\t\tif outputType1 == nil || outputType1.Kind() == reflect.Chan && outputType1.Elem() == errorType && canReceive(outputType1) {\n\t\t\t// Already exactly what we want\n\t\t\tinvoker.inType = inputType0.Elem()\n\t\t\treturn nil\n\t\t} else {\n\t\t\treturn fmt.Errorf(\"second return type of function 
should be ([<-]chan error) in %#v\", invoker.fn)\n\t\t}\n\t} else {\n\t\t// The original fn could have any of the following forms:\n\t\t// f(X) (Y, error)\n\t\t// f(X) Y\n\t\t// f(X)\n\t\t// f(X) error\n\t\t// f() (Y, error)\n\t\t// f() Y\n\t\t// f()\n\t\t// f() error\n\n\t\toldFn := invoker.fn\n\n\t\t// TODO: check IN or OUT are not channel\n\t\tinvoker.inType = reflect.TypeOf(struct{}{})\n\t\tif oldFn.Type().NumIn() > 1 {\n\t\t\treturn fmt.Errorf(\"too many arguments to non streaming function: %#v\", oldFn)\n\t\t} else if isAcceptingInput(oldFn) {\n\t\t\tinvoker.inType = oldFn.Type().In(0)\n\t\t}\n\n\t\toutType := reflect.TypeOf(struct{}{})\n\t\tif oldFn.Type().NumOut() > 2 {\n\t\t\treturn fmt.Errorf(\"too many return values for non streaming function: %#v\", oldFn)\n\t\t} else if hasReturnValue(oldFn) {\n\t\t\toutType = oldFn.Type().Out(0)\n\t\t}\n\n\t\twrapper := func(args []reflect.Value) []reflect.Value {\n\t\t\tin := args[0]\n\t\t\tout := makeChannel(outType)\n\t\t\terrs := makeChannel(errorType)\n\n\t\t\tgo func() {\n\t\t\t\tdefer out.Close()\n\t\t\t\tdefer errs.Close()\n\n\t\t\t\ti, open := in.Recv()\n\t\t\t\tTrace.Printf(\"[-Function Wrapper->] In function, input = %#v, open=%v\\n\", i, open)\n\t\t\t\tvar fnResult []reflect.Value\n\t\t\t\tif open {\n\t\t\t\t\t// original function receiving actual input\n\t\t\t\t\tfnResult = oldFn.Call([] reflect.Value{i})\n\t\t\t\t} else if !isAcceptingInput(oldFn) {\n\t\t\t\t\t// input channel closed immediately. Invoke original zero-arg fn\n\t\t\t\t\tfnResult = oldFn.Call([]reflect.Value{})\n\t\t\t\t} else {\n\t\t\t\t\t// input closed early, because of earlier (eg unmarshalling) error. 
Do nothing\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\tTrace.Printf(\"[-Function Wrapper->] In function, result = %#v\\n\", unwrap(fnResult))\n\t\t\t\tif isErroring(oldFn) && !fnResult[oldFn.Type().NumOut()-1].IsNil() {\n\t\t\t\t\tTrace.Printf(\"[-Function Wrapper->] Sending error %#v\", fnResult[oldFn.Type().NumOut()-1])\n\t\t\t\t\terrs.Send(fnResult[oldFn.Type().NumOut()-1])\n\t\t\t\t} else if hasReturnValue(oldFn) {\n\t\t\t\t\tTrace.Printf(\"[-Function Wrapper->] Sending result %#v\", fnResult[0])\n\t\t\t\t\tout.Send(fnResult[0])\n\t\t\t\t}\n\t\t\t}()\n\t\t\treturn []reflect.Value{out, errs}\n\t\t}\n\n\t\tcInType := reflect.ChanOf(reflect.RecvDir, invoker.inType)\n\t\tcOutType := reflect.ChanOf(reflect.BothDir, outType)\n\t\tcErrorType := reflect.ChanOf(reflect.BothDir, errorType)\n\t\tt := reflect.FuncOf([]reflect.Type{cInType}, []reflect.Type{cOutType, cErrorType}, false)\n\t\tinvoker.fn = reflect.MakeFunc(t, wrapper)\n\n\t\treturn nil\n\t}\n}", "func main(){\n\t// var num1 float64 = 5.6\n\t// var num2 float64 = 9.5\n\n\t// var num1,num2 float64 = 5.6,9.5\n\t// num1,num2 := 5.6,9.5\t//float64 type would be assigned by default\n\t/////if we declare a variable and dont use it program wouldn't execute\n\t/////and will through an error variable not used\n\t// fmt.Println(\"The sum of \", num1 , \" and \", num2, \"is \" , add(num1,num2))\n\t/*this will give error beacause arguments require float 32 but num1 and num2 \n\tare assigned float 64 by default*/\n\t// w1,w2 := \"Hey\",\"There\"\n\t// fmt.Println(multiple(w1,w2))\n\n\tvar a int = 62\n\tvar b float64 = float64(a) //type casting\n\tfmt.Println(b)\n\tx:=a //x will be type int\n\tfmt.Println(x)\n\n}", "func convertToStarlark(it interface{}) (starlark.Value, error) {\n\tswitch x := it.(type) {\n\tcase int:\n\t\treturn starlark.MakeInt(x), nil\n\tcase float64:\n\t\treturn starlark.Float(x), nil\n\tcase string:\n\t\treturn starlark.String(x), nil\n\tdefault:\n\t\treturn starlark.None, fmt.Errorf(\"unknown type of 
%v\", reflect.TypeOf(it))\n\t}\n}", "func Primitive(name string, fun PrimitiveFunction, result *Object, args []*Object, rest *Object, defaults []*Object, keys []*Object) *Object {\n\t// the rest type indicates arguments past the end of args will all have the given type. the length must be checked by primitive\n\t// -> they are all optional, then. So, (<any>+) must be expressed as (<any> <any>*)\n\tidx := len(primitives)\n\targc := len(args)\n\tif defaults != nil {\n\t\tdefc := len(defaults)\n\t\tif defc > argc {\n\t\t\tpanic(\"more default argument values than types: \" + name)\n\t\t}\n\t\tif keys != nil {\n\t\t\tif len(keys) != defc {\n\t\t\t\tpanic(\"Argument keys must have same length as argument defaults\")\n\t\t\t}\n\t\t}\n\t\targc = argc - defc\n\t\tfor i := 0; i < defc; i++ {\n\t\t\tt := args[argc+i]\n\t\t\tif t != AnyType && defaults[i].Type != t {\n\t\t\t\tpanic(\"argument default's type (\" + defaults[i].Type.text + \") doesn't match declared type (\" + t.text + \")\")\n\t\t\t}\n\t\t}\n\t} else {\n\t\tif keys != nil {\n\t\t\tpanic(\"Cannot have argument keys without argument defaults\")\n\t\t}\n\t}\n\tsignature := functionSignatureFromTypes(result, args, rest) // functionSignatureFromTypes was defined in runtime.go - 184 line\n\tprim := &primitive{name, fun, signature, idx, argc, result, args, rest, defaults, keys}\n\tprimitives = append(primitives, prim)\n\treturn &Object{Type: FunctionType, primitive: prim}\n}", "func type2CR1S4(query1, query2, split1, split2, split3, split4, split5 string, inputType int) {\n\tdb := dbConnec()\n\trows, err := db.Query(query1)\n\tdefer rows.Close()\n\tvar sumCR1, sumS1, sumS2, sumS3, sumS4, f, s float64\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tfor rows.Next() {\n\t\tf++\n\t\tvar x string\n\t\terr := rows.Scan(&x)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tif inputType == 2 {\n\t\t\tlog.Println(split1, \":\", x)\n\t\t}\n\t\tif inputType == 3 {\n\t\t\tif sumC1, err := strconv.ParseFloat(x, 32); 
err == nil {\n\t\t\t\tsumCR1 += sumC1\n\t\t\t}\n\t\t}\n\t}\n\tif inputType == 3 {\n\t\tlog.Println(\"Average\", split1, \": \", sumCR1/f)\n\t}\n\n\trows1, err := db.Query(query2)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tfor rows1.Next() {\n\t\ts++\n\t\tvar y, z, w, k string\n\t\terr := rows1.Scan(&y, &z, &w, &k)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tif inputType == 2 {\n\t\t\tlog.Println(split2, \":\", y, \" \", split3, \":\", z, \" \", split4, \":\", w, \" \", split5, \":\", k)\n\t\t}\n\t\tif inputType == 3 {\n\t\t\tif sumSens1, err := strconv.ParseFloat(y, 32); err == nil {\n\t\t\t\tsumS1 += sumSens1\n\t\t\t}\n\t\t\tif sumSens2, err := strconv.ParseFloat(z, 32); err == nil {\n\t\t\t\tsumS2 += sumSens2\n\t\t\t}\n\t\t\tif sumSens3, err := strconv.ParseFloat(w, 32); err == nil {\n\t\t\t\tsumS3 += sumSens3\n\t\t\t}\n\t\t\tif sumSens4, err := strconv.ParseFloat(k, 32); err == nil {\n\t\t\t\tsumS4 += sumSens4\n\t\t\t}\n\t\t}\n\t}\n\tif inputType == 3 {\n\t\tlog.Println(\"Average\", split2, \": \", sumS1/s, \" Average\", split3, \": \", sumS2/s, \" Average\", split4, \": \", sumS3/s, \" Average\", split5, \": \", sumS4/s)\n\t}\n}", "func (l langType) ChangeType(register string, regTyp interface{}, v interface{}, errorInfo string) string {\n\t//fmt.Printf(\"DEBUG CHANGE TYPE: %v -- %v\\n\", regTyp, v)\n\tswitch v.(ssa.Value).(type) {\n\tcase *ssa.Function:\n\t\trx := v.(*ssa.Function).Signature.Recv()\n\t\tpf := \"\"\n\t\tif rx != nil { // it is not the name of a normal function, but that of a method, so append the method description\n\t\t\tpf = rx.Type().String() // NOTE no underlying()\n\t\t} else {\n\t\t\tif v.(*ssa.Function).Pkg != nil {\n\t\t\t\tpf = v.(*ssa.Function).Pkg.Object.Name()\n\t\t\t}\n\t\t}\n\t\treturn register + \"=\" +\n\t\t\t\"new Closure(Go_\" + l.LangName(pf, v.(*ssa.Function).Name()) + \".call,[]);\"\n\tdefault:\n\t\tswitch v.(ssa.Value).Type().Underlying().(type) {\n\t\tcase *types.Basic:\n\t\t\tif 
v.(ssa.Value).Type().Underlying().(*types.Basic).Kind() == types.UnsafePointer {\n\t\t\t\t/* from https://groups.google.com/forum/#!topic/golang-dev/6eDTDZPWvoM\n\t\t\t\t \tTreat unsafe.Pointer -> *T conversions by returning new(T).\n\t\t\t\t \tThis is incorrect but at least preserves type-safety...\n\t\t\t\t\tTODO decide how UnsafePointer should fail!\n\t\t\t\t*/\n\t\t\t\treturn register + \"=new UnsafePointer(\" + l.LangType(regTyp.(types.Type), true, errorInfo) + \");\"\n\t\t\t}\n\t\t}\n\t}\n\treturn register + `=` + l.IndirectValue(v, errorInfo) + \";\" // usually, this is a no-op as far as Haxe is concerned\n\n}", "func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl ir.Nodes) {\n\tvar t *types.Type\n\tvar i int\n\n\tlno := base.Pos\n\tdefer func() { base.Pos = lno }()\n\n\tvar n ir.Node\n\tif len(nl) == 1 {\n\t\tn = nl[0]\n\t}\n\n\ti = 0\n\tfor _, tl := range tstruct.Fields().Slice() {\n\t\tt = tl.Type\n\t\tif tl.IsDDD() {\n\t\t\tif isddd {\n\t\t\t\tn = nl[i]\n\t\t\t\tir.SetPos(n)\n\t\t\t\tif n.Type() != nil {\n\t\t\t\t\tnl[i] = assignconvfn(n, t)\n\t\t\t\t}\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\t// TODO(mdempsky): Make into ... 
call with implicit slice.\n\t\t\tfor ; i < len(nl); i++ {\n\t\t\t\tn = nl[i]\n\t\t\t\tir.SetPos(n)\n\t\t\t\tif n.Type() != nil {\n\t\t\t\t\tnl[i] = assignconvfn(n, t.Elem())\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn\n\t\t}\n\n\t\tn = nl[i]\n\t\tir.SetPos(n)\n\t\tif n.Type() != nil {\n\t\t\tnl[i] = assignconvfn(n, t)\n\t\t}\n\t\ti++\n\t}\n}", "func requirementsOf(requirementType string) {\n\n}", "func jsToGo(input Uint32_t) (interface{}, error) {\n\n\t// the Go value...\n\tvar value interface{}\n\n\t// Now I will get the result if any...\n\tif Jerry_value_is_null(input) {\n\t\treturn nil, nil\n\t} else if Jerry_value_is_undefined(input) {\n\t\treturn nil, nil\n\t} else if Jerry_value_is_error(input) {\n\t\t// In that case I will return the error.\n\t\tlog.Println(\"----> error found!\")\n\t} else if Jerry_value_is_number(input) {\n\t\tvalue = Jerry_get_number_value(input)\n\t} else if Jerry_value_is_string(input) {\n\t\tvalue = jsStrToGoStr(input)\n\t} else if Jerry_value_is_boolean(input) {\n\t\tvalue = Jerry_get_boolean_value(input)\n\t} else if Jerry_value_is_typedarray(input) {\n\t\t/** not implemented **/\n\n\t} else if Jerry_value_is_array(input) {\n\t\tcount := (uint32)(C.get_array_length(uint32_t_To_Jerry_value_t(input)))\n\t\t// So here I got a array without type so I will get it property by index\n\t\t// and interpret each result.\n\t\tvalue = make([]interface{}, 0)\n\t\tvar i uint32\n\t\tfor i = 0; i < count; i++ {\n\t\t\te := jerry_value_t_To_uint32_t(C.get_property_by_index(uint32_t_To_Jerry_value_t(input), C.uint32_t(i)))\n\t\t\tv, err := jsToGo(e)\n\t\t\tif err == nil {\n\t\t\t\tvalue = append(value.([]interface{}), v)\n\t\t\t}\n\t\t}\n\t} else if Jerry_value_is_object(input) {\n\t\t// The go object will be a copy of the Js object.\n\t\tif Jerry_object_own_property(input, \"uuid_\") {\n\t\t\tuuid_ := Jerry_get_object_property(input, \"uuid_\")\n\t\t\tdefer Jerry_release_value(uuid_)\n\n\t\t\t// Get the uuid string.\n\t\t\tuuid, _ := jsToGo(uuid_)\n\n\t\t\t// 
Return and object reference.\n\t\t\tvalue = GoJavaScript.NewObjectRef(uuid.(string))\n\t\t} else {\n\t\t\tstringified := Jerry_json_stringfy(input)\n\t\t\t// if there is no error\n\t\t\tif !Jerry_value_is_error(stringified) {\n\t\t\t\tjsonStr := jsStrToGoStr(stringified)\n\t\t\t\tif strings.Index(jsonStr, \"TYPENAME\") != -1 {\n\t\t\t\t\t// So here I will create a remote action and tell the client to\n\t\t\t\t\t// create a Go object from jsonStr. The object will be set by\n\t\t\t\t\t// the client on the server.\n\t\t\t\t\treturn GoJavaScript.CallGoFunction(\"Client\", \"CreateGoObject\", jsonStr)\n\t\t\t\t}\n\n\t\t\t\t// In that case the object has no go representation...\n\t\t\t\t// and must be use only in JS.\n\t\t\t\treturn nil, nil\n\t\t\t} else {\n\t\t\t\t// Continue any way with nil object instead of an error...\n\t\t\t\treturn nil, nil //errors.New(\"fail to stringfy object!\")\n\t\t\t}\n\t\t}\n\t} else if Jerry_value_is_function(input) {\n\t\t// Here a function is found\n\t\tlog.Println(\"---> function found!\", input)\n\t} else if Jerry_value_is_abort(input) {\n\t\t// Here a function is found\n\t\tlog.Println(\"--->abort!\", input)\n\t} else if Jerry_value_is_arraybuffer(input) {\n\t\t// Here a function is found\n\t\tlog.Println(\"--->array buffer!\", input)\n\t} else if Jerry_value_is_constructor(input) {\n\t\t// Here a function is found\n\t\tlog.Println(\"--->constructor!\", input)\n\t} else if Jerry_value_is_promise(input) {\n\t\t// Here a function is found\n\t\tlog.Println(\"--->promise!\", input)\n\t} else {\n\t\tlog.Println(\"---> not implemented Jerry value type.\")\n\t}\n\n\treturn value, nil\n}", "func newWithTypeInfo(args ...interface{}) (rval Native, flag BitFlag) {\n\n\t// no arguments passed, return nil instance\n\tif len(args) == 0 {\n\t\treturn nil, Nil.Type().Flag()\n\t}\n\n\t// multiple arguments have been passed\n\tif len(args) > 1 {\n\n\t\t// allocate slice of natives\n\t\tvar nats = make([]Native, 0, len(args))\n\n\t\t// range over 
arguments\n\t\tfor _, arg := range args {\n\n\t\t\t// allocate native instance to temporary assign\n\t\t\t// converted argument to, when created\n\t\t\tvar nat Native\n\n\t\t\t// recursively create native instances and corresponding\n\t\t\t// type flags\n\t\t\tnat, flag = newWithTypeInfo(arg)\n\n\t\t\t// append native instance to preallocated slice of\n\t\t\t// natives\n\t\t\tnats = append(nats, nat)\n\n\t\t\t// OR concatenate flag type flags created by previously\n\t\t\t// converted arguments\n\t\t\tflag = flag | nat.Type().Flag()\n\t\t}\n\n\t\t// if flag length is one, all arguments yielded identical type.\n\t\t// return unboxed vector and type pure flag, to indicate all\n\t\t// members type\n\t\tif FlagLength(flag) == 1 {\n\t\t\t// return unboxed vector of natives\n\t\t\treturn conNativeVector(flag, nats...), flag\n\t\t}\n\n\t\t// argument types are mixed, return slice of native instances\n\t\t// and multi typed flag\n\t\treturn NewSlice(nats...), flag\n\t}\n\n\t// a single argument has been passed, assign to temporary value\n\tvar temp = args[0]\n\n\t// switch on temporary values type, convert and assign corresponding\n\t// instance of typed native to return value.\n\tswitch temp.(type) {\n\tcase bool:\n\t\trval = BoolVal(temp.(bool))\n\tcase int, int64:\n\t\trval = IntVal(temp.(int))\n\tcase int8:\n\t\trval = Int8Val(temp.(int8))\n\tcase int16:\n\t\trval = Int16Val(temp.(int16))\n\tcase int32:\n\t\trval = Int32Val(temp.(int32))\n\tcase uint, uint64:\n\t\trval = UintVal(temp.(uint))\n\tcase uint16:\n\t\trval = Uint16Val(temp.(uint16))\n\tcase uint32:\n\t\trval = Int32Val(temp.(int32))\n\tcase float32:\n\t\trval = Flt32Val(temp.(float32))\n\tcase float64:\n\t\trval = FltVal(temp.(float64))\n\tcase complex64:\n\t\trval = ImagVal(temp.(complex64))\n\tcase complex128:\n\t\trval = ImagVal(temp.(complex128))\n\tcase byte:\n\t\trval = ByteVal(temp.(byte))\n\tcase []byte:\n\t\trval = BytesVal(temp.([]byte))\n\tcase string:\n\t\trval = StrVal(temp.(string))\n\tcase 
error:\n\t\trval = ErrorVal{temp.(error)}\n\tcase time.Time:\n\t\trval = TimeVal(temp.(time.Time))\n\tcase time.Duration:\n\t\trval = DuraVal(temp.(time.Duration))\n\tcase *big.Int:\n\t\tv := BigIntVal(*temp.(*big.Int))\n\t\trval = &v\n\tcase *big.Float:\n\t\tv := BigFltVal(*temp.(*big.Float))\n\t\trval = &v\n\tcase *big.Rat:\n\t\tv := RatioVal(*temp.(*big.Rat))\n\t\trval = &v\n\tcase func(...Native) Native:\n\t\trval = Expression(temp.(func(...Native) Native))\n\tcase TyNat:\n\t\trval = BitFlag(temp.(TyNat))\n\tcase BitFlag:\n\t\trval = TyNat(temp.(BitFlag))\n\tcase Native:\n\t\trval = temp.(Native)\n\tcase []Native:\n\t\trval = DataSlice(temp.([]Native))\n\t}\n\t// return typed native instance and corresponding type flag\n\treturn rval, rval.Type().Flag()\n}", "func example_test_func(a uint64, b, c interface{}, d uint64) {\n\n}", "func packType(v interface{}) byte {\n\tswitch v.(type) {\n\tcase nil:\n\t\treturn ptNone\n\tcase string:\n\t\treturn ptString\n\tcase int32:\n\t\treturn ptInt\n\tcase float32:\n\t\treturn ptFloat\n\tcase uint32:\n\t\treturn ptPtr\n\tcase []uint16:\n\t\treturn ptWString\n\tcase color.NRGBA:\n\t\treturn ptColor\n\tcase uint64:\n\t\treturn ptUint64\n\tdefault:\n\t\tpanic(\"invalid vdf.Node\")\n\t}\n}", "func (rv *ReturnValue) Type() ObjectType {\n\treturn RETURN_VALUE\n}", "func (t *FunctionType) ReturnType() types.Type {\n\tif t.returnType != nil {\n\t\treturn t.returnType\n\t}\n\tif len(t.Out) == 0 {\n\t\tt.returnType = types.PrimitiveTypeVoid\n\t} else if len(t.Out) == 1 {\n\t\tt.returnType = t.Out[0].Type\n\t} else {\n\t\tst := &types.StructType{TypeBase: t.funcType.TypeBase}\n\t\tst.SetName(\"\")\n\t\tfor i, p := range t.Out {\n\t\t\tf := &types.StructField{Name: \"f\" + strconv.Itoa(i), Type: types.RemoveGroup(p.Type)}\n\t\t\tst.Fields = append(st.Fields, f)\n\t\t}\n\t\tt.returnType = st\n\t}\n\treturn t.returnType\n}" ]
[ "0.6031924", "0.57332414", "0.56366175", "0.54564303", "0.54341835", "0.53188676", "0.53181654", "0.52928877", "0.5157182", "0.5127778", "0.5097387", "0.50824916", "0.5081217", "0.5073113", "0.50724816", "0.50678504", "0.50589323", "0.5040073", "0.5036362", "0.49995402", "0.49874395", "0.49774712", "0.49694332", "0.49685264", "0.49558127", "0.49494827", "0.49428594", "0.49272716", "0.49261653", "0.49207255", "0.49185866", "0.4913294", "0.48909327", "0.48860034", "0.48840195", "0.48823276", "0.48822993", "0.48734173", "0.48674694", "0.4861132", "0.4858641", "0.4858641", "0.48571742", "0.4855516", "0.48427936", "0.4840495", "0.4840495", "0.48310634", "0.4823883", "0.48184058", "0.48174", "0.48096833", "0.47929513", "0.47928306", "0.47913858", "0.47860575", "0.47834453", "0.47777513", "0.47744632", "0.47734526", "0.47690588", "0.47652566", "0.47643945", "0.4762802", "0.4761079", "0.47594994", "0.47496122", "0.4741583", "0.47327596", "0.4722295", "0.47168806", "0.47162533", "0.47129348", "0.4710709", "0.47088337", "0.47081953", "0.46942344", "0.46923226", "0.46876553", "0.46812657", "0.46801972", "0.46791667", "0.46787408", "0.46752164", "0.46697372", "0.46651688", "0.46648696", "0.46642542", "0.46623462", "0.46595162", "0.46586922", "0.4655891", "0.46546957", "0.4654257", "0.4654147", "0.4652156", "0.4651996", "0.46500558", "0.46429494", "0.4640588", "0.4635349" ]
0.0
-1
Deprecated: Use Scorecard.ProtoReflect.Descriptor instead.
func (*Scorecard) Descriptor() ([]byte, []int) { return file_google_monitoring_dashboard_v1_scorecard_proto_rawDescGZIP(), []int{0} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*Score) Descriptor() ([]byte, []int) {\n\treturn file_mitre_cvss_v3_cvss_proto_rawDescGZIP(), []int{2}\n}", "func (*BaseScore) Descriptor() ([]byte, []int) {\n\treturn file_mitre_cvss_v3_cvss_proto_rawDescGZIP(), []int{3}\n}", "func (*CMsgPlayerCard) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{38}\n}", "func (*CMsgPlayerConductScorecard) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{168}\n}", "func (*PatchCollectorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{161}\n}", "func (*Decl) Descriptor() ([]byte, []int) {\n\treturn file_google_api_expr_v1alpha1_checked_proto_rawDescGZIP(), []int{2}\n}", "func (StandardPTransforms_DeprecatedPrimitives) EnumDescriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{4, 1}\n}", "func (x *fastReflection_LightClientAttackEvidence) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_LightClientAttackEvidence\n}", "func (*AddPeerRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*SafetyFeedback) Descriptor() ([]byte, []int) {\n\treturn file_google_ai_generativelanguage_v1beta2_safety_proto_rawDescGZIP(), []int{1}\n}", "func (*Modifier) Descriptor() ([]byte, []int) {\n\treturn file_FillerGame_proto_rawDescGZIP(), []int{6}\n}", "func (*CMsgPlayerConductScorecardRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{167}\n}", "func ProtoFromDescriptor(d protoreflect.Descriptor) proto.Message {\n\tswitch d := d.(type) {\n\tcase protoreflect.FileDescriptor:\n\t\treturn ProtoFromFileDescriptor(d)\n\tcase protoreflect.MessageDescriptor:\n\t\treturn ProtoFromMessageDescriptor(d)\n\tcase protoreflect.FieldDescriptor:\n\t\treturn 
ProtoFromFieldDescriptor(d)\n\tcase protoreflect.OneofDescriptor:\n\t\treturn ProtoFromOneofDescriptor(d)\n\tcase protoreflect.EnumDescriptor:\n\t\treturn ProtoFromEnumDescriptor(d)\n\tcase protoreflect.EnumValueDescriptor:\n\t\treturn ProtoFromEnumValueDescriptor(d)\n\tcase protoreflect.ServiceDescriptor:\n\t\treturn ProtoFromServiceDescriptor(d)\n\tcase protoreflect.MethodDescriptor:\n\t\treturn ProtoFromMethodDescriptor(d)\n\tdefault:\n\t\t// WTF??\n\t\tif res, ok := d.(DescriptorProtoWrapper); ok {\n\t\t\treturn res.AsProto()\n\t\t}\n\t\treturn nil\n\t}\n}", "func (*AddPeerResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{30}\n}", "func (*CMsgPlayerCard_StatModifier) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{38, 0}\n}", "func (*FeedbackMetrics) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{12}\n}", "func (x *fastReflection_Params) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_Params\n}", "func (*TokenProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{0}\n}", "func (*ValidatorUpdate) Descriptor() ([]byte, []int) {\n\treturn file_tm_replay_proto_rawDescGZIP(), []int{9}\n}", "func (*Deprecation) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{8}\n}", "func (*TraceProto) Descriptor() ([]byte, []int) {\n\treturn file_internal_tracing_extended_extended_trace_proto_rawDescGZIP(), []int{0}\n}", "func (x *fastReflection_Evidence) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_Evidence\n}", "func (x *fastReflection_Supply) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_Supply\n}", "func (*StandardProtocols) Descriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{54}\n}", "func (x 
*fastReflection_Metadata) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_Metadata\n}", "func (x *fastReflection_FlagOptions) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_FlagOptions\n}", "func (*CMsgCustomGameWhitelistForEdit) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{50}\n}", "func (*GetPeerInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{6}\n}", "func (x *fastReflection_MsgUpdateParams) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_MsgUpdateParams\n}", "func (*OnTargetAudienceMetrics) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{19}\n}", "func (AttackComplexity) EnumDescriptor() ([]byte, []int) {\n\treturn file_mitre_cvss_v3_cvss_proto_rawDescGZIP(), []int{1}\n}", "func (*CMsgCustomGameWhitelist) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{49}\n}", "func (*CMsgClientToGCRecyclePlayerCard) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{214}\n}", "func (*Game) Descriptor() ([]byte, []int) {\n\treturn file_public_rules_rules_proto_rawDescGZIP(), []int{5}\n}", "func (*Instant) Descriptor() ([]byte, []int) {\n\treturn file_proto_google_fhir_proto_stu3_datatypes_proto_rawDescGZIP(), []int{7}\n}", "func (*Instance) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{28}\n}", "func (*LeaderInfoProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{39}\n}", "func (*Leaderboard) Descriptor() ([]byte, []int) {\n\treturn file_api_proto_rawDescGZIP(), []int{49}\n}", "func (*CSVCMsg_HltvReplay) Descriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{51}\n}", "func (*ValidatorUpdates) 
Descriptor() ([]byte, []int) {\n\treturn file_core_abci_v1alpha1_abci_proto_rawDescGZIP(), []int{6}\n}", "func (*CodeLens) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{164}\n}", "func (*Example) Descriptor() ([]byte, []int) {\n\treturn file_google_ai_generativelanguage_v1beta2_discuss_service_proto_rawDescGZIP(), []int{4}\n}", "func (*CMsgClientToGCGetProfileCardStats) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{139}\n}", "func (*Message3920) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{17}\n}", "func (*NetProtoTalker) Descriptor() ([]byte, []int) {\n\treturn file_pkg_smgrpc_smgrpc_proto_rawDescGZIP(), []int{1}\n}", "func (*PrivateApiCF) Descriptor() ([]byte, []int) {\n\treturn file_pkg_kascfg_kascfg_proto_rawDescGZIP(), []int{24}\n}", "func (*Description) Descriptor() ([]byte, []int) {\n\treturn file_Harmony_proto_rawDescGZIP(), []int{4}\n}", "func (x *fastReflection_ServiceCommandDescriptor) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_ServiceCommandDescriptor\n}", "func (*Decl_FunctionDecl_Overload) Descriptor() ([]byte, []int) {\n\treturn file_google_api_expr_v1alpha1_checked_proto_rawDescGZIP(), []int{2, 1, 0}\n}", "func (*Message5903) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{34}\n}", "func (*CMsgClientToGCGetProfileCard) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{137}\n}", "func (*EACLRecord) Descriptor() ([]byte, []int) {\n\treturn file_v2_acl_grpc_types_proto_rawDescGZIP(), []int{0}\n}", "func (*CMsgFlipLobbyTeams) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{90}\n}", "func (*CSVCMsg_GameEventListDescriptorT) Descriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), 
[]int{44, 1}\n}", "func (StandardProtocols_Enum) EnumDescriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{54, 0}\n}", "func (*CMsgInspectElement) Descriptor() ([]byte, []int) {\n\treturn file_steam_htmlmessages_proto_rawDescGZIP(), []int{145}\n}", "func (*Message6024) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{26}\n}", "func (*Draw) Descriptor() ([]byte, []int) {\n\treturn file_public_rules_rules_proto_rawDescGZIP(), []int{2}\n}", "func (*Message7920) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{20}\n}", "func (*Card) Descriptor() ([]byte, []int) {\n\treturn file_proto_stripe_proto_rawDescGZIP(), []int{4}\n}", "func (*CMsgDOTAProfileCard) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{22}\n}", "func (*CMsgDOTARealtimeGameStats_PlayerDetails) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{27, 5}\n}", "func (*CCLCMsg_HltvReplay) Descriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{52}\n}", "func (*CSVCMsg_HltvReplay) Descriptor() ([]byte, []int) {\n\treturn file_netmessages_proto_rawDescGZIP(), []int{51}\n}", "func (*CSVCMsg_GameEventListDescriptorT) Descriptor() ([]byte, []int) {\n\treturn file_netmessages_proto_rawDescGZIP(), []int{44, 1}\n}", "func (*CMatchTeamTimedStats) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{10}\n}", "func (*LeaderboardRecord) Descriptor() ([]byte, []int) {\n\treturn file_api_proto_rawDescGZIP(), []int{51}\n}", "func (*Team) Descriptor() ([]byte, []int) {\n\treturn file_teams_v1_teams_proto_rawDescGZIP(), []int{12}\n}", "func (*MyProto) Descriptor() ([]byte, []int) {\n\treturn file_my_proto_proto_rawDescGZIP(), []int{0}\n}", "func 
(*CMatchClip) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{14}\n}", "func (x *fastReflection_ModuleOptions) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_ModuleOptions\n}", "func (*GetCollectorRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{163}\n}", "func (*Embed) Descriptor() ([]byte, []int) {\n\treturn file_chat_v1_messages_proto_rawDescGZIP(), []int{2}\n}", "func (*Message) Descriptor() ([]byte, []int) {\n\treturn file_google_ai_generativelanguage_v1beta2_discuss_service_proto_rawDescGZIP(), []int{2}\n}", "func (*Correctness) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{13}\n}", "func (*Message5908) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{13}\n}", "func (*PlanChange_Removed) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0, 3}\n}", "func (*PlayerPoint) Descriptor() ([]byte, []int) {\n\treturn file_msgdata_proto_rawDescGZIP(), []int{18}\n}", "func (*CMatchPlayerTimedStats) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{9}\n}", "func (*ListCollectorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{164}\n}", "func (*CandidateInfoProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{41}\n}", "func (*CMsgClientToGCPlayerStatsRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{143}\n}", "func (*Message12818) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{5}\n}", "func (*RoleInfoProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{42}\n}", "func 
(*PrivateVisibility) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_dns_v1_dns_zone_proto_rawDescGZIP(), []int{2}\n}", "func (*Message6108) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{31}\n}", "func (x *fastReflection_RpcCommandOptions) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_RpcCommandOptions\n}", "func (*GetPeerInfoResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{28}\n}", "func ProtoFromMethodDescriptor(d protoreflect.MethodDescriptor) *descriptorpb.MethodDescriptorProto {\n\ttype canProto interface {\n\t\tMethodDescriptorProto() *descriptorpb.MethodDescriptorProto\n\t}\n\tif res, ok := d.(canProto); ok {\n\t\treturn res.MethodDescriptorProto()\n\t}\n\tif res, ok := d.(DescriptorProtoWrapper); ok {\n\t\tif md, ok := res.AsProto().(*descriptorpb.MethodDescriptorProto); ok {\n\t\t\treturn md\n\t\t}\n\t}\n\treturn protodesc.ToMethodDescriptorProto(d)\n}", "func (*Player) Descriptor() ([]byte, []int) {\n\treturn file_messages_proto_rawDescGZIP(), []int{6}\n}", "func (*Reference) Descriptor() ([]byte, []int) {\n\treturn file_google_api_expr_v1alpha1_checked_proto_rawDescGZIP(), []int{3}\n}", "func (x *fastReflection_MsgUpdateParamsResponse) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_MsgUpdateParamsResponse\n}", "func (*Validator) Descriptor() ([]byte, []int) {\n\treturn file_tm_replay_proto_rawDescGZIP(), []int{13}\n}", "func (x *fastReflection_Input) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_Input\n}", "func (*UpdateTeam) Descriptor() ([]byte, []int) {\n\treturn file_teams_v1_teams_proto_rawDescGZIP(), []int{6}\n}", "func (CVSSv3_Impact) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_devtools_containeranalysis_v1beta1_cvss_cvss_proto_rawDescGZIP(), []int{0, 5}\n}", "func (*CMsgClientToGCVoteForLeagueGameMVP) Descriptor() ([]byte, 
[]int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{181}\n}", "func (*Message7511) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{16}\n}", "func (*PolicyRule) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_skv2_api_multicluster_v1alpha1_cluster_proto_rawDescGZIP(), []int{2}\n}", "func (*Message12817) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{22}\n}" ]
[ "0.700281", "0.6879392", "0.68285745", "0.6728023", "0.66748387", "0.6673123", "0.6658392", "0.6644106", "0.66355675", "0.66153055", "0.66022944", "0.66015166", "0.65768534", "0.6576063", "0.65572953", "0.6551013", "0.65416706", "0.6523413", "0.6518851", "0.651338", "0.6506201", "0.6505877", "0.6505526", "0.6494367", "0.6485224", "0.64839387", "0.6471758", "0.6467003", "0.64669275", "0.6465438", "0.6459129", "0.6453696", "0.64466804", "0.64353865", "0.6432846", "0.6432553", "0.64294493", "0.64293236", "0.6428831", "0.6424936", "0.64244473", "0.6423159", "0.64169365", "0.6413482", "0.6412987", "0.6410625", "0.64102036", "0.640894", "0.6406266", "0.64036953", "0.64028776", "0.640216", "0.6399701", "0.6396498", "0.639573", "0.6395544", "0.6394525", "0.63883597", "0.6385302", "0.6384902", "0.63845325", "0.63831234", "0.6381341", "0.63783234", "0.63782936", "0.63754934", "0.63738054", "0.63684064", "0.6367496", "0.6364238", "0.6363453", "0.6361802", "0.6357843", "0.63552064", "0.6355072", "0.6355033", "0.63545233", "0.63534653", "0.6350837", "0.6350258", "0.6349757", "0.6347458", "0.6347246", "0.6346134", "0.63445216", "0.63385284", "0.63375926", "0.63373774", "0.6334668", "0.6331724", "0.6329643", "0.6328221", "0.6326576", "0.63246226", "0.63223726", "0.63197297", "0.6318145", "0.63177913", "0.63177454", "0.63160974" ]
0.72033995
0
Deprecated: Use Scorecard_GaugeView.ProtoReflect.Descriptor instead.
func (*Scorecard_GaugeView) Descriptor() ([]byte, []int) { return file_google_monitoring_dashboard_v1_scorecard_proto_rawDescGZIP(), []int{0, 0} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*ClrGC) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{2}\n}", "func (*KafkaGauge) Descriptor() ([]byte, []int) {\n\treturn file_pkg_sinks_plugin_proto_metrics_proto_rawDescGZIP(), []int{1}\n}", "func (*CLRMetric) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{1}\n}", "func (*CLRMetricCollection) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{0}\n}", "func (*Metrics) Descriptor() ([]byte, []int) {\n\treturn file_grpc_proto_rawDescGZIP(), []int{0}\n}", "func (*CMsgClientToGCPlayerStatsRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{143}\n}", "func (*Deprecation) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{8}\n}", "func (*Metric) Descriptor() ([]byte, []int) {\n\treturn file_go_chromium_org_luci_analysis_proto_v1_metrics_proto_rawDescGZIP(), []int{2}\n}", "func (*FeedbackMetrics) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{12}\n}", "func (*TargetMetrics_Metric) Descriptor() ([]byte, []int) {\n\treturn file_asgt_type_target_metrics_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*OnTargetAudienceMetrics) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{19}\n}", "func (*CMsgClientToGCGiveTip) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{239}\n}", "func (*BaseMetrics) Descriptor() ([]byte, []int) {\n\treturn file_mitre_cvss_v3_cvss_proto_rawDescGZIP(), []int{0}\n}", "func (*CMsgClientToGCVoteForLeagueGameMVP) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{181}\n}", "func (*CMsgGCPlayerInfoRequest) Descriptor() ([]byte, []int) {\n\treturn 
file_dota_gcmessages_client_proto_rawDescGZIP(), []int{117}\n}", "func (*GC) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_JVMMetric_proto_rawDescGZIP(), []int{4}\n}", "func (*KafkaMeter) Descriptor() ([]byte, []int) {\n\treturn file_pkg_sinks_plugin_proto_metrics_proto_rawDescGZIP(), []int{0}\n}", "func (*CMsgDOTARealtimeGameStats) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{27}\n}", "func (*RefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{16}\n}", "func (*TargetMetrics) Descriptor() ([]byte, []int) {\n\treturn file_asgt_type_target_metrics_proto_rawDescGZIP(), []int{0}\n}", "func (*CMsgMapStatsSnapshot) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{90}\n}", "func (*MetricInfo) Descriptor() ([]byte, []int) {\n\treturn file_msgType_proto_rawDescGZIP(), []int{15}\n}", "func (*NumericValue) Descriptor() ([]byte, []int) {\n\treturn file_google_analytics_admin_v1alpha_access_report_proto_rawDescGZIP(), []int{10}\n}", "func (*PostTrendingMetricsViewRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{171}\n}", "func (*Metric) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{0}\n}", "func (*RateLimitingSampler) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_external_envoy_config_trace_v3_opencensus_proto_rawDescGZIP(), []int{4}\n}", "func (*ClrThread) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{3}\n}", "func (*Message7920) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{20}\n}", "func (*CMsgGlobalMapStats) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), 
[]int{91}\n}", "func (*CMsgClientToGCVoteForMVP) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{182}\n}", "func (*Message12818) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{5}\n}", "func (*NetRateTalker) Descriptor() ([]byte, []int) {\n\treturn file_pkg_smgrpc_smgrpc_proto_rawDescGZIP(), []int{2}\n}", "func (*GPULabelRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{10}\n}", "func (*CMatchTeamTimedStats) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{10}\n}", "func (*MetricImplementation) Descriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{6}\n}", "func (*CMsgClientToGCTeammateStatsRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{187}\n}", "func (*PlanChange_Added) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*ListTrendingMetricsViewsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{172}\n}", "func (*Message6024) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{26}\n}", "func (Metric_Scale) EnumDescriptor() ([]byte, []int) {\n\treturn file_api_assessment_metric_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*MetricData) Descriptor() ([]byte, []int) {\n\treturn file_alameda_api_v1alpha1_datahub_common_metrics_proto_rawDescGZIP(), []int{1}\n}", "func (*GetMetricsInfoResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{44}\n}", "func (*GoogleProfilerCF) Descriptor() ([]byte, []int) {\n\treturn file_pkg_kascfg_kascfg_proto_rawDescGZIP(), 
[]int{12}\n}", "func (*CMsgDOTALiveLeagueGameUpdate) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{104}\n}", "func (*Metric) Descriptor() ([]byte, []int) {\n\treturn file_tlogpb_tlog_proto_rawDescGZIP(), []int{4}\n}", "func (*TrialProfilerMetricLabels) Descriptor() ([]byte, []int) {\n\treturn file_determined_trial_v1_trial_proto_rawDescGZIP(), []int{3}\n}", "func (*MetricsServing) Descriptor() ([]byte, []int) {\n\treturn file_pkg_webview_view_proto_rawDescGZIP(), []int{9}\n}", "func (*Message3920) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{17}\n}", "func (x *fastReflection_FlagOptions) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_FlagOptions\n}", "func (*Message6108) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{31}\n}", "func (*TelemetryParams) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{62}\n}", "func (VersionView) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_appengine_v1_appengine_proto_rawDescGZIP(), []int{0}\n}", "func (StatusMessage_Reference) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_devtools_clouddebugger_v2_data_proto_rawDescGZIP(), []int{1, 0}\n}", "func (*GetCollectorRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{163}\n}", "func (*CMsgClientToGCGiveTipResponse) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{240}\n}", "func (*Message5908) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{13}\n}", "func (*ClientStatsPayload) Descriptor() ([]byte, []int) {\n\treturn file_datadog_trace_stats_proto_rawDescGZIP(), []int{1}\n}", "func (*PostModelVersionMetricsRequest) Descriptor() ([]byte, []int) 
{\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{93}\n}", "func (*GetModelVersionMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{94}\n}", "func (*Preferences) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{8}\n}", "func (*BackfilledMetrics) Descriptor() ([]byte, []int) {\n\treturn file_BackfillMetrics_proto_rawDescGZIP(), []int{0}\n}", "func (StandardPTransforms_DeprecatedPrimitives) EnumDescriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{4, 1}\n}", "func (*AddPeerRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*CMsgProfileUpdate) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{277}\n}", "func (*ListMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_go_chromium_org_luci_analysis_proto_v1_metrics_proto_rawDescGZIP(), []int{0}\n}", "func (*CMsgClientToGCGetProfileCardStats) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{139}\n}", "func (*Message7928) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{18}\n}", "func (*CMsgGCNotificationsRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{113}\n}", "func (*MaximizeConversions) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v14_common_bidding_proto_rawDescGZIP(), []int{6}\n}", "func (*Message6054) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{23}\n}", "func (*CMsgDOTABotDebugInfo) Descriptor() ([]byte, []int) {\n\treturn 
file_dota_gcmessages_common_proto_rawDescGZIP(), []int{41}\n}", "func (*CMsgGameDataSpecialValueBonus) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{68}\n}", "func (*Message7921) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{19}\n}", "func (*MetricsServiceRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{18}\n}", "func (*BackfilledMetrics_Data) Descriptor() ([]byte, []int) {\n\treturn file_BackfillMetrics_proto_rawDescGZIP(), []int{0, 1}\n}", "func (*CMsgCustomGameWhitelistForEdit) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{50}\n}", "func (*CSOEconItemDropRateBonus) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{78}\n}", "func (*CMatchPlayerTimedStats) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{9}\n}", "func (*ClientGroupedStats) Descriptor() ([]byte, []int) {\n\treturn file_datadog_trace_stats_proto_rawDescGZIP(), []int{3}\n}", "func (*JVMMetricCollection) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_JVMMetric_proto_rawDescGZIP(), []int{0}\n}", "func (*Message12817) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{22}\n}", "func (*Message7511) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{16}\n}", "func (*MetricsReport) Descriptor() ([]byte, []int) {\n\treturn file_determined_trial_v1_trial_proto_rawDescGZIP(), []int{9}\n}", "func (*Telemetry) Descriptor() ([]byte, []int) {\n\treturn file_huawei_telemetry_proto_rawDescGZIP(), []int{0}\n}", "func (*CMsgDOTARealtimeGameStats_GraphData) Descriptor() ([]byte, []int) {\n\treturn 
file_dota_gcmessages_common_proto_rawDescGZIP(), []int{27, 11}\n}", "func (*NetProtoTalker) Descriptor() ([]byte, []int) {\n\treturn file_pkg_smgrpc_smgrpc_proto_rawDescGZIP(), []int{1}\n}", "func (*TraceProto) Descriptor() ([]byte, []int) {\n\treturn file_internal_tracing_extended_extended_trace_proto_rawDescGZIP(), []int{0}\n}", "func (*DiagOperation) Descriptor() ([]byte, []int) {\n\treturn file_testvector_tv_proto_rawDescGZIP(), []int{10}\n}", "func (*PatchCollectorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{161}\n}", "func (*CMsgDOTABotDebugInfo_Bot) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{41, 0}\n}", "func (*CMsgDOTARealtimeGameStatsTerse) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{28}\n}", "func (*MultiTrendingMetricsViewResponse) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{173}\n}", "func (*MaximizeConversionValue) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v14_common_bidding_proto_rawDescGZIP(), []int{7}\n}", "func (*CMsgConsumableUsage) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{46}\n}", "func (*CMsgDOTAClientToGCQuickStatsRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{251}\n}", "func (*Message6578) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{7}\n}", "func (*MetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{11}\n}", "func (*CSVCMsg_GameEventListDescriptorT) Descriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{44, 1}\n}", "func (*Degree) Descriptor() ([]byte, []int) {\n\treturn 
file_google_cloud_talent_v4beta1_profile_proto_rawDescGZIP(), []int{11}\n}", "func (*RefreshResponse) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{17}\n}" ]
[ "0.6824529", "0.68163484", "0.67460763", "0.66216606", "0.65765965", "0.65765387", "0.6563826", "0.65418273", "0.6529831", "0.65259284", "0.65070504", "0.6475942", "0.6463522", "0.6460196", "0.6457537", "0.6445932", "0.6419697", "0.6413116", "0.6407112", "0.63850945", "0.63491607", "0.63464916", "0.6337833", "0.63311464", "0.63238573", "0.6320074", "0.6318079", "0.6315722", "0.63131756", "0.63122004", "0.6310013", "0.6302924", "0.629308", "0.62917036", "0.6284138", "0.6281044", "0.6279408", "0.62707406", "0.6269557", "0.6266669", "0.62636006", "0.62608784", "0.6255406", "0.6251367", "0.6251156", "0.62481964", "0.62480706", "0.6243464", "0.624083", "0.6234853", "0.6218241", "0.62159765", "0.6214936", "0.62145895", "0.6213625", "0.6209406", "0.619898", "0.6198606", "0.6195309", "0.61946297", "0.6193919", "0.61934304", "0.618907", "0.6187731", "0.6186804", "0.6185477", "0.6185024", "0.6183286", "0.61825806", "0.6174484", "0.6168433", "0.61678296", "0.61656106", "0.61643136", "0.61632603", "0.61610913", "0.61609954", "0.6160161", "0.6159802", "0.61588025", "0.6158729", "0.6157504", "0.6154262", "0.6153343", "0.615176", "0.61477655", "0.61467457", "0.6146026", "0.614566", "0.6144133", "0.61411685", "0.6140872", "0.6138836", "0.6135872", "0.6135528", "0.6131107", "0.6128428", "0.6126325", "0.61255145", "0.61235535" ]
0.77736974
0
Deprecated: Use Scorecard_SparkChartView.ProtoReflect.Descriptor instead.
func (*Scorecard_SparkChartView) Descriptor() ([]byte, []int) { return file_google_monitoring_dashboard_v1_scorecard_proto_rawDescGZIP(), []int{0, 1} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*FeedbackMetrics) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{12}\n}", "func (*Scorecard_GaugeView) Descriptor() ([]byte, []int) {\n\treturn file_google_monitoring_dashboard_v1_scorecard_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*Chart) Descriptor() ([]byte, []int) {\n\treturn file_helm_api_proto_rawDescGZIP(), []int{20}\n}", "func (*ChartInfo) Descriptor() ([]byte, []int) {\n\treturn file_helm_api_proto_rawDescGZIP(), []int{24}\n}", "func (*ChartOptions) Descriptor() ([]byte, []int) {\n\treturn file_google_monitoring_dashboard_v1_xychart_proto_rawDescGZIP(), []int{1}\n}", "func (*PostModelVersionMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{93}\n}", "func (*ListTrendingMetricsViewsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{172}\n}", "func (*GetModelVersionMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{94}\n}", "func (*XyChart) Descriptor() ([]byte, []int) {\n\treturn file_google_monitoring_dashboard_v1_xychart_proto_rawDescGZIP(), []int{0}\n}", "func (*Chart) Descriptor() ([]byte, []int) {\n\treturn file_svr_chart_v1_chart_db_proto_rawDescGZIP(), []int{0}\n}", "func (*OnTargetAudienceMetrics) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{19}\n}", "func (*ReadTensorboardUsageResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{7}\n}", "func (*Deprecation) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{8}\n}", "func (*ReadTensorboardTimeSeriesDataResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{37}\n}", 
"func (*Scorecard) Descriptor() ([]byte, []int) {\n\treturn file_google_monitoring_dashboard_v1_scorecard_proto_rawDescGZIP(), []int{0}\n}", "func (*PostTrendingMetricsViewRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{171}\n}", "func (*Score) Descriptor() ([]byte, []int) {\n\treturn file_mitre_cvss_v3_cvss_proto_rawDescGZIP(), []int{2}\n}", "func (*SafetyFeedback) Descriptor() ([]byte, []int) {\n\treturn file_google_ai_generativelanguage_v1beta2_safety_proto_rawDescGZIP(), []int{1}\n}", "func (*ReadTensorboardUsageRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{6}\n}", "func (x *fastReflection_LightClientAttackEvidence) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_LightClientAttackEvidence\n}", "func (*ReadTensorboardTimeSeriesDataRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{36}\n}", "func (*QueryPlanStatusResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{25}\n}", "func (*FabricView) Descriptor() ([]byte, []int) {\n\treturn file_fabric_view_data_proto_rawDescGZIP(), []int{0}\n}", "func (*Message5908) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{13}\n}", "func (*MetricsServiceRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{18}\n}", "func (*CheckerChartV1) Descriptor() ([]byte, []int) {\n\treturn file_checker_v1_proto_rawDescGZIP(), []int{20}\n}", "func (*UpdateTensorboardTimeSeriesRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{32}\n}", "func (*BaseMetrics) Descriptor() ([]byte, []int) {\n\treturn 
file_mitre_cvss_v3_cvss_proto_rawDescGZIP(), []int{0}\n}", "func (*BatchReadTensorboardTimeSeriesDataResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{35}\n}", "func (*ExportTensorboardTimeSeriesDataResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{43}\n}", "func (*GetChartReq) Descriptor() ([]byte, []int) {\n\treturn file_helm_api_proto_rawDescGZIP(), []int{9}\n}", "func (*DeleteTensorboardTimeSeriesRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{33}\n}", "func (*TraceProto) Descriptor() ([]byte, []int) {\n\treturn file_internal_tracing_extended_extended_trace_proto_rawDescGZIP(), []int{0}\n}", "func (x *fastReflection_FlagOptions) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_FlagOptions\n}", "func (*Message12818) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{5}\n}", "func (*VodPlayInfoModel) Descriptor() ([]byte, []int) {\n\treturn file_vod_business_vod_play_proto_rawDescGZIP(), []int{0}\n}", "func (*Message3920) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{17}\n}", "func (*BaseScore) Descriptor() ([]byte, []int) {\n\treturn file_mitre_cvss_v3_cvss_proto_rawDescGZIP(), []int{3}\n}", "func (*DiagOperation) Descriptor() ([]byte, []int) {\n\treturn file_testvector_tv_proto_rawDescGZIP(), []int{10}\n}", "func (*ListChartReq) Descriptor() ([]byte, []int) {\n\treturn file_helm_api_proto_rawDescGZIP(), []int{27}\n}", "func (*GetTensorboardTimeSeriesRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{29}\n}", "func (*Message12796) Descriptor() ([]byte, []int) {\n\treturn 
file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{1}\n}", "func (*Message6108) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{31}\n}", "func (*UpdateTensorboardRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{4}\n}", "func (*AnalysisMessageWeakSchema) Descriptor() ([]byte, []int) {\n\treturn file_analysis_v1alpha1_message_proto_rawDescGZIP(), []int{1}\n}", "func (*MultiTrendingMetricsViewResponse) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{173}\n}", "func (x *fastReflection_Evidence) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_Evidence\n}", "func (*Message7928) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{18}\n}", "func (*Message6052) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{25}\n}", "func (*LabelledPayload) Descriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{59}\n}", "func (*ListTensorboardTimeSeriesResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{31}\n}", "func (*Message5903) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{34}\n}", "func (*Message7511) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{16}\n}", "func (*GetTensorboardRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{1}\n}", "func (*MetricsServing) Descriptor() ([]byte, []int) {\n\treturn file_pkg_webview_view_proto_rawDescGZIP(), 
[]int{9}\n}", "func (*Message7920) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{20}\n}", "func (*Message12817) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{22}\n}", "func (*XyChart_DataSet) Descriptor() ([]byte, []int) {\n\treturn file_google_monitoring_dashboard_v1_xychart_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*Message10319) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{6}\n}", "func (*Draw) Descriptor() ([]byte, []int) {\n\treturn file_public_rules_rules_proto_rawDescGZIP(), []int{2}\n}", "func (*Message7921) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{19}\n}", "func (*GetMetricsInfoResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{44}\n}", "func (*StandardProtocols) Descriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{54}\n}", "func (*Message6024) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{26}\n}", "func (*Forecast) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{18}\n}", "func (*GenerateProductMixIdeasRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{7}\n}", "func (*BatchReadTensorboardTimeSeriesDataRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{34}\n}", "func (*ListTensorboardTimeSeriesRequest) Descriptor() ([]byte, []int) {\n\treturn 
file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{30}\n}", "func (*GetBalancerBandwidthResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{19}\n}", "func (*FeedbackRequest) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{10}\n}", "func (*Message5907) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{32}\n}", "func (*CSVCMsg_PaintmapData) Descriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{42}\n}", "func (*CSVCMsg_SetView) Descriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{29}\n}", "func (*MetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{11}\n}", "func (*QueryDatasetComponentDiff) Descriptor() ([]byte, []int) {\n\treturn file_modeldb_versioning_Dataset_proto_rawDescGZIP(), []int{8}\n}", "func (*PlanChange_Removed) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0, 3}\n}", "func (*BatchCreateTensorboardTimeSeriesResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{27}\n}", "func (*GetDatanodeInfoResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{13}\n}", "func (*GenerateProductMixIdeasResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{9}\n}", "func (*Preferences) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{8}\n}", "func (*CLRMetric) Descriptor() ([]byte, []int) {\n\treturn file_language_agent_CLRMetric_proto_rawDescGZIP(), []int{1}\n}", "func (*CodeLens) 
Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{164}\n}", "func (*Message3850) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{14}\n}", "func (*ListMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_go_chromium_org_luci_analysis_proto_v1_metrics_proto_rawDescGZIP(), []int{0}\n}", "func (*SubmitDiskBalancerPlanResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{21}\n}", "func (*CMsgHideToolTip) Descriptor() ([]byte, []int) {\n\treturn file_steam_htmlmessages_proto_rawDescGZIP(), []int{64}\n}", "func (*VectorClock) Descriptor() ([]byte, []int) {\n\treturn file_pkg_proto_l3_proto_rawDescGZIP(), []int{3}\n}", "func (*Example) Descriptor() ([]byte, []int) {\n\treturn file_google_ai_generativelanguage_v1beta2_discuss_service_proto_rawDescGZIP(), []int{4}\n}", "func (*ConsumableTrafficPolyline) Descriptor() ([]byte, []int) {\n\treturn file_google_maps_fleetengine_v1_traffic_proto_rawDescGZIP(), []int{1}\n}", "func (*Tensorboard) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_proto_rawDescGZIP(), []int{0}\n}", "func (*CSVCMsg_PaintmapData) Descriptor() ([]byte, []int) {\n\treturn file_netmessages_proto_rawDescGZIP(), []int{42}\n}", "func (*ListMetricsResponse) Descriptor() ([]byte, []int) {\n\treturn file_go_chromium_org_luci_analysis_proto_v1_metrics_proto_rawDescGZIP(), []int{1}\n}", "func (*DisplayData) Descriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{60}\n}", "func (*ExportTensorboardTimeSeriesDataRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{42}\n}", "func (*Module) Descriptor() ([]byte, []int) {\n\treturn file_google_devtools_cloudtrace_v2_trace_proto_rawDescGZIP(), []int{3}\n}", "func 
(*StandardCoders) Descriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{29}\n}", "func (*CancelPlanResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{23}\n}", "func (*DatasetDiff) Descriptor() ([]byte, []int) {\n\treturn file_modeldb_versioning_Dataset_proto_rawDescGZIP(), []int{7}\n}", "func (*Message7919) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{21}\n}", "func (*ReadTensorboardUsageResponse_PerUserUsageData) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_tensorboard_service_proto_rawDescGZIP(), []int{7, 0}\n}" ]
[ "0.65494704", "0.6429943", "0.63701975", "0.6346591", "0.6337553", "0.62929755", "0.6288999", "0.62860525", "0.6271876", "0.6259072", "0.6255981", "0.6247951", "0.6228296", "0.6202115", "0.6201875", "0.6154162", "0.61452585", "0.6145207", "0.61450595", "0.61404777", "0.61375725", "0.61119765", "0.6111276", "0.6103721", "0.6097426", "0.60960966", "0.60947895", "0.60924107", "0.6092196", "0.60900444", "0.60889894", "0.6084264", "0.6081079", "0.6079819", "0.6076615", "0.6064916", "0.60608596", "0.60604376", "0.6055613", "0.6052481", "0.6050425", "0.6049879", "0.60419905", "0.60413665", "0.6038328", "0.60309374", "0.6029024", "0.6027254", "0.60258645", "0.60242593", "0.60228074", "0.6016516", "0.6015802", "0.60132575", "0.6011468", "0.60099083", "0.60091114", "0.6008845", "0.60075396", "0.60066", "0.60019684", "0.6000723", "0.599948", "0.5997407", "0.5993053", "0.5990179", "0.59901476", "0.59872574", "0.5986467", "0.59806556", "0.59724504", "0.5971712", "0.5970583", "0.59646773", "0.59562427", "0.5952351", "0.59511304", "0.5950511", "0.5950151", "0.59488773", "0.59461516", "0.5944357", "0.594431", "0.5942905", "0.5941601", "0.59404707", "0.59392697", "0.5937463", "0.593651", "0.5936062", "0.59337306", "0.59289783", "0.59282625", "0.5927599", "0.5923738", "0.5920761", "0.5918468", "0.59171265", "0.59131384", "0.590532" ]
0.7641592
0
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be nonnil.
func (in *AESConfiguration) DeepCopyInto(out *AESConfiguration) { *out = *in if in.Keys != nil { in, out := &in.Keys, &out.Keys *out = make([]Key, len(*in)) copy(*out, *in) } return }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (r *RunInfo) DeepCopyInto(out *RunInfo) {\n\t*out = *r\n}", "func (in *Base) DeepCopyInto(out *Base) {\n\t*out = *in\n\treturn\n}", "func (in *ForkObject) DeepCopyInto(out *ForkObject) {\n\t*out = *in\n}", "func (in *TargetObjectInfo) DeepCopyInto(out *TargetObjectInfo) {\n\t*out = *in\n}", "func (in *DebugObjectInfo) DeepCopyInto(out *DebugObjectInfo) {\n\t*out = *in\n}", "func (in *Input) DeepCopyInto(out *Input) {\n\t*out = *in\n}", "func (u *SSN) DeepCopyInto(out *SSN) {\n\t*out = *u\n}", "func (in *ExistPvc) DeepCopyInto(out *ExistPvc) {\n\t*out = *in\n}", "func (in *DockerStep) DeepCopyInto(out *DockerStep) {\n\t*out = *in\n\tif in.Inline != nil {\n\t\tin, out := &in.Inline, &out.Inline\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tout.Auth = in.Auth\n\treturn\n}", "func (in *Container) DeepCopyInto(out *Container) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.Command != nil {\n\t\tin, out := &in.Command, &out.Command\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.LifeCycleScript != nil {\n\t\tin, out := &in.LifeCycleScript, &out.LifeCycleScript\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *RuntimeRef) DeepCopyInto(out *RuntimeRef) {\n\t*out = *in\n}", "func (in *Ibft2) DeepCopyInto(out *Ibft2) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *TestResult) DeepCopyInto(out *TestResult) {\n\t*out = *in\n}", "func (in *Haproxy) DeepCopyInto(out *Haproxy) {\n\t*out = *in\n\treturn\n}", "func (in *SSH) DeepCopyInto(out *SSH) {\n\t*out = *in\n\treturn\n}", "func (in *Runtime) DeepCopyInto(out *Runtime) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) 
DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (b *Base64) DeepCopyInto(out *Base64) {\n\t*out = *b\n}", "func (in *EventDependencyTransformer) DeepCopyInto(out *EventDependencyTransformer) {\n\t*out = *in\n\treturn\n}", "func (in *StageOutput) DeepCopyInto(out *StageOutput) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Dependent) DeepCopyInto(out *Dependent) {\n\t*out = *in\n\treturn\n}", "func (in *GitFileGeneratorItem) DeepCopyInto(out *GitFileGeneratorItem) {\n\t*out = *in\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *AnsibleStep) DeepCopyInto(out *AnsibleStep) {\n\t*out = *in\n\treturn\n}", "func (in *Forks) DeepCopyInto(out *Forks) {\n\t*out = *in\n\tif in.DAO != nil {\n\t\tin, out := &in.DAO, &out.DAO\n\t\t*out = new(uint)\n\t\t**out = **in\n\t}\n}", "func (in *ContainerPort) DeepCopyInto(out *ContainerPort) {\n\t*out = *in\n}", "func (in *General) DeepCopyInto(out *General) {\n\t*out = *in\n\treturn\n}", "func (in *IsoContainer) DeepCopyInto(out *IsoContainer) {\n\t*out = *in\n}", "func (in *Git) DeepCopyInto(out *Git) {\n\t*out = *in\n\treturn\n}", "func (in *ConfigFile) DeepCopyInto(out *ConfigFile) {\n\t*out = *in\n}", "func (in *BackupProgress) DeepCopyInto(out *BackupProgress) {\n\t*out = *in\n}", "func (in *DataDisk) DeepCopyInto(out *DataDisk) {\n\t*out = *in\n}", "func (u *MAC) DeepCopyInto(out *MAC) {\n\t*out = *u\n}", "func (in *PhaseStep) DeepCopyInto(out *PhaseStep) {\n\t*out = *in\n}", "func 
(in *Variable) DeepCopyInto(out *Variable) {\n\t*out = *in\n}", "func (in *RestoreProgress) DeepCopyInto(out *RestoreProgress) {\n\t*out = *in\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Path) DeepCopyInto(out *Path) {\n\t*out = *in\n\treturn\n}", "func (in *NamespacedObjectReference) DeepCopyInto(out *NamespacedObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *GitDirectoryGeneratorItem) DeepCopyInto(out *GitDirectoryGeneratorItem) {\n\t*out = *in\n}", "func (in *NamePath) DeepCopyInto(out *NamePath) {\n\t*out = *in\n\treturn\n}", "func (in *ConsoleCreateObj) DeepCopyInto(out *ConsoleCreateObj) {\n\t*out = *in\n}", "func (in *UsedPipelineRun) DeepCopyInto(out *UsedPipelineRun) {\n\t*out = *in\n}", "func (in *BuildTemplate) DeepCopyInto(out *BuildTemplate) {\n\t*out = *in\n\tif in.Cmd != nil {\n\t\tin, out := &in.Cmd, &out.Cmd\n\t\t*out = make([]BuildTemplateStep, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n}", "func (in *ObjectInfo) DeepCopyInto(out *ObjectInfo) {\n\t*out = *in\n\tout.GroupVersionKind = in.GroupVersionKind\n\treturn\n}", "func (in *Files) DeepCopyInto(out *Files) {\n\t*out = *in\n}", "func (in *Source) DeepCopyInto(out *Source) {\n\t*out = *in\n\tif in.Dependencies != nil {\n\t\tin, out := &in.Dependencies, &out.Dependencies\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MavenRepositories != nil {\n\t\tin, out := &in.MavenRepositories, &out.MavenRepositories\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\treturn\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = *in\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = 
*in\n}", "func (in *StackBuild) DeepCopyInto(out *StackBuild) {\n\t*out = *in\n\treturn\n}", "func (in *BuildTaskRef) DeepCopyInto(out *BuildTaskRef) {\n\t*out = *in\n\treturn\n}", "func (in *PathInfo) DeepCopyInto(out *PathInfo) {\n\t*out = *in\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *PoA) DeepCopyInto(out *PoA) {\n\t*out = *in\n}", "func (in *Section) DeepCopyInto(out *Section) {\n\t*out = *in\n\tif in.SecretRefs != nil {\n\t\tin, out := &in.SecretRefs, &out.SecretRefs\n\t\t*out = make([]SecretReference, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Files != nil {\n\t\tin, out := &in.Files, &out.Files\n\t\t*out = make([]FileMount, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *DNSSelection) DeepCopyInto(out *DNSSelection) {\n\t*out = *in\n\tif in.Include != nil {\n\t\tin, out := &in.Include, &out.Include\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Exclude != nil {\n\t\tin, out := &in.Exclude, &out.Exclude\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n}", "func (in *ReleaseVersion) DeepCopyInto(out *ReleaseVersion) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Command) DeepCopyInto(out *Command) {\n\t*out = *in\n\tif in.Flags != nil {\n\t\tin, out := &in.Flags, &out.Flags\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Value != nil {\n\t\tin, out := &in.Value, &out.Value\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *PathRule) DeepCopyInto(out *PathRule) {\n\t*out = *in\n\treturn\n}", "func (in *DockerLifecycleData) DeepCopyInto(out *DockerLifecycleData) {\n\t*out 
= *in\n}", "func (in *RunScriptStepConfig) DeepCopyInto(out *RunScriptStepConfig) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *Checksum) DeepCopyInto(out *Checksum) {\n\t*out = *in\n}", "func (in *DomainNameOutput) DeepCopyInto(out *DomainNameOutput) {\n\t*out = *in\n}", "func (in *InterfaceStruct) DeepCopyInto(out *InterfaceStruct) {\n\t*out = *in\n\tif in.val != nil {\n\t\tin, out := &in.val, &out.val\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Ref) DeepCopyInto(out *Ref) {\n\t*out = *in\n}", "func (in *MemorySpec) DeepCopyInto(out *MemorySpec) {\n\t*out = *in\n}", "func (in *BuildJenkinsInfo) DeepCopyInto(out *BuildJenkinsInfo) {\n\t*out = *in\n\treturn\n}", "func (in *VirtualDatabaseBuildObject) DeepCopyInto(out *VirtualDatabaseBuildObject) {\n\t*out = *in\n\tif in.Incremental != nil {\n\t\tin, out := &in.Incremental, &out.Incremental\n\t\t*out = new(bool)\n\t\t**out = **in\n\t}\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]v1.EnvVar, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\tout.Git = in.Git\n\tin.Source.DeepCopyInto(&out.Source)\n\tif in.Webhooks != nil {\n\t\tin, out := &in.Webhooks, &out.Webhooks\n\t\t*out = make([]WebhookSecret, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *MaintenanceInfo) DeepCopyInto(out *MaintenanceInfo) {\n\t*out = *in\n\treturn\n}", "func (in *KopsNode) DeepCopyInto(out *KopsNode) {\n\t*out = *in\n\treturn\n}", "func (in *FalconAPI) DeepCopyInto(out *FalconAPI) {\n\t*out = *in\n}", "func (in *EBS) DeepCopyInto(out *EBS) {\n\t*out = *in\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n\treturn\n}", "func (in *Empty) DeepCopyInto(out *Empty) {\n\t*out = *in\n\tout.XXX_NoUnkeyedLiteral = in.XXX_NoUnkeyedLiteral\n\tif in.XXX_unrecognized != nil {\n\t\tin, out := 
&in.XXX_unrecognized, &out.XXX_unrecognized\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ComponentDistGit) DeepCopyInto(out *ComponentDistGit) {\n\t*out = *in\n\treturn\n}", "func (in *Memory) DeepCopyInto(out *Memory) {\n\t*out = *in\n\tout.Required = in.Required.DeepCopy()\n}", "func (in *Persistence) DeepCopyInto(out *Persistence) {\n\t*out = *in\n\tout.Size = in.Size.DeepCopy()\n\treturn\n}", "func (in *ManagedDisk) DeepCopyInto(out *ManagedDisk) {\n\t*out = *in\n}", "func (e *Email) DeepCopyInto(out *Email) {\n\t*out = *e\n}", "func (in *ImageInfo) DeepCopyInto(out *ImageInfo) {\n\t*out = *in\n}", "func (in *ShootRef) DeepCopyInto(out *ShootRef) {\n\t*out = *in\n}", "func (in *N3000Fpga) DeepCopyInto(out *N3000Fpga) {\n\t*out = *in\n}", "func (in *Node) DeepCopyInto(out *Node) {\n\t*out = *in\n\tif in.FailStatus != nil {\n\t\tin, out := &in.FailStatus, &out.FailStatus\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MigratingSlots != nil {\n\t\tin, out := &in.MigratingSlots, &out.MigratingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.ImportingSlots != nil {\n\t\tin, out := &in.ImportingSlots, &out.ImportingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n}", "func (in *BuiltInAdapter) DeepCopyInto(out *BuiltInAdapter) {\n\t*out = *in\n}", "func (in *NetflowType) DeepCopyInto(out *NetflowType) {\n\t*out = *in\n\treturn\n}", "func (in *CPUSpec) DeepCopyInto(out *CPUSpec) {\n\t*out = *in\n}", "func (in *LoopState) DeepCopyInto(out *LoopState) {\n\t*out = *in\n}" ]
[ "0.82150793", "0.81278837", "0.8103475", "0.80859107", "0.808366", "0.80668724", "0.806427", "0.8026438", "0.80119294", "0.7996162", "0.7991859", "0.79883516", "0.79867214", "0.79867214", "0.79865664", "0.7985051", "0.7975424", "0.7972233", "0.796902", "0.796902", "0.796902", "0.79675347", "0.7961868", "0.79614633", "0.79451776", "0.79451776", "0.7944049", "0.7943105", "0.7941959", "0.7941959", "0.7939773", "0.79381335", "0.7936481", "0.7929676", "0.7924561", "0.7915237", "0.79118174", "0.7910918", "0.79092526", "0.79085267", "0.79084533", "0.7905077", "0.790474", "0.7904188", "0.7904188", "0.7903653", "0.79035604", "0.79023683", "0.7898657", "0.7897374", "0.78911847", "0.7890479", "0.7889666", "0.7888898", "0.78875107", "0.7886158", "0.7884866", "0.7884866", "0.7884182", "0.788048", "0.7874711", "0.78745437", "0.78745437", "0.7873115", "0.78724474", "0.78708154", "0.787052", "0.7864988", "0.7863134", "0.7863134", "0.7863134", "0.7862709", "0.78623676", "0.7860133", "0.7858168", "0.78573614", "0.7856399", "0.78510904", "0.78451645", "0.7844784", "0.7841376", "0.78374505", "0.78366643", "0.78362054", "0.783535", "0.7832941", "0.78302246", "0.7828928", "0.7826221", "0.7824716", "0.7824198", "0.7822571", "0.78196645", "0.7818943", "0.7812506", "0.7812284", "0.7811011", "0.78107023", "0.78106093", "0.7809095", "0.7808075" ]
0.0
-1
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AESConfiguration.
func (in *AESConfiguration) DeepCopy() *AESConfiguration { if in == nil { return nil } out := new(AESConfiguration) in.DeepCopyInto(out) return out }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (in *EncryptionConfiguration) DeepCopy() *EncryptionConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EncryptionConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EncryptionConfig) DeepCopy() *EncryptionConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EncryptionConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretEngineConfiguration) DeepCopy() *SecretEngineConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretEngineConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AutoMLSecurityConfig) DeepCopy() *AutoMLSecurityConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AutoMLSecurityConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretboxConfiguration) DeepCopy() *SecretboxConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretboxConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewAES(aesKey, hmacKey []byte) (*AESCTREncryptor, error) {\n\tif len(hmacKey) == 0 {\n\t\treturn nil, ErrHmacKeyTooShort\n\t}\n\n\tblock, err := aes.NewCipher(aesKey)\n\tif err != nil {\n\t\treturn nil, ErrKeyTooShort\n\t}\n\n\treturn &AESCTREncryptor{\n\t\taesKey: aesKey,\n\t\thmacKey: hmacKey,\n\t\tblock: block,\n\t}, nil\n}", "func (in *AWSConfiguration) DeepCopy() *AWSConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AWSConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewAESCrypto(key []byte, iv ...byte) (*AESCrypto, error) {\n\tb, err := aes.NewCipher(key)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tr := &AESCrypto{\n\t\tblock: b,\n\t\tkey: key,\n\t\tiv: iv,\n\t}\n\n\tif len(iv) == 0 {\n\t\tr.iv = key[:b.BlockSize()]\n\t}\n\n\treturn r, nil\n}", "func (in *DataEncryption) DeepCopy() *DataEncryption {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DataEncryption)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EKSCFConfiguration) DeepCopy() *EKSCFConfiguration {\n\tif in 
== nil {\n\t\treturn nil\n\t}\n\tout := new(EKSCFConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EncryptionProperties) DeepCopy() *EncryptionProperties {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EncryptionProperties)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AzureConfiguration) DeepCopy() *AzureConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AzureConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewAES(\n\tmode modeType,\n\tkey string,\n\tpassword string,\n\tencoding encodingType,\n) (AES, error) {\n\n\tif mode == \"\" {\n\t\tmode = ModeGCM\n\t}\n\tif mode == ModeGCM && password == \"\" {\n\t\treturn nil, errors.New(\"password is required in gcm mode\")\n\t}\n\tif mode != ModeGCM && mode != ModeCBC {\n\t\treturn nil, errors.New(\"only support gcm and cbc mode\")\n\t}\n\n\tif key == \"\" {\n\t\tkey = DefaultKey\n\t}\n\tkeyBuf, err := base64.StdEncoding.DecodeString(key)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tkeySize := len(keyBuf) * 8\n\tif keySize != 256 {\n\t\tif mode == ModeCBC {\n\t\t\treturn nil, errors.New(\"key requires a 256-bit base64 encoded string with cbc mode\")\n\t\t}\n\t\tif keySize != 128 && keySize != 192 {\n\t\t\treturn nil, errors.New(\"key requires a 128-bit, 192-bit or 256-bit base64 encoded string\")\n\t\t}\n\t}\n\n\tif encoding == \"\" {\n\t\tencoding = Base64\n\t}\n\n\tif mode == ModeGCM {\n\t\treturn &GCM{\n\t\t\tKey: keyBuf,\n\t\t\tPassword: password,\n\t\t\tVersion: []byte{0x01, 0x03},\n\t\t\tEncoding: encoding,\n\t\t}, nil\n\t}\n\treturn &CBC{\n\t\tKey: keyBuf,\n\t\tVersion: []byte{0x01, 0x04},\n\t\tEncoding: encoding,\n\t}, nil\n}", "func (in *ApplicationConfiguration) DeepCopy() *ApplicationConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ApplicationConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AdvancedConfig) DeepCopy() *AdvancedConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := 
new(AdvancedConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewAES128Cipher(mode cipherMode) Cipher {\n\tencryptor := aesEncryptor()\n\tencryptor.keylen = 16 // AES128\n\n\treturn getCipher(mode, encryptor)\n}", "func (in *DatabaseEncryption) DeepCopy() *DatabaseEncryption {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DatabaseEncryption)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KMSConfiguration) DeepCopy() *KMSConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KMSConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DatabaseSecretEngineConfig) DeepCopy() *DatabaseSecretEngineConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DatabaseSecretEngineConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewAES192Cipher(mode cipherMode) Cipher {\n\tencryptor := aesEncryptor()\n\tencryptor.keylen = 24 // AES192\n\n\treturn getCipher(mode, encryptor)\n}", "func (in *MariaDBConfiguration) DeepCopy() *MariaDBConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MariaDBConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SharedMemoryStoreConfig) DeepCopy() *SharedMemoryStoreConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SharedMemoryStoreConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ShuffleConfig) DeepCopy() *ShuffleConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ShuffleConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ElasticsearchConfiguration) DeepCopy() *ElasticsearchConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ElasticsearchConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (ob *AESCipher) Encrypt(plaintext []byte) (ciphertext []byte, err error) {\n\tif len(ob.cryptoKey) != 32 {\n\t\treturn nil, logError(0xE64A2E, errKeySize)\n\t}\n\t// nonce is a byte array filled with cryptographically secure random bytes\n\tn := ob.gcm.NonceSize()\n\tnonce := make([]byte, n)\n\t_, err = 
io.ReadFull(rand.Reader, nonce)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tciphertext = ob.gcm.Seal(\n\t\tnonce, // dst []byte,\n\t\tnonce, // nonce []byte,\n\t\tplaintext, // plaintext []byte,\n\t\tnil, // additionalData []byte) []byte\n\t)\n\treturn ciphertext, nil\n}", "func (in *RedisConfiguration) DeepCopy() *RedisConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(RedisConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ApplicationConfigurationList) DeepCopy() *ApplicationConfigurationList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ApplicationConfigurationList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewConfiguration(authKeySize int, encryptEnabled bool, encapEnabled bool, wireguardEnabled bool, hsIpcacheDSRenabled bool, mtu int, mtuDetectIP net.IP) Configuration {\n\tencryptOverhead := 0\n\n\tif mtu == 0 {\n\t\tvar err error\n\n\t\tif mtuDetectIP != nil {\n\t\t\tmtu, err = getMTUFromIf(mtuDetectIP)\n\t\t} else {\n\t\t\tmtu, err = autoDetect()\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.WithError(err).Warning(\"Unable to automatically detect MTU\")\n\t\t\tmtu = EthernetMTU\n\t\t}\n\t}\n\n\tif encryptEnabled {\n\t\t// Add the difference between the default and the actual key sizes here\n\t\t// to account for users specifying non-default auth key lengths.\n\t\tencryptOverhead = EncryptionIPsecOverhead + (authKeySize - EncryptionDefaultAuthKeyLength)\n\t}\n\n\tfullTunnelOverhead := TunnelOverhead\n\tif hsIpcacheDSRenabled {\n\t\tfullTunnelOverhead += DsrTunnelOverhead\n\t}\n\n\tconf := Configuration{\n\t\tstandardMTU: mtu,\n\t\ttunnelMTU: mtu - (fullTunnelOverhead + encryptOverhead),\n\t\tpostEncryptMTU: mtu - TunnelOverhead,\n\t\tpreEncryptMTU: mtu - encryptOverhead,\n\t\tencapEnabled: encapEnabled,\n\t\tencryptEnabled: encryptEnabled,\n\t\twireguardEnabled: wireguardEnabled,\n\t}\n\n\tif conf.tunnelMTU < 0 {\n\t\tconf.tunnelMTU = 0\n\t}\n\n\treturn conf\n}", "func (in *IsoConfiguration) DeepCopy() 
*IsoConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IsoConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LoggingConfiguration) DeepCopy() *LoggingConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LoggingConfiguration) DeepCopy() *LoggingConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewAESCBC(key, iv []byte) AESCBC {\n\t// If the key is nil, we create a random one.\n\tif key == nil {\n\t\tkey = RandomBytes(aes.BlockSize)\n\t}\n\t// If the IV is nil, then we create a random one.\n\tif iv == nil {\n\t\tiv = RandomBytes(len(key))\n\t}\n\tblock, err := aes.NewCipher(key)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn AESCBC{block: block, key: key, iv: iv}\n}", "func (in *EKSManagedConfiguration) DeepCopy() *EKSManagedConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EKSManagedConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KubebenchConfig) DeepCopy() *KubebenchConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubebenchConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MSSQLServerTransparentDataEncryption) DeepCopy() *MSSQLServerTransparentDataEncryption {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MSSQLServerTransparentDataEncryption)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func 
(conf *Configuration) Copy() *Configuration {\n\tvar dup Configuration\n\n\t_ = Copy(&dup, conf)\n\n\tdup.TestMode = conf.TestMode\n\n\treturn &dup\n}", "func AESDecrypt(src []byte, key []byte, padding paddingMode) ([]byte, error) {\n\tblock, err := aes.NewCipher(key)\n\tif err != nil {\n\t\treturn make([]byte, 0), err\n\t}\n\tvar blockMode = cipher.NewCBCDecrypter(block, key[:block.BlockSize()])\n\tvar result = make([]byte, len(src))\n\tblockMode.CryptBlocks(result, src)\n\tunpaddingFunc, err := GetUnpadding(padding)\n\tif err != nil {\n\t\treturn make([]byte, 0), err\n\t}\n\treturn unpaddingFunc(result), nil\n}", "func (in *AWSConfig) DeepCopy() *AWSConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AWSConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewAesWithKey() *Aes {\n\treturn &Aes{Key: defaultKey}\n}", "func (in *AutoscaleConfig) DeepCopy() *AutoscaleConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AutoscaleConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func EnAES(in, key, iv []byte) ([]byte, error) {\n\tcip, err := aes.NewCipher(key)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tout := make([]byte, len(in))\n\tcipher.NewCFBEncrypter(cip, iv).XORKeyStream(out, in)\n\treturn out, nil\n}", "func (c *AIAConfig) Copy() *AIAConfig {\n\treturn &AIAConfig{\n\t\tc.AiaURL,\n\t\tc.OcspURL,\n\t\tc.CrlURL,\n\t\tc.CRLExpiry,\n\t\tc.OCSPExpiry,\n\t\tc.CRLRenewal,\n\t}\n}", "func AESCBCEncryptWithMAC(key []byte, input []byte) ([]byte, error) {\n\taesKey := key[:32]\n\thmacKey := key[32:]\n\n\tencrypted, err := AESCBCEncrypt(aesKey, input)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tmac := HMACSHA256(hmacKey, encrypted)\n\tencrypted = append(encrypted, mac...)\n\treturn encrypted, nil\n}", "func AESEncrypt(src []byte, key []byte, padding paddingMode) ([]byte, error) {\n\tblock, err := aes.NewCipher(key)\n\tif err != nil {\n\t\treturn make([]byte, 0), err\n\t}\n\tvar blockMode = cipher.NewCBCEncrypter(block, 
key[:block.BlockSize()])\n\tpaddingFunc, err := GetPadding(padding)\n\tif err != nil {\n\t\treturn make([]byte, 0), err\n\t}\n\tsrc = paddingFunc(src, block.BlockSize())\n\tvar result = make([]byte, len(src))\n\tblockMode.CryptBlocks(result, src)\n\treturn result, nil\n}", "func (in *AdmissionWebhookConfiguration) DeepCopy() *AdmissionWebhookConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AdmissionWebhookConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *CASignedConfig) DeepCopy() *CASignedConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(CASignedConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func decryptAES(ciphertext []byte, Key []byte) (plaintext []byte, err error){\n\tif(len(ciphertext)<=userlib.BlockSize){\n\t\treturn nil,errors.New(strings.ToTitle(\"Invalid Ciphertext\"))\n\t}\n\tiv := ciphertext[:userlib.BlockSize]\n\tcipher := userlib.CFBDecrypter(Key, iv)\n\tcipher.XORKeyStream(ciphertext[userlib.BlockSize:], ciphertext[userlib.BlockSize:])\n\tplaintext = ciphertext[userlib.BlockSize:]\n\treturn plaintext,nil\n}", "func Encrypt_AES_CBC(plaintext, key []byte) (ciphertext []byte, err error) {\n\tif len(plaintext)%aes.BlockSize != 0 {\n\t\tplaintext = PKCS5Padding(plaintext, aes.BlockSize)\n\t}\n\n\tvar block cipher.Block\n\tif block, err = aes.NewCipher(key); err != nil {\n\t\treturn\n\t}\n\n\t// The IV needs to be unique, but not secure. 
Therefore it's common to\n\t// include it at the beginning of the ciphertext.\n\tciphertext = make([]byte, aes.BlockSize+len(plaintext))\n\tiv := ciphertext[:aes.BlockSize]\n\tif _, err = io.ReadFull(rand.Reader, iv); err != nil {\n\t\treturn\n\t}\n\n\tstream := cipher.NewCBCEncrypter(block, iv)\n\tstream.CryptBlocks(ciphertext[aes.BlockSize:], plaintext)\n\treturn\n}", "func (conf *Config) Equals(other *Config) bool {\n\tif !conf.UnicastConfig.Equals(other.UnicastConfig) {\n\t\treturn false\n\t}\n\tif !conf.ExtensionConfig.Equals(other.ExtensionConfig) {\n\t\treturn false\n\t}\n\treturn true\n}", "func EncryptAES(src string) (based string, err error) {\n\t//Initial []byte token\n\ttoken, err := hex.DecodeString(\"46356afe55fa3cea9cbe73ad442cad47\")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// Block from Cipher\n\tblock, err := aes.NewCipher(token)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t\treturn\n\t}\n\tblockSize := block.BlockSize()\n\tecbe := cipher.NewCBCEncrypter(block, token[:blockSize])\n\tcontent := PKCS5Padding([]byte(src), blockSize)\n\t// Initial crypt value\n\tcrypted := make([]byte, len(content))\n\tecbe.CryptBlocks(crypted, content)\n\tbased = base64.StdEncoding.EncodeToString(crypted)\n\treturn\n}", "func (in *AzureConfig) DeepCopy() *AzureConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AzureConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *DataLakeResource) SetEncryptionConfiguration(v *DataLakeEncryptionConfiguration) *DataLakeResource {\n\ts.EncryptionConfiguration = v\n\treturn s\n}", "func (in *AutoscalerConfig) DeepCopy() *AutoscalerConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AutoscalerConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DatabaseSecretEngineConfigList) DeepCopy() *DatabaseSecretEngineConfigList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DatabaseSecretEngineConfigList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AuthenticationOperatorConfig) 
DeepCopy() *AuthenticationOperatorConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AuthenticationOperatorConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AESConfiguration) DeepCopyInto(out *AESConfiguration) {\n\t*out = *in\n\tif in.Keys != nil {\n\t\tin, out := &in.Keys, &out.Keys\n\t\t*out = make([]Key, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func EncryptAES(b, key, iv []byte) []byte {\n\tcipher, err := aes.NewCipher(key)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tbs := cipher.BlockSize()\n\n\tif iv != nil && len(iv) != bs {\n\t\tpanic(fmt.Sprintf(\"IV size is %v; need %v\", len(iv), bs))\n\t}\n\tprev := iv\n\n\tif len(b)%bs != 0 {\n\t\tpanic(fmt.Sprintf(\"buffer size %v isn't multiple of block size %v\", len(b), bs))\n\t}\n\n\tvar enc []byte\n\tfor i := 0; i < len(b); i += bs {\n\t\t// Get the source block, padding it if needed.\n\t\tn := bs\n\t\tif rem := len(b) - i; rem < bs {\n\t\t\tn = rem\n\t\t}\n\t\tsrc := b[i : i+n]\n\n\t\t// If using CBC, XOR with the previous ciphertext block (or the initialization vector).\n\t\tif iv != nil {\n\t\t\tsrc = XOR(src, prev)\n\t\t}\n\n\t\t// Encrypt the block and save it to XOR against the next plaintext block (for CBC).\n\t\tdst := make([]byte, bs)\n\t\tcipher.Encrypt(dst, src)\n\t\tenc = append(enc, dst...)\n\t\tprev = dst\n\t}\n\treturn enc\n}", "func (in *AvailabilityConfig) DeepCopy() *AvailabilityConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AvailabilityConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewAEADCipher(key []byte) (cipher.AEAD, error) {\n\tvar (\n\t\tkeyval []byte\n\t\tcipherBlock cipher.Block\n\t\taeadCipher cipher.AEAD\n\t\terr error\n\t)\n\n\t//If no key is provided, generate one.\n\tif key == nil {\n\t\tkeyval = make([]byte, 32)\n\t\t_, err = rand.Read(keyval)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t} else {\n\t\tswitch len(key) {\n\t\tcase 16, 24, 36:\n\t\tdefault:\n\t\t\treturn nil, fmt.Errorf(\"An aead key must be of length 
16. 24, or 32. This key is of length: \", len(key))\n\t\t}\n\t\tkeyval = key\n\t}\n\n\t//The key is used to create an AES Cipher Block\n\tcipherBlock, err = aes.NewCipher(keyval)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t//The AES Cipher Block is used to create an AEAD GCM which is a 128-bit, block cipher wrapped\n\t//in a Galois Counter Mode with the standard nonce length.\n\t//This is used to encrypt/decrypt all subscriber identifiers in the hidden fields of TBD 2nd Factor Selection Forms.\n\taeadCipher, err = cipher.NewGCM(cipherBlock)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn aeadCipher, nil\n}", "func AESCBCEncrypt(key []byte, input []byte) ([]byte, error) {\n\tpaddedInput := pad(input)\n\n\tblock, err := aes.NewCipher(key)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\toutput := make([]byte, aes.BlockSize+len(paddedInput))\n\tiv := output[:aes.BlockSize]\n\tif _, err := io.ReadFull(rand.Reader, iv); err != nil {\n\t\treturn nil, err\n\t}\n\n\tmode := cipher.NewCBCEncrypter(block, iv)\n\tmode.CryptBlocks(output[aes.BlockSize:], paddedInput)\n\n\treturn output, nil\n}", "func NewCipher(key []byte) cipher.Block {\n\texpkey := keyExpansion(key)\n\treturn cipherAES{expkey}\n}", "func (in *ObjectStorageConfig) DeepCopy() *ObjectStorageConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ObjectStorageConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *CassandraConfig) DeepCopy() *CassandraConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(CassandraConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func encryptAES(data []byte, key []byte) (ciphertext []byte){\n\tciphertext = make([]byte, userlib.BlockSize+len(data))\n\tiv := ciphertext[:userlib.BlockSize]\n\tcopy(iv, userlib.RandomBytes(userlib.BlockSize))\n\tcipher := userlib.CFBEncrypter(key, iv)\n\tcipher.XORKeyStream(ciphertext[userlib.BlockSize:], data)\n\treturn\n}", "func (in *AzureConfigList) DeepCopy() *AzureConfigList {\n\tif in == nil {\n\t\treturn 
nil\n\t}\n\tout := new(AzureConfigList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AuthenticatorGroupsConfig) DeepCopy() *AuthenticatorGroupsConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AuthenticatorGroupsConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func AESEncrypt(message, AESKey []byte) ([]byte, error) {\n\tblock, err := aes.NewCipher(AESKey)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// The IV needs to be unique, but not secure. Therefore it's common to\n\t// include it at the beginning of the ciphertext.\n\tciphertext := make([]byte, aes.BlockSize+len(message))\n\tiv := ciphertext[:aes.BlockSize]\n\tif _, err := io.ReadFull(rand.Reader, iv); err != nil {\n\t\treturn nil, err\n\t}\n\tstream := cipher.NewCFBEncrypter(block, iv)\n\tstream.XORKeyStream(ciphertext[aes.BlockSize:], message)\n\n\treturn ciphertext, nil\n}", "func (in *IdentityConfiguration) DeepCopy() *IdentityConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IdentityConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *DataLakeConfiguration) SetEncryptionConfiguration(v *DataLakeEncryptionConfiguration) *DataLakeConfiguration {\n\ts.EncryptionConfiguration = v\n\treturn s\n}", "func (in *ShieldedInstanceConfig) DeepCopy() *ShieldedInstanceConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ShieldedInstanceConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LoggingConfig) DeepCopy() *LoggingConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func AesEncrypt(source []byte, keyStr string) ([]byte, error) {\n\tsrcLength := len(source)\n\tvar usize uint64\n\theadSize := int(unsafe.Sizeof(usize))\n\thead := make([]byte, headSize, headSize+len(source))\n\tsource = append(head, source...)\n\tlen0 := len(source)\n\tif r := len0 % aes.BlockSize; r > 0 {\n\t\tlen0 += aes.BlockSize - r\n\t\tpadding := make([]byte, aes.BlockSize-r)\n\t\tif _, err := 
io.ReadFull(rand.Reader, padding); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tsource = append(source, padding...)\n\t}\n\tbuffer := make([]byte, len0+aes.BlockSize)\n\n\tiv := buffer[:aes.BlockSize]\n\tif _, err := io.ReadFull(rand.Reader, iv); err != nil {\n\t\treturn nil, err\n\t}\n\tusize = uint64(srcLength)\n\tbinary.BigEndian.PutUint64(source, usize)\n\tblock, ec := aes.NewCipher(padKey([]byte(keyStr)))\n\tif ec != nil {\n\t\treturn nil, ec\n\t}\n\tmode := cipher.NewCBCEncrypter(block, iv)\n\tmode.CryptBlocks(buffer[aes.BlockSize:], source)\n\treturn buffer, nil\n}", "func (in *AssociatedAlertsConfig) DeepCopy() *AssociatedAlertsConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AssociatedAlertsConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KubeadmConfig) DeepCopy() *KubeadmConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubeadmConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigurationList) DeepCopy() *ConfigurationList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigurationList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AuthenticationOperatorConfigList) DeepCopy() *AuthenticationOperatorConfigList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AuthenticationOperatorConfigList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ContextConfigs) DeepCopy() *ContextConfigs {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ContextConfigs)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func DeserializeConfiguration(encoded []byte) (*Configuration, error) {\n\tif len(encoded) != confLength {\n\t\treturn nil, internal.ErrConfigurationInvalidLength\n\t}\n\n\treturn &Configuration{\n\t\tGroup: Group(encoded[0]),\n\t\tKDF: hash.Hashing(encoded[1]),\n\t\tMAC: hash.Hashing(encoded[2]),\n\t\tHash: hash.Hashing(encoded[3]),\n\t\tMHF: mhf.Identifier(encoded[4]),\n\t\tMode: Mode(encoded[5]),\n\t\tNonceLen: encoding.OS2IP(encoded[6:]),\n\t}, nil\n}", "func (in *ApplyConfig) 
DeepCopy() *ApplyConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ApplyConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ProviderConfiguration) DeepCopy() *ProviderConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ProviderConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewAesStream(readWriter io.ReadWriter, aesKey, iv []byte) (*AesStream, error) {\n\tif len(iv) != aes.BlockSize {\n\t\tpanic(\"Length of iv in NewAesCrypter does not match the AES block size\")\n\t}\n\t// Create the AES cipher\n\taesCipher, err := aes.NewCipher(aesKey)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t// TODO: Is it safe to initialize both the encrypter and the decrypter to the same IV?\n\taesStream := &AesStream{\n\t\treadWriter: readWriter,\n\t\tcfbEncrypter: cipher.NewCFBEncrypter(aesCipher, iv),\n\t\tcfbDecrypter: cipher.NewCFBDecrypter(aesCipher, iv),\n\t}\n\treturn aesStream, nil\n}", "func (in *KubemanagerConfig) DeepCopy() *KubemanagerConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubemanagerConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *RedisConfig) DeepCopy() *RedisConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(RedisConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func AESEnccrypt(plaintext []byte, key string) []byte {\n\tcipher, err := aes.NewCipher([]byte(key))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tbs := cipher.BlockSize()\n\tif len(plaintext)%bs != 0 {\n\t\tpanic(\"Need a multiple of the blocksize\")\n\t}\n\n\tciphertext := make([]byte, len(plaintext))\n\tfor len(plaintext) > 0 {\n\t\tcipher.Encrypt(ciphertext, plaintext)\n\t\tplaintext = plaintext[bs:]\n\t\tciphertext = ciphertext[bs:]\n\t}\n\n\treturn ciphertext\n}", "func (in *MSSQLServerTransparentDataEncryptionList) DeepCopy() *MSSQLServerTransparentDataEncryptionList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MSSQLServerTransparentDataEncryptionList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", 
"func (in *ShuffleServerConfig) DeepCopy() *ShuffleServerConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ShuffleServerConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func AESDecrypt(encrypted, key []byte) ([]byte, error) {\n\tvar err error\n\tvar block cipher.Block\n\tvar iv = key[:aes.BlockSize]\n\tdecrypted := make([]byte, len(encrypted))\n\tif block, err = aes.NewCipher(key); err != nil {\n\t\treturn nil, err\n\t}\n\tdecrypter := cipher.NewCFBDecrypter(block, iv)\n\tdecrypter.XORKeyStream(decrypted, encrypted)\n\treturn decrypted, nil\n}", "func NewAes(key string) *Aes {\n\treturn &Aes{Key: key}\n}", "func (in *GKEMasterAuthorizedNetworksConfig) DeepCopy() *GKEMasterAuthorizedNetworksConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(GKEMasterAuthorizedNetworksConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MSSQLServerTransparentDataEncryptionParameters) DeepCopy() *MSSQLServerTransparentDataEncryptionParameters {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MSSQLServerTransparentDataEncryptionParameters)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AWSConfigList) DeepCopy() *AWSConfigList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AWSConfigList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AWSAccessRequestConfiguration) DeepCopy() *AWSAccessRequestConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AWSAccessRequestConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func AESDecrypt(cipherkey, ciphertext []byte) ([]byte, error) {\n\tblock, err := aes.NewCipher(cipherkey)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsrc := make([]byte, hex.DecodedLen(len(ciphertext)))\n\t_, err = hex.Decode(src, ciphertext)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tbs := block.BlockSize()\n\tr := make([]byte, len(src))\n\tdst := r\n\tfor len(src) > 0 {\n\t\tblock.Decrypt(dst, src)\n\t\tsrc = src[bs:]\n\t\tdst = dst[bs:]\n\t}\n\treturn removePad(r)\n}", "func 
DecryptAES(enc, key, iv []byte) []byte {\n\tcipher, err := aes.NewCipher(key)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tbs := cipher.BlockSize()\n\n\tif iv != nil && len(iv) != bs {\n\t\tpanic(fmt.Sprintf(\"IV size is %v; need %v\", len(iv), bs))\n\t}\n\tprev := iv\n\n\tdec := make([]byte, 0, len(enc))\n\tfor i := 0; i < len(enc); i += bs {\n\t\tsrc := make([]byte, bs)\n\t\tdst := make([]byte, bs)\n\t\tn := copy(src, enc[i:])\n\t\tcipher.Decrypt(dst, src)\n\n\t\tif iv != nil {\n\t\t\tdst = XOR(dst, prev)\n\t\t}\n\n\t\tdec = append(dec, dst[:n]...)\n\t\tprev = src\n\t}\n\treturn dec\n}", "func (in *BootConfiguration) DeepCopy() *BootConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BootConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewAESOpt(secret string) (*AESOpt, error) {\n\tif len(secret) != 64 {\n\t\treturn nil, errors.New(\"Secret must be 64 character\")\n\t}\n\tkey, err := hex.DecodeString(secret)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"NewAESOpt.hex.DecodeString\")\n\t}\n\n\t//Create a new Cipher Block from the key\n\tblock, err := aes.NewCipher(key)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"NewAESOpt.aes.NewCipher\")\n\t}\n\n\t//Create a new GCM - https://en.wikipedia.org/wiki/Galois/Counter_Mode\n\t//https://golang.org/pkg/crypto/cipher/#NewGCM\n\taesGCM, err := cipher.NewGCM(block)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"NewAESOpt.cipher.NewGCM\")\n\t}\n\n\treturn &AESOpt{\n\t\taesGCM: aesGCM,\n\t}, nil\n}", "func (in *MasterAuthorizedNetworksConfig) DeepCopy() *MasterAuthorizedNetworksConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MasterAuthorizedNetworksConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}" ]
[ "0.6966809", "0.64778304", "0.5603832", "0.53495884", "0.53042156", "0.52616113", "0.5155239", "0.5146213", "0.51294535", "0.5119548", "0.5056005", "0.4989806", "0.49877337", "0.49724552", "0.49615574", "0.49553683", "0.4941303", "0.49344137", "0.48960802", "0.4832625", "0.4827364", "0.48179698", "0.47963744", "0.47851822", "0.47366694", "0.4706727", "0.4691784", "0.46696088", "0.46656266", "0.46439534", "0.46439534", "0.4632355", "0.46079278", "0.46076107", "0.46076107", "0.46076107", "0.45989886", "0.4598223", "0.45974976", "0.4554455", "0.45493475", "0.45478", "0.45364144", "0.4533185", "0.45273444", "0.45240578", "0.4521409", "0.4517186", "0.4514158", "0.4494403", "0.44805065", "0.44793257", "0.44785288", "0.44740903", "0.44668213", "0.44590485", "0.44535285", "0.44519153", "0.444576", "0.44448784", "0.44413248", "0.4439484", "0.4426765", "0.4423864", "0.44213775", "0.44206014", "0.44161457", "0.4411676", "0.43993694", "0.43799916", "0.43752164", "0.435681", "0.43454686", "0.43313637", "0.4326619", "0.43239734", "0.43125653", "0.43101257", "0.43081674", "0.43037108", "0.43023223", "0.42946145", "0.42822775", "0.42757452", "0.42721495", "0.42712998", "0.42692685", "0.42687812", "0.426517", "0.42644364", "0.42620978", "0.42601165", "0.425922", "0.42574707", "0.4250499", "0.4242166", "0.42351356", "0.4233664", "0.42330053", "0.42325997" ]
0.89656043
0
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be nonnil.
func (in *EncryptionConfiguration) DeepCopyInto(out *EncryptionConfiguration) { *out = *in out.TypeMeta = in.TypeMeta if in.Resources != nil { in, out := &in.Resources, &out.Resources *out = make([]ResourceConfiguration, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } } return }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (r *RunInfo) DeepCopyInto(out *RunInfo) {\n\t*out = *r\n}", "func (in *Base) DeepCopyInto(out *Base) {\n\t*out = *in\n\treturn\n}", "func (in *ForkObject) DeepCopyInto(out *ForkObject) {\n\t*out = *in\n}", "func (in *TargetObjectInfo) DeepCopyInto(out *TargetObjectInfo) {\n\t*out = *in\n}", "func (in *DebugObjectInfo) DeepCopyInto(out *DebugObjectInfo) {\n\t*out = *in\n}", "func (in *Input) DeepCopyInto(out *Input) {\n\t*out = *in\n}", "func (u *SSN) DeepCopyInto(out *SSN) {\n\t*out = *u\n}", "func (in *ExistPvc) DeepCopyInto(out *ExistPvc) {\n\t*out = *in\n}", "func (in *DockerStep) DeepCopyInto(out *DockerStep) {\n\t*out = *in\n\tif in.Inline != nil {\n\t\tin, out := &in.Inline, &out.Inline\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tout.Auth = in.Auth\n\treturn\n}", "func (in *Container) DeepCopyInto(out *Container) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.Command != nil {\n\t\tin, out := &in.Command, &out.Command\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.LifeCycleScript != nil {\n\t\tin, out := &in.LifeCycleScript, &out.LifeCycleScript\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *RuntimeRef) DeepCopyInto(out *RuntimeRef) {\n\t*out = *in\n}", "func (in *Ibft2) DeepCopyInto(out *Ibft2) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *TestResult) DeepCopyInto(out *TestResult) {\n\t*out = *in\n}", "func (in *Haproxy) DeepCopyInto(out *Haproxy) {\n\t*out = *in\n\treturn\n}", "func (in *SSH) DeepCopyInto(out *SSH) {\n\t*out = *in\n\treturn\n}", "func (in *Runtime) DeepCopyInto(out *Runtime) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) 
DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (b *Base64) DeepCopyInto(out *Base64) {\n\t*out = *b\n}", "func (in *EventDependencyTransformer) DeepCopyInto(out *EventDependencyTransformer) {\n\t*out = *in\n\treturn\n}", "func (in *StageOutput) DeepCopyInto(out *StageOutput) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Dependent) DeepCopyInto(out *Dependent) {\n\t*out = *in\n\treturn\n}", "func (in *GitFileGeneratorItem) DeepCopyInto(out *GitFileGeneratorItem) {\n\t*out = *in\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *AnsibleStep) DeepCopyInto(out *AnsibleStep) {\n\t*out = *in\n\treturn\n}", "func (in *Forks) DeepCopyInto(out *Forks) {\n\t*out = *in\n\tif in.DAO != nil {\n\t\tin, out := &in.DAO, &out.DAO\n\t\t*out = new(uint)\n\t\t**out = **in\n\t}\n}", "func (in *ContainerPort) DeepCopyInto(out *ContainerPort) {\n\t*out = *in\n}", "func (in *General) DeepCopyInto(out *General) {\n\t*out = *in\n\treturn\n}", "func (in *IsoContainer) DeepCopyInto(out *IsoContainer) {\n\t*out = *in\n}", "func (in *Git) DeepCopyInto(out *Git) {\n\t*out = *in\n\treturn\n}", "func (in *ConfigFile) DeepCopyInto(out *ConfigFile) {\n\t*out = *in\n}", "func (in *BackupProgress) DeepCopyInto(out *BackupProgress) {\n\t*out = *in\n}", "func (u *MAC) DeepCopyInto(out *MAC) {\n\t*out = *u\n}", "func (in *DataDisk) DeepCopyInto(out *DataDisk) {\n\t*out = *in\n}", "func (in *PhaseStep) DeepCopyInto(out *PhaseStep) {\n\t*out = *in\n}", "func 
(in *Variable) DeepCopyInto(out *Variable) {\n\t*out = *in\n}", "func (in *RestoreProgress) DeepCopyInto(out *RestoreProgress) {\n\t*out = *in\n}", "func (in *Path) DeepCopyInto(out *Path) {\n\t*out = *in\n\treturn\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *NamespacedObjectReference) DeepCopyInto(out *NamespacedObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *GitDirectoryGeneratorItem) DeepCopyInto(out *GitDirectoryGeneratorItem) {\n\t*out = *in\n}", "func (in *NamePath) DeepCopyInto(out *NamePath) {\n\t*out = *in\n\treturn\n}", "func (in *ConsoleCreateObj) DeepCopyInto(out *ConsoleCreateObj) {\n\t*out = *in\n}", "func (in *UsedPipelineRun) DeepCopyInto(out *UsedPipelineRun) {\n\t*out = *in\n}", "func (in *BuildTemplate) DeepCopyInto(out *BuildTemplate) {\n\t*out = *in\n\tif in.Cmd != nil {\n\t\tin, out := &in.Cmd, &out.Cmd\n\t\t*out = make([]BuildTemplateStep, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n}", "func (in *ObjectInfo) DeepCopyInto(out *ObjectInfo) {\n\t*out = *in\n\tout.GroupVersionKind = in.GroupVersionKind\n\treturn\n}", "func (in *Files) DeepCopyInto(out *Files) {\n\t*out = *in\n}", "func (in *Source) DeepCopyInto(out *Source) {\n\t*out = *in\n\tif in.Dependencies != nil {\n\t\tin, out := &in.Dependencies, &out.Dependencies\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MavenRepositories != nil {\n\t\tin, out := &in.MavenRepositories, &out.MavenRepositories\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\treturn\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = *in\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = 
*in\n}", "func (in *StackBuild) DeepCopyInto(out *StackBuild) {\n\t*out = *in\n\treturn\n}", "func (in *BuildTaskRef) DeepCopyInto(out *BuildTaskRef) {\n\t*out = *in\n\treturn\n}", "func (in *PathInfo) DeepCopyInto(out *PathInfo) {\n\t*out = *in\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *PoA) DeepCopyInto(out *PoA) {\n\t*out = *in\n}", "func (in *Section) DeepCopyInto(out *Section) {\n\t*out = *in\n\tif in.SecretRefs != nil {\n\t\tin, out := &in.SecretRefs, &out.SecretRefs\n\t\t*out = make([]SecretReference, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Files != nil {\n\t\tin, out := &in.Files, &out.Files\n\t\t*out = make([]FileMount, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n}", "func (in *DNSSelection) DeepCopyInto(out *DNSSelection) {\n\t*out = *in\n\tif in.Include != nil {\n\t\tin, out := &in.Include, &out.Include\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Exclude != nil {\n\t\tin, out := &in.Exclude, &out.Exclude\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ReleaseVersion) DeepCopyInto(out *ReleaseVersion) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *PathRule) DeepCopyInto(out *PathRule) {\n\t*out = *in\n\treturn\n}", "func (in *Command) DeepCopyInto(out *Command) {\n\t*out = *in\n\tif in.Flags != nil {\n\t\tin, out := &in.Flags, &out.Flags\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Value != nil {\n\t\tin, out := &in.Value, &out.Value\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *DockerLifecycleData) DeepCopyInto(out *DockerLifecycleData) {\n\t*out 
= *in\n}", "func (in *RunScriptStepConfig) DeepCopyInto(out *RunScriptStepConfig) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *Checksum) DeepCopyInto(out *Checksum) {\n\t*out = *in\n}", "func (in *DomainNameOutput) DeepCopyInto(out *DomainNameOutput) {\n\t*out = *in\n}", "func (in *InterfaceStruct) DeepCopyInto(out *InterfaceStruct) {\n\t*out = *in\n\tif in.val != nil {\n\t\tin, out := &in.val, &out.val\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Ref) DeepCopyInto(out *Ref) {\n\t*out = *in\n}", "func (in *MemorySpec) DeepCopyInto(out *MemorySpec) {\n\t*out = *in\n}", "func (in *BuildJenkinsInfo) DeepCopyInto(out *BuildJenkinsInfo) {\n\t*out = *in\n\treturn\n}", "func (in *MaintenanceInfo) DeepCopyInto(out *MaintenanceInfo) {\n\t*out = *in\n\treturn\n}", "func (in *VirtualDatabaseBuildObject) DeepCopyInto(out *VirtualDatabaseBuildObject) {\n\t*out = *in\n\tif in.Incremental != nil {\n\t\tin, out := &in.Incremental, &out.Incremental\n\t\t*out = new(bool)\n\t\t**out = **in\n\t}\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]v1.EnvVar, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\tout.Git = in.Git\n\tin.Source.DeepCopyInto(&out.Source)\n\tif in.Webhooks != nil {\n\t\tin, out := &in.Webhooks, &out.Webhooks\n\t\t*out = make([]WebhookSecret, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *KopsNode) DeepCopyInto(out *KopsNode) {\n\t*out = *in\n\treturn\n}", "func (in *FalconAPI) DeepCopyInto(out *FalconAPI) {\n\t*out = *in\n}", "func (in *EBS) DeepCopyInto(out *EBS) {\n\t*out = *in\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n\treturn\n}", "func (in *Empty) DeepCopyInto(out *Empty) {\n\t*out = *in\n\tout.XXX_NoUnkeyedLiteral = in.XXX_NoUnkeyedLiteral\n\tif in.XXX_unrecognized != nil {\n\t\tin, out := 
&in.XXX_unrecognized, &out.XXX_unrecognized\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ComponentDistGit) DeepCopyInto(out *ComponentDistGit) {\n\t*out = *in\n\treturn\n}", "func (in *Memory) DeepCopyInto(out *Memory) {\n\t*out = *in\n\tout.Required = in.Required.DeepCopy()\n}", "func (in *Persistence) DeepCopyInto(out *Persistence) {\n\t*out = *in\n\tout.Size = in.Size.DeepCopy()\n\treturn\n}", "func (in *ManagedDisk) DeepCopyInto(out *ManagedDisk) {\n\t*out = *in\n}", "func (e *Email) DeepCopyInto(out *Email) {\n\t*out = *e\n}", "func (in *ImageInfo) DeepCopyInto(out *ImageInfo) {\n\t*out = *in\n}", "func (in *ShootRef) DeepCopyInto(out *ShootRef) {\n\t*out = *in\n}", "func (in *N3000Fpga) DeepCopyInto(out *N3000Fpga) {\n\t*out = *in\n}", "func (in *NetflowType) DeepCopyInto(out *NetflowType) {\n\t*out = *in\n\treturn\n}", "func (in *Node) DeepCopyInto(out *Node) {\n\t*out = *in\n\tif in.FailStatus != nil {\n\t\tin, out := &in.FailStatus, &out.FailStatus\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MigratingSlots != nil {\n\t\tin, out := &in.MigratingSlots, &out.MigratingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.ImportingSlots != nil {\n\t\tin, out := &in.ImportingSlots, &out.ImportingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n}", "func (in *BuiltInAdapter) DeepCopyInto(out *BuiltInAdapter) {\n\t*out = *in\n}", "func (in *CPUSpec) DeepCopyInto(out *CPUSpec) {\n\t*out = *in\n}", "func (in *LoopState) DeepCopyInto(out *LoopState) {\n\t*out = *in\n}" ]
[ "0.8213356", "0.8126085", "0.81016994", "0.8084519", "0.808198", "0.80653447", "0.8062857", "0.80252475", "0.80096555", "0.79938054", "0.7990593", "0.79874957", "0.7984788", "0.7984788", "0.79846936", "0.7983698", "0.79741865", "0.7970389", "0.79674816", "0.79674816", "0.79674816", "0.79669946", "0.79599875", "0.795973", "0.7943344", "0.7943344", "0.7942179", "0.79414415", "0.79397684", "0.79397684", "0.79372966", "0.79358184", "0.7935006", "0.79276186", "0.7922576", "0.79145026", "0.79108834", "0.7909888", "0.79079306", "0.7907861", "0.7906696", "0.7903708", "0.7903179", "0.7902649", "0.7902327", "0.7902327", "0.7901929", "0.79005504", "0.7897558", "0.7895483", "0.7889539", "0.7888715", "0.7888293", "0.7887502", "0.78859216", "0.78841585", "0.78836566", "0.78836566", "0.7882205", "0.787906", "0.78740764", "0.7873032", "0.7873032", "0.78708595", "0.78696084", "0.7869057", "0.78685385", "0.7863212", "0.7861394", "0.7861394", "0.7861394", "0.7861163", "0.78610516", "0.78584725", "0.785615", "0.78555363", "0.78546166", "0.78496337", "0.784387", "0.7843478", "0.78398615", "0.7835206", "0.78349835", "0.7834799", "0.78337663", "0.7830135", "0.7828457", "0.7826969", "0.78261864", "0.7823209", "0.78229225", "0.7821229", "0.7818359", "0.7817294", "0.7811081", "0.7810498", "0.7809535", "0.78090113", "0.7808808", "0.78087187", "0.7806432" ]
0.0
-1
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfiguration.
func (in *EncryptionConfiguration) DeepCopy() *EncryptionConfiguration { if in == nil { return nil } out := new(EncryptionConfiguration) in.DeepCopyInto(out) return out }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (in *EncryptionConfig) DeepCopy() *EncryptionConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EncryptionConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *DataLakeResource) SetEncryptionConfiguration(v *DataLakeEncryptionConfiguration) *DataLakeResource {\n\ts.EncryptionConfiguration = v\n\treturn s\n}", "func (in *EncryptionProperties) DeepCopy() *EncryptionProperties {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EncryptionProperties)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o BucketReplicationConfigRuleDestinationOutput) EncryptionConfiguration() BucketReplicationConfigRuleDestinationEncryptionConfigurationPtrOutput {\n\treturn o.ApplyT(func(v BucketReplicationConfigRuleDestination) *BucketReplicationConfigRuleDestinationEncryptionConfiguration {\n\t\treturn v.EncryptionConfiguration\n\t}).(BucketReplicationConfigRuleDestinationEncryptionConfigurationPtrOutput)\n}", "func (s *DataLakeConfiguration) SetEncryptionConfiguration(v *DataLakeEncryptionConfiguration) *DataLakeConfiguration {\n\ts.EncryptionConfiguration = v\n\treturn s\n}", "func (in *AESConfiguration) DeepCopy() *AESConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AESConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DataEncryption) DeepCopy() *DataEncryption {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DataEncryption)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DatabaseEncryption) DeepCopy() *DatabaseEncryption {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DatabaseEncryption)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o InstanceOutput) EncryptionConfig() InstanceEncryptionConfigOutput {\n\treturn o.ApplyT(func(v *Instance) InstanceEncryptionConfigOutput { return v.EncryptionConfig }).(InstanceEncryptionConfigOutput)\n}", "func (s *KinesisVideoStreamConfig) SetEncryptionConfig(v *EncryptionConfig) *KinesisVideoStreamConfig {\n\ts.EncryptionConfig = v\n\treturn s\n}", "func (s 
*S3Config) SetEncryptionConfig(v *EncryptionConfig) *S3Config {\n\ts.EncryptionConfig = v\n\treturn s\n}", "func (in *SecretEngineConfiguration) DeepCopy() *SecretEngineConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretEngineConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *NetworkConfiguration) DeepCopy() *NetworkConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(NetworkConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o JobCopyOutput) DestinationEncryptionConfiguration() JobCopyDestinationEncryptionConfigurationPtrOutput {\n\treturn o.ApplyT(func(v JobCopy) *JobCopyDestinationEncryptionConfiguration {\n\t\treturn v.DestinationEncryptionConfiguration\n\t}).(JobCopyDestinationEncryptionConfigurationPtrOutput)\n}", "func (in *EKSCFConfiguration) DeepCopy() *EKSCFConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EKSCFConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretboxConfiguration) DeepCopy() *SecretboxConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretboxConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *CreateJobOutput) SetEncryption(v *Encryption) *CreateJobOutput {\n\ts.Encryption = v\n\treturn s\n}", "func (o JobCopyPtrOutput) DestinationEncryptionConfiguration() JobCopyDestinationEncryptionConfigurationPtrOutput {\n\treturn o.ApplyT(func(v *JobCopy) *JobCopyDestinationEncryptionConfiguration {\n\t\tif v == nil 
{\n\t\t\treturn nil\n\t\t}\n\t\treturn v.DestinationEncryptionConfiguration\n\t}).(JobCopyDestinationEncryptionConfigurationPtrOutput)\n}", "func (in *IsoConfiguration) DeepCopy() *IsoConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IsoConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o JobLoadOutput) DestinationEncryptionConfiguration() JobLoadDestinationEncryptionConfigurationPtrOutput {\n\treturn o.ApplyT(func(v JobLoad) *JobLoadDestinationEncryptionConfiguration {\n\t\treturn v.DestinationEncryptionConfiguration\n\t}).(JobLoadDestinationEncryptionConfigurationPtrOutput)\n}", "func (s *CaptionSource) SetEncryption(v *Encryption) *CaptionSource {\n\ts.Encryption = v\n\treturn s\n}", "func (in *AWSConfiguration) DeepCopy() *AWSConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AWSConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *Artwork) SetEncryption(v *Encryption) *Artwork {\n\ts.Encryption = v\n\treturn s\n}", "func (in *LoggingConfiguration) DeepCopy() *LoggingConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LoggingConfiguration) DeepCopy() *LoggingConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *JobOutput) SetEncryption(v *Encryption) *JobOutput {\n\ts.Encryption = v\n\treturn s\n}", "func (o JobLoadPtrOutput) DestinationEncryptionConfiguration() JobLoadDestinationEncryptionConfigurationPtrOutput {\n\treturn o.ApplyT(func(v *JobLoad) *JobLoadDestinationEncryptionConfiguration {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.DestinationEncryptionConfiguration\n\t}).(JobLoadDestinationEncryptionConfigurationPtrOutput)\n}", "func (o *IamLdapBasePropertiesAllOf) SetEnableEncryption(v bool) {\n\to.EnableEncryption = &v\n}", "func (o JobQueryOutput) DestinationEncryptionConfiguration() 
JobQueryDestinationEncryptionConfigurationPtrOutput {\n\treturn o.ApplyT(func(v JobQuery) *JobQueryDestinationEncryptionConfiguration {\n\t\treturn v.DestinationEncryptionConfiguration\n\t}).(JobQueryDestinationEncryptionConfigurationPtrOutput)\n}", "func (s *JobWatermark) SetEncryption(v *Encryption) *JobWatermark {\n\ts.Encryption = v\n\treturn s\n}", "func NewConfiguration(authKeySize int, encryptEnabled bool, encapEnabled bool, wireguardEnabled bool, hsIpcacheDSRenabled bool, mtu int, mtuDetectIP net.IP) Configuration {\n\tencryptOverhead := 0\n\n\tif mtu == 0 {\n\t\tvar err error\n\n\t\tif mtuDetectIP != nil {\n\t\t\tmtu, err = getMTUFromIf(mtuDetectIP)\n\t\t} else {\n\t\t\tmtu, err = autoDetect()\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.WithError(err).Warning(\"Unable to automatically detect MTU\")\n\t\t\tmtu = EthernetMTU\n\t\t}\n\t}\n\n\tif encryptEnabled {\n\t\t// Add the difference between the default and the actual key sizes here\n\t\t// to account for users specifying non-default auth key lengths.\n\t\tencryptOverhead = EncryptionIPsecOverhead + (authKeySize - EncryptionDefaultAuthKeyLength)\n\t}\n\n\tfullTunnelOverhead := TunnelOverhead\n\tif hsIpcacheDSRenabled {\n\t\tfullTunnelOverhead += DsrTunnelOverhead\n\t}\n\n\tconf := Configuration{\n\t\tstandardMTU: mtu,\n\t\ttunnelMTU: mtu - (fullTunnelOverhead + encryptOverhead),\n\t\tpostEncryptMTU: mtu - TunnelOverhead,\n\t\tpreEncryptMTU: mtu - encryptOverhead,\n\t\tencapEnabled: encapEnabled,\n\t\tencryptEnabled: encryptEnabled,\n\t\twireguardEnabled: wireguardEnabled,\n\t}\n\n\tif conf.tunnelMTU < 0 {\n\t\tconf.tunnelMTU = 0\n\t}\n\n\treturn conf\n}", "func (s *JobInput) SetEncryption(v *Encryption) *JobInput {\n\ts.Encryption = v\n\treturn s\n}", "func (o JobQueryPtrOutput) DestinationEncryptionConfiguration() JobQueryDestinationEncryptionConfigurationPtrOutput {\n\treturn o.ApplyT(func(v *JobQuery) *JobQueryDestinationEncryptionConfiguration {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn 
v.DestinationEncryptionConfiguration\n\t}).(JobQueryDestinationEncryptionConfigurationPtrOutput)\n}", "func (k Key) Encryption() []byte {\n\tl := len(k) / 2\n\treturn k[:l:l]\n}", "func (s *CaptionFormat) SetEncryption(v *Encryption) *CaptionFormat {\n\ts.Encryption = v\n\treturn s\n}", "func (in *EndpointConfigurationParameters) DeepCopy() *EndpointConfigurationParameters {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EndpointConfigurationParameters)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigurationSpec) DeepCopy() *ConfigurationSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigurationSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigurationSpec) DeepCopy() *ConfigurationSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigurationSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ApplicationConfiguration) DeepCopy() *ApplicationConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ApplicationConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *BootConfiguration) DeepCopy() *BootConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BootConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o BackupOutput) EncryptionConfigEncryptionType() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Backup) pulumi.StringOutput { return v.EncryptionConfigEncryptionType }).(pulumi.StringOutput)\n}", "func (in *ProviderConfiguration) DeepCopy() *ProviderConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ProviderConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *ExportRevisionsToS3RequestDetails) SetEncryption(v *ExportServerSideEncryption) *ExportRevisionsToS3RequestDetails {\n\ts.Encryption = v\n\treturn s\n}", "func (in *IdentityConfiguration) DeepCopy() *IdentityConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IdentityConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in 
*OCIConfiguration) DeepCopy() *OCIConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(OCIConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *NetworkPolicyConfig) DeepCopy() *NetworkPolicyConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(NetworkPolicyConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *NetworkPolicyConfig) DeepCopy() *NetworkPolicyConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(NetworkPolicyConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ControllerConfiguration) DeepCopy() *ControllerConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ControllerConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KVConfiguration) DeepCopy() *KVConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KVConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EKSManagedConfiguration) DeepCopy() *EKSManagedConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EKSManagedConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o *DeleteDataSourceParams) SetEncryptionKey(encryptionKey *string) {\n\to.EncryptionKey = encryptionKey\n}", "func (s *AutoExportRevisionToS3RequestDetails) SetEncryption(v *ExportServerSideEncryption) *AutoExportRevisionToS3RequestDetails {\n\ts.Encryption = v\n\treturn s\n}", "func (in *KMSConfiguration) DeepCopy() *KMSConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KMSConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o ServiceResponseOutput) EncryptionConfig() EncryptionConfigResponseOutput {\n\treturn o.ApplyT(func(v ServiceResponse) EncryptionConfigResponse { return v.EncryptionConfig }).(EncryptionConfigResponseOutput)\n}", "func (o *UcsdBackupInfoAllOf) GetEncryptionKey() string {\n\tif o == nil || o.EncryptionKey == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.EncryptionKey\n}", "func (in *IpConfiguration) DeepCopy() 
*IpConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IpConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ElasticsearchConfiguration) DeepCopy() *ElasticsearchConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ElasticsearchConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *VolumeConfig) DeepCopy() *VolumeConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(VolumeConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *CertificateRenewalConfig) DeepCopy() *CertificateRenewalConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(CertificateRenewalConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *ExportAssetsToS3RequestDetails) SetEncryption(v *ExportServerSideEncryption) *ExportAssetsToS3RequestDetails {\n\ts.Encryption = v\n\treturn s\n}", "func (o *DeleteDataSourceParams) SetEncryptionMode(encryptionMode *string) {\n\to.EncryptionMode = encryptionMode\n}", "func (in *IngressTLSConfig) DeepCopy() *IngressTLSConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IngressTLSConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *NetworkConfig) DeepCopy() *NetworkConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(NetworkConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (m *ProjectionMapping) GetEncryptionConfig() conf.Encryption {\n\treturn m.Encryption\n}", "func (s *ExportRevisionsToS3ResponseDetails) SetEncryption(v *ExportServerSideEncryption) *ExportRevisionsToS3ResponseDetails {\n\ts.Encryption = v\n\treturn s\n}", "func (in *KubebenchConfig) DeepCopy() *KubebenchConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubebenchConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *PrivateconnectionVpcPeeringConfig) DeepCopy() *PrivateconnectionVpcPeeringConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PrivateconnectionVpcPeeringConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *InputConfig) 
DeepCopy() *InputConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(InputConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *SseConfiguration) SetKmsEncryptionConfig(v *KmsEncryptionConfig) *SseConfiguration {\n\ts.KmsEncryptionConfig = v\n\treturn s\n}", "func (in *DatabaseSecretEngineConfig) DeepCopy() *DatabaseSecretEngineConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DatabaseSecretEngineConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretEngineConfiguration) DeepCopyInto(out *SecretEngineConfiguration) {\n\t*out = *in\n\tif in.AWS != nil {\n\t\tin, out := &in.AWS, &out.AWS\n\t\t*out = new(AWSConfiguration)\n\t\t(*in).DeepCopyInto(*out)\n\t}\n\tif in.Azure != nil {\n\t\tin, out := &in.Azure, &out.Azure\n\t\t*out = new(AzureConfiguration)\n\t\t**out = **in\n\t}\n\tif in.GCP != nil {\n\t\tin, out := &in.GCP, &out.GCP\n\t\t*out = new(GCPConfiguration)\n\t\t**out = **in\n\t}\n\tif in.Postgres != nil {\n\t\tin, out := &in.Postgres, &out.Postgres\n\t\t*out = new(PostgresConfiguration)\n\t\t(*in).DeepCopyInto(*out)\n\t}\n\tif in.MongoDB != nil {\n\t\tin, out := &in.MongoDB, &out.MongoDB\n\t\t*out = new(MongoDBConfiguration)\n\t\t(*in).DeepCopyInto(*out)\n\t}\n\tif in.Redis != nil {\n\t\tin, out := &in.Redis, &out.Redis\n\t\t*out = new(RedisConfiguration)\n\t\t(*in).DeepCopyInto(*out)\n\t}\n\tif in.MySQL != nil {\n\t\tin, out := &in.MySQL, &out.MySQL\n\t\t*out = new(MySQLConfiguration)\n\t\t(*in).DeepCopyInto(*out)\n\t}\n\tif in.MariaDB != nil {\n\t\tin, out := &in.MariaDB, &out.MariaDB\n\t\t*out = new(MariaDBConfiguration)\n\t\t(*in).DeepCopyInto(*out)\n\t}\n\tif in.KV != nil {\n\t\tin, out := &in.KV, &out.KV\n\t\t*out = new(KVConfiguration)\n\t\t**out = **in\n\t}\n\tif in.Elasticsearch != nil {\n\t\tin, out := &in.Elasticsearch, &out.Elasticsearch\n\t\t*out = new(ElasticsearchConfiguration)\n\t\t(*in).DeepCopyInto(*out)\n\t}\n\treturn\n}", "func (o *StorageNetAppCifsShareAllOf) SetEncryption(v string) 
{\n\to.Encryption = &v\n}", "func (in *AdvancedConfig) DeepCopy() *AdvancedConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AdvancedConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o InventoryDestinationBucketPtrOutput) Encryption() InventoryDestinationBucketEncryptionPtrOutput {\n\treturn o.ApplyT(func(v *InventoryDestinationBucket) *InventoryDestinationBucketEncryption {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Encryption\n\t}).(InventoryDestinationBucketEncryptionPtrOutput)\n}", "func (o *Wireless) SetEncryption(v string) {\n\to.Encryption = &v\n}", "func (in *ContainerImageConfiguration) DeepCopy() *ContainerImageConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ContainerImageConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AdmissionWebhookConfiguration) DeepCopy() *AdmissionWebhookConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AdmissionWebhookConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MSSQLServerTransparentDataEncryption) DeepCopy() *MSSQLServerTransparentDataEncryption {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MSSQLServerTransparentDataEncryption)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o AsymmetricEncryptedSecretOutput) EncryptionAlgorithm() EncryptionAlgorithmOutput {\n\treturn o.ApplyT(func(v AsymmetricEncryptedSecret) EncryptionAlgorithm { return v.EncryptionAlgorithm }).(EncryptionAlgorithmOutput)\n}", "func (in *AuthenticationOperatorConfig) DeepCopy() *AuthenticationOperatorConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AuthenticationOperatorConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *StartBotRecommendationInput) SetEncryptionSetting(v *EncryptionSetting) *StartBotRecommendationInput {\n\ts.EncryptionSetting = v\n\treturn s\n}", "func (in *DeploymentConfiguration) DeepCopy() *DeploymentConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := 
new(DeploymentConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *StartBotRecommendationOutput) SetEncryptionSetting(v *EncryptionSetting) *StartBotRecommendationOutput {\n\ts.EncryptionSetting = v\n\treturn s\n}", "func (in *KubeadmConfig) DeepCopy() *KubeadmConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubeadmConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *ExportAssetsToS3ResponseDetails) SetEncryption(v *ExportServerSideEncryption) *ExportAssetsToS3ResponseDetails {\n\ts.Encryption = v\n\treturn s\n}", "func (constr Construction) Encrypt(dst, src []byte) {\n\ttemp := [16]byte{}\n\tcopy(temp[:], src)\n\n\ttemp = encoding.ComposedBlocks(constr).Encode(temp)\n\n\tcopy(dst, temp[:])\n}", "func (g *GenericVaultBackend) EncryptionConfigPath() string {\n\treturn filepath.Join(g.Path(), \"encryption-config\")\n}", "func (in *EncryptionConfiguration) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *NetDeviceConfig) DeepCopy() *NetDeviceConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(NetDeviceConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o *StorageNetAppCifsShareAllOf) GetEncryption() string {\n\tif o == nil || o.Encryption == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Encryption\n}", "func (in *RedisConfiguration) DeepCopy() *RedisConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(RedisConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigurationList) DeepCopy() *ConfigurationList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigurationList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c Configuration) Clone() Configuration {\n\treturn Configuration{\n\t\tEDATool: c.EDATool,\n\t\tInputFile: c.InputFile,\n\t\tOutputFile: c.OutputFile,\n\t\tLastUpdated: c.LastUpdated,\n\t}\n}", "func (o *IamLdapBasePropertiesAllOf) GetEnableEncryption() bool {\n\tif o == nil || 
o.EnableEncryption == nil {\n\t\tvar ret bool\n\t\treturn ret\n\t}\n\treturn *o.EnableEncryption\n}", "func (c *Config) DeepCopy() *Config {\n\tif c == nil {\n\t\treturn &Config{}\n\t}\n\tcfg := *c\n\treturn &cfg\n}", "func (in *ApplicationConfigurationSpec) DeepCopy() *ApplicationConfigurationSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ApplicationConfigurationSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o InventoryDestinationBucketOutput) Encryption() InventoryDestinationBucketEncryptionPtrOutput {\n\treturn o.ApplyT(func(v InventoryDestinationBucket) *InventoryDestinationBucketEncryption { return v.Encryption }).(InventoryDestinationBucketEncryptionPtrOutput)\n}" ]
[ "0.81821805", "0.69133514", "0.67663676", "0.6730935", "0.6721722", "0.6714277", "0.65690655", "0.6349009", "0.6184369", "0.60215807", "0.59392107", "0.5796518", "0.56959784", "0.5585565", "0.5557125", "0.5539182", "0.5539182", "0.5539182", "0.55084985", "0.54757833", "0.54676163", "0.5440936", "0.5420741", "0.5409741", "0.5374624", "0.53595287", "0.53588474", "0.53588474", "0.5349624", "0.53295153", "0.5291965", "0.52844894", "0.52609766", "0.5248489", "0.5244222", "0.52323616", "0.52308196", "0.52153087", "0.5196572", "0.5184887", "0.5184887", "0.5168087", "0.51653224", "0.5152831", "0.5147331", "0.5133785", "0.51305395", "0.51279354", "0.5125688", "0.5125688", "0.51116973", "0.510776", "0.50861406", "0.5081224", "0.5077895", "0.5054636", "0.5049154", "0.5033954", "0.5029655", "0.5020012", "0.5005512", "0.50011605", "0.4978785", "0.49718893", "0.493431", "0.49312368", "0.4930719", "0.4916804", "0.49145666", "0.49120423", "0.49064898", "0.490469", "0.48939458", "0.48894572", "0.48880678", "0.48824966", "0.4880412", "0.48755142", "0.48576957", "0.48546055", "0.48528993", "0.4849519", "0.48361003", "0.48281524", "0.48123592", "0.48048753", "0.47914922", "0.47900325", "0.4785608", "0.47787935", "0.47775367", "0.47737807", "0.47735175", "0.47728443", "0.4770085", "0.47689664", "0.47677588", "0.47664142", "0.4763375", "0.47545955" ]
0.895874
0
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *EncryptionConfiguration) DeepCopyObject() runtime.Object { if c := in.DeepCopy(); c != nil { return c } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (in *ConsoleQuickStart) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *ServingRuntime) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Keevakind) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Run) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Pentesting) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Version) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *DroidVirt) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *ConsoleSample) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *ConsoleQuickStartList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Kernel) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *ConsoleService) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *AppBinding) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Execution) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *PipelineRun) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Runner) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func 
(in *Terminal) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Terminal) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *ConsoleCLIDownload) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *CliApp) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *DirectorBind) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t} else {\n\t\treturn nil\n\t}\n}", "func (in *DiscoveryService) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Baremetal) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Egeria) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Qliksense) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Idler) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *DroidVirtList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *RpaasBind) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Host) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *ServingRuntimeList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *RedisEnterprise) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func 
(in *SSLProxy) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *ConsoleLink) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *GitHubBinding) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *MobileSecurityServiceBind) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *DOMachine) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Space) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Space) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *KeevakindList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *DockerBind) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t} else {\n\t\treturn nil\n\t}\n}", "func (in *RPMCompose) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Build) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Build) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Build) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Build) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *SecretEngine) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in 
*VultrMachine) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *SpaceBinding) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Redis) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Ray) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Redpanda) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *KeptnCore) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Crd) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Sidekick) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *RunList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Test) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *ManagedSeed) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Method) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *DirectorBindList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t} else {\n\t\treturn nil\n\t}\n}", "func (in *IdGenerate) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *ExecutionList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *FundPool) DeepCopyObject() 
runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *RestQL) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *FalconContainer) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *RedisGCache) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *ChaosApi) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *VirtualDatabase) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *VirtualService) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *AppBindingList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Notary) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *QliksenseList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *DataExport) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *DataExport) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *FusionAppInstance) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *IdGenerateList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *DataDownload) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Guestbook) 
DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *User) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Memcached) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *ConsoleCLIDownloadList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *GlobalUser) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *PipelineRunList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *CliAppList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Hawtio) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *VirtualNodeList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *RTCPeerConnection) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *EventingBackend) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *GitHubBindingList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *AtRest) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *OpenStackPlaybookGenerator) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Registry) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", 
"func (in *UsersDB2) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Ghost) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *DiscoveryServiceCertificate) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *WorkflowRun) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *SD) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *CrunchyBridgeConnection) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *ManagedSeedList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Manager) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *Framework) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *FusionApp) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *PyTorchJob) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}" ]
[ "0.73118716", "0.7157215", "0.71052337", "0.7099557", "0.7055543", "0.7039787", "0.70361906", "0.7028781", "0.6997706", "0.69724673", "0.69699717", "0.69541174", "0.6951929", "0.6948912", "0.6936584", "0.6933856", "0.6933856", "0.6926833", "0.6922871", "0.69208986", "0.6910952", "0.68988454", "0.68945056", "0.689282", "0.68874973", "0.68858397", "0.6885442", "0.6876728", "0.6875352", "0.6874131", "0.6862784", "0.6859539", "0.68586844", "0.6850529", "0.68501276", "0.68457294", "0.68457294", "0.68454725", "0.68393195", "0.6838104", "0.6837139", "0.6837139", "0.6837139", "0.6837139", "0.68330866", "0.6832333", "0.6829182", "0.6828011", "0.6827433", "0.68274075", "0.68239087", "0.68168485", "0.68135357", "0.6812687", "0.68121284", "0.6805111", "0.68014735", "0.67932034", "0.6789401", "0.6782944", "0.6782449", "0.67818314", "0.67816544", "0.67808276", "0.67804974", "0.6768294", "0.6768069", "0.6766543", "0.67626894", "0.67613906", "0.6755548", "0.6755548", "0.67482626", "0.6746819", "0.67458177", "0.67404664", "0.67388374", "0.67380244", "0.6736224", "0.67346346", "0.6733956", "0.6732834", "0.6732262", "0.6731639", "0.6731512", "0.67279434", "0.6727401", "0.67249155", "0.6723593", "0.67206407", "0.671899", "0.6716081", "0.6713702", "0.67133504", "0.6711048", "0.6709987", "0.6709619", "0.6708846", "0.67083585", "0.67080545", "0.67075753" ]
0.0
-1
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be nonnil.
func (in *IdentityConfiguration) DeepCopyInto(out *IdentityConfiguration) { *out = *in return }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (r *RunInfo) DeepCopyInto(out *RunInfo) {\n\t*out = *r\n}", "func (in *Base) DeepCopyInto(out *Base) {\n\t*out = *in\n\treturn\n}", "func (in *ForkObject) DeepCopyInto(out *ForkObject) {\n\t*out = *in\n}", "func (in *TargetObjectInfo) DeepCopyInto(out *TargetObjectInfo) {\n\t*out = *in\n}", "func (in *DebugObjectInfo) DeepCopyInto(out *DebugObjectInfo) {\n\t*out = *in\n}", "func (in *Input) DeepCopyInto(out *Input) {\n\t*out = *in\n}", "func (u *SSN) DeepCopyInto(out *SSN) {\n\t*out = *u\n}", "func (in *ExistPvc) DeepCopyInto(out *ExistPvc) {\n\t*out = *in\n}", "func (in *DockerStep) DeepCopyInto(out *DockerStep) {\n\t*out = *in\n\tif in.Inline != nil {\n\t\tin, out := &in.Inline, &out.Inline\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tout.Auth = in.Auth\n\treturn\n}", "func (in *Container) DeepCopyInto(out *Container) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.Command != nil {\n\t\tin, out := &in.Command, &out.Command\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.LifeCycleScript != nil {\n\t\tin, out := &in.LifeCycleScript, &out.LifeCycleScript\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *RuntimeRef) DeepCopyInto(out *RuntimeRef) {\n\t*out = *in\n}", "func (in *Ibft2) DeepCopyInto(out *Ibft2) {\n\t*out = *in\n\treturn\n}", "func (in *TestResult) DeepCopyInto(out *TestResult) {\n\t*out = *in\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *Haproxy) DeepCopyInto(out *Haproxy) {\n\t*out = *in\n\treturn\n}", "func (in *SSH) DeepCopyInto(out *SSH) {\n\t*out = *in\n\treturn\n}", "func (in *Runtime) DeepCopyInto(out *Runtime) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) 
DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (b *Base64) DeepCopyInto(out *Base64) {\n\t*out = *b\n}", "func (in *EventDependencyTransformer) DeepCopyInto(out *EventDependencyTransformer) {\n\t*out = *in\n\treturn\n}", "func (in *StageOutput) DeepCopyInto(out *StageOutput) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Dependent) DeepCopyInto(out *Dependent) {\n\t*out = *in\n\treturn\n}", "func (in *GitFileGeneratorItem) DeepCopyInto(out *GitFileGeneratorItem) {\n\t*out = *in\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *AnsibleStep) DeepCopyInto(out *AnsibleStep) {\n\t*out = *in\n\treturn\n}", "func (in *Forks) DeepCopyInto(out *Forks) {\n\t*out = *in\n\tif in.DAO != nil {\n\t\tin, out := &in.DAO, &out.DAO\n\t\t*out = new(uint)\n\t\t**out = **in\n\t}\n}", "func (in *ContainerPort) DeepCopyInto(out *ContainerPort) {\n\t*out = *in\n}", "func (in *General) DeepCopyInto(out *General) {\n\t*out = *in\n\treturn\n}", "func (in *IsoContainer) DeepCopyInto(out *IsoContainer) {\n\t*out = *in\n}", "func (in *Git) DeepCopyInto(out *Git) {\n\t*out = *in\n\treturn\n}", "func (in *ConfigFile) DeepCopyInto(out *ConfigFile) {\n\t*out = *in\n}", "func (in *BackupProgress) DeepCopyInto(out *BackupProgress) {\n\t*out = *in\n}", "func (in *DataDisk) DeepCopyInto(out *DataDisk) {\n\t*out = *in\n}", "func (in *PhaseStep) DeepCopyInto(out *PhaseStep) {\n\t*out = *in\n}", "func (u *MAC) DeepCopyInto(out *MAC) {\n\t*out = *u\n}", "func 
(in *Variable) DeepCopyInto(out *Variable) {\n\t*out = *in\n}", "func (in *RestoreProgress) DeepCopyInto(out *RestoreProgress) {\n\t*out = *in\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *NamespacedObjectReference) DeepCopyInto(out *NamespacedObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Path) DeepCopyInto(out *Path) {\n\t*out = *in\n\treturn\n}", "func (in *GitDirectoryGeneratorItem) DeepCopyInto(out *GitDirectoryGeneratorItem) {\n\t*out = *in\n}", "func (in *NamePath) DeepCopyInto(out *NamePath) {\n\t*out = *in\n\treturn\n}", "func (in *ConsoleCreateObj) DeepCopyInto(out *ConsoleCreateObj) {\n\t*out = *in\n}", "func (in *UsedPipelineRun) DeepCopyInto(out *UsedPipelineRun) {\n\t*out = *in\n}", "func (in *BuildTemplate) DeepCopyInto(out *BuildTemplate) {\n\t*out = *in\n\tif in.Cmd != nil {\n\t\tin, out := &in.Cmd, &out.Cmd\n\t\t*out = make([]BuildTemplateStep, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n}", "func (in *ObjectInfo) DeepCopyInto(out *ObjectInfo) {\n\t*out = *in\n\tout.GroupVersionKind = in.GroupVersionKind\n\treturn\n}", "func (in *Files) DeepCopyInto(out *Files) {\n\t*out = *in\n}", "func (in *Source) DeepCopyInto(out *Source) {\n\t*out = *in\n\tif in.Dependencies != nil {\n\t\tin, out := &in.Dependencies, &out.Dependencies\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MavenRepositories != nil {\n\t\tin, out := &in.MavenRepositories, &out.MavenRepositories\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\treturn\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = *in\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = 
*in\n}", "func (in *StackBuild) DeepCopyInto(out *StackBuild) {\n\t*out = *in\n\treturn\n}", "func (in *BuildTaskRef) DeepCopyInto(out *BuildTaskRef) {\n\t*out = *in\n\treturn\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *PathInfo) DeepCopyInto(out *PathInfo) {\n\t*out = *in\n}", "func (in *PoA) DeepCopyInto(out *PoA) {\n\t*out = *in\n}", "func (in *Section) DeepCopyInto(out *Section) {\n\t*out = *in\n\tif in.SecretRefs != nil {\n\t\tin, out := &in.SecretRefs, &out.SecretRefs\n\t\t*out = make([]SecretReference, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Files != nil {\n\t\tin, out := &in.Files, &out.Files\n\t\t*out = make([]FileMount, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n}", "func (in *DNSSelection) DeepCopyInto(out *DNSSelection) {\n\t*out = *in\n\tif in.Include != nil {\n\t\tin, out := &in.Include, &out.Include\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Exclude != nil {\n\t\tin, out := &in.Exclude, &out.Exclude\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ReleaseVersion) DeepCopyInto(out *ReleaseVersion) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Command) DeepCopyInto(out *Command) {\n\t*out = *in\n\tif in.Flags != nil {\n\t\tin, out := &in.Flags, &out.Flags\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Value != nil {\n\t\tin, out := &in.Value, &out.Value\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *PathRule) DeepCopyInto(out *PathRule) {\n\t*out = *in\n\treturn\n}", "func (in *DockerLifecycleData) DeepCopyInto(out *DockerLifecycleData) {\n\t*out 
= *in\n}", "func (in *RunScriptStepConfig) DeepCopyInto(out *RunScriptStepConfig) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *Checksum) DeepCopyInto(out *Checksum) {\n\t*out = *in\n}", "func (in *DomainNameOutput) DeepCopyInto(out *DomainNameOutput) {\n\t*out = *in\n}", "func (in *InterfaceStruct) DeepCopyInto(out *InterfaceStruct) {\n\t*out = *in\n\tif in.val != nil {\n\t\tin, out := &in.val, &out.val\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Ref) DeepCopyInto(out *Ref) {\n\t*out = *in\n}", "func (in *MemorySpec) DeepCopyInto(out *MemorySpec) {\n\t*out = *in\n}", "func (in *BuildJenkinsInfo) DeepCopyInto(out *BuildJenkinsInfo) {\n\t*out = *in\n\treturn\n}", "func (in *VirtualDatabaseBuildObject) DeepCopyInto(out *VirtualDatabaseBuildObject) {\n\t*out = *in\n\tif in.Incremental != nil {\n\t\tin, out := &in.Incremental, &out.Incremental\n\t\t*out = new(bool)\n\t\t**out = **in\n\t}\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]v1.EnvVar, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\tout.Git = in.Git\n\tin.Source.DeepCopyInto(&out.Source)\n\tif in.Webhooks != nil {\n\t\tin, out := &in.Webhooks, &out.Webhooks\n\t\t*out = make([]WebhookSecret, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *MaintenanceInfo) DeepCopyInto(out *MaintenanceInfo) {\n\t*out = *in\n\treturn\n}", "func (in *KopsNode) DeepCopyInto(out *KopsNode) {\n\t*out = *in\n\treturn\n}", "func (in *FalconAPI) DeepCopyInto(out *FalconAPI) {\n\t*out = *in\n}", "func (in *EBS) DeepCopyInto(out *EBS) {\n\t*out = *in\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n\treturn\n}", "func (in *Empty) DeepCopyInto(out *Empty) {\n\t*out = *in\n\tout.XXX_NoUnkeyedLiteral = in.XXX_NoUnkeyedLiteral\n\tif in.XXX_unrecognized != nil {\n\t\tin, out := 
&in.XXX_unrecognized, &out.XXX_unrecognized\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ComponentDistGit) DeepCopyInto(out *ComponentDistGit) {\n\t*out = *in\n\treturn\n}", "func (in *Memory) DeepCopyInto(out *Memory) {\n\t*out = *in\n\tout.Required = in.Required.DeepCopy()\n}", "func (in *Persistence) DeepCopyInto(out *Persistence) {\n\t*out = *in\n\tout.Size = in.Size.DeepCopy()\n\treturn\n}", "func (in *ManagedDisk) DeepCopyInto(out *ManagedDisk) {\n\t*out = *in\n}", "func (e *Email) DeepCopyInto(out *Email) {\n\t*out = *e\n}", "func (in *ImageInfo) DeepCopyInto(out *ImageInfo) {\n\t*out = *in\n}", "func (in *ShootRef) DeepCopyInto(out *ShootRef) {\n\t*out = *in\n}", "func (in *N3000Fpga) DeepCopyInto(out *N3000Fpga) {\n\t*out = *in\n}", "func (in *NetflowType) DeepCopyInto(out *NetflowType) {\n\t*out = *in\n\treturn\n}", "func (in *BuiltInAdapter) DeepCopyInto(out *BuiltInAdapter) {\n\t*out = *in\n}", "func (in *Node) DeepCopyInto(out *Node) {\n\t*out = *in\n\tif in.FailStatus != nil {\n\t\tin, out := &in.FailStatus, &out.FailStatus\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MigratingSlots != nil {\n\t\tin, out := &in.MigratingSlots, &out.MigratingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.ImportingSlots != nil {\n\t\tin, out := &in.ImportingSlots, &out.ImportingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n}", "func (in *CPUSpec) DeepCopyInto(out *CPUSpec) {\n\t*out = *in\n}", "func (in *LoopState) DeepCopyInto(out *LoopState) {\n\t*out = *in\n}" ]
[ "0.8215289", "0.81280124", "0.81039286", "0.80862963", "0.8083811", "0.80673146", "0.8064545", "0.8026454", "0.8012046", "0.7996313", "0.799204", "0.79887754", "0.7987097", "0.7986994", "0.7986994", "0.79854053", "0.7975989", "0.7972486", "0.79695636", "0.79695636", "0.79695636", "0.7967528", "0.79624444", "0.7961954", "0.7945754", "0.7945754", "0.7944541", "0.79428566", "0.7942668", "0.7942668", "0.7940451", "0.793851", "0.7936731", "0.79294837", "0.79252166", "0.7915377", "0.7911627", "0.7911138", "0.7909384", "0.790913", "0.7908773", "0.7905649", "0.79050326", "0.7904594", "0.7904594", "0.7904235", "0.79036915", "0.79020816", "0.78988886", "0.78977424", "0.7891376", "0.7891024", "0.7889831", "0.78890276", "0.7887135", "0.788637", "0.7885264", "0.7885264", "0.7884786", "0.7880785", "0.78745943", "0.78745943", "0.78745407", "0.78734446", "0.78724426", "0.78713626", "0.78713554", "0.78652424", "0.7863321", "0.7863321", "0.7863321", "0.7863293", "0.7862628", "0.7860664", "0.7858556", "0.785785", "0.78571486", "0.7851332", "0.78453225", "0.78448987", "0.78415996", "0.7837483", "0.7837037", "0.7836443", "0.78351796", "0.78329664", "0.7831094", "0.7829445", "0.7826582", "0.7824499", "0.78242797", "0.78227437", "0.78192484", "0.7818843", "0.78128535", "0.7812535", "0.78111476", "0.78111106", "0.781107", "0.78093034", "0.7808775" ]
0.0
-1
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new IdentityConfiguration.
func (in *IdentityConfiguration) DeepCopy() *IdentityConfiguration { if in == nil { return nil } out := new(IdentityConfiguration) in.DeepCopyInto(out) return out }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (in *WorkloadIdentityConfig) DeepCopy() *WorkloadIdentityConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(WorkloadIdentityConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Identity) DeepCopy() *Identity {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Identity)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IdentityParameters) DeepCopy() *IdentityParameters {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IdentityParameters)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IsoConfiguration) DeepCopy() *IsoConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IsoConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IpConfiguration) DeepCopy() *IpConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IpConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IdentityProvider) DeepCopy() *IdentityProvider {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IdentityProvider)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IstioConfig) DeepCopy() *IstioConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IstioConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *PKIConfig) DeepCopy() *PKIConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PKIConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *OCIConfiguration) DeepCopy() *OCIConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(OCIConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IdentityProviderParameters) DeepCopy() *IdentityProviderParameters {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IdentityProviderParameters)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *DatastoreProperties) SetIdentityProviderConfiguration(v *IdentityProviderConfiguration) *DatastoreProperties {\n\ts.IdentityProviderConfiguration = v\n\treturn s\n}", "func (in *LabelIdentity) DeepCopy() *LabelIdentity {\n\tif in == nil {\n\t\treturn 
nil\n\t}\n\tout := new(LabelIdentity)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c *Provider) IdentityConfig() msp.IdentityConfig {\n\treturn c.identityConfig\n}", "func (in *ContainernodepoolShieldedInstanceConfig) DeepCopy() *ContainernodepoolShieldedInstanceConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ContainernodepoolShieldedInstanceConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *InputConfig) DeepCopy() *InputConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(InputConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LoggingConfiguration) DeepCopy() *LoggingConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LoggingConfiguration) DeepCopy() *LoggingConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IdentityProviderDetails) DeepCopy() *IdentityProviderDetails {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IdentityProviderDetails)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EKSManagedConfiguration) DeepCopy() *EKSManagedConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EKSManagedConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IdentityObservation) DeepCopy() *IdentityObservation {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IdentityObservation)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *UserAssignedIdentityDetails) DeepCopy() *UserAssignedIdentityDetails {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(UserAssignedIdentityDetails)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *UserAssignedIdentityDetails) DeepCopy() *UserAssignedIdentityDetails {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(UserAssignedIdentityDetails)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *CreateFHIRDatastoreInput) 
SetIdentityProviderConfiguration(v *IdentityProviderConfiguration) *CreateFHIRDatastoreInput {\n\ts.IdentityProviderConfiguration = v\n\treturn s\n}", "func (c *IssuerConfig) Copy() *IssuerConfig {\n\td := new(IssuerConfig)\n\tcopier.Copy(d, c)\n\treturn d\n}", "func (in *AuthenticationOperatorConfig) DeepCopy() *AuthenticationOperatorConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AuthenticationOperatorConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EncryptionConfiguration) DeepCopy() *EncryptionConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EncryptionConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IdentityProviderType) DeepCopy() *IdentityProviderType {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IdentityProviderType)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ContainerImageConfiguration) DeepCopy() *ContainerImageConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ContainerImageConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *NotificationsConfig) DeepCopy() *NotificationsConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(NotificationsConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LabelIdentityList) DeepCopy() *LabelIdentityList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LabelIdentityList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *CASignedConfig) DeepCopy() *CASignedConfig {\n\tif in == nil 
{\n\t\treturn nil\n\t}\n\tout := new(CASignedConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ContainernodepoolSandboxConfig) DeepCopy() *ContainernodepoolSandboxConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ContainernodepoolSandboxConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ImageConfig) DeepCopy() *ImageConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ImageConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *NotificationConfig) DeepCopy() *NotificationConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(NotificationConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EKSCFConfiguration) DeepCopy() *EKSCFConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EKSCFConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IdentityConfiguration) DeepCopyInto(out *IdentityConfiguration) {\n\t*out = *in\n\treturn\n}", "func (in *Configurator) DeepCopy() *Configurator {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configurator)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IdentityProviderList) DeepCopy() *IdentityProviderList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IdentityProviderList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LoggingConfig) DeepCopy() *LoggingConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AutoscaleConfig) DeepCopy() *AutoscaleConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AutoscaleConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *NamespaceConfig) DeepCopy() *NamespaceConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(NamespaceConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DiamondConfig) DeepCopy() *DiamondConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DiamondConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IdentityProviderObservation) 
DeepCopy() *IdentityProviderObservation {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IdentityProviderObservation)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LabelIdentitySpec) DeepCopy() *LabelIdentitySpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LabelIdentitySpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IngressConfig) DeepCopy() *IngressConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IngressConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *NetworkConfiguration) DeepCopy() *NetworkConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(NetworkConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ApplicationConfiguration) DeepCopy() *ApplicationConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ApplicationConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IdentityProviderSpec) DeepCopy() *IdentityProviderSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IdentityProviderSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o ClusterOutput) WorkloadIdentityConfig() ClusterWorkloadIdentityConfigOutput {\n\treturn o.ApplyT(func(v *Cluster) ClusterWorkloadIdentityConfigOutput { return v.WorkloadIdentityConfig }).(ClusterWorkloadIdentityConfigOutput)\n}", "func (in *MongoDBConfiguration) DeepCopy() *MongoDBConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MongoDBConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AutoMLSecurityConfig) DeepCopy() *AutoMLSecurityConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AutoMLSecurityConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AWSAccessRequestConfiguration) DeepCopy() *AWSAccessRequestConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AWSAccessRequestConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *UsernameConfigurationType) DeepCopy() *UsernameConfigurationType {\n\tif in == nil 
{\n\t\treturn nil\n\t}\n\tout := new(UsernameConfigurationType)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ProviderConfiguration) DeepCopy() *ProviderConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ProviderConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EmailConfigurationType) DeepCopy() *EmailConfigurationType {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EmailConfigurationType)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ServerIdentityParameters) DeepCopy() *ServerIdentityParameters {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ServerIdentityParameters)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o ClusterOutput) IdentityServiceConfig() ClusterIdentityServiceConfigOutput {\n\treturn o.ApplyT(func(v *Cluster) ClusterIdentityServiceConfigOutput { return v.IdentityServiceConfig }).(ClusterIdentityServiceConfigOutput)\n}", "func (in *BaseKubernetesContainerConfiguration) DeepCopy() *BaseKubernetesContainerConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BaseKubernetesContainerConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretboxConfiguration) DeepCopy() *SecretboxConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretboxConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigFile) DeepCopy() *ConfigFile {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigFile)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AESConfiguration) DeepCopy() *AESConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AESConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *BatchAccountIdentity) DeepCopy() *BatchAccountIdentity {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BatchAccountIdentity)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ObjectStorageConfig) DeepCopy() *ObjectStorageConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := 
new(ObjectStorageConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (mr *MockClientMockRecorder) IdentityConfig() *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"IdentityConfig\", reflect.TypeOf((*MockClient)(nil).IdentityConfig))\n}", "func identityConfig(nbits int) (native.Identity, error) {\n\t// TODO guard higher up\n\tident := native.Identity{}\n\tif nbits < 1024 {\n\t\treturn ident, errors.New(\"bitsize less than 1024 is considered unsafe\")\n\t}\n\n\tlog.Infof(\"generating %v-bit RSA keypair...\", nbits)\n\tsk, pk, err := ci.GenerateKeyPair(ci.RSA, nbits)\n\tif err != nil {\n\t\treturn ident, err\n\t}\n\n\t// currently storing key unencrypted. in the future we need to encrypt it.\n\t// TODO(security)\n\tskbytes, err := sk.Bytes()\n\tif err != nil {\n\t\treturn ident, err\n\t}\n\tident.PrivKey = base64.StdEncoding.EncodeToString(skbytes)\n\n\tid, err := peer.IDFromPublicKey(pk)\n\tif err != nil {\n\t\treturn ident, err\n\t}\n\tident.PeerID = id.Pretty()\n\tlog.Infof(\"new peer identity: %s\\n\", ident.PeerID)\n\treturn ident, nil\n}", "func (in *MultiClusterConfig) DeepCopy() *MultiClusterConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MultiClusterConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MariaDBConfiguration) DeepCopy() *MariaDBConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MariaDBConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EncryptionConfig) DeepCopy() *EncryptionConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EncryptionConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *PrivateEndpointIPConfiguration) DeepCopy() *PrivateEndpointIPConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PrivateEndpointIPConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AWSConfiguration) DeepCopy() *AWSConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := 
new(AWSConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConnectionprofileSslConfig) DeepCopy() *ConnectionprofileSslConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConnectionprofileSslConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *GcePersistentDiskCsiDriverConfig) DeepCopy() *GcePersistentDiskCsiDriverConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(GcePersistentDiskCsiDriverConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LabelingJobAlgorithmsConfig) DeepCopy() *LabelingJobAlgorithmsConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LabelingJobAlgorithmsConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IngressTLSConfig) DeepCopy() *IngressTLSConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IngressTLSConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func LDAPIdentityProvider() *LDAPIdentityProviderApplyConfiguration {\n\treturn &LDAPIdentityProviderApplyConfiguration{}\n}", "func (in *RedisConfiguration) DeepCopy() *RedisConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(RedisConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LoggerConfig) DeepCopy() *LoggerConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggerConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MetricsConfiguration) DeepCopy() *MetricsConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MetricsConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IngressShimConfig) DeepCopy() *IngressShimConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IngressShimConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c *Config) DeepCopy() *Config {\n\tif c == nil {\n\t\treturn &Config{}\n\t}\n\tcfg := *c\n\treturn &cfg\n}", "func (in *OCICacheConfiguration) DeepCopy() *OCICacheConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := 
new(OCICacheConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *FrontendIPConfig) DeepCopy() *FrontendIPConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(FrontendIPConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ControllerManagerLoggerConfiguration) DeepCopy() *ControllerManagerLoggerConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ControllerManagerLoggerConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DeviceSecretVerifierConfigType) DeepCopy() *DeviceSecretVerifierConfigType {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DeviceSecretVerifierConfigType)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *NetworkPolicyConfig) DeepCopy() *NetworkPolicyConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(NetworkPolicyConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *NetworkPolicyConfig) DeepCopy() *NetworkPolicyConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(NetworkPolicyConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ShieldedInstanceConfig) DeepCopy() *ShieldedInstanceConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ShieldedInstanceConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewConfiguration(authKeySize int, encryptEnabled bool, encapEnabled bool, wireguardEnabled bool, hsIpcacheDSRenabled bool, mtu int, mtuDetectIP net.IP) Configuration {\n\tencryptOverhead := 0\n\n\tif mtu == 0 {\n\t\tvar err error\n\n\t\tif mtuDetectIP != nil {\n\t\t\tmtu, err = getMTUFromIf(mtuDetectIP)\n\t\t} else {\n\t\t\tmtu, err = autoDetect()\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.WithError(err).Warning(\"Unable to automatically detect MTU\")\n\t\t\tmtu = EthernetMTU\n\t\t}\n\t}\n\n\tif encryptEnabled {\n\t\t// Add the difference between the default and the actual key sizes here\n\t\t// to account for users specifying non-default auth key lengths.\n\t\tencryptOverhead = EncryptionIPsecOverhead + (authKeySize - 
EncryptionDefaultAuthKeyLength)\n\t}\n\n\tfullTunnelOverhead := TunnelOverhead\n\tif hsIpcacheDSRenabled {\n\t\tfullTunnelOverhead += DsrTunnelOverhead\n\t}\n\n\tconf := Configuration{\n\t\tstandardMTU: mtu,\n\t\ttunnelMTU: mtu - (fullTunnelOverhead + encryptOverhead),\n\t\tpostEncryptMTU: mtu - TunnelOverhead,\n\t\tpreEncryptMTU: mtu - encryptOverhead,\n\t\tencapEnabled: encapEnabled,\n\t\tencryptEnabled: encryptEnabled,\n\t\twireguardEnabled: wireguardEnabled,\n\t}\n\n\tif conf.tunnelMTU < 0 {\n\t\tconf.tunnelMTU = 0\n\t}\n\n\treturn conf\n}", "func (in *DebugRuleConfiguration) DeepCopy() *DebugRuleConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DebugRuleConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewIdentityBuilder() *IdentityBuilder {\n\treturn &IdentityBuilder{}\n}", "func (in *GitPipelineInputConfig) DeepCopy() *GitPipelineInputConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(GitPipelineInputConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in Config) DeepCopy() Config {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Config)\n\tin.DeepCopyInto(out)\n\treturn *out\n}", "func (in *SecretAccessRequestConfiguration) DeepCopy() *SecretAccessRequestConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretAccessRequestConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *RootCredentialConfig) DeepCopy() *RootCredentialConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(RootCredentialConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (mr *MockProvidersMockRecorder) IdentityConfig() *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"IdentityConfig\", reflect.TypeOf((*MockProviders)(nil).IdentityConfig))\n}", "func DeepCopy_v1_ImagePolicyConfig(in interface{}, out interface{}, c *conversion.Cloner) error {\n\t{\n\t\tin := in.(*ImagePolicyConfig)\n\t\tout := out.(*ImagePolicyConfig)\n\t\t*out = *in\n\t\tif 
in.ResolutionRules != nil {\n\t\t\tin, out := &in.ResolutionRules, &out.ResolutionRules\n\t\t\t*out = make([]ImageResolutionPolicyRule, len(*in))\n\t\t\tcopy(*out, *in)\n\t\t}\n\t\tif in.ExecutionRules != nil {\n\t\t\tin, out := &in.ExecutionRules, &out.ExecutionRules\n\t\t\t*out = make([]ImageExecutionPolicyRule, len(*in))\n\t\t\tfor i := range *in {\n\t\t\t\tif err := DeepCopy_v1_ImageExecutionPolicyRule(&(*in)[i], &(*out)[i], c); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}\n}", "func (in *JobConfig) DeepCopy() *JobConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(JobConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}" ]
[ "0.7011574", "0.6334423", "0.60347545", "0.57493186", "0.571666", "0.56987476", "0.5546887", "0.5438429", "0.5377126", "0.5345208", "0.5251954", "0.5242645", "0.523284", "0.510815", "0.5105444", "0.50964445", "0.50964445", "0.5054068", "0.5006278", "0.50049424", "0.49703652", "0.49703652", "0.4963517", "0.4949122", "0.49209714", "0.49170834", "0.49078172", "0.4821004", "0.4818515", "0.4816502", "0.4815983", "0.4815983", "0.4815983", "0.4808762", "0.48078445", "0.47874022", "0.47645986", "0.47441095", "0.47425395", "0.4732507", "0.47275105", "0.47136685", "0.47065902", "0.46757588", "0.4670484", "0.46699986", "0.4669732", "0.46661282", "0.4661819", "0.46588525", "0.46502912", "0.46201393", "0.46199834", "0.4607506", "0.4587328", "0.4585849", "0.45780692", "0.45577076", "0.45507148", "0.4530932", "0.4509271", "0.4508524", "0.45001006", "0.44976774", "0.44867283", "0.446962", "0.44671455", "0.4445113", "0.44442797", "0.44417033", "0.44352448", "0.44248304", "0.4423414", "0.44067353", "0.44055554", "0.44043252", "0.43862972", "0.43669933", "0.43667287", "0.4362432", "0.43609518", "0.4349862", "0.4348224", "0.4341806", "0.43393987", "0.43318668", "0.433093", "0.43298262", "0.43298262", "0.43296975", "0.43184915", "0.43184307", "0.43184003", "0.43026546", "0.43015775", "0.4295964", "0.42925015", "0.42913282", "0.4283049", "0.4278405" ]
0.84669703
0
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be nonnil.
func (in *KMSConfiguration) DeepCopyInto(out *KMSConfiguration) { *out = *in if in.CacheSize != nil { in, out := &in.CacheSize, &out.CacheSize *out = new(int32) **out = **in } if in.Timeout != nil { in, out := &in.Timeout, &out.Timeout *out = new(v1.Duration) **out = **in } return }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (r *RunInfo) DeepCopyInto(out *RunInfo) {\n\t*out = *r\n}", "func (in *Base) DeepCopyInto(out *Base) {\n\t*out = *in\n\treturn\n}", "func (in *ForkObject) DeepCopyInto(out *ForkObject) {\n\t*out = *in\n}", "func (in *TargetObjectInfo) DeepCopyInto(out *TargetObjectInfo) {\n\t*out = *in\n}", "func (in *DebugObjectInfo) DeepCopyInto(out *DebugObjectInfo) {\n\t*out = *in\n}", "func (in *Input) DeepCopyInto(out *Input) {\n\t*out = *in\n}", "func (u *SSN) DeepCopyInto(out *SSN) {\n\t*out = *u\n}", "func (in *ExistPvc) DeepCopyInto(out *ExistPvc) {\n\t*out = *in\n}", "func (in *DockerStep) DeepCopyInto(out *DockerStep) {\n\t*out = *in\n\tif in.Inline != nil {\n\t\tin, out := &in.Inline, &out.Inline\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tout.Auth = in.Auth\n\treturn\n}", "func (in *Container) DeepCopyInto(out *Container) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.Command != nil {\n\t\tin, out := &in.Command, &out.Command\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.LifeCycleScript != nil {\n\t\tin, out := &in.LifeCycleScript, &out.LifeCycleScript\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *RuntimeRef) DeepCopyInto(out *RuntimeRef) {\n\t*out = *in\n}", "func (in *Ibft2) DeepCopyInto(out *Ibft2) {\n\t*out = *in\n\treturn\n}", "func (in *TestResult) DeepCopyInto(out *TestResult) {\n\t*out = *in\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *Haproxy) DeepCopyInto(out *Haproxy) {\n\t*out = *in\n\treturn\n}", "func (in *SSH) DeepCopyInto(out *SSH) {\n\t*out = *in\n\treturn\n}", "func (in *Runtime) DeepCopyInto(out *Runtime) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) 
DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (b *Base64) DeepCopyInto(out *Base64) {\n\t*out = *b\n}", "func (in *EventDependencyTransformer) DeepCopyInto(out *EventDependencyTransformer) {\n\t*out = *in\n\treturn\n}", "func (in *StageOutput) DeepCopyInto(out *StageOutput) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Dependent) DeepCopyInto(out *Dependent) {\n\t*out = *in\n\treturn\n}", "func (in *GitFileGeneratorItem) DeepCopyInto(out *GitFileGeneratorItem) {\n\t*out = *in\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *AnsibleStep) DeepCopyInto(out *AnsibleStep) {\n\t*out = *in\n\treturn\n}", "func (in *Forks) DeepCopyInto(out *Forks) {\n\t*out = *in\n\tif in.DAO != nil {\n\t\tin, out := &in.DAO, &out.DAO\n\t\t*out = new(uint)\n\t\t**out = **in\n\t}\n}", "func (in *ContainerPort) DeepCopyInto(out *ContainerPort) {\n\t*out = *in\n}", "func (in *General) DeepCopyInto(out *General) {\n\t*out = *in\n\treturn\n}", "func (in *IsoContainer) DeepCopyInto(out *IsoContainer) {\n\t*out = *in\n}", "func (in *Git) DeepCopyInto(out *Git) {\n\t*out = *in\n\treturn\n}", "func (in *ConfigFile) DeepCopyInto(out *ConfigFile) {\n\t*out = *in\n}", "func (in *BackupProgress) DeepCopyInto(out *BackupProgress) {\n\t*out = *in\n}", "func (in *DataDisk) DeepCopyInto(out *DataDisk) {\n\t*out = *in\n}", "func (in *PhaseStep) DeepCopyInto(out *PhaseStep) {\n\t*out = *in\n}", "func (u *MAC) DeepCopyInto(out *MAC) {\n\t*out = *u\n}", "func 
(in *Variable) DeepCopyInto(out *Variable) {\n\t*out = *in\n}", "func (in *RestoreProgress) DeepCopyInto(out *RestoreProgress) {\n\t*out = *in\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *NamespacedObjectReference) DeepCopyInto(out *NamespacedObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Path) DeepCopyInto(out *Path) {\n\t*out = *in\n\treturn\n}", "func (in *GitDirectoryGeneratorItem) DeepCopyInto(out *GitDirectoryGeneratorItem) {\n\t*out = *in\n}", "func (in *NamePath) DeepCopyInto(out *NamePath) {\n\t*out = *in\n\treturn\n}", "func (in *ConsoleCreateObj) DeepCopyInto(out *ConsoleCreateObj) {\n\t*out = *in\n}", "func (in *UsedPipelineRun) DeepCopyInto(out *UsedPipelineRun) {\n\t*out = *in\n}", "func (in *BuildTemplate) DeepCopyInto(out *BuildTemplate) {\n\t*out = *in\n\tif in.Cmd != nil {\n\t\tin, out := &in.Cmd, &out.Cmd\n\t\t*out = make([]BuildTemplateStep, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n}", "func (in *ObjectInfo) DeepCopyInto(out *ObjectInfo) {\n\t*out = *in\n\tout.GroupVersionKind = in.GroupVersionKind\n\treturn\n}", "func (in *Files) DeepCopyInto(out *Files) {\n\t*out = *in\n}", "func (in *Source) DeepCopyInto(out *Source) {\n\t*out = *in\n\tif in.Dependencies != nil {\n\t\tin, out := &in.Dependencies, &out.Dependencies\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MavenRepositories != nil {\n\t\tin, out := &in.MavenRepositories, &out.MavenRepositories\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\treturn\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = *in\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = 
*in\n}", "func (in *StackBuild) DeepCopyInto(out *StackBuild) {\n\t*out = *in\n\treturn\n}", "func (in *BuildTaskRef) DeepCopyInto(out *BuildTaskRef) {\n\t*out = *in\n\treturn\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *PathInfo) DeepCopyInto(out *PathInfo) {\n\t*out = *in\n}", "func (in *PoA) DeepCopyInto(out *PoA) {\n\t*out = *in\n}", "func (in *Section) DeepCopyInto(out *Section) {\n\t*out = *in\n\tif in.SecretRefs != nil {\n\t\tin, out := &in.SecretRefs, &out.SecretRefs\n\t\t*out = make([]SecretReference, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Files != nil {\n\t\tin, out := &in.Files, &out.Files\n\t\t*out = make([]FileMount, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n}", "func (in *DNSSelection) DeepCopyInto(out *DNSSelection) {\n\t*out = *in\n\tif in.Include != nil {\n\t\tin, out := &in.Include, &out.Include\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Exclude != nil {\n\t\tin, out := &in.Exclude, &out.Exclude\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ReleaseVersion) DeepCopyInto(out *ReleaseVersion) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Command) DeepCopyInto(out *Command) {\n\t*out = *in\n\tif in.Flags != nil {\n\t\tin, out := &in.Flags, &out.Flags\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Value != nil {\n\t\tin, out := &in.Value, &out.Value\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *PathRule) DeepCopyInto(out *PathRule) {\n\t*out = *in\n\treturn\n}", "func (in *DockerLifecycleData) DeepCopyInto(out *DockerLifecycleData) {\n\t*out 
= *in\n}", "func (in *RunScriptStepConfig) DeepCopyInto(out *RunScriptStepConfig) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *Checksum) DeepCopyInto(out *Checksum) {\n\t*out = *in\n}", "func (in *DomainNameOutput) DeepCopyInto(out *DomainNameOutput) {\n\t*out = *in\n}", "func (in *InterfaceStruct) DeepCopyInto(out *InterfaceStruct) {\n\t*out = *in\n\tif in.val != nil {\n\t\tin, out := &in.val, &out.val\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Ref) DeepCopyInto(out *Ref) {\n\t*out = *in\n}", "func (in *MemorySpec) DeepCopyInto(out *MemorySpec) {\n\t*out = *in\n}", "func (in *BuildJenkinsInfo) DeepCopyInto(out *BuildJenkinsInfo) {\n\t*out = *in\n\treturn\n}", "func (in *VirtualDatabaseBuildObject) DeepCopyInto(out *VirtualDatabaseBuildObject) {\n\t*out = *in\n\tif in.Incremental != nil {\n\t\tin, out := &in.Incremental, &out.Incremental\n\t\t*out = new(bool)\n\t\t**out = **in\n\t}\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]v1.EnvVar, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\tout.Git = in.Git\n\tin.Source.DeepCopyInto(&out.Source)\n\tif in.Webhooks != nil {\n\t\tin, out := &in.Webhooks, &out.Webhooks\n\t\t*out = make([]WebhookSecret, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *MaintenanceInfo) DeepCopyInto(out *MaintenanceInfo) {\n\t*out = *in\n\treturn\n}", "func (in *KopsNode) DeepCopyInto(out *KopsNode) {\n\t*out = *in\n\treturn\n}", "func (in *FalconAPI) DeepCopyInto(out *FalconAPI) {\n\t*out = *in\n}", "func (in *EBS) DeepCopyInto(out *EBS) {\n\t*out = *in\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n\treturn\n}", "func (in *Empty) DeepCopyInto(out *Empty) {\n\t*out = *in\n\tout.XXX_NoUnkeyedLiteral = in.XXX_NoUnkeyedLiteral\n\tif in.XXX_unrecognized != nil {\n\t\tin, out := 
&in.XXX_unrecognized, &out.XXX_unrecognized\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ComponentDistGit) DeepCopyInto(out *ComponentDistGit) {\n\t*out = *in\n\treturn\n}", "func (in *Memory) DeepCopyInto(out *Memory) {\n\t*out = *in\n\tout.Required = in.Required.DeepCopy()\n}", "func (in *Persistence) DeepCopyInto(out *Persistence) {\n\t*out = *in\n\tout.Size = in.Size.DeepCopy()\n\treturn\n}", "func (in *ManagedDisk) DeepCopyInto(out *ManagedDisk) {\n\t*out = *in\n}", "func (e *Email) DeepCopyInto(out *Email) {\n\t*out = *e\n}", "func (in *ImageInfo) DeepCopyInto(out *ImageInfo) {\n\t*out = *in\n}", "func (in *ShootRef) DeepCopyInto(out *ShootRef) {\n\t*out = *in\n}", "func (in *N3000Fpga) DeepCopyInto(out *N3000Fpga) {\n\t*out = *in\n}", "func (in *NetflowType) DeepCopyInto(out *NetflowType) {\n\t*out = *in\n\treturn\n}", "func (in *BuiltInAdapter) DeepCopyInto(out *BuiltInAdapter) {\n\t*out = *in\n}", "func (in *Node) DeepCopyInto(out *Node) {\n\t*out = *in\n\tif in.FailStatus != nil {\n\t\tin, out := &in.FailStatus, &out.FailStatus\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MigratingSlots != nil {\n\t\tin, out := &in.MigratingSlots, &out.MigratingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.ImportingSlots != nil {\n\t\tin, out := &in.ImportingSlots, &out.ImportingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n}", "func (in *CPUSpec) DeepCopyInto(out *CPUSpec) {\n\t*out = *in\n}", "func (in *LoopState) DeepCopyInto(out *LoopState) {\n\t*out = *in\n}" ]
[ "0.8215289", "0.81280124", "0.81039286", "0.80862963", "0.8083811", "0.80673146", "0.8064545", "0.8026454", "0.8012046", "0.7996313", "0.799204", "0.79887754", "0.7987097", "0.7986994", "0.7986994", "0.79854053", "0.7975989", "0.7972486", "0.79695636", "0.79695636", "0.79695636", "0.7967528", "0.79624444", "0.7961954", "0.7945754", "0.7945754", "0.7944541", "0.79428566", "0.7942668", "0.7942668", "0.7940451", "0.793851", "0.7936731", "0.79294837", "0.79252166", "0.7915377", "0.7911627", "0.7911138", "0.7909384", "0.790913", "0.7908773", "0.7905649", "0.79050326", "0.7904594", "0.7904594", "0.7904235", "0.79036915", "0.79020816", "0.78988886", "0.78977424", "0.7891376", "0.7891024", "0.7889831", "0.78890276", "0.7887135", "0.788637", "0.7885264", "0.7885264", "0.7884786", "0.7880785", "0.78745943", "0.78745943", "0.78745407", "0.78734446", "0.78724426", "0.78713626", "0.78713554", "0.78652424", "0.7863321", "0.7863321", "0.7863321", "0.7863293", "0.7862628", "0.7860664", "0.7858556", "0.785785", "0.78571486", "0.7851332", "0.78453225", "0.78448987", "0.78415996", "0.7837483", "0.7837037", "0.7836443", "0.78351796", "0.78329664", "0.7831094", "0.7829445", "0.7826582", "0.7824499", "0.78242797", "0.78227437", "0.78192484", "0.7818843", "0.78128535", "0.7812535", "0.78111476", "0.78111106", "0.781107", "0.78093034", "0.7808775" ]
0.0
-1
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KMSConfiguration.
func (in *KMSConfiguration) DeepCopy() *KMSConfiguration { if in == nil { return nil } out := new(KMSConfiguration) in.DeepCopyInto(out) return out }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (in *KalmConfig) DeepCopy() *KalmConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KalmConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KubemanagerConfig) DeepCopy() *KubemanagerConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubemanagerConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EKSManagedConfiguration) DeepCopy() *EKSManagedConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EKSManagedConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KubeadmConfig) DeepCopy() *KubeadmConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubeadmConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KopsConfig) DeepCopy() *KopsConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KopsConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EKSCFConfiguration) DeepCopy() *EKSCFConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EKSCFConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KVConfiguration) DeepCopy() *KVConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KVConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KubebenchConfig) DeepCopy() *KubebenchConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubebenchConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KubeadmConfigList) DeepCopy() *KubeadmConfigList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubeadmConfigList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KubeadmConfigSpec) DeepCopy() *KubeadmConfigSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubeadmConfigSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SmsConfigurationType) DeepCopy() *SmsConfigurationType {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SmsConfigurationType)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *CrdManagementConfiguration) DeepCopy() *CrdManagementConfiguration {\n\tif 
in == nil {\n\t\treturn nil\n\t}\n\tout := new(CrdManagementConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MetricsConfiguration) DeepCopy() *MetricsConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MetricsConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MySQLConfiguration) DeepCopy() *MySQLConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MySQLConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AWSAccessRequestConfiguration) DeepCopy() *AWSAccessRequestConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AWSAccessRequestConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KMSKeySpec) DeepCopy() *KMSKeySpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KMSKeySpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KVMConfig) DeepCopy() *KVMConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KVMConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MariaDBConfiguration) DeepCopy() *MariaDBConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MariaDBConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KMSKeyList) DeepCopy() *KMSKeyList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KMSKeyList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ControllerManagerConfiguration) DeepCopy() *ControllerManagerConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ControllerManagerConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ControllerManagerConfiguration) DeepCopy() *ControllerManagerConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ControllerManagerConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ControllerManagerLoggerConfiguration) DeepCopy() *ControllerManagerLoggerConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := 
new(ControllerManagerLoggerConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KokuMetricsConfig) DeepCopy() *KokuMetricsConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KokuMetricsConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KMSKey) DeepCopy() *KMSKey {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KMSKey)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *GKEPrivateClusterConfig) DeepCopy() *GKEPrivateClusterConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(GKEPrivateClusterConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ControllerManagerWebhookConfiguration) DeepCopy() *ControllerManagerWebhookConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ControllerManagerWebhookConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ControllerManagerWebhookConfiguration) DeepCopy() *ControllerManagerWebhookConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ControllerManagerWebhookConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c *Client) GetKmsConfig(ctx context.Context, req *netapppb.GetKmsConfigRequest, opts ...gax.CallOption) (*netapppb.KmsConfig, error) {\n\treturn c.internalClient.GetKmsConfig(ctx, req, opts...)\n}", "func (in *GKEMasterAuthorizedNetworksConfig) DeepCopy() *GKEMasterAuthorizedNetworksConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(GKEMasterAuthorizedNetworksConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretboxConfiguration) DeepCopy() *SecretboxConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretboxConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretAccessRequestConfiguration) DeepCopy() *SecretAccessRequestConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretAccessRequestConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *SseConfiguration) SetKmsEncryptionConfig(v *KmsEncryptionConfig) 
*SseConfiguration {\n\ts.KmsEncryptionConfig = v\n\treturn s\n}", "func (in *StickinessConfig) DeepCopy() *StickinessConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(StickinessConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *GetKubeconfigOptions) DeepCopy() *GetKubeconfigOptions {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(GetKubeconfigOptions)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LoggingConfiguration) DeepCopy() *LoggingConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LoggingConfiguration) DeepCopy() *LoggingConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (k Kconfig) KconfigEqual(other Kconfig) bool {\n\treturn reflect.DeepEqual(k.Spec, other.Spec)\n}", "func (in *ControllerManagerControllerConfiguration) DeepCopy() *ControllerManagerControllerConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ControllerManagerControllerConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ControllerManagerControllerConfiguration) DeepCopy() *ControllerManagerControllerConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ControllerManagerControllerConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KubeAuthConfiguration) DeepCopy() *KubeAuthConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubeAuthConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SharedMemoryStoreConfig) DeepCopy() *SharedMemoryStoreConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SharedMemoryStoreConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DCGMExporterServiceMonitorConfig) DeepCopy() *DCGMExporterServiceMonitorConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DCGMExporterServiceMonitorConfig)\n\tin.DeepCopyInto(out)\n\treturn 
out\n}", "func (in *MemoryQOSCfg) DeepCopy() *MemoryQOSCfg {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MemoryQOSCfg)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MultiClusterConfig) DeepCopy() *MultiClusterConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MultiClusterConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DeviceConfigurationType) DeepCopy() *DeviceConfigurationType {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DeviceConfigurationType)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *GKEClusterConfig) DeepCopy() *GKEClusterConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(GKEClusterConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigurationSpec) DeepCopy() *ConfigurationSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigurationSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigurationSpec) DeepCopy() *ConfigurationSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigurationSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o JobCopyDestinationEncryptionConfigurationOutput) KmsKeyVersion() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobCopyDestinationEncryptionConfiguration) *string { return v.KmsKeyVersion }).(pulumi.StringPtrOutput)\n}", "func (in *KVMConfigSpecKVMK8sKVM) DeepCopy() *KVMConfigSpecKVMK8sKVM {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KVMConfigSpecKVMK8sKVM)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KubeConfig) DeepCopy() *KubeConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubeConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KubeConfig) DeepCopy() *KubeConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubeConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *GKEClusterConfigList) DeepCopy() *GKEClusterConfigList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(GKEClusterConfigList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", 
"func (in *LoggingConfigurationSpec) DeepCopy() *LoggingConfigurationSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfigurationSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c *Config) Copy() *Config {\n\tc.Lock()\n\tdefer c.Unlock()\n\tn := &Config{\n\t\tm: make(map[string]interface{}),\n\t}\n\tfor key, value := range c.m {\n\t\tn.m[key] = value\n\t}\n\treturn n\n}", "func (in *KokuMetricsConfigSpec) DeepCopy() *KokuMetricsConfigSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KokuMetricsConfigSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *GKEClusterConfigSpec) DeepCopy() *GKEClusterConfigSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(GKEClusterConfigSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *BcsConfigMapSpec) DeepCopy() *BcsConfigMapSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BcsConfigMapSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KVMConfigSpec) DeepCopy() *KVMConfigSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KVMConfigSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *PrivateLinkServiceOperatorConfigMaps) DeepCopy() *PrivateLinkServiceOperatorConfigMaps {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PrivateLinkServiceOperatorConfigMaps)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *GKENodeTaintConfig) DeepCopy() *GKENodeTaintConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := 
new(GKENodeTaintConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EncryptionConfiguration) DeepCopy() *EncryptionConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EncryptionConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MongoDBConfiguration) DeepCopy() *MongoDBConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MongoDBConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KVMConfigSpecKVM) DeepCopy() *KVMConfigSpecKVM {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KVMConfigSpecKVM)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KVMConfigList) DeepCopy() *KVMConfigList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KVMConfigList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *PodMemoryQOSConfig) DeepCopy() *PodMemoryQOSConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PodMemoryQOSConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretEngineConfiguration) DeepCopy() *SecretEngineConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretEngineConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *TargetNamespaceConfig) DeepCopy() *TargetNamespaceConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(TargetNamespaceConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AWSConfiguration) DeepCopy() *AWSConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AWSConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *PrivateLinkServiceIpConfiguration) DeepCopy() *PrivateLinkServiceIpConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PrivateLinkServiceIpConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigMapTemplate) DeepCopy() *ConfigMapTemplate {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigMapTemplate)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KokuMetricsConfigList) DeepCopy() 
*KokuMetricsConfigList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KokuMetricsConfigList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *RedisConfiguration) DeepCopy() *RedisConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(RedisConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c *Client) ListKmsConfigs(ctx context.Context, req *netapppb.ListKmsConfigsRequest, opts ...gax.CallOption) *KmsConfigIterator {\n\treturn c.internalClient.ListKmsConfigs(ctx, req, opts...)\n}", "func (in *BootConfiguration) DeepCopy() *BootConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BootConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *BaseKubernetesContainerConfiguration) DeepCopy() *BaseKubernetesContainerConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BaseKubernetesContainerConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Servers_Configuration_Spec) DeepCopy() *Servers_Configuration_Spec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Servers_Configuration_Spec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *BcsConfigMap) DeepCopy() *BcsConfigMap {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BcsConfigMap)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o JobCopyDestinationEncryptionConfigurationPtrOutput) KmsKeyVersion() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *JobCopyDestinationEncryptionConfiguration) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.KmsKeyVersion\n\t}).(pulumi.StringPtrOutput)\n}", "func (in *ConfigurationList) DeepCopy() *ConfigurationList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigurationList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (proxy *StandAloneProxyConfig) DeepCopy() *StandAloneProxyConfig {\n\tif proxy == nil {\n\t\treturn nil\n\t}\n\tcloned := new(StandAloneProxyConfig)\n\tcloned.proxyCredentials = 
make(map[string]*ProxyUser)\n\tcloned.managementServers = make(map[url.URL]*ManagementServer)\n\tcloned.managedArrays = make(map[string]*StorageArray)\n\tfor key, value := range proxy.managedArrays {\n\t\tarray := *value\n\t\tcloned.managedArrays[key] = &array\n\t}\n\tfor key, value := range proxy.managementServers {\n\t\tcloned.managementServers[key] = value.DeepCopy()\n\t}\n\tfor key, value := range proxy.proxyCredentials {\n\t\tcreds := *value\n\t\tcloned.proxyCredentials[key] = &creds\n\t}\n\treturn cloned\n}", "func (in *JKSKeystore) DeepCopy() *JKSKeystore {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(JKSKeystore)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *GKENodeConfig) DeepCopy() *GKENodeConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(GKENodeConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (g *GKE) KubeConfig(ctx context.Context) (*rest.Config, error) {\n\tcluster, location, project, useInternalIP, err := clpFromClusterCtx(g.SkyCtx)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to extract cluster info from %v: %v\", g, err)\n\t}\n\treturn BuildKubeRestConfSACred(ctx, cluster, location, project, useInternalIP, g.svcAcctKeyFile, g.userAgent)\n}", "func (in *WorkloadMetadataConfig) DeepCopy() *WorkloadMetadataConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(WorkloadMetadataConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *OpenShiftControllerManagerConfig) DeepCopy() *OpenShiftControllerManagerConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(OpenShiftControllerManagerConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func UnmarshalKMSSettings(m map[string]json.RawMessage, result interface{}) (err error) {\n\tobj := new(KMSSettings)\n\terr = core.UnmarshalPrimitive(m, \"location\", &obj.Location)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = core.UnmarshalPrimitive(m, \"encryption_scheme\", &obj.EncryptionScheme)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = 
core.UnmarshalPrimitive(m, \"resource_group\", &obj.ResourceGroup)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = core.UnmarshalModel(m, \"primary_crk\", &obj.PrimaryCrk, UnmarshalKMSSettingsPrimaryCrk)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = core.UnmarshalModel(m, \"secondary_crk\", &obj.SecondaryCrk, UnmarshalKMSSettingsSecondaryCrk)\n\tif err != nil {\n\t\treturn\n\t}\n\treflect.ValueOf(result).Elem().Set(reflect.ValueOf(obj))\n\treturn\n}", "func (c *Config) DeepCopy() *Config {\n\tif c == nil {\n\t\treturn &Config{}\n\t}\n\tcfg := *c\n\treturn &cfg\n}", "func NewConfigurationSSEKMS(kmsMasterKey string) *Configuration {\n\treturn &Configuration{\n\t\tRules: []Rule{\n\t\t\t{\n\t\t\t\tApply: ApplySSEByDefault{\n\t\t\t\t\tKmsMasterKeyID: kmsMasterKey,\n\t\t\t\t\tSSEAlgorithm: \"aws:kms\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func (in *SharedMemoryServerConfig) DeepCopy() *SharedMemoryServerConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SharedMemoryServerConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o JobLoadDestinationEncryptionConfigurationOutput) KmsKeyVersion() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobLoadDestinationEncryptionConfiguration) *string { return v.KmsKeyVersion }).(pulumi.StringPtrOutput)\n}", "func (in *ApplicationConfiguration) DeepCopy() *ApplicationConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ApplicationConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MutatingWebhookConfigurationRef) DeepCopy() *MutatingWebhookConfigurationRef {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MutatingWebhookConfigurationRef)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *GCPConfiguration) DeepCopy() *GCPConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(GCPConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ServiceAccountControllerConfiguration) DeepCopy() *ServiceAccountControllerConfiguration {\n\tif in == nil {\n\t\treturn 
nil\n\t}\n\tout := new(ServiceAccountControllerConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o BackupOutput) EncryptionConfigKmsKeyName() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *Backup) pulumi.StringPtrOutput { return v.EncryptionConfigKmsKeyName }).(pulumi.StringPtrOutput)\n}", "func (in *KVMConfigSpecKVMK8sKVMDocker) DeepCopy() *KVMConfigSpecKVMK8sKVMDocker {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KVMConfigSpecKVMK8sKVMDocker)\n\tin.DeepCopyInto(out)\n\treturn out\n}" ]
[ "0.7320824", "0.6503556", "0.6401234", "0.6188418", "0.6117832", "0.6105569", "0.59326637", "0.5710905", "0.5679013", "0.56328255", "0.5623401", "0.5505632", "0.54571265", "0.5398764", "0.5378795", "0.5364599", "0.53576845", "0.5333917", "0.5312741", "0.5305671", "0.5305671", "0.5291316", "0.5280115", "0.5252718", "0.52487415", "0.5234498", "0.5234498", "0.51478785", "0.51456535", "0.5106915", "0.5099548", "0.5069849", "0.5054385", "0.50503707", "0.5005388", "0.5005388", "0.49932897", "0.4992017", "0.4992017", "0.49617696", "0.4924222", "0.48887536", "0.4858364", "0.48524597", "0.48356938", "0.48325393", "0.48060256", "0.48060256", "0.47911215", "0.47910473", "0.47861952", "0.47861952", "0.47845626", "0.4778098", "0.47766158", "0.47713622", "0.4770698", "0.4770698", "0.4770698", "0.47675568", "0.47667867", "0.47665957", "0.47654215", "0.4759552", "0.47566843", "0.4731189", "0.4727384", "0.47254223", "0.4719976", "0.47167408", "0.47139132", "0.47138572", "0.47088185", "0.4708703", "0.4703641", "0.46925375", "0.46886942", "0.46869993", "0.46865875", "0.46738216", "0.46694887", "0.465537", "0.46449688", "0.4644965", "0.46390256", "0.4630661", "0.4629305", "0.46220487", "0.46194556", "0.46170318", "0.46160033", "0.46084353", "0.46027786", "0.45994836", "0.45982057", "0.45975778", "0.45952702", "0.4595106", "0.45949808", "0.45881918" ]
0.81662655
0
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be nonnil.
func (in *Key) DeepCopyInto(out *Key) { *out = *in return }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (r *RunInfo) DeepCopyInto(out *RunInfo) {\n\t*out = *r\n}", "func (in *Base) DeepCopyInto(out *Base) {\n\t*out = *in\n\treturn\n}", "func (in *ForkObject) DeepCopyInto(out *ForkObject) {\n\t*out = *in\n}", "func (in *TargetObjectInfo) DeepCopyInto(out *TargetObjectInfo) {\n\t*out = *in\n}", "func (in *DebugObjectInfo) DeepCopyInto(out *DebugObjectInfo) {\n\t*out = *in\n}", "func (in *Input) DeepCopyInto(out *Input) {\n\t*out = *in\n}", "func (u *SSN) DeepCopyInto(out *SSN) {\n\t*out = *u\n}", "func (in *ExistPvc) DeepCopyInto(out *ExistPvc) {\n\t*out = *in\n}", "func (in *DockerStep) DeepCopyInto(out *DockerStep) {\n\t*out = *in\n\tif in.Inline != nil {\n\t\tin, out := &in.Inline, &out.Inline\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tout.Auth = in.Auth\n\treturn\n}", "func (in *Container) DeepCopyInto(out *Container) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.Command != nil {\n\t\tin, out := &in.Command, &out.Command\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.LifeCycleScript != nil {\n\t\tin, out := &in.LifeCycleScript, &out.LifeCycleScript\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *RuntimeRef) DeepCopyInto(out *RuntimeRef) {\n\t*out = *in\n}", "func (in *Ibft2) DeepCopyInto(out *Ibft2) {\n\t*out = *in\n\treturn\n}", "func (in *TestResult) DeepCopyInto(out *TestResult) {\n\t*out = *in\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *Haproxy) DeepCopyInto(out *Haproxy) {\n\t*out = *in\n\treturn\n}", "func (in *SSH) DeepCopyInto(out *SSH) {\n\t*out = *in\n\treturn\n}", "func (in *Runtime) DeepCopyInto(out *Runtime) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) 
DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (b *Base64) DeepCopyInto(out *Base64) {\n\t*out = *b\n}", "func (in *EventDependencyTransformer) DeepCopyInto(out *EventDependencyTransformer) {\n\t*out = *in\n\treturn\n}", "func (in *StageOutput) DeepCopyInto(out *StageOutput) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Dependent) DeepCopyInto(out *Dependent) {\n\t*out = *in\n\treturn\n}", "func (in *GitFileGeneratorItem) DeepCopyInto(out *GitFileGeneratorItem) {\n\t*out = *in\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *AnsibleStep) DeepCopyInto(out *AnsibleStep) {\n\t*out = *in\n\treturn\n}", "func (in *Forks) DeepCopyInto(out *Forks) {\n\t*out = *in\n\tif in.DAO != nil {\n\t\tin, out := &in.DAO, &out.DAO\n\t\t*out = new(uint)\n\t\t**out = **in\n\t}\n}", "func (in *ContainerPort) DeepCopyInto(out *ContainerPort) {\n\t*out = *in\n}", "func (in *General) DeepCopyInto(out *General) {\n\t*out = *in\n\treturn\n}", "func (in *IsoContainer) DeepCopyInto(out *IsoContainer) {\n\t*out = *in\n}", "func (in *Git) DeepCopyInto(out *Git) {\n\t*out = *in\n\treturn\n}", "func (in *ConfigFile) DeepCopyInto(out *ConfigFile) {\n\t*out = *in\n}", "func (in *BackupProgress) DeepCopyInto(out *BackupProgress) {\n\t*out = *in\n}", "func (in *DataDisk) DeepCopyInto(out *DataDisk) {\n\t*out = *in\n}", "func (in *PhaseStep) DeepCopyInto(out *PhaseStep) {\n\t*out = *in\n}", "func (u *MAC) DeepCopyInto(out *MAC) {\n\t*out = *u\n}", "func 
(in *Variable) DeepCopyInto(out *Variable) {\n\t*out = *in\n}", "func (in *RestoreProgress) DeepCopyInto(out *RestoreProgress) {\n\t*out = *in\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *NamespacedObjectReference) DeepCopyInto(out *NamespacedObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Path) DeepCopyInto(out *Path) {\n\t*out = *in\n\treturn\n}", "func (in *GitDirectoryGeneratorItem) DeepCopyInto(out *GitDirectoryGeneratorItem) {\n\t*out = *in\n}", "func (in *NamePath) DeepCopyInto(out *NamePath) {\n\t*out = *in\n\treturn\n}", "func (in *ConsoleCreateObj) DeepCopyInto(out *ConsoleCreateObj) {\n\t*out = *in\n}", "func (in *UsedPipelineRun) DeepCopyInto(out *UsedPipelineRun) {\n\t*out = *in\n}", "func (in *BuildTemplate) DeepCopyInto(out *BuildTemplate) {\n\t*out = *in\n\tif in.Cmd != nil {\n\t\tin, out := &in.Cmd, &out.Cmd\n\t\t*out = make([]BuildTemplateStep, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n}", "func (in *ObjectInfo) DeepCopyInto(out *ObjectInfo) {\n\t*out = *in\n\tout.GroupVersionKind = in.GroupVersionKind\n\treturn\n}", "func (in *Files) DeepCopyInto(out *Files) {\n\t*out = *in\n}", "func (in *Source) DeepCopyInto(out *Source) {\n\t*out = *in\n\tif in.Dependencies != nil {\n\t\tin, out := &in.Dependencies, &out.Dependencies\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MavenRepositories != nil {\n\t\tin, out := &in.MavenRepositories, &out.MavenRepositories\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\treturn\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = *in\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = 
*in\n}", "func (in *StackBuild) DeepCopyInto(out *StackBuild) {\n\t*out = *in\n\treturn\n}", "func (in *BuildTaskRef) DeepCopyInto(out *BuildTaskRef) {\n\t*out = *in\n\treturn\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *PathInfo) DeepCopyInto(out *PathInfo) {\n\t*out = *in\n}", "func (in *PoA) DeepCopyInto(out *PoA) {\n\t*out = *in\n}", "func (in *Section) DeepCopyInto(out *Section) {\n\t*out = *in\n\tif in.SecretRefs != nil {\n\t\tin, out := &in.SecretRefs, &out.SecretRefs\n\t\t*out = make([]SecretReference, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Files != nil {\n\t\tin, out := &in.Files, &out.Files\n\t\t*out = make([]FileMount, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n}", "func (in *DNSSelection) DeepCopyInto(out *DNSSelection) {\n\t*out = *in\n\tif in.Include != nil {\n\t\tin, out := &in.Include, &out.Include\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Exclude != nil {\n\t\tin, out := &in.Exclude, &out.Exclude\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ReleaseVersion) DeepCopyInto(out *ReleaseVersion) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Command) DeepCopyInto(out *Command) {\n\t*out = *in\n\tif in.Flags != nil {\n\t\tin, out := &in.Flags, &out.Flags\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Value != nil {\n\t\tin, out := &in.Value, &out.Value\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *PathRule) DeepCopyInto(out *PathRule) {\n\t*out = *in\n\treturn\n}", "func (in *DockerLifecycleData) DeepCopyInto(out *DockerLifecycleData) {\n\t*out 
= *in\n}", "func (in *RunScriptStepConfig) DeepCopyInto(out *RunScriptStepConfig) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *Checksum) DeepCopyInto(out *Checksum) {\n\t*out = *in\n}", "func (in *DomainNameOutput) DeepCopyInto(out *DomainNameOutput) {\n\t*out = *in\n}", "func (in *InterfaceStruct) DeepCopyInto(out *InterfaceStruct) {\n\t*out = *in\n\tif in.val != nil {\n\t\tin, out := &in.val, &out.val\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Ref) DeepCopyInto(out *Ref) {\n\t*out = *in\n}", "func (in *MemorySpec) DeepCopyInto(out *MemorySpec) {\n\t*out = *in\n}", "func (in *BuildJenkinsInfo) DeepCopyInto(out *BuildJenkinsInfo) {\n\t*out = *in\n\treturn\n}", "func (in *VirtualDatabaseBuildObject) DeepCopyInto(out *VirtualDatabaseBuildObject) {\n\t*out = *in\n\tif in.Incremental != nil {\n\t\tin, out := &in.Incremental, &out.Incremental\n\t\t*out = new(bool)\n\t\t**out = **in\n\t}\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]v1.EnvVar, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\tout.Git = in.Git\n\tin.Source.DeepCopyInto(&out.Source)\n\tif in.Webhooks != nil {\n\t\tin, out := &in.Webhooks, &out.Webhooks\n\t\t*out = make([]WebhookSecret, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *MaintenanceInfo) DeepCopyInto(out *MaintenanceInfo) {\n\t*out = *in\n\treturn\n}", "func (in *KopsNode) DeepCopyInto(out *KopsNode) {\n\t*out = *in\n\treturn\n}", "func (in *FalconAPI) DeepCopyInto(out *FalconAPI) {\n\t*out = *in\n}", "func (in *EBS) DeepCopyInto(out *EBS) {\n\t*out = *in\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n\treturn\n}", "func (in *Empty) DeepCopyInto(out *Empty) {\n\t*out = *in\n\tout.XXX_NoUnkeyedLiteral = in.XXX_NoUnkeyedLiteral\n\tif in.XXX_unrecognized != nil {\n\t\tin, out := 
&in.XXX_unrecognized, &out.XXX_unrecognized\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ComponentDistGit) DeepCopyInto(out *ComponentDistGit) {\n\t*out = *in\n\treturn\n}", "func (in *Memory) DeepCopyInto(out *Memory) {\n\t*out = *in\n\tout.Required = in.Required.DeepCopy()\n}", "func (in *Persistence) DeepCopyInto(out *Persistence) {\n\t*out = *in\n\tout.Size = in.Size.DeepCopy()\n\treturn\n}", "func (in *ManagedDisk) DeepCopyInto(out *ManagedDisk) {\n\t*out = *in\n}", "func (e *Email) DeepCopyInto(out *Email) {\n\t*out = *e\n}", "func (in *ImageInfo) DeepCopyInto(out *ImageInfo) {\n\t*out = *in\n}", "func (in *ShootRef) DeepCopyInto(out *ShootRef) {\n\t*out = *in\n}", "func (in *N3000Fpga) DeepCopyInto(out *N3000Fpga) {\n\t*out = *in\n}", "func (in *NetflowType) DeepCopyInto(out *NetflowType) {\n\t*out = *in\n\treturn\n}", "func (in *BuiltInAdapter) DeepCopyInto(out *BuiltInAdapter) {\n\t*out = *in\n}", "func (in *Node) DeepCopyInto(out *Node) {\n\t*out = *in\n\tif in.FailStatus != nil {\n\t\tin, out := &in.FailStatus, &out.FailStatus\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MigratingSlots != nil {\n\t\tin, out := &in.MigratingSlots, &out.MigratingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.ImportingSlots != nil {\n\t\tin, out := &in.ImportingSlots, &out.ImportingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n}", "func (in *CPUSpec) DeepCopyInto(out *CPUSpec) {\n\t*out = *in\n}", "func (in *LoopState) DeepCopyInto(out *LoopState) {\n\t*out = *in\n}" ]
[ "0.8215289", "0.81280124", "0.81039286", "0.80862963", "0.8083811", "0.80673146", "0.8064545", "0.8026454", "0.8012046", "0.7996313", "0.799204", "0.79887754", "0.7987097", "0.7986994", "0.7986994", "0.79854053", "0.7975989", "0.7972486", "0.79695636", "0.79695636", "0.79695636", "0.7967528", "0.79624444", "0.7961954", "0.7945754", "0.7945754", "0.7944541", "0.79428566", "0.7942668", "0.7942668", "0.7940451", "0.793851", "0.7936731", "0.79294837", "0.79252166", "0.7915377", "0.7911627", "0.7911138", "0.7909384", "0.790913", "0.7908773", "0.7905649", "0.79050326", "0.7904594", "0.7904594", "0.7904235", "0.79036915", "0.79020816", "0.78988886", "0.78977424", "0.7891376", "0.7891024", "0.7889831", "0.78890276", "0.7887135", "0.788637", "0.7885264", "0.7885264", "0.7884786", "0.7880785", "0.78745943", "0.78745943", "0.78745407", "0.78734446", "0.78724426", "0.78713626", "0.78713554", "0.78652424", "0.7863321", "0.7863321", "0.7863321", "0.7863293", "0.7862628", "0.7860664", "0.7858556", "0.785785", "0.78571486", "0.7851332", "0.78453225", "0.78448987", "0.78415996", "0.7837483", "0.7837037", "0.7836443", "0.78351796", "0.78329664", "0.7831094", "0.7829445", "0.7826582", "0.7824499", "0.78242797", "0.78227437", "0.78192484", "0.7818843", "0.78128535", "0.7812535", "0.78111476", "0.78111106", "0.781107", "0.78093034", "0.7808775" ]
0.0
-1
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Key.
func (in *Key) DeepCopy() *Key { if in == nil { return nil } out := new(Key) in.DeepCopyInto(out) return out }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (in *KeyReference) DeepCopy() *KeyReference {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KeyReference)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SignerKey) DeepCopy() *SignerKey {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SignerKey)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *TrustKey) DeepCopy() *TrustKey {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(TrustKey)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *NameKey) DeepCopy() *NameKey {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(NameKey)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SignKey) DeepCopy() *SignKey {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SignKey)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KMSKey) DeepCopy() *KMSKey {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KMSKey)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *HlsContentProtection) SetKey(v string) *HlsContentProtection {\n\ts.Key = &v\n\treturn s\n}", "func (o *LabelProperties) SetKey(v string) {\n\n\to.Key = &v\n\n}", "func (s *PlayReadyDrm) SetKey(v string) *PlayReadyDrm {\n\ts.Key = &v\n\treturn s\n}", "func (s *AssetDestinationEntry) SetKey(v string) *AssetDestinationEntry {\n\ts.Key = &v\n\treturn s\n}", "func (s *CaptionSource) SetKey(v string) *CaptionSource {\n\ts.Key = &v\n\treturn s\n}", "func (o *LongProperty) SetKey(v string) {\n\to.Key = &v\n}", "func (s *EnvironmentParameter) SetKey(v string) *EnvironmentParameter {\n\ts.Key = &v\n\treturn s\n}", "func NewKey(kdf kdf.HKDF, key []byte) *Key {\n\trootKey := Key{\n\t\tkdf: kdf,\n\t\tkey: key,\n\t}\n\n\treturn &rootKey\n}", "func (o *CustomHostMetadataKey) SetKey(v string) {\n\to.Key = v\n}", "func (in *ProjectKey) DeepCopy() *ProjectKey {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ProjectKey)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *ConfigurationTag) SetKey(v string) *ConfigurationTag {\n\ts.Key = &v\n\treturn s\n}", "func (s 
*ProvisioningParameter) SetKey(v string) *ProvisioningParameter {\n\ts.Key = &v\n\treturn s\n}", "func (s *AssetSourceEntry) SetKey(v string) *AssetSourceEntry {\n\ts.Key = &v\n\treturn s\n}", "func NewKey() *Key {\n\tkeypair, err := crypto.GenerateKeyPair()\n\tcommon.FatalIfErr(err, \"There was an error generating a key pair\")\n\treturn &Key{\n\t\tID: uuid.NewRandom(),\n\t\tKeyPair: &keypair,\n\t}\n}", "func (s *KeyValuePair) SetKey(v string) *KeyValuePair {\n\ts.Key = &v\n\treturn s\n}", "func (s *ListServiceInstancesFilter) SetKey(v string) *ListServiceInstancesFilter {\n\ts.Key = &v\n\treturn s\n}", "func (o *ApiKey) SetKey(v string) {\n\to.Key = &v\n}", "func (o *ResourceDefinitionFilter) SetKey(v string) {\n\to.Key = v\n}", "func (o *SearchTagItem) SetKey(v string) {\n\to.Key = &v\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func NewKey(key string) (*Key, error) {\n\tif len(key) != 32 {\n\t\treturn nil, fmt.Errorf(\"invalid key[%s]'s len[%d]\", key, len(key))\n\t}\n\tpayload, err := hex.DecodeString(key)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"invalid key[%s]\", key)\n\t}\n\tbuf := bytes.NewBuffer(payload)\n\tvar rt uint64\n\tvar counter uint64\n\tif err := binary.Read(buf, binary.BigEndian, &rt); err != nil {\n\t\treturn nil, fmt.Errorf(\"invalid key[%s]\", key)\n\t}\n\tif err := binary.Read(buf, binary.BigEndian, &counter); err != nil {\n\t\treturn nil, fmt.Errorf(\"invalid key[%s]\", key)\n\t}\n\treturn &Key{Rand: uint32(rt >> 40), Timestamp: rt << 24 >> 24, Counter: counter}, nil\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) 
SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *Tag) SetKey(v string) *Tag {\n\ts.Key = &v\n\treturn s\n}", "func (s *FeatureParameter) SetKey(v string) *FeatureParameter {\n\ts.Key = &v\n\treturn s\n}", "func (s *TagRef) SetKey(v string) *TagRef {\n\ts.Key = &v\n\treturn s\n}", "func (o *OAUTHKey) DeepCopy() *OAUTHKey {\n\n\tif o == nil {\n\t\treturn nil\n\t}\n\n\tout := &OAUTHKey{}\n\to.DeepCopyInto(out)\n\n\treturn out\n}", "func (s *JobOutput) SetKey(v string) *JobOutput {\n\ts.Key = &v\n\treturn s\n}", "func NewKey() (key *Key, err error) {\n\tpriv, pub, err := native.GenerateKeyPair(\"\")\n\tif err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\n\treturn &Key{\n\t\tPriv: priv,\n\t\tPub: pub,\n\t}, nil\n}", "func (s *CreateJobOutput) SetKey(v string) *CreateJobOutput {\n\ts.Key = &v\n\treturn s\n}", "func (transaction *AccountCreateTransaction) SetKey(key Key) *AccountCreateTransaction {\n\ttransaction._RequireNotFrozen()\n\ttransaction.key = key\n\treturn transaction\n}", "func (s *S3ObjectSource) SetKey(v string) *S3ObjectSource {\n\ts.Key = &v\n\treturn s\n}", "func NewKey(key jwk.Key, kid string, exp, nbf *time.Time) *Key {\n\treturn &Key{\n\t\tKey: key,\n\t\tkid: kid,\n\t\texp: exp,\n\t\tnbf: nbf,\n\t}\n}", "func (a *Aes) SetKey(value string) *Aes 
{\n\ta.Key = value\n\treturn a\n}", "func (s *JobInput) SetKey(v string) *JobInput {\n\ts.Key = &v\n\treturn s\n}", "func (o *DeployKey) SetKey(v string) {\n\to.Key = &v\n}", "func (j *JetStreamRecord) SetKey(k string) *JetStreamRecord {\n\tj.key = k\n\treturn j\n}", "func (s *SyncBlockerContext) SetKey(v string) *SyncBlockerContext {\n\ts.Key = &v\n\treturn s\n}", "func (o *ChartDataPoint) SetKey(v string) {\n\to.Key = &v\n}", "func (s *TagSet) SetKey(v string) *TagSet {\n\ts.Key = &v\n\treturn s\n}", "func (n *Node) DeepKeyCopy() store.LocalKey {\n\treturn n.DeepCopy()\n}", "func (s *Encryption) SetKey(v string) *Encryption {\n\ts.Key = &v\n\treturn s\n}", "func (k *Key) GetKey() string {\n\tif k == nil || k.Key == nil {\n\t\treturn \"\"\n\t}\n\treturn *k.Key\n}", "func (s *SecurityKey) SetKey(v string) *SecurityKey {\n\ts.Key = &v\n\treturn s\n}", "func (s *Output_) SetKey(v string) *Output_ {\n\ts.Key = &v\n\treturn s\n}", "func NewKey(saddr, daddr util.Address, sport, dport uint16, topicName string, requestAPIKey, requestAPIVersion uint16) Key {\n\treturn Key{\n\t\tConnectionKey: types.NewConnectionKey(saddr, daddr, sport, dport),\n\t\tTopicName: topicName,\n\t\tRequestAPIKey: requestAPIKey,\n\t\tRequestVersion: requestAPIVersion,\n\t}\n}", "func NewKey(applicationKey string, apiKey string) Key {\n\treturn Key{applicationKey: applicationKey, apiKey: apiKey}\n}", "func NewKey(b []byte) Key {\n\tk := newKey(b)\n\t//\tif bytes.HasPrefix(k.object[:], specialPrefix) &&\n\t//\tk != Empty {\n\t//\t\treturn Invalid\n\t//\t}\n\treturn k\n}", "func (transaction *AccountUpdateTransaction) SetKey(key Key) *AccountUpdateTransaction {\n\ttransaction._RequireNotFrozen()\n\ttransaction.key = key\n\treturn transaction\n}", "func (k Keys) HashKey() interface{} { return k[0] }", "func NewKey(s *secret.SecretsItemResponse) *Key {\n\treturn &Key{\n\t\tUser: s.Values[secretTypes.User],\n\t\tIdentifier: s.Values[secretTypes.Identifier],\n\t\tPublicKeyData: 
s.Values[secretTypes.PublicKeyData],\n\t\tPublicKeyFingerprint: s.Values[secretTypes.PublicKeyFingerprint],\n\t\tPrivateKeyData: s.Values[secretTypes.PrivateKeyData],\n\t}\n}", "func (c *Client) SetKey(key string) *Client {\n\tc.Key = key\n\treturn c\n}", "func (o *GetJobEventsParams) SetKey(key *string) {\n\to.Key = key\n}", "func (instance *DSInstance) NewKey(kind string, name string, parent *datastore.Key) *datastore.Key {\n\tkey := datastore.NameKey(kind, name, parent)\n\tkey.Namespace = instance.namespace\n\treturn key\n}", "func (s *S3Location) SetKey(v string) *S3Location {\n\ts.Key = &v\n\treturn s\n}", "func NewKeyed(key []byte) hash.Hash {\n\td := new(digest)\n\td.key = key\n\td.Reset()\n\treturn d\n}", "func (s *AssociateSecurityKeyInput) SetKey(v string) *AssociateSecurityKeyInput {\n\ts.Key = &v\n\treturn s\n}", "func NewKey(expiry time.Time) (*KeyData, error) {\n\tkey := make([]byte, 16)\n\tif _, err := rand.Read(key); err != nil {\n\t\treturn nil, err\n\t}\n\treturn &KeyData{\n\t\tKey: key,\n\t\tExpiry: expiry,\n\t}, nil\n}", "func (key twofishKey) Key() []byte {\n\treturn key[:]\n}", "func (o *RemoveAPIKeyPrivilegeParams) SetKey(key string) {\n\to.Key = key\n}", "func NewKey(value uint64) Key {\n\treturn Key(value)\n}", "func (r KeyGenerator) NewKey() (Key, error) {\n\tif len(r.buffer) == 0 {\n\t\tgo func() {\n\t\t\tr.fetchKeys()\n\t\t}()\n\t}\n\n\tentry := <-r.buffer\n\treturn entry.key, entry.err\n}", "func (v *KeyValue_SetValueV2_Args) GetKey() (o Key) {\n\tif v != nil {\n\t\to = v.Key\n\t}\n\treturn\n}", "func (o *ApiKey) GetKey() string {\n\tif o == nil || o.Key == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Key\n}", "func (in *ApiKey) DeepCopy() *ApiKey {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ApiKey)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func NewKey(field string, value interface{}) *Key {\n\treturn &Key{\n\t\tfield: field,\n\t\tvalue: value,\n\t}\n}", "func (akv StringKeyValue) Key() string {\n\treturn 
akv.orig.Key\n}", "func (e Encoder) AppendKey(dst []byte, key string) []byte {\n\tif len(dst) < 1 {\n\t\tdst = e.AppendBeginMarker(dst)\n\t}\n\treturn e.AppendString(dst, key)\n}", "func (ƨ *DiffTestResult) Key(key *datastore.Key) *KeyedDiffTestResult {\n\treturn &KeyedDiffTestResult{\n\t\tDiffTestResult: ƨ,\n\t\tKey: key,\n\t}\n}", "func NewKey() (*rsa.PrivateKey, error) {\n\treturn rsa.GenerateKey(rand.Reader, 2048)\n}", "func (ck *CertKey) Key() []byte { return ck.key }", "func NewKey(id uint32, dport uint16, proto uint8, trafficDirection uint8) PolicyKey {\n\t// For now prefix length is derived from the proto and dport values\n\t// This will have to be exposed to the caller when port ranges are supported.\n\tprefixLen := StaticPrefixBits\n\tif proto != 0 {\n\t\tprefixLen += NexthdrBits\n\t\tif dport != 0 {\n\t\t\tprefixLen += DestPortBits\n\t\t}\n\t}\n\treturn PolicyKey{\n\t\tPrefixlen: prefixLen,\n\t\tIdentity: id,\n\t\tTrafficDirection: trafficDirection,\n\t\tNexthdr: proto,\n\t\tDestPortNetwork: byteorder.HostToNetwork16(dport),\n\t}\n}", "func (in *SSHKeyPair) DeepCopy() *SSHKeyPair {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SSHKeyPair)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o *CustomHostMetadataKey) GetKey() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.Key\n}", "func (key StepsCacheKey) Key() (string, error) {\n\treturn marshalAndHashStepsCacheKey(key, []string{})\n}", "func (k *Key) Equals(k1 *Key) bool {\n\tif k1 == nil {\n\t\treturn false\n\t}\n\treturn k.Rand == k1.Rand && k.Timestamp == k1.Timestamp && k.Counter == k1.Counter\n}", "func NewKey() *[KeySize]byte {\r\n\tp := randBytes(KeySize)\r\n\tif p == nil {\r\n\t\treturn nil\r\n\t}\r\n\tdefer zero(p)\r\n\tvar key [KeySize]byte\r\n\tcopy(key[:], p)\r\n\treturn &key\r\n}", "func New(key interface{}) (Key, error) {\n\tif key == nil {\n\t\treturn nil, errors.New(`jwk.New requires a non-nil key`)\n\t}\n\n\tvar ptr interface{}\n\tswitch v := 
key.(type) {\n\tcase rsa.PrivateKey:\n\t\tptr = &v\n\tcase rsa.PublicKey:\n\t\tptr = &v\n\tcase ecdsa.PrivateKey:\n\t\tptr = &v\n\tcase ecdsa.PublicKey:\n\t\tptr = &v\n\tdefault:\n\t\tptr = v\n\t}\n\n\tswitch rawKey := ptr.(type) {\n\tcase *rsa.PrivateKey:\n\t\tk := NewRSAPrivateKey()\n\t\tif err := k.FromRaw(rawKey); err != nil {\n\t\t\treturn nil, errors.Wrapf(err, `failed to initialize %T from %T`, k, rawKey)\n\t\t}\n\t\treturn k, nil\n\tcase *rsa.PublicKey:\n\t\tk := NewRSAPublicKey()\n\t\tif err := k.FromRaw(rawKey); err != nil {\n\t\t\treturn nil, errors.Wrapf(err, `failed to initialize %T from %T`, k, rawKey)\n\t\t}\n\t\treturn k, nil\n\tcase *ecdsa.PrivateKey:\n\t\tk := NewECDSAPrivateKey()\n\t\tif err := k.FromRaw(rawKey); err != nil {\n\t\t\treturn nil, errors.Wrapf(err, `failed to initialize %T from %T`, k, rawKey)\n\t\t}\n\t\treturn k, nil\n\tcase *ecdsa.PublicKey:\n\t\tk := NewECDSAPublicKey()\n\t\tif err := k.FromRaw(rawKey); err != nil {\n\t\t\treturn nil, errors.Wrapf(err, `failed to initialize %T from %T`, k, rawKey)\n\t\t}\n\t\treturn k, nil\n\tcase []byte:\n\t\tk := NewSymmetricKey()\n\t\tif err := k.FromRaw(rawKey); err != nil {\n\t\t\treturn nil, errors.Wrapf(err, `failed to initialize %T from %T`, k, rawKey)\n\t\t}\n\t\treturn k, nil\n\tdefault:\n\t\treturn nil, errors.Errorf(`invalid key type '%T' for jwk.New`, key)\n\t}\n}" ]
[ "0.73871976", "0.714478", "0.709059", "0.6912021", "0.6843181", "0.6613832", "0.65244293", "0.64699113", "0.64592344", "0.642474", "0.64237607", "0.64236516", "0.6421394", "0.641982", "0.6363098", "0.63512987", "0.63231707", "0.6312608", "0.6305274", "0.6305012", "0.6303637", "0.62961084", "0.6291304", "0.62858695", "0.62683356", "0.62473667", "0.62473243", "0.62467307", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6246343", "0.6238299", "0.62236613", "0.62182873", "0.62101257", "0.6188641", "0.61846113", "0.6170784", "0.614482", "0.61425865", "0.61159", "0.61135185", "0.6088872", "0.6074415", "0.60695726", "0.6068463", "0.6065476", "0.60269314", "0.60262793", "0.60223323", "0.60214", "0.600413", "0.59914094", "0.5965155", "0.5954169", "0.59448385", "0.59228665", "0.59169436", "0.58994436", "0.58986753", "0.5885643", "0.58672106", "0.5866227", "0.58636427", "0.58441556", "0.58260536", "0.5819937", "0.58187777", "0.581428", "0.58120704", "0.5809726", "0.5808344", "0.5794041", "0.57595754", "0.5742875", "0.5741434", "0.57380915", "0.5733938", "0.5718384", "0.5708727", "0.5704226", "0.57038975", "0.569383", "0.56892943", "0.56872946" ]
0.862469
1
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be nonnil.
func (in *ProviderConfiguration) DeepCopyInto(out *ProviderConfiguration) { *out = *in if in.AESGCM != nil { in, out := &in.AESGCM, &out.AESGCM *out = new(AESConfiguration) (*in).DeepCopyInto(*out) } if in.AESCBC != nil { in, out := &in.AESCBC, &out.AESCBC *out = new(AESConfiguration) (*in).DeepCopyInto(*out) } if in.Secretbox != nil { in, out := &in.Secretbox, &out.Secretbox *out = new(SecretboxConfiguration) (*in).DeepCopyInto(*out) } if in.Identity != nil { in, out := &in.Identity, &out.Identity *out = new(IdentityConfiguration) **out = **in } if in.KMS != nil { in, out := &in.KMS, &out.KMS *out = new(KMSConfiguration) (*in).DeepCopyInto(*out) } return }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (r *RunInfo) DeepCopyInto(out *RunInfo) {\n\t*out = *r\n}", "func (in *Base) DeepCopyInto(out *Base) {\n\t*out = *in\n\treturn\n}", "func (in *ForkObject) DeepCopyInto(out *ForkObject) {\n\t*out = *in\n}", "func (in *TargetObjectInfo) DeepCopyInto(out *TargetObjectInfo) {\n\t*out = *in\n}", "func (in *DebugObjectInfo) DeepCopyInto(out *DebugObjectInfo) {\n\t*out = *in\n}", "func (in *Input) DeepCopyInto(out *Input) {\n\t*out = *in\n}", "func (u *SSN) DeepCopyInto(out *SSN) {\n\t*out = *u\n}", "func (in *ExistPvc) DeepCopyInto(out *ExistPvc) {\n\t*out = *in\n}", "func (in *DockerStep) DeepCopyInto(out *DockerStep) {\n\t*out = *in\n\tif in.Inline != nil {\n\t\tin, out := &in.Inline, &out.Inline\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tout.Auth = in.Auth\n\treturn\n}", "func (in *Container) DeepCopyInto(out *Container) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.Command != nil {\n\t\tin, out := &in.Command, &out.Command\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.LifeCycleScript != nil {\n\t\tin, out := &in.LifeCycleScript, &out.LifeCycleScript\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *RuntimeRef) DeepCopyInto(out *RuntimeRef) {\n\t*out = *in\n}", "func (in *Ibft2) DeepCopyInto(out *Ibft2) {\n\t*out = *in\n\treturn\n}", "func (in *TestResult) DeepCopyInto(out *TestResult) {\n\t*out = *in\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *Haproxy) DeepCopyInto(out *Haproxy) {\n\t*out = *in\n\treturn\n}", "func (in *SSH) DeepCopyInto(out *SSH) {\n\t*out = *in\n\treturn\n}", "func (in *Runtime) DeepCopyInto(out *Runtime) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) 
DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (b *Base64) DeepCopyInto(out *Base64) {\n\t*out = *b\n}", "func (in *EventDependencyTransformer) DeepCopyInto(out *EventDependencyTransformer) {\n\t*out = *in\n\treturn\n}", "func (in *StageOutput) DeepCopyInto(out *StageOutput) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Dependent) DeepCopyInto(out *Dependent) {\n\t*out = *in\n\treturn\n}", "func (in *GitFileGeneratorItem) DeepCopyInto(out *GitFileGeneratorItem) {\n\t*out = *in\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *AnsibleStep) DeepCopyInto(out *AnsibleStep) {\n\t*out = *in\n\treturn\n}", "func (in *Forks) DeepCopyInto(out *Forks) {\n\t*out = *in\n\tif in.DAO != nil {\n\t\tin, out := &in.DAO, &out.DAO\n\t\t*out = new(uint)\n\t\t**out = **in\n\t}\n}", "func (in *ContainerPort) DeepCopyInto(out *ContainerPort) {\n\t*out = *in\n}", "func (in *General) DeepCopyInto(out *General) {\n\t*out = *in\n\treturn\n}", "func (in *IsoContainer) DeepCopyInto(out *IsoContainer) {\n\t*out = *in\n}", "func (in *Git) DeepCopyInto(out *Git) {\n\t*out = *in\n\treturn\n}", "func (in *ConfigFile) DeepCopyInto(out *ConfigFile) {\n\t*out = *in\n}", "func (in *BackupProgress) DeepCopyInto(out *BackupProgress) {\n\t*out = *in\n}", "func (in *DataDisk) DeepCopyInto(out *DataDisk) {\n\t*out = *in\n}", "func (in *PhaseStep) DeepCopyInto(out *PhaseStep) {\n\t*out = *in\n}", "func (u *MAC) DeepCopyInto(out *MAC) {\n\t*out = *u\n}", "func 
(in *Variable) DeepCopyInto(out *Variable) {\n\t*out = *in\n}", "func (in *RestoreProgress) DeepCopyInto(out *RestoreProgress) {\n\t*out = *in\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *NamespacedObjectReference) DeepCopyInto(out *NamespacedObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Path) DeepCopyInto(out *Path) {\n\t*out = *in\n\treturn\n}", "func (in *GitDirectoryGeneratorItem) DeepCopyInto(out *GitDirectoryGeneratorItem) {\n\t*out = *in\n}", "func (in *NamePath) DeepCopyInto(out *NamePath) {\n\t*out = *in\n\treturn\n}", "func (in *ConsoleCreateObj) DeepCopyInto(out *ConsoleCreateObj) {\n\t*out = *in\n}", "func (in *UsedPipelineRun) DeepCopyInto(out *UsedPipelineRun) {\n\t*out = *in\n}", "func (in *BuildTemplate) DeepCopyInto(out *BuildTemplate) {\n\t*out = *in\n\tif in.Cmd != nil {\n\t\tin, out := &in.Cmd, &out.Cmd\n\t\t*out = make([]BuildTemplateStep, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n}", "func (in *ObjectInfo) DeepCopyInto(out *ObjectInfo) {\n\t*out = *in\n\tout.GroupVersionKind = in.GroupVersionKind\n\treturn\n}", "func (in *Files) DeepCopyInto(out *Files) {\n\t*out = *in\n}", "func (in *Source) DeepCopyInto(out *Source) {\n\t*out = *in\n\tif in.Dependencies != nil {\n\t\tin, out := &in.Dependencies, &out.Dependencies\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MavenRepositories != nil {\n\t\tin, out := &in.MavenRepositories, &out.MavenRepositories\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\treturn\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = *in\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = 
*in\n}", "func (in *StackBuild) DeepCopyInto(out *StackBuild) {\n\t*out = *in\n\treturn\n}", "func (in *BuildTaskRef) DeepCopyInto(out *BuildTaskRef) {\n\t*out = *in\n\treturn\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *PathInfo) DeepCopyInto(out *PathInfo) {\n\t*out = *in\n}", "func (in *PoA) DeepCopyInto(out *PoA) {\n\t*out = *in\n}", "func (in *Section) DeepCopyInto(out *Section) {\n\t*out = *in\n\tif in.SecretRefs != nil {\n\t\tin, out := &in.SecretRefs, &out.SecretRefs\n\t\t*out = make([]SecretReference, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Files != nil {\n\t\tin, out := &in.Files, &out.Files\n\t\t*out = make([]FileMount, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n}", "func (in *DNSSelection) DeepCopyInto(out *DNSSelection) {\n\t*out = *in\n\tif in.Include != nil {\n\t\tin, out := &in.Include, &out.Include\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Exclude != nil {\n\t\tin, out := &in.Exclude, &out.Exclude\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ReleaseVersion) DeepCopyInto(out *ReleaseVersion) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Command) DeepCopyInto(out *Command) {\n\t*out = *in\n\tif in.Flags != nil {\n\t\tin, out := &in.Flags, &out.Flags\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Value != nil {\n\t\tin, out := &in.Value, &out.Value\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *PathRule) DeepCopyInto(out *PathRule) {\n\t*out = *in\n\treturn\n}", "func (in *DockerLifecycleData) DeepCopyInto(out *DockerLifecycleData) {\n\t*out 
= *in\n}", "func (in *RunScriptStepConfig) DeepCopyInto(out *RunScriptStepConfig) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *Checksum) DeepCopyInto(out *Checksum) {\n\t*out = *in\n}", "func (in *DomainNameOutput) DeepCopyInto(out *DomainNameOutput) {\n\t*out = *in\n}", "func (in *InterfaceStruct) DeepCopyInto(out *InterfaceStruct) {\n\t*out = *in\n\tif in.val != nil {\n\t\tin, out := &in.val, &out.val\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Ref) DeepCopyInto(out *Ref) {\n\t*out = *in\n}", "func (in *MemorySpec) DeepCopyInto(out *MemorySpec) {\n\t*out = *in\n}", "func (in *BuildJenkinsInfo) DeepCopyInto(out *BuildJenkinsInfo) {\n\t*out = *in\n\treturn\n}", "func (in *VirtualDatabaseBuildObject) DeepCopyInto(out *VirtualDatabaseBuildObject) {\n\t*out = *in\n\tif in.Incremental != nil {\n\t\tin, out := &in.Incremental, &out.Incremental\n\t\t*out = new(bool)\n\t\t**out = **in\n\t}\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]v1.EnvVar, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\tout.Git = in.Git\n\tin.Source.DeepCopyInto(&out.Source)\n\tif in.Webhooks != nil {\n\t\tin, out := &in.Webhooks, &out.Webhooks\n\t\t*out = make([]WebhookSecret, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *MaintenanceInfo) DeepCopyInto(out *MaintenanceInfo) {\n\t*out = *in\n\treturn\n}", "func (in *KopsNode) DeepCopyInto(out *KopsNode) {\n\t*out = *in\n\treturn\n}", "func (in *FalconAPI) DeepCopyInto(out *FalconAPI) {\n\t*out = *in\n}", "func (in *EBS) DeepCopyInto(out *EBS) {\n\t*out = *in\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n\treturn\n}", "func (in *Empty) DeepCopyInto(out *Empty) {\n\t*out = *in\n\tout.XXX_NoUnkeyedLiteral = in.XXX_NoUnkeyedLiteral\n\tif in.XXX_unrecognized != nil {\n\t\tin, out := 
&in.XXX_unrecognized, &out.XXX_unrecognized\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ComponentDistGit) DeepCopyInto(out *ComponentDistGit) {\n\t*out = *in\n\treturn\n}", "func (in *Memory) DeepCopyInto(out *Memory) {\n\t*out = *in\n\tout.Required = in.Required.DeepCopy()\n}", "func (in *Persistence) DeepCopyInto(out *Persistence) {\n\t*out = *in\n\tout.Size = in.Size.DeepCopy()\n\treturn\n}", "func (in *ManagedDisk) DeepCopyInto(out *ManagedDisk) {\n\t*out = *in\n}", "func (e *Email) DeepCopyInto(out *Email) {\n\t*out = *e\n}", "func (in *ImageInfo) DeepCopyInto(out *ImageInfo) {\n\t*out = *in\n}", "func (in *ShootRef) DeepCopyInto(out *ShootRef) {\n\t*out = *in\n}", "func (in *N3000Fpga) DeepCopyInto(out *N3000Fpga) {\n\t*out = *in\n}", "func (in *NetflowType) DeepCopyInto(out *NetflowType) {\n\t*out = *in\n\treturn\n}", "func (in *BuiltInAdapter) DeepCopyInto(out *BuiltInAdapter) {\n\t*out = *in\n}", "func (in *Node) DeepCopyInto(out *Node) {\n\t*out = *in\n\tif in.FailStatus != nil {\n\t\tin, out := &in.FailStatus, &out.FailStatus\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MigratingSlots != nil {\n\t\tin, out := &in.MigratingSlots, &out.MigratingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.ImportingSlots != nil {\n\t\tin, out := &in.ImportingSlots, &out.ImportingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n}", "func (in *CPUSpec) DeepCopyInto(out *CPUSpec) {\n\t*out = *in\n}", "func (in *LoopState) DeepCopyInto(out *LoopState) {\n\t*out = *in\n}" ]
[ "0.8215289", "0.81280124", "0.81039286", "0.80862963", "0.8083811", "0.80673146", "0.8064545", "0.8026454", "0.8012046", "0.7996313", "0.799204", "0.79887754", "0.7987097", "0.7986994", "0.7986994", "0.79854053", "0.7975989", "0.7972486", "0.79695636", "0.79695636", "0.79695636", "0.7967528", "0.79624444", "0.7961954", "0.7945754", "0.7945754", "0.7944541", "0.79428566", "0.7942668", "0.7942668", "0.7940451", "0.793851", "0.7936731", "0.79294837", "0.79252166", "0.7915377", "0.7911627", "0.7911138", "0.7909384", "0.790913", "0.7908773", "0.7905649", "0.79050326", "0.7904594", "0.7904594", "0.7904235", "0.79036915", "0.79020816", "0.78988886", "0.78977424", "0.7891376", "0.7891024", "0.7889831", "0.78890276", "0.7887135", "0.788637", "0.7885264", "0.7885264", "0.7884786", "0.7880785", "0.78745943", "0.78745943", "0.78745407", "0.78734446", "0.78724426", "0.78713626", "0.78713554", "0.78652424", "0.7863321", "0.7863321", "0.7863321", "0.7863293", "0.7862628", "0.7860664", "0.7858556", "0.785785", "0.78571486", "0.7851332", "0.78453225", "0.78448987", "0.78415996", "0.7837483", "0.7837037", "0.7836443", "0.78351796", "0.78329664", "0.7831094", "0.7829445", "0.7826582", "0.7824499", "0.78242797", "0.78227437", "0.78192484", "0.7818843", "0.78128535", "0.7812535", "0.78111476", "0.78111106", "0.781107", "0.78093034", "0.7808775" ]
0.0
-1
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ProviderConfiguration.
func (in *ProviderConfiguration) DeepCopy() *ProviderConfiguration { if in == nil { return nil } out := new(ProviderConfiguration) in.DeepCopyInto(out) return out }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (in *Provider) DeepCopy() *Provider {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Provider)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Provider) DeepCopy() *Provider {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Provider)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ProviderSettings) DeepCopy() *ProviderSettings {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ProviderSettings)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ProviderDescription) DeepCopy() *ProviderDescription {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ProviderDescription)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ProviderRef) DeepCopy() *ProviderRef {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ProviderRef)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ProviderSettingsSpec) DeepCopy() *ProviderSettingsSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ProviderSettingsSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AzureConfigStatusProvider) DeepCopy() *AzureConfigStatusProvider {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AzureConfigStatusProvider)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AuthorizationPolicy_ExtensionProvider) DeepCopy() *AuthorizationPolicy_ExtensionProvider {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AuthorizationPolicy_ExtensionProvider)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func ProviderCfg(cfg config.Configurator) (*Config, func(), error) {\n\tc := &Config{\n\t\tinvoker: invoker.NewInvoker(),\n\t}\n\te := cfg.UnmarshalKeyOnReload(UnmarshalKey, c)\n\treturn c, func() {}, e\n}", "func (client *AccessGovernanceCPClient) ConfigurationProvider() *common.ConfigurationProvider {\n\treturn client.config\n}", "func (c *config) WithProvider(provider string) Config {\n\tc.provider = provider\n\treturn c\n}", "func NewConfigProvider(options ...ConfigProviderOption) ConfigProvider {\n\treturn newConfigProvider(options...)\n}", 
"func NewConfigProvider() *ConfigProvider {\n\treturn &ConfigProvider{}\n}", "func (client *NotificationDataPlaneClient) ConfigurationProvider() *common.ConfigurationProvider {\n\treturn client.config\n}", "func (in *MetricProvider) DeepCopy() *MetricProvider {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MetricProvider)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (client *GovernanceClient) ConfigurationProvider() *common.ConfigurationProvider {\n\treturn client.config\n}", "func (client *DnsClient) ConfigurationProvider() *common.ConfigurationProvider {\n\treturn client.config\n}", "func (client *IdentityClient) ConfigurationProvider() *common.ConfigurationProvider {\n\treturn client.config\n}", "func (in *ExternalDNSInfobloxProviderOptions) DeepCopy() *ExternalDNSInfobloxProviderOptions {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ExternalDNSInfobloxProviderOptions)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (client *RoverClusterClient) ConfigurationProvider() *common.ConfigurationProvider {\n\treturn client.config\n}", "func New(p Provider) (*Config, error) {\n\tm, err := p.Provide()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tc := &Config{\n\t\tm: m,\n\t}\n\treturn c, nil\n}", "func (in *ExternalDNSBlueCatProviderOptions) DeepCopy() *ExternalDNSBlueCatProviderOptions {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ExternalDNSBlueCatProviderOptions)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretProviderClass) DeepCopy() *SecretProviderClass {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretProviderClass)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (p *ProviderConfig) AddProviderConfig(name string, provider interface{}) error {\n\tif utils.IsOneOf(name, p.GetAllProviderNames()) {\n\t\treturn fmt.Errorf(\"the provider name '%s' is already used\", name)\n\t}\n\n\tswitch providerSpec := provider.(type) {\n\tcase *models.AlibabaCloudSpec:\n\t\tif p.Alibaba == nil {\n\t\t\tp.Alibaba = 
make(map[string]*models.AlibabaCloudSpec)\n\t\t}\n\t\tp.Alibaba[name] = providerSpec\n\tcase *models.AnexiaCloudSpec:\n\t\tif p.Anexia == nil {\n\t\t\tp.Anexia = make(map[string]*models.AnexiaCloudSpec)\n\t\t}\n\t\tp.Anexia[name] = providerSpec\n\tcase *models.AWSCloudSpec:\n\t\tif p.Aws == nil {\n\t\t\tp.Aws = make(map[string]*models.AWSCloudSpec)\n\t\t}\n\t\tp.Aws[name] = providerSpec\n\tcase *models.AzureCloudSpec:\n\t\tif p.Azure == nil {\n\t\t\tp.Azure = make(map[string]*models.AzureCloudSpec)\n\t\t}\n\t\tp.Azure[name] = providerSpec\n\tcase *models.DigitaloceanCloudSpec:\n\t\tif p.Digitalocean == nil {\n\t\t\tp.Digitalocean = make(map[string]*models.DigitaloceanCloudSpec)\n\t\t}\n\t\tp.Digitalocean[name] = providerSpec\n\tcase *models.FakeCloudSpec:\n\t\tif p.Fake == nil {\n\t\t\tp.Fake = make(map[string]*models.FakeCloudSpec)\n\t\t}\n\t\tp.Fake[name] = providerSpec\n\tcase *models.GCPCloudSpec:\n\t\tif p.Gcp == nil {\n\t\t\tp.Gcp = make(map[string]*models.GCPCloudSpec)\n\t\t}\n\t\tp.Gcp[name] = providerSpec\n\tcase *models.HetznerCloudSpec:\n\t\tif p.Hetzner == nil {\n\t\t\tp.Hetzner = make(map[string]*models.HetznerCloudSpec)\n\t\t}\n\t\tp.Hetzner[name] = providerSpec\n\tcase *models.KubevirtCloudSpec:\n\t\tif p.Kubevirt == nil {\n\t\t\tp.Kubevirt = make(map[string]*models.KubevirtCloudSpec)\n\t\t}\n\t\tp.Kubevirt[name] = providerSpec\n\tcase *models.OpenstackCloudSpec:\n\t\tif p.Openstack == nil {\n\t\t\tp.Openstack = make(map[string]*models.OpenstackCloudSpec)\n\t\t}\n\t\tp.Openstack[name] = providerSpec\n\tcase *models.PacketCloudSpec:\n\t\tif p.Packet == nil {\n\t\t\tp.Packet = make(map[string]*models.PacketCloudSpec)\n\t\t}\n\t\tp.Packet[name] = providerSpec\n\tcase *models.VSphereCloudSpec:\n\t\tif p.Vsphere == nil {\n\t\t\tp.Vsphere = make(map[string]*models.VSphereCloudSpec)\n\t\t}\n\t\tp.Vsphere[name] = providerSpec\n\tdefault:\n\t\treturn fmt.Errorf(\"failed to determine the correct cloudSpecType\")\n\t}\n\n\treturn nil\n}", "func (exporter 
*Exporter) ExportProviderConfiguration(region string) {\n\tfmt.Printf(configurationTemplateProvider, region)\n}", "func (client *ArtifactsClient) ConfigurationProvider() *common.ConfigurationProvider {\n\treturn client.config\n}", "func NewProvider() *ProviderConfig {\n\tproviderConfig := &ProviderConfig{\n\t\tAlibaba: make(map[string]*models.AlibabaCloudSpec),\n\t\tAnexia: make(map[string]*models.AnexiaCloudSpec),\n\t\tAws: make(map[string]*models.AWSCloudSpec),\n\t\tAzure: make(map[string]*models.AzureCloudSpec),\n\t\tDigitalocean: make(map[string]*models.DigitaloceanCloudSpec),\n\t\tFake: make(map[string]*models.FakeCloudSpec),\n\t\tGcp: make(map[string]*models.GCPCloudSpec),\n\t\tHetzner: make(map[string]*models.HetznerCloudSpec),\n\t\tKubevirt: make(map[string]*models.KubevirtCloudSpec),\n\t\tOpenstack: make(map[string]*models.OpenstackCloudSpec),\n\t\tPacket: make(map[string]*models.PacketCloudSpec),\n\t\tVsphere: make(map[string]*models.VSphereCloudSpec),\n\t}\n\n\tproviderConfig.Alibaba[\"Alibaba\"] = newAlibabaCloudSpec()\n\tproviderConfig.Anexia[\"Anexia\"] = newAnexiaCloudSpec()\n\tproviderConfig.Aws[\"Aws\"] = newAWSCloudSpec()\n\tproviderConfig.Azure[\"Azure\"] = newAzureCloudSpec()\n\tproviderConfig.Digitalocean[\"Digitalocean\"] = newDigitaloceanCloudSpec()\n\tproviderConfig.Fake[\"Fake\"] = newFakeCloudSpec()\n\tproviderConfig.Gcp[\"Gcp\"] = newGCPCloudSpec()\n\tproviderConfig.Hetzner[\"Hetzner\"] = newHetznerCloudSpec()\n\tproviderConfig.Kubevirt[\"Kubevirt\"] = newKubevirtCloudSpec()\n\tproviderConfig.Openstack[\"Openstack\"] = newOpenstackCloudSpec()\n\tproviderConfig.Packet[\"Packet\"] = newPacketCloudSpec()\n\tproviderConfig.Vsphere[\"Vsphere\"] = newVSphereCloudSpec()\n\n\treturn providerConfig\n}", "func (client *StorageGatewayClient) ConfigurationProvider() *common.ConfigurationProvider {\n\treturn client.config\n}", "func (client *OccMetricsClient) ConfigurationProvider() *common.ConfigurationProvider {\n\treturn client.config\n}", "func 
(mg *SQLStoredProcedure) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (mg *Firewall) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (in *PrivateconnectionVpcPeeringConfig) DeepCopy() *PrivateconnectionVpcPeeringConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PrivateconnectionVpcPeeringConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (client *BastionClient) ConfigurationProvider() *common.ConfigurationProvider {\n\treturn client.config\n}", "func (p Provider) GetConfig() model.ProviderConfig {\n\treturn p.config\n}", "func LoadConfigProvider(appName string) *viper.Viper {\n\treturn readViperConfig(appName)\n}", "func LoadConfigProvider(appName string) *viper.Viper {\n\treturn readViperConfig(appName)\n}", "func WithProvider(provider Provider) Option {\n\treturn func(cfg *config) {\n\t\tcfg.provider = provider\n\t}\n}", "func (client *ComplianceDocClient) ConfigurationProvider() *common.ConfigurationProvider {\n\treturn client.config\n}", "func (in *SecretProviderClassSpec) DeepCopy() *SecretProviderClassSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretProviderClassSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (mg *EC2NetworkInterface) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *InstanceProfile) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (in *KubemanagerConfig) DeepCopy() *KubemanagerConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubemanagerConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (mg *IPv4CIDRBlockAssociation) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *GremlinGraph) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func ProviderSpec() 
*ProviderSpecApplyConfiguration {\n\treturn &ProviderSpecApplyConfiguration{}\n}", "func (mg *GremlinGraph) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (mg *Subnetwork) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *Network) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *EC2LaunchTemplate) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *Policy) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *Subnet) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (in *DevicePluginConfig) DeepCopy() *DevicePluginConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DevicePluginConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (mg *Firewall) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (mg *VpcPeeringConnection) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (in *DeviceSecretVerifierConfigType) DeepCopy() *DeviceSecretVerifierConfigType {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DeviceSecretVerifierConfigType)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (mg *UserGroupMembership) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func LoadConfigProvider(appName string) Provider {\r\n\treturn readViperConfig(appName)\r\n}", "func (mg *SQLStoredProcedure) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *Group) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func NewProvider(cfg Config) *Provider {\n\treturn &Provider{\n\t\tConfig: cfg,\n\t}\n}", "func (in 
*EndpointConfigurationParameters) DeepCopy() *EndpointConfigurationParameters {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EndpointConfigurationParameters)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (mg *NotebookWorkspace) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *EC2NetworkInterface) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (mg *Instance) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *Instance) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *InstanceProfile) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (mg *GroupPolicyAttachment) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *SecurityGroupRule) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (mg *Network) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (mg *SecurityGroup) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (in *PrivateLinkServiceIpConfiguration) DeepCopy() *PrivateLinkServiceIpConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := 
new(PrivateLinkServiceIpConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func ProvidersConfig(t *testing.T) *config.Config {\n\tconst (\n\t\ttestClientKey = \"provider-test-client-key\"\n\t\ttestSecret = \"provider-test-secret\"\n\t\ttestCallback = \"http://auth.exmaple.com/test/callback\"\n\t)\n\tsetEnv(t, config.Auth0DomainEnv, \"example.com\")\n\tsetEnv(t, config.OpenIDConnectURLEnv, MockOpenIDConnect(t))\n\tsetEnv(t, config.AzureADTenantEnv, string(azureadv2.CommonTenant))\n\tvar testScopes = []string{\"test-scope-1\", \"test-scope-2\"}\n\tc := Config(t)\n\ta := config.Authorization{\n\t\tProviders: map[provider.Name]config.Provider{},\n\t}\n\ta.UseInternal = true\n\tfor name := range provider.External {\n\t\ta.Providers[name] = config.Provider{\n\t\t\tClientKey: testClientKey,\n\t\t\tSecret: testSecret,\n\t\t\tCallbackURL: testCallback,\n\t\t\tScopes: testScopes,\n\t\t}\n\t}\n\t// FIXME: core/auth/providers.go:162\n\tt.Log(\"skipping \", provider.Apple)\n\tdelete(a.Providers, provider.Apple)\n\tc.Authorization = a\n\treturn c\n}", "func (mg *Policy) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (mg *OpenIDConnectProvider) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (in *EKSManagedConfiguration) DeepCopy() *EKSManagedConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EKSManagedConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KalmConfig) DeepCopy() *KalmConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KalmConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (mg *VpcEndpoint) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *EC2LaunchTemplate) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (mg *Group) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = 
r\n}", "func (mg *GremlinDatabase) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (store *credentialConfig) Provider() credentials.Provider {\n\treturn credentials.UseKey(store.getCredentialReader()).Passphrase(store.PassphraseReader())\n}", "func (mg *Subnetwork) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (mg *Router) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (mg *Instance) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (mg *Instance) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (mg *ManagedSSLCertificate) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func CfgProviderConfig(providerConfig string) ManagerConfigOpt {\n\treturn func(config *ManagerConfig) {\n\t\tconfig.ProviderConfig = providerConfig\n\t}\n}", "func (mg *Subnet) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (mg *AccessKey) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *Router) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (mg *GremlinDatabase) SetProviderConfigReference(r *xpv1.Reference) {\n\tmg.Spec.ProviderConfigReference = r\n}", "func (in *SecretEngineConfiguration) DeepCopy() *SecretEngineConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretEngineConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ExternalDNSAWSProviderOptions) DeepCopy() *ExternalDNSAWSProviderOptions {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ExternalDNSAWSProviderOptions)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (mg *VpcPeeringConnection) SetProviderConfigReference(r *xpv1.Reference) 
{\n\tmg.Spec.ProviderConfigReference = r\n}", "func (in *PrivateEndpointIPConfiguration) DeepCopy() *PrivateEndpointIPConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PrivateEndpointIPConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (mg *Address) GetProviderConfigReference() *xpv1.Reference {\n\treturn mg.Spec.ProviderConfigReference\n}", "func (proxy *StandAloneProxyConfig) DeepCopy() *StandAloneProxyConfig {\n\tif proxy == nil {\n\t\treturn nil\n\t}\n\tcloned := new(StandAloneProxyConfig)\n\tcloned.proxyCredentials = make(map[string]*ProxyUser)\n\tcloned.managementServers = make(map[url.URL]*ManagementServer)\n\tcloned.managedArrays = make(map[string]*StorageArray)\n\tfor key, value := range proxy.managedArrays {\n\t\tarray := *value\n\t\tcloned.managedArrays[key] = &array\n\t}\n\tfor key, value := range proxy.managementServers {\n\t\tcloned.managementServers[key] = value.DeepCopy()\n\t}\n\tfor key, value := range proxy.proxyCredentials {\n\t\tcreds := *value\n\t\tcloned.proxyCredentials[key] = &creds\n\t}\n\treturn cloned\n}" ]
[ "0.65710604", "0.65710604", "0.65028113", "0.63433236", "0.5991025", "0.59304255", "0.58877087", "0.5869135", "0.57617694", "0.5751949", "0.56482095", "0.56043553", "0.5583803", "0.55688334", "0.55674565", "0.5523267", "0.5475685", "0.5468203", "0.5455061", "0.54211265", "0.5414148", "0.5398919", "0.5368394", "0.5357251", "0.53501856", "0.5343473", "0.53274935", "0.5324562", "0.52237767", "0.522306", "0.52222013", "0.5209832", "0.5209566", "0.51961744", "0.5191946", "0.5191946", "0.5179865", "0.5162668", "0.5146989", "0.51357234", "0.51336485", "0.51217353", "0.51075643", "0.5098117", "0.5094373", "0.508392", "0.5080976", "0.5078239", "0.50771576", "0.50686216", "0.5066181", "0.5062687", "0.5050596", "0.5046654", "0.5034266", "0.5030429", "0.5023633", "0.50200176", "0.5016167", "0.5010959", "0.50093627", "0.5006649", "0.50061756", "0.50047976", "0.50047976", "0.4998675", "0.4980776", "0.4976389", "0.4970914", "0.4970914", "0.4970914", "0.49694198", "0.49692383", "0.4968876", "0.49681914", "0.4962657", "0.49622768", "0.49617374", "0.4954744", "0.4953938", "0.4952861", "0.49484053", "0.49439412", "0.49420917", "0.49418384", "0.49405605", "0.49396357", "0.49396357", "0.49384362", "0.49373424", "0.4932232", "0.49300846", "0.4922397", "0.49210003", "0.4918259", "0.4911395", "0.49106824", "0.49105853", "0.49082574", "0.49065247" ]
0.8349167
0
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be nonnil.
func (in *ResourceConfiguration) DeepCopyInto(out *ResourceConfiguration) { *out = *in if in.Resources != nil { in, out := &in.Resources, &out.Resources *out = make([]string, len(*in)) copy(*out, *in) } if in.Providers != nil { in, out := &in.Providers, &out.Providers *out = make([]ProviderConfiguration, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } } return }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (r *RunInfo) DeepCopyInto(out *RunInfo) {\n\t*out = *r\n}", "func (in *Base) DeepCopyInto(out *Base) {\n\t*out = *in\n\treturn\n}", "func (in *ForkObject) DeepCopyInto(out *ForkObject) {\n\t*out = *in\n}", "func (in *TargetObjectInfo) DeepCopyInto(out *TargetObjectInfo) {\n\t*out = *in\n}", "func (in *DebugObjectInfo) DeepCopyInto(out *DebugObjectInfo) {\n\t*out = *in\n}", "func (in *Input) DeepCopyInto(out *Input) {\n\t*out = *in\n}", "func (u *SSN) DeepCopyInto(out *SSN) {\n\t*out = *u\n}", "func (in *ExistPvc) DeepCopyInto(out *ExistPvc) {\n\t*out = *in\n}", "func (in *DockerStep) DeepCopyInto(out *DockerStep) {\n\t*out = *in\n\tif in.Inline != nil {\n\t\tin, out := &in.Inline, &out.Inline\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tout.Auth = in.Auth\n\treturn\n}", "func (in *Container) DeepCopyInto(out *Container) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.Command != nil {\n\t\tin, out := &in.Command, &out.Command\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.LifeCycleScript != nil {\n\t\tin, out := &in.LifeCycleScript, &out.LifeCycleScript\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *RuntimeRef) DeepCopyInto(out *RuntimeRef) {\n\t*out = *in\n}", "func (in *Ibft2) DeepCopyInto(out *Ibft2) {\n\t*out = *in\n\treturn\n}", "func (in *TestResult) DeepCopyInto(out *TestResult) {\n\t*out = *in\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *Haproxy) DeepCopyInto(out *Haproxy) {\n\t*out = *in\n\treturn\n}", "func (in *SSH) DeepCopyInto(out *SSH) {\n\t*out = *in\n\treturn\n}", "func (in *Runtime) DeepCopyInto(out *Runtime) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) 
DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (b *Base64) DeepCopyInto(out *Base64) {\n\t*out = *b\n}", "func (in *EventDependencyTransformer) DeepCopyInto(out *EventDependencyTransformer) {\n\t*out = *in\n\treturn\n}", "func (in *StageOutput) DeepCopyInto(out *StageOutput) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Dependent) DeepCopyInto(out *Dependent) {\n\t*out = *in\n\treturn\n}", "func (in *GitFileGeneratorItem) DeepCopyInto(out *GitFileGeneratorItem) {\n\t*out = *in\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *AnsibleStep) DeepCopyInto(out *AnsibleStep) {\n\t*out = *in\n\treturn\n}", "func (in *Forks) DeepCopyInto(out *Forks) {\n\t*out = *in\n\tif in.DAO != nil {\n\t\tin, out := &in.DAO, &out.DAO\n\t\t*out = new(uint)\n\t\t**out = **in\n\t}\n}", "func (in *ContainerPort) DeepCopyInto(out *ContainerPort) {\n\t*out = *in\n}", "func (in *General) DeepCopyInto(out *General) {\n\t*out = *in\n\treturn\n}", "func (in *IsoContainer) DeepCopyInto(out *IsoContainer) {\n\t*out = *in\n}", "func (in *Git) DeepCopyInto(out *Git) {\n\t*out = *in\n\treturn\n}", "func (in *ConfigFile) DeepCopyInto(out *ConfigFile) {\n\t*out = *in\n}", "func (in *BackupProgress) DeepCopyInto(out *BackupProgress) {\n\t*out = *in\n}", "func (in *DataDisk) DeepCopyInto(out *DataDisk) {\n\t*out = *in\n}", "func (in *PhaseStep) DeepCopyInto(out *PhaseStep) {\n\t*out = *in\n}", "func (u *MAC) DeepCopyInto(out *MAC) {\n\t*out = *u\n}", "func 
(in *Variable) DeepCopyInto(out *Variable) {\n\t*out = *in\n}", "func (in *RestoreProgress) DeepCopyInto(out *RestoreProgress) {\n\t*out = *in\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *NamespacedObjectReference) DeepCopyInto(out *NamespacedObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Path) DeepCopyInto(out *Path) {\n\t*out = *in\n\treturn\n}", "func (in *GitDirectoryGeneratorItem) DeepCopyInto(out *GitDirectoryGeneratorItem) {\n\t*out = *in\n}", "func (in *NamePath) DeepCopyInto(out *NamePath) {\n\t*out = *in\n\treturn\n}", "func (in *ConsoleCreateObj) DeepCopyInto(out *ConsoleCreateObj) {\n\t*out = *in\n}", "func (in *UsedPipelineRun) DeepCopyInto(out *UsedPipelineRun) {\n\t*out = *in\n}", "func (in *BuildTemplate) DeepCopyInto(out *BuildTemplate) {\n\t*out = *in\n\tif in.Cmd != nil {\n\t\tin, out := &in.Cmd, &out.Cmd\n\t\t*out = make([]BuildTemplateStep, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n}", "func (in *ObjectInfo) DeepCopyInto(out *ObjectInfo) {\n\t*out = *in\n\tout.GroupVersionKind = in.GroupVersionKind\n\treturn\n}", "func (in *Files) DeepCopyInto(out *Files) {\n\t*out = *in\n}", "func (in *Source) DeepCopyInto(out *Source) {\n\t*out = *in\n\tif in.Dependencies != nil {\n\t\tin, out := &in.Dependencies, &out.Dependencies\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MavenRepositories != nil {\n\t\tin, out := &in.MavenRepositories, &out.MavenRepositories\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\treturn\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = *in\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = 
*in\n}", "func (in *StackBuild) DeepCopyInto(out *StackBuild) {\n\t*out = *in\n\treturn\n}", "func (in *BuildTaskRef) DeepCopyInto(out *BuildTaskRef) {\n\t*out = *in\n\treturn\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *PathInfo) DeepCopyInto(out *PathInfo) {\n\t*out = *in\n}", "func (in *PoA) DeepCopyInto(out *PoA) {\n\t*out = *in\n}", "func (in *Section) DeepCopyInto(out *Section) {\n\t*out = *in\n\tif in.SecretRefs != nil {\n\t\tin, out := &in.SecretRefs, &out.SecretRefs\n\t\t*out = make([]SecretReference, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Files != nil {\n\t\tin, out := &in.Files, &out.Files\n\t\t*out = make([]FileMount, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n}", "func (in *DNSSelection) DeepCopyInto(out *DNSSelection) {\n\t*out = *in\n\tif in.Include != nil {\n\t\tin, out := &in.Include, &out.Include\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Exclude != nil {\n\t\tin, out := &in.Exclude, &out.Exclude\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ReleaseVersion) DeepCopyInto(out *ReleaseVersion) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Command) DeepCopyInto(out *Command) {\n\t*out = *in\n\tif in.Flags != nil {\n\t\tin, out := &in.Flags, &out.Flags\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Value != nil {\n\t\tin, out := &in.Value, &out.Value\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *PathRule) DeepCopyInto(out *PathRule) {\n\t*out = *in\n\treturn\n}", "func (in *DockerLifecycleData) DeepCopyInto(out *DockerLifecycleData) {\n\t*out 
= *in\n}", "func (in *RunScriptStepConfig) DeepCopyInto(out *RunScriptStepConfig) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *Checksum) DeepCopyInto(out *Checksum) {\n\t*out = *in\n}", "func (in *DomainNameOutput) DeepCopyInto(out *DomainNameOutput) {\n\t*out = *in\n}", "func (in *InterfaceStruct) DeepCopyInto(out *InterfaceStruct) {\n\t*out = *in\n\tif in.val != nil {\n\t\tin, out := &in.val, &out.val\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Ref) DeepCopyInto(out *Ref) {\n\t*out = *in\n}", "func (in *MemorySpec) DeepCopyInto(out *MemorySpec) {\n\t*out = *in\n}", "func (in *BuildJenkinsInfo) DeepCopyInto(out *BuildJenkinsInfo) {\n\t*out = *in\n\treturn\n}", "func (in *VirtualDatabaseBuildObject) DeepCopyInto(out *VirtualDatabaseBuildObject) {\n\t*out = *in\n\tif in.Incremental != nil {\n\t\tin, out := &in.Incremental, &out.Incremental\n\t\t*out = new(bool)\n\t\t**out = **in\n\t}\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]v1.EnvVar, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\tout.Git = in.Git\n\tin.Source.DeepCopyInto(&out.Source)\n\tif in.Webhooks != nil {\n\t\tin, out := &in.Webhooks, &out.Webhooks\n\t\t*out = make([]WebhookSecret, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *MaintenanceInfo) DeepCopyInto(out *MaintenanceInfo) {\n\t*out = *in\n\treturn\n}", "func (in *KopsNode) DeepCopyInto(out *KopsNode) {\n\t*out = *in\n\treturn\n}", "func (in *FalconAPI) DeepCopyInto(out *FalconAPI) {\n\t*out = *in\n}", "func (in *EBS) DeepCopyInto(out *EBS) {\n\t*out = *in\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n\treturn\n}", "func (in *Empty) DeepCopyInto(out *Empty) {\n\t*out = *in\n\tout.XXX_NoUnkeyedLiteral = in.XXX_NoUnkeyedLiteral\n\tif in.XXX_unrecognized != nil {\n\t\tin, out := 
&in.XXX_unrecognized, &out.XXX_unrecognized\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ComponentDistGit) DeepCopyInto(out *ComponentDistGit) {\n\t*out = *in\n\treturn\n}", "func (in *Memory) DeepCopyInto(out *Memory) {\n\t*out = *in\n\tout.Required = in.Required.DeepCopy()\n}", "func (in *Persistence) DeepCopyInto(out *Persistence) {\n\t*out = *in\n\tout.Size = in.Size.DeepCopy()\n\treturn\n}", "func (in *ManagedDisk) DeepCopyInto(out *ManagedDisk) {\n\t*out = *in\n}", "func (e *Email) DeepCopyInto(out *Email) {\n\t*out = *e\n}", "func (in *ImageInfo) DeepCopyInto(out *ImageInfo) {\n\t*out = *in\n}", "func (in *ShootRef) DeepCopyInto(out *ShootRef) {\n\t*out = *in\n}", "func (in *N3000Fpga) DeepCopyInto(out *N3000Fpga) {\n\t*out = *in\n}", "func (in *NetflowType) DeepCopyInto(out *NetflowType) {\n\t*out = *in\n\treturn\n}", "func (in *BuiltInAdapter) DeepCopyInto(out *BuiltInAdapter) {\n\t*out = *in\n}", "func (in *Node) DeepCopyInto(out *Node) {\n\t*out = *in\n\tif in.FailStatus != nil {\n\t\tin, out := &in.FailStatus, &out.FailStatus\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MigratingSlots != nil {\n\t\tin, out := &in.MigratingSlots, &out.MigratingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.ImportingSlots != nil {\n\t\tin, out := &in.ImportingSlots, &out.ImportingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n}", "func (in *CPUSpec) DeepCopyInto(out *CPUSpec) {\n\t*out = *in\n}", "func (in *LoopState) DeepCopyInto(out *LoopState) {\n\t*out = *in\n}" ]
[ "0.8215289", "0.81280124", "0.81039286", "0.80862963", "0.8083811", "0.80673146", "0.8064545", "0.8026454", "0.8012046", "0.7996313", "0.799204", "0.79887754", "0.7987097", "0.7986994", "0.7986994", "0.79854053", "0.7975989", "0.7972486", "0.79695636", "0.79695636", "0.79695636", "0.7967528", "0.79624444", "0.7961954", "0.7945754", "0.7945754", "0.7944541", "0.79428566", "0.7942668", "0.7942668", "0.7940451", "0.793851", "0.7936731", "0.79294837", "0.79252166", "0.7915377", "0.7911627", "0.7911138", "0.7909384", "0.790913", "0.7908773", "0.7905649", "0.79050326", "0.7904594", "0.7904594", "0.7904235", "0.79036915", "0.79020816", "0.78988886", "0.78977424", "0.7891376", "0.7891024", "0.7889831", "0.78890276", "0.7887135", "0.788637", "0.7885264", "0.7885264", "0.7884786", "0.7880785", "0.78745943", "0.78745943", "0.78745407", "0.78734446", "0.78724426", "0.78713626", "0.78713554", "0.78652424", "0.7863321", "0.7863321", "0.7863321", "0.7863293", "0.7862628", "0.7860664", "0.7858556", "0.785785", "0.78571486", "0.7851332", "0.78453225", "0.78448987", "0.78415996", "0.7837483", "0.7837037", "0.7836443", "0.78351796", "0.78329664", "0.7831094", "0.7829445", "0.7826582", "0.7824499", "0.78242797", "0.78227437", "0.78192484", "0.7818843", "0.78128535", "0.7812535", "0.78111476", "0.78111106", "0.781107", "0.78093034", "0.7808775" ]
0.0
-1
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceConfiguration.
func (in *ResourceConfiguration) DeepCopy() *ResourceConfiguration { if in == nil { return nil } out := new(ResourceConfiguration) in.DeepCopyInto(out) return out }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (in *ResourceConfig) DeepCopy() *ResourceConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *BaseKubernetesResourceConfig) DeepCopy() *BaseKubernetesResourceConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BaseKubernetesResourceConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceOptions) DeepCopy() *ResourceOptions {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceOptions)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IpConfiguration_ARM) DeepCopy() *IpConfiguration_ARM {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IpConfiguration_ARM)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LabelingJobResourceConfig) DeepCopy() *LabelingJobResourceConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LabelingJobResourceConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceDescription) DeepCopy() *ResourceDescription {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceDescription)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *RegistryConfiguration) DeepCopy() *RegistryConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(RegistryConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Resource) DeepCopy() *Resource {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Resource)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Resource) DeepCopy() *Resource {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Resource)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceOutput) DeepCopy() *ResourceOutput {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceOutput)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceCondition) DeepCopy() *ResourceCondition {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceCondition)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceAttribute) DeepCopy() 
*ResourceAttribute {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceAttribute)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ApplicationGatewaySubResource) DeepCopy() *ApplicationGatewaySubResource {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ApplicationGatewaySubResource)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *RedisConfiguration) DeepCopy() *RedisConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(RedisConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *PrivateEndpointIPConfiguration_ARM) DeepCopy() *PrivateEndpointIPConfiguration_ARM {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PrivateEndpointIPConfiguration_ARM)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ApplicationGatewaySubResource_ARM) DeepCopy() *ApplicationGatewaySubResource_ARM {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ApplicationGatewaySubResource_ARM)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceSpec) DeepCopy() *ResourceSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceSpec) DeepCopy() *ResourceSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceSpec) DeepCopy() *ResourceSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceSpec) DeepCopy() 
*ResourceSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *PrivateLinkServiceIpConfiguration_ARM) DeepCopy() *PrivateLinkServiceIpConfiguration_ARM {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PrivateLinkServiceIpConfiguration_ARM)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceThresholdStrategy) DeepCopy() *ResourceThresholdStrategy {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceThresholdStrategy)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceSelector) DeepCopy() *ResourceSelector {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceSelector)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceServerParameters) DeepCopy() *ResourceServerParameters {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceServerParameters)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *CrdManagementConfiguration) DeepCopy() *CrdManagementConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(CrdManagementConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *OCIConfiguration) DeepCopy() *OCIConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(OCIConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceEvent) DeepCopy() *ResourceEvent {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceEvent)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DnsresolverSubResource_ARM) DeepCopy() *DnsresolverSubResource_ARM {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DnsresolverSubResource_ARM)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceRef) DeepCopy() *ResourceRef {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceRef)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *BaseKubernetesResourceConfig) DeepCopyInterface() interface{} {\n\treturn in.DeepCopy()\n}", "func (in *RestApi_EndpointConfiguration) DeepCopy() 
*RestApi_EndpointConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(RestApi_EndpointConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IpConfiguration_STATUS_ARM) DeepCopy() *IpConfiguration_STATUS_ARM {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IpConfiguration_STATUS_ARM)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ExtendedResource) DeepCopy() *ExtendedResource {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ExtendedResource)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ControllerConfiguration) DeepCopy() *ControllerConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ControllerConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceCapacityThreshold) DeepCopy() *ResourceCapacityThreshold {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceCapacityThreshold)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ControllerResource) DeepCopy() *ControllerResource {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ControllerResource)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *CertificateRenewalConfig) DeepCopy() *CertificateRenewalConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(CertificateRenewalConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *RelabelConfig) DeepCopy() *RelabelConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(RelabelConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EKSCFConfiguration) DeepCopy() *EKSCFConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EKSCFConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *K8SResourcePolicy) DeepCopy() *K8SResourcePolicy {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(K8SResourcePolicy)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceMap) DeepCopy() *ResourceMap {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceMap)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func 
(in *ApplicationConfiguration) DeepCopy() *ApplicationConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ApplicationConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AdmissionWebhookConfiguration) DeepCopy() *AdmissionWebhookConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AdmissionWebhookConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Resources) DeepCopy() *Resources {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Resources)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Resources) DeepCopy() *Resources {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Resources)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Resources) DeepCopy() *Resources {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Resources)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Resources) DeepCopy() *Resources {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Resources)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceSigningProfile) DeepCopy() *ResourceSigningProfile {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceSigningProfile)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *BastionHostIPConfiguration_ARM) DeepCopy() *BastionHostIPConfiguration_ARM {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BastionHostIPConfiguration_ARM)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceRequirements) DeepCopy() *ResourceRequirements {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceRequirements)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceRequirements) DeepCopy() *ResourceRequirements {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceRequirements)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceRequirements) DeepCopy() *ResourceRequirements {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceRequirements)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in 
*ResourceServer) DeepCopy() *ResourceServer {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceServer)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *RestoreResourceHook) DeepCopy() *RestoreResourceHook {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(RestoreResourceHook)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DebugRuleConfiguration) DeepCopy() *DebugRuleConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DebugRuleConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DnsresolverSubResource) DeepCopy() *DnsresolverSubResource {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DnsresolverSubResource)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceRestoreResourceInfo) DeepCopy() *ResourceRestoreResourceInfo {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceRestoreResourceInfo)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceReference) DeepCopy() *ResourceReference {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceReference)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceEventList) DeepCopy() *ResourceEventList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceEventList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ControllerManagerWebhookConfiguration) DeepCopy() *ControllerManagerWebhookConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ControllerManagerWebhookConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ControllerManagerWebhookConfiguration) DeepCopy() *ControllerManagerWebhookConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ControllerManagerWebhookConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceUsageExportConfig) DeepCopy() *ResourceUsageExportConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceUsageExportConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in 
*MutatingWebhookConfigurationRef) DeepCopy() *MutatingWebhookConfigurationRef {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MutatingWebhookConfigurationRef)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *PolicyResource) DeepCopy() *PolicyResource {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PolicyResource)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigurationSpec) DeepCopy() *ConfigurationSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigurationSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigurationSpec) DeepCopy() *ConfigurationSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigurationSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MongoDBConfiguration) DeepCopy() *MongoDBConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MongoDBConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceEventSpec) DeepCopy() *ResourceEventSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceEventSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceRequirement) DeepCopy() *ResourceRequirement {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceRequirement)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceExport) DeepCopy() *ResourceExport {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceExport)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceExport) DeepCopy() *ResourceExport {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceExport)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *GCPConfiguration) DeepCopy() *GCPConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(GCPConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LoggingConfiguration) DeepCopy() *LoggingConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in 
*LoggingConfiguration) DeepCopy() *LoggingConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IpConfiguration_STATUS) DeepCopy() *IpConfiguration_STATUS {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IpConfiguration_STATUS)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func WithResourceAttributes(attributes map[string]string) Option {\n\treturn func(c *Config) {\n\t\tc.ResourceAttributes = attributes\n\t}\n}", "func (in *ResourceStatus) DeepCopy() *ResourceStatus {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceStatus)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceStatus) DeepCopy() *ResourceStatus {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceStatus)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DeviceConfigurationType) DeepCopy() *DeviceConfigurationType {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DeviceConfigurationType)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *PrivateDnsZoneConfig_ARM) DeepCopy() *PrivateDnsZoneConfig_ARM {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PrivateDnsZoneConfig_ARM)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *NetworkConfiguration) DeepCopy() *NetworkConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(NetworkConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *IpConfiguration) DeepCopy() *IpConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(IpConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in KubernetesResourceMap) DeepCopy() KubernetesResourceMap {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubernetesResourceMap)\n\tin.DeepCopyInto(out)\n\treturn *out\n}", "func (in *DomainName_EndpointConfiguration) DeepCopy() *DomainName_EndpointConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DomainName_EndpointConfiguration)\n\tin.DeepCopyInto(out)\n\treturn 
out\n}", "func (in *ResourceDiscovererSpec) DeepCopy() *ResourceDiscovererSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceDiscovererSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EndpointConfigurationParameters) DeepCopy() *EndpointConfigurationParameters {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EndpointConfigurationParameters)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *PrivateLinkServiceIpConfigurationProperties_ARM) DeepCopy() *PrivateLinkServiceIpConfigurationProperties_ARM {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PrivateLinkServiceIpConfigurationProperties_ARM)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *AwsResourceRef) DeepCopy() *AwsResourceRef {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AwsResourceRef)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceList) DeepCopy() *ResourceList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ResourceList) DeepCopy() *ResourceList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigurationList) DeepCopy() *ConfigurationList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigurationList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *CollectionConfiguration) DeepCopy() *CollectionConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(CollectionConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c *Config) DeepCopy() *Config {\n\tif c == nil {\n\t\treturn &Config{}\n\t}\n\tcfg := *c\n\treturn &cfg\n}", "func (in *BackupResourceHook) DeepCopy() *BackupResourceHook {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BackupResourceHook)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func LoadResourceConfig() map[string]ResourceConfig {\n\treturn getDefaultResourceConfig()\n}", "func (in *MariaDBConfiguration) DeepCopy() 
*MariaDBConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MariaDBConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *RestoreResourceHookSpec) DeepCopy() *RestoreResourceHookSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(RestoreResourceHookSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}" ]
[ "0.7895176", "0.6825849", "0.67865694", "0.6739", "0.6708907", "0.6551092", "0.613349", "0.60686964", "0.60686964", "0.6034275", "0.6017182", "0.5848022", "0.58381516", "0.5824164", "0.5757487", "0.57524174", "0.5726038", "0.5726038", "0.5726038", "0.57107085", "0.57107085", "0.57107085", "0.57107085", "0.56992215", "0.5695592", "0.56795007", "0.5519691", "0.5518238", "0.5511697", "0.55082566", "0.5490514", "0.546124", "0.5435611", "0.5425961", "0.54207206", "0.53966695", "0.5395642", "0.5390621", "0.5387943", "0.53853935", "0.5374838", "0.5365735", "0.5362526", "0.533373", "0.5328879", "0.5322848", "0.5304739", "0.5304739", "0.5304739", "0.5304739", "0.530031", "0.5289826", "0.52710444", "0.52710444", "0.52710444", "0.52667767", "0.52661705", "0.52655625", "0.52543664", "0.5252686", "0.5252648", "0.5242177", "0.523542", "0.523542", "0.52330494", "0.52270496", "0.521278", "0.5200666", "0.5200666", "0.5178746", "0.5152194", "0.5148623", "0.51367074", "0.51367074", "0.51163685", "0.51107097", "0.51107097", "0.5098078", "0.5092771", "0.50858086", "0.50858086", "0.50837386", "0.50811726", "0.50781107", "0.50760204", "0.5062348", "0.50503373", "0.50409657", "0.5040562", "0.50377136", "0.5035848", "0.5035549", "0.5035549", "0.503297", "0.5024159", "0.5017947", "0.5015719", "0.50107986", "0.5007292", "0.50054586" ]
0.8736436
0
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be nonnil.
func (in *SecretboxConfiguration) DeepCopyInto(out *SecretboxConfiguration) { *out = *in if in.Keys != nil { in, out := &in.Keys, &out.Keys *out = make([]Key, len(*in)) copy(*out, *in) } return }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (r *RunInfo) DeepCopyInto(out *RunInfo) {\n\t*out = *r\n}", "func (in *Base) DeepCopyInto(out *Base) {\n\t*out = *in\n\treturn\n}", "func (in *ForkObject) DeepCopyInto(out *ForkObject) {\n\t*out = *in\n}", "func (in *TargetObjectInfo) DeepCopyInto(out *TargetObjectInfo) {\n\t*out = *in\n}", "func (in *DebugObjectInfo) DeepCopyInto(out *DebugObjectInfo) {\n\t*out = *in\n}", "func (in *Input) DeepCopyInto(out *Input) {\n\t*out = *in\n}", "func (u *SSN) DeepCopyInto(out *SSN) {\n\t*out = *u\n}", "func (in *ExistPvc) DeepCopyInto(out *ExistPvc) {\n\t*out = *in\n}", "func (in *DockerStep) DeepCopyInto(out *DockerStep) {\n\t*out = *in\n\tif in.Inline != nil {\n\t\tin, out := &in.Inline, &out.Inline\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tout.Auth = in.Auth\n\treturn\n}", "func (in *Container) DeepCopyInto(out *Container) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.Command != nil {\n\t\tin, out := &in.Command, &out.Command\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.LifeCycleScript != nil {\n\t\tin, out := &in.LifeCycleScript, &out.LifeCycleScript\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *RuntimeRef) DeepCopyInto(out *RuntimeRef) {\n\t*out = *in\n}", "func (in *Ibft2) DeepCopyInto(out *Ibft2) {\n\t*out = *in\n\treturn\n}", "func (in *TestResult) DeepCopyInto(out *TestResult) {\n\t*out = *in\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n\treturn\n}", "func (in *Haproxy) DeepCopyInto(out *Haproxy) {\n\t*out = *in\n\treturn\n}", "func (in *SSH) DeepCopyInto(out *SSH) {\n\t*out = *in\n\treturn\n}", "func (in *Runtime) DeepCopyInto(out *Runtime) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) 
DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n}", "func (b *Base64) DeepCopyInto(out *Base64) {\n\t*out = *b\n}", "func (in *EventDependencyTransformer) DeepCopyInto(out *EventDependencyTransformer) {\n\t*out = *in\n\treturn\n}", "func (in *StageOutput) DeepCopyInto(out *StageOutput) {\n\t*out = *in\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *ObjectReference) DeepCopyInto(out *ObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Dependent) DeepCopyInto(out *Dependent) {\n\t*out = *in\n\treturn\n}", "func (in *GitFileGeneratorItem) DeepCopyInto(out *GitFileGeneratorItem) {\n\t*out = *in\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *AnsibleStep) DeepCopyInto(out *AnsibleStep) {\n\t*out = *in\n\treturn\n}", "func (in *Forks) DeepCopyInto(out *Forks) {\n\t*out = *in\n\tif in.DAO != nil {\n\t\tin, out := &in.DAO, &out.DAO\n\t\t*out = new(uint)\n\t\t**out = **in\n\t}\n}", "func (in *ContainerPort) DeepCopyInto(out *ContainerPort) {\n\t*out = *in\n}", "func (in *General) DeepCopyInto(out *General) {\n\t*out = *in\n\treturn\n}", "func (in *IsoContainer) DeepCopyInto(out *IsoContainer) {\n\t*out = *in\n}", "func (in *Git) DeepCopyInto(out *Git) {\n\t*out = *in\n\treturn\n}", "func (in *ConfigFile) DeepCopyInto(out *ConfigFile) {\n\t*out = *in\n}", "func (in *BackupProgress) DeepCopyInto(out *BackupProgress) {\n\t*out = *in\n}", "func (in *DataDisk) DeepCopyInto(out *DataDisk) {\n\t*out = *in\n}", "func (in *PhaseStep) DeepCopyInto(out *PhaseStep) {\n\t*out = *in\n}", "func (u *MAC) DeepCopyInto(out *MAC) {\n\t*out = *u\n}", "func 
(in *Variable) DeepCopyInto(out *Variable) {\n\t*out = *in\n}", "func (in *RestoreProgress) DeepCopyInto(out *RestoreProgress) {\n\t*out = *in\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *DataExportObjectReference) DeepCopyInto(out *DataExportObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *NamespacedObjectReference) DeepCopyInto(out *NamespacedObjectReference) {\n\t*out = *in\n\treturn\n}", "func (in *Path) DeepCopyInto(out *Path) {\n\t*out = *in\n\treturn\n}", "func (in *GitDirectoryGeneratorItem) DeepCopyInto(out *GitDirectoryGeneratorItem) {\n\t*out = *in\n}", "func (in *NamePath) DeepCopyInto(out *NamePath) {\n\t*out = *in\n\treturn\n}", "func (in *ConsoleCreateObj) DeepCopyInto(out *ConsoleCreateObj) {\n\t*out = *in\n}", "func (in *UsedPipelineRun) DeepCopyInto(out *UsedPipelineRun) {\n\t*out = *in\n}", "func (in *BuildTemplate) DeepCopyInto(out *BuildTemplate) {\n\t*out = *in\n\tif in.Cmd != nil {\n\t\tin, out := &in.Cmd, &out.Cmd\n\t\t*out = make([]BuildTemplateStep, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n}", "func (in *ObjectInfo) DeepCopyInto(out *ObjectInfo) {\n\t*out = *in\n\tout.GroupVersionKind = in.GroupVersionKind\n\treturn\n}", "func (in *Files) DeepCopyInto(out *Files) {\n\t*out = *in\n}", "func (in *Source) DeepCopyInto(out *Source) {\n\t*out = *in\n\tif in.Dependencies != nil {\n\t\tin, out := &in.Dependencies, &out.Dependencies\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MavenRepositories != nil {\n\t\tin, out := &in.MavenRepositories, &out.MavenRepositories\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\treturn\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = *in\n}", "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = 
*in\n}", "func (in *StackBuild) DeepCopyInto(out *StackBuild) {\n\t*out = *in\n\treturn\n}", "func (in *BuildTaskRef) DeepCopyInto(out *BuildTaskRef) {\n\t*out = *in\n\treturn\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "func (in *PathInfo) DeepCopyInto(out *PathInfo) {\n\t*out = *in\n}", "func (in *PoA) DeepCopyInto(out *PoA) {\n\t*out = *in\n}", "func (in *Section) DeepCopyInto(out *Section) {\n\t*out = *in\n\tif in.SecretRefs != nil {\n\t\tin, out := &in.SecretRefs, &out.SecretRefs\n\t\t*out = make([]SecretReference, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Files != nil {\n\t\tin, out := &in.Files, &out.Files\n\t\t*out = make([]FileMount, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n}", "func (in *DNSSelection) DeepCopyInto(out *DNSSelection) {\n\t*out = *in\n\tif in.Include != nil {\n\t\tin, out := &in.Include, &out.Include\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Exclude != nil {\n\t\tin, out := &in.Exclude, &out.Exclude\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ReleaseVersion) DeepCopyInto(out *ReleaseVersion) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "func (in *Command) DeepCopyInto(out *Command) {\n\t*out = *in\n\tif in.Flags != nil {\n\t\tin, out := &in.Flags, &out.Flags\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Value != nil {\n\t\tin, out := &in.Value, &out.Value\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *PathRule) DeepCopyInto(out *PathRule) {\n\t*out = *in\n\treturn\n}", "func (in *DockerLifecycleData) DeepCopyInto(out *DockerLifecycleData) {\n\t*out 
= *in\n}", "func (in *RunScriptStepConfig) DeepCopyInto(out *RunScriptStepConfig) {\n\t*out = *in\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *Checksum) DeepCopyInto(out *Checksum) {\n\t*out = *in\n}", "func (in *DomainNameOutput) DeepCopyInto(out *DomainNameOutput) {\n\t*out = *in\n}", "func (in *InterfaceStruct) DeepCopyInto(out *InterfaceStruct) {\n\t*out = *in\n\tif in.val != nil {\n\t\tin, out := &in.val, &out.val\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "func (in *Ref) DeepCopyInto(out *Ref) {\n\t*out = *in\n}", "func (in *MemorySpec) DeepCopyInto(out *MemorySpec) {\n\t*out = *in\n}", "func (in *BuildJenkinsInfo) DeepCopyInto(out *BuildJenkinsInfo) {\n\t*out = *in\n\treturn\n}", "func (in *VirtualDatabaseBuildObject) DeepCopyInto(out *VirtualDatabaseBuildObject) {\n\t*out = *in\n\tif in.Incremental != nil {\n\t\tin, out := &in.Incremental, &out.Incremental\n\t\t*out = new(bool)\n\t\t**out = **in\n\t}\n\tif in.Env != nil {\n\t\tin, out := &in.Env, &out.Env\n\t\t*out = make([]v1.EnvVar, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\tout.Git = in.Git\n\tin.Source.DeepCopyInto(&out.Source)\n\tif in.Webhooks != nil {\n\t\tin, out := &in.Webhooks, &out.Webhooks\n\t\t*out = make([]WebhookSecret, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *MaintenanceInfo) DeepCopyInto(out *MaintenanceInfo) {\n\t*out = *in\n\treturn\n}", "func (in *KopsNode) DeepCopyInto(out *KopsNode) {\n\t*out = *in\n\treturn\n}", "func (in *FalconAPI) DeepCopyInto(out *FalconAPI) {\n\t*out = *in\n}", "func (in *EBS) DeepCopyInto(out *EBS) {\n\t*out = *in\n}", "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n\treturn\n}", "func (in *Empty) DeepCopyInto(out *Empty) {\n\t*out = *in\n\tout.XXX_NoUnkeyedLiteral = in.XXX_NoUnkeyedLiteral\n\tif in.XXX_unrecognized != nil {\n\t\tin, out := 
&in.XXX_unrecognized, &out.XXX_unrecognized\n\t\t*out = make([]byte, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func (in *ComponentDistGit) DeepCopyInto(out *ComponentDistGit) {\n\t*out = *in\n\treturn\n}", "func (in *Memory) DeepCopyInto(out *Memory) {\n\t*out = *in\n\tout.Required = in.Required.DeepCopy()\n}", "func (in *Persistence) DeepCopyInto(out *Persistence) {\n\t*out = *in\n\tout.Size = in.Size.DeepCopy()\n\treturn\n}", "func (in *ManagedDisk) DeepCopyInto(out *ManagedDisk) {\n\t*out = *in\n}", "func (e *Email) DeepCopyInto(out *Email) {\n\t*out = *e\n}", "func (in *ImageInfo) DeepCopyInto(out *ImageInfo) {\n\t*out = *in\n}", "func (in *ShootRef) DeepCopyInto(out *ShootRef) {\n\t*out = *in\n}", "func (in *N3000Fpga) DeepCopyInto(out *N3000Fpga) {\n\t*out = *in\n}", "func (in *NetflowType) DeepCopyInto(out *NetflowType) {\n\t*out = *in\n\treturn\n}", "func (in *BuiltInAdapter) DeepCopyInto(out *BuiltInAdapter) {\n\t*out = *in\n}", "func (in *Node) DeepCopyInto(out *Node) {\n\t*out = *in\n\tif in.FailStatus != nil {\n\t\tin, out := &in.FailStatus, &out.FailStatus\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.MigratingSlots != nil {\n\t\tin, out := &in.MigratingSlots, &out.MigratingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\tif in.ImportingSlots != nil {\n\t\tin, out := &in.ImportingSlots, &out.ImportingSlots\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n}", "func (in *CPUSpec) DeepCopyInto(out *CPUSpec) {\n\t*out = *in\n}", "func (in *LoopState) DeepCopyInto(out *LoopState) {\n\t*out = *in\n}" ]
[ "0.8215289", "0.81280124", "0.81039286", "0.80862963", "0.8083811", "0.80673146", "0.8064545", "0.8026454", "0.8012046", "0.7996313", "0.799204", "0.79887754", "0.7987097", "0.7986994", "0.7986994", "0.79854053", "0.7975989", "0.7972486", "0.79695636", "0.79695636", "0.79695636", "0.7967528", "0.79624444", "0.7961954", "0.7945754", "0.7945754", "0.7944541", "0.79428566", "0.7942668", "0.7942668", "0.7940451", "0.793851", "0.7936731", "0.79294837", "0.79252166", "0.7915377", "0.7911627", "0.7911138", "0.7909384", "0.790913", "0.7908773", "0.7905649", "0.79050326", "0.7904594", "0.7904594", "0.7904235", "0.79036915", "0.79020816", "0.78988886", "0.78977424", "0.7891376", "0.7891024", "0.7889831", "0.78890276", "0.7887135", "0.788637", "0.7885264", "0.7885264", "0.7884786", "0.7880785", "0.78745943", "0.78745943", "0.78745407", "0.78734446", "0.78724426", "0.78713626", "0.78713554", "0.78652424", "0.7863321", "0.7863321", "0.7863321", "0.7863293", "0.7862628", "0.7860664", "0.7858556", "0.785785", "0.78571486", "0.7851332", "0.78453225", "0.78448987", "0.78415996", "0.7837483", "0.7837037", "0.7836443", "0.78351796", "0.78329664", "0.7831094", "0.7829445", "0.7826582", "0.7824499", "0.78242797", "0.78227437", "0.78192484", "0.7818843", "0.78128535", "0.7812535", "0.78111476", "0.78111106", "0.781107", "0.78093034", "0.7808775" ]
0.0
-1
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretboxConfiguration.
func (in *SecretboxConfiguration) DeepCopy() *SecretboxConfiguration { if in == nil { return nil } out := new(SecretboxConfiguration) in.DeepCopyInto(out) return out }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (c *VaultConfig) Copy() *VaultConfig {\n\tif c == nil {\n\t\treturn nil\n\t}\n\n\tvar o VaultConfig\n\to.Address = c.Address\n\n\to.Enabled = c.Enabled\n\n\to.Namespace = c.Namespace\n\n\to.RenewToken = c.RenewToken\n\n\tif c.Retry != nil {\n\t\to.Retry = c.Retry.Copy()\n\t}\n\n\tif c.SSL != nil {\n\t\to.SSL = c.SSL.Copy()\n\t}\n\n\to.Token = c.Token\n\n\to.VaultAgentTokenFile = c.VaultAgentTokenFile\n\n\tif c.Transport != nil {\n\t\to.Transport = c.Transport.Copy()\n\t}\n\n\to.UnwrapToken = c.UnwrapToken\n\n\to.DefaultLeaseDuration = c.DefaultLeaseDuration\n\to.LeaseRenewalThreshold = c.LeaseRenewalThreshold\n\n\to.K8SAuthRoleName = c.K8SAuthRoleName\n\to.K8SServiceAccountToken = c.K8SServiceAccountToken\n\to.K8SServiceAccountTokenPath = c.K8SServiceAccountTokenPath\n\to.K8SServiceMountPath = c.K8SServiceMountPath\n\n\treturn &o\n}", "func (in *SecretEngineConfiguration) DeepCopy() *SecretEngineConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretEngineConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigMapSecret) DeepCopy() *ConfigMapSecret {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigMapSecret)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DatabaseSecretEngineConfig) DeepCopy() *DatabaseSecretEngineConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DatabaseSecretEngineConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *VaultSecret) DeepCopy() *VaultSecret {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(VaultSecret)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Secret) DeepCopy() *Secret {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Secret)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretAccessRequestConfiguration) DeepCopy() *SecretAccessRequestConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretAccessRequestConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c *Config) Copy() (*Config, error) 
{\n\tnewC := New()\n\tc.Viper.Unmarshal(&newC.plainTextConfig)\n\tc.Viper.Unmarshal(&newC.secureConfig)\n\treturn newC, nil\n}", "func (in *BcsSecret) DeepCopy() *BcsSecret {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BcsSecret)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DeviceSecretVerifierConfigType) DeepCopy() *DeviceSecretVerifierConfigType {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DeviceSecretVerifierConfigType)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c Config) Copy() Config {\n\tdst := Config{}\n\tdst.Credentials = c.Credentials\n\tdst.Endpoint = c.Endpoint\n\tdst.Region = c.Region\n\tdst.DisableSSL = c.DisableSSL\n\tdst.ManualSend = c.ManualSend\n\tdst.HTTPClient = c.HTTPClient\n\tdst.LogHTTPBody = c.LogHTTPBody\n\tdst.LogLevel = c.LogLevel\n\tdst.Logger = c.Logger\n\tdst.MaxRetries = c.MaxRetries\n\tdst.DisableParamValidation = c.DisableParamValidation\n\tdst.DisableComputeChecksums = c.DisableComputeChecksums\n\tdst.S3ForcePathStyle = c.S3ForcePathStyle\n\tdst.DomainMode = c.DomainMode\n\tdst.SignerVersion = c.SignerVersion\n\treturn dst\n}", "func (in *DatabaseSecretEngineConfigSpec) DeepCopy() *DatabaseSecretEngineConfigSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DatabaseSecretEngineConfigSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (old ContainerConfig) Copy() ContainerConfig {\n\t// Copy all fields\n\tres := old\n\n\t// Make deep copy of slices\n\t// none yet - placeholder\n\n\treturn res\n}", "func (proxy *StandAloneProxyConfig) DeepCopy() *StandAloneProxyConfig {\n\tif proxy == nil {\n\t\treturn nil\n\t}\n\tcloned := new(StandAloneProxyConfig)\n\tcloned.proxyCredentials = make(map[string]*ProxyUser)\n\tcloned.managementServers = make(map[url.URL]*ManagementServer)\n\tcloned.managedArrays = make(map[string]*StorageArray)\n\tfor key, value := range proxy.managedArrays {\n\t\tarray := *value\n\t\tcloned.managedArrays[key] = &array\n\t}\n\tfor key, value := range 
proxy.managementServers {\n\t\tcloned.managementServers[key] = value.DeepCopy()\n\t}\n\tfor key, value := range proxy.proxyCredentials {\n\t\tcreds := *value\n\t\tcloned.proxyCredentials[key] = &creds\n\t}\n\treturn cloned\n}", "func (in *CredentialSecret) DeepCopy() *CredentialSecret {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(CredentialSecret)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o *Config) Copy(s Config) {\n o.Enable = s.Enable\n o.RouterId = s.RouterId\n o.AsNumber = s.AsNumber\n o.BfdProfile = s.BfdProfile\n o.RejectDefaultRoute = s.RejectDefaultRoute\n o.InstallRoute = s.InstallRoute\n o.AggregateMed = s.AggregateMed\n o.DefaultLocalPreference = s.DefaultLocalPreference\n o.AsFormat = s.AsFormat\n o.AlwaysCompareMed = s.AlwaysCompareMed\n o.DeterministicMedComparison = s.DeterministicMedComparison\n o.EcmpMultiAs = s.EcmpMultiAs\n o.EnforceFirstAs = s.EnforceFirstAs\n o.EnableGracefulRestart = s.EnableGracefulRestart\n o.StaleRouteTime = s.StaleRouteTime\n o.LocalRestartTime = s.LocalRestartTime\n o.MaxPeerRestartTime = s.MaxPeerRestartTime\n o.ReflectorClusterId = s.ReflectorClusterId\n o.ConfederationMemberAs = s.ConfederationMemberAs\n o.AllowRedistributeDefaultRoute = s.AllowRedistributeDefaultRoute\n}", "func (in *VaultSecretSpec) DeepCopy() *VaultSecretSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(VaultSecretSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ContainernodepoolSandboxConfig) DeepCopy() *ContainernodepoolSandboxConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ContainernodepoolSandboxConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *PasswordSecret) DeepCopy() *PasswordSecret {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(PasswordSecret)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretInfo) DeepCopy() *SecretInfo {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretInfo)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (o *Block) 
GetSecretConfiguration(ctx context.Context, options map[string]dbus.Variant) (configuration []struct {\n\tV0 string\n\tV1 map[string]dbus.Variant\n}, err error) {\n\terr = o.object.CallWithContext(ctx, InterfaceBlock+\".GetSecretConfiguration\", 0, options).Store(&configuration)\n\treturn\n}", "func (opts OAuth2WellKnownConfiguration) Copy() (optsCopy OAuth2WellKnownConfiguration) {\n\toptsCopy = OAuth2WellKnownConfiguration{\n\t\tCommonDiscoveryOptions: opts.CommonDiscoveryOptions,\n\t\tOAuth2DiscoveryOptions: opts.OAuth2DiscoveryOptions,\n\t}\n\n\tif opts.OAuth2DeviceAuthorizationGrantDiscoveryOptions != nil {\n\t\toptsCopy.OAuth2DeviceAuthorizationGrantDiscoveryOptions = &OAuth2DeviceAuthorizationGrantDiscoveryOptions{}\n\t\t*optsCopy.OAuth2DeviceAuthorizationGrantDiscoveryOptions = *opts.OAuth2DeviceAuthorizationGrantDiscoveryOptions\n\t}\n\n\tif opts.OAuth2MutualTLSClientAuthenticationDiscoveryOptions != nil {\n\t\toptsCopy.OAuth2MutualTLSClientAuthenticationDiscoveryOptions = &OAuth2MutualTLSClientAuthenticationDiscoveryOptions{}\n\t\t*optsCopy.OAuth2MutualTLSClientAuthenticationDiscoveryOptions = *opts.OAuth2MutualTLSClientAuthenticationDiscoveryOptions\n\t}\n\n\tif opts.OAuth2IssuerIdentificationDiscoveryOptions != nil {\n\t\toptsCopy.OAuth2IssuerIdentificationDiscoveryOptions = &OAuth2IssuerIdentificationDiscoveryOptions{}\n\t\t*optsCopy.OAuth2IssuerIdentificationDiscoveryOptions = *opts.OAuth2IssuerIdentificationDiscoveryOptions\n\t}\n\n\tif opts.OAuth2JWTIntrospectionResponseDiscoveryOptions != nil {\n\t\toptsCopy.OAuth2JWTIntrospectionResponseDiscoveryOptions = &OAuth2JWTIntrospectionResponseDiscoveryOptions{}\n\t\t*optsCopy.OAuth2JWTIntrospectionResponseDiscoveryOptions = *opts.OAuth2JWTIntrospectionResponseDiscoveryOptions\n\t}\n\n\tif opts.OAuth2JWTSecuredAuthorizationRequestDiscoveryOptions != nil {\n\t\toptsCopy.OAuth2JWTSecuredAuthorizationRequestDiscoveryOptions = 
&OAuth2JWTSecuredAuthorizationRequestDiscoveryOptions{}\n\t\t*optsCopy.OAuth2JWTSecuredAuthorizationRequestDiscoveryOptions = *opts.OAuth2JWTSecuredAuthorizationRequestDiscoveryOptions\n\t}\n\n\tif opts.OAuth2PushedAuthorizationDiscoveryOptions != nil {\n\t\toptsCopy.OAuth2PushedAuthorizationDiscoveryOptions = &OAuth2PushedAuthorizationDiscoveryOptions{}\n\t\t*optsCopy.OAuth2PushedAuthorizationDiscoveryOptions = *opts.OAuth2PushedAuthorizationDiscoveryOptions\n\t}\n\n\treturn optsCopy\n}", "func (c *Config) Copy() *Config {\n\td := new(Config)\n\tcopier.Copy(d, c)\n\treturn d\n}", "func (o *Config) Copy(s Config) {\n\to.Enable = s.Enable\n\to.RouterId = s.RouterId\n\to.AsNumber = s.AsNumber\n\to.BfdProfile = s.BfdProfile\n\to.RejectDefaultRoute = s.RejectDefaultRoute\n\to.InstallRoute = s.InstallRoute\n\to.AggregateMed = s.AggregateMed\n\to.DefaultLocalPreference = s.DefaultLocalPreference\n\to.AsFormat = s.AsFormat\n\to.AlwaysCompareMed = s.AlwaysCompareMed\n\to.DeterministicMedComparison = s.DeterministicMedComparison\n\to.EcmpMultiAs = s.EcmpMultiAs\n\to.EnforceFirstAs = s.EnforceFirstAs\n\to.EnableGracefulRestart = s.EnableGracefulRestart\n\to.StaleRouteTime = s.StaleRouteTime\n\to.LocalRestartTime = s.LocalRestartTime\n\to.MaxPeerRestartTime = s.MaxPeerRestartTime\n\to.ReflectorClusterId = s.ReflectorClusterId\n\to.ConfederationMemberAs = s.ConfederationMemberAs\n\to.AllowRedistributeDefaultRoute = s.AllowRedistributeDefaultRoute\n}", "func (in *AESConfiguration) DeepCopy() *AESConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(AESConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigMapSecretSpec) DeepCopy() *ConfigMapSecretSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigMapSecretSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *BcsSecretSpec) DeepCopy() *BcsSecretSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BcsSecretSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in 
*KubemanagerConfig) DeepCopy() *KubemanagerConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubemanagerConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DopplerSecret) DeepCopy() *DopplerSecret {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DopplerSecret)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ContainernodepoolShieldedInstanceConfig) DeepCopy() *ContainernodepoolShieldedInstanceConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ContainernodepoolShieldedInstanceConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConfigMapSecretList) DeepCopy() *ConfigMapSecretList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigMapSecretList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KMSConfiguration) DeepCopy() *KMSConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KMSConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DatabaseSecretEngineConfigList) DeepCopy() *DatabaseSecretEngineConfigList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DatabaseSecretEngineConfigList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *BcsSecretList) DeepCopy() *BcsSecretList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(BcsSecretList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (cb *ConfigBuilder) WithPassword(password string) *ConfigBuilder {\n\tcb.config.Password = password\n\treturn cb\n}", "func (proxyConfig *ProxyConfig) DeepCopy() *ProxyConfig {\n\tif proxyConfig == nil {\n\t\treturn nil\n\t}\n\tcloned := ProxyConfig{}\n\tcloned = *proxyConfig\n\tcloned.LinkProxyConfig = proxyConfig.LinkProxyConfig.DeepCopy()\n\tcloned.StandAloneProxyConfig = proxyConfig.StandAloneProxyConfig.DeepCopy()\n\treturn &cloned\n}", "func (in *RootCredentialConfig) DeepCopy() *RootCredentialConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(RootCredentialConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretSpec) DeepCopy() *SecretSpec 
{\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretSpec) DeepCopy() *SecretSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func GetSecretConfig(secret *v1.Secret) Config {\n\treturn Config{\n\t\tNamespace: secret.Namespace,\n\t\tResourceName: secret.Name,\n\t\tResourceAnnotations: secret.Annotations,\n\t\tAnnotation: options.SecretUpdateOnChangeAnnotation,\n\t\tSHAValue: GetSHAfromSecret(secret.Data),\n\t\tType: constants.SecretEnvVarPostfix,\n\t}\n}", "func (in *ToolchainSecret) DeepCopy() *ToolchainSecret {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ToolchainSecret)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (proxy *LinkedProxyConfig) DeepCopy() *LinkedProxyConfig {\n\tif proxy == nil {\n\t\treturn nil\n\t}\n\tcloned := new(LinkedProxyConfig)\n\tcloned.Primary = proxy.Primary.DeepCopy()\n\tif proxy.Backup != nil {\n\t\tcloned.Backup = proxy.Backup.DeepCopy()\n\t}\n\treturn cloned\n}", "func (in *MySQLConfiguration) DeepCopy() *MySQLConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MySQLConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c *Config) DeepCopy() *Config {\n\tif c == nil {\n\t\treturn &Config{}\n\t}\n\tcfg := *c\n\treturn &cfg\n}", "func (in *KubebenchConfig) DeepCopy() *KubebenchConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KubebenchConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *CheSecret) DeepCopy() *CheSecret {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(CheSecret)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (b *BackendConfiguration) ToSecretConfig() (SecretConfig, error) {\n\tuserName, password, err := helper.StartStdInput()\n\tif err != nil {\n\t\treturn SecretConfig{}, err\n\t}\n\n\treturn SecretConfig{\n\t\tName: b.Name,\n\t\tNamespace: b.NameSpace,\n\t\tUser: userName,\n\t\tPwd: password,\n\t}, nil\n}", "func 
(conf *Configuration) Copy() *Configuration {\n\tvar dup Configuration\n\n\t_ = Copy(&dup, conf)\n\n\tdup.TestMode = conf.TestMode\n\n\treturn &dup\n}", "func (c *IssuerConfig) Copy() *IssuerConfig {\n\td := new(IssuerConfig)\n\tcopier.Copy(d, c)\n\treturn d\n}", "func (in *StickinessConfig) DeepCopy() *StickinessConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(StickinessConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConsoleConfig) DeepCopy() *ConsoleConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConsoleConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KVMConfig) DeepCopy() *KVMConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KVMConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EncryptionConfiguration) DeepCopy() *EncryptionConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EncryptionConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ToolchainConfig) DeepCopy() *ToolchainConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ToolchainConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *VaultSecretReference) DeepCopy() *VaultSecretReference {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(VaultSecretReference)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DatabaseSecretEngineConfig) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (in *SharedMemoryServerConfig) DeepCopy() *SharedMemoryServerConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SharedMemoryServerConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MongoDBConfiguration) DeepCopy() *MongoDBConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MongoDBConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (cc *ConnConfig) Copy() *ConnConfig {\n\tnewConfig := new(ConnConfig)\n\t*newConfig = *cc\n\tnewConfig.Config = *newConfig.Config.Copy()\n\treturn 
newConfig\n}", "func ConfigClone(c *tls.Config,) *tls.Config", "func (in *DopplerSecretSpec) DeepCopy() *DopplerSecretSpec {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DopplerSecretSpec)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SandboxConfig) DeepCopy() *SandboxConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SandboxConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (config Config) Copy() Config {\n\tnewConfig := Config{\n\t\tNum: config.Num,\n\t\tShards: config.Shards,\n\t\tGroups: copyConfigGroups(config.Groups),\n\t}\n\treturn newConfig\n}", "func (in *MemberSecret) DeepCopy() *MemberSecret {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MemberSecret)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (b *Bot) DumpConfig() error {\n\treturn b.conf.ToFile(\"\")\n}", "func (c *SecretConfig) ToSecret() corev1.Secret {\n\treturn corev1.Secret{\n\t\tTypeMeta: metav1.TypeMeta{\n\t\t\tAPIVersion: ApiVersion,\n\t\t\tKind: KindSecret,\n\t\t},\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: c.Name,\n\t\t\tNamespace: c.Namespace,\n\t\t},\n\t\tStringData: map[string]string{\n\t\t\t\"password\": c.Pwd,\n\t\t\t\"user\": c.User,\n\t\t},\n\t\tType: \"Opaque\",\n\t}\n}", "func (in *WebhookSecret) DeepCopy() *WebhookSecret {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(WebhookSecret)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ShieldedInstanceConfig) DeepCopy() *ShieldedInstanceConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ShieldedInstanceConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *CaptchaConfig) DeepCopy() *CaptchaConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(CaptchaConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (opts OpenIDConnectWellKnownConfiguration) Copy() (optsCopy OpenIDConnectWellKnownConfiguration) {\n\toptsCopy = OpenIDConnectWellKnownConfiguration{\n\t\tOAuth2WellKnownConfiguration: 
opts.OAuth2WellKnownConfiguration.Copy(),\n\t\tOpenIDConnectDiscoveryOptions: opts.OpenIDConnectDiscoveryOptions,\n\t}\n\n\tif opts.OpenIDConnectFrontChannelLogoutDiscoveryOptions != nil {\n\t\toptsCopy.OpenIDConnectFrontChannelLogoutDiscoveryOptions = &OpenIDConnectFrontChannelLogoutDiscoveryOptions{}\n\t\t*optsCopy.OpenIDConnectFrontChannelLogoutDiscoveryOptions = *opts.OpenIDConnectFrontChannelLogoutDiscoveryOptions\n\t}\n\n\tif opts.OpenIDConnectBackChannelLogoutDiscoveryOptions != nil {\n\t\toptsCopy.OpenIDConnectBackChannelLogoutDiscoveryOptions = &OpenIDConnectBackChannelLogoutDiscoveryOptions{}\n\t\t*optsCopy.OpenIDConnectBackChannelLogoutDiscoveryOptions = *opts.OpenIDConnectBackChannelLogoutDiscoveryOptions\n\t}\n\n\tif opts.OpenIDConnectSessionManagementDiscoveryOptions != nil {\n\t\toptsCopy.OpenIDConnectSessionManagementDiscoveryOptions = &OpenIDConnectSessionManagementDiscoveryOptions{}\n\t\t*optsCopy.OpenIDConnectSessionManagementDiscoveryOptions = *opts.OpenIDConnectSessionManagementDiscoveryOptions\n\t}\n\n\tif opts.OpenIDConnectRPInitiatedLogoutDiscoveryOptions != nil {\n\t\toptsCopy.OpenIDConnectRPInitiatedLogoutDiscoveryOptions = &OpenIDConnectRPInitiatedLogoutDiscoveryOptions{}\n\t\t*optsCopy.OpenIDConnectRPInitiatedLogoutDiscoveryOptions = *opts.OpenIDConnectRPInitiatedLogoutDiscoveryOptions\n\t}\n\n\tif opts.OpenIDConnectPromptCreateDiscoveryOptions != nil {\n\t\toptsCopy.OpenIDConnectPromptCreateDiscoveryOptions = &OpenIDConnectPromptCreateDiscoveryOptions{}\n\t\t*optsCopy.OpenIDConnectPromptCreateDiscoveryOptions = *opts.OpenIDConnectPromptCreateDiscoveryOptions\n\t}\n\n\tif opts.OpenIDConnectClientInitiatedBackChannelAuthFlowDiscoveryOptions != nil {\n\t\toptsCopy.OpenIDConnectClientInitiatedBackChannelAuthFlowDiscoveryOptions = &OpenIDConnectClientInitiatedBackChannelAuthFlowDiscoveryOptions{}\n\t\t*optsCopy.OpenIDConnectClientInitiatedBackChannelAuthFlowDiscoveryOptions = 
*opts.OpenIDConnectClientInitiatedBackChannelAuthFlowDiscoveryOptions\n\t}\n\n\tif opts.OpenIDConnectJWTSecuredAuthorizationResponseModeDiscoveryOptions != nil {\n\t\toptsCopy.OpenIDConnectJWTSecuredAuthorizationResponseModeDiscoveryOptions = &OpenIDConnectJWTSecuredAuthorizationResponseModeDiscoveryOptions{}\n\t\t*optsCopy.OpenIDConnectJWTSecuredAuthorizationResponseModeDiscoveryOptions = *opts.OpenIDConnectJWTSecuredAuthorizationResponseModeDiscoveryOptions\n\t}\n\n\tif opts.OpenIDFederationDiscoveryOptions != nil {\n\t\toptsCopy.OpenIDFederationDiscoveryOptions = &OpenIDFederationDiscoveryOptions{}\n\t\t*optsCopy.OpenIDFederationDiscoveryOptions = *opts.OpenIDFederationDiscoveryOptions\n\t}\n\n\treturn optsCopy\n}", "func (in *CASignedConfig) DeepCopy() *CASignedConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(CASignedConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c Configuration) Clone() Configuration {\n\treturn Configuration{\n\t\tEDATool: c.EDATool,\n\t\tInputFile: c.InputFile,\n\t\tOutputFile: c.OutputFile,\n\t\tLastUpdated: c.LastUpdated,\n\t}\n}", "func (in *ConfigConnectorConfig) DeepCopy() *ConfigConnectorConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConfigConnectorConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretReference) DeepCopy() *SecretReference {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretReference)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretReference) DeepCopy() *SecretReference {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretReference)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretReference) DeepCopy() *SecretReference {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretReference)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *KVConfiguration) DeepCopy() *KVConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KVConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretRef) 
DeepCopy() *SecretRef {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretRef)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretRef) DeepCopy() *SecretRef {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretRef)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *SecretRef) DeepCopy() *SecretRef {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SecretRef)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *MariaDBConfiguration) DeepCopy() *MariaDBConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(MariaDBConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ConnectionPoolSettings) DeepCopy() *ConnectionPoolSettings {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ConnectionPoolSettings)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ShuffleServerConfig) DeepCopy() *ShuffleServerConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ShuffleServerConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c *Config) Copy() *Config {\n\tconfigCopy := &Config{\n\t\tFilter: c.Filter,\n\t\tIncludeMsgTypes: util.CopyStringSlice(c.IncludeMsgTypes),\n\t\tSubject: c.Subject,\n\t}\n\tif c.Email != nil {\n\t\tconfigCopy.Email = &EmailNotifierConfig{\n\t\t\tEmails: util.CopyStringSlice(c.Email.Emails),\n\t\t}\n\t}\n\tif c.Chat != nil {\n\t\tconfigCopy.Chat = &ChatNotifierConfig{\n\t\t\tRoomID: c.Chat.RoomID,\n\t\t}\n\t}\n\tif c.PubSub != nil {\n\t\tconfigCopy.PubSub = &PubSubNotifierConfig{\n\t\t\tTopic: c.PubSub.Topic,\n\t\t}\n\t}\n\tif c.Monorail != nil {\n\t\tconfigCopy.Monorail = &MonorailNotifierConfig{\n\t\t\tProject: c.Monorail.Project,\n\t\t\tOwner: c.Monorail.Owner,\n\t\t\tCC: util.CopyStringSlice(c.Monorail.CC),\n\t\t\tComponents: util.CopyStringSlice(c.Monorail.Components),\n\t\t\tLabels: util.CopyStringSlice(c.Monorail.Labels),\n\t\t}\n\t}\n\treturn configCopy\n}", "func (in *ServerConfig) DeepCopy() *ServerConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := 
new(ServerConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c *config) WithPassword(password string) Config {\n\tc.password = password\n\treturn c\n}", "func (in *KVMConfigList) DeepCopy() *KVMConfigList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(KVMConfigList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *DatabaseSecretEngineConfigList) DeepCopyObject() runtime.Object {\n\tif c := in.DeepCopy(); c != nil {\n\t\treturn c\n\t}\n\treturn nil\n}", "func (conf *ThrapConfig) Clone() *ThrapConfig {\n\tif conf == nil {\n\t\treturn nil\n\t}\n\n\tc := &ThrapConfig{\n\t\tVCS: make(map[string]*VCSConfig, len(conf.VCS)),\n\t\tOrchestrator: make(map[string]*OrchestratorConfig, len(conf.Orchestrator)),\n\t\tRegistry: make(map[string]*RegistryConfig, len(conf.Registry)),\n\t\tSecrets: make(map[string]*SecretsConfig, len(conf.Secrets)),\n\t}\n\n\tfor k, v := range conf.VCS {\n\t\tc.VCS[k] = v.Clone()\n\t}\n\tfor k, v := range conf.Orchestrator {\n\t\tc.Orchestrator[k] = v.Clone()\n\t}\n\tfor k, v := range conf.Registry {\n\t\tc.Registry[k] = v.Clone()\n\t}\n\tfor k, v := range conf.Secrets {\n\t\tc.Secrets[k] = v.Clone()\n\t}\n\n\treturn conf\n}", "func (cfg *appConfig) copy() appConfig { return *cfg }", "func (in *DiamondConfig) DeepCopy() *DiamondConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(DiamondConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *ObjectStorageConfig) DeepCopy() *ObjectStorageConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ObjectStorageConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (c *Config) Copy() *Config {\n\tc.Lock()\n\tdefer c.Unlock()\n\tn := &Config{\n\t\tm: make(map[string]interface{}),\n\t}\n\tfor key, value := range c.m {\n\t\tn.m[key] = value\n\t}\n\treturn n\n}", "func (in *VaultSecretList) DeepCopy() *VaultSecretList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(VaultSecretList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LoggingConfiguration) 
DeepCopy() *LoggingConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *LoggingConfiguration) DeepCopy() *LoggingConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(LoggingConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (co *ConfigOptions) Clone() (*ConfigOptions, error) {\n\tout, err := yaml.Marshal(co)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tnewCo := &ConfigOptions{}\n\terr = yaml.Unmarshal(out, newCo)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn newCo, nil\n}", "func (in *SharedMemoryStoreConfig) DeepCopy() *SharedMemoryStoreConfig {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(SharedMemoryStoreConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *EKSCFConfiguration) DeepCopy() *EKSCFConfiguration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(EKSCFConfiguration)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *Configuration) DeepCopy() *Configuration {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Configuration)\n\tin.DeepCopyInto(out)\n\treturn out\n}" ]
[ "0.60144943", "0.5966961", "0.5891478", "0.5733064", "0.5586271", "0.55468184", "0.55379814", "0.55241823", "0.5468137", "0.54423493", "0.5412706", "0.53246456", "0.53073645", "0.52828056", "0.52819806", "0.527624", "0.525599", "0.5251909", "0.5250702", "0.52440095", "0.5237304", "0.5223912", "0.52163714", "0.51898414", "0.5180599", "0.5171406", "0.5162189", "0.5154262", "0.51525426", "0.51477784", "0.5121849", "0.5100011", "0.5076809", "0.50718963", "0.506771", "0.50631225", "0.5056182", "0.505489", "0.505489", "0.505216", "0.5041107", "0.5008642", "0.50072306", "0.49966687", "0.49919817", "0.49893504", "0.4968937", "0.4942438", "0.4940173", "0.49357915", "0.49304864", "0.49125263", "0.49085802", "0.48927593", "0.4884021", "0.48762038", "0.4874974", "0.48644447", "0.4859461", "0.48588717", "0.48520452", "0.4838714", "0.48381528", "0.48320577", "0.48238185", "0.48199207", "0.48192886", "0.48161247", "0.48074582", "0.4802351", "0.47781578", "0.4774746", "0.47638658", "0.47513387", "0.47513387", "0.47513387", "0.47473267", "0.4740429", "0.4740429", "0.4740429", "0.4735441", "0.4728189", "0.472274", "0.4714049", "0.47060144", "0.4705047", "0.4698211", "0.46963742", "0.46867976", "0.46863317", "0.46732464", "0.46679264", "0.46440026", "0.4635198", "0.46248907", "0.46248907", "0.462217", "0.46144766", "0.4614418", "0.46057272" ]
0.83404136
0
IsOk check response code is equal to 200
func (r *Response) IsOk() bool { return r.Code == ok }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (resp *Response) OK() bool {\n\treturn resp.StatusCode < 400\n}", "func (w *responseWrapper) IsOK() bool {\n\treturn w.status == 200\n}", "func (resp *Response) Ok() bool {\n\treturn resp.OK()\n}", "func (s *APIStatusResponse) OK() bool {\n\treturn s.StatusCode == \"ok\"\n}", "func isOK(statusCode int) bool {\n\treturn statusCode < minHTTPErrorStatusCode\n}", "func isResponseSuccess(resp *http.Response) bool {\n\tif resp == nil {\n\t\treturn false\n\t}\n\tstatusCode := resp.StatusCode\n\n\treturn statusCode >= http.StatusOK && statusCode <= 299\n}", "func (cr *ClientResponse) Ok() bool {\n\treturn cr.ok\n}", "func (c *Client) IsOK() bool {\n\turl := fmt.Sprintf(\"%s/v1/sys/health\", c.addr)\n\n\tr, _ := http.NewRequest(http.MethodGet, url, nil)\n\t//r.Header.Add(\"X-Vault-Token\", \"root\")\n\n\tresp, err := http.DefaultClient.Do(r)\n\tif err != nil {\n\t\treturn false\n\t}\n\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn false\n\t}\n\n\treturn true\n}", "func (s statusCode) Successful() bool { return s >= 200 && s < 300 }", "func (r XMLResponse) IsOK() bool {\n\treturn r.Status == ErrorOK\n}", "func (ir *IndexResponse) IsOk() bool {\n\treturn ir.Ok\n}", "func (vr *VersionResponse) IsOk() bool {\n\treturn len(vr.version) > 0\n}", "func (er *ExitResponse) IsOk() bool {\n\treturn er.Ok\n}", "func (res *Response) isSuccessfulResponse() bool {\n\treturn res.IsSuccessfulResponse\n}", "func (h Health) Ok() bool {\n\treturn h.success\n}", "func (o *WebSvmGetOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func alwaysOk(http.ResponseWriter, *http.Request, int) (int, error) { return 0, nil }", "func (sr *SearchResponse) IsOk() bool {\n\t// Empty responses (meaning no matches) are not errors...\n\treturn len(sr.Documents) > 0\n}", "func (o *GetMeOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func OK(r *http.ResponseWriter) error {\n\tresponse := *r\n\tresponse.WriteHeader(200)\n\treturn nil\n}", "func (resp 
*Response) StatusOk(w http.ResponseWriter) {\n\tresp.Ok = true\n\twrite(resp, w)\n}", "func (dr *DeleteResponse) IsOk() bool {\n\treturn dr.ok\n}", "func (res *ClientHTTPResponse) CheckOKResponse(okResponses []int) {\n\tfor _, okResponse := range okResponses {\n\t\tif res.rawResponse.StatusCode == okResponse {\n\t\t\treturn\n\t\t}\n\t}\n\n\tres.req.Logger.Warn(\"Unknown response status code\",\n\t\tzap.Int(\"status code\", res.rawResponse.StatusCode),\n\t)\n}", "func (o *ApiResponse) GetCodeOk() (int32, bool) {\n\tif o == nil || o.Code == nil {\n\t\tvar ret int32\n\t\treturn ret, false\n\t}\n\treturn *o.Code, true\n}", "func Ok(msg string) *Response {\n\treturn &Response{\n\t\tCode: http.StatusOK,\n\t\tMessage: msg,\n\t\tData: nil,\n\t}\n}", "func IsSuccessHTTPRequest(t *testing.T, actual *http.Response, err error) bool {\n\tif err != nil {\n\t\tt.Error(\"Unexpected error occered\")\n\t\treturn false\n\t}\n\texpected := http.StatusOK\n\tif actual.StatusCode != expected {\n\t\tt.Errorf(\"Status code error. 
Expected %v, but got %v\", expected, actual.StatusCode)\n\t\treturn false\n\t}\n\treturn true\n}", "func CheckCode(res *http.Response) {\n\tif res.StatusCode != 200 {\n\t\tlog.Fatalf(\"status code err: %d %s\", res.StatusCode, res.Status)\n\t}\n}", "func (o *ValidateUsingPUTOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func IsSuccess(code int) bool {\n\treturn http.StatusOK <= code && code < http.StatusMultipleChoices\n}", "func (r *Response) IsSuccess() bool {\n\treturn (r.Code == http.StatusOK || r.Code == http.StatusConflict)\n}", "func (fault SOAPFault) Ok() bool {\n\treturn fault.Code == \"\"\n}", "func (o *VerifyConnectionOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func requestOk(w http.ResponseWriter, r *http.Request) bool {\n\tif r.Body == nil {\n\t\thttp.Error(w, \"Please send a request body\", 400)\n\t\treturn false\n\t}\n\treturn true\n}", "func (c *requestContext) ok() {\n\tc.Writer.Header().Set(\"Content-Type\", \"text/plain; charset=utf-8\")\n\tc.Writer.WriteHeader(200)\n\tfmt.Fprintln(c.Writer, \"OK\")\n}", "func (r *Responder) OK() { r.write(http.StatusOK) }", "func (o *ApiResponse) GetCodeOk() (*int32, bool) {\n\tif o == nil || IsNil(o.Code) {\n\t\treturn nil, false\n\t}\n\treturn o.Code, true\n}", "func Is2xx(status int) bool {\n\treturn status >= 200 && status < 300\n}", "func checkResponse(t *testing.T, resp *http.Response, err error) {\n\tassert.Nil(t, err)\n\tassert.Equal(t, 200, resp.StatusCode)\n}", "func CheckOKResponse(r *http.Response) error {\n\tisOK := r.StatusCode >= 200 && r.StatusCode <= 299\n\tif !isOK {\n\t\treturn errors.New(\"response error with http status: \"+r.Status, errors.Internal)\n\t}\n\treturn nil\n}", "func (o *ObjectsGetOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (r *GetReservationRS) Ok() bool {\n\treturn len(r.Errors.Error) <= 0\n}", "func is200(t *testing.T, ts *httptest.Server, path string) []byte {\n\tres, body := checkGet(t, ts, path)\n\tif res.StatusCode != 200 
{\n\t\tt.Fatalf(\"Expected status %d, got %d. Path: %s\", 200, res.StatusCode, path)\n\t}\n\treturn body\n}", "func ok(w http.ResponseWriter, r *http.Request, c *Context) {\n\tfmt.Fprintln(w, \"ok\")\n}", "func (r *Reply) Ok() *Reply {\n\treturn r.Status(http.StatusOK)\n}", "func (o *GetRequestTrackerOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (r *BaseStandard) IsOk() bool {\n\tif r.AuditInfo.StatusCode < http.StatusOK || r.AuditInfo.StatusCode >= http.StatusMultipleChoices {\n\t\treturn false\n\t}\n\n\tif !r.HasItems() {\n\t\treturn false\n\t}\n\n\tif len(r.AuditInfo.Errors.Items) > 0 {\n\t\treturn false\n\t}\n\n\treturn true\n}", "func (r Response) OK(code string, payload Payload, header ...ResponseHeader) {\n\tr.Response(code, http.OK, payload, header...)\n}", "func (o *UploadResponse) HasOk() bool {\n\tif o != nil && o.Ok != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (o *MetroclusterSvmGetOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *WebModifyOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *NodesGetOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *PcloudImagesGetallOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (a *App) Ok(w http.ResponseWriter, r *http.Request) {\n\tw.WriteHeader(http.StatusOK)\n\tlogerr(w.Write(randomByteSlice(10, \"OK\", \"0123456789abcdef\")))\n}", "func (o *IpspacesGetOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *WebSvmGetOK) IsSuccess() bool {\n\treturn true\n}", "func (o *SoftwarePackageGetOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *OpenAPIDiscoveryOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *DNSGetOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func Ok(c *routing.Context, msg string, service string) error {\n\tResponse(c, msg, 200, service, \"application/json\")\n\treturn nil\n}", "func (o *UserGetMeOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func 
IsHttpStatusOK(statusCode int) bool {\n\treturn statusCode >= http.StatusOK && statusCode < http.StatusMultipleChoices\n}", "func healthcheckok(writer http.ResponseWriter, request *http.Request) {\n\twriter.WriteHeader(200)\n}", "func (o *GetAlertsOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *VscanOnDemandGetOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func validate(status int, _ string) bool {\n\treturn status == 200\n}", "func (suite *HandlerTestSuite) assertSuccessfulResponse(response events.APIGatewayProxyResponse) {\n\tsuite.True(response.StatusCode >= 200 && response.StatusCode <= 299)\n}", "func (o *GetPublishersOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *SyncProjectsPrincipalsUsingPOSTOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *GetRunDownstreamLineageOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *GetCertificateInfoOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (c *CountHandler) OkResponse(resp http.ResponseWriter, req *http.Request) {\n\tc.numRequests++\n\tresp.WriteHeader(http.StatusOK)\n\tresp.Write([]byte(\"{\\\"stat\\\": \\\"ok\\\"}\"))\n}", "func ExpectOK(res *http.Response, msg string) error {\n\treturn ExpectStatus(res, http.StatusOK, msg)\n}", "func (o *GetDeploymentActionsUsingGET2OK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *DescribeServerOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *GetVersionOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func Is2xx(code int) bool {\n\treturn (code >= 200) && (code <= 299)\n}", "func TestServerResponseOk(t *testing.T) {\n\ttestServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\tw.WriteHeader(http.StatusOK)\n\t\tw.Write([]byte(`{}`))\n\t}))\n\tresult, err := sendMessage(testServer.URL, \"[email protected]\", \"test\")\n\tif err != nil 
{\n\t\tt.Error(err)\n\t}\n\tif result != false {\n\t\tt.Fail()\n\t}\n}", "func successStatus(status int) bool {\n\treturn status >= 200 && status <= 399\n}", "func (o *ErrorResponseWeb) GetCodeOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Code, true\n}", "func (o *WebModifyDefault) IsSuccess() bool {\n\treturn o._statusCode/100 == 2\n}", "func (result *HTTPResult) Successful() bool {\n\treturn result.Status < 400\n}", "func statusGood(status int) bool {\n\treturn status >= 200 && status <= 299\n}", "func (o *SyncPerspectiveGroupUsingPOST2OK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (req *Request) OK(body string) {\n\treq.Reply(http.StatusOK, body)\n}", "func (o *GetProjectOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *UploadResponse) GetOkOk() (*bool, bool) {\n\tif o == nil || o.Ok == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Ok, true\n}", "func (o *SignupOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func OK(condition bool, statusCode int, message string, args ...interface{}) {\n\tif err := ok(condition, statusCode, message, args...); err != nil {\n\t\tpanic(err)\n\t}\n}", "func (o *SearchAclsOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func WaitForHTTPStatusOk(ctx context.Context, transport http.RoundTripper, urltocheck string) error {\n\tcli := &http.Client{\n\t\tTransport: transport,\n\t\tTimeout: 10 * time.Second,\n\t}\n\n\treq, err := http.NewRequest(\"GET\", urltocheck, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn wait.PollImmediateUntil(time.Second, func() (bool, error) {\n\t\tresp, err := cli.Do(req)\n\t\tif err, ok := err.(*url.Error); ok {\n\t\t\tif err, ok := err.Err.(*net.OpError); ok {\n\t\t\t\tif err, ok := err.Err.(*os.SyscallError); ok {\n\t\t\t\t\tif err.Err == syscall.ENETUNREACH {\n\t\t\t\t\t\treturn false, nil\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err.Timeout() || err.Err == io.EOF || err.Err == io.ErrUnexpectedEOF {\n\t\t\t\treturn 
false, nil\n\t\t\t}\n\t\t}\n\t\tif err == io.EOF {\n\t\t\treturn false, nil\n\t\t}\n\t\tif err != nil {\n\t\t\treturn false, err\n\t\t}\n\t\treturn resp != nil && resp.StatusCode == http.StatusOK, nil\n\t}, ctx.Done())\n}", "func (o *GetV1AlertsOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *S3GroupGetOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *UnauthorizedErrorResponse) HasOk() bool {\n\tif o != nil && o.Ok != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (o *ObjectsCreateOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func (o *ResponseEntity) GetStatusCodeOk() (*string, bool) {\n\tif o == nil || o.StatusCode == nil {\n\t\treturn nil, false\n\t}\n\treturn o.StatusCode, true\n}", "func (o *GetLoadBalancerOK) IsCode(code int) bool {\n\treturn code == 200\n}", "func IsSuccess(statusCode int) bool {\n\treturn statusCode >= 200 && statusCode <= 299\n}", "func respondOk(writer http.ResponseWriter) {\n\twriter.WriteHeader(http.StatusOK)\n\twriter.Header().Set(\"Content-Type\", \"application/json\")\n\tio.WriteString(writer, `{ \"status\": \"OK\" }`)\n}", "func (o *WebModifyOK) IsSuccess() bool {\n\treturn true\n}", "func isSuccessHTTPResponse(resp *http.Response, err error) (isSuccess bool, realError error) {\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tif resp != nil {\n\t\t// HTTP 2xx suggests a successful response\n\t\tif 199 < resp.StatusCode && resp.StatusCode < 300 {\n\t\t\treturn true, nil\n\t\t}\n\n\t\treturn false, fmt.Errorf(\"failed with HTTP status code %d\", resp.StatusCode)\n\t}\n\n\t// This shouldn't happen, it only ensures all exceptions are handled.\n\treturn false, fmt.Errorf(\"failed with unknown error\")\n}" ]
[ "0.7763328", "0.7720846", "0.74847406", "0.7463899", "0.7392119", "0.73865634", "0.7201045", "0.7040394", "0.70257586", "0.6942052", "0.68642926", "0.68560714", "0.677577", "0.67510307", "0.6729229", "0.671001", "0.66298383", "0.65708536", "0.6570777", "0.65460277", "0.65407306", "0.651619", "0.6503107", "0.6490824", "0.64867175", "0.64851964", "0.6485109", "0.6484076", "0.64515734", "0.6422833", "0.6417678", "0.6414394", "0.64009124", "0.6399303", "0.63981694", "0.639475", "0.6380017", "0.63786733", "0.63702434", "0.6363522", "0.6356344", "0.63436157", "0.6331071", "0.6330961", "0.63087136", "0.6289296", "0.6261871", "0.626041", "0.6258926", "0.62547654", "0.6230273", "0.6221629", "0.6220299", "0.62160844", "0.62149924", "0.6207108", "0.61969", "0.61938864", "0.61916226", "0.6190963", "0.6189018", "0.6183937", "0.6171287", "0.6171103", "0.6163916", "0.615282", "0.6143739", "0.6136825", "0.613518", "0.61339366", "0.61320364", "0.61201096", "0.6120085", "0.6118192", "0.6117659", "0.61100894", "0.6109676", "0.61019605", "0.60953313", "0.6091407", "0.60899293", "0.60859907", "0.6084041", "0.6082306", "0.6076544", "0.60745907", "0.6072186", "0.6071362", "0.60692644", "0.6065048", "0.6056482", "0.60531807", "0.6052673", "0.6041824", "0.60416967", "0.60414404", "0.60375583", "0.6036582", "0.6024362", "0.60219556" ]
0.7804289
0
Deprecated: Use SyncLocationReq.ProtoReflect.Descriptor instead.
func (*SyncLocationReq) Descriptor() ([]byte, []int) { return file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{0} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*UpdateLocationRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_protobuf_spec_connection_user_v1_proto_rawDescGZIP(), []int{2}\n}", "func (*SyncLocationRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{1}\n}", "func (*GetLocationRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_protobuf_spec_connection_user_v1_proto_rawDescGZIP(), []int{3}\n}", "func (*RefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{16}\n}", "func (*UpdatePermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{9}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_contact_proto_rawDescGZIP(), []int{12}\n}", "func (*UpdateDomainMappingRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_appengine_v1_appengine_proto_rawDescGZIP(), []int{40}\n}", "func (*SyncRequest) Descriptor() ([]byte, []int) {\n\treturn file_recordwants_proto_rawDescGZIP(), []int{12}\n}", "func (*SyncRequest) Descriptor() ([]byte, []int) {\n\treturn file_ysync_proto_rawDescGZIP(), []int{0}\n}", "func (*UpdateTelemetryReportedRequest) Descriptor() ([]byte, []int) {\n\treturn file_external_applications_applications_proto_rawDescGZIP(), []int{29}\n}", "func (*PatchAnnotationsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{4}\n}", "func (*PatchCollectorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{161}\n}", "func (*MemberReceiveAddressUpdateReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{86}\n}", "func (*SyncRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_proto_rawDescGZIP(), []int{19}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_weather_proto_rawDescGZIP(), []int{6}\n}", "func 
(*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_recordwants_proto_rawDescGZIP(), []int{6}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_teams_v1_teams_proto_rawDescGZIP(), []int{5}\n}", "func (*UpdateContactRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_essentialcontacts_v1_service_proto_rawDescGZIP(), []int{6}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_task_v1_task_proto_rawDescGZIP(), []int{7}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_Ultimate_Super_WebDev_Corp_gateway_services_customer_customer_proto_rawDescGZIP(), []int{4}\n}", "func (*UpdateModelRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_automl_v1_service_proto_rawDescGZIP(), []int{14}\n}", "func (*UpdateEndpointRequest) Descriptor() ([]byte, []int) {\n\treturn file_endpoint_api_proto_rawDescGZIP(), []int{14}\n}", "func (*PatchConceptsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{34}\n}", "func (*GetBlockLocalPathInfoRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{6}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_patrol_proto_rawDescGZIP(), []int{10}\n}", "func (*PatchAnnotationsStatusRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{5}\n}", "func (*MemberAddressUpdateReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{14}\n}", "func (*CMsgGCPlayerInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{117}\n}", "func (*ProvideValidationFeedbackRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_maps_addressvalidation_v1_address_validation_service_proto_rawDescGZIP(), []int{2}\n}", "func (*RegistrationUpdateReq) Descriptor() ([]byte, []int) 
{\n\treturn file_registration_proto_rawDescGZIP(), []int{19}\n}", "func (*LocatePhoneRequest) Descriptor() ([]byte, []int) {\n\treturn file_contacts_phones_proto_rawDescGZIP(), []int{3}\n}", "func (*MeasureDistanceRequest) Descriptor() ([]byte, []int) {\n\treturn file_coolenv_proto_rawDescGZIP(), []int{3}\n}", "func (*UpdateFriendStatusReq) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{2}\n}", "func (*SyncTimeRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_worker_v1_device_state_proto_rawDescGZIP(), []int{2}\n}", "func (*BatchUpdateReferencesRequest_Request) Descriptor() ([]byte, []int) {\n\treturn file_pkg_proto_icas_icas_proto_rawDescGZIP(), []int{1, 0}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_todo_proto_rawDescGZIP(), []int{5}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_app_config_agent_cmd_grpcserver_proto_api_app_config_proto_rawDescGZIP(), []int{17}\n}", "func (*UpdateEndpointApiRequest) Descriptor() ([]byte, []int) {\n\treturn file_endpoint_api_proto_rawDescGZIP(), []int{5}\n}", "func (*RefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_toit_api_auth_proto_rawDescGZIP(), []int{1}\n}", "func (*UpdatePermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_protodef_user_user_proto_rawDescGZIP(), []int{16}\n}", "func (*UpdateEntityRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_dataplex_v1_metadata_proto_rawDescGZIP(), []int{1}\n}", "func (*PatchConceptLanguagesRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{50}\n}", "func (*TelemetryRequest) Descriptor() ([]byte, []int) {\n\treturn file_interservice_license_control_license_control_proto_rawDescGZIP(), []int{11}\n}", "func (*RouteLookupRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_lookup_v1_rls_proto_rawDescGZIP(), []int{0}\n}", "func (*OffsetRequest) Descriptor() ([]byte, 
[]int) {\n\treturn file_proto_carbon_proto_rawDescGZIP(), []int{1}\n}", "func (*UpdateRefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_auth_auth_proto_rawDescGZIP(), []int{15}\n}", "func (*UpdateReq) Descriptor() ([]byte, []int) {\n\treturn file_internal_proto_crypto_proto_rawDescGZIP(), []int{8}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_interservice_license_control_license_control_proto_rawDescGZIP(), []int{9}\n}", "func (*PatchKeysRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{74}\n}", "func (*AddPeerRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*TaskUpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_protobuf_v1_task_proto_rawDescGZIP(), []int{2}\n}", "func (*GetPeerInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{6}\n}", "func (*PatchTasksRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{154}\n}", "func (*RefreshServiceAclRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_RefreshAuthorizationPolicyProtocol_proto_rawDescGZIP(), []int{0}\n}", "func (*LookupRequest) Descriptor() ([]byte, []int) {\n\treturn file_authzed_api_v0_acl_service_proto_rawDescGZIP(), []int{10}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_user_proto_rawDescGZIP(), []int{4}\n}", "func (*UpdatePhoneRequest) Descriptor() ([]byte, []int) {\n\treturn file_contacts_phones_proto_rawDescGZIP(), []int{7}\n}", "func (*GetCollectorRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{163}\n}", "func (*ListPlannableLocationsRequest) Descriptor() ([]byte, []int) {\n\treturn 
file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{0}\n}", "func (*UpdateTaskRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_containerd_containerd_runtime_v1_shim_v1_shim_proto_rawDescGZIP(), []int{15}\n}", "func (*ModifyRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_engine_proto_rawDescGZIP(), []int{10}\n}", "func (*QueryPermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_permission_pb_request_proto_rawDescGZIP(), []int{0}\n}", "func (*UpdateCheckRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_ocp_check_api_ocp_check_api_proto_rawDescGZIP(), []int{8}\n}", "func (*CheckRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_api_servicecontrol_v1_service_controller_proto_rawDescGZIP(), []int{0}\n}", "func (*UpdateFriendStatusRsp) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{3}\n}", "func (*UpdateNetworkRequest) Descriptor() ([]byte, []int) {\n\treturn file_packetbroker_api_iam_v1_service_proto_rawDescGZIP(), []int{8}\n}", "func (*GetBlockLocalPathInfoResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{7}\n}", "func (*DescribePermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{6}\n}", "func (*CodeLensResolveRequest) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{33}\n}", "func (*TelemetryRequest) Descriptor() ([]byte, []int) {\n\treturn file_automate_gateway_api_telemetry_telemetry_proto_rawDescGZIP(), []int{0}\n}", "func (*DeliveryVehicleLocation) Descriptor() ([]byte, []int) {\n\treturn file_google_maps_fleetengine_delivery_v1_common_proto_rawDescGZIP(), []int{1}\n}", "func (*UpdateServiceRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_appengine_v1_appengine_proto_rawDescGZIP(), []int{7}\n}", "func (*GetServiceRequest) Descriptor() ([]byte, []int) 
{\n\treturn file_google_appengine_v1_appengine_proto_rawDescGZIP(), []int{6}\n}", "func (*LookupRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_ip_proto_rawDescGZIP(), []int{0}\n}", "func (*CMsgGCNotificationsRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{113}\n}", "func (*GetRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_comments_proto_rawDescGZIP(), []int{3}\n}", "func (*RefreshNamenodesRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{2}\n}", "func (*CheckPermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_permission_pb_request_proto_rawDescGZIP(), []int{2}\n}", "func (*GetLocationResponse) Descriptor() ([]byte, []int) {\n\treturn file_api_protobuf_spec_connection_user_v1_proto_rawDescGZIP(), []int{4}\n}", "func (*ResolveRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_api_proto_rawDescGZIP(), []int{1}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_versions_v1_versions_proto_rawDescGZIP(), []int{5}\n}", "func (*UpdateVersionRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_appengine_v1_appengine_proto_rawDescGZIP(), []int{13}\n}", "func (*MemberLevelUpdateReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{50}\n}", "func (*MemberTaskUpdateReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{122}\n}", "func (*UpdateConversationRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_threads_proto_rawDescGZIP(), []int{8}\n}", "func (*BatchUpdateReferencesRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_proto_icas_icas_proto_rawDescGZIP(), []int{1}\n}", "func (*MyScopesRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{109}\n}", "func (*CBroadcast_WatchBroadcast_Request) Descriptor() ([]byte, []int) {\n\treturn 
file_steammessages_broadcast_steamclient_proto_rawDescGZIP(), []int{7}\n}", "func (*ModelControlRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_service_proto_rawDescGZIP(), []int{4}\n}", "func (*RefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_auth_auth_proto_rawDescGZIP(), []int{6}\n}", "func (*SignalRequest) Descriptor() ([]byte, []int) {\n\treturn file_cmd_server_grpc_proto_sfu_proto_rawDescGZIP(), []int{0}\n}", "func (*UpdateRemoteMirrorRequest) Descriptor() ([]byte, []int) {\n\treturn file_remote_proto_rawDescGZIP(), []int{0}\n}", "func (*Locations) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{22}\n}", "func (*PatchWorkflowsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{131}\n}", "func (*MemberUpdateReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{5}\n}", "func (*UnaryMapMessageRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_proto_rawDescGZIP(), []int{10}\n}", "func (*WatchRequestTypeProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{25}\n}", "func (*ReferenceRequest) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{141}\n}", "func (*FsyncRequest) Descriptor() ([]byte, []int) {\n\treturn file_IOService_proto_rawDescGZIP(), []int{56}\n}", "func (*FeedbackRequest) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{10}\n}" ]
[ "0.71103185", "0.69038934", "0.6815616", "0.6785577", "0.67049074", "0.66889256", "0.6613836", "0.66054076", "0.65946", "0.65917665", "0.6582088", "0.65695715", "0.6560337", "0.6535074", "0.65338737", "0.65247077", "0.65170616", "0.6483615", "0.64714944", "0.64662427", "0.6456886", "0.64484286", "0.6445085", "0.6440484", "0.643664", "0.6436235", "0.64324015", "0.6431758", "0.64295584", "0.642198", "0.6421411", "0.64174163", "0.6416304", "0.63977665", "0.63900965", "0.6387687", "0.6386571", "0.63825965", "0.6374931", "0.6367375", "0.6364737", "0.63547707", "0.6353727", "0.6353155", "0.6351243", "0.63476485", "0.6344454", "0.634137", "0.6337473", "0.6337249", "0.63332945", "0.6329875", "0.63260186", "0.63150734", "0.63125783", "0.63102835", "0.63077265", "0.63076824", "0.63054395", "0.6303244", "0.630058", "0.6293954", "0.6291491", "0.6284448", "0.62817305", "0.62751764", "0.6271683", "0.62706834", "0.62680095", "0.626329", "0.6263225", "0.62603605", "0.62598467", "0.62565595", "0.6254934", "0.62508035", "0.6249023", "0.6246905", "0.6245876", "0.6243899", "0.6235574", "0.6233067", "0.62289417", "0.6224867", "0.6224783", "0.6224115", "0.6222461", "0.62200356", "0.6218277", "0.62176937", "0.6217214", "0.62167656", "0.6216338", "0.62147206", "0.6211842", "0.6208964", "0.62086654", "0.6207624", "0.62056744", "0.62056166" ]
0.7409314
0
Deprecated: Use SyncLocationRsp.ProtoReflect.Descriptor instead.
func (*SyncLocationRsp) Descriptor() ([]byte, []int) { return file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{1} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*SyncLocationReq) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{0}\n}", "func (*UpdateFriendStatusRsp) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{3}\n}", "func (*RefreshResponse) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{17}\n}", "func (DeliveryVehicleLocationSensor) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_maps_fleetengine_delivery_v1_common_proto_rawDescGZIP(), []int{0}\n}", "func (*GetRsp) Descriptor() ([]byte, []int) {\n\treturn file_grpc_proto_rawDescGZIP(), []int{1}\n}", "func (*RefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{16}\n}", "func (*ClusterRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{8}\n}", "func (*GroupRsp) Descriptor() ([]byte, []int) {\n\treturn file_chatMsg_msg_proto_rawDescGZIP(), []int{6}\n}", "func (*CreateFriendRsp) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{1}\n}", "func (*MemberReceiveAddressUpdateResp) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{87}\n}", "func (*GetLocationResponse) Descriptor() ([]byte, []int) {\n\treturn file_api_protobuf_spec_connection_user_v1_proto_rawDescGZIP(), []int{4}\n}", "func (Diagnostic_Kind) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_api_servicemanagement_v1_resources_proto_rawDescGZIP(), []int{2, 0}\n}", "func (*DeliveryVehicleLocation) Descriptor() ([]byte, []int) {\n\treturn file_google_maps_fleetengine_delivery_v1_common_proto_rawDescGZIP(), []int{1}\n}", "func (*ChangeInfoRsp) Descriptor() ([]byte, []int) {\n\treturn file_Auth_Auth_proto_rawDescGZIP(), []int{5}\n}", "func (*UpdateLocationRequest) Descriptor() ([]byte, []int) {\n\treturn 
file_api_protobuf_spec_connection_user_v1_proto_rawDescGZIP(), []int{2}\n}", "func (*GetBlockLocalPathInfoResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{7}\n}", "func (EBroadcastWatchLocation) EnumDescriptor() ([]byte, []int) {\n\treturn file_steammessages_broadcast_steamclient_proto_rawDescGZIP(), []int{0}\n}", "func (RouteLookupRequest_Reason) EnumDescriptor() ([]byte, []int) {\n\treturn file_grpc_lookup_v1_rls_proto_rawDescGZIP(), []int{0, 0}\n}", "func (StatusMessage_Reference) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_devtools_clouddebugger_v2_data_proto_rawDescGZIP(), []int{1, 0}\n}", "func (Span_SpanKind) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_devtools_cloudtrace_v2_trace_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*CMsgSetMapLocationStateResponse) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{62}\n}", "func (*UpdateFriendRemarkRsp) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{5}\n}", "func (*Locations) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{22}\n}", "func (LocationMetadata_LocationSource) EnumDescriptor() ([]byte, []int) {\n\treturn file_router_proto_rawDescGZIP(), []int{7, 0}\n}", "func (*RefreshCallQueueResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_RefreshCallQueueProtocol_proto_rawDescGZIP(), []int{1}\n}", "func (*GetDelegationTokenResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{4}\n}", "func (*SwitchKeeperRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{6}\n}", "func (*GetLocationRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_protobuf_spec_connection_user_v1_proto_rawDescGZIP(), []int{3}\n}", "func (*PlannableLocation) Descriptor() ([]byte, []int) {\n\treturn 
file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{2}\n}", "func (*Deprecation) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{8}\n}", "func (*StopBroadcastResponse) Descriptor() ([]byte, []int) {\n\treturn file_services_core_protobuf_servers_proto_rawDescGZIP(), []int{5}\n}", "func (*LocationLinks) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{23}\n}", "func (*RefreshNamenodesResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{3}\n}", "func (Span_Link_Type) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_devtools_cloudtrace_v2_trace_proto_rawDescGZIP(), []int{0, 3, 0}\n}", "func (*UpdateTelemetryReportedResponse) Descriptor() ([]byte, []int) {\n\treturn file_external_applications_applications_proto_rawDescGZIP(), []int{30}\n}", "func (LocationType) EnumDescriptor() ([]byte, []int) {\n\treturn file_determined_project_v1_project_proto_rawDescGZIP(), []int{0}\n}", "func (*CancelDelegationTokenResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{8}\n}", "func (*ListResponse) Descriptor() ([]byte, []int) {\n\treturn file_proto_contact_proto_rawDescGZIP(), []int{15}\n}", "func (SocketAddress_Protocol) EnumDescriptor() ([]byte, []int) {\n\treturn file_envoy_config_core_v3_address_proto_rawDescGZIP(), []int{1, 0}\n}", "func (*Diagnostic) Descriptor() ([]byte, []int) {\n\treturn file_google_api_servicemanagement_v1_resources_proto_rawDescGZIP(), []int{2}\n}", "func (*CMsgResetMapLocationsResponse) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{64}\n}", "func (*RefreshServiceAclResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_RefreshAuthorizationPolicyProtocol_proto_rawDescGZIP(), []int{1}\n}", "func (SyncSubType) EnumDescriptor() ([]byte, []int) {\n\treturn 
file_api_proto_global_Global_proto_rawDescGZIP(), []int{6}\n}", "func (*PatchCollectorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{161}\n}", "func (*RouteLookupResponse) Descriptor() ([]byte, []int) {\n\treturn file_grpc_lookup_v1_rls_proto_rawDescGZIP(), []int{1}\n}", "func (*RouteLegTravelAdvisory) Descriptor() ([]byte, []int) {\n\treturn file_google_maps_routing_v2_route_proto_rawDescGZIP(), []int{2}\n}", "func (*RefreshCallQueueRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_RefreshCallQueueProtocol_proto_rawDescGZIP(), []int{0}\n}", "func (*GetDatanodeInfoResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{13}\n}", "func (RefType) EnumDescriptor() ([]byte, []int) {\n\treturn file_common_trace_common_proto_rawDescGZIP(), []int{1}\n}", "func (*MemberReceiveAddressUpdateReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{86}\n}", "func (x *fastReflection_MsgUpdateParamsResponse) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_MsgUpdateParamsResponse\n}", "func (*HelloRsp) Descriptor() ([]byte, []int) {\n\treturn file_helloword_proto_rawDescGZIP(), []int{1}\n}", "func (*AddPeerResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{30}\n}", "func (*ProvideValidationFeedbackResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_maps_addressvalidation_v1_address_validation_service_proto_rawDescGZIP(), []int{3}\n}", "func (*StopBroadcastRequest) Descriptor() ([]byte, []int) {\n\treturn file_services_core_protobuf_servers_proto_rawDescGZIP(), []int{4}\n}", "func (*RefreshServiceAclRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_RefreshAuthorizationPolicyProtocol_proto_rawDescGZIP(), []int{0}\n}", "func (*LocationMetadata) Descriptor() ([]byte, []int) {\n\treturn file_router_proto_rawDescGZIP(), 
[]int{7}\n}", "func (*RouteLookupRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_lookup_v1_rls_proto_rawDescGZIP(), []int{0}\n}", "func (*ConsumableTrafficPolyline) Descriptor() ([]byte, []int) {\n\treturn file_google_maps_fleetengine_v1_traffic_proto_rawDescGZIP(), []int{1}\n}", "func (*GetPeerInfoResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{28}\n}", "func (*ListResponse) Descriptor() ([]byte, []int) {\n\treturn file_weather_proto_rawDescGZIP(), []int{17}\n}", "func (*QueryPlanStatusResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{25}\n}", "func (DnsRegMethod) EnumDescriptor() ([]byte, []int) {\n\treturn file_signalling_proto_rawDescGZIP(), []int{1}\n}", "func (*GetBlockLocalPathInfoRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{6}\n}", "func (*ListPlannableLocationsResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{1}\n}", "func (*ListUserFriendRsp) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{7}\n}", "func (x *fastReflection_AddressStringToBytesResponse) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_AddressStringToBytesResponse\n}", "func (*UpdateTelemetryReportedRequest) Descriptor() ([]byte, []int) {\n\treturn file_external_applications_applications_proto_rawDescGZIP(), []int{29}\n}", "func (Protocol) EnumDescriptor() ([]byte, []int) {\n\treturn file_github_com_ameliaikeda_protoc_gen_router_proto_router_proto_rawDescGZIP(), []int{0}\n}", "func (PolicyBasedRoute_Warnings_Code) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_cloud_networkconnectivity_v1_policy_based_routing_proto_rawDescGZIP(), []int{0, 3, 0}\n}", "func (*AddPeerRequest) Descriptor() ([]byte, []int) {\n\treturn 
file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*Location) Descriptor() ([]byte, []int) {\n\treturn file_clientToAppMgr_proto_rawDescGZIP(), []int{1}\n}", "func (*RouteLeg) Descriptor() ([]byte, []int) {\n\treturn file_google_maps_routing_v2_route_proto_rawDescGZIP(), []int{4}\n}", "func (*LocatePhoneRequest) Descriptor() ([]byte, []int) {\n\treturn file_contacts_phones_proto_rawDescGZIP(), []int{3}\n}", "func (x *fastReflection_AddressBytesToStringResponse) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_AddressBytesToStringResponse\n}", "func (*UpdateResponse) Descriptor() ([]byte, []int) {\n\treturn file_proto_contact_proto_rawDescGZIP(), []int{13}\n}", "func (SpanLayer) EnumDescriptor() ([]byte, []int) {\n\treturn file_common_trace_common_proto_rawDescGZIP(), []int{2}\n}", "func (*SyncTagsEventRsp) Descriptor() ([]byte, []int) {\n\treturn file_api_proto_user_SyncTagsEvent_proto_rawDescGZIP(), []int{1}\n}", "func (MsgSubType) EnumDescriptor() ([]byte, []int) {\n\treturn file_api_proto_global_Global_proto_rawDescGZIP(), []int{5}\n}", "func LegacyLoadMessageDesc(t reflect.Type) protoreflect.MessageDescriptor {\n\treturn legacyLoadMessageDesc(t, \"\")\n}", "func (*GetCollectorRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{163}\n}", "func (SVC_Messages) EnumDescriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{4}\n}", "func (*MemberAddressUpdateResp) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{15}\n}", "func (*TelemetryParams) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{62}\n}", "func (*GetPeerInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{6}\n}", "func (SymbolKind) EnumDescriptor() ([]byte, []int) {\n\treturn 
file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{7}\n}", "func (*PatchAnnotationsStatusResponse) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{6}\n}", "func (*RenewDelegationTokenResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{6}\n}", "func (FriendSubType) EnumDescriptor() ([]byte, []int) {\n\treturn file_api_proto_global_Global_proto_rawDescGZIP(), []int{3}\n}", "func (*RouteLegStepTravelAdvisory) Descriptor() ([]byte, []int) {\n\treturn file_google_maps_routing_v2_route_proto_rawDescGZIP(), []int{3}\n}", "func (*PatchAnnotationsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{4}\n}", "func (SqlUpdateTrack) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_cloud_sql_v1_cloud_sql_resources_proto_rawDescGZIP(), []int{8}\n}", "func (SqlUpdateTrack) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_cloud_sql_v1beta4_cloud_sql_resources_proto_rawDescGZIP(), []int{13}\n}", "func (*CMsgSetMapLocationState) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{61}\n}", "func (*Location) Descriptor() ([]byte, []int) {\n\treturn file_tlogpb_tlog_proto_rawDescGZIP(), []int{2}\n}", "func (*RegisterNodeRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{10}\n}", "func (*ComputeRoutesResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_maps_routes_v1_compute_routes_response_proto_rawDescGZIP(), []int{0}\n}", "func (UpdateType) EnumDescriptor() ([]byte, []int) {\n\treturn file_GetUserInfo_proto_rawDescGZIP(), []int{0}\n}", "func (*Span) Descriptor() ([]byte, []int) {\n\treturn file_google_devtools_cloudtrace_v2_trace_proto_rawDescGZIP(), []int{0}\n}", "func (*DeviceStateRefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_worker_v1_device_state_proto_rawDescGZIP(), []int{4}\n}" ]
[ "0.6654763", "0.663932", "0.6555286", "0.65550774", "0.65344405", "0.65240306", "0.6431467", "0.6428126", "0.6421469", "0.64044356", "0.64010787", "0.6398879", "0.6377352", "0.6371854", "0.6344242", "0.6334764", "0.6315522", "0.6313767", "0.63100225", "0.63098186", "0.6308928", "0.6300741", "0.6297876", "0.62957907", "0.62866217", "0.62855846", "0.626995", "0.6268929", "0.6251781", "0.624252", "0.62377894", "0.6232149", "0.62194085", "0.62188077", "0.62102", "0.62071586", "0.62070006", "0.6206325", "0.6204493", "0.6204394", "0.61924213", "0.6190351", "0.6189968", "0.617781", "0.6173126", "0.6161842", "0.6159241", "0.61525697", "0.6150274", "0.6148279", "0.6144668", "0.61424667", "0.61424375", "0.6140847", "0.6140338", "0.6137101", "0.6131071", "0.6128279", "0.6125489", "0.61116815", "0.6108788", "0.6106509", "0.6106314", "0.6103288", "0.60982585", "0.6097865", "0.60956013", "0.60903215", "0.60877764", "0.60875654", "0.608052", "0.6072344", "0.6069711", "0.60692436", "0.6067341", "0.6061138", "0.60580873", "0.6056635", "0.6053889", "0.6053458", "0.6052595", "0.60520935", "0.6050501", "0.6048721", "0.6048194", "0.60460997", "0.60459805", "0.6042694", "0.6038981", "0.6034592", "0.6032689", "0.6032459", "0.6032243", "0.6031134", "0.60308063", "0.60201734", "0.60201186", "0.60200113", "0.60194004", "0.60107654" ]
0.73720187
0
Deprecated: Use RemoveKeeperReq.ProtoReflect.Descriptor instead.
func (*RemoveKeeperReq) Descriptor() ([]byte, []int) { return file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{2} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*ExternalIDPRemoveRequest) Descriptor() ([]byte, []int) {\n\treturn file_management_proto_rawDescGZIP(), []int{162}\n}", "func (*RemoveCheckRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_ocp_check_api_ocp_check_api_proto_rawDescGZIP(), []int{10}\n}", "func (*RemoveRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_bucketsd_pb_bucketsd_proto_rawDescGZIP(), []int{27}\n}", "func (*MemberLevelDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{52}\n}", "func (*MessageHubRemoveRequest) Descriptor() ([]byte, []int) {\n\treturn file_messagehub_proto_rawDescGZIP(), []int{7}\n}", "func (*MemberDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{7}\n}", "func (*MemberReceiveAddressDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{88}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_wallet_proto_rawDescGZIP(), []int{3}\n}", "func (*DeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_internal_proto_crypto_proto_rawDescGZIP(), []int{10}\n}", "func (*RemovePermissionFromRoleRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{8}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_dictybase_api_jsonapi_request_proto_rawDescGZIP(), []int{7}\n}", "func (*DelRequest) Descriptor() ([]byte, []int) {\n\treturn file_patrol_proto_rawDescGZIP(), []int{8}\n}", "func (*GroupRemoveRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{34}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_exercicio_proto_rawDescGZIP(), []int{7}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_engine_proto_rawDescGZIP(), []int{12}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_message_service_proto_rawDescGZIP(), []int{0}\n}", "func 
(*DeleteMicroRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_micro_pb_request_proto_rawDescGZIP(), []int{4}\n}", "func (*RemoveItemFromGroupRequest) Descriptor() ([]byte, []int) {\n\treturn file_apps_mconf_pb_request_proto_rawDescGZIP(), []int{7}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_teams_v1_teams_proto_rawDescGZIP(), []int{10}\n}", "func (*DeleteFriendRequest) Descriptor() ([]byte, []int) {\n\treturn file_console_proto_rawDescGZIP(), []int{7}\n}", "func (*RemoveRemindRequest) Descriptor() ([]byte, []int) {\n\treturn file_ocp_remind_api_proto_rawDescGZIP(), []int{0}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{14}\n}", "func (*RemoveAllReq) Descriptor() ([]byte, []int) {\n\treturn file_dfs_proto_rawDescGZIP(), []int{3}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_weather_proto_rawDescGZIP(), []int{8}\n}", "func (*MemberTagDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{115}\n}", "func (*MemberRuleSettingDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{97}\n}", "func (*RemoveOrgMemberRequest) Descriptor() ([]byte, []int) {\n\treturn file_management_proto_rawDescGZIP(), []int{67}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_mods_v1_mods_proto_rawDescGZIP(), []int{10}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_todo_proto_rawDescGZIP(), []int{7}\n}", "func (*RevokeJobRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_noderpc_proto_feeds_manager_proto_rawDescGZIP(), []int{20}\n}", "func (*GrowthChangeHistoryDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{25}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_contact_proto_rawDescGZIP(), []int{10}\n}", "func (*IntegrationChangeHistoryDeleteReq) 
Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{34}\n}", "func (*DeleteBrokerRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_iot_broker_v1_broker_service_proto_rawDescGZIP(), []int{7}\n}", "func (*DeleteBrokerPasswordRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_iot_broker_v1_broker_service_proto_rawDescGZIP(), []int{19}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_githubcard_proto_rawDescGZIP(), []int{11}\n}", "func (*MemberAddressDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{16}\n}", "func (*DeleteKeyRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{73}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_storage_server_proto_rawDescGZIP(), []int{2}\n}", "func (*RemovePathRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_bucketsd_pb_bucketsd_proto_rawDescGZIP(), []int{29}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_protobuf_index_proto_rawDescGZIP(), []int{13}\n}", "func (*DeleteFeedbackRequest) Descriptor() ([]byte, []int) {\n\treturn file_feedbackreq_proto_rawDescGZIP(), []int{6}\n}", "func (*RemoveOrgMemberRequest) Descriptor() ([]byte, []int) {\n\treturn file_management_proto_rawDescGZIP(), []int{64}\n}", "func (*RemoveCertificateV1Request) Descriptor() ([]byte, []int) {\n\treturn file_api_ocp_certificate_api_ocp_certificate_api_proto_rawDescGZIP(), []int{10}\n}", "func (*DeleteHelmContextReq) Descriptor() ([]byte, []int) {\n\treturn file_helm_api_proto_rawDescGZIP(), []int{4}\n}", "func (*DeleteJobRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_noderpc_proto_feeds_manager_proto_rawDescGZIP(), []int{18}\n}", "func (*UnregisterClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_grpc_proto_cluster_cluster_proto_rawDescGZIP(), []int{3}\n}", "func (*DeleteRequest) Descriptor() 
([]byte, []int) {\n\treturn file_proto_task_v1_task_proto_rawDescGZIP(), []int{13}\n}", "func (*DeleteLeaderboardRecordRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_proto_rawDescGZIP(), []int{31}\n}", "func (*DeleteRefRequest) Descriptor() ([]byte, []int) {\n\treturn file_go_chromium_org_luci_cipd_api_cipd_v1_repo_proto_rawDescGZIP(), []int{14}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_vote_proto_rawDescGZIP(), []int{4}\n}", "func (*DeleteGroupRequest) Descriptor() ([]byte, []int) {\n\treturn file_apps_mconf_pb_request_proto_rawDescGZIP(), []int{4}\n}", "func (*RemoveXAttrRequest) Descriptor() ([]byte, []int) {\n\treturn file_IOService_proto_rawDescGZIP(), []int{52}\n}", "func (*RemoveFaultRequest) Descriptor() ([]byte, []int) {\n\treturn file_faultinjector_proto_rawDescGZIP(), []int{6}\n}", "func (*DeleteWalletLedgerRequest) Descriptor() ([]byte, []int) {\n\treturn file_console_proto_rawDescGZIP(), []int{10}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_versions_v1_versions_proto_rawDescGZIP(), []int{10}\n}", "func (*DeleteRoleRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{4}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_store_store_proto_rawDescGZIP(), []int{9}\n}", "func (*DeleteModelRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_automl_v1_service_proto_rawDescGZIP(), []int{13}\n}", "func (*ProvisioningPolicyChange_Removed) Descriptor() ([]byte, []int) {\n\treturn edgelq_devices_proto_v1alpha_provisioning_policy_change_proto_rawDescGZIP(), []int{0, 3}\n}", "func (*RemoveCheckResponse) Descriptor() ([]byte, []int) {\n\treturn file_api_ocp_check_api_ocp_check_api_proto_rawDescGZIP(), []int{11}\n}", "func (*MemberStatisticsInfoDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{106}\n}", "func (*MemberTaskDeleteReq) Descriptor() ([]byte, []int) 
{\n\treturn file_ums_proto_rawDescGZIP(), []int{124}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_order_proto_rawDescGZIP(), []int{6}\n}", "func (*PlanChange_Removed) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_plan_change_proto_rawDescGZIP(), []int{0, 3}\n}", "func (*MemberLoginLogDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{61}\n}", "func (*DeleteRuntimeRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_notebooks_v1_managed_service_proto_rawDescGZIP(), []int{4}\n}", "func (*DeleteNSRecordRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_record_proto_rawDescGZIP(), []int{3}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_ric_action_ricaction_proto_rawDescGZIP(), []int{4}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_threads_proto_rawDescGZIP(), []int{31}\n}", "func (*DeleteFriendsRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_proto_rawDescGZIP(), []int{29}\n}", "func (*DataDataSyncDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_proto_DataData_DataData_proto_rawDescGZIP(), []int{6}\n}", "func (*DeleteModelRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{81}\n}", "func (*MemcacheDeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_memcache_service_proto_rawDescGZIP(), []int{7}\n}", "func (*DeregisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_engine_proto_rawDescGZIP(), []int{8}\n}", "func (*RemoveProjectRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_ocp_project_api_ocp_project_api_proto_rawDescGZIP(), []int{6}\n}", "func (*DeleteIngressRuleRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_appengine_v1_appengine_proto_rawDescGZIP(), []int{27}\n}", "func (*SharedMemoryControlRequest_Unregister) Descriptor() ([]byte, []int) {\n\treturn 
file_grpc_service_proto_rawDescGZIP(), []int{6, 1}\n}", "func (*DeleteWebhookRequest) Descriptor() ([]byte, []int) {\n\treturn file_events_Event_proto_rawDescGZIP(), []int{6}\n}", "func (*DeleteLimitRequest) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_limit_service_proto_rawDescGZIP(), []int{10}\n}", "func (*DeleteWebhookRequest) Descriptor() ([]byte, []int) {\n\treturn file_uac_Event_proto_rawDescGZIP(), []int{5}\n}", "func (*MemberTagRelationDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{70}\n}", "func (*CleanupRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{14}\n}", "func (*DeleteMessageRequest) Descriptor() ([]byte, []int) {\n\treturn file_chat_v1_messages_proto_rawDescGZIP(), []int{13}\n}", "func (*RemoveResponse) Descriptor() ([]byte, []int) {\n\treturn file_api_bucketsd_pb_bucketsd_proto_rawDescGZIP(), []int{28}\n}", "func (*GetDeleteDisconnectedServicesConfigReq) Descriptor() ([]byte, []int) {\n\treturn file_external_applications_applications_proto_rawDescGZIP(), []int{17}\n}", "func (*DeleteCheckerV1Request) Descriptor() ([]byte, []int) {\n\treturn file_checker_v1_proto_rawDescGZIP(), []int{4}\n}", "func (*DeleteProvisionerRequest) Descriptor() ([]byte, []int) {\n\treturn file_majordomo_proto_rawDescGZIP(), []int{9}\n}", "func (*DeleteVersionRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_appengine_v1_appengine_proto_rawDescGZIP(), []int{14}\n}", "func (*IntegrationConsumeSettingDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{43}\n}", "func (*RemoveVacancyV1Request) Descriptor() ([]byte, []int) {\n\treturn file_api_ocp_vacancy_api_ocp_vacancy_api_proto_rawDescGZIP(), []int{9}\n}", "func (*DeleteMetadataRequest) Descriptor() ([]byte, []int) {\n\treturn file_protobuf_index_proto_rawDescGZIP(), []int{19}\n}", "func (*ReleaseDeleteRequest) Descriptor() ([]byte, 
[]int) {\n\treturn file_release_proto_rawDescGZIP(), []int{11}\n}", "func (*DeleteInputRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{60}\n}", "func (*DeleteSecretRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_flow_grpc_secrets_proto_rawDescGZIP(), []int{7}\n}", "func (*RevokeTokensRequest) Descriptor() ([]byte, []int) {\n\treturn file_token_proto_rawDescGZIP(), []int{17}\n}", "func (*DelGroupRequest) Descriptor() ([]byte, []int) {\n\treturn file_userGroups_proto_rawDescGZIP(), []int{3}\n}", "func (*EmailDelReq) Descriptor() ([]byte, []int) {\n\treturn file_login_proto_rawDescGZIP(), []int{53}\n}", "func (*InterfaceDeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_jnx_interfaces_service_proto_rawDescGZIP(), []int{21}\n}", "func (*DeleteClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{7}\n}" ]
[ "0.7382873", "0.7358745", "0.7324274", "0.721699", "0.71849823", "0.7162527", "0.7161694", "0.71450984", "0.7122148", "0.7120509", "0.71141547", "0.70940745", "0.7093572", "0.7085726", "0.7074796", "0.7035921", "0.7029398", "0.702928", "0.70082223", "0.7003014", "0.6984949", "0.6955201", "0.6952391", "0.69488364", "0.69445163", "0.6942311", "0.6934907", "0.69342136", "0.69136673", "0.69131", "0.69125175", "0.6908887", "0.68961847", "0.68833977", "0.68773836", "0.68735546", "0.68731457", "0.6870962", "0.68663836", "0.6861858", "0.686121", "0.6855915", "0.6855378", "0.6851162", "0.68465495", "0.6846268", "0.68400717", "0.6838346", "0.68357295", "0.683041", "0.6820257", "0.6819497", "0.6819097", "0.6818325", "0.68175447", "0.68153936", "0.68107027", "0.6807418", "0.6797242", "0.6796404", "0.67945063", "0.67872286", "0.6785094", "0.67847055", "0.6784116", "0.6775595", "0.67637134", "0.67633325", "0.67534465", "0.675073", "0.67500657", "0.67478263", "0.6747347", "0.6746063", "0.6742839", "0.6741175", "0.674092", "0.6734443", "0.67316645", "0.6731342", "0.672524", "0.6719356", "0.6714675", "0.6710737", "0.67004544", "0.66986173", "0.6698114", "0.66942316", "0.6693011", "0.66881245", "0.66866755", "0.66859597", "0.66666424", "0.66649914", "0.6655599", "0.66547143", "0.66522026", "0.66459125", "0.66445386", "0.66425145" ]
0.7849878
0
Deprecated: Use AddKeeperReq.ProtoReflect.Descriptor instead.
func (*AddKeeperReq) Descriptor() ([]byte, []int) { return file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{3} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*RemoveKeeperReq) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{2}\n}", "func (*AddPeerRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*AddRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_calculator_proto_calc_proto_rawDescGZIP(), []int{0}\n}", "func (*MemberLevelAddReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{45}\n}", "func (*AddRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_calculate_proto_rawDescGZIP(), []int{3}\n}", "func (*AddApikeyRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_proto_rawDescGZIP(), []int{2}\n}", "func (*AddProducerRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{2}\n}", "func (*MemberAddReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{0}\n}", "func (*SwitchKeeperReq) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{5}\n}", "func (*ModifyRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_engine_proto_rawDescGZIP(), []int{10}\n}", "func (*UpdateWithdrawRequest) Descriptor() ([]byte, []int) {\n\treturn file_services_temporal_service_proto_rawDescGZIP(), []int{4}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_dictybase_api_jsonapi_request_proto_rawDescGZIP(), []int{7}\n}", "func (*UpdatePermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{9}\n}", "func (*AddWantRequest) Descriptor() ([]byte, []int) {\n\treturn file_recordwants_proto_rawDescGZIP(), []int{8}\n}", "func (*PatchKeysRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{74}\n}", "func (*CMsgClientToGCWageringRequest) Descriptor() ([]byte, 
[]int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{169}\n}", "func (*DeleteWalletLedgerRequest) Descriptor() ([]byte, []int) {\n\treturn file_console_proto_rawDescGZIP(), []int{10}\n}", "func (*AddFriendsRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_proto_rawDescGZIP(), []int{11}\n}", "func (*AddBrokerPasswordRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_iot_broker_v1_broker_service_proto_rawDescGZIP(), []int{17}\n}", "func (*MemberReceiveAddressAddReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{81}\n}", "func (*AddItemToGroupRequest) Descriptor() ([]byte, []int) {\n\treturn file_apps_mconf_pb_request_proto_rawDescGZIP(), []int{6}\n}", "func (*WatchRequest) Descriptor() ([]byte, []int) {\n\treturn file_authzed_api_v0_watch_service_proto_rawDescGZIP(), []int{0}\n}", "func (*ListenRequest) Descriptor() ([]byte, []int) {\n\treturn file_faultinjector_proto_rawDescGZIP(), []int{8}\n}", "func (*CheckRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_api_servicecontrol_v1_service_controller_proto_rawDescGZIP(), []int{0}\n}", "func (*MemberLevelUpdateReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{50}\n}", "func (*DelRequest) Descriptor() ([]byte, []int) {\n\treturn file_patrol_proto_rawDescGZIP(), []int{8}\n}", "func (*AddPersonRequest) Descriptor() ([]byte, []int) {\n\treturn file_protos_face_recognition_service_proto_rawDescGZIP(), []int{0}\n}", "func (*CMsgGCPlayerInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{117}\n}", "func (*AddPermissionToRoleRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{7}\n}", "func (*AddRequest) Descriptor() ([]byte, []int) {\n\treturn file_user_proto_rawDescGZIP(), []int{1}\n}", "func (*GrowthChangeHistoryAddReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), 
[]int{18}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_wallet_proto_rawDescGZIP(), []int{3}\n}", "func (*AddRequest) Descriptor() ([]byte, []int) {\n\treturn file_order_proto_rawDescGZIP(), []int{2}\n}", "func (*IntegrationChangeHistoryAddReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{27}\n}", "func (*Request) Descriptor() ([]byte, []int) {\n\treturn file_Trd_ModifyOrder_proto_rawDescGZIP(), []int{2}\n}", "func (*CreateAlterRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_exercicio_proto_rawDescGZIP(), []int{1}\n}", "func (*RevokeJobRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_noderpc_proto_feeds_manager_proto_rawDescGZIP(), []int{20}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_recordwants_proto_rawDescGZIP(), []int{6}\n}", "func (*UpdateBrokerRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_iot_broker_v1_broker_service_proto_rawDescGZIP(), []int{5}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_Ultimate_Super_WebDev_Corp_gateway_services_customer_customer_proto_rawDescGZIP(), []int{4}\n}", "func (*DeleteFriendRequest) Descriptor() ([]byte, []int) {\n\treturn file_console_proto_rawDescGZIP(), []int{7}\n}", "func (*CMsgClientToGCPlayerStatsRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{143}\n}", "func (*AppendPermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_app_config_agent_cmd_grpcserver_proto_api_app_config_proto_rawDescGZIP(), []int{5}\n}", "func (*AddInstanceRequest) Descriptor() ([]byte, []int) {\n\treturn file_myshoes_proto_rawDescGZIP(), []int{0}\n}", "func (*DeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_internal_proto_crypto_proto_rawDescGZIP(), []int{10}\n}", "func (x *fastReflection_Bech32PrefixRequest) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_Bech32PrefixRequest\n}", 
"func (*ControlRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_gateway_v1_control_proto_rawDescGZIP(), []int{0}\n}", "func (*MemberRuleSettingAddReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{90}\n}", "func (*RollbackAppendPermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_app_config_agent_cmd_grpcserver_proto_api_app_config_proto_rawDescGZIP(), []int{7}\n}", "func (*AddMachineKeyRequest) Descriptor() ([]byte, []int) {\n\treturn file_management_proto_rawDescGZIP(), []int{21}\n}", "func (*PlayerAddRequest) Descriptor() ([]byte, []int) {\n\treturn file_player_proto_rawDescGZIP(), []int{2}\n}", "func (*UpdateIngressRuleRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_appengine_v1_appengine_proto_rawDescGZIP(), []int{26}\n}", "func (*ChangeRequest) Descriptor() ([]byte, []int) {\n\treturn file_authorization_proto_rawDescGZIP(), []int{0}\n}", "func (*AddBrokerCertificateRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_iot_broker_v1_broker_service_proto_rawDescGZIP(), []int{11}\n}", "func (*MemberLevelDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{52}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_interservice_license_control_license_control_proto_rawDescGZIP(), []int{9}\n}", "func (*BatchUpdateReferencesRequest_Request) Descriptor() ([]byte, []int) {\n\treturn file_pkg_proto_icas_icas_proto_rawDescGZIP(), []int{1, 0}\n}", "func (*WatchRequestTypeProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{25}\n}", "func (*UpdateTokenRequest) Descriptor() ([]byte, []int) {\n\treturn file_access_service_token_proto_rawDescGZIP(), []int{2}\n}", "func (*MarginLevelsRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_trading_proto_rawDescGZIP(), []int{49}\n}", "func (*RemoveCheckRequest) Descriptor() ([]byte, []int) {\n\treturn 
file_api_ocp_check_api_ocp_check_api_proto_rawDescGZIP(), []int{10}\n}", "func (*WriteRequest) Descriptor() ([]byte, []int) {\n\treturn file_authzed_api_v0_acl_service_proto_rawDescGZIP(), []int{3}\n}", "func (*WithdrawalRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_trading_proto_rawDescGZIP(), []int{132}\n}", "func (*OutdatedRequest) Descriptor() ([]byte, []int) {\n\treturn file_cc_arduino_cli_commands_v1_commands_proto_rawDescGZIP(), []int{12}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_app_config_agent_cmd_grpcserver_proto_api_app_config_proto_rawDescGZIP(), []int{17}\n}", "func (*CMsgClientToGCUnderDraftRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{367}\n}", "func (*UpdateTelemetryReportedRequest) Descriptor() ([]byte, []int) {\n\treturn file_external_applications_applications_proto_rawDescGZIP(), []int{29}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_todo_proto_rawDescGZIP(), []int{5}\n}", "func (*CheckRequest) Descriptor() ([]byte, []int) {\n\treturn file_authzed_api_v0_acl_service_proto_rawDescGZIP(), []int{5}\n}", "func (*UpdateNetworkRequest) Descriptor() ([]byte, []int) {\n\treturn file_packetbroker_api_iam_v1_service_proto_rawDescGZIP(), []int{8}\n}", "func (*AddMetaRequest) Descriptor() ([]byte, []int) {\n\treturn file_token_balance_proto_rawDescGZIP(), []int{12}\n}", "func (*GetRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_comments_proto_rawDescGZIP(), []int{3}\n}", "func (*AddRefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_auth_auth_proto_rawDescGZIP(), []int{10}\n}", "func (*AddMockRequest) Descriptor() ([]byte, []int) {\n\treturn file_mocking_service_proto_rawDescGZIP(), []int{0}\n}", "func (*UpdateHookRequest) Descriptor() ([]byte, []int) {\n\treturn file_hook_proto_rawDescGZIP(), []int{4}\n}", "func (*MemberTagAddReq) Descriptor() ([]byte, []int) {\n\treturn 
file_ums_proto_rawDescGZIP(), []int{108}\n}", "func (*RefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{16}\n}", "func (*CalculatorRequest) Descriptor() ([]byte, []int) {\n\treturn file_basicpb_unary_api_proto_rawDescGZIP(), []int{4}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_exercicio_proto_rawDescGZIP(), []int{7}\n}", "func (*UpdateReq) Descriptor() ([]byte, []int) {\n\treturn file_internal_proto_crypto_proto_rawDescGZIP(), []int{8}\n}", "func (*CreateFriendReq) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{0}\n}", "func (*CreateCheckpointRequest) Descriptor() ([]byte, []int) {\n\treturn file_keepsake_proto_rawDescGZIP(), []int{2}\n}", "func (*MarginLevelsSubscribeRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_trading_proto_rawDescGZIP(), []int{47}\n}", "func (*AddCommentReq) Descriptor() ([]byte, []int) {\n\treturn file_proto_comment_comment_proto_rawDescGZIP(), []int{6}\n}", "func (*MemberLoginLogAddReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{54}\n}", "func (*RenewDelegationTokenRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{5}\n}", "func (*MemberReceiveAddressDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{88}\n}", "func (*TelemetryRequest) Descriptor() ([]byte, []int) {\n\treturn file_interservice_license_control_license_control_proto_rawDescGZIP(), []int{11}\n}", "func (*DeleteMicroRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_micro_pb_request_proto_rawDescGZIP(), []int{4}\n}", "func (*CreateWithdrawRequest) Descriptor() ([]byte, []int) {\n\treturn file_services_temporal_service_proto_rawDescGZIP(), []int{0}\n}", "func (*UpdateLimitRequest) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_limit_service_proto_rawDescGZIP(), 
[]int{9}\n}", "func (*AddLiftRequest) Descriptor() ([]byte, []int) {\n\treturn file_API_session_proto_rawDescGZIP(), []int{0}\n}", "func (*UpgradeRuntimeRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_notebooks_v1_managed_service_proto_rawDescGZIP(), []int{9}\n}", "func (*InterfaceAddRequest) Descriptor() ([]byte, []int) {\n\treturn file_jnx_interfaces_service_proto_rawDescGZIP(), []int{12}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_engine_proto_rawDescGZIP(), []int{12}\n}", "func (*DeleteFeedbackRequest) Descriptor() ([]byte, []int) {\n\treturn file_feedbackreq_proto_rawDescGZIP(), []int{6}\n}", "func (*CMsgProfileRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{275}\n}", "func (*MemberDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{7}\n}", "func (*PatchCollectorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{161}\n}", "func (*ValidateClientCredentialRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_micro_pb_request_proto_rawDescGZIP(), []int{0}\n}" ]
[ "0.71223587", "0.698026", "0.68898994", "0.6857225", "0.67590207", "0.67427474", "0.67198485", "0.6710893", "0.66748834", "0.6674649", "0.66576546", "0.6655914", "0.6629135", "0.6626807", "0.6617738", "0.66125524", "0.6579504", "0.6575033", "0.65688556", "0.6568845", "0.65657747", "0.65649956", "0.6562206", "0.6558641", "0.6554327", "0.65524167", "0.6548536", "0.6544852", "0.6537183", "0.65285635", "0.65261984", "0.6525718", "0.65197337", "0.6512616", "0.6510023", "0.650672", "0.6502714", "0.65021276", "0.6499182", "0.6495064", "0.64947057", "0.64944947", "0.6493683", "0.6484797", "0.64847606", "0.6478963", "0.647865", "0.64741504", "0.6473329", "0.6469736", "0.64681244", "0.6467158", "0.646614", "0.6457722", "0.64556336", "0.6455386", "0.6451455", "0.64513344", "0.64426666", "0.64410347", "0.6433083", "0.6431107", "0.6430378", "0.6429819", "0.6425565", "0.6425279", "0.6418118", "0.64133686", "0.64121246", "0.6410763", "0.640919", "0.64089173", "0.64087355", "0.64074975", "0.6406293", "0.64020216", "0.6400482", "0.6399319", "0.6398838", "0.63975185", "0.6397487", "0.6396813", "0.63965577", "0.6394357", "0.6392978", "0.63929677", "0.63914084", "0.63912743", "0.63906467", "0.6388079", "0.6387804", "0.6385782", "0.63819146", "0.6381661", "0.6380932", "0.63801056", "0.63760066", "0.6375921", "0.6374243", "0.63739365" ]
0.7387112
0
Deprecated: Use AssignAck.ProtoReflect.Descriptor instead.
func (*AssignAck) Descriptor() ([]byte, []int) { return file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{4} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*Ack) Descriptor() ([]byte, []int) {\n\treturn file_chatMsg_msg_proto_rawDescGZIP(), []int{0}\n}", "func (*EpochChangeAck) Descriptor() ([]byte, []int) {\n\treturn file_msgs_msgs_proto_rawDescGZIP(), []int{22}\n}", "func (*AckRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_acknowledge_proto_rawDescGZIP(), []int{0}\n}", "func (*RequestAck) Descriptor() ([]byte, []int) {\n\treturn file_msgs_msgs_proto_rawDescGZIP(), []int{15}\n}", "func (*Ack) Descriptor() ([]byte, []int) {\n\treturn file_publisher_proto_rawDescGZIP(), []int{1}\n}", "func (*AckResponse) Descriptor() ([]byte, []int) {\n\treturn file_proto_acknowledge_proto_rawDescGZIP(), []int{1}\n}", "func (*MoveAcknowledgment) Descriptor() ([]byte, []int) {\n\treturn file_FillerGame_proto_rawDescGZIP(), []int{4}\n}", "func (*PullFrameFromFlowResponse_Ack) Descriptor() ([]byte, []int) {\n\treturn file_pull_frame_from_flow_proto_rawDescGZIP(), []int{1, 0}\n}", "func (*CCLCMsg_BaselineAck) Descriptor() ([]byte, []int) {\n\treturn file_netmessages_proto_rawDescGZIP(), []int{17}\n}", "func (*CCLCMsg_BaselineAck) Descriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{17}\n}", "func (*EventAck) Descriptor() ([]byte, []int) {\n\treturn file_arista_event_v1_event_proto_rawDescGZIP(), []int{2}\n}", "func (*AcknowledgeRequest) Descriptor() ([]byte, []int) {\n\treturn file_toit_api_data_proto_rawDescGZIP(), []int{12}\n}", "func (*TxAcknowledgment) Descriptor() ([]byte, []int) {\n\treturn file_ttn_lorawan_v3_messages_proto_rawDescGZIP(), []int{2}\n}", "func (*ClusterRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{8}\n}", "func (*CDOTABroadcastMsg) Descriptor() ([]byte, []int) {\n\treturn file_dota_broadcastmessages_proto_rawDescGZIP(), []int{0}\n}", "func (*AckWebsocketRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_webview_view_proto_rawDescGZIP(), []int{14}\n}", "func (*RefreshCallQueueRequestProto) 
Descriptor() ([]byte, []int) {\n\treturn file_RefreshCallQueueProtocol_proto_rawDescGZIP(), []int{0}\n}", "func (*StorageObjectAck) Descriptor() ([]byte, []int) {\n\treturn file_api_proto_rawDescGZIP(), []int{81}\n}", "func (*CSVCMsg_FixAngle) Descriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{34}\n}", "func (*AnalyzeReply) Descriptor() ([]byte, []int) {\n\treturn file_grpcapi_sequencelabeler_proto_rawDescGZIP(), []int{2}\n}", "func (*AcknowledgeResponse) Descriptor() ([]byte, []int) {\n\treturn file_toit_api_data_proto_rawDescGZIP(), []int{13}\n}", "func (*CSVCMsg_FixAngle) Descriptor() ([]byte, []int) {\n\treturn file_netmessages_proto_rawDescGZIP(), []int{34}\n}", "func (*RefreshServiceAclRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_RefreshAuthorizationPolicyProtocol_proto_rawDescGZIP(), []int{0}\n}", "func (*GatewayTxAcknowledgment) Descriptor() ([]byte, []int) {\n\treturn file_ttn_lorawan_v3_messages_proto_rawDescGZIP(), []int{3}\n}", "func (*ClusterReq) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{7}\n}", "func (*Approve) Descriptor() ([]byte, []int) {\n\treturn file_messages_proto_rawDescGZIP(), []int{4}\n}", "func (*Topic) Descriptor() ([]byte, []int) {\n\treturn file_feedbackreq_proto_rawDescGZIP(), []int{2}\n}", "func (*Answer) Descriptor() ([]byte, []int) {\n\treturn file_protos_clac_proto_rawDescGZIP(), []int{1}\n}", "func (*DownlinkQueueRequest) Descriptor() ([]byte, []int) {\n\treturn file_ttn_lorawan_v3_messages_proto_rawDescGZIP(), []int{17}\n}", "func (*AddKeeperReq) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{3}\n}", "func (*CMsgAckSharedPaintBuffers) Descriptor() ([]byte, []int) {\n\treturn file_steam_htmlmessages_proto_rawDescGZIP(), []int{68}\n}", "func (*ClientUpdatePendingMessageDelivery) Descriptor() ([]byte, []int) {\n\treturn file_client_updates_proto_rawDescGZIP(), []int{0}\n}", "func 
(*GroupAddRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{33}\n}", "func (*EvictWritersRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{10}\n}", "func (*RefreshNamenodesRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{2}\n}", "func (*AckWebsocketResponse) Descriptor() ([]byte, []int) {\n\treturn file_pkg_webview_view_proto_rawDescGZIP(), []int{15}\n}", "func (*RefreshCallQueueResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_RefreshCallQueueProtocol_proto_rawDescGZIP(), []int{1}\n}", "func (*CMsgDOTABroadcastNotification) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{5}\n}", "func (*AnalysisMessageWeakSchema_ArgType) Descriptor() ([]byte, []int) {\n\treturn file_analysis_v1alpha1_message_proto_rawDescGZIP(), []int{1, 0}\n}", "func (*Action) Descriptor() ([]byte, []int) {\n\treturn file_chat_v1_messages_proto_rawDescGZIP(), []int{1}\n}", "func (*CSVCMsg_Prefetch) Descriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{36}\n}", "func (*CancelPlanResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{23}\n}", "func (*TriggerActionResponse) Descriptor() ([]byte, []int) {\n\treturn file_chat_v1_messages_proto_rawDescGZIP(), []int{16}\n}", "func (*CSVCMsg_Prefetch) Descriptor() ([]byte, []int) {\n\treturn file_netmessages_proto_rawDescGZIP(), []int{36}\n}", "func (*Message12821) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{2}\n}", "func (*ApplicationDownlink_ConfirmedRetry) Descriptor() ([]byte, []int) {\n\treturn file_ttn_lorawan_v3_messages_proto_rawDescGZIP(), []int{9, 1}\n}", "func (*AnalysisMessageWeakSchema) Descriptor() ([]byte, []int) {\n\treturn 
file_analysis_v1alpha1_message_proto_rawDescGZIP(), []int{1}\n}", "func (*SwitchKeeperRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{6}\n}", "func (*CancelPlanRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{22}\n}", "func (*Checkpoint) Descriptor() ([]byte, []int) {\n\treturn file_msgs_msgs_proto_rawDescGZIP(), []int{19}\n}", "func (*KeepAlive) Descriptor() ([]byte, []int) {\n\treturn file_chatMsg_msg_proto_rawDescGZIP(), []int{4}\n}", "func (*Message6578) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{7}\n}", "func (*Message7511) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{16}\n}", "func (*EvictWritersResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{11}\n}", "func (*Message12774) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{0}\n}", "func (*Message6024) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{26}\n}", "func (*Message12818) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{5}\n}", "func (*RenewDelegationTokenRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{5}\n}", "func (*AssignmentFailure) Descriptor() ([]byte, []int) {\n\treturn file_api_backend_proto_rawDescGZIP(), []int{8}\n}", "func (*Message12817) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{22}\n}", "func (*Deprecation) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{8}\n}", "func (*Message5881) 
Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{9}\n}", "func (*DecreaseAllowance) Descriptor() ([]byte, []int) {\n\treturn file_messages_proto_rawDescGZIP(), []int{6}\n}", "func (*Message6108) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{31}\n}", "func (*TopicMessage) Descriptor() ([]byte, []int) {\n\treturn file_packetbroker_api_routing_v2_service_proto_rawDescGZIP(), []int{9}\n}", "func (*DownlinkMessage) Descriptor() ([]byte, []int) {\n\treturn file_ttn_lorawan_v3_messages_proto_rawDescGZIP(), []int{1}\n}", "func (*CMsgDOTAKickedFromMatchmakingQueue) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{21}\n}", "func (*NewEnforcerReply) Descriptor() ([]byte, []int) {\n\treturn file_proto_casbin_proto_rawDescGZIP(), []int{1}\n}", "func (*Message6110) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{10}\n}", "func (*MsgWithRequired) Descriptor() ([]byte, []int) {\n\treturn file_jsonpb_proto_test2_proto_rawDescGZIP(), []int{9}\n}", "func (*IncreaseAllowance) Descriptor() ([]byte, []int) {\n\treturn file_messages_proto_rawDescGZIP(), []int{5}\n}", "func (*Message7920) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{20}\n}", "func (*AssignmentGroup) Descriptor() ([]byte, []int) {\n\treturn file_api_backend_proto_rawDescGZIP(), []int{7}\n}", "func (*Message12820) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{3}\n}", "func (*UplinkMessage) Descriptor() ([]byte, []int) {\n\treturn file_ttn_lorawan_v3_messages_proto_rawDescGZIP(), []int{0}\n}", "func (*Message6127) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), 
[]int{24}\n}", "func (*PatchCollectorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{161}\n}", "func (*MessageHubApproveRequest) Descriptor() ([]byte, []int) {\n\treturn file_messagehub_proto_rawDescGZIP(), []int{5}\n}", "func (*GroupRemoveRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{34}\n}", "func (*Message12796) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{1}\n}", "func (*CSVCMsg_SetPause) Descriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{28}\n}", "func (*Message5903) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{34}\n}", "func (*MemberReceiveAddressDeleteReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{88}\n}", "func (*TriggerActionRequest) Descriptor() ([]byte, []int) {\n\treturn file_chat_v1_messages_proto_rawDescGZIP(), []int{15}\n}", "func (*Reply) Descriptor() ([]byte, []int) {\n\treturn file_my_task_my_task_proto_rawDescGZIP(), []int{1}\n}", "func (*TcpKeepalive) Descriptor() ([]byte, []int) {\n\treturn file_envoy_config_core_v3_address_proto_rawDescGZIP(), []int{2}\n}", "func (*DeleteFeedbackRequest) Descriptor() ([]byte, []int) {\n\treturn file_feedbackreq_proto_rawDescGZIP(), []int{6}\n}", "func (*CBroadcast_WebRTCSetAnswer_Notification) Descriptor() ([]byte, []int) {\n\treturn file_steammessages_broadcast_steamclient_proto_rawDescGZIP(), []int{75}\n}", "func (*ForwardRequest) Descriptor() ([]byte, []int) {\n\treturn file_msgs_msgs_proto_rawDescGZIP(), []int{13}\n}", "func (*CSVCMsg_HltvReplay) Descriptor() ([]byte, []int) {\n\treturn file_netmessages_proto_rawDescGZIP(), []int{51}\n}", "func (*CSVCMsg_HltvReplay) Descriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{51}\n}", "func (*MsgPack) 
Descriptor() ([]byte, []int) {\n\treturn file_chatMsg_msg_proto_rawDescGZIP(), []int{2}\n}", "func (*RaftClientRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{26}\n}", "func (*RevokeCertificateRequest) Descriptor() ([]byte, []int) {\n\treturn file_sa_proto_rawDescGZIP(), []int{33}\n}", "func (x *fastReflection_MsgSetWithdrawAddress) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_MsgSetWithdrawAddress\n}", "func (*Message7921) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{19}\n}", "func (*AddBrokerCertificateRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_iot_broker_v1_broker_service_proto_rawDescGZIP(), []int{11}\n}", "func (*Message7865) Descriptor() ([]byte, []int) {\n\treturn file_datasets_google_message4_benchmark_message4_2_proto_rawDescGZIP(), []int{15}\n}", "func (*CCLCMsg_HltvReplay) Descriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{52}\n}", "func (*AddPeerRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{8}\n}" ]
[ "0.6931855", "0.6785189", "0.67617804", "0.6728953", "0.65926194", "0.6559347", "0.6514554", "0.64602405", "0.6412379", "0.64036167", "0.624473", "0.62233335", "0.6151008", "0.6145046", "0.61359626", "0.6081008", "0.6074918", "0.60593617", "0.60510236", "0.60306334", "0.60263705", "0.60232085", "0.59805846", "0.5970373", "0.5961037", "0.5958772", "0.5953138", "0.59257346", "0.58914053", "0.58899105", "0.5884694", "0.58806396", "0.58773607", "0.58768475", "0.5875401", "0.58736825", "0.58660775", "0.5862988", "0.5856513", "0.58370477", "0.5833868", "0.583158", "0.5829422", "0.5827609", "0.5827393", "0.58269525", "0.58249015", "0.58224493", "0.5809792", "0.5807087", "0.5800557", "0.57987267", "0.57971644", "0.57971585", "0.57962763", "0.5792581", "0.5789384", "0.5788274", "0.5788157", "0.5777743", "0.57771146", "0.577581", "0.5772932", "0.5764816", "0.5761675", "0.576164", "0.5760634", "0.57597727", "0.5756312", "0.5756102", "0.575592", "0.5753866", "0.5753537", "0.5752993", "0.575175", "0.57466495", "0.5746242", "0.5741718", "0.5739966", "0.5739557", "0.57384944", "0.5736139", "0.5734648", "0.5734148", "0.57333815", "0.57306707", "0.57303405", "0.5728583", "0.572761", "0.5721718", "0.5720787", "0.57184654", "0.5717135", "0.5716778", "0.5714086", "0.57125014", "0.5711893", "0.5711516", "0.571046", "0.57099503" ]
0.7436857
0
Deprecated: Use SwitchKeeperReq.ProtoReflect.Descriptor instead.
func (*SwitchKeeperReq) Descriptor() ([]byte, []int) { return file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{5} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*ControlRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_gateway_v1_control_proto_rawDescGZIP(), []int{0}\n}", "func (*CheckRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_api_servicecontrol_v1_service_controller_proto_rawDescGZIP(), []int{0}\n}", "func (*SwitchRuntimeRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_notebooks_v1_managed_service_proto_rawDescGZIP(), []int{7}\n}", "func (*WatchRequest) Descriptor() ([]byte, []int) {\n\treturn file_authzed_api_v0_watch_service_proto_rawDescGZIP(), []int{0}\n}", "func (*SwitchKeeperRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{6}\n}", "func (*WatchRequestTypeProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{25}\n}", "func (x *fastReflection_Bech32PrefixRequest) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_Bech32PrefixRequest\n}", "func (*ProbeRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_gateway_v1_control_proto_rawDescGZIP(), []int{2}\n}", "func (*ChangeRequest) Descriptor() ([]byte, []int) {\n\treturn file_management_proto_rawDescGZIP(), []int{2}\n}", "func (*ChangeRequest) Descriptor() ([]byte, []int) {\n\treturn file_management_proto_rawDescGZIP(), []int{2}\n}", "func (*RemoveKeeperReq) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{2}\n}", "func (*ListenRequest) Descriptor() ([]byte, []int) {\n\treturn file_faultinjector_proto_rawDescGZIP(), []int{8}\n}", "func (*AddPeerRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*AddKeeperReq) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{3}\n}", "func (*TelemetryRequest) Descriptor() ([]byte, []int) {\n\treturn file_interservice_license_control_license_control_proto_rawDescGZIP(), []int{11}\n}", "func 
(*PatchCollectorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{161}\n}", "func (*FeedbackRequest) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{10}\n}", "func (*ChangeRequest) Descriptor() ([]byte, []int) {\n\treturn file_authorization_proto_rawDescGZIP(), []int{0}\n}", "func (*PatchKeysRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{74}\n}", "func (*ModelControlRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_service_proto_rawDescGZIP(), []int{4}\n}", "func (*SelectorVerificationReq) Descriptor() ([]byte, []int) {\n\treturn file_proto_selector_verification_msgs_proto_rawDescGZIP(), []int{0}\n}", "func (*CMsgGCPlayerInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{117}\n}", "func (*NewChromeRequest) Descriptor() ([]byte, []int) {\n\treturn file_check_power_menu_service_proto_rawDescGZIP(), []int{0}\n}", "func (*CMsgClientToGCWageringRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{169}\n}", "func (*ModifyRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_engine_proto_rawDescGZIP(), []int{10}\n}", "func (*GetVersionRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{9}\n}", "func (*ChangePasswordRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_Ultimate_Super_WebDev_Corp_gateway_services_customer_customer_proto_rawDescGZIP(), []int{1}\n}", "func (*ListenRequest) Descriptor() ([]byte, []int) {\n\treturn file_threads_proto_rawDescGZIP(), []int{46}\n}", "func (*DiagnoseRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_api_proto_rawDescGZIP(), []int{16}\n}", "func (*GetPeerInfoRequest) Descriptor() ([]byte, []int) {\n\treturn 
file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{6}\n}", "func (*ChangeModeRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_bfd_bfd_proto_rawDescGZIP(), []int{5}\n}", "func (*MemberLevelUpdateReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{50}\n}", "func (*RefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{16}\n}", "func (*StateRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_containerd_containerd_runtime_v1_shim_v1_shim_proto_rawDescGZIP(), []int{7}\n}", "func (*GetRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_comments_proto_rawDescGZIP(), []int{3}\n}", "func (*UpdateMachineRequest) Descriptor() ([]byte, []int) {\n\treturn file_management_proto_rawDescGZIP(), []int{20}\n}", "func (*UpdatePermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{9}\n}", "func (*TelemetryRequest) Descriptor() ([]byte, []int) {\n\treturn file_automate_gateway_api_telemetry_telemetry_proto_rawDescGZIP(), []int{0}\n}", "func (*SelectorVerificationsReq) Descriptor() ([]byte, []int) {\n\treturn file_proto_selector_verification_msgs_proto_rawDescGZIP(), []int{2}\n}", "func (*UpdateNetworkRequest) Descriptor() ([]byte, []int) {\n\treturn file_packetbroker_api_iam_v1_service_proto_rawDescGZIP(), []int{8}\n}", "func (*RevokeJobRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_noderpc_proto_feeds_manager_proto_rawDescGZIP(), []int{20}\n}", "func (*PollCredentialOffersRequest) Descriptor() ([]byte, []int) {\n\treturn file_messages_proto_rawDescGZIP(), []int{30}\n}", "func (*CMsgClientToGCPlayerStatsRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{143}\n}", "func (*SignalRequest) Descriptor() ([]byte, []int) {\n\treturn file_cmd_server_grpc_proto_sfu_proto_rawDescGZIP(), 
[]int{0}\n}", "func (*Request) Descriptor() ([]byte, []int) {\n\treturn file_Trd_ModifyOrder_proto_rawDescGZIP(), []int{2}\n}", "func (*DeleteFeedbackRequest) Descriptor() ([]byte, []int) {\n\treturn file_feedbackreq_proto_rawDescGZIP(), []int{6}\n}", "func (*WatchLimitsRequest) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_limit_service_proto_rawDescGZIP(), []int{7}\n}", "func (*SetTraceRequest) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*UpgradeRuntimeRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_notebooks_v1_managed_service_proto_rawDescGZIP(), []int{9}\n}", "func (*UpgradeReq) Descriptor() ([]byte, []int) {\n\treturn file_helm_api_proto_rawDescGZIP(), []int{8}\n}", "func (*MoneyRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_swap_swap_proto_rawDescGZIP(), []int{0}\n}", "func (*CBroadcast_WebRTCStopped_Request) Descriptor() ([]byte, []int) {\n\treturn file_steammessages_broadcast_steamclient_proto_rawDescGZIP(), []int{47}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_Ultimate_Super_WebDev_Corp_gateway_services_customer_customer_proto_rawDescGZIP(), []int{4}\n}", "func (*RenameReq) Descriptor() ([]byte, []int) {\n\treturn file_dfs_proto_rawDescGZIP(), []int{4}\n}", "func (*DeleteWalletLedgerRequest) Descriptor() ([]byte, []int) {\n\treturn file_console_proto_rawDescGZIP(), []int{10}\n}", "func (*ReportRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_api_servicecontrol_v1_service_controller_proto_rawDescGZIP(), []int{2}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_recordwants_proto_rawDescGZIP(), []int{6}\n}", "func (*ConnectedRequest) Descriptor() ([]byte, []int) {\n\treturn file_vm_vm_proto_rawDescGZIP(), []int{28}\n}", "func (*PatchWorkflowVersionsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), 
[]int{143}\n}", "func (*CodeLensRequest) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{163}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_interservice_license_control_license_control_proto_rawDescGZIP(), []int{9}\n}", "func (*DisconnectedServicesReq) Descriptor() ([]byte, []int) {\n\treturn file_external_applications_applications_proto_rawDescGZIP(), []int{6}\n}", "func (*FeedbackRequest) Descriptor() ([]byte, []int) {\n\treturn file_feedbackreq_proto_rawDescGZIP(), []int{3}\n}", "func (*PatchModelVersionsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{86}\n}", "func (*RenameRequest) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{194}\n}", "func (*UpgradeRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_pb_protobuf_api_proto_rawDescGZIP(), []int{6}\n}", "func (*WatchConfigMapsRequest) Descriptor() ([]byte, []int) {\n\treturn edgelq_applications_proto_v1alpha2_config_map_service_proto_rawDescGZIP(), []int{7}\n}", "func (*UpdateBrokerRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_iot_broker_v1_broker_service_proto_rawDescGZIP(), []int{5}\n}", "func (*SetStrategyRequest) Descriptor() ([]byte, []int) {\n\treturn file_rpc_proto_rawDescGZIP(), []int{21}\n}", "func (*GenerateProductMixIdeasRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{7}\n}", "func (*PrepareFeedbackRequest) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{9}\n}", "func (*UpdateHookRequest) Descriptor() ([]byte, []int) {\n\treturn file_hook_proto_rawDescGZIP(), []int{4}\n}", "func (*WaitRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_containerd_containerd_runtime_v1_shim_v1_shim_proto_rawDescGZIP(), []int{18}\n}", "func (*GetRequest) Descriptor() 
([]byte, []int) {\n\treturn file_vote_proto_rawDescGZIP(), []int{9}\n}", "func (*Deprecation) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{8}\n}", "func (*DeviceRestartRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_worker_v1_device_state_proto_rawDescGZIP(), []int{0}\n}", "func (*CMsgProfileRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{275}\n}", "func (*ShowMessageRequestRequest) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{58}\n}", "func (*ToggleFavoriteTranslationRequest) Descriptor() ([]byte, []int) {\n\treturn file_translation_proto_rawDescGZIP(), []int{5}\n}", "func (*GetRequest) Descriptor() ([]byte, []int) {\n\treturn file_index_faults_rpc_rpc_proto_rawDescGZIP(), []int{2}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_dictybase_api_jsonapi_request_proto_rawDescGZIP(), []int{7}\n}", "func (*WatchLimitRequest) Descriptor() ([]byte, []int) {\n\treturn edgelq_limits_proto_v1alpha2_limit_service_proto_rawDescGZIP(), []int{5}\n}", "func (*CAccountHardware_VRCompatibilityCheck_Request) Descriptor() ([]byte, []int) {\n\treturn file_steammessages_accounthardware_steamclient_proto_rawDescGZIP(), []int{19}\n}", "func (*GetRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_app_config_agent_cmd_grpcserver_proto_api_app_config_proto_rawDescGZIP(), []int{15}\n}", "func (*ChangePasswordRequest) Descriptor() ([]byte, []int) {\n\treturn file_pb_auth_proto_rawDescGZIP(), []int{2}\n}", "func (*ChangeUpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_githubcard_proto_rawDescGZIP(), []int{23}\n}", "func (*DeviceStateRefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_worker_v1_device_state_proto_rawDescGZIP(), []int{4}\n}", "func (*HoverRequest) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{111}\n}", 
"func (*UpdateReq) Descriptor() ([]byte, []int) {\n\treturn file_internal_proto_crypto_proto_rawDescGZIP(), []int{8}\n}", "func (*PatchModelsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{80}\n}", "func (*GetServiceRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_appengine_v1_appengine_proto_rawDescGZIP(), []int{6}\n}", "func (*ModifyGatewayRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_pb_protobuf_api_proto_rawDescGZIP(), []int{20}\n}", "func (*ValidateClientCredentialRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_micro_pb_request_proto_rawDescGZIP(), []int{0}\n}", "func (*OutdatedRequest) Descriptor() ([]byte, []int) {\n\treturn file_cc_arduino_cli_commands_v1_commands_proto_rawDescGZIP(), []int{12}\n}", "func (*GenerateFeedbackRequest) Descriptor() ([]byte, []int) {\n\treturn file_feedbackreq_proto_rawDescGZIP(), []int{8}\n}", "func (*DescribeCheckRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_ocp_check_api_ocp_check_api_proto_rawDescGZIP(), []int{2}\n}", "func (*CapabilityRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_gnmi_gnmi_proto_rawDescGZIP(), []int{20}\n}", "func (*ToggleWorkflowRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_flow_grpc_workflows_proto_rawDescGZIP(), []int{35}\n}", "func (*SetStateRequest) Descriptor() ([]byte, []int) {\n\treturn file_vm_vm_proto_rawDescGZIP(), []int{3}\n}", "func (*GetRequest) Descriptor() ([]byte, []int) {\n\treturn file_dictybase_api_jsonapi_request_proto_rawDescGZIP(), []int{0}\n}" ]
[ "0.67672926", "0.67448694", "0.6714067", "0.6700932", "0.66939986", "0.6679988", "0.66392297", "0.6629699", "0.66290075", "0.66290075", "0.66270024", "0.6600582", "0.6593718", "0.65869516", "0.6586671", "0.65798324", "0.6578982", "0.65722543", "0.6559393", "0.6541644", "0.65267646", "0.65265423", "0.65179485", "0.6513421", "0.6503316", "0.649521", "0.6491715", "0.6480754", "0.6480587", "0.6468997", "0.6466369", "0.6466115", "0.646355", "0.64544034", "0.64454424", "0.64377344", "0.6436496", "0.6430463", "0.6424914", "0.64243186", "0.6419063", "0.6416301", "0.6412139", "0.64117485", "0.64084595", "0.64054406", "0.64028746", "0.6401506", "0.6395661", "0.6394999", "0.63918316", "0.63897383", "0.6383831", "0.6383094", "0.6381685", "0.63784", "0.63768744", "0.6375659", "0.63738394", "0.6372521", "0.63718444", "0.63693017", "0.63666797", "0.6364774", "0.63639015", "0.6362834", "0.63560724", "0.635578", "0.63546145", "0.6352296", "0.6351564", "0.63509506", "0.63493705", "0.6344447", "0.634378", "0.63431734", "0.6342378", "0.63421416", "0.6339831", "0.63387823", "0.63367987", "0.6333681", "0.63287485", "0.6328048", "0.63273853", "0.6322997", "0.63226646", "0.63218224", "0.63218033", "0.6321513", "0.63211846", "0.6320323", "0.6319321", "0.63172567", "0.63148344", "0.63144857", "0.6313351", "0.6312946", "0.6308311", "0.63081616" ]
0.7221775
0
Deprecated: Use SwitchKeeperRsp.ProtoReflect.Descriptor instead.
func (*SwitchKeeperRsp) Descriptor() ([]byte, []int) { return file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{6} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (SVC_Messages) EnumDescriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{4}\n}", "func (SVC_Messages) EnumDescriptor() ([]byte, []int) {\n\treturn file_netmessages_proto_rawDescGZIP(), []int{4}\n}", "func (*Deprecation) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{8}\n}", "func (*GroupRsp) Descriptor() ([]byte, []int) {\n\treturn file_chatMsg_msg_proto_rawDescGZIP(), []int{6}\n}", "func (*ChangeInfoRsp) Descriptor() ([]byte, []int) {\n\treturn file_Auth_Auth_proto_rawDescGZIP(), []int{5}\n}", "func (*GetRsp) Descriptor() ([]byte, []int) {\n\treturn file_grpc_proto_rawDescGZIP(), []int{1}\n}", "func (StatusMessage_Reference) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_devtools_clouddebugger_v2_data_proto_rawDescGZIP(), []int{1, 0}\n}", "func (Gateway_Listener_Protocol) EnumDescriptor() ([]byte, []int) {\n\treturn file_mesh_v1alpha1_gateway_proto_rawDescGZIP(), []int{0, 1, 0}\n}", "func (Listener_Protocol) EnumDescriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_loadbalancer_v1_network_load_balancer_proto_rawDescGZIP(), []int{2, 0}\n}", "func (CLC_Messages) EnumDescriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{1}\n}", "func (*Listen) Descriptor() ([]byte, []int) {\n\treturn file_pkg_smgrpc_smgrpc_proto_rawDescGZIP(), []int{4}\n}", "func (*AddPeerResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{30}\n}", "func (CBroadcast_WatchBroadcast_Response_EWatchResponse) EnumDescriptor() ([]byte, []int) {\n\treturn file_steammessages_broadcast_steamclient_proto_rawDescGZIP(), []int{8, 0}\n}", "func (ConfigureStatelessRequest_Cmd) EnumDescriptor() ([]byte, []int) {\n\treturn file_orc8r_protos_magmad_proto_rawDescGZIP(), []int{15, 0}\n}", "func (CLC_Messages) EnumDescriptor() ([]byte, []int) {\n\treturn 
file_netmessages_proto_rawDescGZIP(), []int{1}\n}", "func (x *fastReflection_MsgUpdateParamsResponse) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_MsgUpdateParamsResponse\n}", "func (SpanLayer) EnumDescriptor() ([]byte, []int) {\n\treturn file_common_trace_common_proto_rawDescGZIP(), []int{2}\n}", "func (*CancelPlanResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{23}\n}", "func (*RefreshResponse) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{17}\n}", "func (*QueryPlanStatusResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{25}\n}", "func (*ClusterRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{8}\n}", "func (CMsgClientToGCGiveTipResponse_Result) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{240, 0}\n}", "func (RouteLookupRequest_Reason) EnumDescriptor() ([]byte, []int) {\n\treturn file_grpc_lookup_v1_rls_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*AddPeerRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*UpdateFriendStatusRsp) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{3}\n}", "func (DOTA_WatchReplayType) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{0}\n}", "func (*GetDelegationTokenResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{4}\n}", "func (StandardPTransforms_DeprecatedPrimitives) EnumDescriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{4, 1}\n}", "func (NET_Messages) EnumDescriptor() ([]byte, []int) {\n\treturn 
file_csgo_netmessages_proto_rawDescGZIP(), []int{0}\n}", "func (EUnderDraftResponse) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{11}\n}", "func (Diagnostic_Kind) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_api_servicemanagement_v1_resources_proto_rawDescGZIP(), []int{2, 0}\n}", "func (*DecodeReply) Descriptor() ([]byte, []int) {\n\treturn file_proto_videoservice_proto_rawDescGZIP(), []int{1}\n}", "func (*CreateFriendRsp) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{1}\n}", "func (Retry_Conf_Grpc_RetryOn) EnumDescriptor() ([]byte, []int) {\n\treturn file_api_mesh_v1alpha1_retry_proto_rawDescGZIP(), []int{0, 0, 3, 0}\n}", "func (*ListenResponse) Descriptor() ([]byte, []int) {\n\treturn file_faultinjector_proto_rawDescGZIP(), []int{9}\n}", "func (StandardProtocols_Enum) EnumDescriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{54, 0}\n}", "func (ProbeConf_Method) EnumDescriptor() ([]byte, []int) {\n\treturn file_github_com_google_cloudprober_probes_http_proto_config_proto_rawDescGZIP(), []int{0, 1}\n}", "func (*RefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{16}\n}", "func (*RefreshCallQueueResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_RefreshCallQueueProtocol_proto_rawDescGZIP(), []int{1}\n}", "func (*SwitchKeeperReq) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{5}\n}", "func (Span_SpanKind) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_devtools_cloudtrace_v2_trace_proto_rawDescGZIP(), []int{0, 0}\n}", "func (RefType) EnumDescriptor() ([]byte, []int) {\n\treturn file_common_trace_common_proto_rawDescGZIP(), []int{1}\n}", "func (Span_Link_Type) EnumDescriptor() ([]byte, []int) {\n\treturn 
file_google_devtools_cloudtrace_v2_trace_proto_rawDescGZIP(), []int{0, 3, 0}\n}", "func (*TraceProto) Descriptor() ([]byte, []int) {\n\treturn file_internal_tracing_extended_extended_trace_proto_rawDescGZIP(), []int{0}\n}", "func (*DiagOperation) Descriptor() ([]byte, []int) {\n\treturn file_testvector_tv_proto_rawDescGZIP(), []int{10}\n}", "func (ProbeConf_Type) EnumDescriptor() ([]byte, []int) {\n\treturn file_github_com_cloudprober_cloudprober_probes_udplistener_proto_config_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*GetPeerInfoResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{28}\n}", "func (CP2P_Voice_Handler_Flags) EnumDescriptor() ([]byte, []int) {\n\treturn file_c_peer2peer_netmessages_proto_rawDescGZIP(), []int{2, 0}\n}", "func (CMsgClientToGCRecyclePlayerCardResponse_Result) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{215, 0}\n}", "func (NET_Messages) EnumDescriptor() ([]byte, []int) {\n\treturn file_netmessages_proto_rawDescGZIP(), []int{0}\n}", "func (*ScheduleDownlinkResponse) Descriptor() ([]byte, []int) {\n\treturn file_ttn_lorawan_v3_gatewayserver_proto_rawDescGZIP(), []int{2}\n}", "func (*HelloRsp) Descriptor() ([]byte, []int) {\n\treturn file_helloword_proto_rawDescGZIP(), []int{1}\n}", "func (*EvictWritersResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{11}\n}", "func (ConstantSampler_ConstantDecision) EnumDescriptor() ([]byte, []int) {\n\treturn file_opencensus_proto_trace_v1_trace_config_proto_rawDescGZIP(), []int{2, 0}\n}", "func (*ListenRequest) Descriptor() ([]byte, []int) {\n\treturn file_faultinjector_proto_rawDescGZIP(), []int{8}\n}", "func (Protocol) EnumDescriptor() ([]byte, []int) {\n\treturn file_github_com_ameliaikeda_protoc_gen_router_proto_router_proto_rawDescGZIP(), []int{0}\n}", "func (*StopBroadcastResponse) Descriptor() 
([]byte, []int) {\n\treturn file_services_core_protobuf_servers_proto_rawDescGZIP(), []int{5}\n}", "func (*DisconnectedRequest) Descriptor() ([]byte, []int) {\n\treturn file_vm_vm_proto_rawDescGZIP(), []int{29}\n}", "func (MsgSubType) EnumDescriptor() ([]byte, []int) {\n\treturn file_api_proto_global_Global_proto_rawDescGZIP(), []int{5}\n}", "func (Connector_Status) EnumDescriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_kafka_v1_connector_proto_rawDescGZIP(), []int{10, 1}\n}", "func (FileSystemWatcher_WatchKind) EnumDescriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{76, 0}\n}", "func (LightstepConfig_PropagationMode) EnumDescriptor() ([]byte, []int) {\n\treturn file_envoy_config_trace_v3_lightstep_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*CancelDelegationTokenResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{8}\n}", "func (ConstantSampler_ConstantDecision) EnumDescriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_external_envoy_config_trace_v3_opencensus_proto_rawDescGZIP(), []int{3, 0}\n}", "func (Strategy) EnumDescriptor() ([]byte, []int) {\n\treturn file_rpc_proto_rawDescGZIP(), []int{1}\n}", "func (EDevEventRequestResult) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{7}\n}", "func (ESupportEventRequestResult) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{8}\n}", "func (P2P_Messages) EnumDescriptor() ([]byte, []int) {\n\treturn file_c_peer2peer_netmessages_proto_rawDescGZIP(), []int{0}\n}", "func (CMsgClientToGCRecycleHeroRelicResponse_Result) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{268, 0}\n}", "func (EOverwatchConviction) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_common_proto_rawDescGZIP(), []int{7}\n}", "func 
(*MemberReceiveAddressUpdateResp) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{87}\n}", "func (Span_TimeEvent_MessageEvent_Type) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_devtools_cloudtrace_v2_trace_proto_rawDescGZIP(), []int{0, 1, 1, 0}\n}", "func (ListenNetworkEnum) EnumDescriptor() ([]byte, []int) {\n\treturn file_pkg_kascfg_kascfg_proto_rawDescGZIP(), []int{0}\n}", "func (*WatchResponse) Descriptor() ([]byte, []int) {\n\treturn file_authzed_api_v0_watch_service_proto_rawDescGZIP(), []int{1}\n}", "func (NET_Messages) EnumDescriptor() ([]byte, []int) {\n\treturn file_artifact_networkbasetypes_proto_rawDescGZIP(), []int{0}\n}", "func (SocketAddress_Protocol) EnumDescriptor() ([]byte, []int) {\n\treturn file_envoy_config_core_v3_address_proto_rawDescGZIP(), []int{1, 0}\n}", "func (*CheckResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_api_servicecontrol_v1_service_controller_proto_rawDescGZIP(), []int{1}\n}", "func (OpenCensusConfig_TraceContext) EnumDescriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_external_envoy_config_trace_v3_opencensus_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*ProbeResponse) Descriptor() ([]byte, []int) {\n\treturn file_proto_gateway_v1_control_proto_rawDescGZIP(), []int{3}\n}", "func (MSG) EnumDescriptor() ([]byte, []int) {\n\treturn file_chat_proto_rawDescGZIP(), []int{0}\n}", "func (PolicyBasedRoute_Warnings_Code) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_cloud_networkconnectivity_v1_policy_based_routing_proto_rawDescGZIP(), []int{0, 3, 0}\n}", "func (CMsgProfileUpdateResponse_Result) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{278, 0}\n}", "func (*SendFromMinerResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{26}\n}", "func (ListenReply_Action) EnumDescriptor() ([]byte, []int) 
{\n\treturn file_threads_proto_rawDescGZIP(), []int{47, 0}\n}", "func (CMsgDOTADestroyLobbyResponse_Result) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{258, 0}\n}", "func (Kind) EnumDescriptor() ([]byte, []int) {\n\treturn file_go_pkg_beam_core_runtime_harness_session_proto_rawDescGZIP(), []int{0}\n}", "func (WinRMProtocolType) EnumDescriptor() ([]byte, []int) {\n\treturn file_moc_common_computecommon_proto_rawDescGZIP(), []int{5}\n}", "func (*DeviceStateRefreshReply) Descriptor() ([]byte, []int) {\n\treturn file_api_worker_v1_device_state_proto_rawDescGZIP(), []int{5}\n}", "func (*SignalReply) Descriptor() ([]byte, []int) {\n\treturn file_cmd_server_grpc_proto_sfu_proto_rawDescGZIP(), []int{1}\n}", "func (*DisconnectedServicesReq) Descriptor() ([]byte, []int) {\n\treturn file_external_applications_applications_proto_rawDescGZIP(), []int{6}\n}", "func (*DiagnoseResponse) Descriptor() ([]byte, []int) {\n\treturn file_proto_api_proto_rawDescGZIP(), []int{17}\n}", "func (ReplyType) EnumDescriptor() ([]byte, []int) {\n\treturn file_fk_atlas_proto_rawDescGZIP(), []int{13}\n}", "func (*DiagnoseRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_api_proto_rawDescGZIP(), []int{16}\n}", "func (*RefreshCallQueueRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_RefreshCallQueueProtocol_proto_rawDescGZIP(), []int{0}\n}", "func (*GetPeerInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{6}\n}", "func (*SetTraceRequest) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*WinRMListener) Descriptor() ([]byte, []int) {\n\treturn file_moc_common_computecommon_proto_rawDescGZIP(), []int{0}\n}", "func (*RenewDelegationTokenResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{6}\n}", "func (*FeedbackRequest) Descriptor() 
([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{10}\n}", "func (*RefreshNamenodesResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{3}\n}" ]
[ "0.6646759", "0.66103727", "0.65684617", "0.65414816", "0.6525648", "0.6515199", "0.65136725", "0.6510185", "0.6498448", "0.64884603", "0.6486122", "0.6476423", "0.64567405", "0.6449851", "0.6436708", "0.64062786", "0.640551", "0.6400204", "0.6399626", "0.6393379", "0.63858473", "0.6380418", "0.63794726", "0.6377344", "0.63766587", "0.6366592", "0.63663375", "0.63643116", "0.63642377", "0.63621867", "0.6358558", "0.635826", "0.6358192", "0.63563603", "0.6355208", "0.63535726", "0.63454574", "0.6345231", "0.6342059", "0.63408595", "0.63406956", "0.6336321", "0.633601", "0.63358504", "0.6335744", "0.6334608", "0.6327953", "0.63261795", "0.63241047", "0.63168305", "0.63144165", "0.6312551", "0.6312306", "0.63089395", "0.63075423", "0.6303985", "0.6299445", "0.629137", "0.6287585", "0.628528", "0.6282853", "0.6275765", "0.6273658", "0.6259821", "0.62598085", "0.6258975", "0.6258675", "0.6252341", "0.62506515", "0.6249553", "0.6248571", "0.624788", "0.6247757", "0.62476796", "0.6245605", "0.6245536", "0.62448156", "0.62420434", "0.62420076", "0.6240708", "0.62403476", "0.62400836", "0.62376803", "0.6237537", "0.62356204", "0.6234964", "0.6234492", "0.62333965", "0.62330765", "0.6228893", "0.6228874", "0.6228836", "0.6228725", "0.6225904", "0.62251645", "0.6222487", "0.62187475", "0.62177664", "0.6217729", "0.6217605" ]
0.7286045
0
Deprecated: Use ClusterReq.ProtoReflect.Descriptor instead.
func (*ClusterReq) Descriptor() ([]byte, []int) { return file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{7} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*UpdateClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{5}\n}", "func (*GetClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{0}\n}", "func (*DeleteClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{7}\n}", "func (*UnregisterClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_grpc_proto_cluster_cluster_proto_rawDescGZIP(), []int{3}\n}", "func (*StopClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{11}\n}", "func (*GetClusterInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_uber_cadence_api_v1_service_workflow_proto_rawDescGZIP(), []int{20}\n}", "func (*CreateClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{3}\n}", "func (*StartClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{9}\n}", "func (*UpdatePermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{9}\n}", "func (*UpdateGameServerClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_gaming_v1beta_game_server_clusters_proto_rawDescGZIP(), []int{9}\n}", "func (*ListClustersRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{1}\n}", "func (*AddPeerRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*PreviewUpdateGameServerClusterRequest) Descriptor() ([]byte, []int) {\n\treturn 
file_google_cloud_gaming_v1beta_game_server_clusters_proto_rawDescGZIP(), []int{10}\n}", "func (*ListClusterOperationsRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{13}\n}", "func (*DescribePermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{6}\n}", "func (*PatchCollectorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{161}\n}", "func (*GetGameServerClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_gaming_v1beta_game_server_clusters_proto_rawDescGZIP(), []int{2}\n}", "func (*CMsgClientToGCPlayerStatsRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{143}\n}", "func (*QueryClusterMonitorDataReq) Descriptor() ([]byte, []int) {\n\treturn file_api_proto_rawDescGZIP(), []int{9}\n}", "func (*AddPermissionToRoleRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{7}\n}", "func (*GetPeerInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{6}\n}", "func (*QueryPermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{5}\n}", "func (*ModifyRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_engine_proto_rawDescGZIP(), []int{10}\n}", "func (*DeleteGameServerClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_gaming_v1beta_game_server_clusters_proto_rawDescGZIP(), []int{6}\n}", "func (*PatchAnnotationsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{4}\n}", "func (*QueryPermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_permission_pb_request_proto_rawDescGZIP(), []int{0}\n}", "func 
(*CMsgGCPlayerInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{117}\n}", "func (*CreateGameServerClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_gaming_v1beta_game_server_clusters_proto_rawDescGZIP(), []int{3}\n}", "func (*PatchConceptsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{34}\n}", "func (*CheckPermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_permission_pb_request_proto_rawDescGZIP(), []int{2}\n}", "func (*ClusterJoinRequest) Descriptor() ([]byte, []int) {\n\treturn file_metastateService_proto_rawDescGZIP(), []int{4}\n}", "func (*PreviewDeleteGameServerClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_gaming_v1beta_game_server_clusters_proto_rawDescGZIP(), []int{7}\n}", "func (*PatchTasksRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{154}\n}", "func (*ValidateRequest) Descriptor() ([]byte, []int) {\n\treturn file_protobuf_clusrun_proto_rawDescGZIP(), []int{17}\n}", "func (*PatchKeysRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{74}\n}", "func (*UpdateIngressRuleRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_appengine_v1_appengine_proto_rawDescGZIP(), []int{26}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_dictybase_api_jsonapi_request_proto_rawDescGZIP(), []int{7}\n}", "func (*PreviewCreateGameServerClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_gaming_v1beta_game_server_clusters_proto_rawDescGZIP(), []int{4}\n}", "func (*QueryRoleRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_permission_pb_request_proto_rawDescGZIP(), []int{1}\n}", "func (*RevokeJobRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_noderpc_proto_feeds_manager_proto_rawDescGZIP(), 
[]int{20}\n}", "func (*GetServiceRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_appengine_v1_appengine_proto_rawDescGZIP(), []int{6}\n}", "func (*RemovePermissionFromRoleRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{8}\n}", "func (*UpdateClusterMetadata) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{6}\n}", "func (x *fastReflection_QueryAccountInfoRequest) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_QueryAccountInfoRequest\n}", "func (*BatchUpdateReferencesRequest_Request) Descriptor() ([]byte, []int) {\n\treturn file_pkg_proto_icas_icas_proto_rawDescGZIP(), []int{1, 0}\n}", "func (*ContractQueryRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{22}\n}", "func (*GetModelVersionMetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{94}\n}", "func (*SetNodeConfigRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{47}\n}", "func (*MeasureDistanceRequest) Descriptor() ([]byte, []int) {\n\treturn file_coolenv_proto_rawDescGZIP(), []int{3}\n}", "func (x *fastReflection_Bech32PrefixRequest) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_Bech32PrefixRequest\n}", "func (*GetCollectorRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{163}\n}", "func (*MeshCertificateRequest) Descriptor() ([]byte, []int) {\n\treturn file_security_proto_providers_google_meshca_proto_rawDescGZIP(), []int{0}\n}", "func (*PatchCollaboratorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{21}\n}", "func (*ListClusterHostsRequest) Descriptor() ([]byte, []int) {\n\treturn 
file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{15}\n}", "func (*TelemetryRequest) Descriptor() ([]byte, []int) {\n\treturn file_interservice_license_control_license_control_proto_rawDescGZIP(), []int{11}\n}", "func (*ValidateClientCredentialRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_micro_pb_request_proto_rawDescGZIP(), []int{0}\n}", "func (*DeleteNodeFromNodeClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{10}\n}", "func (*PermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_casbin_proto_rawDescGZIP(), []int{13}\n}", "func (*PatchAnnotationsStatusRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{5}\n}", "func (*RefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{16}\n}", "func (*ComputeContactsRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_essentialcontacts_v1_service_proto_rawDescGZIP(), []int{7}\n}", "func (*PatchWorkflowVersionsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{143}\n}", "func (x *fastReflection_QueryAccountRequest) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_QueryAccountRequest\n}", "func (*PatchModelVersionsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{86}\n}", "func (*UpgradeRuntimeRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_notebooks_v1_managed_service_proto_rawDescGZIP(), []int{9}\n}", "func (*UpdateNetworkRequest) Descriptor() ([]byte, []int) {\n\treturn file_packetbroker_api_iam_v1_service_proto_rawDescGZIP(), []int{8}\n}", "func (*QueryRoleRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{2}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) 
{\n\treturn file_github_com_Ultimate_Super_WebDev_Corp_gateway_services_customer_customer_proto_rawDescGZIP(), []int{4}\n}", "func (*GetRequest) Descriptor() ([]byte, []int) {\n\treturn file_index_faults_rpc_rpc_proto_rawDescGZIP(), []int{2}\n}", "func (*GenerateProductMixIdeasRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{7}\n}", "func (*ForwardClusterJoinRequest) Descriptor() ([]byte, []int) {\n\treturn file_metastateService_proto_rawDescGZIP(), []int{8}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_exercicio_proto_rawDescGZIP(), []int{7}\n}", "func (*NodeGroupForNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{4}\n}", "func (*CreatePermssionRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{1}\n}", "func (*UpdateDatasetRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_automl_v1_service_proto_rawDescGZIP(), []int{4}\n}", "func (*ModelControlRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_service_proto_rawDescGZIP(), []int{4}\n}", "func (*UpdateRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_contact_proto_rawDescGZIP(), []int{12}\n}", "func (*GetRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_comments_proto_rawDescGZIP(), []int{3}\n}", "func (*MetricsServiceRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{18}\n}", "func (*CMsgGCNotificationsRequest) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{113}\n}", "func (*UpdateContactRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_essentialcontacts_v1_service_proto_rawDescGZIP(), []int{6}\n}", "func (x *fastReflection_AddressBytesToStringRequest) Descriptor() 
protoreflect.MessageDescriptor {\n\treturn md_AddressBytesToStringRequest\n}", "func (*DeleteModelVersionRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{89}\n}", "func (*UpdateSecurityGroupRuleRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_vpc_v1_security_group_service_proto_rawDescGZIP(), []int{9}\n}", "func (x *fastReflection_AddressStringToBytesRequest) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_AddressStringToBytesRequest\n}", "func (*QueryGroupRequest) Descriptor() ([]byte, []int) {\n\treturn file_apps_mconf_pb_request_proto_rawDescGZIP(), []int{2}\n}", "func (*GetVersionRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{9}\n}", "func (*HeartbeatRequest) Descriptor() ([]byte, []int) {\n\treturn file_protobuf_clusrun_proto_rawDescGZIP(), []int{0}\n}", "func (*CreateAlterRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_exercicio_proto_rawDescGZIP(), []int{1}\n}", "func (*DeleteClusterMetadata) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{8}\n}", "func (*UpdateIpPermissionRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_containerregistry_v1_registry_service_proto_rawDescGZIP(), []int{10}\n}", "func (x *fastReflection_QueryAccountsRequest) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_QueryAccountsRequest\n}", "func (*CodeLensRequest) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{163}\n}", "func (*GetNodesRequest) Descriptor() ([]byte, []int) {\n\treturn file_protobuf_clusrun_proto_rawDescGZIP(), []int{2}\n}", "func (*MetricsRequest) Descriptor() ([]byte, []int) {\n\treturn file_ssn_dataservice_v1_dataservice_proto_rawDescGZIP(), []int{11}\n}", "func (*MClusterStatisticsRequest) Descriptor() ([]byte, []int) {\n\treturn 
file_s_stats_proto_rawDescGZIP(), []int{2}\n}", "func (*CollectRequest) Descriptor() ([]byte, []int) {\n\treturn file_orc8r_cloud_go_services_analytics_protos_collector_proto_rawDescGZIP(), []int{0}\n}", "func (*DeleteRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_contact_proto_rawDescGZIP(), []int{10}\n}", "func (*NetworkRequest) Descriptor() ([]byte, []int) {\n\treturn file_packetbroker_api_iam_v1_service_proto_rawDescGZIP(), []int{6}\n}", "func (*ListenRequest) Descriptor() ([]byte, []int) {\n\treturn file_threads_proto_rawDescGZIP(), []int{46}\n}" ]
[ "0.7491125", "0.73855895", "0.73499507", "0.726328", "0.7217145", "0.7166124", "0.7159625", "0.7125232", "0.7053558", "0.70486265", "0.6986181", "0.6923441", "0.69186765", "0.6912716", "0.6911513", "0.69101065", "0.689375", "0.68757945", "0.6842361", "0.68397844", "0.68354857", "0.6828015", "0.6826561", "0.6815168", "0.68101776", "0.67975676", "0.67903906", "0.67752314", "0.6766774", "0.67572224", "0.6755658", "0.67555743", "0.6743134", "0.6742683", "0.67412734", "0.6724823", "0.67218524", "0.672103", "0.6719731", "0.6718695", "0.6716874", "0.67142427", "0.67030257", "0.66973686", "0.6694846", "0.6682772", "0.66750395", "0.6671927", "0.6668833", "0.6662944", "0.6660837", "0.66568315", "0.66560435", "0.664591", "0.6643389", "0.66242254", "0.6622258", "0.66210634", "0.6610667", "0.66100234", "0.66071033", "0.6605294", "0.660463", "0.66041064", "0.66036636", "0.66025466", "0.66000223", "0.6598932", "0.65981555", "0.65968835", "0.65966", "0.6593575", "0.65935063", "0.65925163", "0.6591486", "0.6590307", "0.6588947", "0.6587044", "0.658578", "0.65843356", "0.65841985", "0.65787953", "0.65782315", "0.6577918", "0.6577849", "0.65774953", "0.65770805", "0.65757334", "0.65687025", "0.65675515", "0.65672725", "0.6566913", "0.65660757", "0.65615577", "0.6558131", "0.6557275", "0.6556478", "0.65490735", "0.6546179", "0.6543844" ]
0.7318657
3
Deprecated: Use ClusterRsp.ProtoReflect.Descriptor instead.
func (*ClusterRsp) Descriptor() ([]byte, []int) { return file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{8} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*UnregisterClusterResponse) Descriptor() ([]byte, []int) {\n\treturn file_pkg_grpc_proto_cluster_cluster_proto_rawDescGZIP(), []int{4}\n}", "func (*Deprecation) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{8}\n}", "func (*GroupRsp) Descriptor() ([]byte, []int) {\n\treturn file_chatMsg_msg_proto_rawDescGZIP(), []int{6}\n}", "func (*UnregisterClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_grpc_proto_cluster_cluster_proto_rawDescGZIP(), []int{3}\n}", "func (*AddPeerResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{30}\n}", "func (*StopClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{11}\n}", "func (*AddPeerRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*GetRsp) Descriptor() ([]byte, []int) {\n\treturn file_grpc_proto_rawDescGZIP(), []int{1}\n}", "func (*GetPeerInfoResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{28}\n}", "func (*QueryPlanStatusResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{25}\n}", "func (*RefreshNamenodesResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{3}\n}", "func (*GetClusterInfoResponse) Descriptor() ([]byte, []int) {\n\treturn file_uber_cadence_api_v1_service_workflow_proto_rawDescGZIP(), []int{21}\n}", "func (*CancelPlanResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{23}\n}", "func (*SwitchKeeperRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{6}\n}", "func 
(*RefreshResponse) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{17}\n}", "func (*StopBroadcastResponse) Descriptor() ([]byte, []int) {\n\treturn file_services_core_protobuf_servers_proto_rawDescGZIP(), []int{5}\n}", "func (SVC_Messages) EnumDescriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{4}\n}", "func (*GetPeerInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{6}\n}", "func (x *fastReflection_RpcCommandOptions) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_RpcCommandOptions\n}", "func (*GetMetricsInfoResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{44}\n}", "func (x *fastReflection_MsgUpdateParamsResponse) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_MsgUpdateParamsResponse\n}", "func (*CreateFriendRsp) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{1}\n}", "func (*PatchCollectorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{161}\n}", "func (*RefreshRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{16}\n}", "func (SVC_Messages) EnumDescriptor() ([]byte, []int) {\n\treturn file_netmessages_proto_rawDescGZIP(), []int{4}\n}", "func (Diagnostic_Kind) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_api_servicemanagement_v1_resources_proto_rawDescGZIP(), []int{2, 0}\n}", "func (*DeleteClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{7}\n}", "func (*DisconnectedRequest) Descriptor() ([]byte, []int) {\n\treturn file_vm_vm_proto_rawDescGZIP(), []int{29}\n}", "func 
(CMsgClientToGCRecyclePlayerCardResponse_Result) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{215, 0}\n}", "func (*GetClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{0}\n}", "func (ComputeApiClass) EnumDescriptor() ([]byte, []int) {\n\treturn file_spacemesh_v1_smesher_types_proto_rawDescGZIP(), []int{0}\n}", "func (x *fastReflection_ServiceCommandDescriptor) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_ServiceCommandDescriptor\n}", "func (*GetDatanodeInfoResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{13}\n}", "func (*StopBroadcastRequest) Descriptor() ([]byte, []int) {\n\treturn file_services_core_protobuf_servers_proto_rawDescGZIP(), []int{4}\n}", "func (*ListClusterOperationsResponse) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{14}\n}", "func (CMsgClientToGCGiveTipResponse_Result) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{240, 0}\n}", "func (*UpdateClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{5}\n}", "func (StandardPTransforms_DeprecatedPrimitives) EnumDescriptor() ([]byte, []int) {\n\treturn file_org_apache_beam_model_pipeline_v1_beam_runner_api_proto_rawDescGZIP(), []int{4, 1}\n}", "func (*UpdateFriendStatusRsp) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{3}\n}", "func (x *fastReflection_AddressStringToBytesResponse) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_AddressStringToBytesResponse\n}", "func (*ListClustersResponse) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{2}\n}", "func (ReplyType) 
EnumDescriptor() ([]byte, []int) {\n\treturn file_fk_atlas_proto_rawDescGZIP(), []int{13}\n}", "func (CMsgClientToGCRecycleHeroRelicResponse_Result) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{268, 0}\n}", "func (*MemberReceiveAddressUpdateResp) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{87}\n}", "func (*MetricsServiceRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{18}\n}", "func (CMsgDOTADestroyLobbyResponse_Result) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{258, 0}\n}", "func (*PreviewUpdateGameServerClusterResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_gaming_v1beta_game_server_clusters_proto_rawDescGZIP(), []int{11}\n}", "func (*RefreshCallQueueResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_RefreshCallQueueProtocol_proto_rawDescGZIP(), []int{1}\n}", "func (*GetDelegationTokenResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{4}\n}", "func (*CancelDelegationTokenResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{8}\n}", "func (*StopClusterMetadata) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_greenplum_v1_cluster_service_proto_rawDescGZIP(), []int{12}\n}", "func (*ListResponse) Descriptor() ([]byte, []int) {\n\treturn file_proto_contact_proto_rawDescGZIP(), []int{15}\n}", "func (StatusMessage_Reference) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_devtools_clouddebugger_v2_data_proto_rawDescGZIP(), []int{1, 0}\n}", "func (Host_Type) EnumDescriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_clickhouse_v1_cluster_proto_rawDescGZIP(), []int{6, 0}\n}", "func (*ClusterResponse) Descriptor() ([]byte, []int) {\n\treturn file_protobuf_index_proto_rawDescGZIP(), []int{8}\n}", 
"func (*RestartServicesRequest) Descriptor() ([]byte, []int) {\n\treturn file_orc8r_protos_magmad_proto_rawDescGZIP(), []int{9}\n}", "func (CLC_Messages) EnumDescriptor() ([]byte, []int) {\n\treturn file_csgo_netmessages_proto_rawDescGZIP(), []int{1}\n}", "func (*ScheduleDownlinkResponse) Descriptor() ([]byte, []int) {\n\treturn file_ttn_lorawan_v3_gatewayserver_proto_rawDescGZIP(), []int{2}\n}", "func (*GetStatsResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{45}\n}", "func (*RefreshNamenodesRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{2}\n}", "func (*PreviewDeleteGameServerClusterResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_gaming_v1beta_game_server_clusters_proto_rawDescGZIP(), []int{8}\n}", "func (Span_SpanKind) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_devtools_cloudtrace_v2_trace_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*GetCollectorRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_clarifai_api_service_proto_rawDescGZIP(), []int{163}\n}", "func (Cluster_Status) EnumDescriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_clickhouse_v1_cluster_proto_rawDescGZIP(), []int{0, 2}\n}", "func (ClusterCommand_Type) EnumDescriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{1, 0}\n}", "func (*DiagOperation) Descriptor() ([]byte, []int) {\n\treturn file_testvector_tv_proto_rawDescGZIP(), []int{10}\n}", "func (*Listen) Descriptor() ([]byte, []int) {\n\treturn file_pkg_smgrpc_smgrpc_proto_rawDescGZIP(), []int{4}\n}", "func (Span_Link_Type) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_devtools_cloudtrace_v2_trace_proto_rawDescGZIP(), []int{0, 3, 0}\n}", "func (x *fastReflection_AddressBytesToStringResponse) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_AddressBytesToStringResponse\n}", "func 
(*MemberReceiveAddressDeleteResp) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{89}\n}", "func (RouteLookupRequest_Reason) EnumDescriptor() ([]byte, []int) {\n\treturn file_grpc_lookup_v1_rls_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*Diagnostic) Descriptor() ([]byte, []int) {\n\treturn file_google_api_servicemanagement_v1_resources_proto_rawDescGZIP(), []int{2}\n}", "func (*HelloRsp) Descriptor() ([]byte, []int) {\n\treturn file_helloword_proto_rawDescGZIP(), []int{1}\n}", "func (*SyncLocationRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{1}\n}", "func (*SendResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{27}\n}", "func (*RaftRpcReplyProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{12}\n}", "func (*SemanticTokensLegend) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{215}\n}", "func (*ClusterReq) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{7}\n}", "func (*ChangeInfoRsp) Descriptor() ([]byte, []int) {\n\treturn file_Auth_Auth_proto_rawDescGZIP(), []int{5}\n}", "func (*GetVersionRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{9}\n}", "func (ConfigureStatelessRequest_Cmd) EnumDescriptor() ([]byte, []int) {\n\treturn file_orc8r_protos_magmad_proto_rawDescGZIP(), []int{15, 0}\n}", "func (*RaftRpcRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_raft_proto_rawDescGZIP(), []int{10}\n}", "func (*GenerateProductMixIdeasResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_ads_googleads_v2_services_reach_plan_service_proto_rawDescGZIP(), []int{9}\n}", "func (CLC_Messages) EnumDescriptor() ([]byte, []int) {\n\treturn file_netmessages_proto_rawDescGZIP(), []int{1}\n}", 
"func (*ContractQueryResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{50}\n}", "func (*DisconnectedServicesReq) Descriptor() ([]byte, []int) {\n\treturn file_external_applications_applications_proto_rawDescGZIP(), []int{6}\n}", "func (*ListResponse) Descriptor() ([]byte, []int) {\n\treturn file_teams_v1_teams_proto_rawDescGZIP(), []int{1}\n}", "func (Service_Type) EnumDescriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_clickhouse_v1_cluster_proto_rawDescGZIP(), []int{7, 0}\n}", "func (*SendFromMinerResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{26}\n}", "func (*ListenResponse) Descriptor() ([]byte, []int) {\n\treturn file_faultinjector_proto_rawDescGZIP(), []int{9}\n}", "func (*RegisterCloudAgentResponse) Descriptor() ([]byte, []int) {\n\treturn file_messages_proto_rawDescGZIP(), []int{18}\n}", "func (EUnderDraftResponse) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{11}\n}", "func (x *fastReflection_MsgSetWithdrawAddressResponse) Descriptor() protoreflect.MessageDescriptor {\n\treturn md_MsgSetWithdrawAddressResponse\n}", "func (ClusterStatus) EnumDescriptor() ([]byte, []int) {\n\treturn file_common_proto_rawDescGZIP(), []int{2}\n}", "func (*UpdateGameServerClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_gaming_v1beta_game_server_clusters_proto_rawDescGZIP(), []int{9}\n}", "func (*GetStatisticsBroadcastResponse) Descriptor() ([]byte, []int) {\n\treturn file_services_core_protobuf_servers_proto_rawDescGZIP(), []int{3}\n}", "func (*PerformanceResponse) Descriptor() ([]byte, []int) {\n\treturn file_commissionService_proto_rawDescGZIP(), []int{5}\n}", "func (*UpdateFriendRemarkRsp) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{5}\n}", "func 
(*NetInfoResponse_Peer) Descriptor() ([]byte, []int) {\n\treturn file_resources_proto_rawDescGZIP(), []int{26, 0}\n}", "func (CMsgClientToGCCreatePlayerCardPackResponse_Result) EnumDescriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{217, 0}\n}" ]
[ "0.6898454", "0.67633843", "0.6758483", "0.6746874", "0.6743965", "0.67191446", "0.66340137", "0.66231334", "0.6572636", "0.6567905", "0.6558229", "0.6550756", "0.6548363", "0.65297997", "0.65099263", "0.65075445", "0.6496817", "0.64858085", "0.64842874", "0.6477165", "0.6475513", "0.645865", "0.6450189", "0.6448742", "0.643245", "0.64242333", "0.6419134", "0.64151925", "0.64133644", "0.6409787", "0.64094967", "0.64039683", "0.6399687", "0.6398866", "0.63976514", "0.63938653", "0.63897943", "0.63891894", "0.6388436", "0.63844055", "0.6383178", "0.6379609", "0.6379397", "0.63748395", "0.6374649", "0.6372124", "0.63706404", "0.6367573", "0.63675094", "0.63569736", "0.635491", "0.6351502", "0.6345293", "0.634178", "0.6336906", "0.63360673", "0.6333758", "0.6324927", "0.6324368", "0.6322618", "0.63195217", "0.6315967", "0.63157415", "0.6313903", "0.63127977", "0.6308051", "0.63070905", "0.63040274", "0.6303538", "0.6300831", "0.6300532", "0.6295648", "0.62937266", "0.62921065", "0.6291331", "0.6289534", "0.6288345", "0.62857735", "0.62790966", "0.6276947", "0.62733513", "0.62725365", "0.6271181", "0.6267489", "0.6266212", "0.6264401", "0.6263438", "0.6258668", "0.6256646", "0.6251169", "0.6249095", "0.62485284", "0.6247776", "0.6242372", "0.62417203", "0.62393385", "0.6233339", "0.623208", "0.6231275", "0.6229103" ]
0.7362352
0
Deprecated: Use RegisterNodeReq.ProtoReflect.Descriptor instead.
func (*RegisterNodeReq) Descriptor() ([]byte, []int) { return file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{9} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*RegisterClusterNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{2}\n}", "func (*NodeGroupForNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{4}\n}", "func (*SetNodeConfigRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{47}\n}", "func (*RegisterNodeRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{10}\n}", "func (*UpdateNodeDNSRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{49}\n}", "func (*UpgradeNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{24}\n}", "func (*UpdateNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{11}\n}", "func (*GetNodeSelectorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_spire_api_registration_registration_proto_rawDescGZIP(), []int{22}\n}", "func (*UpdateNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_noderpc_proto_feeds_manager_proto_rawDescGZIP(), []int{6}\n}", "func (*RegisterRequest_SecondaryNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_supernode_proto_rawDescGZIP(), []int{2, 1}\n}", "func (*UpdateNSNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{12}\n}", "func (*InstallNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{22}\n}", "func (*CreateNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{0}\n}", "func (*RegistrationAddReq) Descriptor() ([]byte, []int) {\n\treturn file_registration_proto_rawDescGZIP(), []int{11}\n}", "func (*CreateNSNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), 
[]int{7}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_engine_proto_rawDescGZIP(), []int{6}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_menger_menger_proto_rawDescGZIP(), []int{5}\n}", "func (*UpdateNodeStatusRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{19}\n}", "func (*InstallNSNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{13}\n}", "func (*RegistrationRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_registration_proto_rawDescGZIP(), []int{0}\n}", "func (*NodeGroupsRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{2}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_legacy_upstream_proto_rawDescGZIP(), []int{3}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_agency_web_proto_rawDescGZIP(), []int{8}\n}", "func (*UpdateNSNodeStatusRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{18}\n}", "func (*NodeGroupNodesRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{26}\n}", "func (*SiteRegReq) Descriptor() ([]byte, []int) {\n\treturn file_proto_registration_msgs_proto_rawDescGZIP(), []int{0}\n}", "func (*RegisterRequest_PrimaryNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_supernode_proto_rawDescGZIP(), []int{2, 0}\n}", "func (*RegistrationUpdateReq) Descriptor() ([]byte, []int) {\n\treturn file_registration_proto_rawDescGZIP(), []int{19}\n}", "func (*UpdateNodeSystemRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{58}\n}", "func (*FindEnabledNodeDNSRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{47}\n}", "func 
(*DeleteNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{9}\n}", "func (*FindEnabledNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{12}\n}", "func (*NodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_flow_grpc_workflows_proto_rawDescGZIP(), []int{1}\n}", "func (*RegisterReq) Descriptor() ([]byte, []int) {\n\treturn file_login_proto_rawDescGZIP(), []int{17}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_register_proto_rawDescGZIP(), []int{0}\n}", "func (*RenewDelegationTokenRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{5}\n}", "func (*UpdateNodeUpRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{55}\n}", "func (*UpdateNodeConnectedAPINodesRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{30}\n}", "func (*RegisterClusterNodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{3}\n}", "func (*AddPeerRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*AsyncRegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_legacy_upstream_proto_rawDescGZIP(), []int{1}\n}", "func (*RegistrationRequest) Descriptor() ([]byte, []int) {\n\treturn file_cvInterface_proto_rawDescGZIP(), []int{1}\n}", "func (*NotificationRegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_sdk_service_proto_rawDescGZIP(), []int{6}\n}", "func (*UpdateNodeCacheRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{59}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_user_proto_rawDescGZIP(), []int{7}\n}", "func (*NodeSelectors) Descriptor() ([]byte, []int) {\n\treturn 
file_spire_api_registration_registration_proto_rawDescGZIP(), []int{21}\n}", "func (*GetNodesRequest) Descriptor() ([]byte, []int) {\n\treturn file_protobuf_clusrun_proto_rawDescGZIP(), []int{2}\n}", "func (*MessageHubRegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_messagehub_proto_rawDescGZIP(), []int{0}\n}", "func (*UpdateNodeLoginRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{42}\n}", "func (*RefreshNamenodesRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{2}\n}", "func (*MemberAddReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{0}\n}", "func (*UpdateNodeIsInstalledRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{21}\n}", "func (*GetNodesRequest) Descriptor() ([]byte, []int) {\n\treturn file_observer_observer_proto_rawDescGZIP(), []int{8}\n}", "func (*Node) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{0}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_internal_master_master_server_proto_rawDescGZIP(), []int{2}\n}", "func (*Deprecation) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{8}\n}", "func (*RenameReq) Descriptor() ([]byte, []int) {\n\treturn file_dfs_proto_rawDescGZIP(), []int{4}\n}", "func (*UpdateNSNodeConnectedAPINodesRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{26}\n}", "func (*FindEnabledNSNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{10}\n}", "func (*DeregisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_engine_proto_rawDescGZIP(), []int{8}\n}", "func (*UpdateAttestedNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_spire_server_datastore_datastore_proto_rawDescGZIP(), 
[]int{33}\n}", "func (*NodeGroupTemplateNodeInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{31}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_supernode_proto_rawDescGZIP(), []int{2}\n}", "func (*StartNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{26}\n}", "func (*GetDelegationTokenRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{3}\n}", "func (*RegistrationGetReq) Descriptor() ([]byte, []int) {\n\treturn file_registration_proto_rawDescGZIP(), []int{15}\n}", "func (*DeleteNodeFromNodeClusterRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{10}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_pb_user_user_proto_rawDescGZIP(), []int{3}\n}", "func (*CreateAttestedNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_spire_server_datastore_datastore_proto_rawDescGZIP(), []int{30}\n}", "func (*SharedMemoryControlRequest_Register) Descriptor() ([]byte, []int) {\n\treturn file_grpc_service_proto_rawDescGZIP(), []int{6, 0}\n}", "func (*SubscribeRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{17}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_apiuser_api_user_v1_user_proto_rawDescGZIP(), []int{0}\n}", "func (*RegisterCloudAgentRequest) Descriptor() ([]byte, []int) {\n\treturn file_messages_proto_rawDescGZIP(), []int{17}\n}", "func (*UpdateNSNodeIsInstalledRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{17}\n}", "func (*CreateNodePlanningsRequest) Descriptor() ([]byte, []int) {\n\treturn file_alameda_api_v1alpha1_datahub_plannings_services_proto_rawDescGZIP(), []int{4}\n}", "func (*MemberTaskAddReq) Descriptor() 
([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{117}\n}", "func (*DeleteNSNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{9}\n}", "func (*RegistrationListReq) Descriptor() ([]byte, []int) {\n\treturn file_registration_proto_rawDescGZIP(), []int{23}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_proto_rawDescGZIP(), []int{5}\n}", "func (*SetNodeGroupsRequest) Descriptor() ([]byte, []int) {\n\treturn file_protobuf_clusrun_proto_rawDescGZIP(), []int{19}\n}", "func (*PacketBrokerRegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_ttn_lorawan_v3_packetbrokeragent_proto_rawDescGZIP(), []int{8}\n}", "func (*ProviderDisregisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_hourglass_v1_provider_proto_rawDescGZIP(), []int{2}\n}", "func (*UpdateNSNodeLoginRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{27}\n}", "func (*StartNSNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{28}\n}", "func (*AddPermissionToRoleRequest) Descriptor() ([]byte, []int) {\n\treturn file_pkg_role_pb_request_proto_rawDescGZIP(), []int{7}\n}", "func (*NodeGroupForNodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{5}\n}", "func (*MemberTagAddReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{108}\n}", "func (*ExternalGrpcNode) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{1}\n}", "func (*Request) Descriptor() ([]byte, []int) {\n\treturn file_register_registerpb_register_proto_rawDescGZIP(), []int{0}\n}", "func (*ProviderRegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_hourglass_v1_provider_proto_rawDescGZIP(), []int{0}\n}", "func (*SelectorVerificationReq) Descriptor() 
([]byte, []int) {\n\treturn file_proto_selector_verification_msgs_proto_rawDescGZIP(), []int{0}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_githubcard_proto_rawDescGZIP(), []int{9}\n}", "func (*RegistrationParams) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{64}\n}", "func (*Registration) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{63}\n}", "func (*RenameRequest) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{194}\n}", "func (*RenameRegistrationOptions) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{193}\n}", "func (*MemberLevelAddReq) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{45}\n}", "func (*ValidateTokenRequest) Descriptor() ([]byte, []int) {\n\treturn file_user_proto_rawDescGZIP(), []int{4}\n}", "func (*RegistrationPayReq) Descriptor() ([]byte, []int) {\n\treturn file_registration_proto_rawDescGZIP(), []int{13}\n}", "func (*CreateNodeGroupRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_dataproc_v1_node_groups_proto_rawDescGZIP(), []int{0}\n}" ]
[ "0.6880646", "0.6877073", "0.6814076", "0.6804911", "0.67620736", "0.6750105", "0.67320293", "0.6688599", "0.66595477", "0.66334426", "0.6607929", "0.6590114", "0.6576788", "0.6574392", "0.6560336", "0.65601087", "0.65396374", "0.6493717", "0.649143", "0.64790636", "0.64757216", "0.64738816", "0.64598745", "0.64514214", "0.64501405", "0.6445339", "0.6439367", "0.64307123", "0.6423784", "0.64075416", "0.6400095", "0.6396512", "0.6390322", "0.6373391", "0.63598585", "0.63547057", "0.6347056", "0.6342663", "0.63337636", "0.63164693", "0.63137317", "0.62970084", "0.6294383", "0.6293366", "0.6290622", "0.62892264", "0.6287928", "0.62862337", "0.6282152", "0.62777436", "0.6254323", "0.6251453", "0.62485045", "0.6246547", "0.6243493", "0.6242972", "0.6240717", "0.6231083", "0.6228757", "0.6226468", "0.62122065", "0.62113893", "0.6209763", "0.620831", "0.6203343", "0.6202858", "0.62004256", "0.6192907", "0.6191191", "0.6189617", "0.61889195", "0.61866355", "0.61848044", "0.6181929", "0.61800766", "0.61780715", "0.6172771", "0.61716294", "0.6167977", "0.61676496", "0.6166828", "0.61635596", "0.61615044", "0.6159662", "0.61434567", "0.61371195", "0.61360544", "0.6128991", "0.61272675", "0.612572", "0.612197", "0.6117154", "0.61168313", "0.6115889", "0.6111685", "0.61079144", "0.6103386", "0.60951084", "0.6093391", "0.60919833" ]
0.73164415
0
Deprecated: Use RegisterNodeRsp.ProtoReflect.Descriptor instead.
func (*RegisterNodeRsp) Descriptor() ([]byte, []int) { return file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{10} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*RegisterNodeReq) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{9}\n}", "func (*RegisterReply_SecondaryNodeReply) Descriptor() ([]byte, []int) {\n\treturn file_supernode_proto_rawDescGZIP(), []int{3, 1}\n}", "func (*RegisterClusterNodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{3}\n}", "func (*NodeGroupForNodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{5}\n}", "func (*RegisterRequest_SecondaryNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_supernode_proto_rawDescGZIP(), []int{2, 1}\n}", "func (*Node) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{0}\n}", "func (*Deprecation) Descriptor() ([]byte, []int) {\n\treturn file_external_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{8}\n}", "func (*UpgradeNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{24}\n}", "func (*UpgradeNodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{25}\n}", "func (*CreateFriendRsp) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{1}\n}", "func (*NodeGroupForNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{4}\n}", "func (*SetNodeConfigRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{47}\n}", "func (*Registration) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{63}\n}", "func (*UpdateNodeDNSRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{49}\n}", "func (*RegisterClusterNodeRequest) Descriptor() ([]byte, []int) {\n\treturn 
file_service_node_proto_rawDescGZIP(), []int{2}\n}", "func (*RenameRegistrationOptions) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{193}\n}", "func (*RenewDelegationTokenRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{5}\n}", "func (*RenewDelegationTokenResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{6}\n}", "func (*RegistResp) Descriptor() ([]byte, []int) {\n\treturn file_proto_register_proto_rawDescGZIP(), []int{1}\n}", "func (*RegistrationReply) Descriptor() ([]byte, []int) {\n\treturn file_grpc_registration_proto_rawDescGZIP(), []int{1}\n}", "func (*RefreshNamenodesResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{3}\n}", "func (*SemanticTokensRegistrationOptions) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{218}\n}", "func (*RegisterResponse) Descriptor() ([]byte, []int) {\n\treturn file_proto_menger_menger_proto_rawDescGZIP(), []int{6}\n}", "func (*UpdateNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{11}\n}", "func (*GroupRsp) Descriptor() ([]byte, []int) {\n\treturn file_chatMsg_msg_proto_rawDescGZIP(), []int{6}\n}", "func (*RefreshNamenodesRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_ClientDatanodeProtocol_proto_rawDescGZIP(), []int{2}\n}", "func (*SiteRegReq) Descriptor() ([]byte, []int) {\n\treturn file_proto_registration_msgs_proto_rawDescGZIP(), []int{0}\n}", "func (*GetOssTokenRsp) Descriptor() ([]byte, []int) {\n\treturn file_api_proto_msg_GetOssToken_proto_rawDescGZIP(), []int{1}\n}", "func (*GetDelegationTokenResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{4}\n}", "func (DnsRegMethod) EnumDescriptor() ([]byte, []int) {\n\treturn file_signalling_proto_rawDescGZIP(), []int{1}\n}", "func 
(*ClusterRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{8}\n}", "func (*CreateNodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{1}\n}", "func (*CreateNSNodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{8}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_menger_menger_proto_rawDescGZIP(), []int{5}\n}", "func (*RegisterResponse) Descriptor() ([]byte, []int) {\n\treturn file_proto_agency_web_proto_rawDescGZIP(), []int{9}\n}", "func (*GetRsp) Descriptor() ([]byte, []int) {\n\treturn file_grpc_proto_rawDescGZIP(), []int{1}\n}", "func (*GetDelegationTokenRequestProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{3}\n}", "func (*GetNodeSelectorsRequest) Descriptor() ([]byte, []int) {\n\treturn file_spire_api_registration_registration_proto_rawDescGZIP(), []int{22}\n}", "func (*RegistrationParams) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{64}\n}", "func (*DeregisterResponse) Descriptor() ([]byte, []int) {\n\treturn file_proto_engine_proto_rawDescGZIP(), []int{9}\n}", "func (*RegistrationRequest) Descriptor() ([]byte, []int) {\n\treturn file_grpc_registration_proto_rawDescGZIP(), []int{0}\n}", "func (*ProviderDisregisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_api_hourglass_v1_provider_proto_rawDescGZIP(), []int{2}\n}", "func (*RegisterResponse) Descriptor() ([]byte, []int) {\n\treturn file_proto_engine_proto_rawDescGZIP(), []int{7}\n}", "func (*SiteRegRes) Descriptor() ([]byte, []int) {\n\treturn file_proto_registration_msgs_proto_rawDescGZIP(), []int{1}\n}", "func (*UpdateNodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_pkg_noderpc_proto_feeds_manager_proto_rawDescGZIP(), []int{7}\n}", "func (*NodeSelectors) Descriptor() ([]byte, []int) {\n\treturn 
file_spire_api_registration_registration_proto_rawDescGZIP(), []int{21}\n}", "func (*AddPeerResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{30}\n}", "func (*SwitchKeeperRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{6}\n}", "func (NodeType) EnumDescriptor() ([]byte, []int) {\n\treturn file_ory_keto_relation_tuples_v1alpha2_expand_service_proto_rawDescGZIP(), []int{0}\n}", "func (*AddPeerRequest) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{8}\n}", "func (*ComposeNSBoardResponse_NodeStat) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_proto_rawDescGZIP(), []int{1, 2}\n}", "func (SetNodeConfigRequest_ConfigType) EnumDescriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{47, 0}\n}", "func (*UpdateNSNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{12}\n}", "func (*DeregisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_engine_proto_rawDescGZIP(), []int{8}\n}", "func (*StopNodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{29}\n}", "func (*StopNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{28}\n}", "func (*RegisterResp) Descriptor() ([]byte, []int) {\n\treturn file_login_proto_rawDescGZIP(), []int{18}\n}", "func (*GetNodeSelectorsResponse) Descriptor() ([]byte, []int) {\n\treturn file_spire_api_registration_registration_proto_rawDescGZIP(), []int{23}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_engine_proto_rawDescGZIP(), []int{6}\n}", "func (*UpdateFriendStatusRsp) Descriptor() ([]byte, []int) {\n\treturn file_v1_friend_friend_proto_rawDescGZIP(), []int{3}\n}", 
"func (*StopNSNodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{31}\n}", "func (*UpdateNodeStatusRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{19}\n}", "func (*ExternalGrpcNode) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{1}\n}", "func (*NodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_protobuf_index_proto_rawDescGZIP(), []int{7}\n}", "func (*SharedMemoryControlRequest_Register) Descriptor() ([]byte, []int) {\n\treturn file_grpc_service_proto_rawDescGZIP(), []int{6, 0}\n}", "func (*Unregistration) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{65}\n}", "func (*TypeDefinitionRegistrationOptions) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{132}\n}", "func (*NotificationRegisterResponse) Descriptor() ([]byte, []int) {\n\treturn file_sdk_service_proto_rawDescGZIP(), []int{7}\n}", "func (*UpdateNSNodeStatusRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{18}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_internal_master_master_server_proto_rawDescGZIP(), []int{2}\n}", "func (*CreateNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{0}\n}", "func (*RegistrationAddRes) Descriptor() ([]byte, []int) {\n\treturn file_registration_proto_rawDescGZIP(), []int{12}\n}", "func (*StopNSNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{30}\n}", "func (*SemanticTokensLegend) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{215}\n}", "func (*InstallNodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{23}\n}", "func (*UpdateNodeRequest) Descriptor() 
([]byte, []int) {\n\treturn file_pkg_noderpc_proto_feeds_manager_proto_rawDescGZIP(), []int{6}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_proto_agency_web_proto_rawDescGZIP(), []int{8}\n}", "func (*RegisterEdgeAgentResponse) Descriptor() ([]byte, []int) {\n\treturn file_messages_proto_rawDescGZIP(), []int{16}\n}", "func (*RegisterResponse) Descriptor() ([]byte, []int) {\n\treturn file_internal_master_master_server_proto_rawDescGZIP(), []int{3}\n}", "func (*UpdateNodeSystemRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{58}\n}", "func (*CancelDelegationTokenResponseProto) Descriptor() ([]byte, []int) {\n\treturn file_Security_proto_rawDescGZIP(), []int{8}\n}", "func (*FindEnabledNodeDNSResponse) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{48}\n}", "func (*RegistrationAddReq) Descriptor() ([]byte, []int) {\n\treturn file_registration_proto_rawDescGZIP(), []int{11}\n}", "func (*NodeGroupsRequest) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{2}\n}", "func (*SyncLocationRsp) Descriptor() ([]byte, []int) {\n\treturn file_Assigneer_Assigneer_proto_rawDescGZIP(), []int{1}\n}", "func (*RegisterResponse) Descriptor() ([]byte, []int) {\n\treturn file_register_proto_rawDescGZIP(), []int{1}\n}", "func (*NodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_pkg_flow_grpc_workflows_proto_rawDescGZIP(), []int{2}\n}", "func (NodeState) EnumDescriptor() ([]byte, []int) {\n\treturn file_protobuf_clusrun_proto_rawDescGZIP(), []int{0}\n}", "func (*CreateNSNodeRequest) Descriptor() ([]byte, []int) {\n\treturn file_service_ns_node_proto_rawDescGZIP(), []int{7}\n}", "func (*RegisterResponse) Descriptor() ([]byte, []int) {\n\treturn file_legacy_upstream_proto_rawDescGZIP(), []int{2}\n}", "func (*NodeGroupsResponse) Descriptor() ([]byte, []int) {\n\treturn 
file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{3}\n}", "func (*FindEnabledNodeResponse) Descriptor() ([]byte, []int) {\n\treturn file_service_node_proto_rawDescGZIP(), []int{13}\n}", "func (*MemberReceiveAddressAddResp) Descriptor() ([]byte, []int) {\n\treturn file_ums_proto_rawDescGZIP(), []int{82}\n}", "func (*NodeGroup) Descriptor() ([]byte, []int) {\n\treturn file_cloudprovider_externalgrpc_protos_externalgrpc_proto_rawDescGZIP(), []int{0}\n}", "func (*GetNodeConfigResponse) Descriptor() ([]byte, []int) {\n\treturn file_github_com_yahuizhan_dappley_metrics_go_api_rpc_pb_rpc_proto_rawDescGZIP(), []int{46}\n}", "func (*RegisterReply) Descriptor() ([]byte, []int) {\n\treturn file_supernode_proto_rawDescGZIP(), []int{3}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_legacy_upstream_proto_rawDescGZIP(), []int{3}\n}", "func (*CallHierarchyRegistrationOptions) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{208}\n}", "func (*RegisterResponse) Descriptor() ([]byte, []int) {\n\treturn file_auth_svr_proto_rawDescGZIP(), []int{5}\n}", "func (*RegisterRequest) Descriptor() ([]byte, []int) {\n\treturn file_register_proto_rawDescGZIP(), []int{0}\n}" ]
[ "0.65351105", "0.6272938", "0.6221827", "0.6136749", "0.6130905", "0.6119569", "0.61121607", "0.6094849", "0.60934556", "0.6086631", "0.6078589", "0.6075387", "0.6069023", "0.6054839", "0.60497314", "0.6033443", "0.6018209", "0.6007721", "0.59920925", "0.59901917", "0.5986334", "0.5960919", "0.59569865", "0.5955123", "0.5954828", "0.5954109", "0.59480935", "0.5946708", "0.5945324", "0.59443676", "0.59412843", "0.59156376", "0.591277", "0.5902446", "0.5898729", "0.58985555", "0.58943367", "0.58895284", "0.5884065", "0.5883237", "0.58829266", "0.5882104", "0.5875691", "0.58737797", "0.58709365", "0.58695465", "0.58685964", "0.586516", "0.5856571", "0.5855411", "0.5855095", "0.5853296", "0.58518624", "0.5843722", "0.5842851", "0.58401614", "0.58392453", "0.583831", "0.5837858", "0.58240384", "0.582191", "0.5820607", "0.58200234", "0.58135235", "0.58063924", "0.58031493", "0.57914174", "0.5790832", "0.5790085", "0.5789044", "0.57858396", "0.5777367", "0.57768697", "0.5770626", "0.57698184", "0.5767145", "0.5758909", "0.57584965", "0.5753123", "0.57457614", "0.5745498", "0.57453126", "0.57444", "0.57400393", "0.573992", "0.573775", "0.5737218", "0.5734087", "0.57330644", "0.5732063", "0.5729384", "0.5727108", "0.57261825", "0.5725105", "0.57241327", "0.5723691", "0.57219017", "0.5721107", "0.57210106", "0.57196504" ]
0.73833364
0
Serializes a tree to a single string.
func (this *Codec) serialize(root *TreeNode) string { this.s(root) return "[" + strings.Join(this.data, ",") + "]" }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (t *Tree) String() string {\n\treturn t.root.TreeString()\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tif root == nil {\n\t\treturn \"\"\n\t}\n\tans := make([]string, 0, 10)\n\tserialize(root, &ans)\n\n\treturn strings.Join(ans, \",\")\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tstrs := make([]string, 0)\n\tvar preorder func(*TreeNode)\n\tpreorder = func(node *TreeNode) {\n\t\tif node == nil {\n\t\t\tstrs = append(strs, this.null)\n\t\t\treturn\n\t\t}\n\n\t\tstrs = append(strs, strconv.Itoa(node.Val))\n\t\tpreorder(node.Left)\n\t\tpreorder(node.Right)\n\t}\n\tpreorder(root)\n\treturn strings.Join(strs, this.sep)\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tvar res string\n\n\tvar dfs func(node *TreeNode)\n\tdfs = func(node *TreeNode) {\n\t\tif node == nil {\n\t\t\tres += \"null,\"\n\t\t\treturn\n\t\t}\n\n\t\tres += fmt.Sprintf(\"%+v\", node.Val) + \",\"\n\t\tdfs(node.Left)\n\t\tdfs(node.Right)\n\t}\n\n\tdfs(root)\n\n\treturn res\n}", "func (this *Codec) serialize(root *TreeNode) string {\n if root == nil {\n return \"x\"\n }\n return strconv.Itoa(root.Val) + \",\" + this.serialize(root.Left)+ \",\" + this.serialize(root.Right)\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tif root == nil {\n\t\treturn \"\"\n\t}\n\tvar res string\n\tp := root\n\tq := new(Queue)\n\tq.EnQueue(p)\n\n\tfor !q.IsEmpty() {\n\t\tnode := q.DeQueue()\n\t\tswitch t := node.(type) {\n\t\tcase nil:\n\t\t\tres += \" \"\n\t\tcase *TreeNode:\n\t\t\tres += strconv.Itoa(t.Val)\n\t\t\tq.EnQueue(t.Left)\n\t\t\tq.EnQueue(t.Right)\n\t\tdefault:\n\t\t\tbreak\n\t\t}\n\t}\n\tfmt.Print(res)\n\treturn res\n}", "func (t *RedBlackTree) String() string {\n\treturn stringify.Struct(\"RedBlackTree\",\n\t\tstringify.StructField(\"size\", t.size),\n\t\tstringify.StructField(\"root\", t.root),\n\t)\n}", "func (tree *BTree) String() string {\n\treturn fmt.Sprintf(\"%v\", tree.ToSlice())\n}", "func (this *Codec) serialize(root *TreeNode) 
string {\n\tif root == nil {\n\t\treturn \"\"\n\t}\n\n\tres := make([]string, 0)\n\tqueue := make([]*TreeNode, 0)\n\tqueue = append(queue, root)\n\tfor len(queue) > 0 {\n\t\tl := len(queue)\n\t\tfor i := 0; i < l; i++ {\n\t\t\tnode := queue[0]\n\t\t\tqueue = queue[1:]\n\t\t\tif node != nil {\n\t\t\t\tres = append(res, strconv.Itoa(node.Val))\n\t\t\t\tqueue = append(queue, node.Left, node.Right)\n\t\t\t} else {\n\t\t\t\tres = append(res, \"#\")\n\t\t\t}\n\n\t\t}\n\t}\n\treturn strings.Join(res, \",\")\n}", "func (c *Codec) serialize(root *TreeNode) string {\n\treturn rserialize(root, \"\")\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\treturn this.rserialize(root, \"\")\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tif root == nil {\n\t\treturn \"nil\"\n\t}\n\treturn strconv.Itoa(root.Val) + \",\" + this.serialize(root.Left) + \",\" + this.serialize(root.Right)\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\ts := \"\"\n\n\tif root == nil {\n\t\treturn s\n\t}\n\n\ts = s + strconv.Itoa(root.Val)\n\n\tif root.Left != nil {\n\t\ts = s + \",\" + this.serialize(root.Left)\n\t} else {\n\t\ts = s + \",\" + \"null\"\n\t}\n\n\tif root.Right != nil {\n\t\ts = s + \",\" + this.serialize(root.Right)\n\t} else {\n\t\ts = s + \",\" + \"null\"\n\t}\n\n\treturn s\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\n\tif root == nil {\n\t\treturn \"\"\n\t}\n\n\tqueue := []*TreeNode{root}\n\tc := []string{strconv.Itoa(root.Val)}\n\n\tfor len(queue) > 0 {\n\t\tl := len(queue)\n\t\tfor i := 0; i < l; i++ {\n\t\t\tif queue[i].Left != nil {\n\t\t\t\tqueue = append(queue, queue[i].Left)\n\t\t\t}\n\t\t\tif queue[i].Right != nil {\n\t\t\t\tqueue = append(queue, queue[i].Right)\n\t\t\t}\n\t\t\tadd(&c, queue[i].Left)\n\t\t\tadd(&c, queue[i].Right)\n\t\t}\n\t\tqueue = queue[l:]\n\t}\n\n\tres := strings.Join(c, \",\")\n\treturn res\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tif root == nil {\n\t\treturn \"\"\n\t}\n\tvar 
result []string\n\tqueue := []*TreeNode{root}\n\tfor len(queue) > 0 {\n\t\troot = queue[0]\n\t\tqueue = queue[1:]\n\t\tif root == nil {\n\t\t\tresult = append(result, \"\")\n\t\t\tcontinue\n\t\t}\n\t\tresult = append(result, strconv.Itoa(root.Val))\n\t\tqueue = append(queue, root.Left, root.Right)\n\t}\n\treturn strings.Join(result, \",\")\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\ttmp := []string{}\n\ts(root, &tmp)\n\tthis.SerializeStr = strings.Join(tmp, \",\")\n\treturn this.SerializeStr\n}", "func (t *Tree[T]) String() string {\n\treturn t.Root.String()\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tif root == nil {\n\t\treturn \"[]\"\n\t}\n\n\telements := []string{}\n\tqueue := []*TreeNode{}\n\tqueue = append(queue, root)\n\n\tfor len(queue) > 0 {\n\t\tnode := queue[0]\n\t\tqueue = queue[1:]\n\t\tif node.Val == NULL_VAL {\n\t\t\telements = append(elements, NULL_STR)\n\t\t\tcontinue\n\t\t}\n\n\t\tif node.Left != nil {\n\t\t\tqueue = append(queue, node.Left)\n\t\t} else {\n\t\t\tqueue = append(queue, &TreeNode{\n\t\t\t\tVal: NULL_VAL,\n\t\t\t})\n\t\t}\n\n\t\tif node.Right != nil {\n\t\t\tqueue = append(queue, node.Right)\n\t\t} else {\n\t\t\tqueue = append(queue, &TreeNode{\n\t\t\t\tVal: NULL_VAL,\n\t\t\t})\n\t\t}\n\n\t\telements = append(elements, strconv.Itoa(node.Val))\n\t}\n\n\treturn \"[\" + strings.Join(elements, \",\") + \"]\"\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tstrList := make([]string, 0)\n\tvar solve func(root *TreeNode)\n\tsolve = func(root *TreeNode) {\n\t\tif root == nil {\n\t\t\tstrList = append(strList, \"$\")\n\t\t\treturn\n\t\t}\n\t\tstrList = append(strList, strconv.Itoa(root.Val))\n\t\tsolve(root.Left)\n\t\tsolve(root.Right)\n\t}\n\tsolve(root)\n\tfmt.Println(strings.Join(strList, \",\"))\n\treturn strings.Join(strList, \",\")\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tnodes := []*TreeNode{ root }\n\tstrList := []string{}\n\tfor i := 0; i < len(nodes); 
{\n\t\tcnt := len(nodes)\n\t\tfor ; i < cnt; i++ {\n\t\t\tnode := nodes[i]\n\t\t\tif node == nil {\n\t\t\t\tstrList = append(strList, \"null\")\n\t\t\t} else {\n\t\t\t\tstrList = append(strList, strconv.Itoa(node.Val))\n\t\t\t\tnodes = append(nodes, node.Left)\n\t\t\t\tnodes = append(nodes, node.Right)\n\t\t\t}\n\t\t}\n\t}\n\treturn \"[\" + strings.Join(strList, \"\") + \"]\"\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\treturn dfsSerial(root, \"\")\n}", "func (tree *BinarySearchTree) String() {\n\ttree.lock.Lock()\n\tdefer tree.lock.Unlock()\n\tfmt.Println(\"************************************************\")\n\tstringify(tree.rootNode, 0)\n\tfmt.Println(\"************************************************\")\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\n\tif root == nil {\n\t\treturn \"[null]\"\n\t}\n\n\tnodes := []*TreeNode{root}\n\tcode := \"[\" + strconv.Itoa(root.Val)\n\n\tfor len(nodes) > 0 {\n\t\tnewNodes := []*TreeNode{}\n\n\t\tfor i := range nodes {\n\n\t\t\tif nodes[i].Left != nil {\n\t\t\t\tnewNodes = append(newNodes, nodes[i].Left)\n\t\t\t\tcode = code + \",\" + strconv.Itoa(nodes[i].Left.Val)\n\t\t\t} else {\n\t\t\t\tcode = code + \",null\"\n\t\t\t}\n\n\t\t\tif nodes[i].Right != nil {\n\t\t\t\tnewNodes = append(newNodes, nodes[i].Right)\n\t\t\t\tcode = code + \",\" + strconv.Itoa(nodes[i].Right.Val)\n\t\t\t} else {\n\t\t\t\tcode = code + \",null\"\n\t\t\t}\n\n\t\t}\n\n\t\tnodes = newNodes\n\n\t}\n\tcode = code + \"]\"\n\treturn code\n}", "func (m *MerkleTree) String() string {\n\ts := \"\"\n\tfor _, l := range m.Leafs {\n\t\ts += fmt.Sprint(l)\n\t\ts += \"\\n\"\n\t}\n\treturn s\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tnodeValues := []int{}\n\t// preoder 노드 탐색\n\t_to_string(root, &nodeValues)\n\n\t// 노드 값을 공백으로 구분한 스트링으로 리턴\n\tr := \"\"\n\tfor i := 0; i < len(nodeValues); i++ {\n\t\tr += fmt.Sprintf(\"%d \", nodeValues[i])\n\t}\n\treturn strings.TrimSpace(r)\n}", "func (tree *Tree) String() string 
{\n\tstr := \"RedBlackTree\\n\"\n\tif !tree.Empty() {\n\t\toutput(tree.Root, \"\", true, &str)\n\t}\n\treturn str\n}", "func (n *TreeNode) ToString() string {\n\tif n == nil {\n\t\treturn \"nil\"\n\t}\n\tif n.Left == nil && n.Right == nil {\n\t\treturn strconv.Itoa(n.Data)\n\t}\n\treturn fmt.Sprintf(\"%d => (%s, %s)\", n.Data, n.Left.ToString(), n.Right.ToString())\n}", "func (c *Codec) serialize(root *TreeNode) string {\n\tvar q []*TreeNode\n\tq = append(q, root)\n\tvar out []string\n\tfor len(q) != 0 {\n\t\tfor k := len(q); k > 0; k-- {\n\t\t\tcurr := q[0]\n\t\t\tif curr != nil {\n\t\t\t\tout = append(out, strconv.Itoa(curr.Val))\n\t\t\t\tq = append(q, curr.Left, curr.Right)\n\t\t\t} else {\n\t\t\t\tout = append(out, \"null\")\n\t\t\t}\n\t\t\tq = q[1:]\n\t\t}\n\t}\n\treturn \"[\" + strings.TrimRight(strings.Join(out, \",\"), \",null\") + \"]\"\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tif root == nil {\n\t\treturn \"\"\n\t}\n\tmaps := make(map[int64]int)\n\n\tmaps[1] = root.Val\n\tinnerSerialize(root.Right, &maps, 3)\n\tinnerSerialize(root.Left, &maps, 2)\n\tfmt.Println(maps)\n\n\treturn stringfy(&maps)\n}", "func (n *Node) String() string { return fmt.Sprint(n.Data) }", "func (tree *UTree) String() string {\r\n\tstr := \"RedBlackTree\\n\"\r\n\tif !tree.Empty() {\r\n\t\toutput(tree.root, \"\", true, &str)\r\n\t}\r\n\treturn str\r\n}", "func (this *Codec) serialize1(root *TreeNode) string {\n\n\tif root == nil {\n\t\treturn \"\"\n\t}\n\n\tqueue := []*TreeNode{root}\n\tc := []string{strconv.Itoa(root.Val)}\n\n\tfor len(queue) > 0 {\n\t\tl := len(queue)\n\t\tfor i := 0; i < l; i++ {\n\t\t\tif queue[i].Left != nil {\n\t\t\t\tqueue = append(queue, queue[i].Left)\n\t\t\t}\n\t\t\tif queue[i].Right != nil {\n\t\t\t\tqueue = append(queue, queue[i].Right)\n\t\t\t}\n\t\t\tadd(&c, queue[i].Left)\n\t\t\tadd(&c, queue[i].Right)\n\t\t}\n\t\tqueue = queue[l:]\n\t}\n\n\tres := strings.Join(c, \",\")\n\treturn res\n}", "func (this *Codec) serialize(root 
*TreeNode) string {\n var s string \n res:=helpSerialize(root,s)\n fmt.Println(res)\n return res\n}", "func (binaryTree *BinaryTree) ToString() string {\n\tvar buffer bytes.Buffer\n\n\tif !binaryTree.Empty() {\n\t\tvar queue []*BinaryTreeNode = []*BinaryTreeNode{binaryTree.root}\n\n\t\tfor len(queue) > 0 {\n\t\t\tvar node *BinaryTreeNode = queue[0]\n\t\t\tbuffer.WriteString(strconv.Itoa(node.value) + \" \")\n\n\t\t\tif node.left != nil {\n\t\t\t\tqueue = append(queue, node.left)\n\t\t\t}\n\n\t\t\tif node.right != nil {\n\t\t\t\tqueue = append(queue, node.right)\n\t\t\t}\n\n\t\t\tqueue = queue[1:]\n\t\t}\n\t}\n\n\treturn buffer.String()\n}", "func (n *Node) String() string {\n\tif n.Children.Len() == 0 {\n\t\tfmt.Sprintf(\"(%.2f)\", n.Value)\n\t\treturn fmt.Sprintf(\"(%.2f)\", n.Value)\n\t}\n\tfmt.Sprintf(\"(%.2f: %s)\", n.Value, n.Children)\n\treturn fmt.Sprintf(\"(%.2f: %s)\", n.Value, n.Children)\n}", "func (n *Node) String() string {\n\ts := fmt.Sprintf(\"Level: %d\\nValues: %d\\nChildren: %t\\nBox: %s\\n\",\n\t\tn.level, len(n.values), n.children[0] != nil, n.boundingBox.String())\n\n\tif n.children[0] != nil {\n\t\tfor i, _ := range n.children {\n\t\t\ts = s + n.children[i].String()\n\t\t}\n\t}\n\n\treturn s\n}", "func stringify(treeNode *TreeNode, level int) {\n\tif treeNode != nil {\n\t\tformat := \"\"\n\t\tfor i := 0; i < level; i++ {\n\t\t\tformat += \" \"\n\t\t}\n\t\tformat += \"***> \"\n\t\tlevel++\n\t\tstringify(treeNode.leftNode, level)\n\t\tfmt.Printf(format+\"%d\\n\", treeNode.key)\n\t\tstringify(treeNode.rightNode, level)\n\t}\n}", "func (t *ASCIITree) String() string {\n\tvar buffer bytes.Buffer\n\tt.PrintTree(&buffer)\n\treturn string(buffer.Bytes())\n}", "func (n Node) String() string {\n\t// using json's marshal indent to format and return node information\n\ts, _ := json.MarshalIndent(n, \"\", \"\\t\")\n\treturn string(s)\n}", "func (o *Octree) String() string {\n\treturn fmt.Sprintf(\"Root: %T, leafs: %v\", o.root, o.root.(*node).leafs)\n}", 
"func (n *Node) String() string {\n\treturn fmt.Sprintf(\"%t %t %v %s\", n.leaf, n.Dup, n.Hash, n.C)\n}", "func (n Node) String() string {\n\tc := n.GetConfig()\n\n\ts := n.Info.Name() + \"\\n\"\n\tif n.getDepth() == 0 || c.FullPaths {\n\t\ts = n.Path + \"\\n\"\n\t}\n\n\tfor _, v := range n.Children {\n\t\tif !c.DisableIndentation {\n\t\t\ts += v.generatePrefix()\n\t\t}\n\t\ts += v.String()\n\t}\n\n\treturn s\n}", "func (n *node) String() string {\n\treturn \"<\" + n.view.String() + \"-\\n\" + n.children[0].String() + \", \\n\" + n.children[1].String() + \", \\n\" + n.children[2].String() + \", \\n\" + n.children[3].String() + \">\"\n}", "func (n *Node) String() string {\n\treturn fmt.Sprintf(\"%v\", n.data)\n}", "func (n *Node) WriteTree(padding ...int) string {\n\tvar indent int\n\tif len(padding) == 1 {\n\t\tindent = padding[0]\n\t}\n\n\tvar s string\n\tif n.val != \"\" {\n\t\tfor i := 0; i < indent; i++ {\n\t\t\ts += \" \"\n\t\t}\n\t\ts += n.key + \" : \" + n.val + \"\\n\"\n\t} else {\n\t\tfor i := 0; i < indent; i++ {\n\t\t\ts += \" \"\n\t\t}\n\t\ts += n.key + \" :\" + \"\\n\"\n\t\tfor _, nn := range n.nodes {\n\t\t\ts += nn.WriteTree(indent + 1)\n\t\t}\n\t}\n\treturn s\n}", "func (t *Tree) String() string {\n\ts := t.pattern\n\tif t.parent != nil {\n\t\ts = t.parent.String() + s\n\t}\n\treturn s\n}", "func stringify(n *Node, level int) {\n\tif n != nil {\n\t\tformat := \"\"\n\t\tfor i := 0; i < level; i++ {\n\t\t\tformat += \" \"\n\t\t}\n\t\tformat += \"---[ \"\n\t\tlevel++\n\t\tstringify(n.left, level)\n\t\tfmt.Printf(format+\"%d\\n\", n.key)\n\t\tstringify(n.right, level)\n\t}\n}", "func stringTree(builder *strings.Builder, depth int, node *TreeNode) (*strings.Builder, error) {\n\tvar err error\n\tfor i := 0; i < depth; i++ {\n\t\tif _, err = builder.WriteString(\" \"); err != nil {\n\t\t\treturn builder, err\n\t\t}\n\t}\n\tfmt.Fprintf(builder, \"└ %d\\n\", node.Val)\n\n\tif node.Left != nil {\n\t\tif builder, err = stringTree(builder, depth+1, node.Left); 
err != nil {\n\t\t\treturn builder, err\n\t\t}\n\t}\n\tif node.Right != nil {\n\t\tif builder, err = stringTree(builder, depth+1, node.Right); err != nil {\n\t\t\treturn builder, err\n\t\t}\n\t}\n\treturn builder, nil\n}", "func (t *Tree) formatStr() []byte {\n\tvar s []byte\n\ts = append(s, t.format...)\n\tif t.parent != nil {\n\t\tt := t.parent.formatStr()\n\t\tt = append(t, s...)\n\t\ts = t\n\t}\n\treturn s\n}", "func (t *TreeMap) String() string {\n\tif t.size == 0 {\n\t\treturn \"{}\"\n\t}\n\tb := \"{ \"\n\tinorder(t.root, &b)\n\treturn b + \"}\"\n}", "func (n Node) String() string {\n\tstr := \"\"\n\tn.FuncDownMeFirst(0, nil, func(k Ki, level int, d interface{}) bool {\n\t\tfor i := 0; i < level; i++ {\n\t\t\tstr += \"\\t\"\n\t\t}\n\t\tstr += k.Name() + \"\\n\"\n\t\treturn true\n\t})\n\treturn str\n}", "func main() {\n\troot := TreeNode{\n\t\tVal: 1,\n\t\tLeft: &TreeNode{\n\t\t\tVal: 2,\n\t\t\tLeft: nil,\n\t\t\tRight: nil,\n\t\t},\n\t\tRight: &TreeNode{\n\t\t\tVal: 3,\n\t\t\tLeft: &TreeNode{\n\t\t\t\tVal: 4,\n\t\t\t\tRight: nil,\n\t\t\t\tLeft: nil,\n\t\t\t},\n\t\t\tRight: &TreeNode{\n\t\t\t\tVal: 5,\n\t\t\t\tRight: nil,\n\t\t\t\tLeft: nil,\n\t\t\t},\n\t\t},\n\t}\n\tobj := Constructor()\n\tdata := obj.serialize(&root)\n\tfmt.Println(data)\n}", "func (n *Node[T]) String() string {\n\tif n == nil {\n\t\treturn \"nil\"\n\t}\n\treturn fmt.Sprintf(\"%v\\n%s\\n%s\", n.Value,\n\t\tindentString(n.Left.String()),\n\t\tindentString(n.Right.String()))\n}", "func main() {\n\troot := &TreeNode{\n\t\tVal: 2,\n\t\tLeft: &TreeNode{\n\t\t\tVal: 1,\n\t\t},\n\t\tRight: &TreeNode{\n\t\t\tVal: 3,\n\t\t},\n\t}\n\tprintTreeNodeByDFS(root)\n\tfmt.Println()\n\n\tser := Constructor()\n\ttreeString := ser.serialize(root)\n\tfmt.Println(treeString)\n\tans := ser.deserialize(treeString)\n\t// printTreeNodeByBFS(ans)\n\tprintTreeNodeByDFS(ans)\n\tfmt.Println()\n}", "func (n *Nodes) String() string {\n\treturn fmt.Sprintf(\"%s\", *n)\n}", "func (t *Trie) String() string {\n\treturn 
t.Tree().Print()\n}", "func (bt *BinarySearchTree) String() string {\n\tif bt.root == nil {\n\t\treturn \"no root\"\n\t}\n\tb := &strings.Builder{}\n\tvar err error\n\tif b, err = stringTree(b, 0, bt.root); err != nil {\n\t\tpanic(err)\n\t}\n\treturn b.String()\n}", "func (set *Set) String() string {\n\tstr := \"TreeSet\\n\"\n\titems := []string{}\n\tfor _, v := range set.tree.Keys() {\n\t\titems = append(items, fmt.Sprintf(\"%v\", v))\n\t}\n\tstr += strings.Join(items, \", \")\n\treturn str\n}", "func (ths *TreeHashStack) Serialize() ([]byte, error) {\n\tbuf := new(bytes.Buffer)\n\n\tif err := gob.NewEncoder(buf).Encode(ths); nil != err {\n\t\treturn nil, err\n\t}\n\n\treturn buf.Bytes(), nil\n}", "func (bst *StringBinarySearchTree) String() {\n\tbst.lock.Lock()\n\tdefer bst.lock.Unlock()\n\tfmt.Println(\"------------------------------------------------\")\n\tstringify(bst.root, 0)\n\tfmt.Println(\"------------------------------------------------\")\n}", "func (s HierarchyLevel) String() string {\n\treturn awsutil.Prettify(s)\n}", "func (n Node) String() string {\n\treturn fmt.Sprintf(\"<Node id=%s, data=%s>\", n.ID, n.Data)\n}", "func (n *Node) String() (s string) {\n\treturn fmt.Sprintf(\"\\\"%s\\\" ==> %s/%d %s %s/%d\", n.Original, n.Lref, n.Lval, n.Op, n.Rref, n.Rval)\n}", "func (r *Root) String() string {\n\tvar out bytes.Buffer\n\n\tfor _, s := range r.Statements {\n\t\tout.WriteString(s.String())\n\t}\n\n\treturn out.String()\n}", "func encodeTree(hmt *Tree, finalTree *string) {\n\tif hmt == nil {\n\t\treturn\n\t}\n\t\n\tif hmt.LeftNode == nil && hmt.RightNode == nil{\n\t\t*finalTree += \"1\" + string(hmt.Char)\n\t} else {\n\t\t*finalTree += \"0\"\n\t}\n\tencodeTree(hmt.LeftNode, finalTree)\n\tencodeTree(hmt.RightNode, finalTree) \n}", "func (l *leaf) String() string {\n\tvar str = l.view.String()\n\tfor _, p := range l.ps {\n\t\tstr += p.String()\n\t}\n\treturn str\n}", "func (this *TreeNode) String() string {\n\treturn this.Name.String() + 
this.Colon.String() +\n\t\tthis.Pattern.String()\n}", "func (n *Node) String() string {\r\n\treturn fmt.Sprintf(\"%v\", n.val)\r\n}", "func (p ParseTree) String() string {\n\ts := make([]string, len(p))\n\tfor i, e := range p {\n\t\ts[i] = e.String()\n\t}\n\treturn strings.Join(s, \" \")\n}", "func (n *Node) String() string {\n\treturn n.recString(0)\n}", "func dump(tree *avl.Node) {\n\tb, err := json.MarshalIndent(tree, \"\", \" \")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tfmt.Println(string(b))\n}", "func stringify(n *BinarySearchNode, level int, builder *strings.Builder) {\n\tif n != nil {\n\t\tformat := \"\"\n\t\tfor i := 0; i < level; i++ {\n\t\t\tformat += \" \"\n\t\t}\n\t\tformat += \"---[ \"\n\t\tlevel++\n\t\tstringify(n.left, level, builder)\n\t\tbuilder.WriteString(fmt.Sprintf(format+\"%d\\n\", n.value))\n\t\tstringify(n.right, level, builder)\n\t}\n}", "func (s HierarchyStructure) String() string {\n\treturn awsutil.Prettify(s)\n}", "func (t InfluenceTree) String() (res []string) {\n\tid := \"\"\n\tif t.ID != RootTree.ID {\n\t\tid = strconv.Itoa(t.ID) // converts int id to string\n\t}\n\n\tif len(t.Trees) == 0 {\n\t\treturn []string{id + \"|\"} // current tree is a leaf so we have a single path\n\t}\n\n\tfor _, subTree := range t.Trees {\n\t\tpaths := subTree.String() // get subtrees paths\n\t\tfor _, path := range paths { // for each sub path, we add current influence\n\t\t\tres = append(res, id+\"->\"+path)\n\t\t}\n\t}\n\n\treturn // return computed paths.\n}", "func Tree(e interface{}) string {\n\tswitch e := e.(type) {\n\tcase *gst.File:\n\t\treturn fmt.Sprintf(\"(package %s %s)\", e.PkgName, Tree(e.Decls))\n\tcase []gst.FuncDecl:\n\t\ts := \"\"\n\t\tfor _, fn := range e {\n\t\t\ts += fmt.Sprintf(\"func %s() {\\n%s\\n}\", fn.Name, Tree(fn.Body))\n\t\t}\n\t\treturn s\n\tcase *gst.RetStmt:\n\t\treturn fmt.Sprintf(\"ret\")\n\tcase *gst.ExprStmt:\n\t\treturn fmt.Sprintf(\"%s\", Tree(e.Exprs))\n\tcase value.Int:\n\t\treturn fmt.Sprintf(\"<int %s>\", 
e)\n\tcase variableExpr:\n\t\treturn fmt.Sprintf(\"<var %s>\", e.name)\n\tcase *unary:\n\t\treturn fmt.Sprintf(\"(%s %s)\", e.op, Tree(e.right))\n\tcase *binary:\n\t\t// Special case for [].\n\t\tif e.op == \"[]\" {\n\t\t\treturn fmt.Sprintf(\"(%s[%s])\", Tree(e.left), Tree(e.right))\n\t\t}\n\t\treturn fmt.Sprintf(\"(%s %s %s)\", Tree(e.left), e.op, Tree(e.right))\n\tcase sliceExpr:\n\t\ts := \"<TODO>\"\n\t\treturn s\n\tcase []value.Expr:\n\t\tif len(e) == 1 {\n\t\t\treturn Tree(e[0])\n\t\t}\n\t\ts := \"<\"\n\t\tfor i, expr := range e {\n\t\t\tif i > 0 {\n\t\t\t\ts += \"; \"\n\t\t\t}\n\t\t\ts += Tree(expr)\n\t\t}\n\t\ts += \">\"\n\t\treturn s\n\tdefault:\n\t\treturn fmt.Sprintf(\"%T\", e)\n\t}\n}", "func (s HierarchyPath) String() string {\n\treturn awsutil.Prettify(s)\n}", "func (decTree *Tree) WriteTree(filename string) {\n\tfile, err := os.Create(filename)\n\tif err != nil {\n\t\tfmt.Println(\"Error opening output file: \", filename)\n\t\treturn\n\t}\n\n\tcurrNode := decTree\n\tvar treeStack []*Tree\n\n\ttreeLen := 1\n\tfor treeLen != 0 {\n\t\tfile.WriteString(nodeToStr(currNode.Details))\n\n\t\tif currNode.Details.Leaf == false {\n\t\t\ttreeStack = append(treeStack, currNode.Right)\n\t\t\tcurrNode = currNode.Left\n\t\t\ttreeLen++\n\t\t} else {\n\t\t\t//get the length of the tree and set curr to the last element in the list\n\t\t\ttreeLen--\n\n\t\t\tif treeLen > 0 {\n\t\t\t\tcurrNode, treeStack = treeStack[treeLen-1], treeStack[:treeLen-1]\n\t\t\t}\n\t\t}\n\t}\n\n\tfile.Close()\n}", "func (t *Tree) PrintTree() {\n\tb, err := json.MarshalIndent(t, \"\", \" \")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tfmt.Println(string(b))\n}", "func (n *Node) String() string {\n\treturn fmt.Sprintf(\"%s [%d]\", n.ID_, n.Group_)\n}", "func (n *Node) String() string {\n\tname := n.Field\n\tif n.IsArray {\n\t\tname = \"array[\" + n.TypeName + \"]\"\n\t}\n\n\tif n.ChildNode != nil {\n\t\treturn name + \".\" + n.ChildNode.String()\n\t}\n\tif len(n.TypeName) > 0 {\n\t\treturn name + 
\" : \" + n.TypeName\n\t}\n\treturn name\n}", "func (n *Node) String() string {\n\tbuf := bytes.NewBuffer(nil)\n\tcn := n\n\n\tfor {\n\t\tif cn.next == nil {\n\t\t\tbreak\n\t\t}\n\t\tif buf.Len() != 0 {\n\t\t\tfmt.Fprint(buf, \",\")\n\t\t} else {\n\t\t\tfmt.Fprint(buf, \"[\")\n\n\t\t}\n\t\tfmt.Fprintf(buf, \"%v\", cn.next.data)\n\t\tcn = cn.next\n\t}\n\tfmt.Fprint(buf, \"]\")\n\n\treturn buf.String()\n}", "func (f *Forest) Serialize() ([]byte, error) {\n\tvar bigraph serializer.Int\n\tif f.Bigraph {\n\t\tbigraph = 1\n\t}\n\tserializers := make([]serializer.Serializer, len(f.Forest)+1)\n\tserializers[0] = bigraph\n\tfor i, t := range f.Forest {\n\t\tserializers[i+1] = newTreeSerializer(t)\n\t}\n\treturn serializer.SerializeSlice(serializers)\n}", "func (this *LeafNode) String() string {\n\treturn this.Expr.String()\n}", "func (w *RootWalker) String() (out string) {\n\ttabs := func(n int) {\n\t\tfor i := 0; i < n; i++ {\n\t\t\tout += \"\\t\"\n\t\t}\n\t}\n\tout += fmt.Sprint(\"Root\")\n\tsize := w.Size()\n\tif size == 0 {\n\t\treturn\n\t}\n\tout += fmt.Sprintf(\".Refs[%d] ->\\n\", w.stack[0].prevInFieldIndex)\n\tfor i, obj := range w.stack {\n\t\tschName := \"\"\n\t\ts, _ := w.r.SchemaByReference(obj.s)\n\t\tif s != nil {\n\t\t\tschName = s.Name()\n\t\t}\n\n\t\ttabs(i)\n\t\tout += fmt.Sprintf(\" %s\", schName)\n\t\tout += fmt.Sprintf(` = \"%v\"`+\"\\n\", obj.p)\n\n\t\ttabs(i)\n\t\tif obj.next != nil {\n\t\t\tout += fmt.Sprintf(\" %s\", schName)\n\t\t\tout += fmt.Sprintf(\".%s\", obj.next.prevFieldName)\n\t\t\tif obj.next.prevInFieldIndex != -1 {\n\t\t\t\tout += fmt.Sprintf(\"[%d]\", obj.next.prevInFieldIndex)\n\t\t\t}\n\t\t\tout += fmt.Sprint(\" ->\\n\")\n\t\t}\n\t}\n\treturn\n}", "func (w *RootWalker) String() (out string) {\n\ttabs := func(n int) {\n\t\tfor i := 0; i < n; i++ {\n\t\t\tout += \"\\t\"\n\t\t}\n\t}\n\tout += fmt.Sprint(\"Root\")\n\tsize := w.Size()\n\tif size == 0 {\n\t\treturn\n\t}\n\tout += fmt.Sprintf(\".Refs[%d] ->\\n\", 
w.stack[0].prevInFieldIndex)\n\tfor i, obj := range w.stack {\n\t\tschName := \"\"\n\t\ts, _ := w.r.SchemaByReference(obj.s)\n\t\tif s != nil {\n\t\t\tschName = s.Name()\n\t\t}\n\n\t\ttabs(i)\n\t\tout += fmt.Sprintf(\" %s\", schName)\n\t\tout += fmt.Sprintf(` = \"%v\"`+\"\\n\", obj.p)\n\n\t\ttabs(i)\n\t\tif obj.next != nil {\n\t\t\tout += fmt.Sprintf(\" %s\", schName)\n\t\t\tout += fmt.Sprintf(\".%s\", obj.next.prevFieldName)\n\t\t\tif obj.next.prevInFieldIndex != -1 {\n\t\t\t\tout += fmt.Sprintf(\"[%d]\", obj.next.prevInFieldIndex)\n\t\t\t}\n\t\t\tout += fmt.Sprint(\" ->\\n\")\n\t\t}\n\t}\n\treturn\n}", "func (n *Node) String() string {\n\tvar b strings.Builder\n\tNewPrinting(WithWriter(&b), WithColSep(\" \")).RunNode(n)\n\treturn b.String()\n}", "func (bst *BinarySearch) String() string {\n\tbst.lock.Lock()\n\tdefer bst.lock.Unlock()\n\n\tvar builder strings.Builder\n\n\tbuilder.WriteString(\"-------------------------------------------\\n\")\n\tstringify(bst.root, 0, &builder)\n\tbuilder.WriteString(\"-------------------------------------------\\n\")\n\n\treturn builder.String()\n}", "func (n NodeID) String() string {\n\treturn fmt.Sprintf(\"%x\", n[:])\n}", "func (n *Node) String() string {\n\tvar links string\n\tfor k, n := range n.Nodes {\n\t\tlinks += fmt.Sprintf(\"%s:%s \", k, n.Name)\n\t}\n\tif len(n.Nodes) > 0 {\n\t\tlinks = links[:len(links)-1]\n\t}\n\treturn fmt.Sprintf(\"name=%s links=map[%s]\", n.Name, links)\n}", "func (r *RawNode) String() string {\n\treturn r.raw\n}", "func (nc *PathContext) String() string {\n\tret := \"\\n--------------- NodeContext ------------------\\n\"\n\tret += fmt.Sprintf(\"Parent.Node=\\n%s\\n\", pretty.Sprint(nc.Parent.Node))\n\tret += fmt.Sprintf(\"KeyToChild=%v\\n\", nc.Parent.KeyToChild)\n\tret += fmt.Sprintf(\"Node=\\n%s\\n\", pretty.Sprint(nc.Node))\n\tret += \"----------------------------------------------\\n\"\n\treturn ret\n}", "func NodeToString(node core.Node) string {\n\tvar b 
strings.Builder\n\tfmt.Println()\n\tnodeRecurse(node, 0, &b)\n\treturn b.String()\n}", "func (n Node) String() string {\n\treturn fmt.Sprintf(\"ID: %s\\nName: %s\\nPort: %s\\nDesc: %s\\nLoc: %s\", n.ID, n.Name, n.Port, n.Desc, n.Loc)\n}", "func WriteTree(writer io.Writer, hierarchy *Hierarchy, includeEmpty bool) {\n\ttree := assembleTree(hierarchy)\n\tkeys := make([]string, len(tree))\n\ti := 0\n\tfor k := range tree {\n\t\tkeys[i] = k\n\t\ti++\n\t}\n\tsort.Strings(keys)\n\tfor _, key := range keys {\n\t\tb := tree[key]\n\t\twriteBranch(writer, b, \"\", hierarchy, includeEmpty)\n\t}\n}", "func tree(e interface{}) string {\n\tswitch e := e.(type) {\n\tcase value.Int:\n\t\treturn fmt.Sprintf(\"<int %s>\", e)\n\tcase value.BigInt:\n\t\treturn fmt.Sprintf(\"<bigint %s>\", e)\n\tcase value.BigRat:\n\t\treturn fmt.Sprintf(\"<rat %s>\", e)\n\tcase value.BigFloat:\n\t\treturn fmt.Sprintf(\"<float %s>\", e)\n\tcase value.Complex:\n\t\treturn fmt.Sprintf(\"<complex %s>\", e)\n\tcase sliceExpr:\n\t\ts := \"<\"\n\t\tfor i, x := range e {\n\t\t\tif i > 0 {\n\t\t\t\ts += \" \"\n\t\t\t}\n\t\t\ts += x.ProgString()\n\t\t}\n\t\ts += \">\"\n\t\treturn s\n\tcase *variableExpr:\n\t\treturn fmt.Sprintf(\"<var %s>\", e.name)\n\tcase *unary:\n\t\treturn fmt.Sprintf(\"(%s %s)\", e.op, tree(e.right))\n\tcase *binary:\n\t\treturn fmt.Sprintf(\"(%s %s %s)\", tree(e.left), e.op, tree(e.right))\n\tcase conditional:\n\t\treturn tree(e.binary)\n\tcase *index:\n\t\ts := fmt.Sprintf(\"(%s[\", tree(e.left))\n\t\tfor i, v := range e.right {\n\t\t\tif i > 0 {\n\t\t\t\ts += \"; \"\n\t\t\t}\n\t\t\ts += tree(v)\n\t\t}\n\t\ts += \"])\"\n\t\treturn s\n\tcase []value.Expr:\n\t\tif len(e) == 1 {\n\t\t\treturn tree(e[0])\n\t\t}\n\t\ts := \"<\"\n\t\tfor i, expr := range e {\n\t\t\tif i > 0 {\n\t\t\t\ts += \"; \"\n\t\t\t}\n\t\t\ts += tree(expr)\n\t\t}\n\t\ts += \">\"\n\t\treturn s\n\tdefault:\n\t\treturn fmt.Sprintf(\"%T\", e)\n\t}\n}", "func (n *Node) String() string {\n\treturn 
stringify.Struct(\"GetElement\",\n\t\tstringify.StructField(\"key\", n.key),\n\t\tstringify.StructField(\"value\", n.value),\n\t\tstringify.StructField(\"left\", n.left),\n\t\tstringify.StructField(\"right\", n.right),\n\t)\n}", "func (nt NodeValue) String() string {\n\treturn string(nt)\n}", "func (f *Forest) ToString() string {\n\n\tfh := f.height\n\t// tree height should be 6 or less\n\tif fh > 6 {\n\t\treturn \"forest too big to print \"\n\t}\n\n\toutput := make([]string, (fh*2)+1)\n\tvar pos uint8\n\tfor h := uint8(0); h <= fh; h++ {\n\t\trowlen := uint8(1 << (fh - h))\n\n\t\tfor j := uint8(0); j < rowlen; j++ {\n\t\t\tvar valstring string\n\t\t\tok := f.data.size() >= uint64(pos)\n\t\t\tif ok {\n\t\t\t\tval := f.data.read(uint64(pos))\n\t\t\t\tif val != empty {\n\t\t\t\t\tvalstring = fmt.Sprintf(\"%x\", val[:2])\n\t\t\t\t}\n\t\t\t}\n\t\t\tif valstring != \"\" {\n\t\t\t\toutput[h*2] += fmt.Sprintf(\"%02d:%s \", pos, valstring)\n\t\t\t} else {\n\t\t\t\toutput[h*2] += fmt.Sprintf(\" \")\n\t\t\t}\n\t\t\tif h > 0 {\n\t\t\t\t//\t\t\t\tif x%2 == 0 {\n\t\t\t\toutput[(h*2)-1] += \"|-------\"\n\t\t\t\tfor q := uint8(0); q < ((1<<h)-1)/2; q++ {\n\t\t\t\t\toutput[(h*2)-1] += \"--------\"\n\t\t\t\t}\n\t\t\t\toutput[(h*2)-1] += \"\\\\ \"\n\t\t\t\tfor q := uint8(0); q < ((1<<h)-1)/2; q++ {\n\t\t\t\t\toutput[(h*2)-1] += \" \"\n\t\t\t\t}\n\n\t\t\t\t//\t\t\t\t}\n\n\t\t\t\tfor q := uint8(0); q < (1<<h)-1; q++ {\n\t\t\t\t\toutput[h*2] += \" \"\n\t\t\t\t}\n\n\t\t\t}\n\t\t\tpos++\n\t\t}\n\n\t}\n\tvar s string\n\tfor z := len(output) - 1; z >= 0; z-- {\n\t\ts += output[z] + \"\\n\"\n\t}\n\treturn s\n}", "func (a *Ast) String() string {\n\treturn string(a.bytes(0))\n}", "func (t AXNodeID) String() string {\n\treturn string(t)\n}" ]
[ "0.746832", "0.73955613", "0.7394086", "0.73931366", "0.7385134", "0.7371911", "0.736461", "0.7330904", "0.73293006", "0.7313392", "0.7312375", "0.7310615", "0.7280303", "0.72729224", "0.7263317", "0.72591245", "0.7251835", "0.72475904", "0.72358364", "0.7220292", "0.71898097", "0.7151529", "0.7119679", "0.7034768", "0.70179415", "0.69819343", "0.69759303", "0.69678056", "0.68746316", "0.6854485", "0.68478036", "0.68406874", "0.6819784", "0.67705774", "0.6745141", "0.6686612", "0.6653319", "0.6638367", "0.6628339", "0.66220784", "0.6611698", "0.65936166", "0.6557745", "0.64644855", "0.64535964", "0.6447622", "0.64327866", "0.64191824", "0.6391956", "0.6388587", "0.6382577", "0.63580674", "0.6356448", "0.63532645", "0.6350606", "0.632757", "0.63248456", "0.62862504", "0.62822753", "0.62784535", "0.62754285", "0.6275298", "0.6268052", "0.62429535", "0.6240408", "0.6239632", "0.6237525", "0.62267643", "0.6223474", "0.620922", "0.62067676", "0.6135412", "0.6132027", "0.6117187", "0.6116875", "0.61056083", "0.60991025", "0.6076891", "0.6070154", "0.60542035", "0.603501", "0.60269064", "0.5999314", "0.5954027", "0.5954027", "0.59441966", "0.59438443", "0.59401035", "0.593525", "0.59314525", "0.59067184", "0.58892524", "0.58793616", "0.58728164", "0.58711815", "0.5855089", "0.5847914", "0.5846048", "0.5837588", "0.58371097" ]
0.7100859
23
Deserializes your encoded data to tree.
func (this *Codec) deserialize(data string) *TreeNode { data = data[1 : len(data)-1] this.data = strings.Split(data, ",") n := this.d() return n }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (this *Codec) deserialize(data string) *TreeNode {\n\tlist := strings.Split(data, \",\")\n\treturn buildTree(&list)\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\n\tqueue := strings.Split(data, \",\")\n\treturn buildTree(&queue)\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\ttmp := strings.Split(this.SerializeStr, \",\")\n\treturn d(&tmp)\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\tif len(data) == 0 {\n\t\treturn nil\n\t}\n\n\tres := strings.Split(data, \",\")\n\troot := &TreeNode{}\n\troot.Val, _ = strconv.Atoi(res[0])\n\tres = res[1:]\n\tqueue := make([]*TreeNode, 0)\n\tqueue = append(queue, root)\n\tfor len(queue) > 0 {\n\t\tif res[0] != \"#\" {\n\t\t\tleftVal, _ := strconv.Atoi(res[0])\n\t\t\tqueue[0].Left.Val = leftVal\n\t\t\tqueue = append(queue, queue[0].Left)\n\t\t}\n\t\tif res[1] != \"#\" {\n\t\t\trightVal, _ := strconv.Atoi(res[1])\n\t\t\tqueue[1].Right.Val = rightVal\n\t\t\tqueue = append(queue, queue[0].Right)\n\t\t}\n\t\tres = res[2:]\n\t\tqueue = queue[1:]\n\t}\n\treturn root\n}", "func (this *Codec) deserialize(data string) *TreeNode { \n list := strings.Split(data, \",\")\n var dfs func() *TreeNode\n dfs = func() *TreeNode {\n val := list[0]\n list = list[1:]\n if val == \"x\" {\n return nil\n }\n Val,_ := strconv.Atoi(val)\n root := &TreeNode{Val: Val}\n root.Left = dfs()\n root.Right = dfs()\n return root\n }\n return dfs()\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\tstrs := strings.Split(data, this.sep)\n\tpos := 0\n\tvar buildTree func() *TreeNode\n\tbuildTree = func() *TreeNode {\n\t\tval := strs[pos]\n\t\tpos++\n\t\tif val == this.null {\n\t\t\treturn nil\n\t\t}\n\t\tnum, error := strconv.Atoi(val)\n\t\tif error != nil {\n\t\t\tpanic(error)\n\t\t}\n\t\tnode := &TreeNode{Val: num}\n\t\tnode.Left = buildTree()\n\t\tnode.Right = buildTree()\n\t\treturn node\n\t}\n\treturn buildTree()\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\tstrList := 
strings.Split(data, \",\")\n\tindex := 0\n\tvar solve func() *TreeNode\n\tsolve = func() *TreeNode {\n\t\tif index >= len(strList) {\n\t\t\treturn nil\n\t\t}\n\t\tif strList[index] == \"$\" {\n\t\t\tindex++\n\t\t\treturn nil\n\t\t}\n\t\tval, _ := strconv.Atoi(strList[index])\n\t\troot := &TreeNode{\n\t\t\tVal: val,\n\t\t}\n\t\tindex++\n\t\troot.Left = solve()\n\t\troot.Right = solve()\n\t\treturn root\n\t}\n\treturn solve()\n}", "func (c *Codec) deserialize(data string) *TreeNode {\n\tl := strings.Split(data, \",\")\n\tfor i := 0; i < len(l); i++ {\n\t\tif l[i] != \"\" {\n\t\t\tc.l = append(c.l, l[i])\n\t\t}\n\t}\n\treturn c.rdeserialize()\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\tvar root *TreeNode\n\tif len(data) == 0 || data[0] == ' ' {\n\t\treturn root\n\t}\n\tnode := new(TreeNode)\n\troot = node\n\tnode.Val = int(data[0])\n\tqueue := new(Queue)\n\tqueue.EnQueue(node)\n\tfor index := 1; index < len(data); {\n\t\tif !queue.IsEmpty() {\n\t\t\tgenerics := queue.DeQueue()\n\t\t\tswitch t := generics.(type) {\n\t\t\tcase *TreeNode:\n\t\t\t\tif data[index] == ' ' {\n\t\t\t\t\tt.Left = nil\n\t\t\t\t} else {\n\t\t\t\t\tt.Left = new(TreeNode)\n\t\t\t\t\tt.Val = int(data[index])\n\t\t\t\t\tqueue.EnQueue(t.Left)\n\t\t\t\t}\n\t\t\t\tindex++\n\t\t\t\tif index >= len(data) {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\n\t\t\t\tif data[index] == ' ' {\n\t\t\t\t\tt.Right = nil\n\t\t\t\t} else {\n\t\t\t\t\tt.Right = new(TreeNode)\n\t\t\t\t\tt.Val = int(data[index])\n\t\t\t\t\tqueue.EnQueue(t.Right)\n\t\t\t\t}\n\t\t\t\tindex++\n\t\t\t}\n\t\t}\n\t}\n\treturn root\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\n\tif len(data) == 0 {\n\t\treturn nil\n\t}\n\n\tcode := data[1:]\n\tcode = code[:len(code)-1]\n\tnodeStr := strings.Split(code, \",\")\n\tif len(nodeStr) == 0 {\n\t\treturn nil\n\t}\n\n\tif nodeStr[0] == \"null\" {\n\t\treturn nil\n\t}\n\n\tnodes := []*TreeNode{}\n\tv, _ := strconv.Atoi(nodeStr[0])\n\troot := &TreeNode{\n\t\tVal: v,\n\t}\n\tnodes = 
append(nodes, root)\n\n\tnodeStr = nodeStr[1:]\n\tfor len(nodeStr) > 0 {\n\n\t\tnewNodes := []*TreeNode{}\n\t\tchildNum := 2 * len(nodes)\n\t\tif childNum > len(nodeStr) {\n\t\t\tchildNum = len(nodeStr)\n\t\t}\n\n\t\tfor j := 0; j < childNum; j++ {\n\t\t\tvar curNode *TreeNode\n\t\t\tif nodeStr[j] != \"null\" {\n\t\t\t\tv, _ := strconv.Atoi(nodeStr[j])\n\t\t\t\tcurNode = &TreeNode{\n\t\t\t\t\tVal: v,\n\t\t\t\t}\n\n\t\t\t\tnewNodes = append(newNodes, curNode)\n\t\t\t}\n\n\t\t\tparNode := nodes[j/2]\n\t\t\tif j&1 == 0 {\n\t\t\t\tparNode.Left = curNode\n\t\t\t} else {\n\t\t\t\tparNode.Right = curNode\n\t\t\t}\n\t\t}\n\n\t\tnodeStr = nodeStr[childNum:]\n\t\tnodes = newNodes\n\t}\n\n\treturn root\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\t// fmt.Println(data)\n\tdata = data[1 : len(data)-1] // remove the \"[]\"\n\telements := strings.Split(data, \",\")\n\n\tif len(elements) < 3 {\n\t\treturn nil\n\t}\n\n\trootVal, _ := strconv.Atoi(elements[0])\n\troot := &TreeNode{\n\t\tVal: rootVal,\n\t}\n\tqueue := []*TreeNode{}\n\tqueue = append(queue, root)\n\n\ti := 1\n\tfor ; i < len(elements); i = i + 2 {\n\t\tparent := queue[0]\n\t\tqueue = queue[1:]\n\n\t\tleft := elements[i]\n\t\tright := elements[i+1]\n\n\t\tif left != NULL_STR {\n\t\t\tleftVal, _ := strconv.Atoi(left)\n\t\t\tleftNode := &TreeNode{\n\t\t\t\tVal: leftVal,\n\t\t\t}\n\t\t\tparent.Left = leftNode\n\t\t\tqueue = append(queue, leftNode)\n\t\t}\n\n\t\tif right != NULL_STR {\n\t\t\trightVal, _ := strconv.Atoi(right)\n\t\t\trightNode := &TreeNode{\n\t\t\t\tVal: rightVal,\n\t\t\t}\n\t\t\tparent.Right = rightNode\n\t\t\tqueue = append(queue, rightNode)\n\t\t}\n\t}\n\n\treturn root\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\tif len(data) == 0 || data == \"[]\" {\n\t\treturn nil\n\t}\n\n\tarray := strings.Split(data, \",\")\n\tmaps := make(map[int64]int)\n\n\tfor _, v := range array {\n\t\tkeyValueArray := strings.Split(v, \":\")\n\t\tindex, _ := 
strconv.ParseInt(keyValueArray[0], 10, 64)\n\t\tvalue, _ := strconv.Atoi(keyValueArray[1])\n\t\tmaps[index] = value\n\t}\n\n\troot := TreeNode{\n\t\tVal: maps[1],\n\t\tRight: innerDeserialize(&maps, 3),\n\t\tLeft: innerDeserialize(&maps, 2),\n\t}\n\n\treturn &root\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\tlist := strings.Split(data, \",\")\n\tfor i := 0; i < len(list); i++ {\n\t\tif list[i] != \"\" {\n\t\t\tthis.list = append(this.list, list[i])\n\t\t}\n\t}\n\treturn this.rdeserialize()\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\tif len(data) <= 2 {\n\t\treturn nil\n\t}\n\tdata = data[1:len(data)-1]\n\tstrList := strings.Split(data, \",\")\n\tnodes := []*TreeNode{}\n\tfor _, str := range strList{\n\t\tval, _ := strconv.Atoi(str)\n\t\tnodes = append(nodes, &TreeNode{Val:val})\n\t}\n\ti, j := 0, 1\n\tfor j + 2 < len(nodes) {\n\t\tnodes[i].Left = nodes[j]\n\t\tnodes[i].Left = nodes[j+1]\n\t\ti, j = i + 1, j + 2\n\t}\n\treturn nodes[0]\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\tif data == \"\" {\n\t\treturn nil\n\t}\n\titems := strings.Split(data, \",\")\n\tvalue, _ := strconv.Atoi(items[0])\n\troot := &TreeNode{Val: value}\n\tp := root\n\tqueue := []*TreeNode{root}\n\tfor i := 0; len(queue) != 0; {\n\t\tp = queue[0]\n\t\tqueue = queue[1:]\n\t\ti++\n\t\tif items[i] == \"\" {\n\t\t\tp.Left = nil\n\t\t} else {\n\t\t\tvalue, _ = strconv.Atoi(items[i])\n\t\t\tp.Left = &TreeNode{Val: value}\n\t\t\tqueue = append(queue, p.Left)\n\t\t}\n\t\ti++\n\t\tif items[i] == \"\" {\n\t\t\tp.Right = nil\n\t\t} else {\n\t\t\tvalue, _ = strconv.Atoi(items[i])\n\t\t\tp.Right = &TreeNode{Val: value}\n\t\t\tqueue = append(queue, p.Right)\n\t\t}\n\t}\n\treturn root\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\ts := strings.Split(data, \",\")\n\tvar traverse func() *TreeNode\n\n\ttraverse = func() *TreeNode {\n\t\tval := s[0]\n\n\t\tif val == \"null\" {\n\t\t\ts = s[1:]\n\t\t\t// 这一步很关键\n\t\t\treturn 
nil\n\t\t}\n\n\t\tv, _ := strconv.Atoi(val)\n\t\ts = s[1:]\n\n\t\treturn &TreeNode{\n\t\t\tVal: v,\n\t\t\tLeft: traverse(),\n\t\t\tRight: traverse(),\n\t\t}\n\t}\n\n\treturn traverse()\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\tif len(data) == 0 {\n\t\treturn nil\n\t}\n\tq := strings.Split(data, \" \")\n\tif len(q) == 0 {\n\t\treturn nil\n\t}\n\n\t// 큐 맨 앞값 파악\n\tmin := -1 << 31\n\tmax := 1<<31 - 1\n\treturn _to_BinaryTreeNode(&q, min, max)\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\tif data == \"\" {\n\t\treturn nil\n\t}\n\tans := strings.Split(data, \",\")\n\n\tvar i = new(int)\n\t*i = 0\n\treturn deserialize(&ans, i)\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\tc := strings.Split(data, \",\")\n\n\tif len(data) == 0 {\n\t\treturn nil\n\t}\n\n\tt := &TreeNode{Val: myAtoi(c[0])}\n\tqueue := []*TreeNode{t}\n\n\ti := 1\n\tfor len(queue) > 0 {\n\t\tl := len(queue)\n\t\tfor j := 0; j < l; j++ {\n\t\t\tif c[i] == \"nil\" {\n\t\t\t\tqueue[j].Left = nil\n\t\t\t} else {\n\t\t\t\tqueue[j].Left = &TreeNode{Val: myAtoi2(c[i])}\n\t\t\t\tqueue = append(queue, queue[j].Left)\n\t\t\t}\n\t\t\ti++\n\t\t\tif c[i] == \"nil\" {\n\t\t\t\tqueue[j].Right = nil\n\t\t\t} else {\n\t\t\t\tqueue[j].Right = &TreeNode{Val: myAtoi2(c[i])}\n\t\t\t\tqueue = append(queue, queue[j].Right)\n\t\t\t}\n\t\t\ti++\n\t\t}\n\t\tqueue = queue[l:]\n\t}\n\treturn t\n}", "func (this *Codec) deserialize(data string) *TreeNode {\n\tints := strings.Split(data, \",\")\n\n\treturn dHelper(&ints)\n}", "func (ths *TreeHashStack) Deserialize(data []byte) error {\n\treturn gob.NewDecoder(bytes.NewBuffer(data)).Decode(ths)\n}", "func (this *Codec) deserialize(data string) *TreeNode { \n l:=strings.Split(data,\",\")\n for i:=0;i<len(l);i++{\n if l[i]!=\"\"{\n this.l=append(this.l,l[i])\n }\n }\n fmt.Println(this.l)\n return this.helpDeserialize()\n}", "func (this *Codec) deserialize1(data string) *TreeNode {\n\tc := strings.Split(data, \",\")\n\n\tif len(data) 
== 0 {\n\t\treturn nil\n\t}\n\n\tt := &TreeNode{Val: myAtoi(c[0])}\n\tqueue := []*TreeNode{t}\n\n\ti := 1\n\tfor len(queue) > 0 {\n\t\tl := len(queue)\n\t\tfor j := 0; j < l; j++ {\n\t\t\tif c[i] == \"nil\" {\n\t\t\t\tqueue[j].Left = nil\n\t\t\t} else {\n\t\t\t\tqueue[j].Left = &TreeNode{Val: myAtoi(c[i])}\n\t\t\t\tqueue = append(queue, queue[j].Left)\n\t\t\t}\n\t\t\ti++\n\t\t\tif c[i] == \"nil\" {\n\t\t\t\tqueue[j].Right = nil\n\t\t\t} else {\n\t\t\t\tqueue[j].Right = &TreeNode{Val: myAtoi(c[i])}\n\t\t\t\tqueue = append(queue, queue[j].Right)\n\t\t\t}\n\t\t\ti++\n\t\t}\n\t\tqueue = queue[l:]\n\t}\n\treturn t\n}", "func (c *Codec) deserialize(data string) *TreeNode {\n\tif len(data) < 2 {\n\t\treturn nil\n\t}\n\tif data[0] != '[' && data[len(data)-1] != ']' {\n\t\treturn nil\n\t}\n\tdata = data[1 : len(data)-1]\n\n\tvar nums []*int\n\tfor _, s := range strings.Split(data, \",\") {\n\t\tif s == \"null\" {\n\t\t\tnums = append(nums, nil)\n\t\t} else {\n\t\t\tnum, err := strconv.Atoi(s)\n\t\t\tif err == nil {\n\t\t\t\tnums = append(nums, &num)\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(nums) == 0 {\n\t\treturn nil\n\t}\n\n\tif nums[0] == nil {\n\t\treturn nil\n\t}\n\tvar root = &TreeNode{Val: *nums[0]}\n\tvar q []*TreeNode\n\tq = append(q, root)\n\n\tfor i := 1; len(q) != 0; {\n\t\tcurr := q[0]\n\n\t\tif curr != nil {\n\t\t\tvar left, right *int\n\t\t\tif i < len(nums) {\n\t\t\t\tleft = nums[i]\n\t\t\t\tif i+1 < len(nums) {\n\t\t\t\t\tright = nums[i+1]\n\t\t\t\t}\n\t\t\t\ti += 2\n\t\t\t}\n\t\t\tif left != nil {\n\t\t\t\tcurr.Left = &TreeNode{\n\t\t\t\t\tVal: *left,\n\t\t\t\t}\n\t\t\t\tq = append(q, curr.Left)\n\t\t\t}\n\t\t\tif right != nil {\n\t\t\t\tcurr.Right = &TreeNode{\n\t\t\t\t\tVal: *right,\n\t\t\t\t}\n\t\t\t\tq = append(q, curr.Right)\n\t\t\t}\n\t\t}\n\n\t\tq = q[1:]\n\t}\n\n\treturn root\n}", "func DecodeTree(na ipld.NodeAssembler, rd *bufio.Reader) error {\n\tif _, err := readNullTerminatedNumber(rd); err != nil {\n\t\treturn err\n\t}\n\n\tt := 
Type.Tree__Repr.NewBuilder()\n\tma, err := t.BeginMap(-1)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor {\n\t\tname, node, err := DecodeTreeEntry(rd)\n\t\tif err != nil {\n\t\t\tif err == io.EOF {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\treturn err\n\t\t}\n\t\tee, err := ma.AssembleEntry(name)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err = ee.AssignNode(node); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif err := ma.Finish(); err != nil {\n\t\treturn err\n\t}\n\treturn na.AssignNode(t.Build())\n}", "func (this *Codec) deserializeV2(data string) *TreeNode {\n\n\tif data==\"null\" {\n\t\treturn nil\n\t}\n\tlist :=strings.Split(data,\",\")// 序列化字符串split成数组\n\tval, _ := strconv.Atoi(list[0])\n\troot :=&TreeNode{Val: val} // 构建根节点\n\tqueue :=[]*TreeNode{}\n\tqueue=append(queue,root) // 根节点入队列\n\tcursor :=1 // data指向下一个节点\n\tfor cursor<len(list) {\n\t\tnode :=queue[0]\n\t\tqueue=queue[1:] // 出队列\n\t\tleftVal :=list[cursor]\n\t\trightVal:=list[cursor+1]\n\t\tif leftVal!=\"null\" { // 如果左子节点val 不为空 入队列\n\t\t\tVal, _ := strconv.Atoi(leftVal)\n\t\t\tleftNode:=&TreeNode{Val: Val}\n\t\t\tnode.Left=leftNode\n\t\t\tqueue=append(queue,leftNode)\n\t\t}\n\n\t\tif rightVal!=\"null\" { // 如果右子节点val 不为空入队列\n\t\t\tVal, _ := strconv.Atoi(rightVal)\n\t\t\trightNode :=&TreeNode{Val: Val}\n\t\t\tnode.Right=rightNode\n\t\t\tqueue=append(queue,rightNode)\n\t\t}\n\t\tcursor+=2\n\t}\n\treturn root\n}", "func main() {\n\troot := &TreeNode{\n\t\tVal: 2,\n\t\tLeft: &TreeNode{\n\t\t\tVal: 1,\n\t\t},\n\t\tRight: &TreeNode{\n\t\t\tVal: 3,\n\t\t},\n\t}\n\tprintTreeNodeByDFS(root)\n\tfmt.Println()\n\n\tser := Constructor()\n\ttreeString := ser.serialize(root)\n\tfmt.Println(treeString)\n\tans := ser.deserialize(treeString)\n\t// printTreeNodeByBFS(ans)\n\tprintTreeNodeByDFS(ans)\n\tfmt.Println()\n}", "func decodeV1(data string) (tree.Tree, error) {\n\tvar storage fsStorage\n\tb, err := base64.StdEncoding.DecodeString(data)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"decoding 
base64\")\n\t}\n\tvar r io.ReadCloser\n\tr, err = gzip.NewReader(bytes.NewReader(b))\n\tif err != nil {\n\t\t// Fallback to non-zipped version.\n\t\tlog.Printf(\n\t\t\t\"Decoding gzip: %s. Falling back to non-gzip loading.\",\n\t\t\terr)\n\t\tr = ioutil.NopCloser(bytes.NewReader(b))\n\t}\n\tdefer r.Close()\n\terr = gob.NewDecoder(r).Decode(&storage)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"decoding gob\")\n\t}\n\tt := make(tree.Tree)\n\tfor dir := range storage.Dirs {\n\t\tt.AddDir(dir)\n\t}\n\tfor path, content := range storage.Files {\n\t\tt.AddFileContent(path, content)\n\t}\n\treturn t, err\n}", "func (leaf *Node) Decode() ([]byte, error) {\n\tif len(leaf.ContentEncoding) == 0 {\n\t\tleaf.plainv = leaf.V\n\t\treturn leaf.plainv, nil\n\t}\n\n\tleaf.plainv = leaf.plainv[:0]\n\n\tif leaf.ContentEncoding == EncodingGzip {\n\t\tr, err := gzip.NewReader(bytes.NewReader(leaf.V))\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tbuf := make([]byte, 1024)\n\t\tfor {\n\t\t\tn, err := r.Read(buf)\n\t\t\tif n > 0 {\n\t\t\t\tleaf.plainv = append(leaf.plainv, buf[:n]...)\n\t\t\t}\n\t\t\tif err != nil {\n\t\t\t\tif err == io.EOF {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tbuf = buf[0:]\n\t\t}\n\t}\n\n\treturn leaf.plainv, nil\n}", "func decodeTree(r *bitio.Reader, nTree byte) (root *Node, err error) {\n\tvar head Node\n\tvar nodes byte\n\tvar leaves byte\n\tvar u uint64\n\n\tfor nodes < nTree {\n\t\tu, err = r.ReadBits(1)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif u == 1 {\n\t\t\tleaves++\n\t\t\tsymbol, err := r.ReadBits(8)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tnode := &Node{value: byte(symbol)}\n\t\t\thead.pushBack(node)\n\t\t}\n\n\t\tif u == 0 {\n\t\t\tnodes++\n\t\t\tif nodes == nTree {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tr := head.popLast()\n\t\t\tl := head.popLast()\n\t\t\tnode := join(l, r)\n\t\t\thead.pushBack(node)\n\t\t}\n\t}\n\n\tif nodes != leaves {\n\t\terr = errors.New(\"nodes 
!= leaves\")\n\t}\n\n\treturn head.next, err\n}", "func (n *Node) Unmarshal(encoded []byte) error {\n\tvar pbn pb.PBNode\n\tif err := pbn.Unmarshal(encoded); err != nil {\n\t\treturn fmt.Errorf(\"Unmarshal failed. %v\", err)\n\t}\n\n\tpbnl := pbn.GetLinks()\n\tn.Links = make([]*Link, len(pbnl))\n\tfor i, l := range pbnl {\n\t\tn.Links[i] = &Link{Name: l.GetName(), Size: l.GetTsize()}\n\t\th, err := mh.Cast(l.GetHash())\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Link hash is not valid multihash. %v\", err)\n\t\t}\n\t\tn.Links[i].Hash = h\n\t}\n\tsort.Stable(LinkSlice(n.Links)) // keep links sorted\n\n\tn.Data = pbn.GetData()\n\treturn nil\n}", "func (s *NodeKeySignature) Unserialize(data []byte) error {\n\tdec, _ := cborDecOpts.DecMode()\n\treturn dec.Unmarshal(data, s)\n}", "func main() {\n\troot := TreeNode{\n\t\tVal: 1,\n\t\tLeft: &TreeNode{\n\t\t\tVal: 2,\n\t\t\tLeft: nil,\n\t\t\tRight: nil,\n\t\t},\n\t\tRight: &TreeNode{\n\t\t\tVal: 3,\n\t\t\tLeft: &TreeNode{\n\t\t\t\tVal: 4,\n\t\t\t\tRight: nil,\n\t\t\t\tLeft: nil,\n\t\t\t},\n\t\t\tRight: &TreeNode{\n\t\t\t\tVal: 5,\n\t\t\t\tRight: nil,\n\t\t\t\tLeft: nil,\n\t\t\t},\n\t\t},\n\t}\n\tobj := Constructor()\n\tdata := obj.serialize(&root)\n\tfmt.Println(data)\n}", "func (d *decoder) createTree() *node {\n\tif val, _ := readBit(d.r); val {\n\t\treturn &node{readByte(d.r), -1, false, nil, nil}\n\t} else if d.numChars != d.numCharsDecoded {\n\t\tleft := d.createTree()\n\t\tright := d.createTree()\n\t\treturn &node{0, -1, true, left, right}\n\t}\n\n\treturn nil\n}", "func (n *Node) UnmarshalJSON(byteData []byte) error {\n\tvar node struct {\n\t\tLeft *Node\n\t\tRight *Node\n\t\tHash []byte\n\t\tC json.RawMessage\n\t\ttree *MerkleTree\n\t\tparent *Node\n\t\tleaf bool\n\t\tDup bool\n\t}\n\tif err := json.Unmarshal(byteData, &node); err != nil {\n\t\treturn err\n\t}\n\tn.Left = node.Left\n\tn.Right = node.Right\n\tn.Hash = node.Hash\n\tn.tree = node.tree\n\tn.parent = node.parent\n\tn.leaf = node.leaf\n\tn.Dup = 
node.Dup\n\n\t// Check how to cast Content C\n\tif len(node.C) > 0 && string(node.C) != `null` {\n\n\t\tvar _type struct {\n\t\t\tType string `json:\"_type\"`\n\t\t}\n\t\tif err := json.Unmarshal([]byte(node.C), &_type); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tc := newContent[_type.Type]()\n\n\t\tif err := json.Unmarshal([]byte(node.C), c); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tn.C = c\n\n\t}\n\treturn nil\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tthis.s(root)\n\treturn \"[\" + strings.Join(this.data, \",\") + \"]\"\n}", "func (n *Node) Unmarshal(data []byte) error {\n\treturn json.Unmarshal(data, n)\n}", "func Decoded(encoded []byte) (*Node, error) {\n\tn := new(Node)\n\terr := n.Unmarshal(encoded)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"incorrectly formatted merkledag node: %s\", err)\n\t}\n\treturn n, nil\n}", "func Decode(f filetree.FileTree) (*Data, error) {\n\tdata := &Data{\n\t\tIdentities: make(map[string]Identity),\n\t\tNumbers: make(map[string]Numbers),\n\t}\n\n\tdirs := [...]string{\n\t\t\"common/main\",\n\t\t\"common/supplemental\",\n\t}\n\tfor _, dir := range dirs {\n\t\terr := f.Walk(dir, func(path string, r io.Reader) error {\n\t\t\treturn decodeXML(path, r, data.decode)\n\t\t})\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn data, nil\n}", "func (decTree *Tree) ReadTree(filename string) error {\n\tfile, err := ioutil.ReadFile(filename)\n\tif err != nil {\n\t\tfmt.Println(\"Error opening input file: \", filename)\n\t\treturn err\n\t}\n\n\tsDat := fmt.Sprintf(\"%s\", file)\n\tdatLines := strings.Split(sDat, \"\\n\")\n\n\tcurrNode := decTree\n\tvar treeStack []*Tree\n\ttreeLen := 1\n\tlastNode := false\n\n\tfor _, line := range datLines {\n\t\tif !lastNode {\n\t\t\tcurrNode.Details.Leaf, currNode.Details.IndexSplit, currNode.Details.SplitVal, currNode.Details.Class, err = parseLine(line)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tif currNode.Details.Leaf == false 
{\n\t\t\t\tcurrNode.Left = new(Tree)\n\t\t\t\tcurrNode.Right = new(Tree)\n\n\t\t\t\ttreeStack = append(treeStack, currNode.Right)\n\t\t\t\tcurrNode = currNode.Left\n\t\t\t\ttreeLen++\n\t\t\t} else {\n\t\t\t\ttreeLen--\n\t\t\t\tif treeLen > 0 {\n\t\t\t\t\tcurrNode, treeStack = treeStack[treeLen-1], treeStack[:treeLen-1]\n\t\t\t\t} else {\n\t\t\t\t\tlastNode = true\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (pd *pymtData) Deserialize(b []byte) error {\n\terr := json.Unmarshal(b, pd)\n\tif err != nil {\n\t\treturn errors.Wrap(err, ErrInvalidFormatBlob)\n\t}\n\n\treturn nil\n}", "func (s *Node) Unmarshal(val []byte) (err error) {\n\terr = json.Unmarshal(val, s)\n\treturn\n}", "func (sig *MerkleSig) Deserialize(data []byte) error {\n\tbuf := bytes.NewBuffer(data)\n\tdec := gob.NewDecoder(buf)\n\n\treturn dec.Decode(sig)\n}", "func LoadTree(jsonTree []byte) (*Tree, error) {\n\tvar trees []Tree\n\terr := json.Unmarshal(jsonTree, &trees)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn CreateTree(trees), nil\n}", "func Deserialize(data []byte, typ string, registry Registry) (interface{}, error) {\n\tif len(data) == 0 {\n\t\treturn nil, nil\n\t}\n\tserde, err := registry.GetSerde(typ)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn serde.Deserialize(data)\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tnodeValues := []int{}\n\t// preoder 노드 탐색\n\t_to_string(root, &nodeValues)\n\n\t// 노드 값을 공백으로 구분한 스트링으로 리턴\n\tr := \"\"\n\tfor i := 0; i < len(nodeValues); i++ {\n\t\tr += fmt.Sprintf(\"%d \", nodeValues[i])\n\t}\n\treturn strings.TrimSpace(r)\n}", "func (d *decoder) decodeHeader() {\n\t// first byte is the number of leaf nodes\n\td.numChars = uint8(readByte(d.r))\n\n\t// read in the total number of characters in the encoded data\n\tbuf := make([]byte, 2)\n\tbuf[0] = readByte(d.r)\n\tbuf[1] = readByte(d.r)\n\n\td.numCharsEncoded = binary.LittleEndian.Uint16(buf)\n\n\t// deserialize the tree\n\td.root = d.createTree()\n}", "func 
Deserialize(registerValue interface{}, data []byte, targetPointer interface{}) error {\n\tif registerValue != nil {\n\t\tgob.Register(registerValue)\n\t}\n\tdecoder := gob.NewDecoder(bytes.NewReader(data))\n\terr := decoder.Decode(targetPointer)\n\n\treturn err\n}", "func (n *HostNode) Decode(data []byte) error {\n\tif err := ffjson.Unmarshal(data, n); err != nil {\n\t\tlogrus.Error(\"decode node info error:\", err.Error())\n\t\treturn err\n\t}\n\treturn nil\n}", "func (d *Dao) Tree(c context.Context, token string) (data interface{}, err error) {\n\tvar (\n\t\treq *http.Request\n\t\ttmp map[string]interface{}\n\t\tok bool\n\t)\n\tif req, err = http.NewRequest(\"GET\", dataURI, nil); err != nil {\n\t\tlog.Error(\"Status url(%s) error(%v)\", dataURI, err)\n\t\treturn\n\t}\n\treq.Header.Set(\"X-Authorization-Token\", token)\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\tvar res struct {\n\t\tCode int `json:\"code\"`\n\t\tData map[string]map[string]interface{} `json:\"data\"`\n\t\tMessage string `json:\"message\"`\n\t\tStatus int `json:\"status\"`\n\t}\n\tif err = d.client.Do(c, req, &res); err != nil {\n\t\tlog.Error(\"d.Status url(%s) res($s) err(%v)\", dataURI, res, err)\n\t\treturn\n\t}\n\tif res.Code != 90000 {\n\t\terr = fmt.Errorf(\"error code :%d\", res.Code)\n\t\tlog.Error(\"Status url(%s) res(%v)\", dataURI, res)\n\t\treturn\n\t}\n\tif tmp, ok = res.Data[\"bilibili\"]; ok {\n\t\tdata, ok = tmp[\"children\"]\n\t}\n\tif !ok {\n\t\terr = ecode.NothingFound\n\t}\n\treturn\n}", "func CalTree(data [][]byte) {\r\n\tvar Root Node\r\n\tRoot.GenerateRoot(data, true)\r\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\treturn this.rserialize(root, \"\")\n}", "func Unserialize(data []byte, v interface{}) error {\n\treturn gob.NewDecoder(bytes.NewBuffer(data)).Decode(v)\n}", "func DecodeTreeEntry(rd *bufio.Reader) (string, ipld.Node, error) {\n\tdata, err := rd.ReadString(' ')\n\tif err != nil {\n\t\treturn \"\", nil, err\n\t}\n\tdata = 
data[:len(data)-1]\n\n\tname, err := rd.ReadString(0)\n\tif err != nil {\n\t\treturn \"\", nil, err\n\t}\n\tname = name[:len(name)-1]\n\n\tsha := make([]byte, 20)\n\t_, err = io.ReadFull(rd, sha)\n\tif err != nil {\n\t\treturn \"\", nil, err\n\t}\n\n\tte := _TreeEntry{\n\t\tmode: _String{data},\n\t\thash: _Link{cidlink.Link{Cid: shaToCid(sha)}},\n\t}\n\treturn name, &te, nil\n}", "func (n *Node) Unmarshal(_ string, data []byte) error {\n\tnewNode := Node{}\n\tif err := json.Unmarshal(data, &newNode); err != nil {\n\t\treturn err\n\t}\n\n\tif err := newNode.validate(); err != nil {\n\t\treturn err\n\t}\n\n\t*n = newNode\n\n\treturn nil\n}", "func unconvertData(data []byte) interface{} {\n\tif data == nil || string(data) == \"\" {\n\t\treturn nil\n\t}\n\n\tvar proto interface{}\n\tresult, err := serial.Deserialize(data, proto, serial.PERSISTENT)\n\tif err != nil {\n\t\tlog.Fatal(\"Persistent Deserialization Failed\", \"err\", err, \"data\", data)\n\t}\n\treturn result\n}", "func (c *Codec) serialize(root *TreeNode) string {\n\treturn rserialize(root, \"\")\n}", "func (c *CheckboxBase) Deserialize(d page.Decoder) {\n\tc.ControlBase.Deserialize(d)\n\n\tif err := d.Decode(&c.checked); err != nil {\n\t\tpanic(err)\n\t}\n\n\tif err := d.Decode(&c.LabelMode); err != nil {\n\t\tpanic(err)\n\t}\n\n\tif err := d.Decode(&c.labelAttributes); err != nil {\n\t\tpanic(err)\n\t}\n}", "func (n *Node) UnmarshalJSON(b []byte) error {\n\ttype proxy struct {\n\t\tID string `json:\"id\"`\n\t\tValue *Value `json:\"value,omitempty\"`\n\t\tStatus map[string]interface{} `json:\"status\"`\n\t\tChildren map[string]*Node `json:\"children\"`\n\t}\n\n\tp := &proxy{}\n\n\tdec := json.NewDecoder(bytes.NewBuffer(b))\n\tdec.UseNumber()\n\terr := dec.Decode(p)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tn.id = p.ID\n\tn.value = p.Value\n\tn.status = p.Status\n\tn.children = p.Children\n\n\treturn nil\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\treturn dfsSerial(root, \"\")\n}", 
"func Decode(r io.Reader, data interface{}) error {\n\tmeta, err := toml.DecodeReader(r, data)\n\tif undecoded := meta.Undecoded(); undecoded != nil && len(undecoded) > 0 {\n\t\tlog.Infoln(\"toml.Decode:\", \"undecoded keys exist,\", undecoded)\n\t}\n\treturn err\n}", "func Decompress(file *os.File, outputName string) {\n\t// Ler Árvore (Reconstruir)\n\treader := bit.NewReader(file)\n\troot := readTree(reader)\n\tif root == nil {\n\t\tpanic(\"Árvore nula!\")\n\t}\n\t// Decodificar percorrendo a arvore\n\tif root.IsLeaf() {\n\t\tnodeHelper := tree.New(\"\", nil, nil)\n\t\tnodeHelper.Left = root\n\t\troot = nodeHelper\n\t}\n\tdecodeFile(reader, outputName, root)\n}", "func (prkg *KeyIterator) Deserialize(data []byte) error {\n\treturn gob.NewDecoder(bytes.NewBuffer(data)).Decode(prkg)\n}", "func (t *TableNode) UnmarshalJSON(data []byte) error {\n\tm := &tableNode{}\n\terr := json.Unmarshal(data, m)\n\tif err != nil {\n\t\treturn err\n\t}\n\tt.UID = m.UID\n\tt.Word = []rune(m.Word)\n\tt.PUID = m.PUID\n\tt.Name = m.Name\n\tt.Children = m.Children\n\tt.Resolved = m.Resolved\n\tt.DefaultDateFieldUID = m.DefaultDateFieldUID\n\tt.DefaultDateField = m.DefaultDateField\n\tt.Description = m.Description\n\tt.DatastoreID = m.DatastoreID\n\treturn nil\n}", "func (w *RootWalker) DeserializeFromRef(ref skyobject.Reference, p interface{}) error {\n\tif w.r == nil {\n\t\treturn ErrRootNotFound\n\t}\n\tdata, got := w.r.Get(ref)\n\tif !got {\n\t\treturn ErrObjNotFound\n\t}\n\treturn encoder.DeserializeRaw(data, p)\n}", "func (v *FrameTree) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\teasyjsonC5a4559bDecodeGithubComChromedpCdprotoPage61(&r, v)\n\treturn r.Error()\n}", "func DeserializeTransaction(data []byte) Transaction {\n\tvar transaction Transaction\n\n\tdec := gob.NewDecoder(bytes.NewReader(data))\n\terr := dec.Decode(&transaction)\n\tHandle(err)\n\n\treturn transaction\n}", "func (id *NodeID) Unmarshal(data []byte) error {\n\tvar err error\n\t*id, err = 
NodeIDFromBytes(data)\n\treturn err\n}", "func (e EmptyNode) DecodeBinary(*io.BinReader) {\n}", "func Deserialize(data []byte, value interface{}) error {\n\treturn rlp.DecodeBytes(data, value)\n}", "func (q *Quote) Deserialize(b []byte) error {\n\tbuf := bytes.NewBuffer(b)\n\tdec := gob.NewDecoder(buf)\n\terr := dec.Decode(q)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"Deserialize: decoding failed for %s\", b)\n\t}\n\treturn nil\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tstrs := make([]string, 0)\n\tvar preorder func(*TreeNode)\n\tpreorder = func(node *TreeNode) {\n\t\tif node == nil {\n\t\t\tstrs = append(strs, this.null)\n\t\t\treturn\n\t\t}\n\n\t\tstrs = append(strs, strconv.Itoa(node.Val))\n\t\tpreorder(node.Left)\n\t\tpreorder(node.Right)\n\t}\n\tpreorder(root)\n\treturn strings.Join(strs, this.sep)\n}", "func NewMerkelTree(data [][]byte) *MerkelTree {\n\tvar nodes []MerkelNode\n\tif len(data)%2 != 0 {\n\t\tdata = append(data, data[len(data)-1])\n\t}\n\tfor _, dat := range data {\n\t\ttemp := NewMerkelNode(nil, nil, dat)\n\t\tnodes = append(nodes, *temp)\n\t}\n\tfor i := 0; i < len(data)/2; i++ {\n\t\tvar lvl []MerkelNode\n\t\tfor j := 0; j < len(nodes); j += 2 {\n\t\t\ttemp := NewMerkelNode(&nodes[j], &nodes[j+1], nil)\n\t\t\tlvl = append(lvl, *temp)\n\t\t}\n\t\tnodes = lvl\n\t}\n\tmkltree := MerkelTree{&nodes[0]}\n\treturn &mkltree\n}", "func (this *Codec) serialize(root *TreeNode) string {\n if root == nil {\n return \"x\"\n }\n return strconv.Itoa(root.Val) + \",\" + this.serialize(root.Left)+ \",\" + this.serialize(root.Right)\n}", "func (i *IpldRawNode) Tree(p string, depth int) []string {\n\treturn nil\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\ttmp := []string{}\n\ts(root, &tmp)\n\tthis.SerializeStr = strings.Join(tmp, \",\")\n\treturn this.SerializeStr\n}", "func (txn *Transaction) Deserialize(gobdata utils.Gob) {\n\t// Decode the gob data into the blockheader\n\tutils.GobDecode(gobdata, txn)\n}", "func (t 
*Trie) UnmarshalJSON(data []byte) error {\n\tm := marshalTrie{t.m}\n\terr := json.Unmarshal(data, &m)\n\tt.m = m.M\n\treturn err\n}", "func Decode(tomlBlob string, target interface{}) error {\n\tsreader := strings.NewReader(tomlBlob)\n\tdecoder := toml.NewDecoder(sreader)\n\terr := decoder.Decode(target)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func DeserializeForest(d []byte) (*Forest, error) {\n\tslice, err := serializer.DeserializeSlice(d)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif len(slice) < 1 {\n\t\treturn nil, errors.New(\"invalid Forest slice\")\n\t}\n\tintVal, ok := slice[0].(serializer.Int)\n\tif !ok {\n\t\treturn nil, errors.New(\"invalid Forest slice\")\n\t}\n\tvar res Forest\n\tres.Bigraph = intVal == 1\n\tfor _, t := range slice[1:] {\n\t\ttree, ok := t.(*treeSerializer)\n\t\tif !ok {\n\t\t\treturn nil, errors.New(\"invalid Forest slice\")\n\t\t}\n\t\tres.Forest = append(res.Forest, tree.Tree())\n\t}\n\treturn &res, nil\n}", "func (tree *Tree) ConstructTree(details *pb.MetricDetailsResponse) error {\n\talreadyVisited := []*caching.Node{}\n\troot, err := tree.GetNode(tree.RootName)\n\tif err != nil {\n\t\treturn err\n\t}\n\t//cycles on all the metrics of the details response.\n\t//For each metric it splits the metric name into dot separated elements. 
Each\n\t//element will represent a node in the tree structure.\n\t//\n\t//All the nodes will have initial Size = 0\n\tfor metric, data := range details.Metrics {\n\t\tparts := strings.Split(metric, \".\")\n\t\tleafIndex := len(parts) - 1\n\n\t\talreadyVisited = []*caching.Node{root}\n\n\t\tfor currentIndex := 0; currentIndex <= leafIndex; currentIndex++ {\n\t\t\tcurrentName := strings.Join(parts[0:currentIndex+1], \".\")\n\t\t\tif val, _ := tree.GetNodeFromRoot(currentName); val != nil {\n\t\t\t\talreadyVisited = append(alreadyVisited, val)\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif currentIndex == leafIndex {\n\t\t\t\tfor index, node := range alreadyVisited {\n\t\t\t\t\tif index != len(alreadyVisited)-1 {\n\t\t\t\t\t\tnode.Leaf = false\n\t\t\t\t\t}\n\t\t\t\t\tnode.Size += data.Size_\n\n\t\t\t\t}\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tcurrentNode := &caching.Node{\n\t\t\t\tName: tree.RootName + \".\" + currentName,\n\t\t\t\tChildren: []*caching.Node{},\n\t\t\t\tLeaf: true,\n\t\t\t\tSize: int64(0),\n\t\t\t}\n\n\t\t\ttree.AddNode(currentName, currentNode)\n\t\t\ttree.AddChild(alreadyVisited[len(alreadyVisited)-1], currentNode)\n\n\t\t\talreadyVisited = append(alreadyVisited, currentNode)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (dt *Depth) UnmarshalBinary(data []byte) (int, error) {\n\t*dt = Depth(binary.LittleEndian.Uint16(data[0:DepthSize]))\n\treturn DepthSize, nil\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tif root == nil {\n\t\treturn \"\"\n\t}\n\tans := make([]string, 0, 10)\n\tserialize(root, &ans)\n\n\treturn strings.Join(ans, \",\")\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\n\tif root == nil {\n\t\treturn \"[null]\"\n\t}\n\n\tnodes := []*TreeNode{root}\n\tcode := \"[\" + strconv.Itoa(root.Val)\n\n\tfor len(nodes) > 0 {\n\t\tnewNodes := []*TreeNode{}\n\n\t\tfor i := range nodes {\n\n\t\t\tif nodes[i].Left != nil {\n\t\t\t\tnewNodes = append(newNodes, nodes[i].Left)\n\t\t\t\tcode = code + \",\" + 
strconv.Itoa(nodes[i].Left.Val)\n\t\t\t} else {\n\t\t\t\tcode = code + \",null\"\n\t\t\t}\n\n\t\t\tif nodes[i].Right != nil {\n\t\t\t\tnewNodes = append(newNodes, nodes[i].Right)\n\t\t\t\tcode = code + \",\" + strconv.Itoa(nodes[i].Right.Val)\n\t\t\t} else {\n\t\t\t\tcode = code + \",null\"\n\t\t\t}\n\n\t\t}\n\n\t\tnodes = newNodes\n\n\t}\n\tcode = code + \"]\"\n\treturn code\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\ts := \"\"\n\n\tif root == nil {\n\t\treturn s\n\t}\n\n\ts = s + strconv.Itoa(root.Val)\n\n\tif root.Left != nil {\n\t\ts = s + \",\" + this.serialize(root.Left)\n\t} else {\n\t\ts = s + \",\" + \"null\"\n\t}\n\n\tif root.Right != nil {\n\t\ts = s + \",\" + this.serialize(root.Right)\n\t} else {\n\t\ts = s + \",\" + \"null\"\n\t}\n\n\treturn s\n}", "func (t *Torrent) UnmarshalJSON(data []byte) error {\n\tvar aux struct {\n\t\tID flexInt `json:\"id\"`\n\t\tCategory flexInt `json:\"category\"`\n\t\tStatus string `json:\"status\"`\n\t\tName string `json:\"name\"`\n\t\tNumFiles flexInt `json:\"num_files\"`\n\t\tInfoHash string `json:\"info_hash\"`\n\t\tDescription string `json:\"descr\"`\n\t\tLeechers flexInt `json:\"leechers\"`\n\t\tSeeders flexInt `json:\"seeders\"`\n\t\tUser string `json:\"username\"`\n\t\tSize flexInt `json:\"size\"`\n\t\tAdded flexInt `json:\"added\"`\n\t\tImdbID flexString `json:\"imdb\"`\n\t}\n\n\t// Decode json into the aux struct\n\tif err := json.Unmarshal(data, &aux); err != nil {\n\t\treturn err\n\t}\n\n\tt.ID = int(aux.ID)\n\tt.Category = TorrentCategory(int(aux.Category))\n\tt.Status = UserStatus(aux.Status)\n\tt.Name = aux.Name\n\tt.NumFiles = int(aux.NumFiles)\n\tt.Size = uint64(aux.Size)\n\tt.Seeders = int(aux.Seeders)\n\tt.Leechers = int(aux.Leechers)\n\tt.User = aux.User\n\tt.Added = time.Unix(int64(aux.Added), 0)\n\tt.Description = aux.Description\n\tt.InfoHash = aux.InfoHash\n\tt.ImdbID = string(aux.ImdbID)\n\n\treturn nil\n}", "func (n *Node) UnmarshalJSON(b []byte) error {\n\tvar tmp1 struct 
{\n\t\tType string `json:\"type\"`\n\t}\n\tif err := json.Unmarshal(b, &tmp1); err != nil {\n\t\treturn err\n\t}\n\tn.Type = tmp1.Type\n\n\tswitch n.Type {\n\n\tcase \"AssignmentExpression\", \"AssignmentPattern\", \"BinaryExpression\", \"LogicalExpression\":\n\t\tvar tmp2 struct {\n\t\t\tLeft *Node `json:\"left\"`\n\t\t\tRight *Node `json:\"right\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Left, tmp2.Right}\n\n\tcase \"ArrayExpression\", \"ArrayPattern\":\n\t\tvar tmp2 struct {\n\t\t\tElements []*Node `json:\"elements\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = tmp2.Elements\n\n\tcase \"ArrowFunctionExpression\":\n\t\tvar tmp2 struct {\n\t\t\tParams []*Node `json:\"params\"`\n\t\t\tBody *Node `json:\"body\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = append(tmp2.Params, tmp2.Body)\n\n\tcase \"BlockStatement\", \"ClassBody\", \"DoExpression\", \"LabeledStatement\", \"Program\":\n\t\tvar tmp2 struct {\n\t\t\tBody []*Node `json:\"body\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = tmp2.Body\n\n\tcase \"BindExpression\":\n\t\tvar tmp2 struct {\n\t\t\tCallee *Node `json:\"callee\"`\n\t\t\tObject *Node `json:\"object\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Callee, tmp2.Object}\n\n\tcase \"CallExpression\", \"NewExpression\":\n\t\tvar tmp2 struct {\n\t\t\tCallee *Node\n\t\t\tArguments []*Node `json:\"arguments\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Name = tmp2.Callee.Name\n\t\tn.From = tmp2.Callee.From\n\t\tn.Children = tmp2.Arguments\n\n\tcase \"ClassDeclaration\", \"ClassExpression\":\n\t\tvar tmp2 struct {\n\t\t\tSuperClass *Node `json:\"superClass\"`\n\t\t\tBody *Node 
`json:\"body\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.SuperClass, tmp2.Body}\n\n\tcase \"ClassMethod\":\n\t\tvar tmp2 struct {\n\t\t\tKey *Node `json:\"key\"`\n\t\t\tBody *Node `json:\"body\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Name = tmp2.Key.Name\n\t\tn.Children = []*Node{tmp2.Body}\n\n\tcase \"ClassProperty\":\n\t\tvar tmp2 struct {\n\t\t\tKey *Node `json:\"key\"`\n\t\t\tValue *Node `json:\"value\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Name = tmp2.Key.Name\n\t\tn.Children = []*Node{tmp2.Value}\n\n\tcase \"CatchClause\":\n\t\tvar tmp2 struct {\n\t\t\tParam *Node `json:\"param\"`\n\t\t\tBody *Node `json:\"body\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Param, tmp2.Body}\n\n\tcase \"DoWhileStatement\", \"WhileStatement\":\n\t\tvar tmp2 struct {\n\t\t\tBody *Node `json:\"body\"`\n\t\t\tTest *Node `json:\"test\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Body, tmp2.Test}\n\n\tcase \"ExportAllDeclaration\", \"ExportDefaultDeclaration\", \"ExportNamedDeclaration\":\n\t\tvar tmp2 struct {\n\t\t\tDeclaration *Node `json:\"declaration\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Declaration}\n\n\tcase \"ExpressionStatement\", \"JSXExpressionContainer\":\n\t\tvar tmp2 struct {\n\t\t\tExpression *Node `json:\"expression\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Expression}\n\n\tcase \"File\":\n\t\tvar tmp2 struct {\n\t\t\tProgram *Node `json:\"program\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = 
[]*Node{tmp2.Program}\n\n\tcase \"ForInStatement\":\n\t\tvar tmp2 struct {\n\t\t\tLeft *Node `json:\"left\"`\n\t\t\tRight *Node `json:\"right\"`\n\t\t\tBody *Node `json:\"body\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Left, tmp2.Right, tmp2.Body}\n\n\tcase \"ForStatement\":\n\t\tvar tmp2 struct {\n\t\t\tInit *Node `json:\"init\"`\n\t\t\tTest *Node `json:\"test\"`\n\t\t\tUpdate *Node `json:\"update\"`\n\t\t\tBody *Node `json:\"body\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Init, tmp2.Test, tmp2.Update, tmp2.Body}\n\n\tcase \"FunctionDeclaration\", \"ObjectMethod\":\n\t\tvar tmp2 struct {\n\t\t\tID *Node `json:\"id\"`\n\t\t\tBody *Node `json:\"body\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif tmp2.ID != nil {\n\t\t\tn.Name = tmp2.ID.Name\n\t\t}\n\t\tn.Children = []*Node{tmp2.Body}\n\n\tcase \"FunctionExpression\":\n\t\tvar tmp2 struct {\n\t\t\tParams []*Node `json:\"params\"`\n\t\t\tBody *Node `json:\"body\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = append(tmp2.Params, tmp2.Body)\n\n\tcase \"Identifier\":\n\t\tvar tmp2 struct {\n\t\t\tName string `json:\"name\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Name = tmp2.Name\n\n\tcase \"ConditionalExpression\", \"IfStatement\":\n\t\tvar tmp2 struct {\n\t\t\tTest *Node `json:\"test\"`\n\t\t\tConsequent *Node `json:\"consequent\"`\n\t\t\tAlternate *Node `json:\"alternate\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Test, tmp2.Consequent}\n\t\tif tmp2.Alternate != nil {\n\t\t\tn.Children = append(n.Children, tmp2.Alternate)\n\t\t}\n\n\tcase \"ImportDeclaration\":\n\t\tvar tmp2 struct {\n\t\t\tSpecifiers []*Node 
`json:\"specifiers\"`\n\t\t\tSource *Node `json:\"source\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfor _, spec := range tmp2.Specifiers {\n\t\t\tn.Name += spec.Name + \" \"\n\t\t}\n\t\tn.From = tmp2.Source.Name\n\n\tcase \"ImportSpecifier\":\n\t\tvar tmp2 struct {\n\t\t\tImported *Node\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Name = tmp2.Imported.Name\n\n\tcase \"ImportDefaultSpecifier\", \"ImportNamespaceSpecifier\":\n\t\tvar tmp2 struct {\n\t\t\tLocal *Node `json:\"local\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Name = tmp2.Local.Name\n\n\tcase \"JSXElement\":\n\t\tvar tmp2 struct {\n\t\t\tChildren []*Node `json:\"children\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = tmp2.Children\n\n\tcase \"MemberExpression\":\n\t\tvar tmp2 struct {\n\t\t\tObject *Node `json:\"object\"`\n\t\t\tProperty *Node `json:\"property\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Name = tmp2.Property.Name\n\t\tn.From = tmp2.Object.Name\n\t\tn.Children = []*Node{tmp2.Object, tmp2.Property}\n\n\tcase \"AwaitExpression\", \"ReturnStatement\", \"RestElement\", \"SpreadElement\", \"SpreadProperty\", \"ThrowStatement\", \"UnaryExpression\", \"UpdateExpression\", \"YieldExpression\":\n\t\tvar tmp2 struct {\n\t\t\tArgument *Node `json:\"argument\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Argument}\n\n\tcase \"ObjectExpression\", \"ObjectPattern\":\n\t\tvar tmp2 struct {\n\t\t\tProperties []*Node `json:\"properties\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = tmp2.Properties\n\n\tcase \"ObjectProperty\":\n\t\tvar tmp2 struct {\n\t\t\tValue interface{} `json:\"value\"`\n\t\t}\n\t\tif err := 
json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif tmp2.Value != nil {\n\t\t\tif v, ok := tmp2.Value.(string); ok {\n\t\t\t\tn.Name = v\n\t\t\t}\n\t\t}\n\n\tcase \"StringLiteral\":\n\t\tvar tmp2 struct {\n\t\t\tValue string `json:\"value\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Name = tmp2.Value\n\n\tcase \"SequenceExpression\", \"TemplateLiteral\":\n\t\tvar tmp2 struct {\n\t\t\tExpressions []*Node `json:\"expressions\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = tmp2.Expressions\n\n\tcase \"SwitchCase\":\n\t\tvar tmp2 struct {\n\t\t\tTest *Node `json:\"test\"`\n\t\t\tConsequent []*Node `json:\"conseqeuent\"`\n\t\t}\n\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif tmp2.Test != nil {\n\t\t\tn.Children = []*Node{tmp2.Test}\n\t\t}\n\t\tn.Children = append(n.Children, tmp2.Consequent...)\n\n\tcase \"SwitchStatement\":\n\t\tvar tmp2 struct {\n\t\t\tDiscriminant *Node `json:\"discriminant\"`\n\t\t\tCases []*Node `json:\"cases\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Discriminant}\n\t\tn.Children = append(n.Children, tmp2.Cases...)\n\n\tcase \"TaggedTemplateExpression\":\n\t\tvar tmp2 struct {\n\t\t\tTag *Node `json:\"tag\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Tag}\n\n\tcase \"TryStatement\":\n\t\tvar tmp2 struct {\n\t\t\tBlock *Node `json:\"block\"`\n\t\t\tHandler *Node `json:\"handler\"`\n\t\t\tFinalizer *Node `json:\"finalizer\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Block, tmp2.Handler, tmp2.Finalizer}\n\n\tcase \"VariableDeclaration\":\n\t\tvar tmp2 struct {\n\t\t\tDeclarations []*Node `json:\"declarations\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); 
err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = tmp2.Declarations\n\n\tcase \"VariableDeclarator\":\n\t\tvar tmp2 struct {\n\t\t\tInit *Node `json:\"init\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Init}\n\n\tcase \"WithStatement\":\n\t\tvar tmp2 struct {\n\t\t\tObject *Node `json:\"object\"`\n\t\t\tBody *Node `json:\"body\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &tmp2); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tn.Children = []*Node{tmp2.Object, tmp2.Body}\n\n\tcase\n\t\t\"BooleanLiteral\",\n\t\t\"BreakStatement\",\n\t\t\"ContinueStatement\",\n\t\t\"EmptyStatement\",\n\t\t\"ForOfStatement\",\n\t\t\"DebuggerStatement\",\n\t\t\"NullLiteral\",\n\t\t\"NumericLiteral\",\n\t\t\"TypeAlias\",\n\t\t\"ThisExpression\",\n\t\t\"JSXText\",\n\t\t\"JSXEmptyExpression\",\n\t\t\"DeclareVariable\",\n\t\t\"RegExpLiteral\",\n\t\t\"InterfaceDeclaration\",\n\t\t\"TypeCastExpression\",\n\t\t\"Super\":\n\n\tdefault:\n\t\tlog.Printf(\"unhandled type %s\", n.Type)\n\t}\n\n\treturn nil\n}", "func TestLeafUnmarshall(t *testing.T) {\n\tlump := Leaf{}\n\tlump.SetVersion(20)\n\terr := lump.Unmarshall(GetTestDataBytes())\n\tif err != nil {\n\t\tt.Error(err)\n\t}\n\texpected := GetTestLeafData()\n\tlog.Println(lump)\n\tactual := lump.GetData()[0]\n\n\tif actual != expected {\n\t\tlog.Println(\"Expected: \")\n\t\tlog.Println(expected)\n\t\tlog.Println(\"Actual: \")\n\t\tlog.Println(actual)\n\t\tt.Errorf(\"Imported Leaf data mismatch.\")\n\t}\n}", "func (st *SlimTrie) Unmarshal(buf []byte) error {\n\n\tvar ver string\n\tcompatible := st.compatibleVersions()\n\treader := bytes.NewReader(buf)\n\n\t_, ver, err := pbcmpl.Unmarshal(reader, &st.Children)\n\tif err != nil {\n\t\treturn errors.WithMessage(err, \"failed to unmarshal children\")\n\t}\n\n\tif !vers.IsCompatible(ver, compatible) {\n\t\treturn errors.Wrapf(ErrIncompatible,\n\t\t\tfmt.Sprintf(`version: \"%s\", compatible 
versions:\"%s\"`,\n\t\t\t\tver,\n\t\t\t\tstrings.Join(compatible, \" || \")))\n\t}\n\n\t_, _, err = pbcmpl.Unmarshal(reader, &st.Steps)\n\tif err != nil {\n\t\treturn errors.WithMessage(err, \"failed to unmarshal steps\")\n\t}\n\n\t_, _, err = pbcmpl.Unmarshal(reader, &st.Leaves)\n\tif err != nil {\n\t\treturn errors.WithMessage(err, \"failed to unmarshal leaves\")\n\t}\n\n\t// backward compatible:\n\n\tbefore058ConvertToChildrenEltsToBMElts(st, ver)\n\tbefore059ExtendBitmapIndex(st, ver)\n\n\treturn nil\n}", "func Decode(s blob.CAS, obj *wiretype.Object) (*Root, error) {\n\tpb, ok := obj.Value.(*wiretype.Object_Root)\n\tif !ok {\n\t\treturn nil, errors.New(\"object does not contain a root\")\n\t}\n\treturn &Root{\n\t\tcas: s,\n\n\t\tDescription: pb.Root.Description,\n\t\tFileKey: string(pb.Root.FileKey),\n\t\tIndexKey: string(pb.Root.IndexKey),\n\t}, nil\n}", "func Deparse(tree *ParseResult) (output string, err error) {\n\tprotobufTree, err := proto.Marshal(tree)\n\tif err != nil {\n\t\treturn\n\t}\n\n\toutput, err = parser.DeparseFromProtobuf(protobufTree)\n\treturn\n}", "func deserialize(src []byte, dst interface{}) error {\n\tdec := gob.NewDecoder(bytes.NewBuffer(src))\n\tif err := dec.Decode(dst); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func Unmarshal(data []byte, v Unmarshaler) error {\n\tl := jlexer.Lexer{Data: data}\n\tv.UnmarshalTinyJSON(&l)\n\treturn l.Error()\n}", "func DecodeNode(data []byte) []string {\n\tdec, _ := Decode(data, 0)\n\tif slice, ok := dec.([]interface{}); ok {\n\t\tstrSlice := make([]string, len(slice))\n\n\t\tfor i, s := range slice {\n\t\t\tif str, ok := s.([]byte); ok {\n\t\t\t\tstrSlice[i] = string(str)\n\t\t\t}\n\t\t}\n\n\t\treturn strSlice\n\t} else {\n\t\tfmt.Printf(\"It wasn't a []. 
It's a %T\\n\", dec)\n\t}\n\n\treturn nil\n}", "func recoverTree(text *string) *Tree{\n\tif len(*text) == 0 {\n\t\treturn nil\n\t}\n\n\tif (*text)[0] == '1' {\n\t\t*text = (*text)[1:]\n\t\treturn &Tree{rune((*text)[0]), -1, nil, nil}\n\t}\n\n\troot := &Tree{'$', -1, nil, nil}\n\t*text = (*text)[1:]\n\troot.LeftNode = recoverTree(text)\n\t*text = (*text)[1:]\n\troot.RightNode = recoverTree(text)\n\treturn root\n\t\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\tif root == nil {\n\t\treturn \"nil\"\n\t}\n\treturn strconv.Itoa(root.Val) + \",\" + this.serialize(root.Left) + \",\" + this.serialize(root.Right)\n}", "func (n *RforkNode) SetTree(t *Tree) {\n\tn.tree = t\n}", "func (v *Node) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\teasyjson6601e8cdDecodeGithubComSkydiveProjectSkydiveGraffitiApiTypes1(&r, v)\n\treturn r.Error()\n}", "func (lump *Generic) Unmarshall(data []byte) (err error) {\n\tlump.length = len(data)\n\tlump.data = data\n\n\treturn err\n}", "func Decode(na ipld.NodeAssembler, in io.Reader) error {\n\tvar src []byte\n\tif buf, ok := in.(interface{ Bytes() []byte }); ok {\n\t\tsrc = buf.Bytes()\n\t} else {\n\t\tvar err error\n\t\tsrc, err = ioutil.ReadAll(in)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn DecodeBytes(na, src)\n}" ]
[ "0.72836035", "0.7193018", "0.7156152", "0.70064354", "0.69082284", "0.68782", "0.68505496", "0.6845451", "0.6837301", "0.6799518", "0.6791657", "0.67909694", "0.678274", "0.6773538", "0.67478335", "0.6734582", "0.6712768", "0.6674699", "0.6654595", "0.6651541", "0.65485096", "0.64170986", "0.6381862", "0.6339977", "0.6298404", "0.59903187", "0.58801067", "0.5580673", "0.5380222", "0.5361961", "0.5320014", "0.5313363", "0.530031", "0.52695113", "0.5224516", "0.52098185", "0.520917", "0.5195383", "0.5146985", "0.51402926", "0.5089294", "0.50751", "0.50528824", "0.50497144", "0.50456333", "0.5000904", "0.499988", "0.49840054", "0.49767372", "0.4971378", "0.49675423", "0.49510726", "0.49478355", "0.490127", "0.48789892", "0.4868002", "0.4837173", "0.48257086", "0.48103672", "0.48078525", "0.4801625", "0.4791107", "0.47828916", "0.4782495", "0.4775916", "0.47741327", "0.47730318", "0.47700217", "0.47695404", "0.47357485", "0.4729282", "0.47280848", "0.4723517", "0.47033232", "0.46882033", "0.4679471", "0.46762815", "0.46459457", "0.46457452", "0.464115", "0.46318257", "0.4630114", "0.462548", "0.45957062", "0.45919713", "0.45883563", "0.4585982", "0.458406", "0.45826137", "0.45440072", "0.45400304", "0.45272133", "0.45079517", "0.4506168", "0.4505085", "0.45033437", "0.45026395", "0.45006815", "0.44946146", "0.44943222" ]
0.7252706
1
Process the elements of the 'do' special form
func EvalDo(env Env, seq Sequence) (res Value, err error) { res = Nil for !seq.Empty() { // Not a tail call in this position t, ok := res.(TailCall) if ok == true { res, err = t.Return() if err != nil { return } } res, err = seq.Head().Eval(env) if err != nil { return } seq = seq.Tail() } return }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (t *Tempo) Do(input *SimpleBuffer) {\n\tif t.o == nil {\n\t\treturn\n\t}\n\tC.aubio_tempo_do(t.o, input.vec, t.buf.vec)\n}", "func Do(ctx context.Context, doc interface{}, opType operator.OpType, opts ...interface{}) error {\n\tif !validatorNeeded(opType) {\n\t\treturn nil\n\t}\n\tto := reflect.TypeOf(doc)\n\tif to == nil {\n\t\treturn nil\n\t}\n\tswitch reflect.TypeOf(doc).Kind() {\n\tcase reflect.Slice:\n\t\treturn sliceHandle(doc, opType)\n\tcase reflect.Ptr:\n\t\tv := reflect.ValueOf(doc).Elem()\n\t\tswitch v.Kind() {\n\t\tcase reflect.Slice:\n\t\t\treturn sliceHandle(v.Interface(), opType)\n\t\tdefault:\n\t\t\treturn do(doc)\n\t\t}\n\tdefault:\n\t\treturn do(doc)\n\t}\n}", "func (t Term) Do(args ...interface{}) Term {\n\tnewArgs := []interface{}{}\n\tnewArgs = append(newArgs, funcWrap(args[len(args)-1]))\n\tnewArgs = append(newArgs, t)\n\tnewArgs = append(newArgs, args[:len(args)-1]...)\n\n\treturn constructRootTerm(\"Do\", p.Term_FUNCALL, newArgs, map[string]interface{}{})\n}", "func Do(args ...interface{}) Term {\n\tnewArgs := []interface{}{}\n\tnewArgs = append(newArgs, funcWrap(args[len(args)-1]))\n\tnewArgs = append(newArgs, args[:len(args)-1]...)\n\n\treturn constructRootTerm(\"Do\", p.Term_FUNCALL, newArgs, map[string]interface{}{})\n}", "func (recursor StepRecursor) VisitDo(step *DoStep) error {\n\tfor _, sub := range step.Steps {\n\t\terr := sub.Config.Visit(recursor)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func (proc BuildOrderProcess) Do() {\n\tu := proc.users.GetCurrentUser()\n\tfmt.Printf(\"%v, what would you like to order?\\n\", u.Name())\n\n\tvar o models.Order\n\tfor {\n\t\tsel := prompt.Input(\"ITEM: \", proc.productsCompleter)\n\t\tif sel == \"x\" {\n\t\t\tbreak\n\t\t}\n\t\titem := proc.productFromPrompt(sel)\n\n\t\tif item.ID != \"\" {\n\t\t\tqs := prompt.Input(fmt.Sprintf(\"How many %v would you like?\", item.Name), emptyCompleter)\n\t\t\tqty, _ := strconv.Atoi(qs)\n\t\t\to.Items = append(o.Items, 
models.Item{ItemUPC: item.ID, Quantity: qty})\n\t\t}\n\t}\n\tfmt.Println(\"Your order:\")\n\tfmt.Println(o)\n\tresp := prompt.Input(\"Would you like to place this order now?\", emptyCompleter)\n\n\tif resp == \"yes\" {\n\t\tnewOrder := proc.orders.PlaceOrder(o)\n\t\tfmt.Printf(\"Thank you %v, the following order has been placed for you:\\n%v\\n\", u.Name(), newOrder)\n\t}\n}", "func (w *WattsonBridge) Do(cmd rune) string {\n\treturn w.DoArg(cmd, \"\")\n}", "func (e operations) Do(services []string, operation string) []interface{} {\n\tvar closures []concurrent.Closure\n\tfor index := range services {\n\t\tclosures = append(\n\t\t\tclosures,\n\t\t\tfunc(serviceName string) concurrent.Closure {\n\t\t\t\treturn func() interface{} {\n\t\t\t\t\treturn e.delegateToExecutor(serviceName, operation)\n\t\t\t\t}\n\t\t\t}(services[index]),\n\t\t)\n\t}\n\treturn concurrent.ExecuteAndAggregateResults(closures)\n}", "func Do(te TimeEntryDTO, cbs ...Step) (TimeEntryDTO, error) {\n\treturn compose(cbs...)(te)\n}", "func do(steps ...string) { // steps: variadic parameters\n\tdefer fmt.Println(\"All done!\")\n\n\tfor _, s := range steps {\n\t\t// last deferred statement is executed first (start car...)\n\t\tdefer fmt.Println(s) // defer each element\n\t}\n\n\tfmt.Println(\"Starting\")\n}", "func (f TransformObjFunc) Do(n nodes.Node) (nodes.Node, error) {\n\treturn f.Func().Do(n)\n}", "func (p *StepIntoParams) Do(ctx context.Context) (err error) {\n\treturn cdp.Execute(ctx, CommandStepInto, p, nil)\n}", "func StreamCommandDoFuncTemplate(qw422016 *qt422016.Writer, c *Type, d *Domain, domains []*Domain) {\n\t//line templates/domain.qtpl:75\n\ttyp := c.CommandType()\n\n\thasEmptyParams := len(c.Parameters) == 0\n\thasEmptyRet := len(c.Returns) == 0\n\n\temptyRet := c.EmptyRetList(d, domains)\n\tif emptyRet != \"\" {\n\t\temptyRet += \", \"\n\t}\n\n\tretTypeList := c.RetTypeList(d, domains)\n\tif retTypeList != \"\" {\n\t\tretTypeList += \", \"\n\t}\n\n\tretValueList := c.RetValueList(d, 
domains)\n\tif retValueList != \"\" {\n\t\tretValueList += \", \"\n\t}\n\n\tb64ret := c.Base64EncodedRetParam()\n\n\t// determine if there's a conditional return value with it\n\tb64cond := false\n\tfor _, p := range c.Returns {\n\t\tif p.Name == Base64EncodedParamName {\n\t\t\tb64cond = true\n\t\t\tbreak\n\t\t}\n\t}\n\n\t//line templates/domain.qtpl:105\n\tqw422016.N().S(`\n// Do executes `)\n\t//line templates/domain.qtpl:106\n\tqw422016.N().S(c.ProtoName(d))\n\t//line templates/domain.qtpl:106\n\tqw422016.N().S(`.`)\n\t//line templates/domain.qtpl:106\n\tif len(c.Returns) > 0 {\n\t\t//line templates/domain.qtpl:106\n\t\tqw422016.N().S(`\n//\n// returns:`)\n\t\t//line templates/domain.qtpl:108\n\t\tfor _, p := range c.Returns {\n\t\t\t//line templates/domain.qtpl:108\n\t\t\tif p.Name == Base64EncodedParamName {\n\t\t\t\t//line templates/domain.qtpl:108\n\t\t\t\tcontinue\n\t\t\t\t//line templates/domain.qtpl:108\n\t\t\t}\n\t\t\t//line templates/domain.qtpl:108\n\t\t\tqw422016.N().S(`\n// `)\n\t\t\t//line templates/domain.qtpl:109\n\t\t\tqw422016.N().S(p.String())\n\t\t\t//line templates/domain.qtpl:109\n\t\t}\n\t\t//line templates/domain.qtpl:109\n\t}\n\t//line templates/domain.qtpl:109\n\tqw422016.N().S(`\nfunc (p *`)\n\t//line templates/domain.qtpl:110\n\tqw422016.N().S(typ)\n\t//line templates/domain.qtpl:110\n\tqw422016.N().S(`) Do(ctxt context.Context, h FrameHandler) (`)\n\t//line templates/domain.qtpl:110\n\tqw422016.N().S(retTypeList)\n\t//line templates/domain.qtpl:110\n\tqw422016.N().S(`err error) {\n\tif ctxt == nil {\n\t\tctxt = context.Background()\n\t}`)\n\t//line templates/domain.qtpl:113\n\tif !hasEmptyParams {\n\t\t//line templates/domain.qtpl:113\n\t\tqw422016.N().S(`\n\n\t// marshal\n\tbuf, err := easyjson.Marshal(p)\n\tif err != nil {\n\t\treturn `)\n\t\t//line templates/domain.qtpl:118\n\t\tqw422016.N().S(emptyRet)\n\t\t//line templates/domain.qtpl:118\n\t\tqw422016.N().S(`err\n\t}`)\n\t\t//line templates/domain.qtpl:119\n\t}\n\t//line 
templates/domain.qtpl:119\n\tqw422016.N().S(`\n\n\t// execute\n\tch := h.Execute(ctxt, `)\n\t//line templates/domain.qtpl:122\n\tqw422016.N().S(c.CommandMethodType(d))\n\t//line templates/domain.qtpl:122\n\tqw422016.N().S(`, `)\n\t//line templates/domain.qtpl:122\n\tif hasEmptyParams {\n\t\t//line templates/domain.qtpl:122\n\t\tqw422016.N().S(`Empty`)\n\t\t//line templates/domain.qtpl:122\n\t} else {\n\t\t//line templates/domain.qtpl:122\n\t\tqw422016.N().S(`easyjson.RawMessage(buf)`)\n\t\t//line templates/domain.qtpl:122\n\t}\n\t//line templates/domain.qtpl:122\n\tqw422016.N().S(`)\n\n\t// read response\n\tselect {\n\tcase res := <-ch:\n\t\tif res == nil {\n\t\t\treturn `)\n\t//line templates/domain.qtpl:128\n\tqw422016.N().S(emptyRet)\n\t//line templates/domain.qtpl:128\n\tqw422016.N().S(`ErrChannelClosed\n\t\t}\n\n\t\tswitch v := res.(type) {\n\t\tcase easyjson.RawMessage:`)\n\t//line templates/domain.qtpl:132\n\tif !hasEmptyRet {\n\t\t//line templates/domain.qtpl:132\n\t\tqw422016.N().S(`\n\t\t\t// unmarshal\n\t\t\tvar r `)\n\t\t//line templates/domain.qtpl:134\n\t\tqw422016.N().S(c.CommandReturnsType())\n\t\t//line templates/domain.qtpl:134\n\t\tqw422016.N().S(`\n\t\t\terr = easyjson.Unmarshal(v, &r)\n\t\t\tif err != nil {\n\t\t\t\treturn `)\n\t\t//line templates/domain.qtpl:137\n\t\tqw422016.N().S(emptyRet)\n\t\t//line templates/domain.qtpl:137\n\t\tqw422016.N().S(`ErrInvalidResult\n\t\t\t}`)\n\t\t//line templates/domain.qtpl:138\n\t\tif b64ret != nil {\n\t\t\t//line templates/domain.qtpl:138\n\t\t\tqw422016.N().S(`\n\n\t\t\t// decode\n\t\t\tvar dec []byte`)\n\t\t\t//line templates/domain.qtpl:141\n\t\t\tif b64cond {\n\t\t\t\t//line templates/domain.qtpl:141\n\t\t\t\tqw422016.N().S(`\n\t\t\tif r.Base64encoded {`)\n\t\t\t\t//line templates/domain.qtpl:142\n\t\t\t}\n\t\t\t//line templates/domain.qtpl:142\n\t\t\tqw422016.N().S(`\n\t\t\t\tdec, err = base64.StdEncoding.DecodeString(r.`)\n\t\t\t//line 
templates/domain.qtpl:143\n\t\t\tqw422016.N().S(b64ret.GoName(false))\n\t\t\t//line templates/domain.qtpl:143\n\t\t\tqw422016.N().S(`)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}`)\n\t\t\t//line templates/domain.qtpl:146\n\t\t\tif b64cond {\n\t\t\t\t//line templates/domain.qtpl:146\n\t\t\t\tqw422016.N().S(`\n\t\t\t} else {\n\t\t\t\tdec = []byte(r.`)\n\t\t\t\t//line templates/domain.qtpl:148\n\t\t\t\tqw422016.N().S(b64ret.GoName(false))\n\t\t\t\t//line templates/domain.qtpl:148\n\t\t\t\tqw422016.N().S(`)\n\t\t\t}`)\n\t\t\t\t//line templates/domain.qtpl:149\n\t\t\t}\n\t\t\t//line templates/domain.qtpl:149\n\t\t}\n\t\t//line templates/domain.qtpl:149\n\t\tqw422016.N().S(`\n\t\t\t`)\n\t\t//line templates/domain.qtpl:150\n\t}\n\t//line templates/domain.qtpl:150\n\tqw422016.N().S(`\n\t\t\treturn `)\n\t//line templates/domain.qtpl:151\n\tqw422016.N().S(retValueList)\n\t//line templates/domain.qtpl:151\n\tqw422016.N().S(`nil\n\n\t\tcase error:\n\t\t\treturn `)\n\t//line templates/domain.qtpl:154\n\tqw422016.N().S(emptyRet)\n\t//line templates/domain.qtpl:154\n\tqw422016.N().S(`v\n\t\t}\n\n\tcase <-ctxt.Done():\n\t\treturn `)\n\t//line templates/domain.qtpl:158\n\tqw422016.N().S(emptyRet)\n\t//line templates/domain.qtpl:158\n\tqw422016.N().S(`ErrContextDone\n\t}\n\n\treturn `)\n\t//line templates/domain.qtpl:161\n\tqw422016.N().S(emptyRet)\n\t//line templates/domain.qtpl:161\n\tqw422016.N().S(`ErrUnknownResult\n}\n`)\n//line templates/domain.qtpl:163\n}", "func do() error {\n\treturn errorOne{}\n}", "func (p *SimplePipeline) Do(fs ...ProcessFunc) uint {\n\tout := NewSimpleBuffer(p.BufSize())\n\tdefer out.Free()\n\treturn p.do(fs)\n}", "func (l *ActionList) Do(ctx context.Context) (int, error) {\n\tvar das []*driver.Action\n\tfor _, a := range l.actions {\n\t\td, err := a.toDriverAction()\n\t\tif err != nil {\n\t\t\treturn 0, wrapError(l.coll.driver, err)\n\t\t}\n\t\tdas = append(das, d)\n\t}\n\tn, err := l.coll.driver.RunActions(ctx, das)\n\treturn n, 
wrapError(l.coll.driver, err)\n}", "func (rr *OPT) Do() bool {\n\treturn rr.Hdr.Ttl&_DO == _DO\n}", "func (cmd *Command) Do(ctx context.Context, shell *Shell, args []string) (err error) {\n\tc := &Context{ctx: ctx, shell: shell, args: args}\n\tif cmd.Before != nil {\n\t\terr = cmd.Before(c)\n\t}\n\tif err != nil {\n\t\treturn err\n\t}\n\tif cmd.Action != nil {\n\t\terr = cmd.Action(c)\n\t}\n\treturn err\n}", "func (a *CleanEniAction) Do() error {\n\tif errCode, errMsg := a.getEniIPs(); errCode != pbcommon.ErrCode_ERROR_OK {\n\t\treturn a.Err(errCode, errMsg)\n\t}\n\tif !a.req.IsForce {\n\t\tif errCode, errMsg := a.checkActiveIPs(); errCode != pbcommon.ErrCode_ERROR_OK {\n\t\t\treturn a.Err(errCode, errMsg)\n\t\t}\n\t}\n\tif errCode, errMsg := a.transIPToDeleting(); errCode != pbcommon.ErrCode_ERROR_OK {\n\t\treturn a.Err(errCode, errMsg)\n\t}\n\tif errCode, errMsg := a.cleanEniIPs(); errCode != pbcommon.ErrCode_ERROR_OK {\n\t\treturn a.Err(errCode, errMsg)\n\t}\n\tif errCode, errMsg := a.transIPToFree(); errCode != pbcommon.ErrCode_ERROR_OK {\n\t\treturn a.Err(errCode, errMsg)\n\t}\n\treturn nil\n}", "func (p *Parallel) do() {\n\t// if only one pipeline no need go routines\n\tif len(p.pipes) == 1 {\n\t\tp.secure(p.pipes[0])\n\t\treturn\n\t}\n\tfor _, pipe := range p.pipes {\n\t\tgo p.secure(pipe)\n\t}\n}", "func (f *FileUtil) Do() {\n\tswitch f.Command {\n\tcase \"mkdir\":\n\t\tf.mkdir()\n\tcase \"copy\", \"cp\":\n\t\tf.copy(f.Paths[0], f.Paths[1])\n\tcase \"move\", \"mv\":\n\t\tf.move(f.Paths[0], f.Paths[1])\n\t}\n\n}", "func (obj *Object) ObjDo(action string) {\n\t// Action behavior\n\tfmt.Println(\"I can, \", action)\n}", "func (c *Cxt) Do(r io.Reader) error {\n\tif c == nil || c.graph == nil {\n\t\treturn errs.Wrap(ecode.ErrNullPointer, \"\")\n\t}\n\tscanner := bufio.NewScanner(r)\n\tfor scanner.Scan() {\n\t\tstrs := strings.Split(scanner.Text(), \" \")\n\t\tif len(strs) == 2 {\n\t\t\tc.nodeList.addEdgeAttr(c.nodeList.getNodeFrom(c.graph, 
strs[0]).Edge(c.nodeList.getNodeFrom(c.graph, strs[1])))\n\t\t}\n\t}\n\tif err := scanner.Err(); err != nil {\n\t\treturn errs.Wrap(err, \"\")\n\t}\n\treturn nil\n}", "func (c *Client) do(req *http.Request) (*http.Response, error) {\n\treturn c.http.Do(req)\n}", "func DoIt(p Processor) {\n\tfmt.Printf(p.Process())\n}", "func (s *MyStruct) Do() string {\n\treturn \"do!!!!\"\n}", "func (a *Armory) DoAction() {\n\tswitch a.componentType {\n\tcase utils.BOMB:\n\tcase utils.MISSILE:\n\tcase utils.COMBOSHOT:\n\tcase utils.MULTISHOT:\n\tdefault:\n\t}\n}", "func (f TransformFunc) Do(n nodes.Node) (nodes.Node, error) {\n\tvar last error\n\tnn, ok := nodes.Apply(n, func(n nodes.Node) (nodes.Node, bool) {\n\t\tnn, ok, err := f(n)\n\t\tif err != nil {\n\t\t\tlast = err\n\t\t\treturn n, false\n\t\t} else if !ok {\n\t\t\treturn n, false\n\t\t}\n\t\treturn nn, ok\n\t})\n\tif ok {\n\t\treturn nn, last\n\t}\n\treturn n, last\n}", "func Do(values ...interface{}) {\n\tfor i, _ := range values {\n\t\tfmt.Printf(\"%#v\\n\", values[i])\n\t}\n}", "func Do(input *string) (output []string, err error) {\n\tinputStr := *input\n\tinputChars := []rune(inputStr)\n\tif len(inputStr) != 0 {\n\t\toutput = parceString(&inputChars)\n\t}\n\n\treturn\n}", "func (l *ModelList) Do(block func(name string, value Model)) {\n\tfor _, each := range l.List {\n\t\tblock(each.Name, each.Model)\n\t}\n}", "func (m *Redigomock) Do(cmd string, args ...interface{}) (interface{}, error) {\n\treturn m.FailureCall(cmd)\n}", "func (p *pipeliner) Do(a Action) error {\n\treq := getPipelinerCmd(a.(CmdAction)) // get this outside the lock to avoid\n\n\tp.l.RLock()\n\tif p.closed {\n\t\tp.l.RUnlock()\n\t\treturn errClientClosed\n\t}\n\tp.reqCh <- req\n\tp.l.RUnlock()\n\n\terr := <-req.resCh\n\tpoolPipelinerCmd(req)\n\treturn err\n}", "func Do(client *ircutil.Client, command *ircutil.Command,\n\tmessage *ircutil.Message) {\n\tircutil.SendPrivmsg(client, message.Args[0], fmt.Sprintf(\"\\x01ACTION 
%s\\x01\",\n\t\tstrings.Join(message.Args[1:], \" \")))\n}", "func DoSomething(do, something, from string) (string, error) {\n\treturn makeRequest(\"dosomething\", do, something, from)\n}", "func (p *ReadParams) Do(ctx context.Context) (data string, eof bool, err error) {\n\t// execute\n\tvar res ReadReturns\n\terr = cdp.Execute(ctx, CommandRead, p, &res)\n\tif err != nil {\n\t\treturn \"\", false, err\n\t}\n\n\treturn res.Data, res.EOF, nil\n}", "func (c *Client) do(req *http.Request, v interface{}) error {\n\treturn do(c.httpClient, req, v)\n}", "func (donef DoneFunc) Do(v interface{}) {\n donef()\n}", "func (c *Client) Do(ctx context.Context, args ...interface{}) *Cmd {\n\tcmd := NewCmd(ctx, args...)\n\t_ = c.Process(ctx, cmd)\n\treturn cmd\n}", "func (c *Client) do(ctx context.Context, req *http.Request, v interface{}) (*http.Response, error) {\n\treq = req.WithContext(ctx)\n\n\tresp, err := c.client.Do(req)\n\n\tif err != nil {\n\t\treturn resp, err\n\t}\n\n\tdefer resp.Body.Close()\n\n\terr = json.NewDecoder(resp.Body).Decode(v)\n\tif err == io.EOF {\n\t\terr = nil\n\t}\n\n\treturn resp, err\n}", "func (client *Client) EnvoyDo(podName, podNamespace, method, path string, body []byte) ([]byte, error) {\n\tcontainer, err := client.GetPilotAgentContainer(podName, podNamespace)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to retrieve proxy container name: %v\", err)\n\t}\n\tcmd := []string{pilotAgentPath, \"request\", method, path, string(body)}\n\treturn client.ExtractExecResult(podName, podNamespace, container, cmd)\n}", "func (p *parser) parseDoStmt() tree.Stmt {\n\tstmt := &tree.DoStmt{}\n\tstmt.DoToken = p.expectToken(token.DO)\n\tstmt.Body = p.parseBlockBody(token.END)\n\tstmt.EndToken = p.expectToken(token.END)\n\treturn stmt\n}", "func (a * Action)Do(b Board,info InfoAction)bool {\n\treturn a.action.Do(b,info)\n}", "func (client *Client) do(ctx context.Context, req *http.Request, v interface{}) (*http.Response, error) {\n\treq = 
req.WithContext(ctx)\n\n\tresp, err := client.client.Do(req)\n\n\tif err != nil {\n\t\treturn resp, err\n\t}\n\n\tdefer resp.Body.Close()\n\n\terr = json.NewDecoder(resp.Body).Decode(v)\n\tif err == io.EOF {\n\t\terr = nil\n\t}\n\n\treturn resp, err\n}", "func (self *procImpl) Do(a Action) error {\n\treturn self.DoLater(a)\n}", "func (lr *Rule) DoAct(ls *State, act Actions, tok *token.KeyToken) {\n\tswitch act {\n\tcase Next:\n\t\tls.Next(lr.MatchLen)\n\tcase Name:\n\t\tls.ReadName()\n\tcase Number:\n\t\ttok.Tok = ls.ReadNumber()\n\tcase Quoted:\n\t\tls.ReadQuoted()\n\tcase QuotedRaw:\n\t\tls.ReadQuoted() // todo: raw!\n\tcase EOL:\n\t\tls.Pos = len(ls.Src)\n\tcase ReadUntil:\n\t\tls.ReadUntil(lr.Until)\n\t\tls.Pos += lr.SizeAdj\n\tcase PushState:\n\t\tls.PushState(lr.PushState)\n\tcase PopState:\n\t\tls.PopState()\n\tcase SetGuestLex:\n\t\tif ls.LastName == \"\" {\n\t\t\tls.Error(ls.Pos, \"SetGuestLex action requires prior Name action -- name is empty\", lr)\n\t\t} else {\n\t\t\tlx := TheLangLexer.LexerByName(ls.LastName)\n\t\t\tif lx != nil {\n\t\t\t\tls.GuestLex = lx\n\t\t\t\tls.SaveStack = ls.Stack.Clone()\n\t\t\t}\n\t\t}\n\tcase PopGuestLex:\n\t\tif ls.SaveStack != nil {\n\t\t\tls.Stack = ls.SaveStack\n\t\t\tls.SaveStack = nil\n\t\t}\n\t\tls.GuestLex = nil\n\t}\n}", "func (cl *List) Process(bot *hbot.Bot, m *hbot.Message) {\n\t// Is the first character our command prefix?\n\tif m.Content[:1] == cl.Prefix {\n\t\tparts := strings.Fields(m.Content[1:])\n\t\tcommandstring := parts[0]\n\t\tcmd, ok := cl.Commands[commandstring]\n\t\tif !ok {\n\t\t\tif commandstring == \"help\" {\n\t\t\t\tif len(parts) < 2 {\n\t\t\t\t\tbot.Msg(m.From, \"Here's what I can do:\")\n\t\t\t\t\tvar commands bytes.Buffer\n\t\t\t\t\ti := 0\n\t\t\t\t\tfor _, cmd := range cl.Commands {\n\t\t\t\t\t\ti = i + 1\n\t\t\t\t\t\tcommands.WriteString(cmd.Name)\n\t\t\t\t\t\tif i != len(cl.Commands) {\n\t\t\t\t\t\t\tcommands.WriteString(\", \")\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tbot.Msg(m.From, 
commands.String())\n\t\t\t\t\tbot.Msg(m.From, fmt.Sprintf(\"The prefix for all these commands is: \\\"%s\\\"\", cl.Prefix))\n\t\t\t\t\tbot.Msg(m.From, fmt.Sprintf(\"See %shelp <command> for detailed information\", cl.Prefix))\n\t\t\t\t} else {\n\t\t\t\t\thelpcmd, helpok := cl.Commands[parts[1]]\n\t\t\t\t\tif helpok {\n\t\t\t\t\t\tbot.Msg(m.From, fmt.Sprintf(\"%s: %s\", helpcmd.Description, helpcmd.Usage))\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbot.Msg(m.From, fmt.Sprintf(\"No such command: %s\", parts[1]))\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\t// looks good, get the quote and reply with the result\n\t\tbot.Logger.Debug(\"action\", \"start processing\",\n\t\t\t\"args\", parts,\n\t\t\t\"full text\", m.Content)\n\t\tgo func(m *hbot.Message) {\n\t\t\tbot.Logger.Debug(\"action\", \"executing\",\n\t\t\t\t\"full text\", m.Content)\n\t\t\tif len(parts) > 1 {\n\t\t\t\tcmd.Run(m, parts[1:])\n\t\t\t} else {\n\t\t\t\tcmd.Run(m, []string{})\n\t\t\t}\n\t\t}(m)\n\t}\n}", "func DoDeposito(w http.ResponseWriter, r *http.Request) {\n\tvar deposito Desposito\n\tdecoder := json.NewDecoder(r.Body)\n\n\tif err := decoder.Decode(&deposito); err != nil {\n\t\tmodels.SendUnprocessableEntity(w)\n\t\treturn\n\t}\n\ttarjetaDestino, err := models.GetTarjetaByNumeroTarjeta(deposito.TarjetaDestino)\n\tcuentaDestino, err := models.GetCuentaByID(tarjetaDestino.IDCuenta)\n\t\n\terr = cuentaDestino.Depositar(deposito.Monto)\n\tif err != nil {\n\t\tmodels.SendNotFound(w)\n\t\treturn\n\t}\n\t\n\ttransaccion,_ := models.CrearTransaccion(deposito.Monto, 1, \"\", deposito.TarjetaDestino, 1)\n\ttResponse:= FormatResponse(transaccion)\n\ttResponse.NumeroTarjetaOrigen=\"\"\n\tmodels.SendData(w, tResponse)\n}", "func (tx *Transaction) do() (result Result, undoOffset int, err error) {\n\tresult, undoOffset, err = tx.doer().DoNormal(tx)\n\tif result != Success {\n\t\treturn result, undoOffset, fmt.Errorf(\"doNormal failed: %v\", err)\n\t}\n\n\treturn tx.doer().DoUncertain(tx)\n}", "func (p 
*StepOverParams) Do(ctx context.Context) (err error) {\n\treturn cdp.Execute(ctx, CommandStepOver, p, nil)\n}", "func (c *Executor) do(request *http.Request, followRedirects bool) (*http.Response, error) {\n\tclient, err := c.clientProvider.Client(followRedirects)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error executing request, failed to get the underlying HTTP client: %w\", err)\n\t}\n\tr, err := client.Do(request)\n\tif err != nil {\n\t\t// if we get an error because the context was cancelled, the context's error is more useful.\n\t\tctx := request.Context()\n\t\tselect {\n\t\tcase <-ctx.Done():\n\t\t\treturn nil, ctx.Err()\n\t\tdefault:\n\t\t}\n\t\treturn nil, fmt.Errorf(\"error executing request, failed during HTTP request send: %w\", err)\n\t}\n\treturn r, nil\n}", "func (t *BeatTracker) Do(input *SimpleBuffer) {\n\tif t.o == nil {\n\t\treturn\n\t}\n\tC.aubio_beattracking_do(t.o, input.vec, t.buf.vec)\n}", "func (s *Serial) Do(command string) (output string, err error) {\n\tout := fmt.Sprintf(\"%s%s\", command, sep)\n\t_, err = s.rw.WriteString(out)\n\tif err != nil {\n\t\treturn\n\t}\n\ts.rw.Flush()\n\n\treadLine := <-s.lines\n\treturn readLine.line, readLine.err\n}", "func do(t *testing.T, s string, e []ast.Statement) {\n\tscn := scanner.New(s)\n\tparser := New(scn)\n\tprogram, err := parser.Parse()\n\n\tif err != nil {\n\t\tt.Error(err)\n\t\treturn\n\t}\n\n\tif !reflect.DeepEqual(e, program.Statements) {\n\t\tt.Errorf(\"expected %#v got %#v\", e, program.Statements)\n\t\ttokTrace := ast.Print(program)\n\n\t\tfmt.Println(tokTrace)\n\t}\n}", "func (s *BasemumpsListener) EnterDo_(ctx *Do_Context) {}", "func (p *StepOutParams) Do(ctx context.Context) (err error) {\n\treturn cdp.Execute(ctx, CommandStepOut, nil, nil)\n}", "func (op *TemplateHTMLDefinitionsListOp) Do(ctx context.Context) (*model.DocumentHTMLDefinitionOriginals, error) {\n\tvar res *model.DocumentHTMLDefinitionOriginals\n\treturn res, ((*esign.Op)(op)).Do(ctx, &res)\n}", "func (o 
*OciServiceControl) do(op string, excludedErrorMsgs ...string) error {\n\tlogrus.Infof(\"Doing systemctl %s %s\", o.service, strings.ToUpper(op))\n\tvar b bytes.Buffer\n\tcmd := fmt.Sprintf(\"systemctl %s %s\", op, o.service)\n\terr := o.RunExternal(&b, \"/bin/sh\", \"-c\", cmd)\n\tlogrus.WithError(err).WithField(\"out\", b.String()).Debugf(\"SVC %sd\", op)\n\tif err != nil {\n\t\tif len(excludedErrorMsgs) > 0 {\n\t\t\t// Scan stderr output looking for provided segments that \"clear\" the error\n\t\t\t// ie. \" not loaded\" to ignore error while stopping non-existing service\n\t\t\terrOut := b.String()\n\t\t\tfor _, v := range excludedErrorMsgs {\n\t\t\t\tif strings.Contains(errOut, v) {\n\t\t\t\t\tlogrus.Debugf(\"Error message `%s` cleared as OK\", errOut)\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\terr = fmt.Errorf(\"Could not %s '%s' service: %s\", op, o.service, err)\n\t}\n\treturn err\n}", "func (c *Client) Do(query string, vars interface{}, key string, res interface{}) error {\n\treturn c.DoOperation(query, \"\", vars, key, res)\n}", "func (c *Client) Do(args ...interface{}) *Cmd {\n\treturn c.DoContext(c.ctx, args...)\n}", "func (client *Client) Do(funcname string, data []byte,\n\tflag byte, h ResponseHandler) (handle string, err error) {\n\thandle, err = client.DoWithId(funcname, data, flag, h, IdGen.Id())\n\treturn\n}", "func (c *client) do(req *http.Request) ([]byte, error) {\n\trawResp, err := c.httpClient.Do(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer rawResp.Body.Close()\n\n\tvar b []byte\n\tbuf := bytes.NewBuffer(b)\n\tif _, err = io.Copy(buf, rawResp.Body); err != nil {\n\t\treturn nil, err\n\t}\n\treturn buf.Bytes(), nil\n}", "func Do(query string, result interface{}, src Obj) error {\n\tfmt.Println(query)\n\ttree, err := sqlparser.Parse(query)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tbase.Debug(pretty.Sprint(tree))\n\n\tswitch tree.(type) {\n\tcase sqlparser.SelectStatement:\n\t\terr := 
sel.Do(tree.(sqlparser.SelectStatement), result, base.Obj(src))\n\t\treturn err\n\t//case *sqlparser.Union:\n\t//\ttree.(*sqlparser.Union).\n\tdefault:\n\t\treturn fmt.Errorf(\"Query type not supported\")\n\t}\n}", "func do() {\n\trun(LdapUser, LdapPass)\n}", "func (m *MockDoer) Do(data interface{}) interface{} {\n\targs := m.MethodCalled(\"Do\", data)\n\n\treturn args.Get(0)\n}", "func (sm *ShardMaster) DoCmd(op *Op) interface{} {\n if op.OpType == \"Join\" {\n return sm.DoJoin(op.Args.(JoinArgs))\n } else if op.OpType == \"Leave\" {\n return sm.DoLeave(op.Args.(LeaveArgs))\n } else if op.OpType == \"Move\" {\n return sm.DoMove(op.Args.(MoveArgs))\n } else if op.OpType == \"Query\" {\n return sm.DoQuery(op.Args.(QueryArgs))\n }\n return \"\"\n}", "func (t *Tag) Do() ([]byte, error) {\n\tbuf := bytes.NewBuffer([]byte{})\n\tif err := t.Build(buf); err != nil {\n\t\treturn nil, err\n\t}\n\tif Debug {\n\t\tfmt.Println(buf.String())\n\t}\n\n\tresp, err := http.Post(apiURL, \"text/xml\", buf)\n\tif resp != nil {\n\t\tif resp.Body != nil {\n\t\t\tdefer resp.Body.Close()\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif resp.StatusCode != 200 {\n\t\treturn body, fmt.Errorf(\"return code %d\", resp.StatusCode)\n\t}\n\n\treturn body, nil\n}", "func (c *RESTClient) do(req *http.Request) (*http.Response, error) {\n\tif c.Err != nil {\n\t\treturn nil, c.Err\n\t}\n\tc.Req = req\n\tif c.Client != nil {\n\t\treturn c.Client.Do(req)\n\t}\n\treturn c.Resp, nil\n}", "func (c *Command) Do(handler func(cmd *Command)) {\n\tc.handler = handler\n}", "func (e *ErrCollector) Do(errs ...error) {\n\tfor i, err := range errs {\n\t\tif err != nil {\n\t\t\t_, file, line, _ := runtime.Caller(1)\n\t\t\te.Err = err\n\t\t\te.Index = i + 1\n\t\t\te.File = file\n\t\t\te.Line = line\n\t\t\treturn\n\t\t}\n\t}\n}", "func (c *PipelineClient) Do(req *Request, resp *Response) error {\n\treturn 
c.getConnClient().Do(req, resp)\n}", "func (r *Request) Do(ctx context.Context, h Handler) (ResponseData, error) {\n\tpanic(\"TODO\")\n}", "func (s *BasejossListener) EnterDoCMD(ctx *DoCMDContext) {}", "func (op *deleteServiceOperation) do(ctx context.Context, r *Service, c *Client) error {\n\tu, err := serviceDeleteURL(c.Config.BasePath, r)\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = dcl.SendRequest(ctx, c.Config, \"POST\", u, bytes.NewBuffer([]byte{}), c.Config.Retry)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (t *T) Do(ctx context.Context) error {\n\terrs := errors.M{}\n\tinterfaces, err := listPackagesOrSpecs(ctx, t.interfacePackages)\n\terrs.Append(err)\n\tfunctions, err := listPackagesOrSpecs(ctx, t.functionPackages)\n\terrs.Append(err)\n\tvar packages []string\n\tif len(t.implementationPackages) > 0 {\n\t\tpackages, err = listPackages(ctx, t.implementationPackages)\n\t\terrs.Append(err)\n\t}\n\tif err := errs.Err(); err != nil {\n\t\treturn err\n\t}\n\n\tpackages = dedup(packages)\n\tallPackages, err := packagesToLoad(ctx, interfaces, functions, packages)\n\tif err != nil {\n\t\treturn err\n\t}\n\tcomments := dedup(t.commentExpressions)\n\tif err := t.loader.loadPaths(allPackages, t.options.tests); err != nil {\n\t\treturn err\n\t}\n\tif err := t.findInterfaces(ctx, interfaces); err != nil {\n\t\treturn err\n\t}\n\tgrp, ctx := errgroup.WithContext(ctx)\n\tgrp.GoContext(ctx, func() error {\n\t\treturn t.findFunctions(ctx, functions)\n\t})\n\tgrp.GoContext(ctx, func() error {\n\t\treturn t.findImplementations(ctx, packages)\n\t})\n\tgrp.GoContext(ctx, func() error {\n\t\treturn t.findComments(ctx, comments)\n\t})\n\treturn grp.Wait()\n}", "func (t *Tree) Do(fn Operation) bool {\n\tif t.Root == nil {\n\t\treturn false\n\t}\n\treturn t.Root.do(fn, 0)\n}", "func DoTransferencia(w http.ResponseWriter, r *http.Request) {\n\tenableCors(&w)\n\tvar transferencia Transferencia\n\tdecoder := json.NewDecoder(r.Body)\n\n\tif err := 
decoder.Decode(&transferencia); err != nil {\n\t\tmodels.SendUnprocessableEntity(w)\n\t\treturn\n\t}\n\n if models.ValidTarjeta(transferencia.TarjetaOrigen, transferencia.FechaVencimiento, transferencia.Cvv) {\n\n tarjetaOrigen, err := models.GetTarjetaByNumeroTarjeta(transferencia.TarjetaOrigen)\n cuentaOrigen, err := models.GetCuentaByID(tarjetaOrigen.IDCuenta)\n \t\n tarjetaDestino, err := models.GetTarjetaByNumeroTarjeta(transferencia.TarjetaDestino)\n \tcuentaDestino, err := models.GetCuentaByID(tarjetaDestino.IDCuenta)\n\n \tif tarjetaOrigen.ID != 0 && tarjetaDestino.ID != 0 {\n \terr = cuentaOrigen.Transferir(cuentaDestino.NumeroDeCuenta, transferencia.Monto)\n \tif err != nil {\n \tmodels.SendPaymentRequired(w)\n \treturn\n \t}\n \ttransaccion,_ := models.CrearTransaccion(transferencia.Monto, 1, transferencia.TarjetaOrigen, transferencia.TarjetaDestino, 2)\n \ttResponse := FormatResponse(transaccion)\n \tmodels.SendData(w, tResponse)\n \treturn\n \t} else {\n \t\tmodels.SendNotFound(w)\n \t\treturn\n \t}\n }\n\n models.SendNotFound(w)\n}", "func (p *CheckFormsIssuesParams) Do(ctx context.Context) (formIssues []*GenericIssueDetails, err error) {\n\t// execute\n\tvar res CheckFormsIssuesReturns\n\terr = cdp.Execute(ctx, CommandCheckFormsIssues, nil, &res)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn res.FormIssues, nil\n}", "func (f ActionFunc) Do(ctx context.Context) {\n\tf(ctx)\n}", "func (c *mockCore) do(needAuth bool, r request) ([]byte, error) {\n\tc.request = r\n\treturn []byte(c.respJSON), c.error\n}", "func (op *CustomFieldsDeleteOp) Do(ctx context.Context) error {\n\treturn ((*esign.Op)(op)).Do(ctx, nil)\n}", "func (rr *OPT) SetDo(do ...bool) {\n\tif len(do) == 1 {\n\t\tif do[0] {\n\t\t\trr.Hdr.Ttl |= _DO\n\t\t} else {\n\t\t\trr.Hdr.Ttl &^= _DO\n\t\t}\n\t} else {\n\t\trr.Hdr.Ttl |= _DO\n\t}\n}", "func (reqParams *ReqParams) do() (resp *http.Response, err error) {\n\tvar reqBody io.Reader\n\tif reqParams.Body != nil {\n\t\treqBody = 
bytes.NewBuffer(reqParams.Body)\n\t}\n\turlPath := reqParams.BaseParams.URL + reqParams.Path\n\treq, errR := http.NewRequest(reqParams.BaseParams.Method, urlPath, reqBody)\n\tif errR != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create http request: %w\", errR)\n\t}\n\treqParams.setRequestOptParams(req)\n\tSetAuxHeaders(req, &reqParams.BaseParams)\n\n\trr := reqResp{client: reqParams.BaseParams.Client, req: req}\n\terr = cmn.NetworkCallWithRetry(&cmn.RetryArgs{\n\t\tCall: rr.call,\n\t\tVerbosity: cmn.RetryLogOff,\n\t\tSoftErr: httpMaxRetries,\n\t\tSleep: httpRetrySleep,\n\t\tBackOff: true,\n\t\tIsClient: true,\n\t})\n\tresp = rr.resp\n\tif err != nil && resp != nil {\n\t\therr := cmn.NewErrHTTP(req, err, resp.StatusCode)\n\t\therr.Method, herr.URLPath = reqParams.BaseParams.Method, reqParams.Path\n\t\terr = herr\n\t}\n\treturn\n}", "func (e *Exec) DoExec() {\n\t// check\n\tif e.Name == \"\" {\n\t\te.Name = \"BTerminal-\" + e.GetNameID8b()\n\t}\n\tif e.Command == \"\" {\n\t\t// do nothing\n\t\treturn\n\t}\n\n\t// exec\n\te.Lock()\n\tDoExecute(e.LogName, e.Command)\n\te.Unlock()\n}", "func (r *Reconciler) doChange(change *devicechange.DeviceChange) error {\n\tlog.Infof(\"Applying change %v \", change.ID)\n\tlog.Debugf(\"%v \", change.Change)\n\treturn r.translateAndSendChange(change.Change)\n}", "func lesson44(){\n\tdo(10)\n\tdo(\"Sato\")\n\tdo(true)\n}", "func (df DoerFunc) Do(req *http.Request) (*http.Response, error) { return df(req) }", "func (f *Form) Do(verb, urlStr string) (*http.Response, error) {\n\treturn do(f, verb, urlStr)\n}", "func (f DoerFunc) Do(r *http.Request) (*http.Response, error) {\n\treturn f(r)\n}", "func (do doworker) Do(d interface{}, err error) (interface{}, error) {\n\treturn do.p(d, err)\n}", "func (p *StartParams) Do(ctx context.Context) (err error) {\n\treturn cdp.Execute(ctx, CommandStart, p, nil)\n}", "func (svc *DoExpressCheckoutPayment) Do(cli client.Client) (*DoExpressCheckoutPaymentResponse, error) {\n\tconst method = 
\"DoExpressCheckoutPayment\"\n\tsvc.BaseRequest.Method = method\n\tsvc.BaseRequest.Action = paymentActionSale\n\n\tif svc.TotalAmount == 0 {\n\t\tsvc.TotalAmount = svc.ItemAmount + svc.TaxAmount\n\t}\n\n\tresult := &DoExpressCheckoutPaymentResponse{}\n\terr := cli.Call(svc, result)\n\treturn result, err\n}", "func (p *NextWasmDisassemblyChunkParams) Do(ctx context.Context) (chunk *WasmDisassemblyChunk, err error) {\n\t// execute\n\tvar res NextWasmDisassemblyChunkReturns\n\terr = cdp.Execute(ctx, CommandNextWasmDisassemblyChunk, p, &res)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn res.Chunk, nil\n}", "func (op *TemplateDocumentHTMLDefinitionsListOp) Do(ctx context.Context) (*model.DocumentHTMLDefinitionOriginals, error) {\n\tvar res *model.DocumentHTMLDefinitionOriginals\n\treturn res, ((*esign.Op)(op)).Do(ctx, &res)\n}", "func (c *Client) Do(ctx context.Context, req *Request, resp *Response) error {\n\tr := graphql.Response{}\n\tif resp != nil {\n\t\tr.Data = resp.Data\n\t\tr.Errors = resp.Errors\n\t\tr.Extensions = resp.Extensions\n\t}\n\treturn c.gql.MakeRequest(ctx, &graphql.Request{\n\t\tQuery: req.Query,\n\t\tVariables: req.Variables,\n\t\tOpName: req.OpName,\n\t}, &r)\n}", "func (s *ConstantStep) Do(ctx context.Context, exec Executor, flow *Flow) error {\n\tflow.Data = s.Value\n\tflow.ContentType = \"application/json\"\n\texec.GetLogger().Debugf(ctx, \"Constant step ID %s returned value %s\", s.GetID(), string(s.Value))\n\n\treturn nil\n}", "func (r *Request) Do() *http.Response {\n\trec := httptest.NewRecorder()\n\tmethod := \"\"\n\tif r.Header == nil {\n\t\tr.Header = http.Header{}\n\t}\n\tif r.Body != nil {\n\t\tmethod = \"POST\"\n\t\tr.Header.Add(\"Content-Type\", \"application/x-www-form-urlencoded\")\n\t}\n\treq := httptest.NewRequest(method, r.Target, r.Body)\n\tfor name, values := range r.Header {\n\t\tfor _, value := range values {\n\t\t\treq.Header.Add(name, value)\n\t\t}\n\t}\n\tif r.Host != \"\" {\n\t\treq.Host = 
r.Host\n\t}\n\tr.Handler.ServeHTTP(rec, req)\n\treturn rec.Result()\n}", "func (s Sequence) Do(f func(el El)) {\n\ts.Find(func(el El)bool{\n\t\tf(el)\n\t\treturn false\n\t})\n}", "func doOp(t *testing.T, st types.CacheKVStore, truth dbm.DB, op int, args ...int) {\n\tt.Helper()\n\tswitch op {\n\tcase opSet:\n\t\tk := args[0]\n\t\tst.Set(keyFmt(k), valFmt(k))\n\t\terr := truth.Set(keyFmt(k), valFmt(k))\n\t\trequire.NoError(t, err)\n\tcase opSetRange:\n\t\tstart := args[0]\n\t\tend := args[1]\n\t\tsetRange(t, st, truth, start, end)\n\tcase opDel:\n\t\tk := args[0]\n\t\tst.Delete(keyFmt(k))\n\t\terr := truth.Delete(keyFmt(k))\n\t\trequire.NoError(t, err)\n\tcase opDelRange:\n\t\tstart := args[0]\n\t\tend := args[1]\n\t\tdeleteRange(t, st, truth, start, end)\n\tcase opWrite:\n\t\tst.Write()\n\t}\n}", "func nodeDo(path []string, data interface{}, f func(k string, v *Value) error) error {\n\tif data == nil {\n\t\treturn nil\n\t}\n\tswitch d := data.(type) {\n\tcase []interface{}:\n\t\tfor i, d := range d {\n\t\t\tk := \"#\" + strconv.Itoa(i)\n\t\t\tif err := f(k, newValue(path, d, nil)); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\treturn nil\n\tcase map[string]interface{}:\n\t\tfor k, d := range d {\n\t\t\tif err := f(k, newValue(path, d, nil)); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\treturn nil\n\tcase string, int, float64, bool:\n\t\treturn f(\"\", newValue(path, data, nil))\n\t}\n\treturn nil\n}", "func (p *DisassembleWasmModuleParams) Do(ctx context.Context) (streamID string, totalNumberOfLines int64, functionBodyOffsets []int64, chunk *WasmDisassemblyChunk, err error) {\n\t// execute\n\tvar res DisassembleWasmModuleReturns\n\terr = cdp.Execute(ctx, CommandDisassembleWasmModule, p, &res)\n\tif err != nil {\n\t\treturn \"\", 0, nil, nil, err\n\t}\n\n\treturn res.StreamID, res.TotalNumberOfLines, res.FunctionBodyOffsets, res.Chunk, nil\n}" ]
[ "0.6447774", "0.6129042", "0.60955185", "0.60474503", "0.6016045", "0.59603953", "0.59271395", "0.59009576", "0.5867061", "0.5775187", "0.5773696", "0.574748", "0.5735404", "0.5732381", "0.5718782", "0.57125884", "0.5695367", "0.5691763", "0.5640797", "0.5632126", "0.56307566", "0.5615645", "0.5599769", "0.5579192", "0.55740047", "0.5495394", "0.54916453", "0.5491077", "0.5471244", "0.5469401", "0.54530144", "0.5448908", "0.54426235", "0.5425911", "0.5424163", "0.54173565", "0.54109925", "0.53980345", "0.53921926", "0.5369409", "0.53277266", "0.5323782", "0.53212893", "0.5320306", "0.53188777", "0.5316509", "0.5304343", "0.5299223", "0.52958095", "0.52822864", "0.52708524", "0.5267048", "0.52593154", "0.5255913", "0.5253031", "0.5242542", "0.52419984", "0.52411634", "0.52344716", "0.5234046", "0.523117", "0.5230628", "0.52261937", "0.5205943", "0.52010304", "0.51996404", "0.5197955", "0.5192775", "0.5187837", "0.5186203", "0.51846254", "0.5184293", "0.5180547", "0.51722527", "0.51702374", "0.51700634", "0.51666474", "0.51568127", "0.51552933", "0.515527", "0.51543015", "0.51535845", "0.5149867", "0.5145498", "0.5142121", "0.5138117", "0.51357186", "0.5127557", "0.512711", "0.51178366", "0.51085556", "0.5108387", "0.5107587", "0.5097246", "0.5093112", "0.50904566", "0.50857997", "0.50850594", "0.5081662", "0.5076872", "0.5076443" ]
0.0
-1
TableName overrides the default tablename generated by GORM
func (HealthMenstruationPersonalInfoORM) TableName() string { return "health_menstruation_personal_infos" }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (user *User) TableName() string {\n return \"users\"\n}", "func (User) TableName() string {\n\treturn tableName\n}", "func (TblUser) TableName() string {\n\treturn \"tblUser\"\n}", "func TableNameNoSchema(dialect Dialect, mapper names.Mapper, tableName interface{}) string {\n\tquote := dialect.Quoter().Quote\n\tswitch tableName.(type) {\n\tcase []string:\n\t\tt := tableName.([]string)\n\t\tif len(t) > 1 {\n\t\t\treturn fmt.Sprintf(\"%v AS %v\", quote(t[0]), quote(t[1]))\n\t\t} else if len(t) == 1 {\n\t\t\treturn quote(t[0])\n\t\t}\n\tcase []interface{}:\n\t\tt := tableName.([]interface{})\n\t\tl := len(t)\n\t\tvar table string\n\t\tif l > 0 {\n\t\t\tf := t[0]\n\t\t\tswitch f.(type) {\n\t\t\tcase string:\n\t\t\t\ttable = f.(string)\n\t\t\tcase names.TableName:\n\t\t\t\ttable = f.(names.TableName).TableName()\n\t\t\tdefault:\n\t\t\t\tv := utils.ReflectValue(f)\n\t\t\t\tt := v.Type()\n\t\t\t\tif t.Kind() == reflect.Struct {\n\t\t\t\t\ttable = names.GetTableName(mapper, v)\n\t\t\t\t} else {\n\t\t\t\t\ttable = quote(fmt.Sprintf(\"%v\", f))\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif l > 1 {\n\t\t\treturn fmt.Sprintf(\"%v AS %v\", quote(table), quote(fmt.Sprintf(\"%v\", t[1])))\n\t\t} else if l == 1 {\n\t\t\treturn quote(table)\n\t\t}\n\tcase names.TableName:\n\t\treturn tableName.(names.TableName).TableName()\n\tcase string:\n\t\treturn tableName.(string)\n\tcase reflect.Value:\n\t\tv := tableName.(reflect.Value)\n\t\treturn names.GetTableName(mapper, v)\n\tdefault:\n\t\tv := utils.ReflectValue(tableName)\n\t\tt := v.Type()\n\t\tif t.Kind() == reflect.Struct {\n\t\t\treturn names.GetTableName(mapper, v)\n\t\t}\n\t\treturn quote(fmt.Sprintf(\"%v\", tableName))\n\t}\n\treturn \"\"\n}", "func (o *<%= classedName %>) TableName() string {\n\treturn \"<%= tableName %>\"\n}", "func (Alumno) TableName() string {\n\treturn \"alumnos\"\n}", "func (model *Barang) TableName() string {\n\treturn BarangTableName\n}", "func (cpt *DbVersion) TableName() string { return 
\"goose_db_version\" }", "func (u User) Table() string {\n\treturn tableName\n}", "func (CommentORM) TableName() string {\n\treturn \"comments\"\n}", "func (a *Action) TableName() string {\n\tconst ormTableName = \"actions\"\n\treturn ormTableName\n}", "func (ProfileORM) TableName() string {\n\treturn \"profiles\"\n}", "func (UserInfoORM) TableName() string {\n\treturn \"user_infos\"\n}", "func (User) TableName() string {\n\treturn \"user\"\n}", "func (User) TableName() string {\n\treturn \"user\"\n}", "func (User) TableName() string {\n\treturn \"user\"\n}", "func (User) TableName() string {\n\treturn \"user\"\n}", "func (User) TableName() string {\n\treturn \"dbo.Users\"\n}", "func (EmailORM) TableName() string {\n\treturn \"emails\"\n}", "func (u *User) TableName() string {\n\treturn LUTUserTableName\n}", "func (sc SnakeCaseConvention) TableName(typeName string) string {\n\treturn sc.Convert(typeName)\n}", "func (PeriodORM) TableName() string {\n\treturn \"periods\"\n}", "func FullTableName(tableName string) string {\n\tconf, confErr := config.GetAppConfig()\n\tif confErr != nil {\n\t\tlog.Fatalf(\"read database config err %v \", confErr)\n\t}\n\n\treturn conf.App.DataBase.Prefix + tableName\n}", "func (TestUser) TableName() string {\n\treturn \"test_user\"\n}", "func (m *GormRoleMappingRepository) TableName() string {\n\treturn \"role_mapping\"\n}", "func (m *Migration) TableName() string {\n\treturn \"dbMigration\"\n}", "func (a *Customer) TableName() string {\n\treturn \"gorm_customers\"\n}", "func (u UpdateUser) Table() string {\n\treturn tableName\n}", "func (u *user_info) TableName() string {\n return \"party_user_info\"\n}", "func (model *Produk) TableName() string {\n\treturn ProdukTableName\n}", "func (UserEntity) TableName() string {\n\treturn \"user\"\n}", "func (sc SameCaseConvention) TableName(typeName string) string {\n\treturn sc.Convert(typeName)\n}", "func (z *Zzz) SqlTableName() string { //nolint:dupl false positive\n\treturn `\"zzz\"`\n}", 
"func (s *UpBaseInfo) TableName() string {\n\treturn TableNameUpBaseInfo\n}", "func (m *ClassChangeTeaHis) TableName() string {\n\treturn \"class_change_tea_his\"\n}", "func (Project) TableName() string {\n\treturn \"project\"\n}", "func (Person) TableName() string {\n\treturn \"person\"\n}", "func FullTableName(dialect Dialect, mapper names.Mapper, bean interface{}, includeSchema ...bool) string {\n\ttbName := TableNameNoSchema(dialect, mapper, bean)\n\tif len(includeSchema) > 0 && includeSchema[0] && !utils.IsSubQuery(tbName) {\n\t\ttbName = TableNameWithSchema(dialect, tbName)\n\t}\n\treturn tbName\n}", "func TableName(name string) string {\n\tif IsTest {\n\t\treturn fmt.Sprintf(\"%v_test\", name)\n\t}\n\treturn fmt.Sprintf(\"%v_development\", name)\n}", "func (User) TableName() string {\n\treturn WithTablePrefix(\"user\")\n}", "func (Server) TableName() string {\n\treturn \"Server\"\n}", "func (i *Install) TableName() string {\r\n\treturn \"install\"\r\n}", "func (u User) TableName() string {\n\treturn \"Users\"\n}", "func (ContactORM) TableName() string {\n\treturn \"contacts\"\n}", "func (ContactORM) TableName() string {\n\treturn \"contacts\"\n}", "func TableName(key string) string {\n\treturn strings.TrimSuffix(key, sqlIDSuffix)\n}", "func (Company) TableName() string {\n\treturn \"company\"\n}", "func (ul *UserLogin) TableName() string {\n\treturn \"user_login\"\n}", "func (hp mysqlTS) TableName() string {\n\treturn \"tsad_points\"\n}", "func (m Article) TableName() string {\n\treturn \"articles\"\n\n}", "func (m Article) TableName() string {\n\treturn \"articles\"\n\n}", "func (IntPointORM) TableName() string {\n\treturn \"int_points\"\n}", "func (u *User) TableName() string {\n\treturn userTableName\n}", "func (Pokemon) TableName() string { return \"pokemons\" }", "func (Bank) TableName() string {\n\treturn \"bank\"\n}", "func (app *Table) Name() string {\n\treturn \"table\"\n}", "func (meeting *Meeting) TableName() string {\n\treturn 
\"ljl_meeting\"\n}", "func (Practica) TableName() string {\n\treturn \"practicas\"\n}", "func (lc LowerCaseConvention) TableName(typeName string) string {\n\treturn lc.Convert(typeName)\n}", "func (p Project) TableName() string {\n\treturn \"project\"\n}", "func (*Admin) TableName() string {\n\treturn tablePrefix + \"admin\"\n}", "func (User) TableName() string {\n\treturn \"users\"\n}", "func (User) TableName() string {\n\treturn \"users\"\n}", "func (User) TableName() string {\n\treturn \"users\"\n}", "func (Static) TableName() string {\n\treturn \"static\"\n}", "func (s *SchemaMigration) TableName() string {\n\treturn \"schema_migrations\"\n}", "func (Menu) TableName() string {\n\treturn \"menu\"\n}", "func (st SlackTeam) TableName() string {\n\treturn \"slack_team\"\n}", "func (User) TableName() string {\n\treturn \"urbs_user\"\n}", "func (Metric) TableName() string {\r\n\treturn tableNameMetrics\r\n}", "func (Team) TableName() string {\n\treturn \"teams\"\n}", "func (a *Article) TableName() string {\n\treturn \"article\"\n}", "func (c *AccountModel) TableName() string {\n\treturn AccountTable\n}", "func (e Entity) TableName() string {\n\treturn \"posts\"\n}", "func (u Umpires) TableName() string {\n\treturn \"umpires\"\n}", "func (Account) TableName() string {\n\ttableName := \"accounts\"\n\n\tif namespace.GetNamespace() != \"\" {\n\t\treturn namespace.GetNamespace() + \"_\" + tableName\n\t}\n\n\treturn tableName\n}", "func (TramDoTrieu) TableName() string {\n\treturn \"TramDoTrieu\"\n}", "func (c CDRgorm) TableName() string {\n\treturn \"cdr\"\n}", "func (p *ProjectJenkins) TableName() string {\n\treturn \"project_jenkins\"\n}", "func (model *User) TableName() string {\n\treturn \"user\"\n}", "func (HealthMenstruationDailyEntryORM) TableName() string {\n\treturn \"health_menstruation_daily_entries\"\n}", "func (u UserUpload) TableName() string {\n\treturn \"user_uploads\"\n}", "func (a *AddressDAL) TableName() string {\n\treturn \"address\"\n}", "func (mc 
*MeetingCanhui) TableName() string {\n\treturn \"ljl_meetingcanhui\"\n}", "func getTableName(object interface{}) string {\n\tstringName := fmt.Sprintf(\"%ss\", strings.ToLower(getType(object)))\n\treturn stringName\n}", "func (m *SysUser) TableName() string {\n\treturn \"sys_user\"\n}", "func (m *Mysql) Table(name string) {\n\tm.table = name\n}", "func (Goods) TableName() string {\n\treturn \"goods\"\n}", "func (d *Daily) TableName() string {\n\treturn \"daily\"\n}", "func (b *Binary) TableName() string {\n\tif b.ecosystem == 0 {\n\t\tb.ecosystem = 1\n\t}\n\treturn `1_binaries`\n}", "func (u StudentInfo) TableName() string {\n\treturn \"student\"\n}", "func (Invoice) TableName() string {\n\t// custom table name, this is default\n\treturn \"invoices\"\n}", "func (db *DB) Table(name string) *DB {\n\tdb.tablename = TableName(name)\n\treturn db\n}", "func (g *Grid) TableName() string {\n\treturn \"grid\"\n}", "func (post *Post) TableName() string {\n\treturn \"post\"\n}", "func (MigrationV1) TableName() string {\n\treturn \"migrations\"\n}", "func (NestTable) TableName() string {\n\treturn \"nest_table\"\n}", "func (m *GormIdentityRoleRepository) TableName() string {\n\treturn \"identity_role\"\n}", "func (m *Stucasting) TableName() string {\n\treturn \"stucasting\"\n}", "func (m *Stucasting) TableName() string {\n\treturn \"stucasting\"\n}" ]
[ "0.7302173", "0.713911", "0.7083268", "0.7068281", "0.7064129", "0.6997782", "0.6992626", "0.69540036", "0.6950419", "0.6922991", "0.6906448", "0.6896193", "0.68884236", "0.6842365", "0.6842365", "0.6842365", "0.6842365", "0.684192", "0.6834944", "0.68174136", "0.6791999", "0.67894596", "0.6758264", "0.6746872", "0.6721033", "0.6711168", "0.6709335", "0.67092973", "0.6709078", "0.67040694", "0.66948277", "0.6692023", "0.66878545", "0.6676768", "0.667669", "0.66739726", "0.667365", "0.6660641", "0.6657154", "0.6657054", "0.6655283", "0.66537124", "0.664319", "0.66392404", "0.66392404", "0.6638248", "0.66322553", "0.66243047", "0.6620342", "0.6606126", "0.6606126", "0.66010815", "0.65929216", "0.6572596", "0.6562763", "0.65560025", "0.6539823", "0.65352136", "0.65338016", "0.6522775", "0.6521889", "0.65208614", "0.65208614", "0.65208614", "0.6520408", "0.65014386", "0.64898723", "0.6475254", "0.6469655", "0.646608", "0.64618593", "0.6441312", "0.64342874", "0.6428684", "0.6421728", "0.6418567", "0.6417183", "0.64169204", "0.64138025", "0.6398845", "0.6397078", "0.6393849", "0.63914305", "0.6390791", "0.6388002", "0.6377717", "0.6377688", "0.6374135", "0.6369855", "0.63626885", "0.63614875", "0.63582975", "0.63546085", "0.6353095", "0.6352752", "0.63474596", "0.6340351", "0.6336714", "0.6336175", "0.6336175" ]
0.6537073
57
ToORM runs the BeforeToORM hook if present, converts the fields of this object to ORM format, runs the AfterToORM hook, then returns the ORM object
func (m *HealthMenstruationPersonalInfo) ToORM(ctx context.Context) (HealthMenstruationPersonalInfoORM, error) { to := HealthMenstruationPersonalInfoORM{} var err error if prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToORM); ok { if err = prehook.BeforeToORM(ctx, &to); err != nil { return to, err } } to.Id = m.Id if m.CreatedAt != nil { var t time.Time if t, err = ptypes1.Timestamp(m.CreatedAt); err != nil { return to, err } to.CreatedAt = &t } if m.UpdatedAt != nil { var t time.Time if t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil { return to, err } to.UpdatedAt = &t } to.ProfileId = m.ProfileId to.PeriodLengthInDays = m.PeriodLengthInDays to.CycleLengthInDays = m.CycleLengthInDays if posthook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithAfterToORM); ok { err = posthook.AfterToORM(ctx, &to) } return to, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *Contact) ToORM(ctx context.Context) (ContactORM, error) {\n\tto := ContactORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(ContactWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.FirstName = m.FirstName\n\tto.MiddleName = m.MiddleName\n\tto.LastName = m.LastName\n\t// Skipping field: PrimaryEmail\n\tfor _, v := range m.Emails {\n\t\tif v != nil {\n\t\t\tif tempEmails, cErr := v.ToORM(ctx); cErr == nil {\n\t\t\t\tto.Emails = append(to.Emails, &tempEmails)\n\t\t\t} else {\n\t\t\t\treturn to, cErr\n\t\t\t}\n\t\t} else {\n\t\t\tto.Emails = append(to.Emails, nil)\n\t\t}\n\t}\n\tif posthook, ok := interface{}(m).(ContactWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *UserInfo) ToORM(ctx context.Context) (UserInfoORM, error) {\n\tto := UserInfoORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(UserInfoWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.UserId = m.UserId\n\tto.LastName = m.LastName\n\tto.FirstName = m.FirstName\n\tto.Period = m.Period\n\tto.DepartmentId = m.DepartmentId\n\tto.JobId = m.JobId\n\tto.EnrollmentFlg = m.EnrollmentFlg\n\tto.AdminFlg = m.AdminFlg\n\tif m.CreatedAt != nil {\n\t\t*to.CreatedAt = m.CreatedAt.AsTime()\n\t}\n\tif m.UpdatedAt != nil {\n\t\t*to.UpdatedAt = m.UpdatedAt.AsTime()\n\t}\n\tif posthook, ok := interface{}(m).(UserInfoWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *Period) ToORM(ctx context.Context) (PeriodORM, error) {\n\tto := PeriodORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(PeriodWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.Period = m.Period\n\tif m.CreatedAt != nil {\n\t\t*to.CreatedAt = 
m.CreatedAt.AsTime()\n\t}\n\tif m.UpdatedAt != nil {\n\t\t*to.UpdatedAt = m.UpdatedAt.AsTime()\n\t}\n\tif posthook, ok := interface{}(m).(PeriodWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *Email) ToORM(ctx context.Context) (EmailORM, error) {\n\tto := EmailORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(EmailWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.Address = m.Address\n\tif posthook, ok := interface{}(m).(EmailWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *HealthMenstruationDailyEntry) ToORM(ctx context.Context) (HealthMenstruationDailyEntryORM, error) {\n\tto := HealthMenstruationDailyEntryORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.Day = &t\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = int32(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *Profile) ToORM(ctx context.Context) (ProfileORM, error) {\n\tto := ProfileORM{}\n\tvar err error\n\tif prehook, ok := 
interface{}(m).(ProfileWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tto.Notes = m.Notes\n\tto.FirstName = m.FirstName\n\tto.LastName = m.LastName\n\tto.PrimaryEmail = m.PrimaryEmail\n\tfor _, v := range m.Groups {\n\t\tif v != nil {\n\t\t\tif tempGroups, cErr := v.ToORM(ctx); cErr == nil {\n\t\t\t\tto.Groups = append(to.Groups, &tempGroups)\n\t\t\t} else {\n\t\t\t\treturn to, cErr\n\t\t\t}\n\t\t} else {\n\t\t\tto.Groups = append(to.Groups, nil)\n\t\t}\n\t}\n\tto.ProfilePictureUrl = m.ProfilePictureUrl\n\tif posthook, ok := interface{}(m).(ProfileWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *Comment) ToORM(ctx context.Context) (CommentORM, error) {\n\tto := CommentORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(CommentWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.Id != nil {\n\t\tto.Id, err = go_uuid1.FromString(m.Id.Value)\n\t\tif err != nil {\n\t\t\treturn to, err\n\t\t}\n\t} else {\n\t\tto.Id = go_uuid1.Nil\n\t}\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tif m.DeletedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.DeletedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.DeletedAt = 
&t\n\t}\n\tif m.BoardId != nil {\n\t\tv := m.BoardId.Value\n\t\tto.BoardId = &v\n\t}\n\tif m.PostId != nil {\n\t\tv := m.PostId.Value\n\t\tto.PostId = &v\n\t}\n\tif m.ContentId != nil {\n\t\tv := m.ContentId.Value\n\t\tto.ContentId = &v\n\t}\n\tif m.Userid != nil {\n\t\tv := m.Userid.Value\n\t\tto.Userid = &v\n\t}\n\tif m.Username != nil {\n\t\tv := m.Username.Value\n\t\tto.Username = &v\n\t}\n\tif m.Nickname != nil {\n\t\tv := m.Nickname.Value\n\t\tto.Nickname = &v\n\t}\n\tif m.Email != nil {\n\t\tv := m.Email.Value\n\t\tto.Email = &v\n\t}\n\tif m.Password != nil {\n\t\tv := m.Password.Value\n\t\tto.Password = &v\n\t}\n\tif m.Url != nil {\n\t\tv := m.Url.Value\n\t\tto.Url = &v\n\t}\n\tif m.UseHtml != nil {\n\t\tv := m.UseHtml.Value\n\t\tto.UseHtml = &v\n\t}\n\tif m.UseSecret != nil {\n\t\tv := m.UseSecret.Value\n\t\tto.UseSecret = &v\n\t}\n\tto.UpVoteCount = m.UpVoteCount\n\tto.DownVoteCount = m.DownVoteCount\n\tif posthook, ok := interface{}(m).(CommentWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *IntPoint) ToORM(ctx context.Context) (IntPointORM, error) {\n\tto := IntPointORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(IntPointWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.X = m.X\n\tto.Y = m.Y\n\tif posthook, ok := interface{}(m).(IntPointWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func ConvertContactToORM(from Contact) (ContactORM, error) {\n\tto := ContactORM{}\n\tvar err error\n\tto.Id = from.Id\n\tto.FirstName = from.FirstName\n\tto.MiddleName = from.MiddleName\n\tto.LastName = from.LastName\n\tto.EmailAddress = from.EmailAddress\n\treturn to, err\n}", "func NewORM() ORMWrapper {\n\treturn &orm{\n\t\tdone: make(chan error),\n\t}\n}", "func (adapter *GORMAdapter) GetUnderlyingORM() interface{} {\n\treturn adapter.db\n}", "func (m *ContactORM) ToPB(ctx 
context.Context) (Contact, error) {\n\tto := Contact{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(ContactWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.FirstName = m.FirstName\n\tto.MiddleName = m.MiddleName\n\tto.LastName = m.LastName\n\t// Skipping field: PrimaryEmail\n\tfor _, v := range m.Emails {\n\t\tif v != nil {\n\t\t\tif tempEmails, cErr := v.ToPB(ctx); cErr == nil {\n\t\t\t\tto.Emails = append(to.Emails, &tempEmails)\n\t\t\t} else {\n\t\t\t\treturn to, cErr\n\t\t\t}\n\t\t} else {\n\t\t\tto.Emails = append(to.Emails, nil)\n\t\t}\n\t}\n\tif posthook, ok := interface{}(m).(ContactWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *HealthMenstruationDailyEntryORM) ToPB(ctx context.Context) (HealthMenstruationDailyEntry, error) {\n\tto := HealthMenstruationDailyEntry{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tif to.Day, err = ptypes1.TimestampProto(*m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = HealthMenstruationDailyEntry_Type(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *HealthMenstruationPersonalInfoORM) ToPB(ctx context.Context) 
(HealthMenstruationPersonalInfo, error) {\n\tto := HealthMenstruationPersonalInfo{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tto.PeriodLengthInDays = m.PeriodLengthInDays\n\tto.CycleLengthInDays = m.CycleLengthInDays\n\tif posthook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (s *orm) ORM() *gorm.DB {\n\treturn s.client\n}", "func (m *UserInfoORM) ToPB(ctx context.Context) (UserInfo, error) {\n\tto := UserInfo{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(UserInfoWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.UserId = m.UserId\n\tto.LastName = m.LastName\n\tto.FirstName = m.FirstName\n\tto.Period = m.Period\n\tto.DepartmentId = m.DepartmentId\n\tto.JobId = m.JobId\n\tto.EnrollmentFlg = m.EnrollmentFlg\n\tto.AdminFlg = m.AdminFlg\n\tif m.CreatedAt != nil {\n\t\tto.CreatedAt = timestamppb.New(*m.CreatedAt)\n\t}\n\tif m.UpdatedAt != nil {\n\t\tto.UpdatedAt = timestamppb.New(*m.UpdatedAt)\n\t}\n\tif posthook, ok := interface{}(m).(UserInfoWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (auth *AuthModuleConversion) ToDB() ([]byte, error) {\n\treturn []byte{}, nil\n}", "func (m *Group) AfterToORM(ctx context.Context, a *GroupORM) error {\n\tfor _, item := range m.UserList {\n\t\tid, err := resource.DecodeInt64(&User{}, item)\n\t\tif err != nil 
{\n\t\t\treturn err\n\t\t}\n\t\ta.UserList = append(a.UserList, &UserORM{Id: id})\n\t}\n\n\treturn nil\n}", "func ORMToModelBclassTranslate(\n\ttranslationImpact TranslationImpact,\n\tdb *gorm.DB) (Error error) {\n\n\tif translationImpact == CreateMode {\n\n\t\t// check that bclassStores are nil\n\n\t\tif map_BclassDBID_BclassPtr != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, Parameters bclassStore should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_BclassDBID_BclassDB != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, parameters BclassDBStore should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\t// init stores\n\t\ttmp := make(map[uint]*models.Bclass, 0)\n\t\tmap_BclassDBID_BclassPtr = &tmp\n\n\t\ttmpDB := make(map[uint]*BclassDB, 0)\n\t\tmap_BclassDBID_BclassDB = &tmpDB\n\n\t\ttmpID := make(map[*models.Bclass]uint, 0)\n\t\tmap_BclassPtr_BclassDBID = &tmpID\n\n\t\tmodels.AllModelStore.Bclasss = make([]*models.Bclass, 0)\n\n\t\tbclassDBArray := make([]BclassDB, 0)\n\t\tquery := db.Find(&bclassDBArray)\n\t\tif query.Error != nil {\n\t\t\treturn query.Error\n\t\t}\n\n\t\t// copy orm objects to the two stores\n\t\tfor _, bclassDB := range bclassDBArray {\n\n\t\t\t// create entries in the tree maps.\n\t\t\tbclass := bclassDB.Bclass\n\t\t\t(*map_BclassDBID_BclassPtr)[bclassDB.ID] = &bclass\n\n\t\t\t(*map_BclassPtr_BclassDBID)[&bclass] = bclassDB.ID\n\n\t\t\tbclassDBCopy := bclassDB\n\t\t\t(*map_BclassDBID_BclassDB)[bclassDB.ID] = &bclassDBCopy\n\n\t\t\t// append model store with the new element\n\t\t\tmodels.AllModelStore.Bclasss = append(models.AllModelStore.Bclasss, &bclass)\n\t\t}\n\t} else { // UpdateMode\n\t\t// for later, update of the data field\n\n\t\t// check that bclassStore is not nil\n\t\tif map_BclassDBID_BclassPtr == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, bclassStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_BclassDBID_BclassDB == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, 
bclassStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\t// update fields of bclassDB with fields of bclass\n\t\tfor _, bclass := range models.AllModelStore.Bclasss {\n\t\t\tbclassDBID := (*map_BclassPtr_BclassDBID)[bclass]\n\t\t\tbclassDB := (*map_BclassDBID_BclassDB)[bclassDBID]\n\n\t\t\t*bclass = bclassDB.Bclass\n\t\t}\n\n\t\t// parse all DB instance and update all pointer fields of the translated models instance\n\t\tfor _, bclassDB := range *map_BclassDBID_BclassDB {\n\t\t\tbclass := (*map_BclassDBID_BclassPtr)[bclassDB.ID]\n\t\t\tif bclass == nil {\n\t\t\t\terr := errors.New(\"cannot find translated instance in models store\")\n\t\t\t\treturn err\n\t\t\t}\n\n\n\n\n\t\t}\n\t}\n\n\treturn nil\n}", "func (p *ormPlugin) generateMapFunctions(message *generator.Descriptor) {\n\tccTypeNamePb := generator.CamelCaseSlice(message.TypeName())\n\tccTypeNameBase := lintName(ccTypeNamePb)\n\tccTypeNameOrm := fmt.Sprintf(\"%sORM\", ccTypeNameBase)\n\t///// To Orm\n\tp.P(`// Convert`, ccTypeNameBase, `ToORM takes a pb object and returns an orm object`)\n\tp.P(`func Convert`, ccTypeNameBase, `ToORM (from `,\n\t\tccTypeNamePb, `) `, ccTypeNameOrm, ` {`)\n\tp.P(`to := `, ccTypeNameOrm, `{}`)\n\tfor _, field := range message.Field {\n\t\t// Checking if field is skipped\n\t\tif field.Options != nil {\n\t\t\tv, err := proto.GetExtension(field.Options, gorm.E_Field)\n\t\t\tif err == nil && v.(*gorm.GormFieldOptions) != nil {\n\t\t\t\tif v.(*gorm.GormFieldOptions).Drop != nil && *v.(*gorm.GormFieldOptions).Drop {\n\t\t\t\t\tp.P(`// Skipping field: `, p.GetOneOfFieldName(message, field))\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tp.generateFieldMap(message, field, true)\n\t}\n\tp.P(`return to`)\n\tp.P(`}`)\n\n\tp.P()\n\t///// To Pb\n\tp.P(`// Convert`, ccTypeNameBase, `FromORM takes an orm object and returns a pb object`)\n\tp.P(`func Convert`, ccTypeNameBase, `FromORM (from `, ccTypeNameOrm, `) `,\n\t\tccTypeNamePb, ` {`)\n\tp.P(`to := `, ccTypeNamePb, `{}`)\n\tfor 
_, field := range message.Field {\n\t\t// Checking if field is skipped\n\t\tif field.Options != nil {\n\t\t\tv, err := proto.GetExtension(field.Options, gorm.E_Field)\n\t\t\tif err == nil && v.(*gorm.GormFieldOptions) != nil {\n\t\t\t\tif v.(*gorm.GormFieldOptions).Drop != nil && *v.(*gorm.GormFieldOptions).Drop {\n\t\t\t\t\tp.P(`// Skipping field: `, p.GetOneOfFieldName(message, field))\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tp.generateFieldMap(message, field, false)\n\t}\n\tp.P(`return to`)\n\tp.P(`}`)\n}", "func (tc TypeConverter) ToDb(val interface{}) (interface{}, error) {\n\tswitch val.(type) {\n\tcase map[string]interface{}:\n\t\treturn json.Marshal(val)\n\t}\n\treturn val, nil\n}", "func (m *CommentORM) ToPB(ctx context.Context) (Comment, error) {\n\tto := Comment{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(CommentWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = &types1.UUID{Value: m.Id.String()}\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.DeletedAt != nil {\n\t\tif to.DeletedAt, err = ptypes1.TimestampProto(*m.DeletedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.BoardId != nil {\n\t\tto.BoardId = &wrappers.StringValue{Value: *m.BoardId}\n\t}\n\tif m.PostId != nil {\n\t\tto.PostId = &wrappers.StringValue{Value: *m.PostId}\n\t}\n\tif m.ContentId != nil {\n\t\tto.ContentId = &wrappers.StringValue{Value: *m.ContentId}\n\t}\n\tif m.Userid != nil {\n\t\tto.Userid = &wrappers.StringValue{Value: *m.Userid}\n\t}\n\tif m.Username != nil {\n\t\tto.Username = &wrappers.StringValue{Value: *m.Username}\n\t}\n\tif m.Nickname != nil {\n\t\tto.Nickname = &wrappers.StringValue{Value: *m.Nickname}\n\t}\n\tif m.Email != nil 
{\n\t\tto.Email = &wrappers.StringValue{Value: *m.Email}\n\t}\n\tif m.Password != nil {\n\t\tto.Password = &wrappers.StringValue{Value: *m.Password}\n\t}\n\tif m.Url != nil {\n\t\tto.Url = &wrappers.StringValue{Value: *m.Url}\n\t}\n\tif m.UseHtml != nil {\n\t\tto.UseHtml = &wrappers.BoolValue{Value: *m.UseHtml}\n\t}\n\tif m.UseSecret != nil {\n\t\tto.UseSecret = &wrappers.BoolValue{Value: *m.UseSecret}\n\t}\n\tto.UpVoteCount = m.UpVoteCount\n\tto.DownVoteCount = m.DownVoteCount\n\tif posthook, ok := interface{}(m).(CommentWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *EmailORM) ToPB(ctx context.Context) (Email, error) {\n\tto := Email{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(EmailWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.Address = m.Address\n\tif posthook, ok := interface{}(m).(EmailWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *PeriodORM) ToPB(ctx context.Context) (Period, error) {\n\tto := Period{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(PeriodWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.Period = m.Period\n\tif m.CreatedAt != nil {\n\t\tto.CreatedAt = timestamppb.New(*m.CreatedAt)\n\t}\n\tif m.UpdatedAt != nil {\n\t\tto.UpdatedAt = timestamppb.New(*m.UpdatedAt)\n\t}\n\tif posthook, ok := interface{}(m).(PeriodWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func ConvertContactFromORM(from ContactORM) (Contact, error) {\n\tto := Contact{}\n\tvar err error\n\tto.Id = from.Id\n\tto.FirstName = from.FirstName\n\tto.MiddleName = from.MiddleName\n\tto.LastName = from.LastName\n\tto.EmailAddress = from.EmailAddress\n\treturn to, err\n}", "func (m *ProfileORM) ToPB(ctx context.Context) (Profile, error) {\n\tto := 
Profile{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(ProfileWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Notes = m.Notes\n\tto.FirstName = m.FirstName\n\tto.LastName = m.LastName\n\tto.PrimaryEmail = m.PrimaryEmail\n\tfor _, v := range m.Groups {\n\t\tif v != nil {\n\t\t\tif tempGroups, cErr := v.ToPB(ctx); cErr == nil {\n\t\t\t\tto.Groups = append(to.Groups, &tempGroups)\n\t\t\t} else {\n\t\t\t\treturn to, cErr\n\t\t\t}\n\t\t} else {\n\t\t\tto.Groups = append(to.Groups, nil)\n\t\t}\n\t}\n\tto.ProfilePictureUrl = m.ProfilePictureUrl\n\tif posthook, ok := interface{}(m).(ProfileWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func ModelToORMStateTranslate(\n\ttranslationImpact TranslationImpact,\n\tdb *gorm.DB) (Error error) {\n\n\tif translationImpact == CreateMode {\n\n\t\t// check that stateStore is nil as well as stateDBs\n\t\tif map_StateDBID_StatePtr != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, map_StateDBID_StatePtr should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_StateDBID_StateDB != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, map_StateDBID_StateDB should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_StatePtr_StateDBID != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, map_StatePtr_StateDBID should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\ttmp := make(map[uint]*models.State, 0)\n\t\tmap_StateDBID_StatePtr = &tmp\n\n\t\ttmpDB := make(map[uint]*StateDB, 0)\n\t\tmap_StateDBID_StateDB = &tmpDB\n\n\t\ttmpID := make(map[*models.State]uint, 0)\n\t\tmap_StatePtr_StateDBID = &tmpID\n\n\t\tfor _, 
state := range models.AllModelStore.States {\n\n\t\t\t// initiate state\n\t\t\tvar stateDB StateDB\n\t\t\tstateDB.State = *state\n\n\t\t\tquery := db.Create(&stateDB)\n\t\t\tif query.Error != nil {\n\t\t\t\treturn query.Error\n\t\t\t}\n\n\t\t\t// update stores\n\t\t\t(*map_StatePtr_StateDBID)[state] = stateDB.ID\n\t\t\t(*map_StateDBID_StatePtr)[stateDB.ID] = state\n\t\t\t(*map_StateDBID_StateDB)[stateDB.ID] = &stateDB\n\t\t}\n\t} else { // UpdateMode, update IDs of Pointer Fields of ORM object\n\n\t\t// check that stateStore is not nil\n\t\tif map_StateDBID_StatePtr == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, stateStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_StateDBID_StateDB == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, stateStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\t// update fields of stateDB with fields of state\n\t\tfor _, state := range models.AllModelStore.States {\n\t\t\tstateDBID := (*map_StatePtr_StateDBID)[state]\n\t\t\tstateDB := (*map_StateDBID_StateDB)[stateDBID]\n\n\t\t\tstateDB.State = *state\n\t\t}\n\n\t\t// parse model objects ot update associations\n\t\tfor idx, state := range *map_StateDBID_StatePtr {\n\n\t\t\t// fetch matching stateDB\n\t\t\tif stateDB, ok := (*map_StateDBID_StateDB)[idx]; ok {\n\t\t\t\t// set {{Fieldname}}ID\n\n\t\t\t\tquery := db.Save(&stateDB)\n\t\t\t\tif query.Error != nil {\n\t\t\t\t\treturn query.Error\n\t\t\t\t}\n\n\t\t\t} else {\n\t\t\t\terr := errors.New(\n\t\t\t\t\tfmt.Sprintf(\"In UpdateMode translation, stateStore should not be nil %v %v\",\n\t\t\t\t\t\tstateDB, state))\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func ORMToModelStateTranslate(\n\ttranslationImpact TranslationImpact,\n\tdb *gorm.DB) (Error error) {\n\n\tif translationImpact == CreateMode {\n\n\t\t// check that stateStores are nil\n\n\t\tif map_StateDBID_StatePtr != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, Parameters stateStore should be 
nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_StateDBID_StateDB != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, parameters StateDBStore should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\t// init stores\n\t\ttmp := make(map[uint]*models.State, 0)\n\t\tmap_StateDBID_StatePtr = &tmp\n\n\t\ttmpDB := make(map[uint]*StateDB, 0)\n\t\tmap_StateDBID_StateDB = &tmpDB\n\n\t\ttmpID := make(map[*models.State]uint, 0)\n\t\tmap_StatePtr_StateDBID = &tmpID\n\n\t\tmodels.AllModelStore.States = make([]*models.State, 0)\n\n\t\tstateDBArray := make([]StateDB, 0)\n\t\tquery := db.Find(&stateDBArray)\n\t\tif query.Error != nil {\n\t\t\treturn query.Error\n\t\t}\n\n\t\t// copy orm objects to the two stores\n\t\tfor _, stateDB := range stateDBArray {\n\n\t\t\t// create entries in the tree maps.\n\t\t\tstate := stateDB.State\n\t\t\t(*map_StateDBID_StatePtr)[stateDB.ID] = &state\n\n\t\t\t(*map_StatePtr_StateDBID)[&state] = stateDB.ID\n\n\t\t\tstateDBCopy := stateDB\n\t\t\t(*map_StateDBID_StateDB)[stateDB.ID] = &stateDBCopy\n\n\t\t\t// append model store with the new element\n\t\t\tmodels.AllModelStore.States = append(models.AllModelStore.States, &state)\n\t\t}\n\t} else { // UpdateMode\n\t\t// for later, update of the data field\n\n\t\t// check that stateStore is not nil\n\t\tif map_StateDBID_StatePtr == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, stateStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_StateDBID_StateDB == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, stateStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\t// update fields of stateDB with fields of state\n\t\tfor _, state := range models.AllModelStore.States {\n\t\t\tstateDBID := (*map_StatePtr_StateDBID)[state]\n\t\t\tstateDB := (*map_StateDBID_StateDB)[stateDBID]\n\n\t\t\t*state = stateDB.State\n\t\t}\n\n\t\t// parse all DB instance and update all pointer fields of the translated models instance\n\t\tfor _, stateDB := range *map_StateDBID_StateDB 
{\n\t\t\tstate := (*map_StateDBID_StatePtr)[stateDB.ID]\n\t\t\tif state == nil {\n\t\t\t\terr := errors.New(\"cannot find translated instance in models store\")\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t}\n\t}\n\n\treturn nil\n}", "func ModelToORMBclassTranslate(\n\ttranslationImpact TranslationImpact,\n\tdb *gorm.DB) (Error error) {\n\n\tif translationImpact == CreateMode {\n\n\t\t// check that bclassStore is nil as well as bclassDBs\n\t\tif map_BclassDBID_BclassPtr != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, map_BclassDBID_BclassPtr should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_BclassDBID_BclassDB != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, map_BclassDBID_BclassDB should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_BclassPtr_BclassDBID != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, map_BclassPtr_BclassDBID should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\ttmp := make(map[uint]*models.Bclass, 0)\n\t\tmap_BclassDBID_BclassPtr = &tmp\n\n\t\ttmpDB := make(map[uint]*BclassDB, 0)\n\t\tmap_BclassDBID_BclassDB = &tmpDB\n\n\t\ttmpID := make(map[*models.Bclass]uint, 0)\n\t\tmap_BclassPtr_BclassDBID = &tmpID\n\n\t\tfor _, bclass := range models.AllModelStore.Bclasss {\n\n\t\t\t// initiate bclass\n\t\t\tvar bclassDB BclassDB\n\t\t\tbclassDB.Bclass = *bclass\n\n\t\t\tquery := db.Create(&bclassDB)\n\t\t\tif query.Error != nil {\n\t\t\t\treturn query.Error\n\t\t\t}\n\n\t\t\t// update stores\n\t\t\t(*map_BclassPtr_BclassDBID)[bclass] = bclassDB.ID\n\t\t\t(*map_BclassDBID_BclassPtr)[bclassDB.ID] = bclass\n\t\t\t(*map_BclassDBID_BclassDB)[bclassDB.ID] = &bclassDB\n\t\t}\n\t} else { // UpdateMode, update IDs of Pointer Fields of ORM object\n\n\t\t// check that bclassStore is not nil\n\t\tif map_BclassDBID_BclassPtr == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, bclassStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_BclassDBID_BclassDB == nil {\n\t\t\terr := errors.New(\"In UpdateMode 
translation, bclassStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\t// update fields of bclassDB with fields of bclass\n\t\tfor _, bclass := range models.AllModelStore.Bclasss {\n\t\t\tbclassDBID := (*map_BclassPtr_BclassDBID)[bclass]\n\t\t\tbclassDB := (*map_BclassDBID_BclassDB)[bclassDBID]\n\n\t\t\tbclassDB.Bclass = *bclass\n\t\t}\n\n\t\t// parse model objects ot update associations\n\t\tfor idx, bclass := range *map_BclassDBID_BclassPtr {\n\n\t\t\t// fetch matching bclassDB\n\t\t\tif bclassDB, ok := (*map_BclassDBID_BclassDB)[idx]; ok {\n\t\t\t\t// set {{Fieldname}}ID\n\n\n\n\t\t\t\tquery := db.Save(&bclassDB)\n\t\t\t\tif query.Error != nil {\n\t\t\t\t\treturn query.Error\n\t\t\t\t}\n\n\t\t\t} else {\n\t\t\t\terr := errors.New(\n\t\t\t\t\tfmt.Sprintf(\"In UpdateMode translation, bclassStore should not be nil %v %v\",\n\t\t\t\t\t\tbclassDB, bclass))\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func CreateORMFoo(foo *Foo) {\n\tfoo.Stage()\n\tif Stage.AllModelsStructCreateCallback != nil {\n\t\tStage.AllModelsStructCreateCallback.CreateORMFoo(foo)\n\t}\n}", "func GenORMSetup(db *gorm.DB) {\n\n\t// relative to the models package, swith to ../controlers package\n\tfilename := filepath.Join(OrmPkgGenPath, \"setup.go\")\n\n\t// we should use go generate\n\tlog.Println(\"generating orm setup file : \" + filename)\n\n\tf, err := os.Create(filename)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\t// create the list of structs\n\tvar structs []models.Struct\n\tdb.Find(&structs)\n\n\tLISTOFSTRUCT := \"\\n\"\n\n\tdeleteCalls := \"\"\n\n\tfor idx, _struct := range structs {\n\t\tif idx != 0 {\n\t\t\tLISTOFSTRUCT += \",\\n\"\n\t\t}\n\t\tLISTOFSTRUCT += fmt.Sprintf(\"\\t\\t&%sDB{}\", _struct.Name)\n\n\t\tdeleteCalls += fmt.Sprintf(\"\\tdb.Delete(&%sDB{})\\n\", _struct.Name)\n\n\t\tfmt.Printf(\"\t\torm.LoadDB%s(%ss, db)\\n\", _struct.Name, _struct.Name)\n\t}\n\tres := strings.ReplaceAll(template, \"{{LISTOFSTRUCT}}\", LISTOFSTRUCT)\n\n\tres = 
strings.ReplaceAll(res, \"{{Deletes}}\", deleteCalls)\n\n\tfmt.Fprintf(f, \"%s\", res)\n\n\tdefer f.Close()\n}", "func (track *Track) ToDb() interface{} {\n\treturn track.Id\n}", "func (m *Contact) BeforeToORM(ctx context.Context, c *ContactORM) error {\n\tif m.PrimaryEmail != \"\" {\n\t\tfor _, mail := range m.Emails {\n\t\t\tif mail.Address == m.PrimaryEmail {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\tc.Emails = append(c.Emails, &EmailORM{Address: m.PrimaryEmail, IsPrimary: true})\n\t}\n\treturn nil\n}", "func (tf Task) ToEntity(tm model.Task) domain.Task {\n\treturn domain.Task{\n\t\tID: tm.ID,\n\t\tTitle: tm.Title,\n\t\tDescription: tm.Description.String,\n\t\tDueDate: tm.DueDate.Time,\n\t\tCreatedAt: tm.CreatedAt,\n\t\tUpdatedAt: tm.UpdatedAt,\n\t}\n}", "func (this *BoltDBAbstractDAO) jsonToStruct(value string) interface{} {\n\tif this.createEntity != nil {\n\t\tentity := this.createEntity()\n\t\terr := json.Unmarshal([]byte(value), entity)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\treturn entity\n\t} else {\n\t\tpanic(\"function 'createEntity' not defined in DAO\")\n\t}\n}", "func setupDB(DB *gorm.DB) {\n\t// remove original functions to update timestamps, we will maintain that by ourself in models in\n\t// BeforeCreate, BeforeSave methods\n\tDB.Callback().Create().Remove(\"gorm:update_time_stamp\")\n}", "func (self *CreateManager) ToSql() (string, error) {\n\tif nil == self.adapter {\n\t\tself.adapter = \"to_sql\"\n\t}\n\n\treturn VisitorFor(self.adapter).Accept(self.Tree)\n}", "func Db() *gorm.DB {\n\treturn entity.Db()\n}", "func (crtEP *MdlPersonCreateExt) AfterDB(ent interface{}) error {\n\n\t// fmt.Println(\"TypeOf ent:\", reflect.TypeOf(ent))\n\t// fmt.Println(\"ValueOf ent:\", reflect.ValueOf(ent))\n\t// p := ent.(*models.Person)\n\n\t// make changes / validate the content struct pointer (p) here\n\t// p.Name = \"A new field value\"\n\treturn nil\n}", "func ToPersistenceInfo(t *admin.PersistenceInfo) *types.PersistenceInfo {\n\tif t == nil 
{\n\t\treturn nil\n\t}\n\treturn &types.PersistenceInfo{\n\t\tBackend: t.GetBackend(),\n\t\tSettings: ToPersistenceSettings(t.Settings),\n\t\tFeatures: ToPersistenceFeatures(t.Features),\n\t}\n}", "func InitORM() {\n\tdb, err := gorm.Open(\"sqlite3\", \"test.db\")\n\tif err != nil {\n\t\t//Panic is a built-in function that stops the ordinary flow of control and begins panicking.\n\t\t//When the function F calls panic, execution of F stops, any deferred functions in F are executed normally,\n\t\t//and then F returns to its caller\n\t\tpanic(\"failed to connect database\")\n\t}\n\tdefer db.Close() //remember the use of defer? A defer statement defers the execution of close until the InitORM function returns\n\n\t// Migrate the schema\n\tdb.AutoMigrate(&User{})\n\n\t// Create\n\tdb.Create(&User{Name: \"Renjith\", Email: \"[email protected]\", Gender: \"Male\", City: \"Cochin\"})\n\n\t// Read\n\tvar user User\n\tdb.First(&user, 1) // find user with id 1\n\tdb.First(&user, \"name = ?\", \"Renjith\") // find user with name Renjith\n\n\tfmt.Println(user)\n\t// Update - update user's city to Kochi\n\tdb.Model(&user).Update(\"City\", \"Kochi\")\n\t// Delete - delete product\n\tdb.Delete(&user)\n}", "func ToEntity(om *OrganizationMongo) *Organization {\n\tID := \"\"\n\tif !om.ID.IsZero() {\n\t\tID = om.ID.Hex()\n\t}\n\tIDsCategory := make([]string, 0)\n\tif om.IDsCategory != nil && len(om.IDsCategory) > 0 {\n\t\tfor _, id := range om.IDsCategory {\n\t\t\tIDsCategory = append(IDsCategory, id.Hex())\n\t\t}\n\t}\n\n\treturn &Organization{\n\t\tID: ID,\n\t\tName: om.Name,\n\t\tEmail: om.Email,\n\t\tPhone: om.Phone,\n\t\tStatus: om.Status,\n\t\tIDsCategory: IDsCategory,\n\t}\n}", "func (m *IntPointORM) ToPB(ctx context.Context) (IntPoint, error) {\n\tto := IntPoint{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(IntPointWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.X = m.X\n\tto.Y = 
m.Y\n\tif posthook, ok := interface{}(m).(IntPointWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (getEP *MdlPersonGetExt) AfterDB(ent interface{}) error {\n\n\t// fmt.Println(\"TypeOf ent:\", reflect.TypeOf(ent))\n\t// fmt.Println(\"ValueOf ent:\", reflect.ValueOf(ent))\n\t// p := ent.(*Person)\n\n\t// make changes / validate the content struct pointer (p) here\n\t// p.Name = \"A new field value\"\n\treturn nil\n}", "func (newAdmin *NewAdmin) ToAdmin() (*Admin, error) {\n\n\t// Construct a Admin based on NewAdmin.\n\tadmin := &Admin{\n\t\tID: bson.NewObjectId(),\n\t\tUserName: newAdmin.UserName,\n\t\tFirstName: newAdmin.FirstName,\n\t\tLastName: newAdmin.LastName,\n\t}\n\n\t// Trim leading and trailing whitespace from an email address.\n\temail := strings.TrimSpace(newAdmin.Email)\n\n\t// Force all characters in the email to be lower-case.\n\temail = strings.ToLower(email)\n\n\t// Update Email field.\n\tadmin.Email = email\n\n\t// md5 hash the final email string.\n\th := md5.New()\n\tio.WriteString(h, email)\n\tresult := hex.EncodeToString(h.Sum(nil))\n\n\t// Set the PhotoURL field of the new Admin to\n\t// the Gravatar PhotoURL for the admin's email address.\n\tphotoURL := gravatarBasePhotoURL + result\n\tadmin.PhotoURL = photoURL\n\n\t// Call .SetPassword() to set the PassHash\n\t// field of the Admin to a hash of the NewAdmin.Password.\n\terr := admin.SetPassword(newAdmin.Password)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error setting password hash of the Admin: %v\", err)\n\t}\n\n\treturn admin, nil\n}", "func (Mapper) ModelToEntity(m, e interface{}) error {\r\n\tobj, err := json.Marshal(m)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\treturn json.Unmarshal(obj, e)\r\n}", "func (r Role) MapToModel(m map[string]interface{}) Role {\n\tr.Id = m[\"id\"].(int)\n\tr.Name, _ = m[\"name\"].(string)\n\tr.Slug, _ = m[\"slug\"].(string)\n\tr.CreatedAt, _ = m[\"created_at\"].(string)\n\tr.UpdatedAt, _ = 
m[\"updated_at\"].(string)\n\treturn r\n}", "func Transform(db *gorm.DB, queries ...Query) *gorm.DB {\n\tfor _, q := range queries {\n\t\tdb = q(db)\n\t}\n\n\treturn db\n}", "func (w *NotificationPolicy) ConvertFromDBModel() error {\n\ttargets := []EventTarget{}\n\tif len(w.TargetsDB) != 0 {\n\t\terr := json.Unmarshal([]byte(w.TargetsDB), &targets)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tw.Targets = targets\n\n\ttypes := []string{}\n\tif len(w.EventTypesDB) != 0 {\n\t\terr := json.Unmarshal([]byte(w.EventTypesDB), &types)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tw.EventTypes = types\n\n\treturn nil\n}", "func GetDataBase() *gorm.DB {\n\treturn db\n}", "func (updEP *MdlPersonUpdateExt) AfterDB(ent interface{}) error {\n\n\t// fmt.Println(\"TypeOf ent:\", reflect.TypeOf(ent))\n\t// fmt.Println(\"ValueOf ent:\", reflect.ValueOf(ent))\n\t// p := ent.(*models.Person)\n\n\t// make changes / validate the content struct pointer (p) here\n\t// p.Name = \"A new field value\"\n\treturn nil\n}", "func (crtEP *MdlPersonCreateExt) BeforeDB(ent interface{}) error {\n\n\t// fmt.Println(\"TypeOf ent:\", reflect.TypeOf(ent))\n\t// fmt.Println(\"ValueOf ent:\", reflect.ValueOf(ent))\n\t// p := ent.(*models.Person)\n\n\t// make changes / validate the content struct pointer (p) here\n\t// p.Name = \"A new field value\"\n\treturn nil\n}", "func (getEP *MdlPersonGetExt) BeforeDB(ent interface{}) error {\n\n\t// fmt.Println(\"TypeOf ent:\", reflect.TypeOf(ent))\n\t// fmt.Println(\"ValueOf ent:\", reflect.ValueOf(ent))\n\t// p := ent.(*models.Person)\n\n\t// make changes / validate the content struct pointer (p) here\n\t// p.Name = \"A new field value\"\n\treturn nil\n}", "func toRecord(cache airtabledb.DB, src Feature, dst interface{}) {\n\tdV := reflect.ValueOf(dst).Elem().FieldByName(\"Fields\")\n\tsV := reflect.ValueOf(src)\n\tcopyFields(cache, sV, dV)\n}", "func NewORM[I ID, C Config, N Node](q pg.Q, prefix string, nodeCols ...string) ORM[I, C, N] 
{\n\treturn orm[I, C, N]{\n\t\tnewChainsORM[I, C](q, prefix),\n\t\tnewNodesORM[I, N](q, prefix, nodeCols...),\n\t}\n}", "func (updEP *MdlPersonUpdateExt) BeforeDB(ent interface{}) error {\n\n\t// fmt.Println(\"TypeOf ent:\", reflect.TypeOf(ent))\n\t// fmt.Println(\"ValueOf ent:\", reflect.ValueOf(ent))\n\t// p := ent.(*models.Person)\n\n\t// make changes / validate the content struct pointer (p) here\n\t// p.Name = \"A new field value\"\n\treturn nil\n}", "func (l *Contact) ToModel() model.Contact {\n\treturn model.Contact{\n\t\t// Code omitted.\n\t}\n\n}", "func (profile *Profile) ToDb() interface{} {\n\treturn profile.Id\n}", "func GormInit(conf *config.Config, logger Logger) (*gorm.DB, error) {\n\n\tdb, openErr := gorm.Open(conf.DBType, conf.DBParams)\n\tif openErr != nil {\n\t\tlog.CheckError(openErr)\n\t\treturn nil, openErr\n\t}\n\n\tIsSqlite = conf.DBType == SqliteType\n\n\tconnectionErr := db.DB().Ping()\n\tif connectionErr != nil {\n\t\tlog.CheckError(connectionErr)\n\t\treturn nil, connectionErr\n\t}\n\n\t// Negative MaxIdleConns means don't retain any idle connection\n\tmaxIdleConns := -1\n\tif IsSqlite {\n\t\t// sqlite doesn't like having a negative maxIdleConns\n\t\tmaxIdleConns = 10\n\t}\n\n\tdb.DB().SetMaxIdleConns(maxIdleConns)\n\tdb.DB().SetMaxOpenConns(400)\n\n\tif config.Environment == \"DEVELOPMENT\" {\n\t\tdb.LogMode(true)\n\t}\n\n\tswitch conf.DBLogMode {\n\tcase \"detailed\":\n\t\tdb.LogMode(true)\n\tcase \"silent\":\n\t\tdb.LogMode(false)\n\t}\n\n\tif logger != nil {\n\t\tdb.SetLogger(logger)\n\t}\n\n\tdb.AutoMigrate(&model.User{}, &model.UserFollows{}, &model.UserUploadsOld{}, &model.Notification{})\n\tif db.Error != nil {\n\t\treturn db, db.Error\n\t}\n\tdb.AutoMigrate(&model.Torrent{}, &model.TorrentReport{})\n\tif db.Error != nil {\n\t\treturn db, db.Error\n\t}\n\tdb.AutoMigrate(&model.File{})\n\tif db.Error != nil {\n\t\treturn db, db.Error\n\t}\n\tdb.AutoMigrate(&model.Comment{}, &model.OldComment{})\n\tif db.Error != nil {\n\t\treturn 
db, db.Error\n\t}\n\n\treturn db, nil\n}", "func (m *FileUserDatabase) ToDatabaseModel() (model *FileDatabaseModel) {\n\tmodel = &FileDatabaseModel{\n\t\tUsers: map[string]FileDatabaseUserDetailsModel{},\n\t}\n\n\tm.RLock()\n\n\tfor user, details := range m.Users {\n\t\tmodel.Users[user] = details.ToUserDetailsModel()\n\t}\n\n\tm.RUnlock()\n\n\treturn model\n}", "func (_m *APIDefinitionConverter) ToEntity(apiModel *model.APIDefinition) *api.Entity {\n\tret := _m.Called(apiModel)\n\n\tvar r0 *api.Entity\n\tif rf, ok := ret.Get(0).(func(*model.APIDefinition) *api.Entity); ok {\n\t\tr0 = rf(apiModel)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*api.Entity)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (b *Blog) Unwrap() *Blog {\n\t_tx, ok := b.config.driver.(*txDriver)\n\tif !ok {\n\t\tpanic(\"entv2: Blog is not a transactional entity\")\n\t}\n\tb.config.driver = _tx.drv\n\treturn b\n}", "func Use(db *gorm.DB) *DB {\n\treturn &DB{\n\t\tdb: db,\n\t}\n}", "func ToPersistenceSetting(t *admin.PersistenceSetting) *types.PersistenceSetting {\n\tif t == nil {\n\t\treturn nil\n\t}\n\treturn &types.PersistenceSetting{\n\t\tKey: t.GetKey(),\n\t\tValue: t.GetValue(),\n\t}\n}", "func ToPersistenceFeature(t *admin.PersistenceFeature) *types.PersistenceFeature {\n\tif t == nil {\n\t\treturn nil\n\t}\n\treturn &types.PersistenceFeature{\n\t\tKey: t.GetKey(),\n\t\tEnabled: t.GetEnabled(),\n\t}\n}", "func (d *Person) ToDocument() dynago.Document {\n\treturn dynago.Document{\n\t\t\"name\": d.Name,\n\t\t\"email\": d.Email,\n\t\t\"car_type\": d.CarType,\n\t\t\"drive\": d.Driver,\n\t\t\"lat\": d.Lat,\n\t\t\"lng\": d.Lng,\n\t\t\"geohash\": d.GeoHash,\n\t}\n}", "func (m *Matchers) ToDB() ([]byte, error) {\n\tblobMatchers, err := json.Marshal(m)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to convert matchers to send to the database: %w\", err)\n\t}\n\treturn blobMatchers, nil\n}", "func WrapDB(db *gorm.DB) (DB, error) {\n\tif db == nil {\n\t\treturn nil, 
fmt.Errorf(\"[err] WrapDB empty params\")\n\t}\n\twdb := &wrapDB{gorm: db}\n\t// add callback functions to *gorm.DB\n\twdb.registerCallbacks()\n\treturn wdb, nil\n}", "func (tf Task) ToModel(te domain.Task) model.Task {\n\treturn model.Task{\n\t\tID: te.ID,\n\t\tTitle: te.Title,\n\t\tDescription: sql.NullString{String: te.Description, Valid: len(te.Description) > 0},\n\t\tDueDate: sql.NullTime{Time: te.DueDate, Valid: !te.DueDate.IsZero()},\n\t\tCreatedAt: te.CreatedAt,\n\t\tUpdatedAt: te.UpdatedAt,\n\t}\n}", "func (o *MySQLOutput) Setup() error {\n\tdb, err := gorm.Open(mysql.Open(fmt.Sprintf(\"%s:%s@tcp(%s:%s)/?%s\", o.Cfg.User, o.Cfg.Password, o.Cfg.Host, o.Cfg.Port, \"charset=utf8mb4&parseTime=true\")), &gorm.Config{DisableForeignKeyConstraintWhenMigrating: true})\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to establish a general connection: %s\", err)\n\t}\n\terr = db.Exec(fmt.Sprintf(\"CREATE DATABASE IF NOT EXISTS `%s` DEFAULT CHARACTER SET = `utf8mb4` DEFAULT COLLATE = `utf8mb4_unicode_ci`;\", o.Cfg.Database)).Error\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to create database: %v\", err)\n\t}\n\tdb, err = gorm.Open(mysql.Open(fmt.Sprintf(\"%s:%s@tcp(%s:%s)/%s?%s\", o.Cfg.User, o.Cfg.Password, o.Cfg.Host, o.Cfg.Port, o.Cfg.Database, \"charset=utf8mb4&parseTime=true\")), &gorm.Config{DisableForeignKeyConstraintWhenMigrating: true})\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to establish a database connection: %v\", err)\n\t}\n\tdb = db.Set(\"gorm:table_options\", \"CHARSET=utf8mb4 ENGINE=InnoDB COLLATE=utf8mb4_unicode_ci\")\n\to.Client = db.Session(&gorm.Session{Logger: o.Cfg.Logger})\n\tfor table, model := range o.RelatedModels {\n\t\tif err := o.Client.Table(table).AutoMigrate(model); err != nil {\n\t\t\treturn fmt.Errorf(\"table %s auto-migration error: %v\", table, err)\n\t\t}\n\t}\n\tif err := o.buildTablesInfo(); err != nil {\n\t\treturn fmt.Errorf(\"failed to gather tables information: %v\", err)\n\t}\n\treturn nil\n}", "func 
Instance() *gorm.DB {\n return database\n}", "func PostAutoMigrate(db *gorm.DB) error {\n\t// These types don't apply for sqlite -- just mysql.\n\tif db.Dialect().GetName() == mySQLDialect {\n\t\tdb.Model(&Invocation{}).ModifyColumn(\"pattern\", \"text\")\n\t\tdb.Model(&Execution{}).ModifyColumn(\"serialized_operation\", \"text\")\n\t}\n\treturn nil\n}", "func (d *Dao) Close() (err error) {\n\tif d.WriteORM != nil {\n\t\td.WriteORM.Close()\n\t}\n\tif d.ReadORM != nil {\n\t\td.ReadORM.Close()\n\t}\n\treturn\n}", "func GetOrmer() orm.Ormer {\n\tonce.Do(func() {\n\t\t// override the default value(1000) to return all records when setting no limit\n\t\torm.DefaultRowsLimit = -1\n\t\tglobalOrm = orm.NewOrm()\n\t})\n\treturn globalOrm\n}", "func (r *RawRecord) UnmarshalTo(obj restlicodec.Unmarshaler) error {\n\treturn obj.UnmarshalRestLi(restlicodec.NewInterfaceReader(*r))\n}", "func NewDao(m interface{}, db *sql.DB, opts ...options.DaoOption) *Dao {\n\tdao := &Dao{\n\t\tdb: db,\n\t}\n\t// options\n\tcfg := options.DaoOptions{}\n\tfor _, fn := range opts {\n\t\tfn(&cfg)\n\t}\n\tif cfg.Table != \"\" {\n\t\tdao.table = cfg.Table\n\t} else {\n\t\tdao.table = strutils.ToUnderscore(model.ParseTableName(m))\n\t}\n\tdao.modelType = model.RealType(m)\n\t// fields\n\tfields := model.Parse(m)\n\tif len(fields) < 1 {\n\t\tpanic(\"No fields found in model given\")\n\t}\n\tdao.fields = fields\n\tdao.primaries = []*types.ModelField{}\n\tdao.columnMap = make(map[string]*types.ModelField, len(fields))\n\tdao.fieldMap = make(map[string]*types.ModelField, len(fields))\n\tcolumnsBuilder := strings.Builder{}\n\tholderBuilder := strings.Builder{}\n\tselectFields := make([]string, 0, len(fields))\n\tfor _, field := range fields {\n\t\tdao.columnMap[field.Column] = field\n\t\tdao.fieldMap[field.Name] = field\n\t\tif field.Primary {\n\t\t\tdao.primaries = append(dao.primaries, field)\n\t\t}\n\t\t{\n\t\t\tif columnsBuilder.Len() > 0 {\n\t\t\t\tcolumnsBuilder.WriteString(\", 
\")\n\t\t\t\tholderBuilder.WriteString(\", \")\n\t\t\t}\n\t\t\tcolumnsBuilder.WriteByte('`')\n\t\t\tcolumnsBuilder.WriteString(field.Column)\n\t\t\tcolumnsBuilder.WriteByte('`')\n\t\t\tholderBuilder.WriteString(\"?\")\n\t\t\tselectFields = append(selectFields, field.Name)\n\t\t}\n\t}\n\tif len(dao.primaries) < 1 {\n\t\tpanic(\"No primary key found\")\n\t}\n\tdao.columnsAll = columnsBuilder.String()\n\tdao.valuesHolder = holderBuilder.String()\n\tdao.selectColumns = selectFields\n\treturn dao\n}", "func (m *ArticleDB) DB() interface{} {\n\treturn m.Db\n}", "func (m *ArticleDB) DB() interface{} {\n\treturn m.Db\n}", "func Factory() (*ORM, error) {\n\tdsn := fmt.Sprintf(\"host=%s user=%s dbname=%s sslmode=%s password=%s\", hostDB, userDB, nameDB, sslDB, passwordDB) //Build connection string\n\n\tdb, err := gorm.Open(dialect, dsn)\n\tif err != nil {\n\t\tlog.Panic(\"[ORM] err: \", err)\n\t}\n\torm := &ORM{\n\t\tDB: db,\n\t}\n\t// Log every SQL command on dev, @prod: this should be disabled?\n\tdb.LogMode(logMode)\n\t// Automigrate tables\n\tif autoMigrate {\n\t\terr = migration.ServiceAutoMigration(orm.DB)\n\t}\n\tlog.Info(\"[ORM] Database connection initialized.\")\n\treturn orm, err\n}", "func GetEngine() *xorm.Engine {\n\treturn orm\n}", "func (_m *MockORM) Create(value interface{}) ORM {\n\tret := _m.Called(value)\n\n\tvar r0 ORM\n\tif rf, ok := ret.Get(0).(func(interface{}) ORM); ok {\n\t\tr0 = rf(value)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(ORM)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (s *Service) ToObject(data *Data) (r interface{}, err error) {\n\tdefer func() {\n\t\tif rec := recover(); rec != nil {\n\t\t\terr = makeError(rec)\n\t\t}\n\t}()\n\tvar ok bool\n\tif data == nil {\n\t\treturn nil, nil\n\t}\n\ttypeID := data.Type()\n\tserializer := s.lookupBuiltinDeserializer(typeID)\n\tif serializer == nil {\n\t\tserializer, ok = s.registry[typeID]\n\t\tif !ok {\n\t\t\treturn nil, ihzerrors.NewSerializationError(fmt.Sprintf(\"there is no 
suitable de-serializer for type %d\", typeID), nil)\n\t\t}\n\t}\n\tdataInput := NewObjectDataInput(data.Buffer(), DataOffset, s, !s.SerializationConfig.LittleEndian)\n\treturn serializer.Read(dataInput), nil\n}", "func MarshalToDB(fromValue interface{}) (interface{}, bool) {\n\tswitch fromValue.(type) {\n\tcase primitive.ObjectID:\n\t\treturn fromValue, true\n\tcase bool:\n\t\treturn fromValue, true\n\tcase string:\n\t\treturn fromValue, true\n\tcase int:\n\t\treturn int32(fromValue.(int)), true\n\tcase int8:\n\t\treturn int32(fromValue.(int8)), true\n\tcase int16:\n\t\treturn int32(fromValue.(int16)), true\n\tcase int32: // also covers rune\n\t\treturn int32(fromValue.(int32)), true\n\tcase int64:\n\t\treturn int64(fromValue.(int64)), true\n\tcase uint:\n\t\treturn int64(fromValue.(uint)), true\n\tcase uint8: // also covers byte\n\t\treturn int32(fromValue.(uint8)), true\n\tcase uint16:\n\t\treturn int32(fromValue.(uint16)), true\n\tcase uint32:\n\t\treturn int64(fromValue.(uint32)), true\n\tcase uint64:\n\t\tval := fromValue.(uint64)\n\t\treturn strconv.FormatUint(val, 10), true\n\tcase float32:\n\t\treturn float64(fromValue.(float32)), true\n\tcase float64:\n\t\treturn float64(fromValue.(float64)), true\n\tcase complex64:\n\t\treturn strconv.FormatComplex(complex128(fromValue.(complex64)), 'f', -1, 64), true\n\tcase complex128:\n\t\treturn strconv.FormatComplex(fromValue.(complex128), 'f', -1, 128), true\n\tcase *bool:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*bool)))\n\t\t}\n\t\treturn nil, true\n\tcase *string:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*string)))\n\t\t}\n\t\treturn nil, true\n\tcase *int:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*int)))\n\t\t}\n\t\treturn nil, true\n\tcase *int8:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*int8)))\n\t\t}\n\t\treturn nil, true\n\tcase *int16:\n\t\tif fromValue != nil {\n\t\t\treturn 
MarshalToDB(*(fromValue.(*int16)))\n\t\t}\n\t\treturn nil, true\n\tcase *int32:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*int32)))\n\t\t}\n\t\treturn nil, true\n\tcase *int64:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*int64)))\n\t\t}\n\t\treturn nil, true\n\tcase *uint:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*uint)))\n\t\t}\n\t\treturn nil, true\n\tcase *uint8:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*uint8)))\n\t\t}\n\t\treturn nil, true\n\tcase *uint16:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*uint16)))\n\t\t}\n\t\treturn nil, true\n\tcase *uint32:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*uint32)))\n\t\t}\n\t\treturn nil, true\n\tcase *uint64:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*uint64)))\n\t\t}\n\t\treturn nil, true\n\tcase *float32:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*float32)))\n\t\t}\n\t\treturn nil, true\n\tcase *float64:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*float64)))\n\t\t}\n\t\treturn nil, true\n\tcase *complex64:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*complex64)))\n\t\t}\n\t\treturn nil, true\n\tcase *complex128:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*complex128)))\n\t\t}\n\t\treturn nil, true\n\tdefault:\n\t\treturn fromValue, false\n\t}\n}", "func (d *dbBase) convertValueFromDB(fi *fieldInfo, val interface{}, tz *time.Location) (interface{}, error) {\n\tif val == nil {\n\t\treturn nil, nil\n\t}\n\n\tvar value interface{}\n\tvar tErr error\n\n\tvar str *StrTo\n\tswitch v := val.(type) {\n\tcase []byte:\n\t\ts := StrTo(string(v))\n\t\tstr = &s\n\tcase string:\n\t\ts := StrTo(v)\n\t\tstr = &s\n\t}\n\n\tfieldType := fi.fieldType\n\nsetValue:\n\tswitch {\n\tcase fieldType == TypeBooleanField:\n\t\tif str == nil {\n\t\t\tswitch v := val.(type) {\n\t\t\tcase 
int64:\n\t\t\t\tb := v == 1\n\t\t\t\tvalue = b\n\t\t\tdefault:\n\t\t\t\ts := StrTo(ToStr(v))\n\t\t\t\tstr = &s\n\t\t\t}\n\t\t}\n\t\tif str != nil {\n\t\t\tb, err := str.Bool()\n\t\t\tif err != nil {\n\t\t\t\ttErr = err\n\t\t\t\tgoto end\n\t\t\t}\n\t\t\tvalue = b\n\t\t}\n\tcase fieldType == TypeVarCharField || fieldType == TypeCharField || fieldType == TypeTextField || fieldType == TypeJSONField || fieldType == TypeJsonbField:\n\t\tif str == nil {\n\t\t\tvalue = ToStr(val)\n\t\t} else {\n\t\t\tvalue = str.String()\n\t\t}\n\tcase fieldType == TypeTimeField || fieldType == TypeDateField || fieldType == TypeDateTimeField:\n\t\tif str == nil {\n\t\t\tswitch t := val.(type) {\n\t\t\tcase time.Time:\n\t\t\t\td.ins.TimeFromDB(&t, tz)\n\t\t\t\tvalue = t\n\t\t\tdefault:\n\t\t\t\ts := StrTo(ToStr(t))\n\t\t\t\tstr = &s\n\t\t\t}\n\t\t}\n\t\tif str != nil {\n\t\t\ts := str.String()\n\t\t\tvar (\n\t\t\t\tt time.Time\n\t\t\t\terr error\n\t\t\t)\n\n\t\t\tif fi.timePrecision != nil && len(s) >= (20+*fi.timePrecision) {\n\t\t\t\tlayout := formatDateTime + \".\"\n\t\t\t\tfor i := 0; i < *fi.timePrecision; i++ {\n\t\t\t\t\tlayout += \"0\"\n\t\t\t\t}\n\t\t\t\tt, err = time.ParseInLocation(layout, s[:20+*fi.timePrecision], tz)\n\t\t\t} else if len(s) >= 19 {\n\t\t\t\ts = s[:19]\n\t\t\t\tt, err = time.ParseInLocation(formatDateTime, s, tz)\n\t\t\t} else if len(s) >= 10 {\n\t\t\t\tif len(s) > 10 {\n\t\t\t\t\ts = s[:10]\n\t\t\t\t}\n\t\t\t\tt, err = time.ParseInLocation(formatDate, s, tz)\n\t\t\t} else if len(s) >= 8 {\n\t\t\t\tif len(s) > 8 {\n\t\t\t\t\ts = s[:8]\n\t\t\t\t}\n\t\t\t\tt, err = time.ParseInLocation(formatTime, s, tz)\n\t\t\t}\n\t\t\tt = t.In(DefaultTimeLoc)\n\n\t\t\tif err != nil && s != \"00:00:00\" && s != \"0000-00-00\" && s != \"0000-00-00 00:00:00\" {\n\t\t\t\ttErr = err\n\t\t\t\tgoto end\n\t\t\t}\n\t\t\tvalue = t\n\t\t}\n\tcase fieldType&IsIntegerField > 0:\n\t\tif str == nil {\n\t\t\ts := StrTo(ToStr(val))\n\t\t\tstr = &s\n\t\t}\n\t\tif str != nil {\n\t\t\tvar err 
error\n\t\t\tswitch fieldType {\n\t\t\tcase TypeBitField:\n\t\t\t\t_, err = str.Int8()\n\t\t\tcase TypeSmallIntegerField:\n\t\t\t\t_, err = str.Int16()\n\t\t\tcase TypeIntegerField:\n\t\t\t\t_, err = str.Int32()\n\t\t\tcase TypeBigIntegerField:\n\t\t\t\t_, err = str.Int64()\n\t\t\tcase TypePositiveBitField:\n\t\t\t\t_, err = str.Uint8()\n\t\t\tcase TypePositiveSmallIntegerField:\n\t\t\t\t_, err = str.Uint16()\n\t\t\tcase TypePositiveIntegerField:\n\t\t\t\t_, err = str.Uint32()\n\t\t\tcase TypePositiveBigIntegerField:\n\t\t\t\t_, err = str.Uint64()\n\t\t\t}\n\t\t\tif err != nil {\n\t\t\t\ttErr = err\n\t\t\t\tgoto end\n\t\t\t}\n\t\t\tif fieldType&IsPositiveIntegerField > 0 {\n\t\t\t\tv, _ := str.Uint64()\n\t\t\t\tvalue = v\n\t\t\t} else {\n\t\t\t\tv, _ := str.Int64()\n\t\t\t\tvalue = v\n\t\t\t}\n\t\t}\n\tcase fieldType == TypeFloatField || fieldType == TypeDecimalField:\n\t\tif str == nil {\n\t\t\tswitch v := val.(type) {\n\t\t\tcase float64:\n\t\t\t\tvalue = v\n\t\t\tdefault:\n\t\t\t\ts := StrTo(ToStr(v))\n\t\t\t\tstr = &s\n\t\t\t}\n\t\t}\n\t\tif str != nil {\n\t\t\tv, err := str.Float64()\n\t\t\tif err != nil {\n\t\t\t\ttErr = err\n\t\t\t\tgoto end\n\t\t\t}\n\t\t\tvalue = v\n\t\t}\n\tcase fieldType&IsRelField > 0:\n\t\tfi = fi.relModelInfo.fields.pk\n\t\tfieldType = fi.fieldType\n\t\tgoto setValue\n\t}\n\nend:\n\tif tErr != nil {\n\t\terr := fmt.Errorf(\"convert to `%s` failed, field: %s err: %s\", fi.addrValue.Type(), fi.fullName, tErr)\n\t\treturn nil, err\n\t}\n\n\treturn value, nil\n}", "func Perform(db *gorm.DB) {\r\n\r\n\tdb.AutoMigrate(\r\n\t\t&models.WorkItem{})\r\n}", "func (Files) ToModel(data interface{}, model *Files) error {\n\tbsonBytes, err := bson.Marshal(data.(bson.M))\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = bson.Unmarshal(bsonBytes, &model)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (adapter *GORMAdapter) Create(entity interface{}) orm.Result {\n\treturn orm.Result{\n\t\tError: 
adapter.db.Create(entity).Error,\n\t}\n}", "func DB() *gorm.DB {\n\treturn db\n}", "func DB() *gorm.DB {\n\treturn db\n}", "func (reg *Registry) ToReal(logger logr.Logger) (globalregistry.Registry, error) {\n\treturn globalregistry.New(logger, reg)\n}", "func (s *Service) ToObject(data *Data) (r interface{}, err error) {\n\tdefer func() {\n\t\tif rec := recover(); rec != nil {\n\t\t\terr = makeError(rec)\n\t\t}\n\t}()\n\tif data == nil {\n\t\treturn nil, nil\n\t}\n\ttypeID := data.Type()\n\tif typeID == 0 {\n\t\treturn data, nil\n\t}\n\tserializer, ok := s.registry[typeID]\n\tif !ok {\n\t\treturn nil, hzerrors.NewHazelcastSerializationError(fmt.Sprintf(\"there is no suitable de-serializer for type %d\", typeID), nil)\n\t}\n\tdataInput := NewObjectDataInput(data.Buffer(), DataOffset, s, s.SerializationConfig.BigEndian)\n\treturn serializer.Read(dataInput), nil\n}", "func (a *InventoryDTO) InventoryDTOToDAL() (*dal.InventoryDAL, error) { \n\tinventory := &dal.InventoryDAL{\n\t\tInventoryID:a.InventoryID,\n\t\tFilmID:a.FilmID,\n\t\tStoreID:a.StoreID,\n\t\tLastUpdate:a.LastUpdate,\n\t\t \n\t}\n\treturn inventory, nil\n}", "func (userModel *UserModel) ToEntity() entities.User {\n\treturn entities.User{\n\t\tUUID: userModel.UUID,\n\t\tEmailID: userModel.EmailID,\n\t\tSolvedQuestions: strings.Split(userModel.SolvedQuestions, \",\"),\n\t\tHintsUsed: strings.Split(userModel.HintsUsed, \",\"),\n\t}\n}", "func (e GenericPersistable) ToJSON() []byte {\n\t\tresult, _ := json.Marshal(e)\n\t\treturn result\n\t}", "func (self Accessor) ToSql() (string, error) {\n\treturn self.From(self.Relation()).ToSql()\n}", "func (ot *ObjectType) Unwrap() *ObjectType {\n\ttx, ok := ot.config.driver.(*txDriver)\n\tif !ok {\n\t\tpanic(\"ent: ObjectType is not a transactional entity\")\n\t}\n\tot.config.driver = tx.drv\n\treturn ot\n}", "func (w *NotificationPolicy) ConvertToDBModel() error {\n\tif len(w.Targets) != 0 {\n\t\ttargets, err := json.Marshal(w.Targets)\n\t\tif err != nil 
{\n\t\t\treturn err\n\t\t}\n\t\tw.TargetsDB = string(targets)\n\t}\n\tif len(w.EventTypes) != 0 {\n\t\teventTypes, err := json.Marshal(w.EventTypes)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tw.EventTypesDB = string(eventTypes)\n\t}\n\n\treturn nil\n}", "func (database *DatabaseAccounts_SqlDatabase_Spec) ConvertToARM(resolved genruntime.ConvertToARMResolvedDetails) (interface{}, error) {\n\tif database == nil {\n\t\treturn nil, nil\n\t}\n\tresult := &DatabaseAccounts_SqlDatabase_Spec_ARM{}\n\n\t// Set property \"Location\":\n\tif database.Location != nil {\n\t\tlocation := *database.Location\n\t\tresult.Location = &location\n\t}\n\n\t// Set property \"Name\":\n\tresult.Name = resolved.Name\n\n\t// Set property \"Properties\":\n\tif database.Options != nil || database.Resource != nil {\n\t\tresult.Properties = &SqlDatabaseCreateUpdateProperties_ARM{}\n\t}\n\tif database.Options != nil {\n\t\toptions_ARM, err := (*database.Options).ConvertToARM(resolved)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\toptions := *options_ARM.(*CreateUpdateOptions_ARM)\n\t\tresult.Properties.Options = &options\n\t}\n\tif database.Resource != nil {\n\t\tresource_ARM, err := (*database.Resource).ConvertToARM(resolved)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresource := *resource_ARM.(*SqlDatabaseResource_ARM)\n\t\tresult.Properties.Resource = &resource\n\t}\n\n\t// Set property \"Tags\":\n\tif database.Tags != nil {\n\t\tresult.Tags = make(map[string]string, len(database.Tags))\n\t\tfor key, value := range database.Tags {\n\t\t\tresult.Tags[key] = value\n\t\t}\n\t}\n\treturn result, nil\n}", "func ToSqlParms(o interface{}) SQLParms {\n\treturn conv.StructToMap(o)\n}", "func MarshalFromDB(intoType reflect.Type, fromValue interface{}) interface{} {\n\tif reflect.TypeOf(fromValue) == intoType {\n\t\treturn fromValue\n\t}\n\n\tswitch intoType.Kind() {\n\tcase reflect.Int8:\n\t\tfallthrough\n\tcase reflect.Int16:\n\t\tfallthrough\n\tcase 
reflect.Int32:\n\t\tfallthrough\n\tcase reflect.Int64:\n\t\tfallthrough\n\tcase reflect.Int:\n\t\tdstPtr := reflect.New(intoType)\n\t\tdst := reflect.Indirect(dstPtr)\n\t\tsrc := reflect.ValueOf(fromValue)\n\t\tif dst.OverflowInt(src.Int()) {\n\t\t\tlog.Panicf(\"Overflow detected while storing %v within %v\", src.Type(), dst.Type())\n\t\t}\n\t\tdst.SetInt(src.Int())\n\t\treturn dst.Interface()\n\tcase reflect.Uint8:\n\t\tfallthrough\n\tcase reflect.Uint16:\n\t\tfallthrough\n\tcase reflect.Uint32:\n\t\tfallthrough\n\tcase reflect.Uint64:\n\t\tfallthrough\n\tcase reflect.Uint:\n\t\tdstPtr := reflect.New(intoType)\n\t\tdst := reflect.Indirect(dstPtr)\n\t\tvar srcStr string\n\t\tswitch fromValue.(type) {\n\t\tcase int64:\n\t\t\tsrcStr = strconv.FormatInt(fromValue.(int64), 10)\n\t\tcase int32:\n\t\t\tsrcStr = strconv.FormatInt(int64(fromValue.(int32)), 10)\n\t\tcase string:\n\t\t\tsrcStr = fromValue.(string)\n\t\t}\n\t\tsrcUint64, srcUint64Err := strconv.ParseUint(srcStr, 10, 64)\n\t\tif srcUint64Err != nil {\n\t\t\tlog.Panicf(\"Error detected while storing %v within %v: %v\", reflect.TypeOf(fromValue), intoType, srcUint64Err)\n\t\t}\n\t\tif dst.OverflowUint(srcUint64) {\n\t\t\tlog.Panicf(\"Overflow detected while storing %v within %v\", reflect.TypeOf(fromValue), intoType)\n\t\t}\n\t\tdst.SetUint(srcUint64)\n\t\treturn dst.Interface()\n\tcase reflect.Complex64:\n\t\tfallthrough\n\tcase reflect.Complex128:\n\t\tdstPtr := reflect.New(intoType)\n\t\tdst := reflect.Indirect(dstPtr)\n\t\tvar srcStr string\n\t\tswitch fromValue.(type) {\n\t\tcase complex64:\n\t\t\tsrcStr = strconv.FormatComplex(complex128(fromValue.(complex64)), 'f', -1, 64)\n\t\tcase complex128:\n\t\t\tsrcStr = strconv.FormatComplex(fromValue.(complex128), 'f', -1, 128)\n\t\tcase string:\n\t\t\tsrcStr = fromValue.(string)\n\t\t}\n\t\tvar dstBits int\n\t\tswitch intoType.Kind() {\n\t\tcase reflect.Complex64:\n\t\t\tdstBits = 64\n\t\tcase reflect.Complex128:\n\t\t\tdstBits = 128\n\t\t}\n\t\tsrcComplex128, 
srcComplex128Err := strconv.ParseComplex(srcStr, dstBits)\n\t\tif srcComplex128Err != nil {\n\t\t\tlog.Panicf(\"Error detected while storing %v within %v: %v\", reflect.TypeOf(fromValue), intoType, srcComplex128Err)\n\t\t}\n\t\tif dst.OverflowComplex(srcComplex128) {\n\t\t\tlog.Panicf(\"Overflow detected while storing %v within %v\", reflect.TypeOf(fromValue), intoType)\n\t\t}\n\t\tdst.SetComplex(srcComplex128)\n\t\treturn dst.Interface()\n\t}\n\tlog.Panicf(\"Unhandled kind: %v\", intoType.Kind())\n\treturn nil\n}" ]
[ "0.734709", "0.71402293", "0.71162224", "0.7063234", "0.67791146", "0.66816986", "0.6518742", "0.65066636", "0.61664253", "0.61527824", "0.60039365", "0.5621043", "0.56026745", "0.55870265", "0.55276644", "0.5513942", "0.5510278", "0.54907125", "0.5351097", "0.5330373", "0.5288272", "0.5229566", "0.52278614", "0.5221685", "0.5177009", "0.50882465", "0.50287026", "0.49686974", "0.49683306", "0.49220398", "0.49163395", "0.47865587", "0.47786608", "0.47754484", "0.47244176", "0.47064662", "0.46851966", "0.46695825", "0.4662574", "0.46551648", "0.46540704", "0.46493435", "0.46340266", "0.46284994", "0.4625039", "0.4602102", "0.4593214", "0.4592832", "0.4573691", "0.45729995", "0.45616505", "0.4531067", "0.449992", "0.44968146", "0.4489571", "0.44604528", "0.44570243", "0.44481555", "0.44407943", "0.4429363", "0.4423802", "0.44212446", "0.43963447", "0.4394025", "0.4389021", "0.43695447", "0.43667966", "0.43666148", "0.4366119", "0.43652746", "0.43481392", "0.43465573", "0.43378648", "0.4308207", "0.43045425", "0.42983583", "0.42905664", "0.42905664", "0.4289474", "0.42873737", "0.4283421", "0.42814264", "0.4280861", "0.42784083", "0.42570505", "0.4254565", "0.42517382", "0.42448783", "0.42448783", "0.42443937", "0.42401963", "0.42191368", "0.4214165", "0.42094088", "0.4192315", "0.4183652", "0.41728094", "0.4170735", "0.41682035", "0.4159172" ]
0.7026667
4
ToPB runs the BeforeToPB hook if present, converts the fields of this object to PB format, runs the AfterToPB hook, then returns the PB object
func (m *HealthMenstruationPersonalInfoORM) ToPB(ctx context.Context) (HealthMenstruationPersonalInfo, error) { to := HealthMenstruationPersonalInfo{} var err error if prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToPB); ok { if err = prehook.BeforeToPB(ctx, &to); err != nil { return to, err } } to.Id = m.Id if m.CreatedAt != nil { if to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil { return to, err } } if m.UpdatedAt != nil { if to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil { return to, err } } to.ProfileId = m.ProfileId to.PeriodLengthInDays = m.PeriodLengthInDays to.CycleLengthInDays = m.CycleLengthInDays if posthook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithAfterToPB); ok { err = posthook.AfterToPB(ctx, &to) } return to, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *ContactORM) ToPB(ctx context.Context) (Contact, error) {\n\tto := Contact{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(ContactWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.FirstName = m.FirstName\n\tto.MiddleName = m.MiddleName\n\tto.LastName = m.LastName\n\t// Skipping field: PrimaryEmail\n\tfor _, v := range m.Emails {\n\t\tif v != nil {\n\t\t\tif tempEmails, cErr := v.ToPB(ctx); cErr == nil {\n\t\t\t\tto.Emails = append(to.Emails, &tempEmails)\n\t\t\t} else {\n\t\t\t\treturn to, cErr\n\t\t\t}\n\t\t} else {\n\t\t\tto.Emails = append(to.Emails, nil)\n\t\t}\n\t}\n\tif posthook, ok := interface{}(m).(ContactWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *ProfileORM) ToPB(ctx context.Context) (Profile, error) {\n\tto := Profile{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(ProfileWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Notes = m.Notes\n\tto.FirstName = m.FirstName\n\tto.LastName = m.LastName\n\tto.PrimaryEmail = m.PrimaryEmail\n\tfor _, v := range m.Groups {\n\t\tif v != nil {\n\t\t\tif tempGroups, cErr := v.ToPB(ctx); cErr == nil {\n\t\t\t\tto.Groups = append(to.Groups, &tempGroups)\n\t\t\t} else {\n\t\t\t\treturn to, cErr\n\t\t\t}\n\t\t} else {\n\t\t\tto.Groups = append(to.Groups, nil)\n\t\t}\n\t}\n\tto.ProfilePictureUrl = m.ProfilePictureUrl\n\tif posthook, ok := interface{}(m).(ProfileWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *PeriodORM) ToPB(ctx context.Context) 
(Period, error) {\n\tto := Period{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(PeriodWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.Period = m.Period\n\tif m.CreatedAt != nil {\n\t\tto.CreatedAt = timestamppb.New(*m.CreatedAt)\n\t}\n\tif m.UpdatedAt != nil {\n\t\tto.UpdatedAt = timestamppb.New(*m.UpdatedAt)\n\t}\n\tif posthook, ok := interface{}(m).(PeriodWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *EmailORM) ToPB(ctx context.Context) (Email, error) {\n\tto := Email{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(EmailWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.Address = m.Address\n\tif posthook, ok := interface{}(m).(EmailWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *UserInfoORM) ToPB(ctx context.Context) (UserInfo, error) {\n\tto := UserInfo{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(UserInfoWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.UserId = m.UserId\n\tto.LastName = m.LastName\n\tto.FirstName = m.FirstName\n\tto.Period = m.Period\n\tto.DepartmentId = m.DepartmentId\n\tto.JobId = m.JobId\n\tto.EnrollmentFlg = m.EnrollmentFlg\n\tto.AdminFlg = m.AdminFlg\n\tif m.CreatedAt != nil {\n\t\tto.CreatedAt = timestamppb.New(*m.CreatedAt)\n\t}\n\tif m.UpdatedAt != nil {\n\t\tto.UpdatedAt = timestamppb.New(*m.UpdatedAt)\n\t}\n\tif posthook, ok := interface{}(m).(UserInfoWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *IntPointORM) ToPB(ctx context.Context) (IntPoint, error) {\n\tto := IntPoint{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(IntPointWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); 
err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.X = m.X\n\tto.Y = m.Y\n\tif posthook, ok := interface{}(m).(IntPointWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (t Transaction) PB() *pb.Transaction {\n\treturn &pb.Transaction{\n\t\tId: t.ID.String(),\n\t\tDate: t.Date.Format(\"2006-01-02\"),\n\t\tEntity: t.Entity,\n\t\tReference: t.Reference,\n\t\tHash: t.Hash,\n\t\tPostings: t.Postings.PB(),\n\t}\n}", "func ProtoToBackup(p *betapb.FilestoreBetaBackup) *beta.Backup {\n\tobj := &beta.Backup{\n\t\tName: dcl.StringOrNil(p.GetName()),\n\t\tDescription: dcl.StringOrNil(p.GetDescription()),\n\t\tState: ProtoToFilestoreBetaBackupStateEnum(p.GetState()),\n\t\tCreateTime: dcl.StringOrNil(p.GetCreateTime()),\n\t\tCapacityGb: dcl.Int64OrNil(p.GetCapacityGb()),\n\t\tStorageBytes: dcl.Int64OrNil(p.GetStorageBytes()),\n\t\tSourceInstance: dcl.StringOrNil(p.GetSourceInstance()),\n\t\tSourceFileShare: dcl.StringOrNil(p.GetSourceFileShare()),\n\t\tSourceInstanceTier: ProtoToFilestoreBetaBackupSourceInstanceTierEnum(p.GetSourceInstanceTier()),\n\t\tDownloadBytes: dcl.Int64OrNil(p.GetDownloadBytes()),\n\t\tProject: dcl.StringOrNil(p.GetProject()),\n\t\tLocation: dcl.StringOrNil(p.GetLocation()),\n\t}\n\treturn obj\n}", "func (m *CommentORM) ToPB(ctx context.Context) (Comment, error) {\n\tto := Comment{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(CommentWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = &types1.UUID{Value: m.Id.String()}\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.DeletedAt != nil {\n\t\tif to.DeletedAt, err = ptypes1.TimestampProto(*m.DeletedAt); err != nil {\n\t\t\treturn 
to, err\n\t\t}\n\t}\n\tif m.BoardId != nil {\n\t\tto.BoardId = &wrappers.StringValue{Value: *m.BoardId}\n\t}\n\tif m.PostId != nil {\n\t\tto.PostId = &wrappers.StringValue{Value: *m.PostId}\n\t}\n\tif m.ContentId != nil {\n\t\tto.ContentId = &wrappers.StringValue{Value: *m.ContentId}\n\t}\n\tif m.Userid != nil {\n\t\tto.Userid = &wrappers.StringValue{Value: *m.Userid}\n\t}\n\tif m.Username != nil {\n\t\tto.Username = &wrappers.StringValue{Value: *m.Username}\n\t}\n\tif m.Nickname != nil {\n\t\tto.Nickname = &wrappers.StringValue{Value: *m.Nickname}\n\t}\n\tif m.Email != nil {\n\t\tto.Email = &wrappers.StringValue{Value: *m.Email}\n\t}\n\tif m.Password != nil {\n\t\tto.Password = &wrappers.StringValue{Value: *m.Password}\n\t}\n\tif m.Url != nil {\n\t\tto.Url = &wrappers.StringValue{Value: *m.Url}\n\t}\n\tif m.UseHtml != nil {\n\t\tto.UseHtml = &wrappers.BoolValue{Value: *m.UseHtml}\n\t}\n\tif m.UseSecret != nil {\n\t\tto.UseSecret = &wrappers.BoolValue{Value: *m.UseSecret}\n\t}\n\tto.UpVoteCount = m.UpVoteCount\n\tto.DownVoteCount = m.DownVoteCount\n\tif posthook, ok := interface{}(m).(CommentWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (b *Block) ToProto() (*tmproto.Block, error) {\n\tif b == nil {\n\t\treturn nil, errors.New(\"nil Block\")\n\t}\n\n\tpb := new(tmproto.Block)\n\n\tpb.Header = *b.Header.ToProto()\n\tpb.LastCommit = b.LastCommit.ToProto()\n\tpb.Data = b.Data.ToProto()\n\n\tprotoEvidence, err := b.Evidence.ToProto()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpb.Evidence = *protoEvidence\n\n\treturn pb, nil\n}", "func (m *ContactORM) AfterToPB(ctx context.Context, c *Contact) error {\n\tif len(m.Emails) == 0 {\n\t\treturn nil\n\t}\n\t// find the primary e-mail in list of e-mails from DB\n\tfor _, addr := range m.Emails {\n\t\tif addr != nil && addr.IsPrimary {\n\t\t\tc.PrimaryEmail = addr.Address\n\t\t\tbreak\n\t\t}\n\t}\n\treturn nil\n}", "func (b *Block) ToProto() (*tmproto.Block, error) {\n\tif b 
== nil {\n\t\treturn nil, errors.New(\"nil Block\")\n\t}\n\n\tpb := new(tmproto.Block)\n\n\tpb.Header = *b.Header.ToProto()\n\tpb.CoreChainLock = b.CoreChainLock.ToProto()\n\tpb.LastCommit = b.LastCommit.ToProto()\n\tpb.Data = b.Data.ToProto()\n\n\tprotoEvidence, err := b.Evidence.ToProto()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpb.Evidence = *protoEvidence\n\n\treturn pb, nil\n}", "func BackupToProto(resource *beta.Backup) *betapb.FilestoreBetaBackup {\n\tp := &betapb.FilestoreBetaBackup{}\n\tp.SetName(dcl.ValueOrEmptyString(resource.Name))\n\tp.SetDescription(dcl.ValueOrEmptyString(resource.Description))\n\tp.SetState(FilestoreBetaBackupStateEnumToProto(resource.State))\n\tp.SetCreateTime(dcl.ValueOrEmptyString(resource.CreateTime))\n\tp.SetCapacityGb(dcl.ValueOrEmptyInt64(resource.CapacityGb))\n\tp.SetStorageBytes(dcl.ValueOrEmptyInt64(resource.StorageBytes))\n\tp.SetSourceInstance(dcl.ValueOrEmptyString(resource.SourceInstance))\n\tp.SetSourceFileShare(dcl.ValueOrEmptyString(resource.SourceFileShare))\n\tp.SetSourceInstanceTier(FilestoreBetaBackupSourceInstanceTierEnumToProto(resource.SourceInstanceTier))\n\tp.SetDownloadBytes(dcl.ValueOrEmptyInt64(resource.DownloadBytes))\n\tp.SetProject(dcl.ValueOrEmptyString(resource.Project))\n\tp.SetLocation(dcl.ValueOrEmptyString(resource.Location))\n\tmLabels := make(map[string]string, len(resource.Labels))\n\tfor k, r := range resource.Labels {\n\t\tmLabels[k] = r\n\t}\n\tp.SetLabels(mLabels)\n\n\treturn p\n}", "func (i *Invoice) createP2PProtobuf() *invoicepb.InvoiceData {\n\tvar recipient, sender, payee []byte\n\tif i.Recipient != nil {\n\t\trecipient = i.Recipient[:]\n\t}\n\n\tif i.Sender != nil {\n\t\tsender = i.Sender[:]\n\t}\n\n\tif i.Payee != nil {\n\t\tpayee = i.Payee[:]\n\t}\n\n\treturn &invoicepb.InvoiceData{\n\t\tInvoiceNumber: i.InvoiceNumber,\n\t\tInvoiceStatus: i.InvoiceStatus,\n\t\tSenderName: i.SenderName,\n\t\tSenderStreet: i.SenderStreet,\n\t\tSenderCity: i.SenderCity,\n\t\tSenderZipcode: 
i.SenderZipcode,\n\t\tSenderCountry: i.SenderCountry,\n\t\tRecipientName: i.RecipientName,\n\t\tRecipientStreet: i.RecipientStreet,\n\t\tRecipientCity: i.RecipientCity,\n\t\tRecipientZipcode: i.RecipientZipcode,\n\t\tRecipientCountry: i.RecipientCountry,\n\t\tCurrency: i.Currency,\n\t\tGrossAmount: i.GrossAmount,\n\t\tNetAmount: i.NetAmount,\n\t\tTaxAmount: i.TaxAmount,\n\t\tTaxRate: i.TaxRate,\n\t\tRecipient: recipient,\n\t\tSender: sender,\n\t\tPayee: payee,\n\t\tComment: i.Comment,\n\t\tDueDate: i.DueDate,\n\t\tDateCreated: i.DateCreated,\n\t\tExtraData: i.ExtraData,\n\t}\n\n}", "func (b *Block) ConvertToBlockPb() *iotextypes.Block {\n\tfooter, err := b.ConvertToBlockFooterPb()\n\tif err != nil {\n\t\tlog.L().Panic(\"failed to convert block footer to protobuf message\")\n\t}\n\treturn &iotextypes.Block{\n\t\tHeader: b.Header.Proto(),\n\t\tBody: b.Body.Proto(),\n\t\tFooter: footer,\n\t}\n}", "func CopyPB(dst interface{}, src interface{}) interface{} {\n\tif src == nil {\n\t\treturn nil\n\t}\n\tvar b []byte\n\tvar err error\n\tif srcPB, ok := src.(proto.Message); ok {\n\t\tv := reflect.ValueOf(srcPB)\n\t\tif srcPB == nil || (v.Kind() == reflect.Ptr && v.IsNil()) {\n\t\t\treturn dst\n\t\t}\n\t\tvar buf bytes.Buffer\n\t\tm := &jsonpb.Marshaler{EnumsAsInts: true}\n\t\terr = m.Marshal(&buf, srcPB)\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(\"could not Marshal proto.Message: %v\", err)\n\t\t}\n\t\tb = buf.Bytes()\n\t} else if srcPB, ok := src.(protov2.Message); ok {\n\t\tif srcPB == nil || !srcPB.ProtoReflect().IsValid() {\n\t\t\treturn dst\n\t\t}\n\t\tmo := protojson.MarshalOptions{UseEnumNumbers: true}\n\t\tb, err = mo.Marshal(srcPB)\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(\"could not Marshal protov2.Message: %v\", err)\n\t\t}\n\t} else {\n\t\tb, err = json.Marshal(src)\n\t}\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tif dstPB, ok := dst.(proto.Message); ok {\n\t\tu := jsonpb.Unmarshaler{}\n\t\tu.AllowUnknownFields = true\n\t\terr = 
u.Unmarshal(bytes.NewReader(b), dstPB)\n\t\tdst = dstPB\n\t} else if dstPB, ok := dst.(protov2.Message); ok {\n\t\tuo := protojson.UnmarshalOptions{DiscardUnknown: false}\n\t\terr = uo.Unmarshal(b, dstPB)\n\t\tdst = dstPB\n\t} else {\n\t\terr = json.Unmarshal(b, dst)\n\t}\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn dst\n}", "func (commit *Commit) ToProto() *tmproto.Commit {\n\tif commit == nil {\n\t\treturn nil\n\t}\n\n\tc := new(tmproto.Commit)\n\tsigs := make([]tmproto.CommitSig, len(commit.Signatures))\n\tfor i := range commit.Signatures {\n\t\tsigs[i] = *commit.Signatures[i].ToProto()\n\t}\n\tc.Signatures = sigs\n\n\tc.Height = commit.Height\n\tc.Round = commit.Round\n\tc.BlockID = commit.BlockID.ToProto()\n\n\treturn c\n}", "func (p Pipeline) ToProto(pb *pipelinepb.AppliedPipeline) error {\n\tnumOps := len(p.Operations)\n\tif cap(pb.Ops) >= numOps {\n\t\tpb.Ops = pb.Ops[:numOps]\n\t} else {\n\t\tpb.Ops = make([]pipelinepb.AppliedPipelineOp, numOps)\n\t}\n\tfor i := 0; i < numOps; i++ {\n\t\tif err := p.Operations[i].ToProto(&pb.Ops[i]); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (st *Account) ToProto() *iproto.AccountPb {\n\tacPb := &iproto.AccountPb{}\n\tacPb.Nonce = st.Nonce\n\tif st.Balance != nil {\n\t\tacPb.Balance = st.Balance.Bytes()\n\t}\n\tacPb.Root = make([]byte, hash.HashSize)\n\tcopy(acPb.Root, st.Root[:])\n\tacPb.CodeHash = make([]byte, len(st.CodeHash))\n\tcopy(acPb.CodeHash, st.CodeHash)\n\tacPb.IsCandidate = st.IsCandidate\n\tif st.VotingWeight != nil {\n\t\tacPb.VotingWeight = st.VotingWeight.Bytes()\n\t}\n\tacPb.Votee = st.Votee\n\treturn acPb\n}", "func (commit *Commit) ToProto() *tmproto.Commit {\n\tif commit == nil {\n\t\treturn nil\n\t}\n\n\tc := new(tmproto.Commit)\n\n\tc.Height = commit.Height\n\tc.Round = commit.Round\n\tc.BlockID = commit.BlockID.ToProto()\n\tc.StateID = commit.StateID.ToProto()\n\n\tc.ThresholdStateSignature = commit.ThresholdStateSignature\n\tc.ThresholdBlockSignature = 
commit.ThresholdBlockSignature\n\n\tc.QuorumHash = commit.QuorumHash\n\n\treturn c\n}", "func ComputeBetaInstanceTemplatePropertiesToProto(o *beta.InstanceTemplateProperties) *betapb.ComputeBetaInstanceTemplateProperties {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &betapb.ComputeBetaInstanceTemplateProperties{\n\t\tCanIpForward: dcl.ValueOrEmptyBool(o.CanIPForward),\n\t\tDescription: dcl.ValueOrEmptyString(o.Description),\n\t\tMachineType: dcl.ValueOrEmptyString(o.MachineType),\n\t\tMinCpuPlatform: dcl.ValueOrEmptyString(o.MinCpuPlatform),\n\t\tReservationAffinity: ComputeBetaInstanceTemplatePropertiesReservationAffinityToProto(o.ReservationAffinity),\n\t\tShieldedInstanceConfig: ComputeBetaInstanceTemplatePropertiesShieldedInstanceConfigToProto(o.ShieldedInstanceConfig),\n\t\tScheduling: ComputeBetaInstanceTemplatePropertiesSchedulingToProto(o.Scheduling),\n\t}\n\tfor _, r := range o.Disks {\n\t\tp.Disks = append(p.Disks, ComputeBetaInstanceTemplatePropertiesDisksToProto(&r))\n\t}\n\tp.Labels = make(map[string]string)\n\tfor k, r := range o.Labels {\n\t\tp.Labels[k] = r\n\t}\n\tp.Metadata = make(map[string]string)\n\tfor k, r := range o.Metadata {\n\t\tp.Metadata[k] = r\n\t}\n\tfor _, r := range o.GuestAccelerators {\n\t\tp.GuestAccelerators = append(p.GuestAccelerators, ComputeBetaInstanceTemplatePropertiesGuestAcceleratorsToProto(&r))\n\t}\n\tfor _, r := range o.NetworkInterfaces {\n\t\tp.NetworkInterfaces = append(p.NetworkInterfaces, ComputeBetaInstanceTemplatePropertiesNetworkInterfacesToProto(&r))\n\t}\n\tfor _, r := range o.ServiceAccounts {\n\t\tp.ServiceAccounts = append(p.ServiceAccounts, ComputeBetaInstanceTemplatePropertiesServiceAccountsToProto(&r))\n\t}\n\tfor _, r := range o.Tags {\n\t\tp.Tags = append(p.Tags, r)\n\t}\n\treturn p\n}", "func (vc *VehicleContainer) SavePB(target string, humanReadable bool) (err error) {\n\t// Marshall to GTFS-RT\n\tvar b []byte\n\tif humanReadable {\n\t\tb, err = prototext.Marshal(vc.AsProto())\n\t} else {\n\t\tb, 
err = proto.Marshal(vc.AsProto())\n\t}\n\n\t// Check for marshall errors\n\tif err != nil {\n\t\treturn\n\t}\n\n\t// Write to target file\n\terr = os.WriteFile(target, b, 0o666)\n\treturn\n}", "func (p Postings) PB() []*pb.Posting {\n\tvar postings []*pb.Posting\n\tfor _, posting := range p {\n\t\tpostings = append(postings, posting.PB())\n\t}\n\n\treturn postings\n}", "func (m *HealthMenstruationDailyEntryORM) ToPB(ctx context.Context) (HealthMenstruationDailyEntry, error) {\n\tto := HealthMenstruationDailyEntry{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tif to.Day, err = ptypes1.TimestampProto(*m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = HealthMenstruationDailyEntry_Type(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func InstanceTemplateToProto(resource *beta.InstanceTemplate) *betapb.ComputeBetaInstanceTemplate {\n\tp := &betapb.ComputeBetaInstanceTemplate{\n\t\tCreationTimestamp: dcl.ValueOrEmptyString(resource.CreationTimestamp),\n\t\tDescription: dcl.ValueOrEmptyString(resource.Description),\n\t\tId: dcl.ValueOrEmptyInt64(resource.Id),\n\t\tSelfLink: dcl.ValueOrEmptyString(resource.SelfLink),\n\t\tName: dcl.ValueOrEmptyString(resource.Name),\n\t\tProperties: 
ComputeBetaInstanceTemplatePropertiesToProto(resource.Properties),\n\t\tProject: dcl.ValueOrEmptyString(resource.Project),\n\t}\n\n\treturn p\n}", "func toJSONPb(_ *starlark.Thread, _ *starlark.Builtin, args starlark.Tuple, kwargs []starlark.Tuple) (starlark.Value, error) {\n\tvar msg *Message\n\tvar emitDefaults starlark.Bool\n\tif err := starlark.UnpackArgs(\"to_jsonpb\", args, kwargs, \"msg\", &msg, \"emit_defaults?\", &emitDefaults); err != nil {\n\t\treturn nil, err\n\t}\n\tpb, err := msg.ToProto()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t// More jsonpb Marshaler options may be added here as needed.\n\tvar jsonMarshaler = &jsonpb.Marshaler{Indent: \"\\t\", EmitDefaults: bool(emitDefaults)}\n\tstr, err := jsonMarshaler.MarshalToString(pb)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn starlark.String(str), nil\n}", "func ProtoToInstanceTemplate(p *betapb.ComputeBetaInstanceTemplate) *beta.InstanceTemplate {\n\tobj := &beta.InstanceTemplate{\n\t\tCreationTimestamp: dcl.StringOrNil(p.GetCreationTimestamp()),\n\t\tDescription: dcl.StringOrNil(p.Description),\n\t\tId: dcl.Int64OrNil(p.Id),\n\t\tSelfLink: dcl.StringOrNil(p.SelfLink),\n\t\tName: dcl.StringOrNil(p.Name),\n\t\tProperties: ProtoToComputeBetaInstanceTemplateProperties(p.GetProperties()),\n\t\tProject: dcl.StringOrNil(p.Project),\n\t}\n\treturn obj\n}", "func unmarshalTodoPB(t *proto.Todo) (*models.Todo, error) {\n\ttodo := models.Todo{\n\t\tID: t.GetId(),\n\t\tComplete: t.GetComplete(),\n\t\tDescription: null.StringFrom(t.GetDescription()).Ptr(),\n\t\tTitle: t.GetTitle(),\n\t}\n\n\tcompletedAt, err := ptypes.Timestamp(t.GetCompletedAt())\n\tvalid := true\n\tif err != nil {\n\t\tvalid = false\n\t}\n\ttodo.CompletedAt = null.NewTime(completedAt, valid).Ptr()\n\n\tcreatedAt, err := ptypes.Timestamp(t.GetCreatedAt())\n\tif err != nil {\n\t\treturn nil, gqlerror.Errorf(\"error marshalling todo: %v\", err)\n\t}\n\ttodo.CreatedAt = createdAt\n\n\treturn &todo, nil\n}", "func (in *Store) ToProto() 
*iotextypes.BlockStore {\n\treceipts := []*iotextypes.Receipt{}\n\tfor _, r := range in.Receipts {\n\t\treceipts = append(receipts, r.ConvertToReceiptPb())\n\t}\n\treturn &iotextypes.BlockStore{\n\t\tBlock: in.Block.ConvertToBlockPb(),\n\t\tReceipts: receipts,\n\t}\n}", "func (b *BalanceAs) toProto() (*proto.Balance, error) {\n\tpb := &proto.Balance{\n\t\tSymbol: b.CoinBalance.Symbol,\n\t\tExchange: b.Exchange,\n\t\tFree: float32(b.Free),\n\t\tLocked: float32(b.Locked),\n\t\tTotal: float32(b.Total),\n\t\tAs: string(b.As),\n\t\tPrice: float32(b.Price),\n\t\tValue: float32(b.Value),\n\t\tPrice24H: float32(b.Price24H),\n\t\tValue24H: float32(b.Value24H),\n\t\tChange24H: float32(b.Change24H),\n\t\tChangePct24H: float32(b.ChangePct24H),\n\t}\n\n\tts, err := tspb.TimestampProto(b.At)\n\tif err != nil {\n\t\treturn nil, err\n\t} else {\n\t\tpb.At = ts\n\t}\n\n\tif b.BuyStrategy != nil {\n\t\tif pb.BuyStrategy, err = strategyToProto(b.BuyStrategy); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tif b.SellStrategy != nil {\n\t\tif pb.SellStrategy, err = strategyToProto(b.SellStrategy); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn pb, nil\n}", "func (m *GroupORM) AfterToPB(ctx context.Context, a *Group) error {\n\n\tfor _, item := range m.UserList {\n\t\tid, err := resource.Encode(&User{}, item.Id)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\ta.UserList = append(a.UserList, id)\n\t}\n\treturn nil\n}", "func TransactionFromPB(t *pb.Transaction) (Transaction, error) {\n\tvar (\n\t\tid uuid.UUID\n\t\terr error\n\t)\n\n\tif t.Id == \"\" {\n\t\tid = uuid.Nil\n\t} else {\n\t\tid, err = uuid.FromString(t.Id)\n\t\tif err != nil {\n\t\t\treturn Transaction{}, err\n\t\t}\n\t}\n\n\tdate, err := time.Parse(\"2006-01-02\", t.Date)\n\tif err != nil {\n\t\treturn Transaction{}, err\n\t}\n\n\tpostings, err := PostingsFromPB(&pb.Postings{\n\t\tData: t.Postings,\n\t})\n\tif err != nil {\n\t\treturn Transaction{}, err\n\t}\n\n\ttransaction := 
Transaction{}\n\ttransaction.ID = id\n\ttransaction.Date = date\n\ttransaction.Entity = t.Entity\n\ttransaction.Reference = t.Reference\n\ttransaction.Hash = t.Hash\n\ttransaction.Postings = postings\n\n\treturn transaction, nil\n}", "func ToWireFormat(data []byte, storage string) ([]byte, error) {\n\tprototypeType, found := storageToType[storage]\n\tif !found {\n\t\treturn nil, fmt.Errorf(\"unknown storage type: %v\", storage)\n\t}\n\n\tobj := reflect.New(prototypeType).Interface()\n\terr := yaml.Unmarshal(data, obj)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn json.Marshal(obj)\n}", "func NewCaptureFromPB(pbCap *pb.BGPCapture) (*Capture, error) {\n\tcap := &Capture{fromTable: \"\", ID: \"\"}\n\tvar err error\n\n\tcap.Timestamp, cap.ColIP, err = util.GetTimeColIP(pbCap)\n\tif err != nil {\n\t\treturn nil, dbLogger.Errorf(\"unable to parse collector IP: %s\", err)\n\t}\n\n\tcap.PeerIP, err = util.GetPeerIP(pbCap)\n\tif err != nil {\n\t\treturn nil, dbLogger.Errorf(\"unable to parse peer IP: %s\", err)\n\t}\n\n\t// Ignoring the error here as this message could only have withdraws.\n\tcap.ASPath, _ = util.GetASPath(pbCap)\n\n\tcap.Origin = 0\n\tif len(cap.ASPath) != 0 {\n\t\tcap.Origin = cap.ASPath[len(cap.ASPath)-1]\n\t}\n\n\tcap.NextHop, err = util.GetNextHop(pbCap)\n\tif err != nil {\n\t\tcap.NextHop = net.IPv4(0, 0, 0, 0)\n\t}\n\n\t// Here if it errors and the return is nil, PrefixToPQArray should leave it and the schema should insert the default\n\tcap.Advertised, _ = util.GetAdvertisedPrefixes(pbCap)\n\tcap.Withdrawn, _ = util.GetWithdrawnPrefixes(pbCap)\n\n\treturn cap, nil\n}", "func AppengineDomainMappingResourceRecordsToProto(o *appengine.DomainMappingResourceRecords) *appenginepb.AppengineDomainMappingResourceRecords {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &appenginepb.AppengineDomainMappingResourceRecords{\n\t\tName: dcl.ValueOrEmptyString(o.Name),\n\t\tRrdata: dcl.ValueOrEmptyString(o.Rrdata),\n\t\tType: 
AppengineDomainMappingResourceRecordsTypeEnumToProto(o.Type),\n\t}\n\treturn p\n}", "func CloudkmsBetaCryptoKeyPrimaryToProto(o *beta.CryptoKeyPrimary) *betapb.CloudkmsBetaCryptoKeyPrimary {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &betapb.CloudkmsBetaCryptoKeyPrimary{}\n\tp.SetName(dcl.ValueOrEmptyString(o.Name))\n\tp.SetState(CloudkmsBetaCryptoKeyPrimaryStateEnumToProto(o.State))\n\tp.SetProtectionLevel(CloudkmsBetaCryptoKeyPrimaryProtectionLevelEnumToProto(o.ProtectionLevel))\n\tp.SetAlgorithm(CloudkmsBetaCryptoKeyPrimaryAlgorithmEnumToProto(o.Algorithm))\n\tp.SetAttestation(CloudkmsBetaCryptoKeyPrimaryAttestationToProto(o.Attestation))\n\tp.SetCreateTime(dcl.ValueOrEmptyString(o.CreateTime))\n\tp.SetGenerateTime(dcl.ValueOrEmptyString(o.GenerateTime))\n\tp.SetDestroyTime(dcl.ValueOrEmptyString(o.DestroyTime))\n\tp.SetDestroyEventTime(dcl.ValueOrEmptyString(o.DestroyEventTime))\n\tp.SetImportJob(dcl.ValueOrEmptyString(o.ImportJob))\n\tp.SetImportTime(dcl.ValueOrEmptyString(o.ImportTime))\n\tp.SetImportFailureReason(dcl.ValueOrEmptyString(o.ImportFailureReason))\n\tp.SetExternalProtectionLevelOptions(CloudkmsBetaCryptoKeyPrimaryExternalProtectionLevelOptionsToProto(o.ExternalProtectionLevelOptions))\n\tp.SetReimportEligible(dcl.ValueOrEmptyBool(o.ReimportEligible))\n\treturn p\n}", "func (s *subscription) AsPB() *annotatedvalue.Subscription {\n\treturn &annotatedvalue.Subscription{\n\t\tID: s.GetID(),\n\t}\n}", "func (e *Entity) createP2PProtobuf() *entitypb.Entity {\n\tdids := identity.DIDsToBytes(e.Identity)\n\treturn &entitypb.Entity{\n\t\tIdentity: dids[0],\n\t\tLegalName: e.LegalName,\n\t\tAddresses: e.Addresses,\n\t\tPaymentDetails: e.PaymentDetails,\n\t\tContacts: e.Contacts,\n\t}\n}", "func PostingsFromPB(pb *pb.Postings) (Postings, error) {\n\tvar postings Postings\n\tfor _, v := range pb.Data {\n\t\tposting, err := PostingFromPB(v)\n\t\tif err != nil {\n\t\t\treturn Postings{}, err\n\t\t}\n\t\tpostings = append(postings, posting)\n\t}\n\n\treturn 
postings, nil\n}", "func (p *Purrgil) ParseToByte() ([]byte, error) {\n\treturn yaml.Marshal(p)\n}", "func (m *Mutate) ToProto() proto.Message {\n\tp, _, _ := m.toProto(false, nil)\n\treturn p\n}", "func (blockID *BlockID) ToProto() tmproto.BlockID {\n\tif blockID == nil {\n\t\treturn tmproto.BlockID{}\n\t}\n\n\treturn tmproto.BlockID{\n\t\tHash: blockID.Hash,\n\t\t// PartSetHeader: blockID.PartSetHeader.ToProto(),\n\t}\n}", "func DomainMappingToProto(resource *appengine.DomainMapping) *appenginepb.AppengineDomainMapping {\n\tp := &appenginepb.AppengineDomainMapping{\n\t\tSelfLink: dcl.ValueOrEmptyString(resource.SelfLink),\n\t\tName: dcl.ValueOrEmptyString(resource.Name),\n\t\tSslSettings: AppengineDomainMappingSslSettingsToProto(resource.SslSettings),\n\t\tApp: dcl.ValueOrEmptyString(resource.App),\n\t}\n\tfor _, r := range resource.ResourceRecords {\n\t\tp.ResourceRecords = append(p.ResourceRecords, AppengineDomainMappingResourceRecordsToProto(&r))\n\t}\n\n\treturn p\n}", "func ClouddeployAlphaTargetGkeToProto(o *alpha.TargetGke) *alphapb.ClouddeployAlphaTargetGke {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &alphapb.ClouddeployAlphaTargetGke{}\n\tp.SetCluster(dcl.ValueOrEmptyString(o.Cluster))\n\tp.SetInternalIp(dcl.ValueOrEmptyBool(o.InternalIP))\n\treturn p\n}", "func ProtoToInstance(p *sqlpb.SqlInstance) *sql.Instance {\n\tobj := &sql.Instance{\n\t\tBackendType: ProtoToSqlInstanceBackendTypeEnum(p.GetBackendType()),\n\t\tConnectionName: dcl.StringOrNil(p.ConnectionName),\n\t\tDatabaseVersion: ProtoToSqlInstanceDatabaseVersionEnum(p.GetDatabaseVersion()),\n\t\tEtag: dcl.StringOrNil(p.Etag),\n\t\tGceZone: dcl.StringOrNil(p.GceZone),\n\t\tInstanceType: ProtoToSqlInstanceInstanceTypeEnum(p.GetInstanceType()),\n\t\tMasterInstanceName: dcl.StringOrNil(p.MasterInstanceName),\n\t\tMaxDiskSize: ProtoToSqlInstanceMaxDiskSize(p.GetMaxDiskSize()),\n\t\tName: dcl.StringOrNil(p.Name),\n\t\tProject: dcl.StringOrNil(p.Project),\n\t\tRegion: 
dcl.StringOrNil(p.Region),\n\t\tRootPassword: dcl.StringOrNil(p.RootPassword),\n\t\tCurrentDiskSize: ProtoToSqlInstanceCurrentDiskSize(p.GetCurrentDiskSize()),\n\t\tDiskEncryptionConfiguration: ProtoToSqlInstanceDiskEncryptionConfiguration(p.GetDiskEncryptionConfiguration()),\n\t\tFailoverReplica: ProtoToSqlInstanceFailoverReplica(p.GetFailoverReplica()),\n\t\tMasterInstance: ProtoToSqlInstanceMasterInstance(p.GetMasterInstance()),\n\t\tReplicaConfiguration: ProtoToSqlInstanceReplicaConfiguration(p.GetReplicaConfiguration()),\n\t\tScheduledMaintenance: ProtoToSqlInstanceScheduledMaintenance(p.GetScheduledMaintenance()),\n\t\tSettings: ProtoToSqlInstanceSettings(p.GetSettings()),\n\t}\n\tfor _, r := range p.GetIpAddresses() {\n\t\tobj.IPAddresses = append(obj.IPAddresses, *ProtoToSqlInstanceIPAddresses(r))\n\t}\n\treturn obj\n}", "func CloudkmsBetaCryptoKeyPrimaryAttestationToProto(o *beta.CryptoKeyPrimaryAttestation) *betapb.CloudkmsBetaCryptoKeyPrimaryAttestation {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &betapb.CloudkmsBetaCryptoKeyPrimaryAttestation{}\n\tp.SetFormat(CloudkmsBetaCryptoKeyPrimaryAttestationFormatEnumToProto(o.Format))\n\tp.SetContent(dcl.ValueOrEmptyString(o.Content))\n\tp.SetCertChains(CloudkmsBetaCryptoKeyPrimaryAttestationCertChainsToProto(o.CertChains))\n\treturn p\n}", "func (o *KanbanPartial) ToMap() map[string]interface{} {\n\tkv := map[string]interface{}{\n\t\t\"active\": toKanbanObject(o.Active, true),\n\t\t\"board_id\": toKanbanObject(o.BoardID, true),\n\t\t\"columns\": toKanbanObject(o.Columns, true),\n\t\t\"deleted\": toKanbanObject(o.Deleted, true),\n\t\t\"issue_ids\": toKanbanObject(o.IssueIds, true),\n\t\t\"name\": toKanbanObject(o.Name, true),\n\t\t\"project_ids\": toKanbanObject(o.ProjectIds, true),\n\t\t\"updated_date\": toKanbanObject(o.UpdatedDate, true),\n\t\t\"url\": toKanbanObject(o.URL, true),\n\t}\n\tfor k, v := range kv {\n\t\tif v == nil || reflect.ValueOf(v).IsZero() {\n\t\t\tdelete(kv, k)\n\t\t} else 
{\n\n\t\t\tif k == \"columns\" {\n\t\t\t\tif arr, ok := v.([]KanbanColumns); ok {\n\t\t\t\t\tif len(arr) == 0 {\n\t\t\t\t\t\tdelete(kv, k)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif k == \"issue_ids\" {\n\t\t\t\tif arr, ok := v.([]string); ok {\n\t\t\t\t\tif len(arr) == 0 {\n\t\t\t\t\t\tdelete(kv, k)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif k == \"project_ids\" {\n\t\t\t\tif arr, ok := v.([]string); ok {\n\t\t\t\t\tif len(arr) == 0 {\n\t\t\t\t\t\tdelete(kv, k)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif k == \"updated_date\" {\n\t\t\t\tif dt, ok := v.(*KanbanUpdatedDate); ok {\n\t\t\t\t\tif dt.Epoch == 0 && dt.Offset == 0 && dt.Rfc3339 == \"\" {\n\t\t\t\t\t\tdelete(kv, k)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn kv\n}", "func (blockID *BlockID) ToProto() tmproto.BlockID {\n\tif blockID == nil {\n\t\treturn tmproto.BlockID{}\n\t}\n\n\treturn tmproto.BlockID{\n\t\tHash: blockID.Hash,\n\t\tPartSetHeader: blockID.PartSetHeader.ToProto(),\n\t}\n}", "func (blockID *BlockID) ToProto() tmproto.BlockID {\n\tif blockID == nil {\n\t\treturn tmproto.BlockID{}\n\t}\n\n\treturn tmproto.BlockID{\n\t\tHash: blockID.Hash,\n\t\tPartSetHeader: blockID.PartSetHeader.ToProto(),\n\t}\n}", "func ClouddeployAlphaTargetRunToProto(o *alpha.TargetRun) *alphapb.ClouddeployAlphaTargetRun {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &alphapb.ClouddeployAlphaTargetRun{}\n\tp.SetLocation(dcl.ValueOrEmptyString(o.Location))\n\treturn p\n}", "func go2pbStmts(rootPkg *amino.Package, isRoot bool, imports *ast.GenDecl, scope *ast.Scope, pbo ast.Expr, goo ast.Expr, gooIsPtr bool, gooType *amino.TypeInfo, fopts amino.FieldOptions, options uint64) (b []ast.Stmt) {\n\n\tconst (\n\t\toption_bytes = 0x01 // if goo's repr is uint8 as an element of bytes.\n\t\toption_implicit_list = 0x02 // if goo is a repeated list & also an element.\n\t)\n\n\t// Special case if nil-pointer.\n\tif gooIsPtr || gooType.Type.Kind() == reflect.Interface {\n\t\tdefer func() {\n\t\t\t// Wrap penultimate b 
with if statement.\n\t\t\tb = []ast.Stmt{_if(_b(goo, \"!=\", _i(\"nil\")),\n\t\t\t\tb...,\n\t\t\t)}\n\t\t}()\n\t}\n\t// Below, we can assume that goo isn't nil.\n\n\t// External case.\n\t// If gooType is registered, just call ToPBMessage.\n\t// TODO If not registered?\n\tif !isRoot && gooType.Registered && hasPBBindings(gooType) && (options&option_bytes == 0) {\n\t\t// Call ToPBMessage().\n\t\tpbote_ := p3goTypeExprString(rootPkg, imports, scope, gooType, fopts)\n\t\tpbom_ := addVarUniq(scope, \"pbom\")\n\t\tb = append(b,\n\t\t\t_a(pbom_, \":=\", _x(\"proto.Message~(~nil~)\")),\n\t\t\t_a(pbom_, _i(\"err\"), \"=\", _call(_sel(goo, \"ToPBMessage\"), _i(\"cdc\"))),\n\t\t\t_if(_x(\"err__!=__nil\"),\n\t\t\t\t_return(),\n\t\t\t),\n\t\t\t_a(pbo, \"=\", _x(\"%v.~(~%v~)\", pbom_, pbote_)),\n\t\t)\n\t\tif gooIsPtr {\n\t\t\tif pbote_[0] != '*' {\n\t\t\t\tpanic(\"expected pointer kind for p3goTypeExprString (of registered type)\")\n\t\t\t}\n\t\t\tdpbote_ := pbote_[1:]\n\t\t\tb = append(b,\n\t\t\t\t_if(_b(pbo, \"==\", \"nil\"),\n\t\t\t\t\t_a(pbo, \"=\", _x(\"new~(~%v~)\", dpbote_))))\n\t\t}\n\t\treturn\n\t}\n\n\t// Use *goor* for goo's repr.\n\tvar goor ast.Expr\n\tvar goorType *amino.TypeInfo\n\n\t// Maybe wrap pbo.\n\t// NOTE: Instead of writing code to determine the .Value type,\n\t// just lazily construct before assigning to pbo.\n\tvar wrapImplicitStruct bool\n\tvar maybeWrap = func(goor ast.Expr) ast.Expr {\n\t\tif wrapImplicitStruct {\n\t\t\tpbote_ := p3goTypeExprString(rootPkg, imports, scope, gooType, fopts)\n\t\t\tif pbote_[0] != '*' {\n\t\t\t\tpanic(\"expected pointer kind for p3goTypeExprString (of type to be wrapped)\")\n\t\t\t}\n\t\t\tdpbote_ := pbote_[1:]\n\t\t\treturn _ref(&ast.CompositeLit{\n\t\t\t\tType: _x(dpbote_),\n\t\t\t\tElts: []ast.Expr{_kv(\"Value\", goor)},\n\t\t\t\tIncomplete: false,\n\t\t\t})\n\t\t} else {\n\t\t\treturn goor\n\t\t}\n\t}\n\n\t// Special case if IsAminoMarshaler.\n\tif gooType.IsAminoMarshaler {\n\t\t// First, derive repr 
instance.\n\t\tgoor_ := addVarUniq(scope, \"goor\")\n\t\terr_ := addVarUniq(scope, \"err\") // do not shadow original err\n\t\tb = append(b,\n\t\t\t_a(goor_, err_, \":=\", _call(_sel(goo, \"MarshalAmino\"))),\n\t\t\t_if(_x(\"%v__!=__nil\", err_),\n\t\t\t\t_return(_x(\"nil\"), _i(err_)),\n\t\t\t),\n\t\t)\n\t\t// If gooType is struct or is registered non-native, but the repr type\n\t\t// isn't struct, an implicit struct is needed.\n\t\t// If option_bytes, special case as we will encode as uint8.\n\t\tif (gooType.Type.Kind() == reflect.Struct ||\n\t\t\t(gooType.Package != nil && gooType.Package.GoPkgPath != \"\")) &&\n\t\t\tgooType.ReprType.Type.Kind() != reflect.Struct &&\n\t\t\toptions&option_bytes == 0 {\n\n\t\t\tif gooType.ReprType.Type.Kind() == reflect.Interface {\n\t\t\t\tpanic(\"not yet tested\")\n\t\t\t}\n\t\t\twrapImplicitStruct = true\n\t\t}\n\t\t// Assign *goor*.\n\t\tgoor = _i(goor_)\n\t\tgoorType = gooType.ReprType\n\t} else {\n\t\t// If gooType is registered non-native, but the repr type isn't struct\n\t\t// nor interface, an implicit struct is needed. 
(if not amino\n\t\t// marshaler and isn't struct (nor interface), but isn't registered,\n\t\t// not a p3 message).\n\t\tif gooType.Package != nil &&\n\t\t\tgooType.Package.GoPkgPath != \"\" &&\n\t\t\tgooType.Type.Kind() != reflect.Struct &&\n\t\t\tgooType.Type.Kind() != reflect.Interface {\n\n\t\t\twrapImplicitStruct = true\n\t\t}\n\t\t// Assign *goor*.\n\t\tgoor = goo\n\t\tgoorType = gooType\n\t\tif gooIsPtr {\n\t\t\tdgoor_ := addVarUniq(scope, \"dgoor\")\n\t\t\tb = append(b,\n\t\t\t\t_a(dgoor_, \":=\", _deref(goor)),\n\t\t\t\t_a(dgoor_, \"=\", dgoor_)) // XXX\n\t\t\tgoor = _i(dgoor_)\n\t\t}\n\t}\n\t// Below, goor is dereferenced if goo is pointer..\n\n\t// Special case, time & duration.\n\tswitch goorType.Type {\n\tcase timeType:\n\t\tpkgName := addImportAuto(\n\t\t\timports, scope, \"timestamppb\", \"google.golang.org/protobuf/types/known/timestamppb\")\n\t\tif gooIsPtr { // (non-nil)\n\t\t\tb = append(b,\n\t\t\t\t_a(pbo, \"=\", _call(_sel(_x(pkgName), \"New\"), goor)))\n\t\t} else {\n\t\t\tb = append(b,\n\t\t\t\t_if(_not(_call(_x(\"amino.IsEmptyTime\"), goor)),\n\t\t\t\t\t_a(pbo, \"=\", _call(_sel(_x(pkgName), \"New\"), goor))))\n\t\t}\n\t\treturn\n\tcase durationType:\n\t\tpkgName := addImportAuto(\n\t\t\timports, scope, \"durationpb\", \"google.golang.org/protobuf/types/known/durationpb\")\n\t\tif gooIsPtr { // (non-nil)\n\t\t\tb = append(b,\n\t\t\t\t_a(pbo, \"=\", _call(_sel(_x(pkgName), \"New\"), goor)))\n\t\t} else {\n\t\t\tb = append(b,\n\t\t\t\t_if(_b(_call(_sel(goor, \"Nanoseconds\")), \"!=\", \"0\"),\n\t\t\t\t\t_a(pbo, \"=\", _call(_sel(_x(pkgName), \"New\"), goor))))\n\t\t}\n\t\treturn\n\t}\n\n\t// Special case, external empty types.\n\tif gooType.Registered && hasPBBindings(gooType) {\n\t\tif isRoot {\n\t\t\tpbote_ := p3goTypeExprString(rootPkg, imports, scope, gooType, fopts)\n\t\t\tpbov_ := addVarUniq(scope, \"pbov\")\n\t\t\tb = append(b,\n\t\t\t\t_if(_call(_x(\"Is%vReprEmpty\", gooType.Name), goor),\n\t\t\t\t\t_var(pbov_, _x(pbote_), 
nil),\n\t\t\t\t\t_a(\"msg\", \"=\", pbov_),\n\t\t\t\t\t_return()))\n\t\t} else if !gooIsPtr {\n\t\t\tpkgPrefix := goPkgPrefix(rootPkg, gooType.Type, gooType, imports, scope)\n\t\t\t// b switcharoo pattern\n\t\t\t// statements after this pattern appended to b\n\t\t\t// will come after the injected if-condition.\n\t\t\toldb := b\n\t\t\tb = []ast.Stmt(nil)\n\t\t\tdefer func() {\n\t\t\t\tnewb := b // named for clarity\n\t\t\t\tb = append(oldb,\n\t\t\t\t\t_if(_not(_call(_x(\"%vIs%vReprEmpty\", pkgPrefix, gooType.Name), goor)),\n\t\t\t\t\t\tnewb...))\n\t\t\t}()\n\t\t\t// end b switcharoo pattern\n\t\t}\n\t}\n\n\t// General case\n\tswitch goork := goorType.Type.Kind(); goork {\n\n\tcase reflect.Interface:\n\t\ttypeUrl_ := addVarUniq(scope, \"typeUrl\")\n\t\tbz_ := addVarUniq(scope, \"bz\")\n\t\tanyte_ := p3goTypeExprString(rootPkg, imports, scope, gooType, fopts)\n\t\tif anyte_[0] != '*' {\n\t\t\tpanic(\"expected pointer kind for p3goTypeExprString (of interface type)\")\n\t\t}\n\t\tdanyte_ := anyte_[1:]\n\t\tb = append(b,\n\t\t\t_a(typeUrl_, \":=\", _call(_x(\"cdc.GetTypeURL\"), goo)),\n\t\t\t_a(bz_, \":=\", \"[]byte~(~nil~)\"),\n\t\t\t_a(bz_, \"err\", \"=\", _call(_x(\"cdc.Marshal\"), goor)),\n\t\t\t_if(_x(\"err__!=__nil\"),\n\t\t\t\t_return(),\n\t\t\t),\n\t\t\t_a(pbo, \"=\", _x(\"&%v~{~TypeUrl:typeUrl,Value:bz~}\", danyte_)),\n\t\t)\n\n\tcase reflect.Int:\n\t\tb = append(b,\n\t\t\t_a(pbo, \"=\", maybeWrap(_call(_i(\"int64\"), goor))))\n\tcase reflect.Int16, reflect.Int8:\n\t\tb = append(b,\n\t\t\t_a(pbo, \"=\", maybeWrap(_call(_i(\"int32\"), goor))))\n\tcase reflect.Uint:\n\t\tb = append(b,\n\t\t\t_a(pbo, \"=\", maybeWrap(_call(_i(\"uint64\"), goor))))\n\tcase reflect.Uint16:\n\t\tb = append(b,\n\t\t\t_a(pbo, \"=\", maybeWrap(_call(_i(\"uint32\"), goor))))\n\tcase reflect.Uint8:\n\t\tif options&option_bytes == 0 {\n\t\t\tb = append(b,\n\t\t\t\t_a(pbo, \"=\", maybeWrap(_call(_i(\"uint32\"), goor))))\n\t\t} else {\n\t\t\tb = append(b,\n\t\t\t\t_a(pbo, \"=\", 
_call(_i(\"byte\"), goor)))\n\t\t}\n\n\tcase reflect.Array, reflect.Slice:\n\t\tvar newoptions uint64\n\t\tvar gooreIsPtr = goorType.ElemIsPtr\n\t\tvar gooreType = goorType.Elem\n\t\tvar dpbote_ string\n\t\tvar pboIsImplicit = isImplicitList(goorType, fopts)\n\t\tvar pboeIsImplicit = isImplicitList(gooreType, fopts)\n\t\tvar pbote_ = p3goTypeExprString(rootPkg, imports, scope, gooType, fopts)\n\t\tvar pboete_ = p3goTypeExprString(rootPkg, imports, scope, gooreType, fopts)\n\n\t\tif gooreType.ReprType.Type.Kind() == reflect.Uint8 {\n\t\t\t// Special bytes optimization for recursive case.\n\t\t\tpboete_ = \"uint8\"\n\t\t\tnewoptions |= option_bytes\n\t\t} else if pboeIsImplicit {\n\t\t\t// Special implicit list struct for recursive call.\n\t\t\tnewoptions |= option_implicit_list\n\t\t}\n\n\t\t// Iff also option & option_implicit_list, wrap with implicit list struct.\n\t\tif pboIsImplicit {\n\t\t\tif pbote_[0] != '*' {\n\t\t\t\tpanic(\"expected pointer kind for p3goTypeExprString (of implicit list-struct type)\")\n\t\t\t}\n\t\t\tdpbote_ = pbote_[1:]\n\t\t} else {\n\t\t\tdpbote_ = \"XXX\" // needed for _x() parsing regardless of _ctif condition.\n\t\t}\n\n\t\t// Construct, translate, assign.\n\t\tgoorl_ := addVarUniq(scope, \"goorl\")\n\t\tpbos_ := addVarUniq(scope, \"pbos\")\n\t\tscope2 := ast.NewScope(scope)\n\t\taddVars(scope2, \"i\", \"goore\", \"pbose\")\n\t\tb = append(b,\n\t\t\t_a(goorl_, \":=\", _len(goor)),\n\t\t\t_ife(_x(\"%v__==__0\", goorl_),\n\t\t\t\t_block( // then\n\t\t\t\t\t// Prefer nil for empty slices for less gc overhead.\n\t\t\t\t\t_a(pbo, \"=\", _i(\"nil\")),\n\t\t\t\t),\n\t\t\t\t_block( // else\n\t\t\t\t\t_var(pbos_, nil, _x(\"make~(~[]%v,%v~)\", pboete_, goorl_)),\n\t\t\t\t\t_for(\n\t\t\t\t\t\t_a(\"i\", \":=\", \"0\"),\n\t\t\t\t\t\t_x(\"i__<__%v\", goorl_),\n\t\t\t\t\t\t_a(\"i\", \"+=\", \"1\"),\n\t\t\t\t\t\t_block(\n\t\t\t\t\t\t\t// Translate in place.\n\t\t\t\t\t\t\t_a(\"goore\", \":=\", _idx(goor, 
_i(\"i\"))),\n\t\t\t\t\t\t\t_block(go2pbStmts(rootPkg, false, imports, scope2, _x(\"%v~[~i~]\", pbos_), _i(\"goore\"), gooreIsPtr, gooreType, fopts, newoptions)...),\n\t\t\t\t\t\t),\n\t\t\t\t\t),\n\t\t\t\t\t_ctif((pboIsImplicit && options&option_implicit_list != 0), // compile time if\n\t\t\t\t\t\t_a(pbo, \"=\", _x(\"&%v~{~Value:%v~}\", dpbote_, pbos_)), // then\n\t\t\t\t\t\t_a(pbo, \"=\", maybeWrap(_i(pbos_))), // else\n\t\t\t\t\t),\n\t\t\t\t)))\n\n\tcase reflect.Struct:\n\t\tpbote_ := p3goTypeExprString(rootPkg, imports, scope, gooType, fopts)\n\t\tif pbote_[0] != '*' {\n\t\t\tpanic(\"expected pointer kind for p3goTypeExprString of struct type\")\n\t\t}\n\t\tdpbote_ := pbote_[1:]\n\n\t\tb = append(b,\n\t\t\t_a(pbo, \"=\", _x(\"new~(~%v~)\", dpbote_)))\n\n\t\tfor _, field := range goorType.Fields {\n\t\t\tvar goorfIsPtr = field.IsPtr()\n\t\t\tvar goorfType = field.TypeInfo.ReprType\n\t\t\tvar goorf = _sel(goor, field.Name) // next goo\n\t\t\tvar pbof = _sel(pbo, field.Name) // next pbo\n\n\t\t\t// Translate in place.\n\t\t\tscope2 := ast.NewScope(scope)\n\t\t\tb = append(b,\n\t\t\t\t_block(go2pbStmts(rootPkg, false, imports, scope2, pbof, goorf, goorfIsPtr, goorfType, field.FieldOptions, 0)...),\n\t\t\t)\n\t\t}\n\n\tdefault:\n\t\t// General translation.\n\t\tb = append(b,\n\t\t\t_a(pbo, \"=\", maybeWrap(_call(_i(goork.String()), goor))))\n\n\t}\n\treturn b\n}", "func (mb *MutableBag) ToProto(output *mixerpb.CompressedAttributes, globalDict map[string]int32, globalWordCount int) {\n\tds := newDictState(globalDict, globalWordCount)\n\tkeys := mb.Names()\n\n\tfor _, k := range keys {\n\t\tindex := ds.assignDictIndex(k)\n\t\tv, _ := mb.Get(k) // if not found, nil return will be ignored by the switch below\n\n\t\tswitch t := v.(type) {\n\t\tcase string:\n\t\t\tif output.Strings == nil {\n\t\t\t\toutput.Strings = make(map[int32]int32)\n\t\t\t}\n\t\t\toutput.Strings[index] = ds.assignDictIndex(t)\n\n\t\tcase int64:\n\t\t\tif output.Int64S == nil {\n\t\t\t\toutput.Int64S 
= make(map[int32]int64)\n\t\t\t}\n\t\t\toutput.Int64S[index] = t\n\n\t\tcase int:\n\t\t\tif output.Int64S == nil {\n\t\t\t\toutput.Int64S = make(map[int32]int64)\n\t\t\t}\n\t\t\toutput.Int64S[index] = int64(t)\n\n\t\tcase float64:\n\t\t\tif output.Doubles == nil {\n\t\t\t\toutput.Doubles = make(map[int32]float64)\n\t\t\t}\n\t\t\toutput.Doubles[index] = t\n\n\t\tcase bool:\n\t\t\tif output.Bools == nil {\n\t\t\t\toutput.Bools = make(map[int32]bool)\n\t\t\t}\n\t\t\toutput.Bools[index] = t\n\n\t\tcase time.Time:\n\t\t\tif output.Timestamps == nil {\n\t\t\t\toutput.Timestamps = make(map[int32]time.Time)\n\t\t\t}\n\t\t\toutput.Timestamps[index] = t\n\n\t\tcase time.Duration:\n\t\t\tif output.Durations == nil {\n\t\t\t\toutput.Durations = make(map[int32]time.Duration)\n\t\t\t}\n\t\t\toutput.Durations[index] = t\n\n\t\tcase []byte:\n\t\t\tif output.Bytes == nil {\n\t\t\t\toutput.Bytes = make(map[int32][]byte)\n\t\t\t}\n\t\t\toutput.Bytes[index] = t\n\n\t\tcase map[string]string:\n\t\t\tsm := make(map[int32]int32, len(t))\n\t\t\tfor smk, smv := range t {\n\t\t\t\tsm[ds.assignDictIndex(smk)] = ds.assignDictIndex(smv)\n\t\t\t}\n\n\t\t\tif output.StringMaps == nil {\n\t\t\t\toutput.StringMaps = make(map[int32]mixerpb.StringMap)\n\t\t\t}\n\t\t\toutput.StringMaps[index] = mixerpb.StringMap{Entries: sm}\n\n\t\tdefault:\n\t\t\tpanic(fmt.Errorf(\"cannot convert value:%v of type:%T\", v, v))\n\t\t}\n\t}\n\n\toutput.Words = ds.getMessageWordList()\n}", "func reposetToPBData(reposet ReposetProps) ([]byte, error) {\n\n data, err := reposet.Marshal()\n if err != nil {\n return nil, err\n }\n\n pbreposet := new(pb.IndexNode)\n typ := pb.IndexNode_Reposet\n pbreposet.Type = typ\n pbreposet.Data = data\n\n data, err = proto.Marshal(pbreposet)\n\n return data, nil\n}", "func ComputeBetaInstanceTemplatePropertiesDisksToProto(o *beta.InstanceTemplatePropertiesDisks) *betapb.ComputeBetaInstanceTemplatePropertiesDisks {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := 
&betapb.ComputeBetaInstanceTemplatePropertiesDisks{\n\t\tAutoDelete: dcl.ValueOrEmptyBool(o.AutoDelete),\n\t\tBoot: dcl.ValueOrEmptyBool(o.Boot),\n\t\tDeviceName: dcl.ValueOrEmptyString(o.DeviceName),\n\t\tDiskEncryptionKey: ComputeBetaInstanceTemplatePropertiesDisksDiskEncryptionKeyToProto(o.DiskEncryptionKey),\n\t\tIndex: dcl.ValueOrEmptyInt64(o.Index),\n\t\tInitializeParams: ComputeBetaInstanceTemplatePropertiesDisksInitializeParamsToProto(o.InitializeParams),\n\t\tInterface: ComputeBetaInstanceTemplatePropertiesDisksInterfaceEnumToProto(o.Interface),\n\t\tMode: ComputeBetaInstanceTemplatePropertiesDisksModeEnumToProto(o.Mode),\n\t\tSource: dcl.ValueOrEmptyString(o.Source),\n\t\tType: ComputeBetaInstanceTemplatePropertiesDisksTypeEnumToProto(o.Type),\n\t}\n\tfor _, r := range o.GuestOSFeatures {\n\t\tp.GuestOsFeatures = append(p.GuestOsFeatures, ComputeBetaInstanceTemplatePropertiesDisksGuestOSFeaturesToProto(&r))\n\t}\n\treturn p\n}", "func ComputeBetaInstanceTemplatePropertiesShieldedInstanceConfigToProto(o *beta.InstanceTemplatePropertiesShieldedInstanceConfig) *betapb.ComputeBetaInstanceTemplatePropertiesShieldedInstanceConfig {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &betapb.ComputeBetaInstanceTemplatePropertiesShieldedInstanceConfig{\n\t\tEnableSecureBoot: dcl.ValueOrEmptyBool(o.EnableSecureBoot),\n\t\tEnableVtpm: dcl.ValueOrEmptyBool(o.EnableVtpm),\n\t\tEnableIntegrityMonitoring: dcl.ValueOrEmptyBool(o.EnableIntegrityMonitoring),\n\t}\n\treturn p\n}", "func EncodePb(p proto.Message) (data []byte, err error) {\n\tdata, err = proto.Marshal(p)\n\treturn\n}", "func yamlToProto(path string, v proto.Message) error {\n\tyamlBytes, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn errors.Trace(err)\n\t}\n\tjsonBytes, err := yaml.YAMLToJSONStrict(yamlBytes)\n\tif err != nil {\n\t\treturn errors.Trace(err)\n\t}\n\tr := bytes.NewReader(jsonBytes)\n\terr = pbjson.NewDecoder(r).Decode(v)\n\treturn errors.Trace(err)\n}", "func (i *Invoice) 
loadFromP2PProtobuf(invoiceData *invoicepb.InvoiceData) {\n\ti.InvoiceNumber = invoiceData.InvoiceNumber\n\ti.InvoiceStatus = invoiceData.InvoiceStatus\n\ti.SenderName = invoiceData.SenderName\n\ti.SenderStreet = invoiceData.SenderStreet\n\ti.SenderCity = invoiceData.SenderCity\n\ti.SenderZipcode = invoiceData.SenderZipcode\n\ti.SenderCountry = invoiceData.SenderCountry\n\ti.RecipientName = invoiceData.RecipientName\n\ti.RecipientStreet = invoiceData.RecipientStreet\n\ti.RecipientCity = invoiceData.RecipientCity\n\ti.RecipientZipcode = invoiceData.RecipientZipcode\n\ti.RecipientCountry = invoiceData.RecipientCountry\n\ti.Currency = invoiceData.Currency\n\ti.GrossAmount = invoiceData.GrossAmount\n\ti.NetAmount = invoiceData.NetAmount\n\ti.TaxAmount = invoiceData.TaxAmount\n\ti.TaxRate = invoiceData.TaxRate\n\n\tif invoiceData.Recipient != nil {\n\t\trecipient := identity.NewDIDFromBytes(invoiceData.Recipient)\n\t\ti.Recipient = &recipient\n\t}\n\n\tif invoiceData.Sender != nil {\n\t\tsender := identity.NewDIDFromBytes(invoiceData.Sender)\n\t\ti.Sender = &sender\n\t}\n\n\tif invoiceData.Payee != nil {\n\t\tpayee := identity.NewDIDFromBytes(invoiceData.Payee)\n\t\ti.Payee = &payee\n\t}\n\n\ti.Comment = invoiceData.Comment\n\ti.DueDate = invoiceData.DueDate\n\ti.DateCreated = invoiceData.DateCreated\n\ti.ExtraData = invoiceData.ExtraData\n}", "func (object Object) BTo(value interface{}) Object {\n\treturn object.Property(as.PropertyBTo, value)\n}", "func GenerateGBfromproto(record *bioproto.Genbank) string {\n\tvar stringbuffer bytes.Buffer\n\n\tstringbuffer.WriteString(generateHeaderString(record))\n\tstringbuffer.WriteString(\"FEATURES Location/Qualifiers\\n\")\n\tstringbuffer.WriteString(generateQualifierString(record))\n\tif record.FEATURES != nil {\n\n\t}\n\tif record.CONTIG != \"\" {\n\t\tstringbuffer.WriteString(\"CONTIG \" + record.CONTIG + \"\\n\")\n\t}\n\tstringbuffer.WriteString(\"//\\n\")\n\treturn stringbuffer.String()\n}", "func 
generateMethodsForType(imports *ast.GenDecl, scope *ast.Scope, pkg *amino.Package, info *amino.TypeInfo) (methods []ast.Decl, err error) {\n\tif info.Type.Kind() == reflect.Interface {\n\t\tpanic(\"should not happen\")\n\t}\n\n\tpbote_ := p3goTypeExprString(pkg, imports, scope, info, amino.FieldOptions{})\n\tif pbote_[0] != '*' {\n\t\tpanic(\"expected pointer kind for p3goTypeExprString (of registered type)\")\n\t}\n\tdpbote_ := pbote_[1:]\n\n\t//////////////////\n\t// ToPBMessage()\n\t{\n\t\tscope2 := ast.NewScope(scope)\n\t\taddVars(scope2, \"cdc\", \"goo\", \"pbo\", \"msg\", \"err\")\n\t\t// Set toProto function.\n\t\tmethods = append(methods, _func(\"ToPBMessage\",\n\t\t\t\"goo\", info.Type.Name(),\n\t\t\t_fields(\"cdc\", \"*amino.Codec\"),\n\t\t\t_fields(\"msg\", \"proto.Message\", \"err\", \"error\"),\n\t\t\t_block(\n\t\t\t\t// Body: declaration for pb message.\n\t\t\t\t_var(\"pbo\", _x(pbote_), nil),\n\t\t\t\t// Body: copying over fields.\n\t\t\t\t_block(go2pbStmts(pkg, true, imports, scope2, _i(\"pbo\"), _i(\"goo\"), false, info, amino.FieldOptions{}, 0)...),\n\t\t\t\t// Body: return value.\n\t\t\t\t_a(\"msg\", \"=\", \"pbo\"),\n\t\t\t\t_return(),\n\t\t\t),\n\t\t))\n\t}\n\n\t//////////////////\n\t// EmptyPBMessage()\n\t// Use to create the pbm to proto.Unmarshal to before FromPBMessage.\n\t{\n\t\tscope2 := ast.NewScope(scope)\n\t\taddVars(scope2, \"cdc\", \"goo\", \"pbo\", \"msg\", \"err\")\n\t\t// Set toProto function.\n\t\tmethods = append(methods, _func(\"EmptyPBMessage\",\n\t\t\t\"goo\", info.Type.Name(),\n\t\t\t_fields(\"cdc\", \"*amino.Codec\"),\n\t\t\t_fields(\"msg\", \"proto.Message\"),\n\t\t\t_block(\n\t\t\t\t// Body: declaration for pb message.\n\t\t\t\t_a(\"pbo\", \":=\", _x(\"new~(~%v~)\", dpbote_)),\n\t\t\t\t// Body: return value.\n\t\t\t\t_a(\"msg\", \"=\", \"pbo\"),\n\t\t\t\t_return(),\n\t\t\t),\n\t\t))\n\t}\n\n\t//////////////////\n\t// FromPBMessage()\n\t{\n\t\tscope2 := ast.NewScope(scope)\n\t\taddVars(scope2, \"cdc\", \"goo\", \"pbo\", 
\"msg\", \"err\")\n\t\tmethods = append(methods, _func(\"FromPBMessage\",\n\t\t\t\"goo\", \"*\"+info.Type.Name(),\n\t\t\t_fields(\"cdc\", \"*amino.Codec\", \"msg\", \"proto.Message\"),\n\t\t\t_fields(\"err\", \"error\"),\n\t\t\t_block(\n\t\t\t\t// Body: declaration for pb message.\n\t\t\t\t_var(\"pbo\", _x(pbote_),\n\t\t\t\t\t_x(\"%v.~(~%v~)\", \"msg\", pbote_)),\n\t\t\t\t// Body: copying over fields.\n\t\t\t\t_block(pb2goStmts(pkg, true, imports, scope2, _i(\"goo\"), true, info, _i(\"pbo\"), amino.FieldOptions{}, 0)...),\n\t\t\t\t// Body: return.\n\t\t\t\t_return(),\n\t\t\t),\n\t\t))\n\t}\n\n\t//////////////////\n\t// TypeUrl()\n\t{\n\t\tmethods = append(methods, _func(\"GetTypeURL\",\n\t\t\t\"\", info.Type.Name(),\n\t\t\t_fields(),\n\t\t\t_fields(\"typeURL\", \"string\"),\n\t\t\t_block(\n\t\t\t\t_return(_s(info.TypeURL)),\n\t\t\t),\n\t\t))\n\t}\n\n\t//////////////////\n\t// Is*ReprEmpty()\n\t{\n\t\trinfo := info.ReprType\n\t\tscope2 := ast.NewScope(scope)\n\t\taddVars(scope2, \"goo\", \"empty\")\n\t\tgoorte := goTypeExpr(pkg, rinfo.Type, imports, scope2)\n\t\tmethods = append(methods, _func(fmt.Sprintf(\"Is%vReprEmpty\", info.Name),\n\t\t\t\"\", \"\",\n\t\t\t_fields(\"goor\", goorte),\n\t\t\t_fields(\"empty\", \"bool\"),\n\t\t\t_block(\n\t\t\t\t// Body: check fields.\n\t\t\t\t_block(append(\n\t\t\t\t\t[]ast.Stmt{_a(\"empty\", \"=\", \"true\")},\n\t\t\t\t\tisReprEmptyStmts(pkg, true, imports, scope2, _i(\"goor\"), false, info)...,\n\t\t\t\t)...),\n\t\t\t\t// Body: return.\n\t\t\t\t_return(),\n\t\t\t),\n\t\t))\n\t}\n\treturn\n}", "func (op *Operation) ToProto(wrappedID string) *api.UploadOperation {\n\tvar ref *api.ObjectRef\n\tif op.Status == api.UploadStatus_PUBLISHED {\n\t\tref = &api.ObjectRef{\n\t\t\tHashAlgo: op.HashAlgo,\n\t\t\tHexDigest: op.HexDigest,\n\t\t}\n\t}\n\treturn &api.UploadOperation{\n\t\tOperationId: wrappedID,\n\t\tUploadUrl: op.UploadURL,\n\t\tStatus: op.Status,\n\t\tObject: ref,\n\t\tErrorMessage: op.Error,\n\t}\n}", "func MakePbUtxo(op 
*types.OutPoint, uw *types.UtxoWrap) *rpcpb.Utxo {\n\ts := script.NewScriptFromBytes(uw.Script())\n\tvalue := uw.Value()\n\tif s.IsTokenIssue() || s.IsTokenTransfer() {\n\t\tvalue = 0\n\t}\n\treturn &rpcpb.Utxo{\n\t\tBlockHeight: uw.Height(),\n\t\t// IsCoinbase: uw.IsCoinBase(),\n\t\tIsSpent: uw.IsSpent(),\n\t\tOutPoint: NewPbOutPoint(&op.Hash, op.Index),\n\t\tTxOut: &corepb.TxOut{\n\t\t\tValue: value,\n\t\t\tScriptPubKey: uw.Script(),\n\t\t},\n\t}\n}", "func BrandToProto(brand *domain.Brand) *brandproto.Brand {\n\tif brand == nil {\n\t\treturn nil\n\t}\n\n\treturn &brandproto.Brand{\n\t\tId: brand.ID,\n\t\tSlug: brand.Slug,\n\t\tShortName: brand.ShortName,\n\t\tName: brand.Name,\n\t\tDescription: brand.Description,\n\t\tImageId: brand.ImageID,\n\t\tCreatedAt: util.TimeToProto(brand.CreatedAt),\n\t\tUpdatedAt: util.TimeToProto(brand.UpdatedAt),\n\t}\n}", "func BigqueryRoutineArgumentsDataTypeStructTypeFieldsToProto(o *bigquery.RoutineArgumentsDataTypeStructTypeFields) *bigquerypb.BigqueryRoutineArgumentsDataTypeStructTypeFields {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &bigquerypb.BigqueryRoutineArgumentsDataTypeStructTypeFields{}\n\tp.SetName(dcl.ValueOrEmptyString(o.Name))\n\tp.SetType(BigqueryRoutineArgumentsDataTypeToProto(o.Type))\n\treturn p\n}", "func ToProto(sandbox *Sandbox) *types.Sandbox {\n\textensions := make(map[string]*gogo_types.Any)\n\tfor k, v := range sandbox.Extensions {\n\t\textensions[k] = protobuf.FromAny(v)\n\t}\n\treturn &types.Sandbox{\n\t\tSandboxID: sandbox.ID,\n\t\tRuntime: &types.Sandbox_Runtime{\n\t\t\tName: sandbox.Runtime.Name,\n\t\t\tOptions: protobuf.FromAny(sandbox.Runtime.Options),\n\t\t},\n\t\tLabels: sandbox.Labels,\n\t\tCreatedAt: protobuf.ToTimestamp(sandbox.CreatedAt),\n\t\tUpdatedAt: protobuf.ToTimestamp(sandbox.UpdatedAt),\n\t\tExtensions: extensions,\n\t\tSpec: protobuf.FromAny(sandbox.Spec),\n\t}\n}", "func (m *Fields) MarshalTo(data []byte) (int, error) {\n\treturn m.ProtoFields().MarshalTo(data)\n}", "func 
BigIntToProto(x *big.Int) *BigInt {\n\tif x == nil {\n\t\treturn nil\n\t}\n\tpb := new(BigInt)\n\tpb.Raw = x.Bytes()\n\treturn pb\n}", "func InstanceToProto(resource *sql.Instance) *sqlpb.SqlInstance {\n\tp := &sqlpb.SqlInstance{\n\t\tBackendType: SqlInstanceBackendTypeEnumToProto(resource.BackendType),\n\t\tConnectionName: dcl.ValueOrEmptyString(resource.ConnectionName),\n\t\tDatabaseVersion: SqlInstanceDatabaseVersionEnumToProto(resource.DatabaseVersion),\n\t\tEtag: dcl.ValueOrEmptyString(resource.Etag),\n\t\tGceZone: dcl.ValueOrEmptyString(resource.GceZone),\n\t\tInstanceType: SqlInstanceInstanceTypeEnumToProto(resource.InstanceType),\n\t\tMasterInstanceName: dcl.ValueOrEmptyString(resource.MasterInstanceName),\n\t\tMaxDiskSize: SqlInstanceMaxDiskSizeToProto(resource.MaxDiskSize),\n\t\tName: dcl.ValueOrEmptyString(resource.Name),\n\t\tProject: dcl.ValueOrEmptyString(resource.Project),\n\t\tRegion: dcl.ValueOrEmptyString(resource.Region),\n\t\tRootPassword: dcl.ValueOrEmptyString(resource.RootPassword),\n\t\tCurrentDiskSize: SqlInstanceCurrentDiskSizeToProto(resource.CurrentDiskSize),\n\t\tDiskEncryptionConfiguration: SqlInstanceDiskEncryptionConfigurationToProto(resource.DiskEncryptionConfiguration),\n\t\tFailoverReplica: SqlInstanceFailoverReplicaToProto(resource.FailoverReplica),\n\t\tMasterInstance: SqlInstanceMasterInstanceToProto(resource.MasterInstance),\n\t\tReplicaConfiguration: SqlInstanceReplicaConfigurationToProto(resource.ReplicaConfiguration),\n\t\tScheduledMaintenance: SqlInstanceScheduledMaintenanceToProto(resource.ScheduledMaintenance),\n\t\tSettings: SqlInstanceSettingsToProto(resource.Settings),\n\t}\n\tfor _, r := range resource.IPAddresses {\n\t\tp.IpAddresses = append(p.IpAddresses, SqlInstanceIPAddressesToProto(&r))\n\t}\n\n\treturn p\n}", "func ProtoToComputeBetaInstanceTemplateProperties(p *betapb.ComputeBetaInstanceTemplateProperties) *beta.InstanceTemplateProperties {\n\tif p == nil {\n\t\treturn nil\n\t}\n\tobj := 
&beta.InstanceTemplateProperties{\n\t\tCanIPForward: dcl.Bool(p.CanIpForward),\n\t\tDescription: dcl.StringOrNil(p.Description),\n\t\tMachineType: dcl.StringOrNil(p.MachineType),\n\t\tMinCpuPlatform: dcl.StringOrNil(p.MinCpuPlatform),\n\t\tReservationAffinity: ProtoToComputeBetaInstanceTemplatePropertiesReservationAffinity(p.GetReservationAffinity()),\n\t\tShieldedInstanceConfig: ProtoToComputeBetaInstanceTemplatePropertiesShieldedInstanceConfig(p.GetShieldedInstanceConfig()),\n\t\tScheduling: ProtoToComputeBetaInstanceTemplatePropertiesScheduling(p.GetScheduling()),\n\t}\n\tfor _, r := range p.GetDisks() {\n\t\tobj.Disks = append(obj.Disks, *ProtoToComputeBetaInstanceTemplatePropertiesDisks(r))\n\t}\n\tfor _, r := range p.GetGuestAccelerators() {\n\t\tobj.GuestAccelerators = append(obj.GuestAccelerators, *ProtoToComputeBetaInstanceTemplatePropertiesGuestAccelerators(r))\n\t}\n\tfor _, r := range p.GetNetworkInterfaces() {\n\t\tobj.NetworkInterfaces = append(obj.NetworkInterfaces, *ProtoToComputeBetaInstanceTemplatePropertiesNetworkInterfaces(r))\n\t}\n\tfor _, r := range p.GetServiceAccounts() {\n\t\tobj.ServiceAccounts = append(obj.ServiceAccounts, *ProtoToComputeBetaInstanceTemplatePropertiesServiceAccounts(r))\n\t}\n\tfor _, r := range p.GetTags() {\n\t\tobj.Tags = append(obj.Tags, r)\n\t}\n\treturn obj\n}", "func ProtoToRoutine(p *bigquerypb.BigqueryRoutine) *bigquery.Routine {\n\tobj := &bigquery.Routine{\n\t\tEtag: dcl.StringOrNil(p.GetEtag()),\n\t\tName: dcl.StringOrNil(p.GetName()),\n\t\tProject: dcl.StringOrNil(p.GetProject()),\n\t\tDataset: dcl.StringOrNil(p.GetDataset()),\n\t\tRoutineType: ProtoToBigqueryRoutineRoutineTypeEnum(p.GetRoutineType()),\n\t\tCreationTime: dcl.Int64OrNil(p.GetCreationTime()),\n\t\tLastModifiedTime: dcl.Int64OrNil(p.GetLastModifiedTime()),\n\t\tLanguage: ProtoToBigqueryRoutineLanguageEnum(p.GetLanguage()),\n\t\tReturnType: ProtoToBigqueryRoutineArgumentsDataType(p.GetReturnType()),\n\t\tDefinitionBody: 
dcl.StringOrNil(p.GetDefinitionBody()),\n\t\tDescription: dcl.StringOrNil(p.GetDescription()),\n\t\tDeterminismLevel: ProtoToBigqueryRoutineDeterminismLevelEnum(p.GetDeterminismLevel()),\n\t\tStrictMode: dcl.Bool(p.GetStrictMode()),\n\t}\n\tfor _, r := range p.GetArguments() {\n\t\tobj.Arguments = append(obj.Arguments, *ProtoToBigqueryRoutineArguments(r))\n\t}\n\tfor _, r := range p.GetImportedLibraries() {\n\t\tobj.ImportedLibraries = append(obj.ImportedLibraries, r)\n\t}\n\treturn obj\n}", "func CopyPBAndDereference(dst interface{}, src interface{}) interface{} {\n\tcopy := CopyPB(dst, src)\n\treturn reflect.ValueOf(copy).Elem().Interface()\n}", "func NewPBPacket(id ProtoID) interface{} {\n packet, ok := packetMap[id];\n if !ok {\n return nil;\n }\n ms, _ := packet.(proto.Message);\n \n return proto.Clone(ms)\n}", "func (t *Tree) ToProto() *pbMerkle.Tree {\n\treturn &pbMerkle.Tree{\n\t\tDepth: int64(t.depth),\n\t\tRootNode: t.rootNode.toProto(),\n\t}\n}", "func (data *Data) ToProto() tmproto.Data {\n\ttp := new(tmproto.Data)\n\n\tif len(data.Txs) > 0 {\n\t\ttxBzs := make([][]byte, len(data.Txs))\n\t\tfor i := range data.Txs {\n\t\t\ttxBzs[i] = data.Txs[i]\n\t\t}\n\t\ttp.Txs = txBzs\n\t}\n\n\treturn *tp\n}", "func (data *Data) ToProto() tmproto.Data {\n\ttp := new(tmproto.Data)\n\n\tif len(data.Txs) > 0 {\n\t\ttxBzs := make([][]byte, len(data.Txs))\n\t\tfor i := range data.Txs {\n\t\t\ttxBzs[i] = data.Txs[i]\n\t\t}\n\t\ttp.Txs = txBzs\n\t}\n\n\treturn *tp\n}", "func RecordToProto(ctx context.Context, dag format.DAGService, rec net.Record) (*pb.Log_Record, error) {\n\tblock, err := rec.GetBlock(ctx, dag)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tevent, ok := block.(*Event)\n\tif !ok {\n\t\tevent, err = EventFromNode(block)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\theader, err := event.GetHeader(ctx, dag, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tbody, err := event.GetBody(ctx, dag, nil)\n\tif err != nil {\n\t\treturn nil, 
err\n\t}\n\n\treturn &pb.Log_Record{\n\t\tRecordNode: rec.RawData(),\n\t\tEventNode: block.RawData(),\n\t\tHeaderNode: header.RawData(),\n\t\tBodyNode: body.RawData(),\n\t}, nil\n}", "func BigqueryRoutineArgumentsDataTypeStructTypeToProto(o *bigquery.RoutineArgumentsDataTypeStructType) *bigquerypb.BigqueryRoutineArgumentsDataTypeStructType {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &bigquerypb.BigqueryRoutineArgumentsDataTypeStructType{}\n\tsFields := make([]*bigquerypb.BigqueryRoutineArgumentsDataTypeStructTypeFields, len(o.Fields))\n\tfor i, r := range o.Fields {\n\t\tsFields[i] = BigqueryRoutineArgumentsDataTypeStructTypeFieldsToProto(&r)\n\t}\n\tp.SetFields(sFields)\n\treturn p\n}", "func ComputeBetaInstanceTemplatePropertiesDisksInitializeParamsToProto(o *beta.InstanceTemplatePropertiesDisksInitializeParams) *betapb.ComputeBetaInstanceTemplatePropertiesDisksInitializeParams {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &betapb.ComputeBetaInstanceTemplatePropertiesDisksInitializeParams{\n\t\tDiskName: dcl.ValueOrEmptyString(o.DiskName),\n\t\tDiskSizeGb: dcl.ValueOrEmptyInt64(o.DiskSizeGb),\n\t\tDiskType: dcl.ValueOrEmptyString(o.DiskType),\n\t\tSourceImage: dcl.ValueOrEmptyString(o.SourceImage),\n\t\tSourceSnapshot: dcl.ValueOrEmptyString(o.SourceSnapshot),\n\t\tSourceSnapshotEncryptionKey: ComputeBetaInstanceTemplatePropertiesDisksInitializeParamsSourceSnapshotEncryptionKeyToProto(o.SourceSnapshotEncryptionKey),\n\t\tDescription: dcl.ValueOrEmptyString(o.Description),\n\t\tOnUpdateAction: dcl.ValueOrEmptyString(o.OnUpdateAction),\n\t\tSourceImageEncryptionKey: ComputeBetaInstanceTemplatePropertiesDisksInitializeParamsSourceImageEncryptionKeyToProto(o.SourceImageEncryptionKey),\n\t}\n\tp.Labels = make(map[string]string)\n\tfor k, r := range o.Labels {\n\t\tp.Labels[k] = r\n\t}\n\tfor _, r := range o.ResourcePolicies {\n\t\tp.ResourcePolicies = append(p.ResourcePolicies, r)\n\t}\n\treturn p\n}", "func CloudkmsBetaCryptoKeyVersionTemplateToProto(o 
*beta.CryptoKeyVersionTemplate) *betapb.CloudkmsBetaCryptoKeyVersionTemplate {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &betapb.CloudkmsBetaCryptoKeyVersionTemplate{}\n\tp.SetProtectionLevel(CloudkmsBetaCryptoKeyVersionTemplateProtectionLevelEnumToProto(o.ProtectionLevel))\n\tp.SetAlgorithm(CloudkmsBetaCryptoKeyVersionTemplateAlgorithmEnumToProto(o.Algorithm))\n\treturn p\n}", "func TestPbEncoderAndDecoder(t *testing.T) {\n\tregister(0, reflect.TypeOf(example.A{}))\n\n\tbuf := new(bytes.Buffer)\n\n\tinPb := &example.A{\n\t\tDescription: \"hello world!\",\n\t\tNumber: 1,\n\t}\n\t// UUID is 16 byte long\n\tfor i := 0; i < 16; i++ {\n\t\tinPb.Id = append(inPb.Id, byte(i))\n\t}\n\n\tmsg := NewPbMessage(0, inPb)\n\n\te := NewMsgEncoder(buf)\n\te.EncodePb(msg)\n\n\toutMsg := NewEmptyPbMessage()\n\n\td := NewMsgDecoder(buf)\n\terr := d.DecodePb(outMsg)\n\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tif !reflect.DeepEqual(msg, outMsg) {\n\t\tt.Fatal(\"Messages are not equal!\")\n\t}\n\n\tif !reflect.DeepEqual(inPb, outMsg.pb) {\n\t\tt.Fatal(\"Protos are not equal!\")\n\t}\n}", "func BigqueryRoutineArgumentsToProto(o *bigquery.RoutineArguments) *bigquerypb.BigqueryRoutineArguments {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &bigquerypb.BigqueryRoutineArguments{}\n\tp.SetName(dcl.ValueOrEmptyString(o.Name))\n\tp.SetArgumentKind(BigqueryRoutineArgumentsArgumentKindEnumToProto(o.ArgumentKind))\n\tp.SetMode(BigqueryRoutineArgumentsModeEnumToProto(o.Mode))\n\tp.SetDataType(BigqueryRoutineArgumentsDataTypeToProto(o.DataType))\n\treturn p\n}", "func ChatDBToProto(chat *db.Chat) *models.Message {\n\treturn &models.Message{\n\t\tId: chat.ID,\n\t\tFrom: chat.From,\n\t\tBody: chat.Body,\n\t\tTo: chat.To,\n\t\tTime: chat.Time,\n\t}\n}", "func ToBytes(inter interface{}) []byte {\n\treqBodyBytes := new(bytes.Buffer)\n\tjson.NewEncoder(reqBodyBytes).Encode(inter)\n\tfmt.Println(reqBodyBytes.Bytes()) // this is the []byte\n\tfmt.Println(string(reqBodyBytes.Bytes())) // converted 
back to show it's your original object\n\treturn reqBodyBytes.Bytes()\n}", "func (me *Model) ToRPCMsg(escape bool) *rpc.Message {\n\tret := &rpc.Message{\n\t\tId: me.ID.Hex(),\n\t\tTopicId: me.TopicID.Hex(),\n\t\tSenderName: me.SenderName,\n\t\tPostTime: &timestamp.Timestamp{\n\t\t\tSeconds: me.PostTime.Unix(),\n\t\t\tNanos: int32(me.PostTime.Nanosecond()),\n\t\t},\n\t\tMessage: me.Message,\n\t\tBump: me.Bump,\n\t}\n\tif escape {\n\t\tret.SenderName = html.EscapeString(me.SenderName)\n\t\tret.Message = html.EscapeString(me.Message)\n\t}\n\treturn ret\n}", "func (op RollupOp) ToProto(pb *pipelinepb.AppliedRollupOp) error {\n\top.AggregationID.ToProto(&pb.AggregationId)\n\tpb.Id = op.ID\n\treturn nil\n}", "func ForgetAllFields(t *testing.T, originalMessage proto.Message) proto.Message {\n\tt.Helper()\n\n\temptyMessage := &pb2_latest.Empty{}\n\n\tbinaryMessage, err := proto.Marshal(originalMessage)\n\tif err != nil {\n\t\tt.Error(err)\n\t}\n\n\terr = proto.Unmarshal(binaryMessage, emptyMessage)\n\tif err != nil {\n\t\tt.Error(err)\n\t}\n\treturn emptyMessage\n}", "func (m *Message) ToProtobuf() *PBDHTMessage {\n\tpmes := new(PBDHTMessage)\n\tif m.Value != nil {\n\t\tpmes.Value = m.Value\n\t}\n\n\tpmes.Type = &m.Type\n\tpmes.Key = &m.Key\n\tpmes.Response = &m.Response\n\tpmes.Id = &m.ID\n\tpmes.Success = &m.Success\n\tfor _, p := range m.Peers {\n\t\tpmes.Peers = append(pmes.Peers, peerInfo(p))\n\t}\n\n\treturn pmes\n}", "func ContainerClusterAddonsConfigHttpLoadBalancingToProto(o *container.ClusterAddonsConfigHttpLoadBalancing) *containerpb.ContainerClusterAddonsConfigHttpLoadBalancing {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &containerpb.ContainerClusterAddonsConfigHttpLoadBalancing{\n\t\tDisabled: dcl.ValueOrEmptyBool(o.Disabled),\n\t}\n\treturn p\n}", "func FieldDataToBytes(endian binary.ByteOrder, fieldData FieldData) ([]byte, error) {\n\tswitch field := fieldData.(type) {\n\tcase *BoolFieldData:\n\t\t// return binaryWrite(endian, field.Data)\n\t\treturn 
boolFieldDataToPbBytes(field)\n\tcase *StringFieldData:\n\t\treturn stringFieldDataToPbBytes(field)\n\tcase *ArrayFieldData:\n\t\treturn arrayFieldDataToPbBytes(field)\n\tcase *JSONFieldData:\n\t\treturn jsonFieldDataToPbBytes(field)\n\tcase *BinaryVectorFieldData:\n\t\treturn field.Data, nil\n\tcase *FloatVectorFieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tcase *Int8FieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tcase *Int16FieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tcase *Int32FieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tcase *Int64FieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tcase *FloatFieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tcase *DoubleFieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"unsupported field data: %s\", field)\n\t}\n}", "func patchFieldValueToObject(fieldPath string, value interface{}, to runtime.Object, mo *xpv1.MergeOptions) error {\n\tpaved, err := fieldpath.PaveObject(to)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := paved.MergeValue(fieldPath, value, mo); err != nil {\n\t\treturn err\n\t}\n\n\treturn runtime.DefaultUnstructuredConverter.FromUnstructured(paved.UnstructuredContent(), to)\n}", "func NewPbJsonPublisher(q sqlh.Queryer, schema, table string) MsgPublisherFunc {\n\n\tpbPublisher := NewMsgPublisher(pjenc.DefaultPbEncoder, q, schema, table)\n\tjsonPublisher := NewMsgPublisher(pjenc.DefaultJsonEncoder, q, schema, table)\n\n\treturn func(ctx context.Context, spec MsgSpec, msg interface{}) error {\n\t\tif _, ok := spec.MsgValue().(proto.Message); ok {\n\t\t\treturn pbPublisher(ctx, spec, msg)\n\t\t}\n\t\treturn jsonPublisher(ctx, spec, msg)\n\t}\n}", "func ProtoToDomainMapping(p *appenginepb.AppengineDomainMapping) *appengine.DomainMapping {\n\tobj := &appengine.DomainMapping{\n\t\tSelfLink: dcl.StringOrNil(p.SelfLink),\n\t\tName: dcl.StringOrNil(p.Name),\n\t\tSslSettings: 
ProtoToAppengineDomainMappingSslSettings(p.GetSslSettings()),\n\t\tApp: dcl.StringOrNil(p.App),\n\t}\n\tfor _, r := range p.GetResourceRecords() {\n\t\tobj.ResourceRecords = append(obj.ResourceRecords, *ProtoToAppengineDomainMappingResourceRecords(r))\n\t}\n\treturn obj\n}", "func ToPbTime(gt time.Time) *google_protobuf1.Timestamp {\n\treturn &google_protobuf1.Timestamp{Seconds: int64(gt.Unix()), Nanos: int32(gt.Nanosecond())}\n}", "func ToObject(v []byte, output interface{}) error {\n\t_, err := newStructDecoder(output).Decode(v) // nolint\n\treturn err\n}", "func (o *ProjectWebhookPartial) ToMap() map[string]interface{} {\n\tkv := map[string]interface{}{\n\t\t\"enabled\": toProjectWebhookObject(o.Enabled, true),\n\t\t\"error_message\": toProjectWebhookObject(o.ErrorMessage, true),\n\t\t\"errored\": toProjectWebhookObject(o.Errored, true),\n\t\t\"project_id\": toProjectWebhookObject(o.ProjectID, true),\n\t\t\"url\": toProjectWebhookObject(o.URL, true),\n\t}\n\tfor k, v := range kv {\n\t\tif v == nil || reflect.ValueOf(v).IsZero() {\n\t\t\tdelete(kv, k)\n\t\t} else {\n\t\t}\n\t}\n\treturn kv\n}", "func toTextPb(_ *starlark.Thread, _ *starlark.Builtin, args starlark.Tuple, kwargs []starlark.Tuple) (starlark.Value, error) {\n\tvar msg *Message\n\tif err := starlark.UnpackArgs(\"to_textpb\", args, kwargs, \"msg\", &msg); err != nil {\n\t\treturn nil, err\n\t}\n\tpb, err := msg.ToProto()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn starlark.String(proto.MarshalTextString(pb)), nil\n}", "func AttestorToProto(resource *binaryauthorization.Attestor) *binaryauthorizationpb.BinaryauthorizationAttestor {\n\tp := &binaryauthorizationpb.BinaryauthorizationAttestor{\n\t\tName: dcl.ValueOrEmptyString(resource.Name),\n\t\tDescription: dcl.ValueOrEmptyString(resource.Description),\n\t\tUserOwnedGrafeasNote: BinaryauthorizationAttestorUserOwnedGrafeasNoteToProto(resource.UserOwnedGrafeasNote),\n\t\tUpdateTime: dcl.ValueOrEmptyString(resource.UpdateTime),\n\t\tProject: 
dcl.ValueOrEmptyString(resource.Project),\n\t}\n\n\treturn p\n}", "func patchFieldValueToObject(fieldPath string, value any, to runtime.Object, mo *xpv1.MergeOptions) error {\n\tpaved, err := fieldpath.PaveObject(to)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := paved.MergeValue(fieldPath, value, mo); err != nil {\n\t\treturn err\n\t}\n\n\treturn runtime.DefaultUnstructuredConverter.FromUnstructured(paved.UnstructuredContent(), to)\n}", "func SendtoPB(output string) (int){\n values := url.Values{}\n\tvalues.Set(\"api_dev_key\", \"\")\n\tvalues.Set(\"api_option\", \"paste\")\n\tvalues.Set(\"api_paste_code\", output)\n\tvalues.Set(\"api_paste_name\", \"TEST\")\n\tvalues.Set(\"api_paste_expire_date\", \"10M\")\n\tresponse, err := http.PostForm(\"http://pastebin.com/api/api_post.php\", values)\n\tdefer response.Body.Close()\n\tif err != nil {\n //log.Fatalln(err)\n return 0\n\t}\n\tif response.StatusCode != 200 {\n //log.Fatalln(response.StatusCode)\n return 0\n\t}\n\tbuf := bytes.Buffer{}\n\t_, err = buf.ReadFrom(response.Body)\n\tif err != nil {\n //log.Fatalln(err)\n return 0\n\t}\n // Debugging Pastebin response\n // fmt.Println(buf.String())\n return 0\n}" ]
[ "0.7008476", "0.69752324", "0.6808838", "0.67246944", "0.67070097", "0.6107308", "0.603101", "0.59871733", "0.5983137", "0.57622445", "0.56662786", "0.5660519", "0.5600947", "0.55079126", "0.5474511", "0.5421043", "0.53751045", "0.53708917", "0.5343565", "0.53416985", "0.5310073", "0.5297911", "0.524781", "0.5227295", "0.522543", "0.5211187", "0.5186027", "0.51705664", "0.51474416", "0.5121875", "0.508012", "0.5077509", "0.5065182", "0.50499403", "0.50310546", "0.50220364", "0.5012711", "0.497154", "0.49685913", "0.49497044", "0.49403214", "0.4930561", "0.49289125", "0.49243858", "0.49118233", "0.49108887", "0.49065998", "0.48932767", "0.48932767", "0.48878956", "0.48746583", "0.4865442", "0.48548204", "0.4837951", "0.4835112", "0.4829136", "0.4822941", "0.4821735", "0.481984", "0.48152322", "0.4814241", "0.48104528", "0.48079896", "0.47963068", "0.4794536", "0.47862014", "0.47852486", "0.47777048", "0.4773561", "0.47637913", "0.47580093", "0.47473875", "0.47470325", "0.47461468", "0.47407174", "0.47407174", "0.47168925", "0.47088444", "0.47087428", "0.47065437", "0.47060803", "0.4701034", "0.4698884", "0.46987975", "0.46986824", "0.46978208", "0.46973753", "0.469688", "0.4696356", "0.46945414", "0.46888867", "0.4684471", "0.4674574", "0.467377", "0.46693286", "0.46620587", "0.4660721", "0.46580648", "0.46514872", "0.46482298" ]
0.6098772
6
TableName overrides the default tablename generated by GORM
func (HealthMenstruationDailyEntryORM) TableName() string { return "health_menstruation_daily_entries" }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (user *User) TableName() string {\n return \"users\"\n}", "func (User) TableName() string {\n\treturn tableName\n}", "func (TblUser) TableName() string {\n\treturn \"tblUser\"\n}", "func TableNameNoSchema(dialect Dialect, mapper names.Mapper, tableName interface{}) string {\n\tquote := dialect.Quoter().Quote\n\tswitch tableName.(type) {\n\tcase []string:\n\t\tt := tableName.([]string)\n\t\tif len(t) > 1 {\n\t\t\treturn fmt.Sprintf(\"%v AS %v\", quote(t[0]), quote(t[1]))\n\t\t} else if len(t) == 1 {\n\t\t\treturn quote(t[0])\n\t\t}\n\tcase []interface{}:\n\t\tt := tableName.([]interface{})\n\t\tl := len(t)\n\t\tvar table string\n\t\tif l > 0 {\n\t\t\tf := t[0]\n\t\t\tswitch f.(type) {\n\t\t\tcase string:\n\t\t\t\ttable = f.(string)\n\t\t\tcase names.TableName:\n\t\t\t\ttable = f.(names.TableName).TableName()\n\t\t\tdefault:\n\t\t\t\tv := utils.ReflectValue(f)\n\t\t\t\tt := v.Type()\n\t\t\t\tif t.Kind() == reflect.Struct {\n\t\t\t\t\ttable = names.GetTableName(mapper, v)\n\t\t\t\t} else {\n\t\t\t\t\ttable = quote(fmt.Sprintf(\"%v\", f))\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif l > 1 {\n\t\t\treturn fmt.Sprintf(\"%v AS %v\", quote(table), quote(fmt.Sprintf(\"%v\", t[1])))\n\t\t} else if l == 1 {\n\t\t\treturn quote(table)\n\t\t}\n\tcase names.TableName:\n\t\treturn tableName.(names.TableName).TableName()\n\tcase string:\n\t\treturn tableName.(string)\n\tcase reflect.Value:\n\t\tv := tableName.(reflect.Value)\n\t\treturn names.GetTableName(mapper, v)\n\tdefault:\n\t\tv := utils.ReflectValue(tableName)\n\t\tt := v.Type()\n\t\tif t.Kind() == reflect.Struct {\n\t\t\treturn names.GetTableName(mapper, v)\n\t\t}\n\t\treturn quote(fmt.Sprintf(\"%v\", tableName))\n\t}\n\treturn \"\"\n}", "func (o *<%= classedName %>) TableName() string {\n\treturn \"<%= tableName %>\"\n}", "func (Alumno) TableName() string {\n\treturn \"alumnos\"\n}", "func (model *Barang) TableName() string {\n\treturn BarangTableName\n}", "func (cpt *DbVersion) TableName() string { return 
\"goose_db_version\" }", "func (u User) Table() string {\n\treturn tableName\n}", "func (CommentORM) TableName() string {\n\treturn \"comments\"\n}", "func (a *Action) TableName() string {\n\tconst ormTableName = \"actions\"\n\treturn ormTableName\n}", "func (ProfileORM) TableName() string {\n\treturn \"profiles\"\n}", "func (UserInfoORM) TableName() string {\n\treturn \"user_infos\"\n}", "func (User) TableName() string {\n\treturn \"user\"\n}", "func (User) TableName() string {\n\treturn \"user\"\n}", "func (User) TableName() string {\n\treturn \"user\"\n}", "func (User) TableName() string {\n\treturn \"user\"\n}", "func (User) TableName() string {\n\treturn \"dbo.Users\"\n}", "func (EmailORM) TableName() string {\n\treturn \"emails\"\n}", "func (u *User) TableName() string {\n\treturn LUTUserTableName\n}", "func (sc SnakeCaseConvention) TableName(typeName string) string {\n\treturn sc.Convert(typeName)\n}", "func (PeriodORM) TableName() string {\n\treturn \"periods\"\n}", "func FullTableName(tableName string) string {\n\tconf, confErr := config.GetAppConfig()\n\tif confErr != nil {\n\t\tlog.Fatalf(\"read database config err %v \", confErr)\n\t}\n\n\treturn conf.App.DataBase.Prefix + tableName\n}", "func (TestUser) TableName() string {\n\treturn \"test_user\"\n}", "func (m *GormRoleMappingRepository) TableName() string {\n\treturn \"role_mapping\"\n}", "func (m *Migration) TableName() string {\n\treturn \"dbMigration\"\n}", "func (a *Customer) TableName() string {\n\treturn \"gorm_customers\"\n}", "func (u UpdateUser) Table() string {\n\treturn tableName\n}", "func (u *user_info) TableName() string {\n return \"party_user_info\"\n}", "func (model *Produk) TableName() string {\n\treturn ProdukTableName\n}", "func (UserEntity) TableName() string {\n\treturn \"user\"\n}", "func (sc SameCaseConvention) TableName(typeName string) string {\n\treturn sc.Convert(typeName)\n}", "func (z *Zzz) SqlTableName() string { //nolint:dupl false positive\n\treturn `\"zzz\"`\n}", 
"func (s *UpBaseInfo) TableName() string {\n\treturn TableNameUpBaseInfo\n}", "func (m *ClassChangeTeaHis) TableName() string {\n\treturn \"class_change_tea_his\"\n}", "func (Project) TableName() string {\n\treturn \"project\"\n}", "func (Person) TableName() string {\n\treturn \"person\"\n}", "func FullTableName(dialect Dialect, mapper names.Mapper, bean interface{}, includeSchema ...bool) string {\n\ttbName := TableNameNoSchema(dialect, mapper, bean)\n\tif len(includeSchema) > 0 && includeSchema[0] && !utils.IsSubQuery(tbName) {\n\t\ttbName = TableNameWithSchema(dialect, tbName)\n\t}\n\treturn tbName\n}", "func TableName(name string) string {\n\tif IsTest {\n\t\treturn fmt.Sprintf(\"%v_test\", name)\n\t}\n\treturn fmt.Sprintf(\"%v_development\", name)\n}", "func (User) TableName() string {\n\treturn WithTablePrefix(\"user\")\n}", "func (Server) TableName() string {\n\treturn \"Server\"\n}", "func (i *Install) TableName() string {\r\n\treturn \"install\"\r\n}", "func (u User) TableName() string {\n\treturn \"Users\"\n}", "func (ContactORM) TableName() string {\n\treturn \"contacts\"\n}", "func (ContactORM) TableName() string {\n\treturn \"contacts\"\n}", "func TableName(key string) string {\n\treturn strings.TrimSuffix(key, sqlIDSuffix)\n}", "func (Company) TableName() string {\n\treturn \"company\"\n}", "func (ul *UserLogin) TableName() string {\n\treturn \"user_login\"\n}", "func (hp mysqlTS) TableName() string {\n\treturn \"tsad_points\"\n}", "func (m Article) TableName() string {\n\treturn \"articles\"\n\n}", "func (m Article) TableName() string {\n\treturn \"articles\"\n\n}", "func (IntPointORM) TableName() string {\n\treturn \"int_points\"\n}", "func (u *User) TableName() string {\n\treturn userTableName\n}", "func (Pokemon) TableName() string { return \"pokemons\" }", "func (Bank) TableName() string {\n\treturn \"bank\"\n}", "func (app *Table) Name() string {\n\treturn \"table\"\n}", "func (meeting *Meeting) TableName() string {\n\treturn 
\"ljl_meeting\"\n}", "func (HealthMenstruationPersonalInfoORM) TableName() string {\n\treturn \"health_menstruation_personal_infos\"\n}", "func (Practica) TableName() string {\n\treturn \"practicas\"\n}", "func (lc LowerCaseConvention) TableName(typeName string) string {\n\treturn lc.Convert(typeName)\n}", "func (p Project) TableName() string {\n\treturn \"project\"\n}", "func (*Admin) TableName() string {\n\treturn tablePrefix + \"admin\"\n}", "func (User) TableName() string {\n\treturn \"users\"\n}", "func (User) TableName() string {\n\treturn \"users\"\n}", "func (User) TableName() string {\n\treturn \"users\"\n}", "func (Static) TableName() string {\n\treturn \"static\"\n}", "func (s *SchemaMigration) TableName() string {\n\treturn \"schema_migrations\"\n}", "func (Menu) TableName() string {\n\treturn \"menu\"\n}", "func (st SlackTeam) TableName() string {\n\treturn \"slack_team\"\n}", "func (User) TableName() string {\n\treturn \"urbs_user\"\n}", "func (Metric) TableName() string {\r\n\treturn tableNameMetrics\r\n}", "func (Team) TableName() string {\n\treturn \"teams\"\n}", "func (a *Article) TableName() string {\n\treturn \"article\"\n}", "func (c *AccountModel) TableName() string {\n\treturn AccountTable\n}", "func (e Entity) TableName() string {\n\treturn \"posts\"\n}", "func (u Umpires) TableName() string {\n\treturn \"umpires\"\n}", "func (Account) TableName() string {\n\ttableName := \"accounts\"\n\n\tif namespace.GetNamespace() != \"\" {\n\t\treturn namespace.GetNamespace() + \"_\" + tableName\n\t}\n\n\treturn tableName\n}", "func (TramDoTrieu) TableName() string {\n\treturn \"TramDoTrieu\"\n}", "func (c CDRgorm) TableName() string {\n\treturn \"cdr\"\n}", "func (p *ProjectJenkins) TableName() string {\n\treturn \"project_jenkins\"\n}", "func (model *User) TableName() string {\n\treturn \"user\"\n}", "func (u UserUpload) TableName() string {\n\treturn \"user_uploads\"\n}", "func (a *AddressDAL) TableName() string {\n\treturn \"address\"\n}", "func (mc 
*MeetingCanhui) TableName() string {\n\treturn \"ljl_meetingcanhui\"\n}", "func getTableName(object interface{}) string {\n\tstringName := fmt.Sprintf(\"%ss\", strings.ToLower(getType(object)))\n\treturn stringName\n}", "func (m *SysUser) TableName() string {\n\treturn \"sys_user\"\n}", "func (m *Mysql) Table(name string) {\n\tm.table = name\n}", "func (Goods) TableName() string {\n\treturn \"goods\"\n}", "func (d *Daily) TableName() string {\n\treturn \"daily\"\n}", "func (b *Binary) TableName() string {\n\tif b.ecosystem == 0 {\n\t\tb.ecosystem = 1\n\t}\n\treturn `1_binaries`\n}", "func (u StudentInfo) TableName() string {\n\treturn \"student\"\n}", "func (Invoice) TableName() string {\n\t// custom table name, this is default\n\treturn \"invoices\"\n}", "func (db *DB) Table(name string) *DB {\n\tdb.tablename = TableName(name)\n\treturn db\n}", "func (g *Grid) TableName() string {\n\treturn \"grid\"\n}", "func (post *Post) TableName() string {\n\treturn \"post\"\n}", "func (MigrationV1) TableName() string {\n\treturn \"migrations\"\n}", "func (NestTable) TableName() string {\n\treturn \"nest_table\"\n}", "func (m *GormIdentityRoleRepository) TableName() string {\n\treturn \"identity_role\"\n}", "func (m *Stucasting) TableName() string {\n\treturn \"stucasting\"\n}", "func (m *Stucasting) TableName() string {\n\treturn \"stucasting\"\n}" ]
[ "0.7302173", "0.713911", "0.7083268", "0.7068281", "0.7064129", "0.6997782", "0.6992626", "0.69540036", "0.6950419", "0.6922991", "0.6906448", "0.6896193", "0.68884236", "0.6842365", "0.6842365", "0.6842365", "0.6842365", "0.684192", "0.6834944", "0.68174136", "0.6791999", "0.67894596", "0.6758264", "0.6746872", "0.6721033", "0.6711168", "0.6709335", "0.67092973", "0.6709078", "0.67040694", "0.66948277", "0.6692023", "0.66878545", "0.6676768", "0.667669", "0.66739726", "0.667365", "0.6660641", "0.6657154", "0.6657054", "0.6655283", "0.66537124", "0.664319", "0.66392404", "0.66392404", "0.6638248", "0.66322553", "0.66243047", "0.6620342", "0.6606126", "0.6606126", "0.66010815", "0.65929216", "0.6572596", "0.6562763", "0.65560025", "0.6539823", "0.6537073", "0.65352136", "0.65338016", "0.6522775", "0.6521889", "0.65208614", "0.65208614", "0.65208614", "0.6520408", "0.65014386", "0.64898723", "0.6475254", "0.6469655", "0.646608", "0.64618593", "0.6441312", "0.64342874", "0.6428684", "0.6421728", "0.6418567", "0.6417183", "0.64169204", "0.64138025", "0.6398845", "0.6393849", "0.63914305", "0.6390791", "0.6388002", "0.6377717", "0.6377688", "0.6374135", "0.6369855", "0.63626885", "0.63614875", "0.63582975", "0.63546085", "0.6353095", "0.6352752", "0.63474596", "0.6340351", "0.6336714", "0.6336175", "0.6336175" ]
0.6397078
81
ToORM runs the BeforeToORM hook if present, converts the fields of this object to ORM format, runs the AfterToORM hook, then returns the ORM object
func (m *HealthMenstruationDailyEntry) ToORM(ctx context.Context) (HealthMenstruationDailyEntryORM, error) { to := HealthMenstruationDailyEntryORM{} var err error if prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToORM); ok { if err = prehook.BeforeToORM(ctx, &to); err != nil { return to, err } } to.Id = m.Id if m.CreatedAt != nil { var t time.Time if t, err = ptypes1.Timestamp(m.CreatedAt); err != nil { return to, err } to.CreatedAt = &t } if m.UpdatedAt != nil { var t time.Time if t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil { return to, err } to.UpdatedAt = &t } to.ProfileId = m.ProfileId if m.Day != nil { var t time.Time if t, err = ptypes1.Timestamp(m.Day); err != nil { return to, err } to.Day = &t } to.IntensityPercentage = m.IntensityPercentage to.Type = int32(m.Type) to.Manual = m.Manual to.BasedOnPrediction = m.BasedOnPrediction if posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToORM); ok { err = posthook.AfterToORM(ctx, &to) } return to, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *Contact) ToORM(ctx context.Context) (ContactORM, error) {\n\tto := ContactORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(ContactWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.FirstName = m.FirstName\n\tto.MiddleName = m.MiddleName\n\tto.LastName = m.LastName\n\t// Skipping field: PrimaryEmail\n\tfor _, v := range m.Emails {\n\t\tif v != nil {\n\t\t\tif tempEmails, cErr := v.ToORM(ctx); cErr == nil {\n\t\t\t\tto.Emails = append(to.Emails, &tempEmails)\n\t\t\t} else {\n\t\t\t\treturn to, cErr\n\t\t\t}\n\t\t} else {\n\t\t\tto.Emails = append(to.Emails, nil)\n\t\t}\n\t}\n\tif posthook, ok := interface{}(m).(ContactWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *UserInfo) ToORM(ctx context.Context) (UserInfoORM, error) {\n\tto := UserInfoORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(UserInfoWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.UserId = m.UserId\n\tto.LastName = m.LastName\n\tto.FirstName = m.FirstName\n\tto.Period = m.Period\n\tto.DepartmentId = m.DepartmentId\n\tto.JobId = m.JobId\n\tto.EnrollmentFlg = m.EnrollmentFlg\n\tto.AdminFlg = m.AdminFlg\n\tif m.CreatedAt != nil {\n\t\t*to.CreatedAt = m.CreatedAt.AsTime()\n\t}\n\tif m.UpdatedAt != nil {\n\t\t*to.UpdatedAt = m.UpdatedAt.AsTime()\n\t}\n\tif posthook, ok := interface{}(m).(UserInfoWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *Period) ToORM(ctx context.Context) (PeriodORM, error) {\n\tto := PeriodORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(PeriodWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.Period = m.Period\n\tif m.CreatedAt != nil {\n\t\t*to.CreatedAt = 
m.CreatedAt.AsTime()\n\t}\n\tif m.UpdatedAt != nil {\n\t\t*to.UpdatedAt = m.UpdatedAt.AsTime()\n\t}\n\tif posthook, ok := interface{}(m).(PeriodWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *Email) ToORM(ctx context.Context) (EmailORM, error) {\n\tto := EmailORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(EmailWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.Address = m.Address\n\tif posthook, ok := interface{}(m).(EmailWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *HealthMenstruationPersonalInfo) ToORM(ctx context.Context) (HealthMenstruationPersonalInfoORM, error) {\n\tto := HealthMenstruationPersonalInfoORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tto.ProfileId = m.ProfileId\n\tto.PeriodLengthInDays = m.PeriodLengthInDays\n\tto.CycleLengthInDays = m.CycleLengthInDays\n\tif posthook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *Profile) ToORM(ctx context.Context) (ProfileORM, error) {\n\tto := ProfileORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(ProfileWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif 
t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tto.Notes = m.Notes\n\tto.FirstName = m.FirstName\n\tto.LastName = m.LastName\n\tto.PrimaryEmail = m.PrimaryEmail\n\tfor _, v := range m.Groups {\n\t\tif v != nil {\n\t\t\tif tempGroups, cErr := v.ToORM(ctx); cErr == nil {\n\t\t\t\tto.Groups = append(to.Groups, &tempGroups)\n\t\t\t} else {\n\t\t\t\treturn to, cErr\n\t\t\t}\n\t\t} else {\n\t\t\tto.Groups = append(to.Groups, nil)\n\t\t}\n\t}\n\tto.ProfilePictureUrl = m.ProfilePictureUrl\n\tif posthook, ok := interface{}(m).(ProfileWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *Comment) ToORM(ctx context.Context) (CommentORM, error) {\n\tto := CommentORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(CommentWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.Id != nil {\n\t\tto.Id, err = go_uuid1.FromString(m.Id.Value)\n\t\tif err != nil {\n\t\t\treturn to, err\n\t\t}\n\t} else {\n\t\tto.Id = go_uuid1.Nil\n\t}\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tif m.DeletedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.DeletedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.DeletedAt = &t\n\t}\n\tif m.BoardId != nil {\n\t\tv := m.BoardId.Value\n\t\tto.BoardId = &v\n\t}\n\tif m.PostId != nil {\n\t\tv := m.PostId.Value\n\t\tto.PostId = &v\n\t}\n\tif m.ContentId != nil {\n\t\tv := 
m.ContentId.Value\n\t\tto.ContentId = &v\n\t}\n\tif m.Userid != nil {\n\t\tv := m.Userid.Value\n\t\tto.Userid = &v\n\t}\n\tif m.Username != nil {\n\t\tv := m.Username.Value\n\t\tto.Username = &v\n\t}\n\tif m.Nickname != nil {\n\t\tv := m.Nickname.Value\n\t\tto.Nickname = &v\n\t}\n\tif m.Email != nil {\n\t\tv := m.Email.Value\n\t\tto.Email = &v\n\t}\n\tif m.Password != nil {\n\t\tv := m.Password.Value\n\t\tto.Password = &v\n\t}\n\tif m.Url != nil {\n\t\tv := m.Url.Value\n\t\tto.Url = &v\n\t}\n\tif m.UseHtml != nil {\n\t\tv := m.UseHtml.Value\n\t\tto.UseHtml = &v\n\t}\n\tif m.UseSecret != nil {\n\t\tv := m.UseSecret.Value\n\t\tto.UseSecret = &v\n\t}\n\tto.UpVoteCount = m.UpVoteCount\n\tto.DownVoteCount = m.DownVoteCount\n\tif posthook, ok := interface{}(m).(CommentWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *IntPoint) ToORM(ctx context.Context) (IntPointORM, error) {\n\tto := IntPointORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(IntPointWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.X = m.X\n\tto.Y = m.Y\n\tif posthook, ok := interface{}(m).(IntPointWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func ConvertContactToORM(from Contact) (ContactORM, error) {\n\tto := ContactORM{}\n\tvar err error\n\tto.Id = from.Id\n\tto.FirstName = from.FirstName\n\tto.MiddleName = from.MiddleName\n\tto.LastName = from.LastName\n\tto.EmailAddress = from.EmailAddress\n\treturn to, err\n}", "func NewORM() ORMWrapper {\n\treturn &orm{\n\t\tdone: make(chan error),\n\t}\n}", "func (adapter *GORMAdapter) GetUnderlyingORM() interface{} {\n\treturn adapter.db\n}", "func (m *ContactORM) ToPB(ctx context.Context) (Contact, error) {\n\tto := Contact{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(ContactWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil 
{\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.FirstName = m.FirstName\n\tto.MiddleName = m.MiddleName\n\tto.LastName = m.LastName\n\t// Skipping field: PrimaryEmail\n\tfor _, v := range m.Emails {\n\t\tif v != nil {\n\t\t\tif tempEmails, cErr := v.ToPB(ctx); cErr == nil {\n\t\t\t\tto.Emails = append(to.Emails, &tempEmails)\n\t\t\t} else {\n\t\t\t\treturn to, cErr\n\t\t\t}\n\t\t} else {\n\t\t\tto.Emails = append(to.Emails, nil)\n\t\t}\n\t}\n\tif posthook, ok := interface{}(m).(ContactWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *HealthMenstruationDailyEntryORM) ToPB(ctx context.Context) (HealthMenstruationDailyEntry, error) {\n\tto := HealthMenstruationDailyEntry{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tif to.Day, err = ptypes1.TimestampProto(*m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = HealthMenstruationDailyEntry_Type(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *HealthMenstruationPersonalInfoORM) ToPB(ctx context.Context) (HealthMenstruationPersonalInfo, error) {\n\tto := HealthMenstruationPersonalInfo{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToPB); ok {\n\t\tif err = 
prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tto.PeriodLengthInDays = m.PeriodLengthInDays\n\tto.CycleLengthInDays = m.CycleLengthInDays\n\tif posthook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (s *orm) ORM() *gorm.DB {\n\treturn s.client\n}", "func (m *UserInfoORM) ToPB(ctx context.Context) (UserInfo, error) {\n\tto := UserInfo{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(UserInfoWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.UserId = m.UserId\n\tto.LastName = m.LastName\n\tto.FirstName = m.FirstName\n\tto.Period = m.Period\n\tto.DepartmentId = m.DepartmentId\n\tto.JobId = m.JobId\n\tto.EnrollmentFlg = m.EnrollmentFlg\n\tto.AdminFlg = m.AdminFlg\n\tif m.CreatedAt != nil {\n\t\tto.CreatedAt = timestamppb.New(*m.CreatedAt)\n\t}\n\tif m.UpdatedAt != nil {\n\t\tto.UpdatedAt = timestamppb.New(*m.UpdatedAt)\n\t}\n\tif posthook, ok := interface{}(m).(UserInfoWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (auth *AuthModuleConversion) ToDB() ([]byte, error) {\n\treturn []byte{}, nil\n}", "func (m *Group) AfterToORM(ctx context.Context, a *GroupORM) error {\n\tfor _, item := range m.UserList {\n\t\tid, err := resource.DecodeInt64(&User{}, item)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\ta.UserList = append(a.UserList, &UserORM{Id: id})\n\t}\n\n\treturn nil\n}", "func ORMToModelBclassTranslate(\n\ttranslationImpact TranslationImpact,\n\tdb *gorm.DB) (Error error) 
{\n\n\tif translationImpact == CreateMode {\n\n\t\t// check that bclassStores are nil\n\n\t\tif map_BclassDBID_BclassPtr != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, Parameters bclassStore should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_BclassDBID_BclassDB != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, parameters BclassDBStore should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\t// init stores\n\t\ttmp := make(map[uint]*models.Bclass, 0)\n\t\tmap_BclassDBID_BclassPtr = &tmp\n\n\t\ttmpDB := make(map[uint]*BclassDB, 0)\n\t\tmap_BclassDBID_BclassDB = &tmpDB\n\n\t\ttmpID := make(map[*models.Bclass]uint, 0)\n\t\tmap_BclassPtr_BclassDBID = &tmpID\n\n\t\tmodels.AllModelStore.Bclasss = make([]*models.Bclass, 0)\n\n\t\tbclassDBArray := make([]BclassDB, 0)\n\t\tquery := db.Find(&bclassDBArray)\n\t\tif query.Error != nil {\n\t\t\treturn query.Error\n\t\t}\n\n\t\t// copy orm objects to the two stores\n\t\tfor _, bclassDB := range bclassDBArray {\n\n\t\t\t// create entries in the tree maps.\n\t\t\tbclass := bclassDB.Bclass\n\t\t\t(*map_BclassDBID_BclassPtr)[bclassDB.ID] = &bclass\n\n\t\t\t(*map_BclassPtr_BclassDBID)[&bclass] = bclassDB.ID\n\n\t\t\tbclassDBCopy := bclassDB\n\t\t\t(*map_BclassDBID_BclassDB)[bclassDB.ID] = &bclassDBCopy\n\n\t\t\t// append model store with the new element\n\t\t\tmodels.AllModelStore.Bclasss = append(models.AllModelStore.Bclasss, &bclass)\n\t\t}\n\t} else { // UpdateMode\n\t\t// for later, update of the data field\n\n\t\t// check that bclassStore is not nil\n\t\tif map_BclassDBID_BclassPtr == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, bclassStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_BclassDBID_BclassDB == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, bclassStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\t// update fields of bclassDB with fields of bclass\n\t\tfor _, bclass := range models.AllModelStore.Bclasss {\n\t\t\tbclassDBID := 
(*map_BclassPtr_BclassDBID)[bclass]\n\t\t\tbclassDB := (*map_BclassDBID_BclassDB)[bclassDBID]\n\n\t\t\t*bclass = bclassDB.Bclass\n\t\t}\n\n\t\t// parse all DB instance and update all pointer fields of the translated models instance\n\t\tfor _, bclassDB := range *map_BclassDBID_BclassDB {\n\t\t\tbclass := (*map_BclassDBID_BclassPtr)[bclassDB.ID]\n\t\t\tif bclass == nil {\n\t\t\t\terr := errors.New(\"cannot find translated instance in models store\")\n\t\t\t\treturn err\n\t\t\t}\n\n\n\n\n\t\t}\n\t}\n\n\treturn nil\n}", "func (p *ormPlugin) generateMapFunctions(message *generator.Descriptor) {\n\tccTypeNamePb := generator.CamelCaseSlice(message.TypeName())\n\tccTypeNameBase := lintName(ccTypeNamePb)\n\tccTypeNameOrm := fmt.Sprintf(\"%sORM\", ccTypeNameBase)\n\t///// To Orm\n\tp.P(`// Convert`, ccTypeNameBase, `ToORM takes a pb object and returns an orm object`)\n\tp.P(`func Convert`, ccTypeNameBase, `ToORM (from `,\n\t\tccTypeNamePb, `) `, ccTypeNameOrm, ` {`)\n\tp.P(`to := `, ccTypeNameOrm, `{}`)\n\tfor _, field := range message.Field {\n\t\t// Checking if field is skipped\n\t\tif field.Options != nil {\n\t\t\tv, err := proto.GetExtension(field.Options, gorm.E_Field)\n\t\t\tif err == nil && v.(*gorm.GormFieldOptions) != nil {\n\t\t\t\tif v.(*gorm.GormFieldOptions).Drop != nil && *v.(*gorm.GormFieldOptions).Drop {\n\t\t\t\t\tp.P(`// Skipping field: `, p.GetOneOfFieldName(message, field))\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tp.generateFieldMap(message, field, true)\n\t}\n\tp.P(`return to`)\n\tp.P(`}`)\n\n\tp.P()\n\t///// To Pb\n\tp.P(`// Convert`, ccTypeNameBase, `FromORM takes an orm object and returns a pb object`)\n\tp.P(`func Convert`, ccTypeNameBase, `FromORM (from `, ccTypeNameOrm, `) `,\n\t\tccTypeNamePb, ` {`)\n\tp.P(`to := `, ccTypeNamePb, `{}`)\n\tfor _, field := range message.Field {\n\t\t// Checking if field is skipped\n\t\tif field.Options != nil {\n\t\t\tv, err := proto.GetExtension(field.Options, gorm.E_Field)\n\t\t\tif err == nil && 
v.(*gorm.GormFieldOptions) != nil {\n\t\t\t\tif v.(*gorm.GormFieldOptions).Drop != nil && *v.(*gorm.GormFieldOptions).Drop {\n\t\t\t\t\tp.P(`// Skipping field: `, p.GetOneOfFieldName(message, field))\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tp.generateFieldMap(message, field, false)\n\t}\n\tp.P(`return to`)\n\tp.P(`}`)\n}", "func (tc TypeConverter) ToDb(val interface{}) (interface{}, error) {\n\tswitch val.(type) {\n\tcase map[string]interface{}:\n\t\treturn json.Marshal(val)\n\t}\n\treturn val, nil\n}", "func (m *CommentORM) ToPB(ctx context.Context) (Comment, error) {\n\tto := Comment{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(CommentWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = &types1.UUID{Value: m.Id.String()}\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.DeletedAt != nil {\n\t\tif to.DeletedAt, err = ptypes1.TimestampProto(*m.DeletedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.BoardId != nil {\n\t\tto.BoardId = &wrappers.StringValue{Value: *m.BoardId}\n\t}\n\tif m.PostId != nil {\n\t\tto.PostId = &wrappers.StringValue{Value: *m.PostId}\n\t}\n\tif m.ContentId != nil {\n\t\tto.ContentId = &wrappers.StringValue{Value: *m.ContentId}\n\t}\n\tif m.Userid != nil {\n\t\tto.Userid = &wrappers.StringValue{Value: *m.Userid}\n\t}\n\tif m.Username != nil {\n\t\tto.Username = &wrappers.StringValue{Value: *m.Username}\n\t}\n\tif m.Nickname != nil {\n\t\tto.Nickname = &wrappers.StringValue{Value: *m.Nickname}\n\t}\n\tif m.Email != nil {\n\t\tto.Email = &wrappers.StringValue{Value: *m.Email}\n\t}\n\tif m.Password != nil {\n\t\tto.Password = &wrappers.StringValue{Value: *m.Password}\n\t}\n\tif m.Url != nil {\n\t\tto.Url = 
&wrappers.StringValue{Value: *m.Url}\n\t}\n\tif m.UseHtml != nil {\n\t\tto.UseHtml = &wrappers.BoolValue{Value: *m.UseHtml}\n\t}\n\tif m.UseSecret != nil {\n\t\tto.UseSecret = &wrappers.BoolValue{Value: *m.UseSecret}\n\t}\n\tto.UpVoteCount = m.UpVoteCount\n\tto.DownVoteCount = m.DownVoteCount\n\tif posthook, ok := interface{}(m).(CommentWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *EmailORM) ToPB(ctx context.Context) (Email, error) {\n\tto := Email{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(EmailWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.Address = m.Address\n\tif posthook, ok := interface{}(m).(EmailWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *PeriodORM) ToPB(ctx context.Context) (Period, error) {\n\tto := Period{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(PeriodWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.Period = m.Period\n\tif m.CreatedAt != nil {\n\t\tto.CreatedAt = timestamppb.New(*m.CreatedAt)\n\t}\n\tif m.UpdatedAt != nil {\n\t\tto.UpdatedAt = timestamppb.New(*m.UpdatedAt)\n\t}\n\tif posthook, ok := interface{}(m).(PeriodWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func ConvertContactFromORM(from ContactORM) (Contact, error) {\n\tto := Contact{}\n\tvar err error\n\tto.Id = from.Id\n\tto.FirstName = from.FirstName\n\tto.MiddleName = from.MiddleName\n\tto.LastName = from.LastName\n\tto.EmailAddress = from.EmailAddress\n\treturn to, err\n}", "func (m *ProfileORM) ToPB(ctx context.Context) (Profile, error) {\n\tto := Profile{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(ProfileWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = 
m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Notes = m.Notes\n\tto.FirstName = m.FirstName\n\tto.LastName = m.LastName\n\tto.PrimaryEmail = m.PrimaryEmail\n\tfor _, v := range m.Groups {\n\t\tif v != nil {\n\t\t\tif tempGroups, cErr := v.ToPB(ctx); cErr == nil {\n\t\t\t\tto.Groups = append(to.Groups, &tempGroups)\n\t\t\t} else {\n\t\t\t\treturn to, cErr\n\t\t\t}\n\t\t} else {\n\t\t\tto.Groups = append(to.Groups, nil)\n\t\t}\n\t}\n\tto.ProfilePictureUrl = m.ProfilePictureUrl\n\tif posthook, ok := interface{}(m).(ProfileWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func ModelToORMStateTranslate(\n\ttranslationImpact TranslationImpact,\n\tdb *gorm.DB) (Error error) {\n\n\tif translationImpact == CreateMode {\n\n\t\t// check that stateStore is nil as well as stateDBs\n\t\tif map_StateDBID_StatePtr != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, map_StateDBID_StatePtr should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_StateDBID_StateDB != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, map_StateDBID_StateDB should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_StatePtr_StateDBID != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, map_StatePtr_StateDBID should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\ttmp := make(map[uint]*models.State, 0)\n\t\tmap_StateDBID_StatePtr = &tmp\n\n\t\ttmpDB := make(map[uint]*StateDB, 0)\n\t\tmap_StateDBID_StateDB = &tmpDB\n\n\t\ttmpID := make(map[*models.State]uint, 0)\n\t\tmap_StatePtr_StateDBID = &tmpID\n\n\t\tfor _, state := range models.AllModelStore.States {\n\n\t\t\t// initiate state\n\t\t\tvar stateDB StateDB\n\t\t\tstateDB.State = *state\n\n\t\t\tquery := db.Create(&stateDB)\n\t\t\tif query.Error != nil 
{\n\t\t\t\treturn query.Error\n\t\t\t}\n\n\t\t\t// update stores\n\t\t\t(*map_StatePtr_StateDBID)[state] = stateDB.ID\n\t\t\t(*map_StateDBID_StatePtr)[stateDB.ID] = state\n\t\t\t(*map_StateDBID_StateDB)[stateDB.ID] = &stateDB\n\t\t}\n\t} else { // UpdateMode, update IDs of Pointer Fields of ORM object\n\n\t\t// check that stateStore is not nil\n\t\tif map_StateDBID_StatePtr == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, stateStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_StateDBID_StateDB == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, stateStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\t// update fields of stateDB with fields of state\n\t\tfor _, state := range models.AllModelStore.States {\n\t\t\tstateDBID := (*map_StatePtr_StateDBID)[state]\n\t\t\tstateDB := (*map_StateDBID_StateDB)[stateDBID]\n\n\t\t\tstateDB.State = *state\n\t\t}\n\n\t\t// parse model objects ot update associations\n\t\tfor idx, state := range *map_StateDBID_StatePtr {\n\n\t\t\t// fetch matching stateDB\n\t\t\tif stateDB, ok := (*map_StateDBID_StateDB)[idx]; ok {\n\t\t\t\t// set {{Fieldname}}ID\n\n\t\t\t\tquery := db.Save(&stateDB)\n\t\t\t\tif query.Error != nil {\n\t\t\t\t\treturn query.Error\n\t\t\t\t}\n\n\t\t\t} else {\n\t\t\t\terr := errors.New(\n\t\t\t\t\tfmt.Sprintf(\"In UpdateMode translation, stateStore should not be nil %v %v\",\n\t\t\t\t\t\tstateDB, state))\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func ORMToModelStateTranslate(\n\ttranslationImpact TranslationImpact,\n\tdb *gorm.DB) (Error error) {\n\n\tif translationImpact == CreateMode {\n\n\t\t// check that stateStores are nil\n\n\t\tif map_StateDBID_StatePtr != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, Parameters stateStore should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_StateDBID_StateDB != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, parameters StateDBStore should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\t// 
init stores\n\t\ttmp := make(map[uint]*models.State, 0)\n\t\tmap_StateDBID_StatePtr = &tmp\n\n\t\ttmpDB := make(map[uint]*StateDB, 0)\n\t\tmap_StateDBID_StateDB = &tmpDB\n\n\t\ttmpID := make(map[*models.State]uint, 0)\n\t\tmap_StatePtr_StateDBID = &tmpID\n\n\t\tmodels.AllModelStore.States = make([]*models.State, 0)\n\n\t\tstateDBArray := make([]StateDB, 0)\n\t\tquery := db.Find(&stateDBArray)\n\t\tif query.Error != nil {\n\t\t\treturn query.Error\n\t\t}\n\n\t\t// copy orm objects to the two stores\n\t\tfor _, stateDB := range stateDBArray {\n\n\t\t\t// create entries in the tree maps.\n\t\t\tstate := stateDB.State\n\t\t\t(*map_StateDBID_StatePtr)[stateDB.ID] = &state\n\n\t\t\t(*map_StatePtr_StateDBID)[&state] = stateDB.ID\n\n\t\t\tstateDBCopy := stateDB\n\t\t\t(*map_StateDBID_StateDB)[stateDB.ID] = &stateDBCopy\n\n\t\t\t// append model store with the new element\n\t\t\tmodels.AllModelStore.States = append(models.AllModelStore.States, &state)\n\t\t}\n\t} else { // UpdateMode\n\t\t// for later, update of the data field\n\n\t\t// check that stateStore is not nil\n\t\tif map_StateDBID_StatePtr == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, stateStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_StateDBID_StateDB == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, stateStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\t// update fields of stateDB with fields of state\n\t\tfor _, state := range models.AllModelStore.States {\n\t\t\tstateDBID := (*map_StatePtr_StateDBID)[state]\n\t\t\tstateDB := (*map_StateDBID_StateDB)[stateDBID]\n\n\t\t\t*state = stateDB.State\n\t\t}\n\n\t\t// parse all DB instance and update all pointer fields of the translated models instance\n\t\tfor _, stateDB := range *map_StateDBID_StateDB {\n\t\t\tstate := (*map_StateDBID_StatePtr)[stateDB.ID]\n\t\t\tif state == nil {\n\t\t\t\terr := errors.New(\"cannot find translated instance in models store\")\n\t\t\t\treturn 
err\n\t\t\t}\n\n\t\t}\n\t}\n\n\treturn nil\n}", "func ModelToORMBclassTranslate(\n\ttranslationImpact TranslationImpact,\n\tdb *gorm.DB) (Error error) {\n\n\tif translationImpact == CreateMode {\n\n\t\t// check that bclassStore is nil as well as bclassDBs\n\t\tif map_BclassDBID_BclassPtr != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, map_BclassDBID_BclassPtr should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_BclassDBID_BclassDB != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, map_BclassDBID_BclassDB should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_BclassPtr_BclassDBID != nil {\n\t\t\terr := errors.New(\"In CreateMode translation, map_BclassPtr_BclassDBID should be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\ttmp := make(map[uint]*models.Bclass, 0)\n\t\tmap_BclassDBID_BclassPtr = &tmp\n\n\t\ttmpDB := make(map[uint]*BclassDB, 0)\n\t\tmap_BclassDBID_BclassDB = &tmpDB\n\n\t\ttmpID := make(map[*models.Bclass]uint, 0)\n\t\tmap_BclassPtr_BclassDBID = &tmpID\n\n\t\tfor _, bclass := range models.AllModelStore.Bclasss {\n\n\t\t\t// initiate bclass\n\t\t\tvar bclassDB BclassDB\n\t\t\tbclassDB.Bclass = *bclass\n\n\t\t\tquery := db.Create(&bclassDB)\n\t\t\tif query.Error != nil {\n\t\t\t\treturn query.Error\n\t\t\t}\n\n\t\t\t// update stores\n\t\t\t(*map_BclassPtr_BclassDBID)[bclass] = bclassDB.ID\n\t\t\t(*map_BclassDBID_BclassPtr)[bclassDB.ID] = bclass\n\t\t\t(*map_BclassDBID_BclassDB)[bclassDB.ID] = &bclassDB\n\t\t}\n\t} else { // UpdateMode, update IDs of Pointer Fields of ORM object\n\n\t\t// check that bclassStore is not nil\n\t\tif map_BclassDBID_BclassPtr == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, bclassStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\tif map_BclassDBID_BclassDB == nil {\n\t\t\terr := errors.New(\"In UpdateMode translation, bclassStore should not be nil\")\n\t\t\treturn err\n\t\t}\n\n\t\t// update fields of bclassDB with fields of bclass\n\t\tfor _, bclass := range 
models.AllModelStore.Bclasss {\n\t\t\tbclassDBID := (*map_BclassPtr_BclassDBID)[bclass]\n\t\t\tbclassDB := (*map_BclassDBID_BclassDB)[bclassDBID]\n\n\t\t\tbclassDB.Bclass = *bclass\n\t\t}\n\n\t\t// parse model objects ot update associations\n\t\tfor idx, bclass := range *map_BclassDBID_BclassPtr {\n\n\t\t\t// fetch matching bclassDB\n\t\t\tif bclassDB, ok := (*map_BclassDBID_BclassDB)[idx]; ok {\n\t\t\t\t// set {{Fieldname}}ID\n\n\n\n\t\t\t\tquery := db.Save(&bclassDB)\n\t\t\t\tif query.Error != nil {\n\t\t\t\t\treturn query.Error\n\t\t\t\t}\n\n\t\t\t} else {\n\t\t\t\terr := errors.New(\n\t\t\t\t\tfmt.Sprintf(\"In UpdateMode translation, bclassStore should not be nil %v %v\",\n\t\t\t\t\t\tbclassDB, bclass))\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func CreateORMFoo(foo *Foo) {\n\tfoo.Stage()\n\tif Stage.AllModelsStructCreateCallback != nil {\n\t\tStage.AllModelsStructCreateCallback.CreateORMFoo(foo)\n\t}\n}", "func GenORMSetup(db *gorm.DB) {\n\n\t// relative to the models package, swith to ../controlers package\n\tfilename := filepath.Join(OrmPkgGenPath, \"setup.go\")\n\n\t// we should use go generate\n\tlog.Println(\"generating orm setup file : \" + filename)\n\n\tf, err := os.Create(filename)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\t// create the list of structs\n\tvar structs []models.Struct\n\tdb.Find(&structs)\n\n\tLISTOFSTRUCT := \"\\n\"\n\n\tdeleteCalls := \"\"\n\n\tfor idx, _struct := range structs {\n\t\tif idx != 0 {\n\t\t\tLISTOFSTRUCT += \",\\n\"\n\t\t}\n\t\tLISTOFSTRUCT += fmt.Sprintf(\"\\t\\t&%sDB{}\", _struct.Name)\n\n\t\tdeleteCalls += fmt.Sprintf(\"\\tdb.Delete(&%sDB{})\\n\", _struct.Name)\n\n\t\tfmt.Printf(\"\t\torm.LoadDB%s(%ss, db)\\n\", _struct.Name, _struct.Name)\n\t}\n\tres := strings.ReplaceAll(template, \"{{LISTOFSTRUCT}}\", LISTOFSTRUCT)\n\n\tres = strings.ReplaceAll(res, \"{{Deletes}}\", deleteCalls)\n\n\tfmt.Fprintf(f, \"%s\", res)\n\n\tdefer f.Close()\n}", "func (track *Track) ToDb() interface{} {\n\treturn 
track.Id\n}", "func (m *Contact) BeforeToORM(ctx context.Context, c *ContactORM) error {\n\tif m.PrimaryEmail != \"\" {\n\t\tfor _, mail := range m.Emails {\n\t\t\tif mail.Address == m.PrimaryEmail {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\tc.Emails = append(c.Emails, &EmailORM{Address: m.PrimaryEmail, IsPrimary: true})\n\t}\n\treturn nil\n}", "func (tf Task) ToEntity(tm model.Task) domain.Task {\n\treturn domain.Task{\n\t\tID: tm.ID,\n\t\tTitle: tm.Title,\n\t\tDescription: tm.Description.String,\n\t\tDueDate: tm.DueDate.Time,\n\t\tCreatedAt: tm.CreatedAt,\n\t\tUpdatedAt: tm.UpdatedAt,\n\t}\n}", "func (this *BoltDBAbstractDAO) jsonToStruct(value string) interface{} {\n\tif this.createEntity != nil {\n\t\tentity := this.createEntity()\n\t\terr := json.Unmarshal([]byte(value), entity)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\treturn entity\n\t} else {\n\t\tpanic(\"function 'createEntity' not defined in DAO\")\n\t}\n}", "func setupDB(DB *gorm.DB) {\n\t// remove original functions to update timestamps, we will maintain that by ourself in models in\n\t// BeforeCreate, BeforeSave methods\n\tDB.Callback().Create().Remove(\"gorm:update_time_stamp\")\n}", "func (self *CreateManager) ToSql() (string, error) {\n\tif nil == self.adapter {\n\t\tself.adapter = \"to_sql\"\n\t}\n\n\treturn VisitorFor(self.adapter).Accept(self.Tree)\n}", "func Db() *gorm.DB {\n\treturn entity.Db()\n}", "func (crtEP *MdlPersonCreateExt) AfterDB(ent interface{}) error {\n\n\t// fmt.Println(\"TypeOf ent:\", reflect.TypeOf(ent))\n\t// fmt.Println(\"ValueOf ent:\", reflect.ValueOf(ent))\n\t// p := ent.(*models.Person)\n\n\t// make changes / validate the content struct pointer (p) here\n\t// p.Name = \"A new field value\"\n\treturn nil\n}", "func ToPersistenceInfo(t *admin.PersistenceInfo) *types.PersistenceInfo {\n\tif t == nil {\n\t\treturn nil\n\t}\n\treturn &types.PersistenceInfo{\n\t\tBackend: t.GetBackend(),\n\t\tSettings: ToPersistenceSettings(t.Settings),\n\t\tFeatures: 
ToPersistenceFeatures(t.Features),\n\t}\n}", "func InitORM() {\n\tdb, err := gorm.Open(\"sqlite3\", \"test.db\")\n\tif err != nil {\n\t\t//Panic is a built-in function that stops the ordinary flow of control and begins panicking.\n\t\t//When the function F calls panic, execution of F stops, any deferred functions in F are executed normally,\n\t\t//and then F returns to its caller\n\t\tpanic(\"failed to connect database\")\n\t}\n\tdefer db.Close() //remember the use of defer? A defer statement defers the execution of close until the InitORM function returns\n\n\t// Migrate the schema\n\tdb.AutoMigrate(&User{})\n\n\t// Create\n\tdb.Create(&User{Name: \"Renjith\", Email: \"[email protected]\", Gender: \"Male\", City: \"Cochin\"})\n\n\t// Read\n\tvar user User\n\tdb.First(&user, 1) // find user with id 1\n\tdb.First(&user, \"name = ?\", \"Renjith\") // find user with name Renjith\n\n\tfmt.Println(user)\n\t// Update - update user's city to Kochi\n\tdb.Model(&user).Update(\"City\", \"Kochi\")\n\t// Delete - delete product\n\tdb.Delete(&user)\n}", "func ToEntity(om *OrganizationMongo) *Organization {\n\tID := \"\"\n\tif !om.ID.IsZero() {\n\t\tID = om.ID.Hex()\n\t}\n\tIDsCategory := make([]string, 0)\n\tif om.IDsCategory != nil && len(om.IDsCategory) > 0 {\n\t\tfor _, id := range om.IDsCategory {\n\t\t\tIDsCategory = append(IDsCategory, id.Hex())\n\t\t}\n\t}\n\n\treturn &Organization{\n\t\tID: ID,\n\t\tName: om.Name,\n\t\tEmail: om.Email,\n\t\tPhone: om.Phone,\n\t\tStatus: om.Status,\n\t\tIDsCategory: IDsCategory,\n\t}\n}", "func (m *IntPointORM) ToPB(ctx context.Context) (IntPoint, error) {\n\tto := IntPoint{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(IntPointWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.X = m.X\n\tto.Y = m.Y\n\tif posthook, ok := interface{}(m).(IntPointWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (getEP 
*MdlPersonGetExt) AfterDB(ent interface{}) error {\n\n\t// fmt.Println(\"TypeOf ent:\", reflect.TypeOf(ent))\n\t// fmt.Println(\"ValueOf ent:\", reflect.ValueOf(ent))\n\t// p := ent.(*Person)\n\n\t// make changes / validate the content struct pointer (p) here\n\t// p.Name = \"A new field value\"\n\treturn nil\n}", "func (newAdmin *NewAdmin) ToAdmin() (*Admin, error) {\n\n\t// Construct a Admin based on NewAdmin.\n\tadmin := &Admin{\n\t\tID: bson.NewObjectId(),\n\t\tUserName: newAdmin.UserName,\n\t\tFirstName: newAdmin.FirstName,\n\t\tLastName: newAdmin.LastName,\n\t}\n\n\t// Trim leading and trailing whitespace from an email address.\n\temail := strings.TrimSpace(newAdmin.Email)\n\n\t// Force all characters in the email to be lower-case.\n\temail = strings.ToLower(email)\n\n\t// Update Email field.\n\tadmin.Email = email\n\n\t// md5 hash the final email string.\n\th := md5.New()\n\tio.WriteString(h, email)\n\tresult := hex.EncodeToString(h.Sum(nil))\n\n\t// Set the PhotoURL field of the new Admin to\n\t// the Gravatar PhotoURL for the admin's email address.\n\tphotoURL := gravatarBasePhotoURL + result\n\tadmin.PhotoURL = photoURL\n\n\t// Call .SetPassword() to set the PassHash\n\t// field of the Admin to a hash of the NewAdmin.Password.\n\terr := admin.SetPassword(newAdmin.Password)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error setting password hash of the Admin: %v\", err)\n\t}\n\n\treturn admin, nil\n}", "func (Mapper) ModelToEntity(m, e interface{}) error {\r\n\tobj, err := json.Marshal(m)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\treturn json.Unmarshal(obj, e)\r\n}", "func (r Role) MapToModel(m map[string]interface{}) Role {\n\tr.Id = m[\"id\"].(int)\n\tr.Name, _ = m[\"name\"].(string)\n\tr.Slug, _ = m[\"slug\"].(string)\n\tr.CreatedAt, _ = m[\"created_at\"].(string)\n\tr.UpdatedAt, _ = m[\"updated_at\"].(string)\n\treturn r\n}", "func Transform(db *gorm.DB, queries ...Query) *gorm.DB {\n\tfor _, q := range queries {\n\t\tdb = 
q(db)\n\t}\n\n\treturn db\n}", "func (w *NotificationPolicy) ConvertFromDBModel() error {\n\ttargets := []EventTarget{}\n\tif len(w.TargetsDB) != 0 {\n\t\terr := json.Unmarshal([]byte(w.TargetsDB), &targets)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tw.Targets = targets\n\n\ttypes := []string{}\n\tif len(w.EventTypesDB) != 0 {\n\t\terr := json.Unmarshal([]byte(w.EventTypesDB), &types)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tw.EventTypes = types\n\n\treturn nil\n}", "func GetDataBase() *gorm.DB {\n\treturn db\n}", "func (updEP *MdlPersonUpdateExt) AfterDB(ent interface{}) error {\n\n\t// fmt.Println(\"TypeOf ent:\", reflect.TypeOf(ent))\n\t// fmt.Println(\"ValueOf ent:\", reflect.ValueOf(ent))\n\t// p := ent.(*models.Person)\n\n\t// make changes / validate the content struct pointer (p) here\n\t// p.Name = \"A new field value\"\n\treturn nil\n}", "func (crtEP *MdlPersonCreateExt) BeforeDB(ent interface{}) error {\n\n\t// fmt.Println(\"TypeOf ent:\", reflect.TypeOf(ent))\n\t// fmt.Println(\"ValueOf ent:\", reflect.ValueOf(ent))\n\t// p := ent.(*models.Person)\n\n\t// make changes / validate the content struct pointer (p) here\n\t// p.Name = \"A new field value\"\n\treturn nil\n}", "func (getEP *MdlPersonGetExt) BeforeDB(ent interface{}) error {\n\n\t// fmt.Println(\"TypeOf ent:\", reflect.TypeOf(ent))\n\t// fmt.Println(\"ValueOf ent:\", reflect.ValueOf(ent))\n\t// p := ent.(*models.Person)\n\n\t// make changes / validate the content struct pointer (p) here\n\t// p.Name = \"A new field value\"\n\treturn nil\n}", "func toRecord(cache airtabledb.DB, src Feature, dst interface{}) {\n\tdV := reflect.ValueOf(dst).Elem().FieldByName(\"Fields\")\n\tsV := reflect.ValueOf(src)\n\tcopyFields(cache, sV, dV)\n}", "func NewORM[I ID, C Config, N Node](q pg.Q, prefix string, nodeCols ...string) ORM[I, C, N] {\n\treturn orm[I, C, N]{\n\t\tnewChainsORM[I, C](q, prefix),\n\t\tnewNodesORM[I, N](q, prefix, nodeCols...),\n\t}\n}", "func (updEP *MdlPersonUpdateExt) 
BeforeDB(ent interface{}) error {\n\n\t// fmt.Println(\"TypeOf ent:\", reflect.TypeOf(ent))\n\t// fmt.Println(\"ValueOf ent:\", reflect.ValueOf(ent))\n\t// p := ent.(*models.Person)\n\n\t// make changes / validate the content struct pointer (p) here\n\t// p.Name = \"A new field value\"\n\treturn nil\n}", "func (l *Contact) ToModel() model.Contact {\n\treturn model.Contact{\n\t\t// Code omitted.\n\t}\n\n}", "func (profile *Profile) ToDb() interface{} {\n\treturn profile.Id\n}", "func GormInit(conf *config.Config, logger Logger) (*gorm.DB, error) {\n\n\tdb, openErr := gorm.Open(conf.DBType, conf.DBParams)\n\tif openErr != nil {\n\t\tlog.CheckError(openErr)\n\t\treturn nil, openErr\n\t}\n\n\tIsSqlite = conf.DBType == SqliteType\n\n\tconnectionErr := db.DB().Ping()\n\tif connectionErr != nil {\n\t\tlog.CheckError(connectionErr)\n\t\treturn nil, connectionErr\n\t}\n\n\t// Negative MaxIdleConns means don't retain any idle connection\n\tmaxIdleConns := -1\n\tif IsSqlite {\n\t\t// sqlite doesn't like having a negative maxIdleConns\n\t\tmaxIdleConns = 10\n\t}\n\n\tdb.DB().SetMaxIdleConns(maxIdleConns)\n\tdb.DB().SetMaxOpenConns(400)\n\n\tif config.Environment == \"DEVELOPMENT\" {\n\t\tdb.LogMode(true)\n\t}\n\n\tswitch conf.DBLogMode {\n\tcase \"detailed\":\n\t\tdb.LogMode(true)\n\tcase \"silent\":\n\t\tdb.LogMode(false)\n\t}\n\n\tif logger != nil {\n\t\tdb.SetLogger(logger)\n\t}\n\n\tdb.AutoMigrate(&model.User{}, &model.UserFollows{}, &model.UserUploadsOld{}, &model.Notification{})\n\tif db.Error != nil {\n\t\treturn db, db.Error\n\t}\n\tdb.AutoMigrate(&model.Torrent{}, &model.TorrentReport{})\n\tif db.Error != nil {\n\t\treturn db, db.Error\n\t}\n\tdb.AutoMigrate(&model.File{})\n\tif db.Error != nil {\n\t\treturn db, db.Error\n\t}\n\tdb.AutoMigrate(&model.Comment{}, &model.OldComment{})\n\tif db.Error != nil {\n\t\treturn db, db.Error\n\t}\n\n\treturn db, nil\n}", "func (m *FileUserDatabase) ToDatabaseModel() (model *FileDatabaseModel) {\n\tmodel = 
&FileDatabaseModel{\n\t\tUsers: map[string]FileDatabaseUserDetailsModel{},\n\t}\n\n\tm.RLock()\n\n\tfor user, details := range m.Users {\n\t\tmodel.Users[user] = details.ToUserDetailsModel()\n\t}\n\n\tm.RUnlock()\n\n\treturn model\n}", "func (_m *APIDefinitionConverter) ToEntity(apiModel *model.APIDefinition) *api.Entity {\n\tret := _m.Called(apiModel)\n\n\tvar r0 *api.Entity\n\tif rf, ok := ret.Get(0).(func(*model.APIDefinition) *api.Entity); ok {\n\t\tr0 = rf(apiModel)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*api.Entity)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (b *Blog) Unwrap() *Blog {\n\t_tx, ok := b.config.driver.(*txDriver)\n\tif !ok {\n\t\tpanic(\"entv2: Blog is not a transactional entity\")\n\t}\n\tb.config.driver = _tx.drv\n\treturn b\n}", "func Use(db *gorm.DB) *DB {\n\treturn &DB{\n\t\tdb: db,\n\t}\n}", "func ToPersistenceSetting(t *admin.PersistenceSetting) *types.PersistenceSetting {\n\tif t == nil {\n\t\treturn nil\n\t}\n\treturn &types.PersistenceSetting{\n\t\tKey: t.GetKey(),\n\t\tValue: t.GetValue(),\n\t}\n}", "func ToPersistenceFeature(t *admin.PersistenceFeature) *types.PersistenceFeature {\n\tif t == nil {\n\t\treturn nil\n\t}\n\treturn &types.PersistenceFeature{\n\t\tKey: t.GetKey(),\n\t\tEnabled: t.GetEnabled(),\n\t}\n}", "func (d *Person) ToDocument() dynago.Document {\n\treturn dynago.Document{\n\t\t\"name\": d.Name,\n\t\t\"email\": d.Email,\n\t\t\"car_type\": d.CarType,\n\t\t\"drive\": d.Driver,\n\t\t\"lat\": d.Lat,\n\t\t\"lng\": d.Lng,\n\t\t\"geohash\": d.GeoHash,\n\t}\n}", "func (m *Matchers) ToDB() ([]byte, error) {\n\tblobMatchers, err := json.Marshal(m)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to convert matchers to send to the database: %w\", err)\n\t}\n\treturn blobMatchers, nil\n}", "func WrapDB(db *gorm.DB) (DB, error) {\n\tif db == nil {\n\t\treturn nil, fmt.Errorf(\"[err] WrapDB empty params\")\n\t}\n\twdb := &wrapDB{gorm: db}\n\t// add callback functions to 
*gorm.DB\n\twdb.registerCallbacks()\n\treturn wdb, nil\n}", "func (tf Task) ToModel(te domain.Task) model.Task {\n\treturn model.Task{\n\t\tID: te.ID,\n\t\tTitle: te.Title,\n\t\tDescription: sql.NullString{String: te.Description, Valid: len(te.Description) > 0},\n\t\tDueDate: sql.NullTime{Time: te.DueDate, Valid: !te.DueDate.IsZero()},\n\t\tCreatedAt: te.CreatedAt,\n\t\tUpdatedAt: te.UpdatedAt,\n\t}\n}", "func (o *MySQLOutput) Setup() error {\n\tdb, err := gorm.Open(mysql.Open(fmt.Sprintf(\"%s:%s@tcp(%s:%s)/?%s\", o.Cfg.User, o.Cfg.Password, o.Cfg.Host, o.Cfg.Port, \"charset=utf8mb4&parseTime=true\")), &gorm.Config{DisableForeignKeyConstraintWhenMigrating: true})\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to establish a general connection: %s\", err)\n\t}\n\terr = db.Exec(fmt.Sprintf(\"CREATE DATABASE IF NOT EXISTS `%s` DEFAULT CHARACTER SET = `utf8mb4` DEFAULT COLLATE = `utf8mb4_unicode_ci`;\", o.Cfg.Database)).Error\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to create database: %v\", err)\n\t}\n\tdb, err = gorm.Open(mysql.Open(fmt.Sprintf(\"%s:%s@tcp(%s:%s)/%s?%s\", o.Cfg.User, o.Cfg.Password, o.Cfg.Host, o.Cfg.Port, o.Cfg.Database, \"charset=utf8mb4&parseTime=true\")), &gorm.Config{DisableForeignKeyConstraintWhenMigrating: true})\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to establish a database connection: %v\", err)\n\t}\n\tdb = db.Set(\"gorm:table_options\", \"CHARSET=utf8mb4 ENGINE=InnoDB COLLATE=utf8mb4_unicode_ci\")\n\to.Client = db.Session(&gorm.Session{Logger: o.Cfg.Logger})\n\tfor table, model := range o.RelatedModels {\n\t\tif err := o.Client.Table(table).AutoMigrate(model); err != nil {\n\t\t\treturn fmt.Errorf(\"table %s auto-migration error: %v\", table, err)\n\t\t}\n\t}\n\tif err := o.buildTablesInfo(); err != nil {\n\t\treturn fmt.Errorf(\"failed to gather tables information: %v\", err)\n\t}\n\treturn nil\n}", "func Instance() *gorm.DB {\n return database\n}", "func PostAutoMigrate(db *gorm.DB) error {\n\t// These types 
don't apply for sqlite -- just mysql.\n\tif db.Dialect().GetName() == mySQLDialect {\n\t\tdb.Model(&Invocation{}).ModifyColumn(\"pattern\", \"text\")\n\t\tdb.Model(&Execution{}).ModifyColumn(\"serialized_operation\", \"text\")\n\t}\n\treturn nil\n}", "func (d *Dao) Close() (err error) {\n\tif d.WriteORM != nil {\n\t\td.WriteORM.Close()\n\t}\n\tif d.ReadORM != nil {\n\t\td.ReadORM.Close()\n\t}\n\treturn\n}", "func GetOrmer() orm.Ormer {\n\tonce.Do(func() {\n\t\t// override the default value(1000) to return all records when setting no limit\n\t\torm.DefaultRowsLimit = -1\n\t\tglobalOrm = orm.NewOrm()\n\t})\n\treturn globalOrm\n}", "func (r *RawRecord) UnmarshalTo(obj restlicodec.Unmarshaler) error {\n\treturn obj.UnmarshalRestLi(restlicodec.NewInterfaceReader(*r))\n}", "func NewDao(m interface{}, db *sql.DB, opts ...options.DaoOption) *Dao {\n\tdao := &Dao{\n\t\tdb: db,\n\t}\n\t// options\n\tcfg := options.DaoOptions{}\n\tfor _, fn := range opts {\n\t\tfn(&cfg)\n\t}\n\tif cfg.Table != \"\" {\n\t\tdao.table = cfg.Table\n\t} else {\n\t\tdao.table = strutils.ToUnderscore(model.ParseTableName(m))\n\t}\n\tdao.modelType = model.RealType(m)\n\t// fields\n\tfields := model.Parse(m)\n\tif len(fields) < 1 {\n\t\tpanic(\"No fields found in model given\")\n\t}\n\tdao.fields = fields\n\tdao.primaries = []*types.ModelField{}\n\tdao.columnMap = make(map[string]*types.ModelField, len(fields))\n\tdao.fieldMap = make(map[string]*types.ModelField, len(fields))\n\tcolumnsBuilder := strings.Builder{}\n\tholderBuilder := strings.Builder{}\n\tselectFields := make([]string, 0, len(fields))\n\tfor _, field := range fields {\n\t\tdao.columnMap[field.Column] = field\n\t\tdao.fieldMap[field.Name] = field\n\t\tif field.Primary {\n\t\t\tdao.primaries = append(dao.primaries, field)\n\t\t}\n\t\t{\n\t\t\tif columnsBuilder.Len() > 0 {\n\t\t\t\tcolumnsBuilder.WriteString(\", \")\n\t\t\t\tholderBuilder.WriteString(\", 
\")\n\t\t\t}\n\t\t\tcolumnsBuilder.WriteByte('`')\n\t\t\tcolumnsBuilder.WriteString(field.Column)\n\t\t\tcolumnsBuilder.WriteByte('`')\n\t\t\tholderBuilder.WriteString(\"?\")\n\t\t\tselectFields = append(selectFields, field.Name)\n\t\t}\n\t}\n\tif len(dao.primaries) < 1 {\n\t\tpanic(\"No primary key found\")\n\t}\n\tdao.columnsAll = columnsBuilder.String()\n\tdao.valuesHolder = holderBuilder.String()\n\tdao.selectColumns = selectFields\n\treturn dao\n}", "func (m *ArticleDB) DB() interface{} {\n\treturn m.Db\n}", "func (m *ArticleDB) DB() interface{} {\n\treturn m.Db\n}", "func Factory() (*ORM, error) {\n\tdsn := fmt.Sprintf(\"host=%s user=%s dbname=%s sslmode=%s password=%s\", hostDB, userDB, nameDB, sslDB, passwordDB) //Build connection string\n\n\tdb, err := gorm.Open(dialect, dsn)\n\tif err != nil {\n\t\tlog.Panic(\"[ORM] err: \", err)\n\t}\n\torm := &ORM{\n\t\tDB: db,\n\t}\n\t// Log every SQL command on dev, @prod: this should be disabled?\n\tdb.LogMode(logMode)\n\t// Automigrate tables\n\tif autoMigrate {\n\t\terr = migration.ServiceAutoMigration(orm.DB)\n\t}\n\tlog.Info(\"[ORM] Database connection initialized.\")\n\treturn orm, err\n}", "func GetEngine() *xorm.Engine {\n\treturn orm\n}", "func (_m *MockORM) Create(value interface{}) ORM {\n\tret := _m.Called(value)\n\n\tvar r0 ORM\n\tif rf, ok := ret.Get(0).(func(interface{}) ORM); ok {\n\t\tr0 = rf(value)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(ORM)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (s *Service) ToObject(data *Data) (r interface{}, err error) {\n\tdefer func() {\n\t\tif rec := recover(); rec != nil {\n\t\t\terr = makeError(rec)\n\t\t}\n\t}()\n\tvar ok bool\n\tif data == nil {\n\t\treturn nil, nil\n\t}\n\ttypeID := data.Type()\n\tserializer := s.lookupBuiltinDeserializer(typeID)\n\tif serializer == nil {\n\t\tserializer, ok = s.registry[typeID]\n\t\tif !ok {\n\t\t\treturn nil, ihzerrors.NewSerializationError(fmt.Sprintf(\"there is no suitable de-serializer for type %d\", typeID), 
nil)\n\t\t}\n\t}\n\tdataInput := NewObjectDataInput(data.Buffer(), DataOffset, s, !s.SerializationConfig.LittleEndian)\n\treturn serializer.Read(dataInput), nil\n}", "func MarshalToDB(fromValue interface{}) (interface{}, bool) {\n\tswitch fromValue.(type) {\n\tcase primitive.ObjectID:\n\t\treturn fromValue, true\n\tcase bool:\n\t\treturn fromValue, true\n\tcase string:\n\t\treturn fromValue, true\n\tcase int:\n\t\treturn int32(fromValue.(int)), true\n\tcase int8:\n\t\treturn int32(fromValue.(int8)), true\n\tcase int16:\n\t\treturn int32(fromValue.(int16)), true\n\tcase int32: // also covers rune\n\t\treturn int32(fromValue.(int32)), true\n\tcase int64:\n\t\treturn int64(fromValue.(int64)), true\n\tcase uint:\n\t\treturn int64(fromValue.(uint)), true\n\tcase uint8: // also covers byte\n\t\treturn int32(fromValue.(uint8)), true\n\tcase uint16:\n\t\treturn int32(fromValue.(uint16)), true\n\tcase uint32:\n\t\treturn int64(fromValue.(uint32)), true\n\tcase uint64:\n\t\tval := fromValue.(uint64)\n\t\treturn strconv.FormatUint(val, 10), true\n\tcase float32:\n\t\treturn float64(fromValue.(float32)), true\n\tcase float64:\n\t\treturn float64(fromValue.(float64)), true\n\tcase complex64:\n\t\treturn strconv.FormatComplex(complex128(fromValue.(complex64)), 'f', -1, 64), true\n\tcase complex128:\n\t\treturn strconv.FormatComplex(fromValue.(complex128), 'f', -1, 128), true\n\tcase *bool:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*bool)))\n\t\t}\n\t\treturn nil, true\n\tcase *string:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*string)))\n\t\t}\n\t\treturn nil, true\n\tcase *int:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*int)))\n\t\t}\n\t\treturn nil, true\n\tcase *int8:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*int8)))\n\t\t}\n\t\treturn nil, true\n\tcase *int16:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*int16)))\n\t\t}\n\t\treturn nil, true\n\tcase 
*int32:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*int32)))\n\t\t}\n\t\treturn nil, true\n\tcase *int64:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*int64)))\n\t\t}\n\t\treturn nil, true\n\tcase *uint:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*uint)))\n\t\t}\n\t\treturn nil, true\n\tcase *uint8:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*uint8)))\n\t\t}\n\t\treturn nil, true\n\tcase *uint16:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*uint16)))\n\t\t}\n\t\treturn nil, true\n\tcase *uint32:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*uint32)))\n\t\t}\n\t\treturn nil, true\n\tcase *uint64:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*uint64)))\n\t\t}\n\t\treturn nil, true\n\tcase *float32:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*float32)))\n\t\t}\n\t\treturn nil, true\n\tcase *float64:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*float64)))\n\t\t}\n\t\treturn nil, true\n\tcase *complex64:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*complex64)))\n\t\t}\n\t\treturn nil, true\n\tcase *complex128:\n\t\tif fromValue != nil {\n\t\t\treturn MarshalToDB(*(fromValue.(*complex128)))\n\t\t}\n\t\treturn nil, true\n\tdefault:\n\t\treturn fromValue, false\n\t}\n}", "func (d *dbBase) convertValueFromDB(fi *fieldInfo, val interface{}, tz *time.Location) (interface{}, error) {\n\tif val == nil {\n\t\treturn nil, nil\n\t}\n\n\tvar value interface{}\n\tvar tErr error\n\n\tvar str *StrTo\n\tswitch v := val.(type) {\n\tcase []byte:\n\t\ts := StrTo(string(v))\n\t\tstr = &s\n\tcase string:\n\t\ts := StrTo(v)\n\t\tstr = &s\n\t}\n\n\tfieldType := fi.fieldType\n\nsetValue:\n\tswitch {\n\tcase fieldType == TypeBooleanField:\n\t\tif str == nil {\n\t\t\tswitch v := val.(type) {\n\t\t\tcase int64:\n\t\t\t\tb := v == 1\n\t\t\t\tvalue = b\n\t\t\tdefault:\n\t\t\t\ts 
:= StrTo(ToStr(v))\n\t\t\t\tstr = &s\n\t\t\t}\n\t\t}\n\t\tif str != nil {\n\t\t\tb, err := str.Bool()\n\t\t\tif err != nil {\n\t\t\t\ttErr = err\n\t\t\t\tgoto end\n\t\t\t}\n\t\t\tvalue = b\n\t\t}\n\tcase fieldType == TypeVarCharField || fieldType == TypeCharField || fieldType == TypeTextField || fieldType == TypeJSONField || fieldType == TypeJsonbField:\n\t\tif str == nil {\n\t\t\tvalue = ToStr(val)\n\t\t} else {\n\t\t\tvalue = str.String()\n\t\t}\n\tcase fieldType == TypeTimeField || fieldType == TypeDateField || fieldType == TypeDateTimeField:\n\t\tif str == nil {\n\t\t\tswitch t := val.(type) {\n\t\t\tcase time.Time:\n\t\t\t\td.ins.TimeFromDB(&t, tz)\n\t\t\t\tvalue = t\n\t\t\tdefault:\n\t\t\t\ts := StrTo(ToStr(t))\n\t\t\t\tstr = &s\n\t\t\t}\n\t\t}\n\t\tif str != nil {\n\t\t\ts := str.String()\n\t\t\tvar (\n\t\t\t\tt time.Time\n\t\t\t\terr error\n\t\t\t)\n\n\t\t\tif fi.timePrecision != nil && len(s) >= (20+*fi.timePrecision) {\n\t\t\t\tlayout := formatDateTime + \".\"\n\t\t\t\tfor i := 0; i < *fi.timePrecision; i++ {\n\t\t\t\t\tlayout += \"0\"\n\t\t\t\t}\n\t\t\t\tt, err = time.ParseInLocation(layout, s[:20+*fi.timePrecision], tz)\n\t\t\t} else if len(s) >= 19 {\n\t\t\t\ts = s[:19]\n\t\t\t\tt, err = time.ParseInLocation(formatDateTime, s, tz)\n\t\t\t} else if len(s) >= 10 {\n\t\t\t\tif len(s) > 10 {\n\t\t\t\t\ts = s[:10]\n\t\t\t\t}\n\t\t\t\tt, err = time.ParseInLocation(formatDate, s, tz)\n\t\t\t} else if len(s) >= 8 {\n\t\t\t\tif len(s) > 8 {\n\t\t\t\t\ts = s[:8]\n\t\t\t\t}\n\t\t\t\tt, err = time.ParseInLocation(formatTime, s, tz)\n\t\t\t}\n\t\t\tt = t.In(DefaultTimeLoc)\n\n\t\t\tif err != nil && s != \"00:00:00\" && s != \"0000-00-00\" && s != \"0000-00-00 00:00:00\" {\n\t\t\t\ttErr = err\n\t\t\t\tgoto end\n\t\t\t}\n\t\t\tvalue = t\n\t\t}\n\tcase fieldType&IsIntegerField > 0:\n\t\tif str == nil {\n\t\t\ts := StrTo(ToStr(val))\n\t\t\tstr = &s\n\t\t}\n\t\tif str != nil {\n\t\t\tvar err error\n\t\t\tswitch fieldType {\n\t\t\tcase TypeBitField:\n\t\t\t\t_, err = 
str.Int8()\n\t\t\tcase TypeSmallIntegerField:\n\t\t\t\t_, err = str.Int16()\n\t\t\tcase TypeIntegerField:\n\t\t\t\t_, err = str.Int32()\n\t\t\tcase TypeBigIntegerField:\n\t\t\t\t_, err = str.Int64()\n\t\t\tcase TypePositiveBitField:\n\t\t\t\t_, err = str.Uint8()\n\t\t\tcase TypePositiveSmallIntegerField:\n\t\t\t\t_, err = str.Uint16()\n\t\t\tcase TypePositiveIntegerField:\n\t\t\t\t_, err = str.Uint32()\n\t\t\tcase TypePositiveBigIntegerField:\n\t\t\t\t_, err = str.Uint64()\n\t\t\t}\n\t\t\tif err != nil {\n\t\t\t\ttErr = err\n\t\t\t\tgoto end\n\t\t\t}\n\t\t\tif fieldType&IsPositiveIntegerField > 0 {\n\t\t\t\tv, _ := str.Uint64()\n\t\t\t\tvalue = v\n\t\t\t} else {\n\t\t\t\tv, _ := str.Int64()\n\t\t\t\tvalue = v\n\t\t\t}\n\t\t}\n\tcase fieldType == TypeFloatField || fieldType == TypeDecimalField:\n\t\tif str == nil {\n\t\t\tswitch v := val.(type) {\n\t\t\tcase float64:\n\t\t\t\tvalue = v\n\t\t\tdefault:\n\t\t\t\ts := StrTo(ToStr(v))\n\t\t\t\tstr = &s\n\t\t\t}\n\t\t}\n\t\tif str != nil {\n\t\t\tv, err := str.Float64()\n\t\t\tif err != nil {\n\t\t\t\ttErr = err\n\t\t\t\tgoto end\n\t\t\t}\n\t\t\tvalue = v\n\t\t}\n\tcase fieldType&IsRelField > 0:\n\t\tfi = fi.relModelInfo.fields.pk\n\t\tfieldType = fi.fieldType\n\t\tgoto setValue\n\t}\n\nend:\n\tif tErr != nil {\n\t\terr := fmt.Errorf(\"convert to `%s` failed, field: %s err: %s\", fi.addrValue.Type(), fi.fullName, tErr)\n\t\treturn nil, err\n\t}\n\n\treturn value, nil\n}", "func Perform(db *gorm.DB) {\r\n\r\n\tdb.AutoMigrate(\r\n\t\t&models.WorkItem{})\r\n}", "func (Files) ToModel(data interface{}, model *Files) error {\n\tbsonBytes, err := bson.Marshal(data.(bson.M))\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = bson.Unmarshal(bsonBytes, &model)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (adapter *GORMAdapter) Create(entity interface{}) orm.Result {\n\treturn orm.Result{\n\t\tError: adapter.db.Create(entity).Error,\n\t}\n}", "func DB() *gorm.DB {\n\treturn db\n}", "func DB() *gorm.DB 
{\n\treturn db\n}", "func (reg *Registry) ToReal(logger logr.Logger) (globalregistry.Registry, error) {\n\treturn globalregistry.New(logger, reg)\n}", "func (s *Service) ToObject(data *Data) (r interface{}, err error) {\n\tdefer func() {\n\t\tif rec := recover(); rec != nil {\n\t\t\terr = makeError(rec)\n\t\t}\n\t}()\n\tif data == nil {\n\t\treturn nil, nil\n\t}\n\ttypeID := data.Type()\n\tif typeID == 0 {\n\t\treturn data, nil\n\t}\n\tserializer, ok := s.registry[typeID]\n\tif !ok {\n\t\treturn nil, hzerrors.NewHazelcastSerializationError(fmt.Sprintf(\"there is no suitable de-serializer for type %d\", typeID), nil)\n\t}\n\tdataInput := NewObjectDataInput(data.Buffer(), DataOffset, s, s.SerializationConfig.BigEndian)\n\treturn serializer.Read(dataInput), nil\n}", "func (a *InventoryDTO) InventoryDTOToDAL() (*dal.InventoryDAL, error) { \n\tinventory := &dal.InventoryDAL{\n\t\tInventoryID:a.InventoryID,\n\t\tFilmID:a.FilmID,\n\t\tStoreID:a.StoreID,\n\t\tLastUpdate:a.LastUpdate,\n\t\t \n\t}\n\treturn inventory, nil\n}", "func (userModel *UserModel) ToEntity() entities.User {\n\treturn entities.User{\n\t\tUUID: userModel.UUID,\n\t\tEmailID: userModel.EmailID,\n\t\tSolvedQuestions: strings.Split(userModel.SolvedQuestions, \",\"),\n\t\tHintsUsed: strings.Split(userModel.HintsUsed, \",\"),\n\t}\n}", "func (e GenericPersistable) ToJSON() []byte {\n\t\tresult, _ := json.Marshal(e)\n\t\treturn result\n\t}", "func (self Accessor) ToSql() (string, error) {\n\treturn self.From(self.Relation()).ToSql()\n}", "func (ot *ObjectType) Unwrap() *ObjectType {\n\ttx, ok := ot.config.driver.(*txDriver)\n\tif !ok {\n\t\tpanic(\"ent: ObjectType is not a transactional entity\")\n\t}\n\tot.config.driver = tx.drv\n\treturn ot\n}", "func (w *NotificationPolicy) ConvertToDBModel() error {\n\tif len(w.Targets) != 0 {\n\t\ttargets, err := json.Marshal(w.Targets)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tw.TargetsDB = string(targets)\n\t}\n\tif len(w.EventTypes) != 0 {\n\t\teventTypes, 
err := json.Marshal(w.EventTypes)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tw.EventTypesDB = string(eventTypes)\n\t}\n\n\treturn nil\n}", "func (database *DatabaseAccounts_SqlDatabase_Spec) ConvertToARM(resolved genruntime.ConvertToARMResolvedDetails) (interface{}, error) {\n\tif database == nil {\n\t\treturn nil, nil\n\t}\n\tresult := &DatabaseAccounts_SqlDatabase_Spec_ARM{}\n\n\t// Set property \"Location\":\n\tif database.Location != nil {\n\t\tlocation := *database.Location\n\t\tresult.Location = &location\n\t}\n\n\t// Set property \"Name\":\n\tresult.Name = resolved.Name\n\n\t// Set property \"Properties\":\n\tif database.Options != nil || database.Resource != nil {\n\t\tresult.Properties = &SqlDatabaseCreateUpdateProperties_ARM{}\n\t}\n\tif database.Options != nil {\n\t\toptions_ARM, err := (*database.Options).ConvertToARM(resolved)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\toptions := *options_ARM.(*CreateUpdateOptions_ARM)\n\t\tresult.Properties.Options = &options\n\t}\n\tif database.Resource != nil {\n\t\tresource_ARM, err := (*database.Resource).ConvertToARM(resolved)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresource := *resource_ARM.(*SqlDatabaseResource_ARM)\n\t\tresult.Properties.Resource = &resource\n\t}\n\n\t// Set property \"Tags\":\n\tif database.Tags != nil {\n\t\tresult.Tags = make(map[string]string, len(database.Tags))\n\t\tfor key, value := range database.Tags {\n\t\t\tresult.Tags[key] = value\n\t\t}\n\t}\n\treturn result, nil\n}", "func ToSqlParms(o interface{}) SQLParms {\n\treturn conv.StructToMap(o)\n}", "func MarshalFromDB(intoType reflect.Type, fromValue interface{}) interface{} {\n\tif reflect.TypeOf(fromValue) == intoType {\n\t\treturn fromValue\n\t}\n\n\tswitch intoType.Kind() {\n\tcase reflect.Int8:\n\t\tfallthrough\n\tcase reflect.Int16:\n\t\tfallthrough\n\tcase reflect.Int32:\n\t\tfallthrough\n\tcase reflect.Int64:\n\t\tfallthrough\n\tcase reflect.Int:\n\t\tdstPtr := 
reflect.New(intoType)\n\t\tdst := reflect.Indirect(dstPtr)\n\t\tsrc := reflect.ValueOf(fromValue)\n\t\tif dst.OverflowInt(src.Int()) {\n\t\t\tlog.Panicf(\"Overflow detected while storing %v within %v\", src.Type(), dst.Type())\n\t\t}\n\t\tdst.SetInt(src.Int())\n\t\treturn dst.Interface()\n\tcase reflect.Uint8:\n\t\tfallthrough\n\tcase reflect.Uint16:\n\t\tfallthrough\n\tcase reflect.Uint32:\n\t\tfallthrough\n\tcase reflect.Uint64:\n\t\tfallthrough\n\tcase reflect.Uint:\n\t\tdstPtr := reflect.New(intoType)\n\t\tdst := reflect.Indirect(dstPtr)\n\t\tvar srcStr string\n\t\tswitch fromValue.(type) {\n\t\tcase int64:\n\t\t\tsrcStr = strconv.FormatInt(fromValue.(int64), 10)\n\t\tcase int32:\n\t\t\tsrcStr = strconv.FormatInt(int64(fromValue.(int32)), 10)\n\t\tcase string:\n\t\t\tsrcStr = fromValue.(string)\n\t\t}\n\t\tsrcUint64, srcUint64Err := strconv.ParseUint(srcStr, 10, 64)\n\t\tif srcUint64Err != nil {\n\t\t\tlog.Panicf(\"Error detected while storing %v within %v: %v\", reflect.TypeOf(fromValue), intoType, srcUint64Err)\n\t\t}\n\t\tif dst.OverflowUint(srcUint64) {\n\t\t\tlog.Panicf(\"Overflow detected while storing %v within %v\", reflect.TypeOf(fromValue), intoType)\n\t\t}\n\t\tdst.SetUint(srcUint64)\n\t\treturn dst.Interface()\n\tcase reflect.Complex64:\n\t\tfallthrough\n\tcase reflect.Complex128:\n\t\tdstPtr := reflect.New(intoType)\n\t\tdst := reflect.Indirect(dstPtr)\n\t\tvar srcStr string\n\t\tswitch fromValue.(type) {\n\t\tcase complex64:\n\t\t\tsrcStr = strconv.FormatComplex(complex128(fromValue.(complex64)), 'f', -1, 64)\n\t\tcase complex128:\n\t\t\tsrcStr = strconv.FormatComplex(fromValue.(complex128), 'f', -1, 128)\n\t\tcase string:\n\t\t\tsrcStr = fromValue.(string)\n\t\t}\n\t\tvar dstBits int\n\t\tswitch intoType.Kind() {\n\t\tcase reflect.Complex64:\n\t\t\tdstBits = 64\n\t\tcase reflect.Complex128:\n\t\t\tdstBits = 128\n\t\t}\n\t\tsrcComplex128, srcComplex128Err := strconv.ParseComplex(srcStr, dstBits)\n\t\tif srcComplex128Err != nil 
{\n\t\t\tlog.Panicf(\"Error detected while storing %v within %v: %v\", reflect.TypeOf(fromValue), intoType, srcComplex128Err)\n\t\t}\n\t\tif dst.OverflowComplex(srcComplex128) {\n\t\t\tlog.Panicf(\"Overflow detected while storing %v within %v\", reflect.TypeOf(fromValue), intoType)\n\t\t}\n\t\tdst.SetComplex(srcComplex128)\n\t\treturn dst.Interface()\n\t}\n\tlog.Panicf(\"Unhandled kind: %v\", intoType.Kind())\n\treturn nil\n}" ]
[ "0.7346256", "0.7139889", "0.71160406", "0.7061915", "0.7026141", "0.6680758", "0.651739", "0.65058774", "0.6165296", "0.61525524", "0.60034645", "0.5619845", "0.56022626", "0.5586433", "0.5527513", "0.5513322", "0.55083215", "0.5489093", "0.5349258", "0.53281444", "0.52873904", "0.52281636", "0.52261865", "0.5221312", "0.5175996", "0.5086861", "0.50275433", "0.49678937", "0.49657136", "0.49207565", "0.4915036", "0.47860548", "0.4778269", "0.47745582", "0.47234136", "0.47052592", "0.4683224", "0.46697578", "0.46605834", "0.46545413", "0.46533424", "0.46485263", "0.46332002", "0.4626872", "0.46235496", "0.4601297", "0.45920748", "0.4591134", "0.457295", "0.45720643", "0.4560209", "0.4530968", "0.4500106", "0.44958684", "0.44893828", "0.44606796", "0.44568723", "0.4447034", "0.44401222", "0.44286254", "0.44256374", "0.44196644", "0.4396602", "0.43935415", "0.43884298", "0.4368531", "0.43663466", "0.43663085", "0.43653855", "0.4363194", "0.43482143", "0.43438548", "0.43365568", "0.43074644", "0.43042123", "0.42976078", "0.42893326", "0.42893326", "0.42889616", "0.42862377", "0.4284782", "0.42800352", "0.4279787", "0.42775333", "0.42563197", "0.42536724", "0.42510667", "0.4244496", "0.4244496", "0.42434093", "0.42387995", "0.4217601", "0.42129073", "0.42087233", "0.41907606", "0.41823274", "0.4171572", "0.41690844", "0.41664687", "0.41583073" ]
0.67786556
5
ToPB runs the BeforeToPB hook if present, converts the fields of this object to PB format, runs the AfterToPB hook, then returns the PB object
func (m *HealthMenstruationDailyEntryORM) ToPB(ctx context.Context) (HealthMenstruationDailyEntry, error) { to := HealthMenstruationDailyEntry{} var err error if prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToPB); ok { if err = prehook.BeforeToPB(ctx, &to); err != nil { return to, err } } to.Id = m.Id if m.CreatedAt != nil { if to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil { return to, err } } if m.UpdatedAt != nil { if to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil { return to, err } } to.ProfileId = m.ProfileId if m.Day != nil { if to.Day, err = ptypes1.TimestampProto(*m.Day); err != nil { return to, err } } to.IntensityPercentage = m.IntensityPercentage to.Type = HealthMenstruationDailyEntry_Type(m.Type) to.Manual = m.Manual to.BasedOnPrediction = m.BasedOnPrediction if posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToPB); ok { err = posthook.AfterToPB(ctx, &to) } return to, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *ContactORM) ToPB(ctx context.Context) (Contact, error) {\n\tto := Contact{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(ContactWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.FirstName = m.FirstName\n\tto.MiddleName = m.MiddleName\n\tto.LastName = m.LastName\n\t// Skipping field: PrimaryEmail\n\tfor _, v := range m.Emails {\n\t\tif v != nil {\n\t\t\tif tempEmails, cErr := v.ToPB(ctx); cErr == nil {\n\t\t\t\tto.Emails = append(to.Emails, &tempEmails)\n\t\t\t} else {\n\t\t\t\treturn to, cErr\n\t\t\t}\n\t\t} else {\n\t\t\tto.Emails = append(to.Emails, nil)\n\t\t}\n\t}\n\tif posthook, ok := interface{}(m).(ContactWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *ProfileORM) ToPB(ctx context.Context) (Profile, error) {\n\tto := Profile{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(ProfileWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Notes = m.Notes\n\tto.FirstName = m.FirstName\n\tto.LastName = m.LastName\n\tto.PrimaryEmail = m.PrimaryEmail\n\tfor _, v := range m.Groups {\n\t\tif v != nil {\n\t\t\tif tempGroups, cErr := v.ToPB(ctx); cErr == nil {\n\t\t\t\tto.Groups = append(to.Groups, &tempGroups)\n\t\t\t} else {\n\t\t\t\treturn to, cErr\n\t\t\t}\n\t\t} else {\n\t\t\tto.Groups = append(to.Groups, nil)\n\t\t}\n\t}\n\tto.ProfilePictureUrl = m.ProfilePictureUrl\n\tif posthook, ok := interface{}(m).(ProfileWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *PeriodORM) ToPB(ctx context.Context) 
(Period, error) {\n\tto := Period{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(PeriodWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.Period = m.Period\n\tif m.CreatedAt != nil {\n\t\tto.CreatedAt = timestamppb.New(*m.CreatedAt)\n\t}\n\tif m.UpdatedAt != nil {\n\t\tto.UpdatedAt = timestamppb.New(*m.UpdatedAt)\n\t}\n\tif posthook, ok := interface{}(m).(PeriodWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *EmailORM) ToPB(ctx context.Context) (Email, error) {\n\tto := Email{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(EmailWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.Address = m.Address\n\tif posthook, ok := interface{}(m).(EmailWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *UserInfoORM) ToPB(ctx context.Context) (UserInfo, error) {\n\tto := UserInfo{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(UserInfoWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.UserId = m.UserId\n\tto.LastName = m.LastName\n\tto.FirstName = m.FirstName\n\tto.Period = m.Period\n\tto.DepartmentId = m.DepartmentId\n\tto.JobId = m.JobId\n\tto.EnrollmentFlg = m.EnrollmentFlg\n\tto.AdminFlg = m.AdminFlg\n\tif m.CreatedAt != nil {\n\t\tto.CreatedAt = timestamppb.New(*m.CreatedAt)\n\t}\n\tif m.UpdatedAt != nil {\n\t\tto.UpdatedAt = timestamppb.New(*m.UpdatedAt)\n\t}\n\tif posthook, ok := interface{}(m).(UserInfoWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *IntPointORM) ToPB(ctx context.Context) (IntPoint, error) {\n\tto := IntPoint{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(IntPointWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); 
err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tto.X = m.X\n\tto.Y = m.Y\n\tif posthook, ok := interface{}(m).(IntPointWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *HealthMenstruationPersonalInfoORM) ToPB(ctx context.Context) (HealthMenstruationPersonalInfo, error) {\n\tto := HealthMenstruationPersonalInfo{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tto.PeriodLengthInDays = m.PeriodLengthInDays\n\tto.CycleLengthInDays = m.CycleLengthInDays\n\tif posthook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (t Transaction) PB() *pb.Transaction {\n\treturn &pb.Transaction{\n\t\tId: t.ID.String(),\n\t\tDate: t.Date.Format(\"2006-01-02\"),\n\t\tEntity: t.Entity,\n\t\tReference: t.Reference,\n\t\tHash: t.Hash,\n\t\tPostings: t.Postings.PB(),\n\t}\n}", "func ProtoToBackup(p *betapb.FilestoreBetaBackup) *beta.Backup {\n\tobj := &beta.Backup{\n\t\tName: dcl.StringOrNil(p.GetName()),\n\t\tDescription: dcl.StringOrNil(p.GetDescription()),\n\t\tState: ProtoToFilestoreBetaBackupStateEnum(p.GetState()),\n\t\tCreateTime: dcl.StringOrNil(p.GetCreateTime()),\n\t\tCapacityGb: dcl.Int64OrNil(p.GetCapacityGb()),\n\t\tStorageBytes: dcl.Int64OrNil(p.GetStorageBytes()),\n\t\tSourceInstance: dcl.StringOrNil(p.GetSourceInstance()),\n\t\tSourceFileShare: dcl.StringOrNil(p.GetSourceFileShare()),\n\t\tSourceInstanceTier: 
ProtoToFilestoreBetaBackupSourceInstanceTierEnum(p.GetSourceInstanceTier()),\n\t\tDownloadBytes: dcl.Int64OrNil(p.GetDownloadBytes()),\n\t\tProject: dcl.StringOrNil(p.GetProject()),\n\t\tLocation: dcl.StringOrNil(p.GetLocation()),\n\t}\n\treturn obj\n}", "func (m *CommentORM) ToPB(ctx context.Context) (Comment, error) {\n\tto := Comment{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(CommentWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = &types1.UUID{Value: m.Id.String()}\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.DeletedAt != nil {\n\t\tif to.DeletedAt, err = ptypes1.TimestampProto(*m.DeletedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.BoardId != nil {\n\t\tto.BoardId = &wrappers.StringValue{Value: *m.BoardId}\n\t}\n\tif m.PostId != nil {\n\t\tto.PostId = &wrappers.StringValue{Value: *m.PostId}\n\t}\n\tif m.ContentId != nil {\n\t\tto.ContentId = &wrappers.StringValue{Value: *m.ContentId}\n\t}\n\tif m.Userid != nil {\n\t\tto.Userid = &wrappers.StringValue{Value: *m.Userid}\n\t}\n\tif m.Username != nil {\n\t\tto.Username = &wrappers.StringValue{Value: *m.Username}\n\t}\n\tif m.Nickname != nil {\n\t\tto.Nickname = &wrappers.StringValue{Value: *m.Nickname}\n\t}\n\tif m.Email != nil {\n\t\tto.Email = &wrappers.StringValue{Value: *m.Email}\n\t}\n\tif m.Password != nil {\n\t\tto.Password = &wrappers.StringValue{Value: *m.Password}\n\t}\n\tif m.Url != nil {\n\t\tto.Url = &wrappers.StringValue{Value: *m.Url}\n\t}\n\tif m.UseHtml != nil {\n\t\tto.UseHtml = &wrappers.BoolValue{Value: *m.UseHtml}\n\t}\n\tif m.UseSecret != nil {\n\t\tto.UseSecret = &wrappers.BoolValue{Value: *m.UseSecret}\n\t}\n\tto.UpVoteCount = 
m.UpVoteCount\n\tto.DownVoteCount = m.DownVoteCount\n\tif posthook, ok := interface{}(m).(CommentWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (b *Block) ToProto() (*tmproto.Block, error) {\n\tif b == nil {\n\t\treturn nil, errors.New(\"nil Block\")\n\t}\n\n\tpb := new(tmproto.Block)\n\n\tpb.Header = *b.Header.ToProto()\n\tpb.LastCommit = b.LastCommit.ToProto()\n\tpb.Data = b.Data.ToProto()\n\n\tprotoEvidence, err := b.Evidence.ToProto()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpb.Evidence = *protoEvidence\n\n\treturn pb, nil\n}", "func (m *ContactORM) AfterToPB(ctx context.Context, c *Contact) error {\n\tif len(m.Emails) == 0 {\n\t\treturn nil\n\t}\n\t// find the primary e-mail in list of e-mails from DB\n\tfor _, addr := range m.Emails {\n\t\tif addr != nil && addr.IsPrimary {\n\t\t\tc.PrimaryEmail = addr.Address\n\t\t\tbreak\n\t\t}\n\t}\n\treturn nil\n}", "func (b *Block) ToProto() (*tmproto.Block, error) {\n\tif b == nil {\n\t\treturn nil, errors.New(\"nil Block\")\n\t}\n\n\tpb := new(tmproto.Block)\n\n\tpb.Header = *b.Header.ToProto()\n\tpb.CoreChainLock = b.CoreChainLock.ToProto()\n\tpb.LastCommit = b.LastCommit.ToProto()\n\tpb.Data = b.Data.ToProto()\n\n\tprotoEvidence, err := b.Evidence.ToProto()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpb.Evidence = *protoEvidence\n\n\treturn pb, nil\n}", "func BackupToProto(resource *beta.Backup) *betapb.FilestoreBetaBackup {\n\tp := 
&betapb.FilestoreBetaBackup{}\n\tp.SetName(dcl.ValueOrEmptyString(resource.Name))\n\tp.SetDescription(dcl.ValueOrEmptyString(resource.Description))\n\tp.SetState(FilestoreBetaBackupStateEnumToProto(resource.State))\n\tp.SetCreateTime(dcl.ValueOrEmptyString(resource.CreateTime))\n\tp.SetCapacityGb(dcl.ValueOrEmptyInt64(resource.CapacityGb))\n\tp.SetStorageBytes(dcl.ValueOrEmptyInt64(resource.StorageBytes))\n\tp.SetSourceInstance(dcl.ValueOrEmptyString(resource.SourceInstance))\n\tp.SetSourceFileShare(dcl.ValueOrEmptyString(resource.SourceFileShare))\n\tp.SetSourceInstanceTier(FilestoreBetaBackupSourceInstanceTierEnumToProto(resource.SourceInstanceTier))\n\tp.SetDownloadBytes(dcl.ValueOrEmptyInt64(resource.DownloadBytes))\n\tp.SetProject(dcl.ValueOrEmptyString(resource.Project))\n\tp.SetLocation(dcl.ValueOrEmptyString(resource.Location))\n\tmLabels := make(map[string]string, len(resource.Labels))\n\tfor k, r := range resource.Labels {\n\t\tmLabels[k] = r\n\t}\n\tp.SetLabels(mLabels)\n\n\treturn p\n}", "func (i *Invoice) createP2PProtobuf() *invoicepb.InvoiceData {\n\tvar recipient, sender, payee []byte\n\tif i.Recipient != nil {\n\t\trecipient = i.Recipient[:]\n\t}\n\n\tif i.Sender != nil {\n\t\tsender = i.Sender[:]\n\t}\n\n\tif i.Payee != nil {\n\t\tpayee = i.Payee[:]\n\t}\n\n\treturn &invoicepb.InvoiceData{\n\t\tInvoiceNumber: i.InvoiceNumber,\n\t\tInvoiceStatus: i.InvoiceStatus,\n\t\tSenderName: i.SenderName,\n\t\tSenderStreet: i.SenderStreet,\n\t\tSenderCity: i.SenderCity,\n\t\tSenderZipcode: i.SenderZipcode,\n\t\tSenderCountry: i.SenderCountry,\n\t\tRecipientName: i.RecipientName,\n\t\tRecipientStreet: i.RecipientStreet,\n\t\tRecipientCity: i.RecipientCity,\n\t\tRecipientZipcode: i.RecipientZipcode,\n\t\tRecipientCountry: i.RecipientCountry,\n\t\tCurrency: i.Currency,\n\t\tGrossAmount: i.GrossAmount,\n\t\tNetAmount: i.NetAmount,\n\t\tTaxAmount: i.TaxAmount,\n\t\tTaxRate: i.TaxRate,\n\t\tRecipient: recipient,\n\t\tSender: sender,\n\t\tPayee: payee,\n\t\tComment: 
i.Comment,\n\t\tDueDate: i.DueDate,\n\t\tDateCreated: i.DateCreated,\n\t\tExtraData: i.ExtraData,\n\t}\n\n}", "func (b *Block) ConvertToBlockPb() *iotextypes.Block {\n\tfooter, err := b.ConvertToBlockFooterPb()\n\tif err != nil {\n\t\tlog.L().Panic(\"failed to convert block footer to protobuf message\")\n\t}\n\treturn &iotextypes.Block{\n\t\tHeader: b.Header.Proto(),\n\t\tBody: b.Body.Proto(),\n\t\tFooter: footer,\n\t}\n}", "func CopyPB(dst interface{}, src interface{}) interface{} {\n\tif src == nil {\n\t\treturn nil\n\t}\n\tvar b []byte\n\tvar err error\n\tif srcPB, ok := src.(proto.Message); ok {\n\t\tv := reflect.ValueOf(srcPB)\n\t\tif srcPB == nil || (v.Kind() == reflect.Ptr && v.IsNil()) {\n\t\t\treturn dst\n\t\t}\n\t\tvar buf bytes.Buffer\n\t\tm := &jsonpb.Marshaler{EnumsAsInts: true}\n\t\terr = m.Marshal(&buf, srcPB)\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(\"could not Marshal proto.Message: %v\", err)\n\t\t}\n\t\tb = buf.Bytes()\n\t} else if srcPB, ok := src.(protov2.Message); ok {\n\t\tif srcPB == nil || !srcPB.ProtoReflect().IsValid() {\n\t\t\treturn dst\n\t\t}\n\t\tmo := protojson.MarshalOptions{UseEnumNumbers: true}\n\t\tb, err = mo.Marshal(srcPB)\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(\"could not Marshal protov2.Message: %v\", err)\n\t\t}\n\t} else {\n\t\tb, err = json.Marshal(src)\n\t}\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tif dstPB, ok := dst.(proto.Message); ok {\n\t\tu := jsonpb.Unmarshaler{}\n\t\tu.AllowUnknownFields = true\n\t\terr = u.Unmarshal(bytes.NewReader(b), dstPB)\n\t\tdst = dstPB\n\t} else if dstPB, ok := dst.(protov2.Message); ok {\n\t\tuo := protojson.UnmarshalOptions{DiscardUnknown: false}\n\t\terr = uo.Unmarshal(b, dstPB)\n\t\tdst = dstPB\n\t} else {\n\t\terr = json.Unmarshal(b, dst)\n\t}\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn dst\n}", "func (commit *Commit) ToProto() *tmproto.Commit {\n\tif commit == nil {\n\t\treturn nil\n\t}\n\n\tc := new(tmproto.Commit)\n\tsigs := make([]tmproto.CommitSig, 
len(commit.Signatures))\n\tfor i := range commit.Signatures {\n\t\tsigs[i] = *commit.Signatures[i].ToProto()\n\t}\n\tc.Signatures = sigs\n\n\tc.Height = commit.Height\n\tc.Round = commit.Round\n\tc.BlockID = commit.BlockID.ToProto()\n\n\treturn c\n}", "func (p Pipeline) ToProto(pb *pipelinepb.AppliedPipeline) error {\n\tnumOps := len(p.Operations)\n\tif cap(pb.Ops) >= numOps {\n\t\tpb.Ops = pb.Ops[:numOps]\n\t} else {\n\t\tpb.Ops = make([]pipelinepb.AppliedPipelineOp, numOps)\n\t}\n\tfor i := 0; i < numOps; i++ {\n\t\tif err := p.Operations[i].ToProto(&pb.Ops[i]); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (st *Account) ToProto() *iproto.AccountPb {\n\tacPb := &iproto.AccountPb{}\n\tacPb.Nonce = st.Nonce\n\tif st.Balance != nil {\n\t\tacPb.Balance = st.Balance.Bytes()\n\t}\n\tacPb.Root = make([]byte, hash.HashSize)\n\tcopy(acPb.Root, st.Root[:])\n\tacPb.CodeHash = make([]byte, len(st.CodeHash))\n\tcopy(acPb.CodeHash, st.CodeHash)\n\tacPb.IsCandidate = st.IsCandidate\n\tif st.VotingWeight != nil {\n\t\tacPb.VotingWeight = st.VotingWeight.Bytes()\n\t}\n\tacPb.Votee = st.Votee\n\treturn acPb\n}", "func (commit *Commit) ToProto() *tmproto.Commit {\n\tif commit == nil {\n\t\treturn nil\n\t}\n\n\tc := new(tmproto.Commit)\n\n\tc.Height = commit.Height\n\tc.Round = commit.Round\n\tc.BlockID = commit.BlockID.ToProto()\n\tc.StateID = commit.StateID.ToProto()\n\n\tc.ThresholdStateSignature = commit.ThresholdStateSignature\n\tc.ThresholdBlockSignature = commit.ThresholdBlockSignature\n\n\tc.QuorumHash = commit.QuorumHash\n\n\treturn c\n}", "func ComputeBetaInstanceTemplatePropertiesToProto(o *beta.InstanceTemplateProperties) *betapb.ComputeBetaInstanceTemplateProperties {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &betapb.ComputeBetaInstanceTemplateProperties{\n\t\tCanIpForward: dcl.ValueOrEmptyBool(o.CanIPForward),\n\t\tDescription: dcl.ValueOrEmptyString(o.Description),\n\t\tMachineType: dcl.ValueOrEmptyString(o.MachineType),\n\t\tMinCpuPlatform: 
dcl.ValueOrEmptyString(o.MinCpuPlatform),\n\t\tReservationAffinity: ComputeBetaInstanceTemplatePropertiesReservationAffinityToProto(o.ReservationAffinity),\n\t\tShieldedInstanceConfig: ComputeBetaInstanceTemplatePropertiesShieldedInstanceConfigToProto(o.ShieldedInstanceConfig),\n\t\tScheduling: ComputeBetaInstanceTemplatePropertiesSchedulingToProto(o.Scheduling),\n\t}\n\tfor _, r := range o.Disks {\n\t\tp.Disks = append(p.Disks, ComputeBetaInstanceTemplatePropertiesDisksToProto(&r))\n\t}\n\tp.Labels = make(map[string]string)\n\tfor k, r := range o.Labels {\n\t\tp.Labels[k] = r\n\t}\n\tp.Metadata = make(map[string]string)\n\tfor k, r := range o.Metadata {\n\t\tp.Metadata[k] = r\n\t}\n\tfor _, r := range o.GuestAccelerators {\n\t\tp.GuestAccelerators = append(p.GuestAccelerators, ComputeBetaInstanceTemplatePropertiesGuestAcceleratorsToProto(&r))\n\t}\n\tfor _, r := range o.NetworkInterfaces {\n\t\tp.NetworkInterfaces = append(p.NetworkInterfaces, ComputeBetaInstanceTemplatePropertiesNetworkInterfacesToProto(&r))\n\t}\n\tfor _, r := range o.ServiceAccounts {\n\t\tp.ServiceAccounts = append(p.ServiceAccounts, ComputeBetaInstanceTemplatePropertiesServiceAccountsToProto(&r))\n\t}\n\tfor _, r := range o.Tags {\n\t\tp.Tags = append(p.Tags, r)\n\t}\n\treturn p\n}", "func (vc *VehicleContainer) SavePB(target string, humanReadable bool) (err error) {\n\t// Marshall to GTFS-RT\n\tvar b []byte\n\tif humanReadable {\n\t\tb, err = prototext.Marshal(vc.AsProto())\n\t} else {\n\t\tb, err = proto.Marshal(vc.AsProto())\n\t}\n\n\t// Check for marshall errors\n\tif err != nil {\n\t\treturn\n\t}\n\n\t// Write to target file\n\terr = os.WriteFile(target, b, 0o666)\n\treturn\n}", "func (p Postings) PB() []*pb.Posting {\n\tvar postings []*pb.Posting\n\tfor _, posting := range p {\n\t\tpostings = append(postings, posting.PB())\n\t}\n\n\treturn postings\n}", "func InstanceTemplateToProto(resource *beta.InstanceTemplate) *betapb.ComputeBetaInstanceTemplate {\n\tp := 
&betapb.ComputeBetaInstanceTemplate{\n\t\tCreationTimestamp: dcl.ValueOrEmptyString(resource.CreationTimestamp),\n\t\tDescription: dcl.ValueOrEmptyString(resource.Description),\n\t\tId: dcl.ValueOrEmptyInt64(resource.Id),\n\t\tSelfLink: dcl.ValueOrEmptyString(resource.SelfLink),\n\t\tName: dcl.ValueOrEmptyString(resource.Name),\n\t\tProperties: ComputeBetaInstanceTemplatePropertiesToProto(resource.Properties),\n\t\tProject: dcl.ValueOrEmptyString(resource.Project),\n\t}\n\n\treturn p\n}", "func toJSONPb(_ *starlark.Thread, _ *starlark.Builtin, args starlark.Tuple, kwargs []starlark.Tuple) (starlark.Value, error) {\n\tvar msg *Message\n\tvar emitDefaults starlark.Bool\n\tif err := starlark.UnpackArgs(\"to_jsonpb\", args, kwargs, \"msg\", &msg, \"emit_defaults?\", &emitDefaults); err != nil {\n\t\treturn nil, err\n\t}\n\tpb, err := msg.ToProto()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t// More jsonpb Marshaler options may be added here as needed.\n\tvar jsonMarshaler = &jsonpb.Marshaler{Indent: \"\\t\", EmitDefaults: bool(emitDefaults)}\n\tstr, err := jsonMarshaler.MarshalToString(pb)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn starlark.String(str), nil\n}", "func ProtoToInstanceTemplate(p *betapb.ComputeBetaInstanceTemplate) *beta.InstanceTemplate {\n\tobj := &beta.InstanceTemplate{\n\t\tCreationTimestamp: dcl.StringOrNil(p.GetCreationTimestamp()),\n\t\tDescription: dcl.StringOrNil(p.Description),\n\t\tId: dcl.Int64OrNil(p.Id),\n\t\tSelfLink: dcl.StringOrNil(p.SelfLink),\n\t\tName: dcl.StringOrNil(p.Name),\n\t\tProperties: ProtoToComputeBetaInstanceTemplateProperties(p.GetProperties()),\n\t\tProject: dcl.StringOrNil(p.Project),\n\t}\n\treturn obj\n}", "func unmarshalTodoPB(t *proto.Todo) (*models.Todo, error) {\n\ttodo := models.Todo{\n\t\tID: t.GetId(),\n\t\tComplete: t.GetComplete(),\n\t\tDescription: null.StringFrom(t.GetDescription()).Ptr(),\n\t\tTitle: t.GetTitle(),\n\t}\n\n\tcompletedAt, err := ptypes.Timestamp(t.GetCompletedAt())\n\tvalid := 
true\n\tif err != nil {\n\t\tvalid = false\n\t}\n\ttodo.CompletedAt = null.NewTime(completedAt, valid).Ptr()\n\n\tcreatedAt, err := ptypes.Timestamp(t.GetCreatedAt())\n\tif err != nil {\n\t\treturn nil, gqlerror.Errorf(\"error marshalling todo: %v\", err)\n\t}\n\ttodo.CreatedAt = createdAt\n\n\treturn &todo, nil\n}", "func (in *Store) ToProto() *iotextypes.BlockStore {\n\treceipts := []*iotextypes.Receipt{}\n\tfor _, r := range in.Receipts {\n\t\treceipts = append(receipts, r.ConvertToReceiptPb())\n\t}\n\treturn &iotextypes.BlockStore{\n\t\tBlock: in.Block.ConvertToBlockPb(),\n\t\tReceipts: receipts,\n\t}\n}", "func (b *BalanceAs) toProto() (*proto.Balance, error) {\n\tpb := &proto.Balance{\n\t\tSymbol: b.CoinBalance.Symbol,\n\t\tExchange: b.Exchange,\n\t\tFree: float32(b.Free),\n\t\tLocked: float32(b.Locked),\n\t\tTotal: float32(b.Total),\n\t\tAs: string(b.As),\n\t\tPrice: float32(b.Price),\n\t\tValue: float32(b.Value),\n\t\tPrice24H: float32(b.Price24H),\n\t\tValue24H: float32(b.Value24H),\n\t\tChange24H: float32(b.Change24H),\n\t\tChangePct24H: float32(b.ChangePct24H),\n\t}\n\n\tts, err := tspb.TimestampProto(b.At)\n\tif err != nil {\n\t\treturn nil, err\n\t} else {\n\t\tpb.At = ts\n\t}\n\n\tif b.BuyStrategy != nil {\n\t\tif pb.BuyStrategy, err = strategyToProto(b.BuyStrategy); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tif b.SellStrategy != nil {\n\t\tif pb.SellStrategy, err = strategyToProto(b.SellStrategy); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn pb, nil\n}", "func (m *GroupORM) AfterToPB(ctx context.Context, a *Group) error {\n\n\tfor _, item := range m.UserList {\n\t\tid, err := resource.Encode(&User{}, item.Id)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\ta.UserList = append(a.UserList, id)\n\t}\n\treturn nil\n}", "func TransactionFromPB(t *pb.Transaction) (Transaction, error) {\n\tvar (\n\t\tid uuid.UUID\n\t\terr error\n\t)\n\n\tif t.Id == \"\" {\n\t\tid = uuid.Nil\n\t} else {\n\t\tid, err = uuid.FromString(t.Id)\n\t\tif 
err != nil {\n\t\t\treturn Transaction{}, err\n\t\t}\n\t}\n\n\tdate, err := time.Parse(\"2006-01-02\", t.Date)\n\tif err != nil {\n\t\treturn Transaction{}, err\n\t}\n\n\tpostings, err := PostingsFromPB(&pb.Postings{\n\t\tData: t.Postings,\n\t})\n\tif err != nil {\n\t\treturn Transaction{}, err\n\t}\n\n\ttransaction := Transaction{}\n\ttransaction.ID = id\n\ttransaction.Date = date\n\ttransaction.Entity = t.Entity\n\ttransaction.Reference = t.Reference\n\ttransaction.Hash = t.Hash\n\ttransaction.Postings = postings\n\n\treturn transaction, nil\n}", "func ToWireFormat(data []byte, storage string) ([]byte, error) {\n\tprototypeType, found := storageToType[storage]\n\tif !found {\n\t\treturn nil, fmt.Errorf(\"unknown storage type: %v\", storage)\n\t}\n\n\tobj := reflect.New(prototypeType).Interface()\n\terr := yaml.Unmarshal(data, obj)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn json.Marshal(obj)\n}", "func NewCaptureFromPB(pbCap *pb.BGPCapture) (*Capture, error) {\n\tcap := &Capture{fromTable: \"\", ID: \"\"}\n\tvar err error\n\n\tcap.Timestamp, cap.ColIP, err = util.GetTimeColIP(pbCap)\n\tif err != nil {\n\t\treturn nil, dbLogger.Errorf(\"unable to parse collector IP: %s\", err)\n\t}\n\n\tcap.PeerIP, err = util.GetPeerIP(pbCap)\n\tif err != nil {\n\t\treturn nil, dbLogger.Errorf(\"unable to parse peer IP: %s\", err)\n\t}\n\n\t// Ignoring the error here as this message could only have withdraws.\n\tcap.ASPath, _ = util.GetASPath(pbCap)\n\n\tcap.Origin = 0\n\tif len(cap.ASPath) != 0 {\n\t\tcap.Origin = cap.ASPath[len(cap.ASPath)-1]\n\t}\n\n\tcap.NextHop, err = util.GetNextHop(pbCap)\n\tif err != nil {\n\t\tcap.NextHop = net.IPv4(0, 0, 0, 0)\n\t}\n\n\t// Here if it errors and the return is nil, PrefixToPQArray should leave it and the schema should insert the default\n\tcap.Advertised, _ = util.GetAdvertisedPrefixes(pbCap)\n\tcap.Withdrawn, _ = util.GetWithdrawnPrefixes(pbCap)\n\n\treturn cap, nil\n}", "func AppengineDomainMappingResourceRecordsToProto(o 
*appengine.DomainMappingResourceRecords) *appenginepb.AppengineDomainMappingResourceRecords {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &appenginepb.AppengineDomainMappingResourceRecords{\n\t\tName: dcl.ValueOrEmptyString(o.Name),\n\t\tRrdata: dcl.ValueOrEmptyString(o.Rrdata),\n\t\tType: AppengineDomainMappingResourceRecordsTypeEnumToProto(o.Type),\n\t}\n\treturn p\n}", "func CloudkmsBetaCryptoKeyPrimaryToProto(o *beta.CryptoKeyPrimary) *betapb.CloudkmsBetaCryptoKeyPrimary {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &betapb.CloudkmsBetaCryptoKeyPrimary{}\n\tp.SetName(dcl.ValueOrEmptyString(o.Name))\n\tp.SetState(CloudkmsBetaCryptoKeyPrimaryStateEnumToProto(o.State))\n\tp.SetProtectionLevel(CloudkmsBetaCryptoKeyPrimaryProtectionLevelEnumToProto(o.ProtectionLevel))\n\tp.SetAlgorithm(CloudkmsBetaCryptoKeyPrimaryAlgorithmEnumToProto(o.Algorithm))\n\tp.SetAttestation(CloudkmsBetaCryptoKeyPrimaryAttestationToProto(o.Attestation))\n\tp.SetCreateTime(dcl.ValueOrEmptyString(o.CreateTime))\n\tp.SetGenerateTime(dcl.ValueOrEmptyString(o.GenerateTime))\n\tp.SetDestroyTime(dcl.ValueOrEmptyString(o.DestroyTime))\n\tp.SetDestroyEventTime(dcl.ValueOrEmptyString(o.DestroyEventTime))\n\tp.SetImportJob(dcl.ValueOrEmptyString(o.ImportJob))\n\tp.SetImportTime(dcl.ValueOrEmptyString(o.ImportTime))\n\tp.SetImportFailureReason(dcl.ValueOrEmptyString(o.ImportFailureReason))\n\tp.SetExternalProtectionLevelOptions(CloudkmsBetaCryptoKeyPrimaryExternalProtectionLevelOptionsToProto(o.ExternalProtectionLevelOptions))\n\tp.SetReimportEligible(dcl.ValueOrEmptyBool(o.ReimportEligible))\n\treturn p\n}", "func (s *subscription) AsPB() *annotatedvalue.Subscription {\n\treturn &annotatedvalue.Subscription{\n\t\tID: s.GetID(),\n\t}\n}", "func (e *Entity) createP2PProtobuf() *entitypb.Entity {\n\tdids := identity.DIDsToBytes(e.Identity)\n\treturn &entitypb.Entity{\n\t\tIdentity: dids[0],\n\t\tLegalName: e.LegalName,\n\t\tAddresses: e.Addresses,\n\t\tPaymentDetails: e.PaymentDetails,\n\t\tContacts: 
e.Contacts,\n\t}\n}", "func PostingsFromPB(pb *pb.Postings) (Postings, error) {\n\tvar postings Postings\n\tfor _, v := range pb.Data {\n\t\tposting, err := PostingFromPB(v)\n\t\tif err != nil {\n\t\t\treturn Postings{}, err\n\t\t}\n\t\tpostings = append(postings, posting)\n\t}\n\n\treturn postings, nil\n}", "func (p *Purrgil) ParseToByte() ([]byte, error) {\n\treturn yaml.Marshal(p)\n}", "func (m *Mutate) ToProto() proto.Message {\n\tp, _, _ := m.toProto(false, nil)\n\treturn p\n}", "func (blockID *BlockID) ToProto() tmproto.BlockID {\n\tif blockID == nil {\n\t\treturn tmproto.BlockID{}\n\t}\n\n\treturn tmproto.BlockID{\n\t\tHash: blockID.Hash,\n\t\t// PartSetHeader: blockID.PartSetHeader.ToProto(),\n\t}\n}", "func DomainMappingToProto(resource *appengine.DomainMapping) *appenginepb.AppengineDomainMapping {\n\tp := &appenginepb.AppengineDomainMapping{\n\t\tSelfLink: dcl.ValueOrEmptyString(resource.SelfLink),\n\t\tName: dcl.ValueOrEmptyString(resource.Name),\n\t\tSslSettings: AppengineDomainMappingSslSettingsToProto(resource.SslSettings),\n\t\tApp: dcl.ValueOrEmptyString(resource.App),\n\t}\n\tfor _, r := range resource.ResourceRecords {\n\t\tp.ResourceRecords = append(p.ResourceRecords, AppengineDomainMappingResourceRecordsToProto(&r))\n\t}\n\n\treturn p\n}", "func ClouddeployAlphaTargetGkeToProto(o *alpha.TargetGke) *alphapb.ClouddeployAlphaTargetGke {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &alphapb.ClouddeployAlphaTargetGke{}\n\tp.SetCluster(dcl.ValueOrEmptyString(o.Cluster))\n\tp.SetInternalIp(dcl.ValueOrEmptyBool(o.InternalIP))\n\treturn p\n}", "func ProtoToInstance(p *sqlpb.SqlInstance) *sql.Instance {\n\tobj := &sql.Instance{\n\t\tBackendType: ProtoToSqlInstanceBackendTypeEnum(p.GetBackendType()),\n\t\tConnectionName: dcl.StringOrNil(p.ConnectionName),\n\t\tDatabaseVersion: ProtoToSqlInstanceDatabaseVersionEnum(p.GetDatabaseVersion()),\n\t\tEtag: dcl.StringOrNil(p.Etag),\n\t\tGceZone: dcl.StringOrNil(p.GceZone),\n\t\tInstanceType: 
ProtoToSqlInstanceInstanceTypeEnum(p.GetInstanceType()),\n\t\tMasterInstanceName: dcl.StringOrNil(p.MasterInstanceName),\n\t\tMaxDiskSize: ProtoToSqlInstanceMaxDiskSize(p.GetMaxDiskSize()),\n\t\tName: dcl.StringOrNil(p.Name),\n\t\tProject: dcl.StringOrNil(p.Project),\n\t\tRegion: dcl.StringOrNil(p.Region),\n\t\tRootPassword: dcl.StringOrNil(p.RootPassword),\n\t\tCurrentDiskSize: ProtoToSqlInstanceCurrentDiskSize(p.GetCurrentDiskSize()),\n\t\tDiskEncryptionConfiguration: ProtoToSqlInstanceDiskEncryptionConfiguration(p.GetDiskEncryptionConfiguration()),\n\t\tFailoverReplica: ProtoToSqlInstanceFailoverReplica(p.GetFailoverReplica()),\n\t\tMasterInstance: ProtoToSqlInstanceMasterInstance(p.GetMasterInstance()),\n\t\tReplicaConfiguration: ProtoToSqlInstanceReplicaConfiguration(p.GetReplicaConfiguration()),\n\t\tScheduledMaintenance: ProtoToSqlInstanceScheduledMaintenance(p.GetScheduledMaintenance()),\n\t\tSettings: ProtoToSqlInstanceSettings(p.GetSettings()),\n\t}\n\tfor _, r := range p.GetIpAddresses() {\n\t\tobj.IPAddresses = append(obj.IPAddresses, *ProtoToSqlInstanceIPAddresses(r))\n\t}\n\treturn obj\n}", "func CloudkmsBetaCryptoKeyPrimaryAttestationToProto(o *beta.CryptoKeyPrimaryAttestation) *betapb.CloudkmsBetaCryptoKeyPrimaryAttestation {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &betapb.CloudkmsBetaCryptoKeyPrimaryAttestation{}\n\tp.SetFormat(CloudkmsBetaCryptoKeyPrimaryAttestationFormatEnumToProto(o.Format))\n\tp.SetContent(dcl.ValueOrEmptyString(o.Content))\n\tp.SetCertChains(CloudkmsBetaCryptoKeyPrimaryAttestationCertChainsToProto(o.CertChains))\n\treturn p\n}", "func (o *KanbanPartial) ToMap() map[string]interface{} {\n\tkv := map[string]interface{}{\n\t\t\"active\": toKanbanObject(o.Active, true),\n\t\t\"board_id\": toKanbanObject(o.BoardID, true),\n\t\t\"columns\": toKanbanObject(o.Columns, true),\n\t\t\"deleted\": toKanbanObject(o.Deleted, true),\n\t\t\"issue_ids\": toKanbanObject(o.IssueIds, true),\n\t\t\"name\": toKanbanObject(o.Name, 
true),\n\t\t\"project_ids\": toKanbanObject(o.ProjectIds, true),\n\t\t\"updated_date\": toKanbanObject(o.UpdatedDate, true),\n\t\t\"url\": toKanbanObject(o.URL, true),\n\t}\n\tfor k, v := range kv {\n\t\tif v == nil || reflect.ValueOf(v).IsZero() {\n\t\t\tdelete(kv, k)\n\t\t} else {\n\n\t\t\tif k == \"columns\" {\n\t\t\t\tif arr, ok := v.([]KanbanColumns); ok {\n\t\t\t\t\tif len(arr) == 0 {\n\t\t\t\t\t\tdelete(kv, k)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif k == \"issue_ids\" {\n\t\t\t\tif arr, ok := v.([]string); ok {\n\t\t\t\t\tif len(arr) == 0 {\n\t\t\t\t\t\tdelete(kv, k)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif k == \"project_ids\" {\n\t\t\t\tif arr, ok := v.([]string); ok {\n\t\t\t\t\tif len(arr) == 0 {\n\t\t\t\t\t\tdelete(kv, k)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif k == \"updated_date\" {\n\t\t\t\tif dt, ok := v.(*KanbanUpdatedDate); ok {\n\t\t\t\t\tif dt.Epoch == 0 && dt.Offset == 0 && dt.Rfc3339 == \"\" {\n\t\t\t\t\t\tdelete(kv, k)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn kv\n}", "func (blockID *BlockID) ToProto() tmproto.BlockID {\n\tif blockID == nil {\n\t\treturn tmproto.BlockID{}\n\t}\n\n\treturn tmproto.BlockID{\n\t\tHash: blockID.Hash,\n\t\tPartSetHeader: blockID.PartSetHeader.ToProto(),\n\t}\n}", "func (blockID *BlockID) ToProto() tmproto.BlockID {\n\tif blockID == nil {\n\t\treturn tmproto.BlockID{}\n\t}\n\n\treturn tmproto.BlockID{\n\t\tHash: blockID.Hash,\n\t\tPartSetHeader: blockID.PartSetHeader.ToProto(),\n\t}\n}", "func ClouddeployAlphaTargetRunToProto(o *alpha.TargetRun) *alphapb.ClouddeployAlphaTargetRun {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &alphapb.ClouddeployAlphaTargetRun{}\n\tp.SetLocation(dcl.ValueOrEmptyString(o.Location))\n\treturn p\n}", "func go2pbStmts(rootPkg *amino.Package, isRoot bool, imports *ast.GenDecl, scope *ast.Scope, pbo ast.Expr, goo ast.Expr, gooIsPtr bool, gooType *amino.TypeInfo, fopts amino.FieldOptions, options uint64) (b []ast.Stmt) {\n\n\tconst (\n\t\toption_bytes = 0x01 // 
if goo's repr is uint8 as an element of bytes.\n\t\toption_implicit_list = 0x02 // if goo is a repeated list & also an element.\n\t)\n\n\t// Special case if nil-pointer.\n\tif gooIsPtr || gooType.Type.Kind() == reflect.Interface {\n\t\tdefer func() {\n\t\t\t// Wrap penultimate b with if statement.\n\t\t\tb = []ast.Stmt{_if(_b(goo, \"!=\", _i(\"nil\")),\n\t\t\t\tb...,\n\t\t\t)}\n\t\t}()\n\t}\n\t// Below, we can assume that goo isn't nil.\n\n\t// External case.\n\t// If gooType is registered, just call ToPBMessage.\n\t// TODO If not registered?\n\tif !isRoot && gooType.Registered && hasPBBindings(gooType) && (options&option_bytes == 0) {\n\t\t// Call ToPBMessage().\n\t\tpbote_ := p3goTypeExprString(rootPkg, imports, scope, gooType, fopts)\n\t\tpbom_ := addVarUniq(scope, \"pbom\")\n\t\tb = append(b,\n\t\t\t_a(pbom_, \":=\", _x(\"proto.Message~(~nil~)\")),\n\t\t\t_a(pbom_, _i(\"err\"), \"=\", _call(_sel(goo, \"ToPBMessage\"), _i(\"cdc\"))),\n\t\t\t_if(_x(\"err__!=__nil\"),\n\t\t\t\t_return(),\n\t\t\t),\n\t\t\t_a(pbo, \"=\", _x(\"%v.~(~%v~)\", pbom_, pbote_)),\n\t\t)\n\t\tif gooIsPtr {\n\t\t\tif pbote_[0] != '*' {\n\t\t\t\tpanic(\"expected pointer kind for p3goTypeExprString (of registered type)\")\n\t\t\t}\n\t\t\tdpbote_ := pbote_[1:]\n\t\t\tb = append(b,\n\t\t\t\t_if(_b(pbo, \"==\", \"nil\"),\n\t\t\t\t\t_a(pbo, \"=\", _x(\"new~(~%v~)\", dpbote_))))\n\t\t}\n\t\treturn\n\t}\n\n\t// Use *goor* for goo's repr.\n\tvar goor ast.Expr\n\tvar goorType *amino.TypeInfo\n\n\t// Maybe wrap pbo.\n\t// NOTE: Instead of writing code to determine the .Value type,\n\t// just lazily construct before assigning to pbo.\n\tvar wrapImplicitStruct bool\n\tvar maybeWrap = func(goor ast.Expr) ast.Expr {\n\t\tif wrapImplicitStruct {\n\t\t\tpbote_ := p3goTypeExprString(rootPkg, imports, scope, gooType, fopts)\n\t\t\tif pbote_[0] != '*' {\n\t\t\t\tpanic(\"expected pointer kind for p3goTypeExprString (of type to be wrapped)\")\n\t\t\t}\n\t\t\tdpbote_ := pbote_[1:]\n\t\t\treturn 
_ref(&ast.CompositeLit{\n\t\t\t\tType: _x(dpbote_),\n\t\t\t\tElts: []ast.Expr{_kv(\"Value\", goor)},\n\t\t\t\tIncomplete: false,\n\t\t\t})\n\t\t} else {\n\t\t\treturn goor\n\t\t}\n\t}\n\n\t// Special case if IsAminoMarshaler.\n\tif gooType.IsAminoMarshaler {\n\t\t// First, derive repr instance.\n\t\tgoor_ := addVarUniq(scope, \"goor\")\n\t\terr_ := addVarUniq(scope, \"err\") // do not shadow original err\n\t\tb = append(b,\n\t\t\t_a(goor_, err_, \":=\", _call(_sel(goo, \"MarshalAmino\"))),\n\t\t\t_if(_x(\"%v__!=__nil\", err_),\n\t\t\t\t_return(_x(\"nil\"), _i(err_)),\n\t\t\t),\n\t\t)\n\t\t// If gooType is struct or is registered non-native, but the repr type\n\t\t// isn't struct, an implicit struct is needed.\n\t\t// If option_bytes, special case as we will encode as uint8.\n\t\tif (gooType.Type.Kind() == reflect.Struct ||\n\t\t\t(gooType.Package != nil && gooType.Package.GoPkgPath != \"\")) &&\n\t\t\tgooType.ReprType.Type.Kind() != reflect.Struct &&\n\t\t\toptions&option_bytes == 0 {\n\n\t\t\tif gooType.ReprType.Type.Kind() == reflect.Interface {\n\t\t\t\tpanic(\"not yet tested\")\n\t\t\t}\n\t\t\twrapImplicitStruct = true\n\t\t}\n\t\t// Assign *goor*.\n\t\tgoor = _i(goor_)\n\t\tgoorType = gooType.ReprType\n\t} else {\n\t\t// If gooType is registered non-native, but the repr type isn't struct\n\t\t// nor interface, an implicit struct is needed. 
(if not amino\n\t\t// marshaler and isn't struct (nor interface), but isn't registered,\n\t\t// not a p3 message).\n\t\tif gooType.Package != nil &&\n\t\t\tgooType.Package.GoPkgPath != \"\" &&\n\t\t\tgooType.Type.Kind() != reflect.Struct &&\n\t\t\tgooType.Type.Kind() != reflect.Interface {\n\n\t\t\twrapImplicitStruct = true\n\t\t}\n\t\t// Assign *goor*.\n\t\tgoor = goo\n\t\tgoorType = gooType\n\t\tif gooIsPtr {\n\t\t\tdgoor_ := addVarUniq(scope, \"dgoor\")\n\t\t\tb = append(b,\n\t\t\t\t_a(dgoor_, \":=\", _deref(goor)),\n\t\t\t\t_a(dgoor_, \"=\", dgoor_)) // XXX\n\t\t\tgoor = _i(dgoor_)\n\t\t}\n\t}\n\t// Below, goor is dereferenced if goo is pointer..\n\n\t// Special case, time & duration.\n\tswitch goorType.Type {\n\tcase timeType:\n\t\tpkgName := addImportAuto(\n\t\t\timports, scope, \"timestamppb\", \"google.golang.org/protobuf/types/known/timestamppb\")\n\t\tif gooIsPtr { // (non-nil)\n\t\t\tb = append(b,\n\t\t\t\t_a(pbo, \"=\", _call(_sel(_x(pkgName), \"New\"), goor)))\n\t\t} else {\n\t\t\tb = append(b,\n\t\t\t\t_if(_not(_call(_x(\"amino.IsEmptyTime\"), goor)),\n\t\t\t\t\t_a(pbo, \"=\", _call(_sel(_x(pkgName), \"New\"), goor))))\n\t\t}\n\t\treturn\n\tcase durationType:\n\t\tpkgName := addImportAuto(\n\t\t\timports, scope, \"durationpb\", \"google.golang.org/protobuf/types/known/durationpb\")\n\t\tif gooIsPtr { // (non-nil)\n\t\t\tb = append(b,\n\t\t\t\t_a(pbo, \"=\", _call(_sel(_x(pkgName), \"New\"), goor)))\n\t\t} else {\n\t\t\tb = append(b,\n\t\t\t\t_if(_b(_call(_sel(goor, \"Nanoseconds\")), \"!=\", \"0\"),\n\t\t\t\t\t_a(pbo, \"=\", _call(_sel(_x(pkgName), \"New\"), goor))))\n\t\t}\n\t\treturn\n\t}\n\n\t// Special case, external empty types.\n\tif gooType.Registered && hasPBBindings(gooType) {\n\t\tif isRoot {\n\t\t\tpbote_ := p3goTypeExprString(rootPkg, imports, scope, gooType, fopts)\n\t\t\tpbov_ := addVarUniq(scope, \"pbov\")\n\t\t\tb = append(b,\n\t\t\t\t_if(_call(_x(\"Is%vReprEmpty\", gooType.Name), goor),\n\t\t\t\t\t_var(pbov_, _x(pbote_), 
nil),\n\t\t\t\t\t_a(\"msg\", \"=\", pbov_),\n\t\t\t\t\t_return()))\n\t\t} else if !gooIsPtr {\n\t\t\tpkgPrefix := goPkgPrefix(rootPkg, gooType.Type, gooType, imports, scope)\n\t\t\t// b switcharoo pattern\n\t\t\t// statements after this pattern appended to b\n\t\t\t// will come after the injected if-condition.\n\t\t\toldb := b\n\t\t\tb = []ast.Stmt(nil)\n\t\t\tdefer func() {\n\t\t\t\tnewb := b // named for clarity\n\t\t\t\tb = append(oldb,\n\t\t\t\t\t_if(_not(_call(_x(\"%vIs%vReprEmpty\", pkgPrefix, gooType.Name), goor)),\n\t\t\t\t\t\tnewb...))\n\t\t\t}()\n\t\t\t// end b switcharoo pattern\n\t\t}\n\t}\n\n\t// General case\n\tswitch goork := goorType.Type.Kind(); goork {\n\n\tcase reflect.Interface:\n\t\ttypeUrl_ := addVarUniq(scope, \"typeUrl\")\n\t\tbz_ := addVarUniq(scope, \"bz\")\n\t\tanyte_ := p3goTypeExprString(rootPkg, imports, scope, gooType, fopts)\n\t\tif anyte_[0] != '*' {\n\t\t\tpanic(\"expected pointer kind for p3goTypeExprString (of interface type)\")\n\t\t}\n\t\tdanyte_ := anyte_[1:]\n\t\tb = append(b,\n\t\t\t_a(typeUrl_, \":=\", _call(_x(\"cdc.GetTypeURL\"), goo)),\n\t\t\t_a(bz_, \":=\", \"[]byte~(~nil~)\"),\n\t\t\t_a(bz_, \"err\", \"=\", _call(_x(\"cdc.Marshal\"), goor)),\n\t\t\t_if(_x(\"err__!=__nil\"),\n\t\t\t\t_return(),\n\t\t\t),\n\t\t\t_a(pbo, \"=\", _x(\"&%v~{~TypeUrl:typeUrl,Value:bz~}\", danyte_)),\n\t\t)\n\n\tcase reflect.Int:\n\t\tb = append(b,\n\t\t\t_a(pbo, \"=\", maybeWrap(_call(_i(\"int64\"), goor))))\n\tcase reflect.Int16, reflect.Int8:\n\t\tb = append(b,\n\t\t\t_a(pbo, \"=\", maybeWrap(_call(_i(\"int32\"), goor))))\n\tcase reflect.Uint:\n\t\tb = append(b,\n\t\t\t_a(pbo, \"=\", maybeWrap(_call(_i(\"uint64\"), goor))))\n\tcase reflect.Uint16:\n\t\tb = append(b,\n\t\t\t_a(pbo, \"=\", maybeWrap(_call(_i(\"uint32\"), goor))))\n\tcase reflect.Uint8:\n\t\tif options&option_bytes == 0 {\n\t\t\tb = append(b,\n\t\t\t\t_a(pbo, \"=\", maybeWrap(_call(_i(\"uint32\"), goor))))\n\t\t} else {\n\t\t\tb = append(b,\n\t\t\t\t_a(pbo, \"=\", 
_call(_i(\"byte\"), goor)))\n\t\t}\n\n\tcase reflect.Array, reflect.Slice:\n\t\tvar newoptions uint64\n\t\tvar gooreIsPtr = goorType.ElemIsPtr\n\t\tvar gooreType = goorType.Elem\n\t\tvar dpbote_ string\n\t\tvar pboIsImplicit = isImplicitList(goorType, fopts)\n\t\tvar pboeIsImplicit = isImplicitList(gooreType, fopts)\n\t\tvar pbote_ = p3goTypeExprString(rootPkg, imports, scope, gooType, fopts)\n\t\tvar pboete_ = p3goTypeExprString(rootPkg, imports, scope, gooreType, fopts)\n\n\t\tif gooreType.ReprType.Type.Kind() == reflect.Uint8 {\n\t\t\t// Special bytes optimization for recursive case.\n\t\t\tpboete_ = \"uint8\"\n\t\t\tnewoptions |= option_bytes\n\t\t} else if pboeIsImplicit {\n\t\t\t// Special implicit list struct for recursive call.\n\t\t\tnewoptions |= option_implicit_list\n\t\t}\n\n\t\t// Iff also option & option_implicit_list, wrap with implicit list struct.\n\t\tif pboIsImplicit {\n\t\t\tif pbote_[0] != '*' {\n\t\t\t\tpanic(\"expected pointer kind for p3goTypeExprString (of implicit list-struct type)\")\n\t\t\t}\n\t\t\tdpbote_ = pbote_[1:]\n\t\t} else {\n\t\t\tdpbote_ = \"XXX\" // needed for _x() parsing regardless of _ctif condition.\n\t\t}\n\n\t\t// Construct, translate, assign.\n\t\tgoorl_ := addVarUniq(scope, \"goorl\")\n\t\tpbos_ := addVarUniq(scope, \"pbos\")\n\t\tscope2 := ast.NewScope(scope)\n\t\taddVars(scope2, \"i\", \"goore\", \"pbose\")\n\t\tb = append(b,\n\t\t\t_a(goorl_, \":=\", _len(goor)),\n\t\t\t_ife(_x(\"%v__==__0\", goorl_),\n\t\t\t\t_block( // then\n\t\t\t\t\t// Prefer nil for empty slices for less gc overhead.\n\t\t\t\t\t_a(pbo, \"=\", _i(\"nil\")),\n\t\t\t\t),\n\t\t\t\t_block( // else\n\t\t\t\t\t_var(pbos_, nil, _x(\"make~(~[]%v,%v~)\", pboete_, goorl_)),\n\t\t\t\t\t_for(\n\t\t\t\t\t\t_a(\"i\", \":=\", \"0\"),\n\t\t\t\t\t\t_x(\"i__<__%v\", goorl_),\n\t\t\t\t\t\t_a(\"i\", \"+=\", \"1\"),\n\t\t\t\t\t\t_block(\n\t\t\t\t\t\t\t// Translate in place.\n\t\t\t\t\t\t\t_a(\"goore\", \":=\", _idx(goor, 
_i(\"i\"))),\n\t\t\t\t\t\t\t_block(go2pbStmts(rootPkg, false, imports, scope2, _x(\"%v~[~i~]\", pbos_), _i(\"goore\"), gooreIsPtr, gooreType, fopts, newoptions)...),\n\t\t\t\t\t\t),\n\t\t\t\t\t),\n\t\t\t\t\t_ctif((pboIsImplicit && options&option_implicit_list != 0), // compile time if\n\t\t\t\t\t\t_a(pbo, \"=\", _x(\"&%v~{~Value:%v~}\", dpbote_, pbos_)), // then\n\t\t\t\t\t\t_a(pbo, \"=\", maybeWrap(_i(pbos_))), // else\n\t\t\t\t\t),\n\t\t\t\t)))\n\n\tcase reflect.Struct:\n\t\tpbote_ := p3goTypeExprString(rootPkg, imports, scope, gooType, fopts)\n\t\tif pbote_[0] != '*' {\n\t\t\tpanic(\"expected pointer kind for p3goTypeExprString of struct type\")\n\t\t}\n\t\tdpbote_ := pbote_[1:]\n\n\t\tb = append(b,\n\t\t\t_a(pbo, \"=\", _x(\"new~(~%v~)\", dpbote_)))\n\n\t\tfor _, field := range goorType.Fields {\n\t\t\tvar goorfIsPtr = field.IsPtr()\n\t\t\tvar goorfType = field.TypeInfo.ReprType\n\t\t\tvar goorf = _sel(goor, field.Name) // next goo\n\t\t\tvar pbof = _sel(pbo, field.Name) // next pbo\n\n\t\t\t// Translate in place.\n\t\t\tscope2 := ast.NewScope(scope)\n\t\t\tb = append(b,\n\t\t\t\t_block(go2pbStmts(rootPkg, false, imports, scope2, pbof, goorf, goorfIsPtr, goorfType, field.FieldOptions, 0)...),\n\t\t\t)\n\t\t}\n\n\tdefault:\n\t\t// General translation.\n\t\tb = append(b,\n\t\t\t_a(pbo, \"=\", maybeWrap(_call(_i(goork.String()), goor))))\n\n\t}\n\treturn b\n}", "func (mb *MutableBag) ToProto(output *mixerpb.CompressedAttributes, globalDict map[string]int32, globalWordCount int) {\n\tds := newDictState(globalDict, globalWordCount)\n\tkeys := mb.Names()\n\n\tfor _, k := range keys {\n\t\tindex := ds.assignDictIndex(k)\n\t\tv, _ := mb.Get(k) // if not found, nil return will be ignored by the switch below\n\n\t\tswitch t := v.(type) {\n\t\tcase string:\n\t\t\tif output.Strings == nil {\n\t\t\t\toutput.Strings = make(map[int32]int32)\n\t\t\t}\n\t\t\toutput.Strings[index] = ds.assignDictIndex(t)\n\n\t\tcase int64:\n\t\t\tif output.Int64S == nil {\n\t\t\t\toutput.Int64S 
= make(map[int32]int64)\n\t\t\t}\n\t\t\toutput.Int64S[index] = t\n\n\t\tcase int:\n\t\t\tif output.Int64S == nil {\n\t\t\t\toutput.Int64S = make(map[int32]int64)\n\t\t\t}\n\t\t\toutput.Int64S[index] = int64(t)\n\n\t\tcase float64:\n\t\t\tif output.Doubles == nil {\n\t\t\t\toutput.Doubles = make(map[int32]float64)\n\t\t\t}\n\t\t\toutput.Doubles[index] = t\n\n\t\tcase bool:\n\t\t\tif output.Bools == nil {\n\t\t\t\toutput.Bools = make(map[int32]bool)\n\t\t\t}\n\t\t\toutput.Bools[index] = t\n\n\t\tcase time.Time:\n\t\t\tif output.Timestamps == nil {\n\t\t\t\toutput.Timestamps = make(map[int32]time.Time)\n\t\t\t}\n\t\t\toutput.Timestamps[index] = t\n\n\t\tcase time.Duration:\n\t\t\tif output.Durations == nil {\n\t\t\t\toutput.Durations = make(map[int32]time.Duration)\n\t\t\t}\n\t\t\toutput.Durations[index] = t\n\n\t\tcase []byte:\n\t\t\tif output.Bytes == nil {\n\t\t\t\toutput.Bytes = make(map[int32][]byte)\n\t\t\t}\n\t\t\toutput.Bytes[index] = t\n\n\t\tcase map[string]string:\n\t\t\tsm := make(map[int32]int32, len(t))\n\t\t\tfor smk, smv := range t {\n\t\t\t\tsm[ds.assignDictIndex(smk)] = ds.assignDictIndex(smv)\n\t\t\t}\n\n\t\t\tif output.StringMaps == nil {\n\t\t\t\toutput.StringMaps = make(map[int32]mixerpb.StringMap)\n\t\t\t}\n\t\t\toutput.StringMaps[index] = mixerpb.StringMap{Entries: sm}\n\n\t\tdefault:\n\t\t\tpanic(fmt.Errorf(\"cannot convert value:%v of type:%T\", v, v))\n\t\t}\n\t}\n\n\toutput.Words = ds.getMessageWordList()\n}", "func reposetToPBData(reposet ReposetProps) ([]byte, error) {\n\n data, err := reposet.Marshal()\n if err != nil {\n return nil, err\n }\n\n pbreposet := new(pb.IndexNode)\n typ := pb.IndexNode_Reposet\n pbreposet.Type = typ\n pbreposet.Data = data\n\n data, err = proto.Marshal(pbreposet)\n\n return data, nil\n}", "func ComputeBetaInstanceTemplatePropertiesDisksToProto(o *beta.InstanceTemplatePropertiesDisks) *betapb.ComputeBetaInstanceTemplatePropertiesDisks {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := 
&betapb.ComputeBetaInstanceTemplatePropertiesDisks{\n\t\tAutoDelete: dcl.ValueOrEmptyBool(o.AutoDelete),\n\t\tBoot: dcl.ValueOrEmptyBool(o.Boot),\n\t\tDeviceName: dcl.ValueOrEmptyString(o.DeviceName),\n\t\tDiskEncryptionKey: ComputeBetaInstanceTemplatePropertiesDisksDiskEncryptionKeyToProto(o.DiskEncryptionKey),\n\t\tIndex: dcl.ValueOrEmptyInt64(o.Index),\n\t\tInitializeParams: ComputeBetaInstanceTemplatePropertiesDisksInitializeParamsToProto(o.InitializeParams),\n\t\tInterface: ComputeBetaInstanceTemplatePropertiesDisksInterfaceEnumToProto(o.Interface),\n\t\tMode: ComputeBetaInstanceTemplatePropertiesDisksModeEnumToProto(o.Mode),\n\t\tSource: dcl.ValueOrEmptyString(o.Source),\n\t\tType: ComputeBetaInstanceTemplatePropertiesDisksTypeEnumToProto(o.Type),\n\t}\n\tfor _, r := range o.GuestOSFeatures {\n\t\tp.GuestOsFeatures = append(p.GuestOsFeatures, ComputeBetaInstanceTemplatePropertiesDisksGuestOSFeaturesToProto(&r))\n\t}\n\treturn p\n}", "func ComputeBetaInstanceTemplatePropertiesShieldedInstanceConfigToProto(o *beta.InstanceTemplatePropertiesShieldedInstanceConfig) *betapb.ComputeBetaInstanceTemplatePropertiesShieldedInstanceConfig {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &betapb.ComputeBetaInstanceTemplatePropertiesShieldedInstanceConfig{\n\t\tEnableSecureBoot: dcl.ValueOrEmptyBool(o.EnableSecureBoot),\n\t\tEnableVtpm: dcl.ValueOrEmptyBool(o.EnableVtpm),\n\t\tEnableIntegrityMonitoring: dcl.ValueOrEmptyBool(o.EnableIntegrityMonitoring),\n\t}\n\treturn p\n}", "func EncodePb(p proto.Message) (data []byte, err error) {\n\tdata, err = proto.Marshal(p)\n\treturn\n}", "func yamlToProto(path string, v proto.Message) error {\n\tyamlBytes, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn errors.Trace(err)\n\t}\n\tjsonBytes, err := yaml.YAMLToJSONStrict(yamlBytes)\n\tif err != nil {\n\t\treturn errors.Trace(err)\n\t}\n\tr := bytes.NewReader(jsonBytes)\n\terr = pbjson.NewDecoder(r).Decode(v)\n\treturn errors.Trace(err)\n}", "func (i *Invoice) 
loadFromP2PProtobuf(invoiceData *invoicepb.InvoiceData) {\n\ti.InvoiceNumber = invoiceData.InvoiceNumber\n\ti.InvoiceStatus = invoiceData.InvoiceStatus\n\ti.SenderName = invoiceData.SenderName\n\ti.SenderStreet = invoiceData.SenderStreet\n\ti.SenderCity = invoiceData.SenderCity\n\ti.SenderZipcode = invoiceData.SenderZipcode\n\ti.SenderCountry = invoiceData.SenderCountry\n\ti.RecipientName = invoiceData.RecipientName\n\ti.RecipientStreet = invoiceData.RecipientStreet\n\ti.RecipientCity = invoiceData.RecipientCity\n\ti.RecipientZipcode = invoiceData.RecipientZipcode\n\ti.RecipientCountry = invoiceData.RecipientCountry\n\ti.Currency = invoiceData.Currency\n\ti.GrossAmount = invoiceData.GrossAmount\n\ti.NetAmount = invoiceData.NetAmount\n\ti.TaxAmount = invoiceData.TaxAmount\n\ti.TaxRate = invoiceData.TaxRate\n\n\tif invoiceData.Recipient != nil {\n\t\trecipient := identity.NewDIDFromBytes(invoiceData.Recipient)\n\t\ti.Recipient = &recipient\n\t}\n\n\tif invoiceData.Sender != nil {\n\t\tsender := identity.NewDIDFromBytes(invoiceData.Sender)\n\t\ti.Sender = &sender\n\t}\n\n\tif invoiceData.Payee != nil {\n\t\tpayee := identity.NewDIDFromBytes(invoiceData.Payee)\n\t\ti.Payee = &payee\n\t}\n\n\ti.Comment = invoiceData.Comment\n\ti.DueDate = invoiceData.DueDate\n\ti.DateCreated = invoiceData.DateCreated\n\ti.ExtraData = invoiceData.ExtraData\n}", "func (object Object) BTo(value interface{}) Object {\n\treturn object.Property(as.PropertyBTo, value)\n}", "func generateMethodsForType(imports *ast.GenDecl, scope *ast.Scope, pkg *amino.Package, info *amino.TypeInfo) (methods []ast.Decl, err error) {\n\tif info.Type.Kind() == reflect.Interface {\n\t\tpanic(\"should not happen\")\n\t}\n\n\tpbote_ := p3goTypeExprString(pkg, imports, scope, info, amino.FieldOptions{})\n\tif pbote_[0] != '*' {\n\t\tpanic(\"expected pointer kind for p3goTypeExprString (of registered type)\")\n\t}\n\tdpbote_ := pbote_[1:]\n\n\t//////////////////\n\t// ToPBMessage()\n\t{\n\t\tscope2 := 
ast.NewScope(scope)\n\t\taddVars(scope2, \"cdc\", \"goo\", \"pbo\", \"msg\", \"err\")\n\t\t// Set toProto function.\n\t\tmethods = append(methods, _func(\"ToPBMessage\",\n\t\t\t\"goo\", info.Type.Name(),\n\t\t\t_fields(\"cdc\", \"*amino.Codec\"),\n\t\t\t_fields(\"msg\", \"proto.Message\", \"err\", \"error\"),\n\t\t\t_block(\n\t\t\t\t// Body: declaration for pb message.\n\t\t\t\t_var(\"pbo\", _x(pbote_), nil),\n\t\t\t\t// Body: copying over fields.\n\t\t\t\t_block(go2pbStmts(pkg, true, imports, scope2, _i(\"pbo\"), _i(\"goo\"), false, info, amino.FieldOptions{}, 0)...),\n\t\t\t\t// Body: return value.\n\t\t\t\t_a(\"msg\", \"=\", \"pbo\"),\n\t\t\t\t_return(),\n\t\t\t),\n\t\t))\n\t}\n\n\t//////////////////\n\t// EmptyPBMessage()\n\t// Use to create the pbm to proto.Unmarshal to before FromPBMessage.\n\t{\n\t\tscope2 := ast.NewScope(scope)\n\t\taddVars(scope2, \"cdc\", \"goo\", \"pbo\", \"msg\", \"err\")\n\t\t// Set toProto function.\n\t\tmethods = append(methods, _func(\"EmptyPBMessage\",\n\t\t\t\"goo\", info.Type.Name(),\n\t\t\t_fields(\"cdc\", \"*amino.Codec\"),\n\t\t\t_fields(\"msg\", \"proto.Message\"),\n\t\t\t_block(\n\t\t\t\t// Body: declaration for pb message.\n\t\t\t\t_a(\"pbo\", \":=\", _x(\"new~(~%v~)\", dpbote_)),\n\t\t\t\t// Body: return value.\n\t\t\t\t_a(\"msg\", \"=\", \"pbo\"),\n\t\t\t\t_return(),\n\t\t\t),\n\t\t))\n\t}\n\n\t//////////////////\n\t// FromPBMessage()\n\t{\n\t\tscope2 := ast.NewScope(scope)\n\t\taddVars(scope2, \"cdc\", \"goo\", \"pbo\", \"msg\", \"err\")\n\t\tmethods = append(methods, _func(\"FromPBMessage\",\n\t\t\t\"goo\", \"*\"+info.Type.Name(),\n\t\t\t_fields(\"cdc\", \"*amino.Codec\", \"msg\", \"proto.Message\"),\n\t\t\t_fields(\"err\", \"error\"),\n\t\t\t_block(\n\t\t\t\t// Body: declaration for pb message.\n\t\t\t\t_var(\"pbo\", _x(pbote_),\n\t\t\t\t\t_x(\"%v.~(~%v~)\", \"msg\", pbote_)),\n\t\t\t\t// Body: copying over fields.\n\t\t\t\t_block(pb2goStmts(pkg, true, imports, scope2, _i(\"goo\"), true, info, _i(\"pbo\"), 
amino.FieldOptions{}, 0)...),\n\t\t\t\t// Body: return.\n\t\t\t\t_return(),\n\t\t\t),\n\t\t))\n\t}\n\n\t//////////////////\n\t// TypeUrl()\n\t{\n\t\tmethods = append(methods, _func(\"GetTypeURL\",\n\t\t\t\"\", info.Type.Name(),\n\t\t\t_fields(),\n\t\t\t_fields(\"typeURL\", \"string\"),\n\t\t\t_block(\n\t\t\t\t_return(_s(info.TypeURL)),\n\t\t\t),\n\t\t))\n\t}\n\n\t//////////////////\n\t// Is*ReprEmpty()\n\t{\n\t\trinfo := info.ReprType\n\t\tscope2 := ast.NewScope(scope)\n\t\taddVars(scope2, \"goo\", \"empty\")\n\t\tgoorte := goTypeExpr(pkg, rinfo.Type, imports, scope2)\n\t\tmethods = append(methods, _func(fmt.Sprintf(\"Is%vReprEmpty\", info.Name),\n\t\t\t\"\", \"\",\n\t\t\t_fields(\"goor\", goorte),\n\t\t\t_fields(\"empty\", \"bool\"),\n\t\t\t_block(\n\t\t\t\t// Body: check fields.\n\t\t\t\t_block(append(\n\t\t\t\t\t[]ast.Stmt{_a(\"empty\", \"=\", \"true\")},\n\t\t\t\t\tisReprEmptyStmts(pkg, true, imports, scope2, _i(\"goor\"), false, info)...,\n\t\t\t\t)...),\n\t\t\t\t// Body: return.\n\t\t\t\t_return(),\n\t\t\t),\n\t\t))\n\t}\n\treturn\n}", "func GenerateGBfromproto(record *bioproto.Genbank) string {\n\tvar stringbuffer bytes.Buffer\n\n\tstringbuffer.WriteString(generateHeaderString(record))\n\tstringbuffer.WriteString(\"FEATURES Location/Qualifiers\\n\")\n\tstringbuffer.WriteString(generateQualifierString(record))\n\tif record.FEATURES != nil {\n\n\t}\n\tif record.CONTIG != \"\" {\n\t\tstringbuffer.WriteString(\"CONTIG \" + record.CONTIG + \"\\n\")\n\t}\n\tstringbuffer.WriteString(\"//\\n\")\n\treturn stringbuffer.String()\n}", "func (op *Operation) ToProto(wrappedID string) *api.UploadOperation {\n\tvar ref *api.ObjectRef\n\tif op.Status == api.UploadStatus_PUBLISHED {\n\t\tref = &api.ObjectRef{\n\t\t\tHashAlgo: op.HashAlgo,\n\t\t\tHexDigest: op.HexDigest,\n\t\t}\n\t}\n\treturn &api.UploadOperation{\n\t\tOperationId: wrappedID,\n\t\tUploadUrl: op.UploadURL,\n\t\tStatus: op.Status,\n\t\tObject: ref,\n\t\tErrorMessage: op.Error,\n\t}\n}", "func MakePbUtxo(op 
*types.OutPoint, uw *types.UtxoWrap) *rpcpb.Utxo {\n\ts := script.NewScriptFromBytes(uw.Script())\n\tvalue := uw.Value()\n\tif s.IsTokenIssue() || s.IsTokenTransfer() {\n\t\tvalue = 0\n\t}\n\treturn &rpcpb.Utxo{\n\t\tBlockHeight: uw.Height(),\n\t\t// IsCoinbase: uw.IsCoinBase(),\n\t\tIsSpent: uw.IsSpent(),\n\t\tOutPoint: NewPbOutPoint(&op.Hash, op.Index),\n\t\tTxOut: &corepb.TxOut{\n\t\t\tValue: value,\n\t\t\tScriptPubKey: uw.Script(),\n\t\t},\n\t}\n}", "func BrandToProto(brand *domain.Brand) *brandproto.Brand {\n\tif brand == nil {\n\t\treturn nil\n\t}\n\n\treturn &brandproto.Brand{\n\t\tId: brand.ID,\n\t\tSlug: brand.Slug,\n\t\tShortName: brand.ShortName,\n\t\tName: brand.Name,\n\t\tDescription: brand.Description,\n\t\tImageId: brand.ImageID,\n\t\tCreatedAt: util.TimeToProto(brand.CreatedAt),\n\t\tUpdatedAt: util.TimeToProto(brand.UpdatedAt),\n\t}\n}", "func BigqueryRoutineArgumentsDataTypeStructTypeFieldsToProto(o *bigquery.RoutineArgumentsDataTypeStructTypeFields) *bigquerypb.BigqueryRoutineArgumentsDataTypeStructTypeFields {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &bigquerypb.BigqueryRoutineArgumentsDataTypeStructTypeFields{}\n\tp.SetName(dcl.ValueOrEmptyString(o.Name))\n\tp.SetType(BigqueryRoutineArgumentsDataTypeToProto(o.Type))\n\treturn p\n}", "func ToProto(sandbox *Sandbox) *types.Sandbox {\n\textensions := make(map[string]*gogo_types.Any)\n\tfor k, v := range sandbox.Extensions {\n\t\textensions[k] = protobuf.FromAny(v)\n\t}\n\treturn &types.Sandbox{\n\t\tSandboxID: sandbox.ID,\n\t\tRuntime: &types.Sandbox_Runtime{\n\t\t\tName: sandbox.Runtime.Name,\n\t\t\tOptions: protobuf.FromAny(sandbox.Runtime.Options),\n\t\t},\n\t\tLabels: sandbox.Labels,\n\t\tCreatedAt: protobuf.ToTimestamp(sandbox.CreatedAt),\n\t\tUpdatedAt: protobuf.ToTimestamp(sandbox.UpdatedAt),\n\t\tExtensions: extensions,\n\t\tSpec: protobuf.FromAny(sandbox.Spec),\n\t}\n}", "func (m *Fields) MarshalTo(data []byte) (int, error) {\n\treturn m.ProtoFields().MarshalTo(data)\n}", "func 
BigIntToProto(x *big.Int) *BigInt {\n\tif x == nil {\n\t\treturn nil\n\t}\n\tpb := new(BigInt)\n\tpb.Raw = x.Bytes()\n\treturn pb\n}", "func InstanceToProto(resource *sql.Instance) *sqlpb.SqlInstance {\n\tp := &sqlpb.SqlInstance{\n\t\tBackendType: SqlInstanceBackendTypeEnumToProto(resource.BackendType),\n\t\tConnectionName: dcl.ValueOrEmptyString(resource.ConnectionName),\n\t\tDatabaseVersion: SqlInstanceDatabaseVersionEnumToProto(resource.DatabaseVersion),\n\t\tEtag: dcl.ValueOrEmptyString(resource.Etag),\n\t\tGceZone: dcl.ValueOrEmptyString(resource.GceZone),\n\t\tInstanceType: SqlInstanceInstanceTypeEnumToProto(resource.InstanceType),\n\t\tMasterInstanceName: dcl.ValueOrEmptyString(resource.MasterInstanceName),\n\t\tMaxDiskSize: SqlInstanceMaxDiskSizeToProto(resource.MaxDiskSize),\n\t\tName: dcl.ValueOrEmptyString(resource.Name),\n\t\tProject: dcl.ValueOrEmptyString(resource.Project),\n\t\tRegion: dcl.ValueOrEmptyString(resource.Region),\n\t\tRootPassword: dcl.ValueOrEmptyString(resource.RootPassword),\n\t\tCurrentDiskSize: SqlInstanceCurrentDiskSizeToProto(resource.CurrentDiskSize),\n\t\tDiskEncryptionConfiguration: SqlInstanceDiskEncryptionConfigurationToProto(resource.DiskEncryptionConfiguration),\n\t\tFailoverReplica: SqlInstanceFailoverReplicaToProto(resource.FailoverReplica),\n\t\tMasterInstance: SqlInstanceMasterInstanceToProto(resource.MasterInstance),\n\t\tReplicaConfiguration: SqlInstanceReplicaConfigurationToProto(resource.ReplicaConfiguration),\n\t\tScheduledMaintenance: SqlInstanceScheduledMaintenanceToProto(resource.ScheduledMaintenance),\n\t\tSettings: SqlInstanceSettingsToProto(resource.Settings),\n\t}\n\tfor _, r := range resource.IPAddresses {\n\t\tp.IpAddresses = append(p.IpAddresses, SqlInstanceIPAddressesToProto(&r))\n\t}\n\n\treturn p\n}", "func ProtoToComputeBetaInstanceTemplateProperties(p *betapb.ComputeBetaInstanceTemplateProperties) *beta.InstanceTemplateProperties {\n\tif p == nil {\n\t\treturn nil\n\t}\n\tobj := 
&beta.InstanceTemplateProperties{\n\t\tCanIPForward: dcl.Bool(p.CanIpForward),\n\t\tDescription: dcl.StringOrNil(p.Description),\n\t\tMachineType: dcl.StringOrNil(p.MachineType),\n\t\tMinCpuPlatform: dcl.StringOrNil(p.MinCpuPlatform),\n\t\tReservationAffinity: ProtoToComputeBetaInstanceTemplatePropertiesReservationAffinity(p.GetReservationAffinity()),\n\t\tShieldedInstanceConfig: ProtoToComputeBetaInstanceTemplatePropertiesShieldedInstanceConfig(p.GetShieldedInstanceConfig()),\n\t\tScheduling: ProtoToComputeBetaInstanceTemplatePropertiesScheduling(p.GetScheduling()),\n\t}\n\tfor _, r := range p.GetDisks() {\n\t\tobj.Disks = append(obj.Disks, *ProtoToComputeBetaInstanceTemplatePropertiesDisks(r))\n\t}\n\tfor _, r := range p.GetGuestAccelerators() {\n\t\tobj.GuestAccelerators = append(obj.GuestAccelerators, *ProtoToComputeBetaInstanceTemplatePropertiesGuestAccelerators(r))\n\t}\n\tfor _, r := range p.GetNetworkInterfaces() {\n\t\tobj.NetworkInterfaces = append(obj.NetworkInterfaces, *ProtoToComputeBetaInstanceTemplatePropertiesNetworkInterfaces(r))\n\t}\n\tfor _, r := range p.GetServiceAccounts() {\n\t\tobj.ServiceAccounts = append(obj.ServiceAccounts, *ProtoToComputeBetaInstanceTemplatePropertiesServiceAccounts(r))\n\t}\n\tfor _, r := range p.GetTags() {\n\t\tobj.Tags = append(obj.Tags, r)\n\t}\n\treturn obj\n}", "func ProtoToRoutine(p *bigquerypb.BigqueryRoutine) *bigquery.Routine {\n\tobj := &bigquery.Routine{\n\t\tEtag: dcl.StringOrNil(p.GetEtag()),\n\t\tName: dcl.StringOrNil(p.GetName()),\n\t\tProject: dcl.StringOrNil(p.GetProject()),\n\t\tDataset: dcl.StringOrNil(p.GetDataset()),\n\t\tRoutineType: ProtoToBigqueryRoutineRoutineTypeEnum(p.GetRoutineType()),\n\t\tCreationTime: dcl.Int64OrNil(p.GetCreationTime()),\n\t\tLastModifiedTime: dcl.Int64OrNil(p.GetLastModifiedTime()),\n\t\tLanguage: ProtoToBigqueryRoutineLanguageEnum(p.GetLanguage()),\n\t\tReturnType: ProtoToBigqueryRoutineArgumentsDataType(p.GetReturnType()),\n\t\tDefinitionBody: 
dcl.StringOrNil(p.GetDefinitionBody()),\n\t\tDescription: dcl.StringOrNil(p.GetDescription()),\n\t\tDeterminismLevel: ProtoToBigqueryRoutineDeterminismLevelEnum(p.GetDeterminismLevel()),\n\t\tStrictMode: dcl.Bool(p.GetStrictMode()),\n\t}\n\tfor _, r := range p.GetArguments() {\n\t\tobj.Arguments = append(obj.Arguments, *ProtoToBigqueryRoutineArguments(r))\n\t}\n\tfor _, r := range p.GetImportedLibraries() {\n\t\tobj.ImportedLibraries = append(obj.ImportedLibraries, r)\n\t}\n\treturn obj\n}", "func CopyPBAndDereference(dst interface{}, src interface{}) interface{} {\n\tcopy := CopyPB(dst, src)\n\treturn reflect.ValueOf(copy).Elem().Interface()\n}", "func NewPBPacket(id ProtoID) interface{} {\n packet, ok := packetMap[id];\n if !ok {\n return nil;\n }\n ms, _ := packet.(proto.Message);\n \n return proto.Clone(ms)\n}", "func (t *Tree) ToProto() *pbMerkle.Tree {\n\treturn &pbMerkle.Tree{\n\t\tDepth: int64(t.depth),\n\t\tRootNode: t.rootNode.toProto(),\n\t}\n}", "func (data *Data) ToProto() tmproto.Data {\n\ttp := new(tmproto.Data)\n\n\tif len(data.Txs) > 0 {\n\t\ttxBzs := make([][]byte, len(data.Txs))\n\t\tfor i := range data.Txs {\n\t\t\ttxBzs[i] = data.Txs[i]\n\t\t}\n\t\ttp.Txs = txBzs\n\t}\n\n\treturn *tp\n}", "func (data *Data) ToProto() tmproto.Data {\n\ttp := new(tmproto.Data)\n\n\tif len(data.Txs) > 0 {\n\t\ttxBzs := make([][]byte, len(data.Txs))\n\t\tfor i := range data.Txs {\n\t\t\ttxBzs[i] = data.Txs[i]\n\t\t}\n\t\ttp.Txs = txBzs\n\t}\n\n\treturn *tp\n}", "func RecordToProto(ctx context.Context, dag format.DAGService, rec net.Record) (*pb.Log_Record, error) {\n\tblock, err := rec.GetBlock(ctx, dag)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tevent, ok := block.(*Event)\n\tif !ok {\n\t\tevent, err = EventFromNode(block)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\theader, err := event.GetHeader(ctx, dag, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tbody, err := event.GetBody(ctx, dag, nil)\n\tif err != nil {\n\t\treturn nil, 
err\n\t}\n\n\treturn &pb.Log_Record{\n\t\tRecordNode: rec.RawData(),\n\t\tEventNode: block.RawData(),\n\t\tHeaderNode: header.RawData(),\n\t\tBodyNode: body.RawData(),\n\t}, nil\n}", "func ComputeBetaInstanceTemplatePropertiesDisksInitializeParamsToProto(o *beta.InstanceTemplatePropertiesDisksInitializeParams) *betapb.ComputeBetaInstanceTemplatePropertiesDisksInitializeParams {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &betapb.ComputeBetaInstanceTemplatePropertiesDisksInitializeParams{\n\t\tDiskName: dcl.ValueOrEmptyString(o.DiskName),\n\t\tDiskSizeGb: dcl.ValueOrEmptyInt64(o.DiskSizeGb),\n\t\tDiskType: dcl.ValueOrEmptyString(o.DiskType),\n\t\tSourceImage: dcl.ValueOrEmptyString(o.SourceImage),\n\t\tSourceSnapshot: dcl.ValueOrEmptyString(o.SourceSnapshot),\n\t\tSourceSnapshotEncryptionKey: ComputeBetaInstanceTemplatePropertiesDisksInitializeParamsSourceSnapshotEncryptionKeyToProto(o.SourceSnapshotEncryptionKey),\n\t\tDescription: dcl.ValueOrEmptyString(o.Description),\n\t\tOnUpdateAction: dcl.ValueOrEmptyString(o.OnUpdateAction),\n\t\tSourceImageEncryptionKey: ComputeBetaInstanceTemplatePropertiesDisksInitializeParamsSourceImageEncryptionKeyToProto(o.SourceImageEncryptionKey),\n\t}\n\tp.Labels = make(map[string]string)\n\tfor k, r := range o.Labels {\n\t\tp.Labels[k] = r\n\t}\n\tfor _, r := range o.ResourcePolicies {\n\t\tp.ResourcePolicies = append(p.ResourcePolicies, r)\n\t}\n\treturn p\n}", "func BigqueryRoutineArgumentsDataTypeStructTypeToProto(o *bigquery.RoutineArgumentsDataTypeStructType) *bigquerypb.BigqueryRoutineArgumentsDataTypeStructType {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &bigquerypb.BigqueryRoutineArgumentsDataTypeStructType{}\n\tsFields := make([]*bigquerypb.BigqueryRoutineArgumentsDataTypeStructTypeFields, len(o.Fields))\n\tfor i, r := range o.Fields {\n\t\tsFields[i] = BigqueryRoutineArgumentsDataTypeStructTypeFieldsToProto(&r)\n\t}\n\tp.SetFields(sFields)\n\treturn p\n}", "func CloudkmsBetaCryptoKeyVersionTemplateToProto(o 
*beta.CryptoKeyVersionTemplate) *betapb.CloudkmsBetaCryptoKeyVersionTemplate {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &betapb.CloudkmsBetaCryptoKeyVersionTemplate{}\n\tp.SetProtectionLevel(CloudkmsBetaCryptoKeyVersionTemplateProtectionLevelEnumToProto(o.ProtectionLevel))\n\tp.SetAlgorithm(CloudkmsBetaCryptoKeyVersionTemplateAlgorithmEnumToProto(o.Algorithm))\n\treturn p\n}", "func TestPbEncoderAndDecoder(t *testing.T) {\n\tregister(0, reflect.TypeOf(example.A{}))\n\n\tbuf := new(bytes.Buffer)\n\n\tinPb := &example.A{\n\t\tDescription: \"hello world!\",\n\t\tNumber: 1,\n\t}\n\t// UUID is 16 byte long\n\tfor i := 0; i < 16; i++ {\n\t\tinPb.Id = append(inPb.Id, byte(i))\n\t}\n\n\tmsg := NewPbMessage(0, inPb)\n\n\te := NewMsgEncoder(buf)\n\te.EncodePb(msg)\n\n\toutMsg := NewEmptyPbMessage()\n\n\td := NewMsgDecoder(buf)\n\terr := d.DecodePb(outMsg)\n\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tif !reflect.DeepEqual(msg, outMsg) {\n\t\tt.Fatal(\"Messages are not equal!\")\n\t}\n\n\tif !reflect.DeepEqual(inPb, outMsg.pb) {\n\t\tt.Fatal(\"Protos are not equal!\")\n\t}\n}", "func BigqueryRoutineArgumentsToProto(o *bigquery.RoutineArguments) *bigquerypb.BigqueryRoutineArguments {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &bigquerypb.BigqueryRoutineArguments{}\n\tp.SetName(dcl.ValueOrEmptyString(o.Name))\n\tp.SetArgumentKind(BigqueryRoutineArgumentsArgumentKindEnumToProto(o.ArgumentKind))\n\tp.SetMode(BigqueryRoutineArgumentsModeEnumToProto(o.Mode))\n\tp.SetDataType(BigqueryRoutineArgumentsDataTypeToProto(o.DataType))\n\treturn p\n}", "func ChatDBToProto(chat *db.Chat) *models.Message {\n\treturn &models.Message{\n\t\tId: chat.ID,\n\t\tFrom: chat.From,\n\t\tBody: chat.Body,\n\t\tTo: chat.To,\n\t\tTime: chat.Time,\n\t}\n}", "func (me *Model) ToRPCMsg(escape bool) *rpc.Message {\n\tret := &rpc.Message{\n\t\tId: me.ID.Hex(),\n\t\tTopicId: me.TopicID.Hex(),\n\t\tSenderName: me.SenderName,\n\t\tPostTime: &timestamp.Timestamp{\n\t\t\tSeconds: 
me.PostTime.Unix(),\n\t\t\tNanos: int32(me.PostTime.Nanosecond()),\n\t\t},\n\t\tMessage: me.Message,\n\t\tBump: me.Bump,\n\t}\n\tif escape {\n\t\tret.SenderName = html.EscapeString(me.SenderName)\n\t\tret.Message = html.EscapeString(me.Message)\n\t}\n\treturn ret\n}", "func ToBytes(inter interface{}) []byte {\n\treqBodyBytes := new(bytes.Buffer)\n\tjson.NewEncoder(reqBodyBytes).Encode(inter)\n\tfmt.Println(reqBodyBytes.Bytes()) // this is the []byte\n\tfmt.Println(string(reqBodyBytes.Bytes())) // converted back to show it's your original object\n\treturn reqBodyBytes.Bytes()\n}", "func (op RollupOp) ToProto(pb *pipelinepb.AppliedRollupOp) error {\n\top.AggregationID.ToProto(&pb.AggregationId)\n\tpb.Id = op.ID\n\treturn nil\n}", "func ForgetAllFields(t *testing.T, originalMessage proto.Message) proto.Message {\n\tt.Helper()\n\n\temptyMessage := &pb2_latest.Empty{}\n\n\tbinaryMessage, err := proto.Marshal(originalMessage)\n\tif err != nil {\n\t\tt.Error(err)\n\t}\n\n\terr = proto.Unmarshal(binaryMessage, emptyMessage)\n\tif err != nil {\n\t\tt.Error(err)\n\t}\n\treturn emptyMessage\n}", "func (m *Message) ToProtobuf() *PBDHTMessage {\n\tpmes := new(PBDHTMessage)\n\tif m.Value != nil {\n\t\tpmes.Value = m.Value\n\t}\n\n\tpmes.Type = &m.Type\n\tpmes.Key = &m.Key\n\tpmes.Response = &m.Response\n\tpmes.Id = &m.ID\n\tpmes.Success = &m.Success\n\tfor _, p := range m.Peers {\n\t\tpmes.Peers = append(pmes.Peers, peerInfo(p))\n\t}\n\n\treturn pmes\n}", "func ContainerClusterAddonsConfigHttpLoadBalancingToProto(o *container.ClusterAddonsConfigHttpLoadBalancing) *containerpb.ContainerClusterAddonsConfigHttpLoadBalancing {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &containerpb.ContainerClusterAddonsConfigHttpLoadBalancing{\n\t\tDisabled: dcl.ValueOrEmptyBool(o.Disabled),\n\t}\n\treturn p\n}", "func FieldDataToBytes(endian binary.ByteOrder, fieldData FieldData) ([]byte, error) {\n\tswitch field := fieldData.(type) {\n\tcase *BoolFieldData:\n\t\t// return binaryWrite(endian, 
field.Data)\n\t\treturn boolFieldDataToPbBytes(field)\n\tcase *StringFieldData:\n\t\treturn stringFieldDataToPbBytes(field)\n\tcase *ArrayFieldData:\n\t\treturn arrayFieldDataToPbBytes(field)\n\tcase *JSONFieldData:\n\t\treturn jsonFieldDataToPbBytes(field)\n\tcase *BinaryVectorFieldData:\n\t\treturn field.Data, nil\n\tcase *FloatVectorFieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tcase *Int8FieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tcase *Int16FieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tcase *Int32FieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tcase *Int64FieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tcase *FloatFieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tcase *DoubleFieldData:\n\t\treturn binaryWrite(endian, field.Data)\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"unsupported field data: %s\", field)\n\t}\n}", "func patchFieldValueToObject(fieldPath string, value interface{}, to runtime.Object, mo *xpv1.MergeOptions) error {\n\tpaved, err := fieldpath.PaveObject(to)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := paved.MergeValue(fieldPath, value, mo); err != nil {\n\t\treturn err\n\t}\n\n\treturn runtime.DefaultUnstructuredConverter.FromUnstructured(paved.UnstructuredContent(), to)\n}", "func NewPbJsonPublisher(q sqlh.Queryer, schema, table string) MsgPublisherFunc {\n\n\tpbPublisher := NewMsgPublisher(pjenc.DefaultPbEncoder, q, schema, table)\n\tjsonPublisher := NewMsgPublisher(pjenc.DefaultJsonEncoder, q, schema, table)\n\n\treturn func(ctx context.Context, spec MsgSpec, msg interface{}) error {\n\t\tif _, ok := spec.MsgValue().(proto.Message); ok {\n\t\t\treturn pbPublisher(ctx, spec, msg)\n\t\t}\n\t\treturn jsonPublisher(ctx, spec, msg)\n\t}\n}", "func ProtoToDomainMapping(p *appenginepb.AppengineDomainMapping) *appengine.DomainMapping {\n\tobj := &appengine.DomainMapping{\n\t\tSelfLink: dcl.StringOrNil(p.SelfLink),\n\t\tName: dcl.StringOrNil(p.Name),\n\t\tSslSettings: 
ProtoToAppengineDomainMappingSslSettings(p.GetSslSettings()),\n\t\tApp: dcl.StringOrNil(p.App),\n\t}\n\tfor _, r := range p.GetResourceRecords() {\n\t\tobj.ResourceRecords = append(obj.ResourceRecords, *ProtoToAppengineDomainMappingResourceRecords(r))\n\t}\n\treturn obj\n}", "func ToPbTime(gt time.Time) *google_protobuf1.Timestamp {\n\treturn &google_protobuf1.Timestamp{Seconds: int64(gt.Unix()), Nanos: int32(gt.Nanosecond())}\n}", "func ToObject(v []byte, output interface{}) error {\n\t_, err := newStructDecoder(output).Decode(v) // nolint\n\treturn err\n}", "func (o *ProjectWebhookPartial) ToMap() map[string]interface{} {\n\tkv := map[string]interface{}{\n\t\t\"enabled\": toProjectWebhookObject(o.Enabled, true),\n\t\t\"error_message\": toProjectWebhookObject(o.ErrorMessage, true),\n\t\t\"errored\": toProjectWebhookObject(o.Errored, true),\n\t\t\"project_id\": toProjectWebhookObject(o.ProjectID, true),\n\t\t\"url\": toProjectWebhookObject(o.URL, true),\n\t}\n\tfor k, v := range kv {\n\t\tif v == nil || reflect.ValueOf(v).IsZero() {\n\t\t\tdelete(kv, k)\n\t\t} else {\n\t\t}\n\t}\n\treturn kv\n}", "func toTextPb(_ *starlark.Thread, _ *starlark.Builtin, args starlark.Tuple, kwargs []starlark.Tuple) (starlark.Value, error) {\n\tvar msg *Message\n\tif err := starlark.UnpackArgs(\"to_textpb\", args, kwargs, \"msg\", &msg); err != nil {\n\t\treturn nil, err\n\t}\n\tpb, err := msg.ToProto()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn starlark.String(proto.MarshalTextString(pb)), nil\n}", "func AttestorToProto(resource *binaryauthorization.Attestor) *binaryauthorizationpb.BinaryauthorizationAttestor {\n\tp := &binaryauthorizationpb.BinaryauthorizationAttestor{\n\t\tName: dcl.ValueOrEmptyString(resource.Name),\n\t\tDescription: dcl.ValueOrEmptyString(resource.Description),\n\t\tUserOwnedGrafeasNote: BinaryauthorizationAttestorUserOwnedGrafeasNoteToProto(resource.UserOwnedGrafeasNote),\n\t\tUpdateTime: dcl.ValueOrEmptyString(resource.UpdateTime),\n\t\tProject: 
dcl.ValueOrEmptyString(resource.Project),\n\t}\n\n\treturn p\n}", "func patchFieldValueToObject(fieldPath string, value any, to runtime.Object, mo *xpv1.MergeOptions) error {\n\tpaved, err := fieldpath.PaveObject(to)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := paved.MergeValue(fieldPath, value, mo); err != nil {\n\t\treturn err\n\t}\n\n\treturn runtime.DefaultUnstructuredConverter.FromUnstructured(paved.UnstructuredContent(), to)\n}", "func SendtoPB(output string) (int){\n values := url.Values{}\n\tvalues.Set(\"api_dev_key\", \"\")\n\tvalues.Set(\"api_option\", \"paste\")\n\tvalues.Set(\"api_paste_code\", output)\n\tvalues.Set(\"api_paste_name\", \"TEST\")\n\tvalues.Set(\"api_paste_expire_date\", \"10M\")\n\tresponse, err := http.PostForm(\"http://pastebin.com/api/api_post.php\", values)\n\tdefer response.Body.Close()\n\tif err != nil {\n //log.Fatalln(err)\n return 0\n\t}\n\tif response.StatusCode != 200 {\n //log.Fatalln(response.StatusCode)\n return 0\n\t}\n\tbuf := bytes.Buffer{}\n\t_, err = buf.ReadFrom(response.Body)\n\tif err != nil {\n //log.Fatalln(err)\n return 0\n\t}\n // Debugging Pastebin response\n // fmt.Println(buf.String())\n return 0\n}" ]
[ "0.7009581", "0.69764227", "0.6810474", "0.67255133", "0.67079705", "0.610796", "0.6100014", "0.6029623", "0.5987019", "0.5984469", "0.57624716", "0.5666927", "0.5660824", "0.5600371", "0.5506886", "0.54738975", "0.54206467", "0.53750503", "0.53714156", "0.5343201", "0.53418124", "0.5309756", "0.5295904", "0.5247248", "0.52249", "0.5209925", "0.5186056", "0.5170326", "0.5146683", "0.51216304", "0.50803584", "0.50768983", "0.50649035", "0.50504375", "0.50312245", "0.5021211", "0.5010978", "0.49711934", "0.49685082", "0.49494094", "0.4939978", "0.49300367", "0.49295285", "0.49242795", "0.49119344", "0.49103355", "0.4907311", "0.48928568", "0.48928568", "0.48878205", "0.48739877", "0.4864435", "0.48528567", "0.48376116", "0.48344088", "0.48273402", "0.48229188", "0.48207107", "0.48205057", "0.48133776", "0.4812911", "0.48108232", "0.48074412", "0.47966814", "0.47939524", "0.47860634", "0.4785482", "0.47770175", "0.47729775", "0.47639716", "0.4757776", "0.47476527", "0.47468144", "0.47458422", "0.4740616", "0.4740616", "0.47173783", "0.47083005", "0.47078735", "0.4705492", "0.4705072", "0.47002238", "0.4699886", "0.46992603", "0.46983653", "0.46977124", "0.46972507", "0.4697034", "0.4695973", "0.46935105", "0.46897048", "0.46841562", "0.46759623", "0.46731088", "0.46696308", "0.46632335", "0.4659862", "0.4658128", "0.46523663", "0.46463525" ]
0.5228932
24
DefaultCreateHealthMenstruationPersonalInfo executes a basic gorm create call
func DefaultCreateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) { if in == nil { return nil, errors1.NilArgumentError } ormObj, err := in.ToORM(ctx) if err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeCreate_); ok { if db, err = hook.BeforeCreate_(ctx, db); err != nil { return nil, err } } if err = db.Create(&ormObj).Error; err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterCreate_); ok { if err = hook.AfterCreate_(ctx, db); err != nil { return nil, err } } pbResponse, err := ormObj.ToPB(ctx) return &pbResponse, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultCreateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultCreateUserInfo(ctx context.Context, in *UserInfo, db *gorm.DB) (*UserInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func CreatePerson(c *gin.Context) {\n // Validate input\n var input CreatePersonInput\n if err := c.ShouldBindJSON(&input); err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n // Create person\n person := models.Person{CreatedBy: input.CreatedBy, FirstName: input.FirstName, LastName: input.LastName, Email: input.Email, Phone: input.Phone, Birthday: input.Birthday, Title: input.Title, Department: 
input.Department}\n models.DB.Create(&person)\n\n c.JSON(http.StatusOK, gin.H{\"data\": person})\n}", "func CreatePerson(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"content-type\", \"application/json\")\n\n\tperson := r.Context().Value(security.KeyUser{}).(data.Person)\n\n\t//create the user\n\tcreatedPerson := data.DB.Create(&person)\n\terr := createdPerson.Error\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\tw.Write([]byte(err.Error()))\n\t\treturn\n\t}\n\n\tjson.NewEncoder(w).Encode(&person)\n}", "func createPerson(w http.ResponseWriter, r *http.Request) {\n\tfmt.Println(\"CREATE HIT\")\n\tstmt, err := db.Prepare(\"INSERT INTO Persons(pAge, pName) VALUES (?,?)\")\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tvar per Person\n\tjson.Unmarshal(body, &per)\n\tage := per.Age\n\tname := per.Name\n\t_, err = stmt.Exec(age, name)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tfmt.Fprintf(w, \"New person was created\")\n}", "func DefaultReadHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationPersonalInfoORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := 
HealthMenstruationPersonalInfoORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationPersonalInfoORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func Create(c *gin.Context) {\n\tuser := c.MustGet(\"user\").(*entities.User)\n\tmechanicData := CreateMechanic{}\n\n\terr := c.ShouldBind(&mechanicData)\n\tif err == nil {\n\t\tmechanic, errRegister := RegisterNewMechanic(user, mechanicData)\n\t\tif errRegister != nil {\n\t\t\tresponse := global.ResponseServices(mechanicData, \"400\", errRegister.Error())\n\t\t\tc.JSON(400, response)\n\t\t\treturn\n\t\t}\n\t\tresponse := global.ResponseServices(mechanic, \"200\", \"Se he creado el usuario con exito\")\n\t\tc.JSON(http.StatusOK, response)\n\t\treturn\n\t}\n\tresponse := global.ResponseServices(mechanicData, \"400\", err.Error())\n\tc.JSON(400, response)\n}", "func (idb *InDB) CreatePerson(c *gin.Context) {\n\tvar (\n\t\tperson structs.Person\n\n\t\tresult gin.H\n\t)\n\tfirstName := c.PostForm(\"first_name\")\n\tlastName := c.PostForm(\"last_name\")\n\tphone := c.PostForm(\"phone_number\")\n\timei := c.PostForm(\"imei\")\n\tmodelName := c.PostForm(\"model_name\")\n\tmodelNumber := c.PostForm(\"model_number\")\n\tsoftwareVer := c.PostForm(\"software_ver\")\n\tapiKey := authGenerator(imei)\n\n\tperson.First_Name = firstName\n\tperson.Last_Name = lastName\n\tperson.Phone_Number = phone\n\tperson.Model_Number = modelNumber\n\tperson.Model_Name = modelName\n\tperson.Software_Ver = softwareVer\n\tperson.API = apiKey\n\tperson.Imei = imei\n\n\tidb.DB.Create(&person)\n\n\tresult = gin.H{\n\t\t\"result\": person,\n\t}\n\n\tc.JSON(http.StatusOK, result)\n}", "func CreatePsychologist(dbase *gorm.DB, w http.ResponseWriter, r *http.Request) {\n\tuser := &db.Psychologist{}\n\terr := 
json.NewDecoder(r.Body).Decode(&user)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t_ = json.NewEncoder(w).Encode(utils.ErrorResponse{\n\t\t\tCode: http.StatusInternalServerError,\n\t\t\tMessage: \"An error occurred\",\n\t\t})\n\t\treturn\n\t}\n\n\tuser.Password = utils.HashPassword(user.Password, w)\n\tif user.Password == \"\" {\n\t\treturn\n\t}\n\n\trs := dbase.Create(&user)\n\tif rs.Error != nil {\n\t\tlog.Println(rs)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tlog.Println(json.NewEncoder(w).Encode(utils.ErrorResponse{\n\t\t\tCode: http.StatusInternalServerError,\n\t\t\tMessage: \"Could not create your account. Please try again later\",\n\t\t}))\n\t\treturn\n\t}\n\n\t// body := struct {\n\t// \tName string\n\t// \tLink string\n\t// }{\n\t// \tName: fmt.Sprintf(\"%s %s\", user.FirstName, user.LastName),\n\t// \tLink: \"https://google.com\",\n\t// }\n\n\t// go func(dbase *gorm.DB, email string, subject string, HTMLTemp string, body interface{}) {\n\t// \terr := utils.SendEmail(dbase, email, subject, HTMLTemp, body)\n\t// \tif err != nil {\n\t// \t\tlog.Println(err)\n\t// \t\t_ = json.NewEncoder(w).Encode(err.Error())\n\t// \t\treturn\n\t// \t}\n\t// }(dbase, user.Email, \"Welcome\", \"templates/email/confirm.html\", body)\n\n\tw.WriteHeader(http.StatusCreated)\n\tlog.Println(json.NewEncoder(w).Encode(user))\n}", "func DefaultListHealthMenstruationPersonalInfo(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationPersonalInfo, error) {\n\tin := HealthMenstruationPersonalInfo{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = 
gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationPersonalInfoORM{}, &HealthMenstruationPersonalInfo{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationPersonalInfoORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationPersonalInfo{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func Create(ctx *gin.Context, data interface{}) {\n\tctx.JSON(http.StatusCreated, gin.H{\"code\": merrors.ErrSuccess, \"data\": nil})\n\treturn\n}", "func CreatePerson(db *sql.DB) {}", "func (h *Handler) createDeveloper(c *gin.Context) handlerResponse {\n\n\tvar newDeveloper types.Developer\n\tif err := c.ShouldBindJSON(&newDeveloper); err != nil {\n\t\treturn handleBadRequest(err)\n\t}\n\tstoredDeveloper, err := h.service.Developer.Create(newDeveloper, h.who(c))\n\tif err != nil {\n\t\treturn handleError(err)\n\t}\n\treturn handleCreated(storedDeveloper)\n}", "func CreateMeeting(c *gin.Context) {\n // Validate input\n var input CreateMeetingInput\n if err := c.ShouldBindJSON(&input); err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n // Create meeting\n meeting := models.Meeting{CreatedBy: input.CreatedBy, Title: input.Title, Description: input.Description, StartDate: input.StartDate, 
EndDate: input.EndDate, Location: input.Location}\n models.DB.Create(&meeting)\n\n c.JSON(http.StatusOK, gin.H{\"data\": meeting})\n}", "func DefaultPatchHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationPersonalInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationPersonalInfo(ctx, &HealthMenstruationPersonalInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationPersonalInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func (p Person) Create(name string, age int) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tfmt.Fprintf(w, \"This prints the name and age 
of a Person:\\n\\t%v\\n\\t%v\\n\", name, age)\n\t})\n}", "func (a *App) CreateProfile(w http.ResponseWriter, r *http.Request) {\n\thandler.CreateProfile(a.DB, w, r)\n}", "func create(entity domain.ValidableEntity, ctx *gin.Context) {\n\terr := ctx.BindJSON(entity)\n\tif IsError(err, ctx) {\n\t\treturn\n\t}\n\terr = domain.Save(entity)\n\tif IsError(err, ctx) {\n\t\treturn\n\t}\n\tctx.JSON(200, entity)\n}", "func CreateUser(c *gin.Context) {}", "func (r *Repository) create(user *domain.UserInfoModel) error {\n\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancel()\n\n\tquery := \"INSERT INTO users (namee, email, password) VALUES ($1, $2, $3)\"\n\tstmt, err := r.db.PrepareContext(ctx, query)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer stmt.Close()\n\n\tqueryStart := time.Now().Nanosecond() / 1000\n\t_, err = stmt.ExecContext(ctx, user.Name, user.Email, user.PassWord)\n\tif err != nil {\n\t\treturn err\n\t}\n\tqueryEnd := time.Now().Nanosecond() / 1000\n\texecutionTime := queryEnd - queryStart\n\terr = r.insertTimeSpent(\"Create\", executionTime)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nil\n}", "func DefaultCreateProfile(ctx context.Context, in *Profile, db *gorm1.DB) (*Profile, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(ProfileORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(ProfileORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func CreateNewPatient(c *gin.Context) {\n\tvar patientRequest 
PatientRequest\n\tc.ShouldBind(&patientRequest)\n\tpatientResponse := PatientResponse{\n\t\tPatientID: \"2018-0001\",\n\t\tName: patientRequest.Name,\n\t\tLastname: patientRequest.Lastname,\n\t\tAge: patientRequest.Age,\n\t}\n\tc.JSON(201, patientResponse)\n\n}", "func NewPersonal(db DBProvider) (*Personal, error) {\n\treturn &Personal{\n\t\tDB: db,\n\t}, nil\n}", "func Create(ctx *gin.Context) {\n\n}", "func (srv *UsersService) CreateHandler(ctx *gin.Context) {\n\tlogger := srv.logger.New(\"action\", \"CreateHandler\")\n\n\t// Checks if the query entry is valid\n\tvalidator := validators.CreateUserValidator{}\n\tif err := validator.BindJSON(ctx); err != nil {\n\t\t// Returns a \"422 StatusUnprocessableEntity\" response\n\t\tsrv.ResponseService.ValidatorErrorResponse(ctx, responses.UnprocessableEntity, err)\n\t\treturn\n\t}\n\n\t// Check permissions\n\tcurrentUser := GetCurrentUser(ctx)\n\tif hasPerm := srv.PermissionsService.CanCreateProfile(currentUser.UID, &validator.UserModel); !hasPerm {\n\t\tsrv.ResponseService.Forbidden(ctx)\n\t\treturn\n\t}\n\n\ttmpPassword := validator.UserModel.Password\n\n\t// Create new user\n\tcreatedUser, err := srv.userCreator.Create(&validator.UserModel, true, false, nil)\n\tif err != nil {\n\t\tlogger.Error(\"сan't create a user\", \"error\", err)\n\t\t// Returns a \"500 StatusInternalServerError\" response\n\t\tsrv.ResponseService.Error(ctx, responses.CanNotCreateUser, \"Can't create a user\")\n\t\treturn\n\t}\n\n\tif nil != currentUser {\n\t\tsrv.SystemLogsService.LogCreateUserProfileAsync(createdUser, currentUser.UID)\n\t}\n\t// TODO: refactor - use events, move above functionality to the event subscriber\n\tconfirmationCode, err := srv.confirmationCodeService.GenerateSetPasswordCode(createdUser)\n\tif err != nil {\n\t\tlogger.Error(\"unable to generate set_password confirmation code\")\n\t\treturn\n\t}\n\n\tif _, err = srv.notificationsService.ProfileCreated(createdUser.UID, tmpPassword, confirmationCode.Code); nil != err 
{\n\t\tlogger.Error(\"сan't send notification\", \"error\", err)\n\t\treturn\n\t}\n\n\t// Returns a \"201 Created\" response\n\tsrv.ResponseService.SuccessResponse(ctx, http.StatusCreated, validator.UserModel)\n}", "func (m *HealthMenstruationPersonalInfoORM) ToPB(ctx context.Context) (HealthMenstruationPersonalInfo, error) {\n\tto := HealthMenstruationPersonalInfo{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tto.PeriodLengthInDays = m.PeriodLengthInDays\n\tto.CycleLengthInDays = m.CycleLengthInDays\n\tif posthook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func createNewUser(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tparams := mux.Vars(r)\n\tvar userInfo UserBody\n\t//decode the json object and store the values in userInfo\n\terr := json.NewDecoder(r.Body).Decode(&userInfo)\n\tif err != nil {\n\t\tfmt.Println(\"ERROR DECODING JSON OBJ FROM CREATE NEW USER\")\n\t}\n\tresult := post.CreateUser(params[\"id\"], userInfo.FirstName, userInfo.LastName, userInfo.Email)\n\tjson.NewEncoder(w).Encode(map[string]bool{\n\t\t\"result\": result,\n\t})\n}", "func (db *database) CreatePerson(\n\tctx context.Context,\n\tp app.Person,\n) (int, error) {\n\n\tvar id int\n\terr := db.GetContext(ctx, &id, `\n\t\tINSERT INTO person (\n\t\t\tfirst_name,\n\t\t\tlast_name,\n\t\t\temail,\n\t\t\trole_id,\n\t\t\tpass_hash\n\t\t) VALUES ($1, $2, $3, $4, 
$5)\n\t\tRETURNING person_id\n\t`, p.FirstName, p.LastName, p.Email, p.Role, p.Password)\n\n\treturn id, errors.Wrap(err, \"failed to insert person\")\n}", "func CreatePerson(w http.ResponseWriter, r *http.Request) {\n\tvar person model.Person\n\t/*\n\t\tTo print the response to string\n\t*/\n\tbodyBytes, _ := ioutil.ReadAll(r.Body)\n\tbodyString := string(bodyBytes)\n\tfmt.Println(bodyString)\n\n\t/*\n\t\tParse JSON object without struct\n\t*/\n\tm := map[string]interface{}{}\n\terr := json.Unmarshal(bodyBytes, &m)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tfmt.Println(m)\n\tfmt.Println(m[\"firstname\"])\n\n\tjson.Unmarshal(bodyBytes, &person) // parse JSON to person struct object\n\tfmt.Println(person.Firstname)\n\tpeople = append(people, person)\n\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\tjson.NewEncoder(w).Encode(people)\n}", "func createUser(c *gin.Context) {\n password,_ := HashPassword(c.PostForm(\"password\"))\n\tuser := user{Login: c.PostForm(\"login\"), Password: password}\n\tdb.Save(&user)\n\tc.JSON(http.StatusCreated, gin.H{\"status\": http.StatusCreated, \"message\": \"User item created successfully!\"})\n}", "func CreateDefault() {\n\texistingAdmin, err := FindByEmail(config.AdminEmail)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tif existingAdmin != nil && !existingAdmin.IsAdmin {\n\t\tlog.Fatalf(\"FATAL: User with email %v exists but has no admin rights.\", config.AdminEmail)\n\t}\n\n\tif existingAdmin == nil {\n\t\tadmin, err := New(config.AdminEmail, \"Admin\", \"Admin\", config.AdminPassword)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tadmin.IsAdmin = true\n\t\terr = Save(&admin)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Cannot create admin user with email %v. 
Error: %v\", config.AdminEmail, err.Error())\n\t\t}\n\t\treturn\n\t}\n\tif config.ServerDebug() {\n\t\tlog.Println(\"Admin account found.\")\n\t}\n\n\t//Create the test user if we are not in production\n\tif !config.ProductionMode {\n\t\texistingTestUser, err := FindByEmail(config.TestUserEmail)\n\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t}\n\n\t\tif existingTestUser == nil {\n\t\t\ttestuser, err := New(config.TestUserEmail, \"TestUser\", \"TestUser\", \"xaFqJDeJldIEcdfZS\")\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(err)\n\t\t\t}\n\t\t\terr = Save(&testuser)\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatalf(\"Cannot create user with email %v. Error: %v\", config.TestUserEmail, err.Error())\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tlog.Println(\"Test account found.\")\n\t}\n}", "func (adapter *GORMAdapter) Create(entity interface{}) orm.Result {\n\treturn orm.Result{\n\t\tError: adapter.db.Create(entity).Error,\n\t}\n}", "func (x *UserStore) Create(uuid, firstName, lastName, email, password string) (string, error) {\n\tif x.mock != nil && x.mock.Enabled() {\n\t\treturn x.mock.String(), x.mock.Error()\n\t}\n\n\t_, err := x.db.Exec(`\n\t\tINSERT INTO user\n\t\t(id, first_name, last_name, email, password, status_id)\n\t\tVALUES\n\t\t(?,?,?,?,?,?)\n\t\t`,\n\t\tuuid, firstName, lastName, email, password, 1)\n\n\treturn uuid, err\n}", "func (UserReqServices) CreateNew(ctx context.Context, req dt.UserRequest, dbConn lib.DbConnection) (resp dt.UserResponse) {\n\terr := validation.Errors{\n\t\t\"name\": validation.Validate(req.Name, validation.Required, validation.Length(1, 50), is.Letter),\n\t\t\"email\": validation.Validate(req.Email, validation.Required, is.Email),\n\t\t\"phone\": validation.Validate(req.Phone, validation.Required, is.Digit),\n\t}.Filter()\n\tresp.ResponseCode = \"-1\"\n\tresp.ResponseDesc = err\n\tif err == nil || err == \"\" {\n\n\t\tresp.ResponseCode = \"1\"\n\t\tresp.ResponseDesc = \"Success\"\n\t}\n\t\n\treturn\n}", "func (a *App) CreateUserProfile(w 
http.ResponseWriter, r *http.Request) {\n\thandler.CreateUserProfile(a.DB, w, r)\n}", "func Create(r render.Render, params martini.Params, db database.Datastore, data Account) {\n\n\t// Create a bcrypt hash from the password as we don't want to store\n\t// plain-text passwords in the database\n\tpwdHash, bcryptError := bcrypt.GenerateFromPassword([]byte(data.Password), 0)\n\tif bcryptError != nil {\n\t\tresponses.Error(r, bcryptError.Error())\n\t}\n\n\t// Set the hashed password\n\tdata.Password = string(pwdHash)\n\n\t// Store the object in the database. In case the\n\t// database operation fails, an error response is sent back to the caller.\n\terr := db.GetDBMap().Insert(&data)\n\tif err != nil {\n\t\tresponses.Error(r, err.Error())\n\t\treturn\n\t}\n\tresponses.OKStatusPlusData(r, data, 1)\n}", "func (server Server) CreateNewUser(w http.ResponseWriter, r *http.Request) {\n\tvar user models.User // make a user\n\tvar res models.APIResponse // make a response\n\n\terr := json.NewDecoder(r.Body).Decode(&user) //decode the user\n\tif err != nil {\n\t\tlog.Printf(\"Unable to decode the request body. 
%v\", err)\n\t\tres = models.BuildAPIResponseFail(\"Unable to decode the request body\", nil)\n\t}\n\tif user.Name == \"\" || user.Email == \"\" || user.Password == \"\" {\n\t\tres = models.BuildAPIResponseFail(\"Blank users cannot be created\", nil)\n\t} else {\n\t\tinsertID := insertUser(user, server.db) // call insert user function and pass the note\n\t\tres = models.BuildAPIResponseSuccess(fmt.Sprintf(\"User Created with %d id\", insertID), nil) // format a response object\n\t}\n\tjson.NewEncoder(w).Encode(res)\n\n}", "func Create(c *gin.Context) {\n\tworkshopData := CreateWorkShop{}\n\tuser := c.MustGet(\"user\").(*entities.User)\n\n\terr := c.ShouldBind(&workshopData)\n\tif err == nil {\n\t\tr, errRegister := RegisterNewWorkShop(user, workshopData)\n\t\tif errRegister != nil {\n\t\t\tresponse := global.ResponseServices(workshopData, \"400\", errRegister.Error())\n\t\t\tc.JSON(400, response)\n\t\t\treturn\n\t\t}\n\t\tresponse := global.ResponseServices(r, \"200\", \"Se he creado el taller con exito\")\n\t\tc.JSON(http.StatusOK, response)\n\t\treturn\n\t}\n\tresponse := global.ResponseServices(workshopData, \"400\", err.Error())\n\tc.JSON(400, response)\n}", "func (h *User) Create(w http.ResponseWriter, r *http.Request) {\n\tdefer r.Body.Close()\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tR.JSON500(w)\n\t\treturn\n\t}\n\n\tuser, err := validator.UserCreate(body)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tR.JSON400(w)\n\t\treturn\n\t}\n\n\terr = h.Storage.CreateUser(user)\n\t// @todo this might be also 400 response since email can be a duplicate\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tR.JSON500(w)\n\t\treturn\n\t}\n\n\tR.JSON200OK(w)\n}", "func (company *Company) Create(ctx aero.Context) error {\n\tuser := GetUserFromContext(ctx)\n\n\tif user == nil {\n\t\treturn errors.New(\"Not logged in\")\n\t}\n\n\tcompany.ID = GenerateID(\"Company\")\n\tcompany.Created = DateTimeUTC()\n\tcompany.CreatedBy = 
user.ID\n\tcompany.Location = &Location{}\n\n\t// Write log entry\n\tlogEntry := NewEditLogEntry(user.ID, \"create\", \"Company\", company.ID, \"\", \"\", \"\")\n\tlogEntry.Save()\n\n\treturn company.Unpublish()\n}", "func createUser() User {\n\tuser := User{\n\t\tUsername: \"igor\",\n\t\tPassword: \"please store hashed password\",\n\t\tName: \"Paolo\",\n\t\tSurname: \"Galeone\",\n\t\tEmail: \"please validate the @email . com\",\n\t\tGender: true,\n\t\tBirthDate: time.Now(),\n\t}\n\n\tif e = db.Create(&user); e != nil {\n\t\tpanic(fmt.Sprintf(\"Create(&user) filling fields having no default should work, but got: %s\\n\", e.Error()))\n\t}\n\treturn user\n}", "func CreateDefaultUser(username, password string) (err error) {\n\n err = checkInit()\n if err != nil {\n return\n }\n\n var users = data[\"users\"].(map[string]interface{})\n // Check if the default user exists\n if len(users) > 0 {\n err = createError(001)\n return\n }\n\n var defaults = defaultsForNewUser(username, password)\n users[defaults[\"_id\"].(string)] = defaults\n saveDatabase(data)\n\n return\n}", "func createHandler(w http.ResponseWriter, r *http.Request) {\n\tuser := new(User)\n\tuser.Token = validateToken(r.FormValue(\"token\"))\n\tuser.PasswordHash = validatePassHash(r.FormValue(\"passHash\"))\n\tuser.PublicKey = validatePublicKey(r.FormValue(\"publicKey\"))\n\tuser.PublicHash = computePublicHash(user.PublicKey)\n\tuser.CipherPrivateKey = validateHex(r.FormValue(\"cipherPrivateKey\"))\n\n\tlog.Printf(\"Woot! 
New user %s %s\\n\", user.Token, user.PublicHash)\n\n\tif !SaveUser(user) {\n\t\thttp.Error(w, \"That username is taken\", http.StatusBadRequest)\n\t}\n}", "func CreateUser(c *gin.Context) {\n\ttype result struct {\n\t\tFirstName string `json:\"first_name\"`\n\t\tLastName string `json:\"last_name\"`\n\t\tEmail string `json:\"email\"`\n\t\tPassword string `json:\"password\"`\n\t\tDateOfBirth string `json:\"birth_date\"`\n\t}\n\tUserParams := result{}\n\n\terr := c.ShouldBindJSON(&UserParams)\n\tlayout := \"2006-01-02\"\n\tstr := UserParams.DateOfBirth\n\tt, er := time.Parse(layout, str)\n\n\tif er != nil {\n\t\tfmt.Println(er)\n\t}\n\n\tvar user model.User\n\tif err != nil {\n\t\tc.JSON(http.StatusBadRequest, err)\n\t\treturn\n\t}\n\tif len(UserParams.Password) == 0 {\n\t\tfmt.Println(\"err2\")\n\t\tlog.Println(err)\n\t\tc.JSON(http.StatusBadRequest, \"No given password\")\n\t\treturn\n\t}\n\tif age.Age(t) < 18 {\n\t\tlog.Println(err)\n\t\tc.JSON(http.StatusBadRequest, \"You are not adult!\")\n\t\treturn\n\t}\n\tif !db.Where(\"email = ?\", UserParams.Email).Find(&user).RecordNotFound() {\n\t\tc.JSON(http.StatusBadRequest, \"User with this email already exist\")\n\t\treturn\n\t}\n\tid := uuid.NewV4()\n\t// 1 = single user; 2 = admin\n\tuser.AccessLevel = 1\n\tuser.UUID = id.String()\n\tvar hash = hashPassword(UserParams.Password)\n\tuser.Password = hash\n\tuser.FirstName = UserParams.FirstName\n\tuser.LastName = UserParams.LastName\n\tuser.Email = UserParams.Email\n\tuser.DateOfBirth = t\n\tdb.Create(&user)\n\tuser.Password = \"\"\n\tc.JSON(200, &user)\n}", "func (h *Handler) create() http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tuser := &model.User{}\n\t\tif err := json.NewDecoder(r.Body).Decode(user); err != nil {\n\t\t\tmsg := &errorMessage{\n\t\t\t\tError: err.Error(),\n\t\t\t\tMessage: \"user json decode error\",\n\t\t\t}\n\t\t\tresponse.JSON(w, http.StatusBadRequest, msg)\n\t\t\treturn\n\t\t}\n\n\t\tentity, err := 
h.UserDAO.Create(r.Context(), user)\n\t\tif err != nil {\n\t\t\tmsg := &errorMessage{\n\t\t\t\tError: err.Error(),\n\t\t\t\tMessage: \"user datastore error\",\n\t\t\t}\n\t\t\tresponse.JSON(w, http.StatusInternalServerError, msg)\n\t\t\treturn\n\t\t}\n\t\tresponse.JSON(w, http.StatusCreated, entity)\n\t}\n}", "func addGod(db *gorm.DB) {\n\tphone, password, username, email, firstname, lastname := os.Getenv(\"GOD_PHONE\"),\n\t\tos.Getenv(\"GOD_PASSWORD\"),\n\t\tos.Getenv(\"GOD_USERNAME\"),\n\t\tos.Getenv(\"GOD_EMAIL\"),\n\t\tos.Getenv(\"GOD_FIRSTNAME\"),\n\t\tos.Getenv(\"GOD_LASTNAME\")\n\tpersonnelNum, err := strconv.Atoi(os.Getenv(\"GOD_PERSONNELNUM\"))\n\tif err != nil {\n\t\te.Logger.Error(\"GOD_PERSONNELNUM is not valid\")\n\t}\n\tlevel := domains.Level{\n\t\tTitle: \"کاربر\",\n\t\tColor: \"#ffffff\",\n\t}\n\trole := domains.Role{\n\t\tTitle: \"genesis\",\n\t\tPermissions: constants.Permissions,\n\t}\n\tuser := domains.User{\n\t\tModel: gorm.Model{\n\t\t\tID: 1,\n\t\t},\n\t\tPhone: phone,\n\t\tPassword: crypto.GenerateSha256(password),\n\t\tPersonnelNum: personnelNum,\n\t\tUsername: username,\n\t\tEmail: email,\n\t\tFirstName: firstname,\n\t\tLastName: lastname,\n\t\tRoles: []domains.Role{role},\n\t\tLevel: level,\n\t}\n\tif err := db.Where(\"phone = ?\", user.Phone).FirstOrCreate(&user).Error; err != nil {\n\t\te.Logger.Error(err)\n\t}\n\tvar profile domains.Profile\n\tprofile.UserID = user.Model.ID\n\tif err := db.FirstOrCreate(&profile).Error; err != nil {\n\t\tfmt.Println(err)\n\t}\n}", "func Create(user User) error {\n\t\n}", "func CreatePerson(w http.ResponseWriter, req *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t// params := mux.Vars(req)\n\tvar person models.Person\n\n\t// The user ID will be generated by the mongodb\n\t// id,_ := strconv.Atoi(params[\"id\"])\n\t// person.ID = id\n\terr := json.NewDecoder(req.Body).Decode(&person)\n\tif err != nil {\n\t\tfmt.Println(\"Oops\", err)\n\t}\n\t\n\tcollection := 
client.Database(\"thepolyglotdeveloper\").Collection(\"people\")\n\tctx, _ := context.WithTimeout(context.Background(), 5*time.Second)\n\tresult, _ := collection.InsertOne(ctx, person)\n\n\t// we don`t need this since we are dealing with a database\n\t//models.People = append(models.People, person) \n\tjson.NewEncoder(w).Encode(result)\t\n}", "func CreateProfile(ctx *router.Context) {\n\tuin, _ := ctx.Keys[middleware.ComerUinContextKey].(uint64)\n\trequest := &model.CreateProfileRequest{}\n\terr := ctx.BindJSON(request)\n\tif err != nil {\n\t\tctx.ERROR(\n\t\t\trouter.ErrParametersInvaild,\n\t\t\t\"wrong metamask login parameter\",\n\t\t)\n\t\treturn\n\t}\n\terr = service.CreateComerProfile(uin, request)\n\tif err != nil {\n\t\tctx.ERROR(\n\t\t\trouter.ErrBuisnessError,\n\t\t\t\"wrong metamask login parameter\",\n\t\t)\n\t\treturn\n\t}\n\n\tctx.OK(nil)\n}", "func (s *Store) Create(c *gin.Context) {\n\n}", "func (o OktaPlugin) EntityCreate(e, de pb.Entity) (pb.Entity, error) {\n\tp := &okta.PasswordCredential{\n\t\tValue: de.GetSecret(),\n\t}\n\tuc := &okta.UserCredentials{\n\t\tPassword: p,\n\t}\n\tprofile := okta.UserProfile{}\n\tprofile[\"firstName\"] = \"UNSET\"\n\tprofile[\"lastName\"] = \"UNSET\"\n\tprofile[\"employeeNumber\"] = e.GetNumber()\n\tprofile[\"email\"] = e.GetID() + \"@\" + cfg.GetString(\"domain\")\n\tprofile[\"login\"] = e.GetID() + \"@\" + cfg.GetString(\"domain\")\n\tu := &okta.User{\n\t\tCredentials: uc,\n\t\tProfile: &profile,\n\t}\n\n\tuser, _, err := o.c.User.CreateUser(*u, nil)\n\tif err != nil {\n\t\tappLogger.Error(\"Okta user was not created!\", \"error\", err)\n\t\treturn e, err\n\t}\n\n\tif e.Meta == nil {\n\t\te.Meta = &pb.EntityMeta{}\n\t}\n\n\te.Meta.UntypedMeta = tree.PatchKeyValueSlice(e.Meta.UntypedMeta, \"UPSERT\", \"oktaID\", user.Id)\n\n\treturn e, nil\n}", "func CreatePerson() *Person {\n\tvar newPerson Person\n\tnewPerson.Name = \"mario\"\n\tnewPerson.Address = \"rome\"\n\tnewPerson.Phone = 349121454\n\treturn 
&newPerson\n}", "func (m memberUsecase) Create(ctx context.Context, name, phone string) (*models.Member, error) {\n\treturn m.memberRepos.Create(ctx, name, phone)\n}", "func (pc UserController) Create(c *gin.Context) {\n\tvar u repository.UserRepository\n\tp, err := u.CreateModel(c)\n\n\tif err != nil {\n\t\tc.AbortWithStatus(400)\n\t\tc.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n\t} else {\n\t\tc.JSON(201, p)\n\t}\n}", "func Create(c *fiber.Ctx) error {\n\temp := new(Employee)\n\tif err := c.BodyParser(emp); err != nil {\n\t\tc.JSON(\"something went wrong\")\n\t\treturn err\n\t}\n\n\tsess := session.Must(session.NewSessionWithOptions(session.Options{\n\t\tSharedConfigState: session.SharedConfigEnable,\n\t\tProfile: \"codingec\",\n\t\tConfig: aws.Config{Region: aws.String(\"us-east-1\")},\n\t}))\n\n\tdynamoSvc := dynamodb.New(sess, aws.NewConfig().WithLogLevel(aws.LogDebugWithHTTPBody))\n\n\temp.EmpID = uuid.New().String()\n\temp.PK = \"ORG#\" + emp.OrgID\n\temp.SK = \"EMP#\" + emp.EmpID\n\tav, err := dynamodbattribute.MarshalMap(emp)\n\tinput := &dynamodb.PutItemInput{\n\t\tItem: av,\n\t\tTableName: aws.String(\"project-manager\"),\n\t}\n\n\t_, err = dynamoSvc.PutItem(input)\n\n\tif err != nil {\n\t\tfmt.Println(\"Got error calling CreateTable:\")\n\t\tfmt.Println(err.Error())\n\t\treturn err\n\t}\n\n\treturn c.JSON(emp)\n}", "func CreateUser(user model.User) {\n\tfmt.Println(user)\n}", "func (gc *GreetingController) CreateGreeting(w http.ResponseWriter, r *http.Request) {\n\tvar gm GreetingModel\n\terr := json.NewDecoder(r.Body).Decode(&gm)\n\tif err != nil {\n\t\tMainLogger.Printf(\"Error decoding body: %v\", err)\n\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t\treturn\n\t}\n\tgm.ID = bson.NewObjectId()\n\tgm.CreatedAt = time.Now()\n\tgm.CreatedBy = \"gopher\"\n\tgm.UpdatedAt = gm.CreatedAt\n\tgm.UpdatedBy = gm.CreatedBy\n\tgc.Session.DB(\"starterdb\").C(\"greetings\").Insert(gm)\n\tgmj, err := json.Marshal(gm)\n\tif err != nil 
{\n\t\tMainLogger.Println(\"Error marshaling into JSON\")\n\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(http.StatusCreated)\n\tw.Write(gmj)\n}", "func (m *HealthMenstruationPersonalInfo) ToORM(ctx context.Context) (HealthMenstruationPersonalInfoORM, error) {\n\tto := HealthMenstruationPersonalInfoORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tto.ProfileId = m.ProfileId\n\tto.PeriodLengthInDays = m.PeriodLengthInDays\n\tto.CycleLengthInDays = m.CycleLengthInDays\n\tif posthook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (u *InformationRepository) Create(model *models.Information) error {\n\tquery := u.InformationTable().Create(model)\n\tif err := query.Error; err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (t *HeathCare_Chaincode) createMedicalRecord(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tfmt.Println(\"\\n=============== start createMedicalRecord function ===============\")\n\tstart := time.Now()\n\ttime.Sleep(time.Second)\n\n\tif len(args) != 7 {\n\t\treturn shim.Error(\"there must be 7 argument\")\n\t}\n\n\tfor i := 0; i < len(args); i++ {\n\t\tif len(args[i]) == 0 {\n\t\t\treturn shim.Error(\"argument \" + strconv.Itoa(i+1) + \" must be declare\")\n\t\t}\n\t}\n\tpatientId := args[0]\n\tpersonalIdentificationInformation := 
args[1]\n\tmedicalHistory := args[2]\n\tfamilyMedicalHistory := args[3]\n\tmedicationHistory := args[4]\n\ttreatmentHistory := args[5]\n\tmedicalDirectives := args[6]\n\n\t//convert variable to json\n\tobjectType := \"MedicalRecord\"\n\tmedialRecord := &MedicalRecord{objectType, patientId, personalIdentificationInformation,\n\t\tmedicalHistory, familyMedicalHistory, medicationHistory,\n\t\ttreatmentHistory, medicalDirectives}\n\n\t//convert data to byte\n\tMedicalRecordAsByte, errMedicalRecordAsByte := json.Marshal(medialRecord)\n\tif errMedicalRecordAsByte != nil {\n\t\treturn shim.Error(errMedicalRecordAsByte.Error())\n\t}\n\n\t//save to database\n\terrMedicalRecordAsByte = stub.PutPrivateData(\"MedicalRecordCollection\", patientId, MedicalRecordAsByte)\n\tif errMedicalRecordAsByte != nil {\n\t\treturn shim.Error(errMedicalRecordAsByte.Error())\n\t}\n\n\t//create index key\n\tindexName := \"id\"\n\tmedicalRecordIndexKey, errMedicalRecordIndexKey := stub.CreateCompositeKey(indexName, []string{medialRecord.ID, medialRecord.PersonalIdentificationInformation, medialRecord.MedicalHistory, medialRecord.FamilyMedicalHistory, medialRecord.MedicationHistory, medialRecord.TreatmentHistory, medialRecord.MedicalDirectives})\n\tif errMedicalRecordIndexKey != nil {\n\t\treturn shim.Error(errMedicalRecordIndexKey.Error())\n\t}\n\n\t//save index\n\tvalue := []byte{0x00}\n\tstub.PutPrivateData(\"MedicalRecordCollection\", medicalRecordIndexKey, value)\n\n\tend := time.Now()\n\telapsed := time.Since(start)\n\n\tfmt.Println(\"\\nfunction createMedicalRecord\")\n\tfmt.Printf(\"time start: %s\", start.String())\n\tfmt.Printf(\"time end: %s\", end.String())\n\tfmt.Println(\"time execute: \", elapsed.String())\n\tfmt.Println(\"=============== end createMedicalRecord function ===============\")\n\treturn shim.Success(nil)\n}", "func Create(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {\n\tfmt.Fprint(w, \"Welcome!\\n\")\n}", "func runCreate(cmd *cobra.Command, args 
[]string) {\n\tcmd.Printf(\"Creating User : Name[%s] Email[%s] Pass[%s]\\n\", create.name, create.email, create.pass)\n\n\tif create.name == \"\" && create.email == \"\" && create.pass == \"\" {\n\t\tcmd.Help()\n\t\treturn\n\t}\n\n\tu := User{\n\t\tStatus: 1,\n\t\tName: \"Bill\",\n\t\tEmail: \"[email protected]\",\n\t\tPassword: \"my passoword\",\n\t}\n\n\tif err := createUser(&u); err != nil {\n\t\tcmd.Println(\"Creating User : \", err)\n\t\treturn\n\t}\n\n\tcmd.Println(\"Creating User : Created\")\n}", "func CreateUser(name, email, password string) {\n\n\tm := make(map[string]interface{})\n\tm[\"Github\"] = \"\"\n\tm[\"Linkedin\"] = \"\"\n\tm[\"Twitter\"] = \"\"\n\n\tv, err := json.Marshal(m)\n\tif err != nil {\n\t\tlog.Println(\"Marshal error: \", err)\n\t\treturn\n\t}\n\n\tuser := &User{Name: name, Email: email, Password: password, Meta: string(v)}\n\n\tDb.Debug().Create(&user)\n}", "func (u *userController) Create() {\n\treturn\n}", "func (ctl UserController) Create(c *gin.Context) {\n\tvar createRequest microsoft.CreateUserRequest\n\tif err := c.ShouldBindJSON(&createRequest); err != nil {\n\t\tc.JSON(rootCtl.wrap(http.StatusUnprocessableEntity, err.Error()))\n\t\treturn\n\t}\n\n\tif err := microsoft.NewUser().Create(c.Param(\"id\"), createRequest); err != nil {\n\t\tc.JSON(rootCtl.wrap(http.StatusInternalServerError, err.Error()))\n\t\treturn\n\t}\n\n\tc.JSON(rootCtl.wrap(http.StatusOK))\n}", "func (*elephant) createCompany(name string) (*Company, error) {\n\tres := &Company{Name: name}\n\treturn res, db.Create(res).Error\n}", "func CreatePermission(c *gin.Context) {\n\tnewPermission := model.Permission{}\n\tc.BindJSON(&newPermission)\n\n\terr := service.CreatePermission(newPermission)\n\n\tif err != nil {\n\t\terror := service.GetGormErrorCode(err.Error())\n\t\tc.JSON(500, error)\n\t} else {\n\t\tc.String(200, \"ok\")\n\t}\n}", "func CreatePerson(w http.ResponseWriter, r *http.Request) {\n\tbody, err := ioutil.ReadAll(io.LimitReader(r.Body, 1048576))\n\tif 
err != nil {\n\t\tpanic(err)\n\t}\n\n\tif err := r.Body.Close(); err != nil {\n\t\tpanic(err)\n\t}\n\n\tvar personInterface interface{}\n\tif err := json.Unmarshal(body, &personInterface); err != nil {\n\t\tutils.SendJSONResponse(w, 422, \"Not Processing\", nil)\n\t}\n\n\tperson := personInterface.(map[string]interface{})\n\n\tsession := utils.GetMongoSession()\n\tdefer session.Close()\n\n\tsession.SetMode(mgo.Monotonic, true)\n\n\tupdateInfo, errDB := session.DB(\"test_db\").C(\"persons\").Upsert(\n\t\tbson.M{\"email\": person[\"email\"]},\n\t\tbson.M{\"$set\": person},\n\t)\n\n\tif errDB != nil {\n\t\tpanic(errDB)\n\t}\n\n\tutils.SendJSONResponse(w, 0, \"Success\", updateInfo)\n}", "func Create(c *gin.Context) {\n\tif userCollection == nil {\n\t\tuserCollection = db.GetUserCollection()\n\t}\n\tctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)\n\tdefer cancel()\n\n\tvar userInfo entity.User\n\tif err := c.ShouldBindJSON(&userInfo); err != nil {\n\t\tc.AbortWithStatusJSON(400, gin.H{\"Error\": err.Error()})\n\t\treturn\n\t}\n\n\tid, err := userCollection.InsertOne(ctx, userInfo)\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(500, gin.H{\"Error\": err.Error()})\n\t\treturn\n\t}\n\n\tc.JSON(200, gin.H{\"id\": id})\n}", "func CreateData(c *gin.Context) {\r\n\tvar predict Models.Predict\r\n\tc.BindJSON(&predict)\r\n\terr := Models.CreateData(&predict)\r\n\tif err != nil {\r\n\t\tfmt.Println(err.Error())\r\n\t\tc.AbortWithStatus(http.StatusNotFound)\r\n\t} else {\r\n\t\tc.JSON(http.StatusOK, predict)\r\n\t}\r\n}", "func (ctl UserController) Create(c *gin.Context) {\n\tvar createRequest microsoft.CreateUserRequest\n\tif err := c.ShouldBindJSON(&createRequest); err != nil {\n\t\tc.JSON(rootCtl.wrap(http.StatusUnprocessableEntity, err.Error()))\n\t\treturn\n\t}\n\n\tuid, err := microsoft.NewUser().Create(c.Param(\"id\"), createRequest)\n\tif err != nil {\n\t\tc.JSON(rootCtl.wrap(http.StatusInternalServerError, 
err.Error()))\n\t\treturn\n\t}\n\n\tc.JSON(rootCtl.wrap(http.StatusCreated, gin.H{\n\t\t\"id\": uid,\n\t}))\n}", "func CreateUser(person *Person) (err error) {\n\tif err = Config.DB.Create(person).Error; err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationPersonalInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationPersonalInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func (_obj *DataService) CreateUser(wx_id string, userInfo *UserInfo, _opt ...map[string]string) (ret int32, err error) {\n\n\tvar length int32\n\tvar have bool\n\tvar ty byte\n\t_os := codec.NewBuffer()\n\terr = _os.Write_string(wx_id, 1)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\terr = userInfo.WriteBlock(_os, 2)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tvar _status 
map[string]string\n\tvar _context map[string]string\n\tif len(_opt) == 1 {\n\t\t_context = _opt[0]\n\t} else if len(_opt) == 2 {\n\t\t_context = _opt[0]\n\t\t_status = _opt[1]\n\t}\n\t_resp := new(requestf.ResponsePacket)\n\ttarsCtx := context.Background()\n\n\terr = _obj.s.Tars_invoke(tarsCtx, 0, \"createUser\", _os.ToBytes(), _status, _context, _resp)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\t_is := codec.NewReader(tools.Int8ToByte(_resp.SBuffer))\n\terr = _is.Read_int32(&ret, 0, true)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tif len(_opt) == 1 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t} else if len(_opt) == 2 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t\tfor k := range _status {\n\t\t\tdelete(_status, k)\n\t\t}\n\t\tfor k, v := range _resp.Status {\n\t\t\t_status[k] = v\n\t\t}\n\n\t}\n\t_ = length\n\t_ = have\n\t_ = ty\n\treturn ret, nil\n}", "func (h Handler) Create(res http.ResponseWriter, req *http.Request) {\n\tdefer req.Body.Close()\n\tvar body []byte\n\t_, readErr := req.Body.Read(body)\n\tif readErr != nil {\n\t\tres.WriteHeader(400)\n\t\tres.Write([]byte(\"400 Bad Request\"))\n\t\tlog.Println(readErr.Error())\n\t\treturn\n\t}\n\tvar bodyMap map[string]interface{}\n\tmarshErr := json.Unmarshal(body, bodyMap)\n\tif marshErr != nil {\n\t\tres.WriteHeader(400)\n\t\tres.Write([]byte(\"400 Bad Request\"))\n\t\tlog.Println(marshErr.Error())\n\t}\n\tvar key string\n\tif bodyMap[\"Name\"] != nil {\n\t\tkey = serviceStateKey(bodyMap)\n\t} else {\n\t\tkey = projectStateKey(bodyMap)\n\t}\n\terrChan := make(chan error)\n\th.Store.Save(key, bodyMap, func(err error) {\n\t\terrChan <- err\n\t})\n\tsaveErr := <-errChan\n\tif saveErr != nil {\n\t\tres.WriteHeader(500)\n\t\tres.Write([]byte(\"500 Internal 
Error\"))\n\t\treturn\n\t}\n\tres.WriteHeader(201)\n\tres.Write([]byte(\"201 Created\"))\n\th.Running.CheckIn(bodyMap[\"Project\"].(string), bodyMap[\"Branch\"].(string))\n}", "func CreateNewEmployee(id string, username string, pass string, fName string, lName string) Employee {\n\treturn Employee{id, username, pass, fName, lName}\n}", "func (contract *ContractChaincode) createEmployee(stub shim.ChaincodeStubInterface, args []string) peer.Response {\n\n\tif len(args) != 7 {\n\t\treturn shim.Error(\"Incorrect number of arguments, required: 7\")\n\t}\n\n\t//\"createEmployee\",\"77\",\"xyz\",\"10000\",\"9.30\",\"permanent\",\"hitachi\",\"nissan\"]\n\n\tkey := args[0]\n\temployeeId, _ := strconv.Atoi(key)\n\tsalary, _ := strconv.Atoi(args[2])\n\tworkingHours, _ := strconv.ParseFloat(args[3], 64)\n\n\tvar employee = Employee{EmployeeId: employeeId, EmployeeName: args[1], Salary: salary, WorkingHours: workingHours, EmployeeType: args[4], ParentCompany: args[5], CurrentCompany: args[6]}\n\n\temployeeAsBytes, _ := json.Marshal(employee)\n\t//stub.PutState(\"emp-id\"+strconv.Itoa(employee.employeeId), employeeAsBytes)\n\tstub.PutState(key, employeeAsBytes)\n\tfmt.Println(\"Created \", employee)\n\tpayload := []byte(\"Employee created successfully\")\n\treturn shim.Success(payload)\n}", "func CreateEmployee(c *gin.Context) {\r\n var employee model.Employee\r\n c.BindJSON(&employee)\r\n err := model.CreateEmployee(&employee)\r\n if err != nil {\r\n fmt.Println(err.Error())\r\n c.AbortWithStatus(http.StatusNotFound)\r\n } else {\r\n c.JSON(http.StatusOK, employee)\r\n }\r\n}", "func (s *permisoService) Create(p *model.Permiso) (*model.Permiso, error) {\n\tp.ID = uuid.New().String()\n\tp.Status = true\n\tp.CreatedAt = time.Now()\n\tp.UpdatedAt = time.Now()\n\n\treturn repo.Create(p)\n\n\t// return nil, nil\n}", "func (h *UserHandler) Create(w http.ResponseWriter, r *http.Request) {\n\tlog.Print(\"/users POST handled\")\n\n\treq := &CreateRequest{}\n\tif err := 
util.ScanRequest(r, req); err != nil {\n\t\tlog.Println(err)\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tuser := &schema.User{\n\t\tName: req.Name,\n\t}\n\n\tif err := h.model.Validate(user); err != nil {\n\t\tlog.Println(err)\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tres, err := h.model.Create(user)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tif err := util.JSONWrite(w, res, http.StatusCreated); err != nil {\n\t\tlog.Println(err)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t}\n}", "func (s *business) Create(example repository.Example) *response.Error {\n\ts.logger = s.loggerClone\n\ts.logger.SugaredLogger = s.logger.With(\"method\", \"Create\")\n\n\texample.ExampleID = util.GetUUID()\n\texample.Status = \"Pending\"\n\texample.CreateTime = time.Now()\n\n\terr := s.repository.CreateExample(example)\n\tif err != nil {\n\t\ts.logger.Errorw(\"create data error\", \"error\", err)\n\t\treturn response.NewErrorFromCode(errorcode.CreateDataError)\n\t}\n\n\treturn nil\n}", "func createUser(u *User) error {\n\tif u.Status == 0 {\n\t\treturn errors.New(\"Invalid user value\")\n\t}\n\n\treturn nil\n}", "func createUser(name, password, passwordUpdateRequest string) string {\n\treturn fmt.Sprintf(`{\n \"type\": \"User\",\n \"name\": \"%s\",\n \"credential\": {\n \"type\": \"PasswordCredential\",\n\t\t\"password\": \"%s\",\n\t\t\"passwordUpdateRequest\": \"%s\"\n }\n}`, name, password, passwordUpdateRequest)\n}", "func (m *Manager) Create(ctx context.Context, tx *sql.Tx, user v0.User) error {\n\t_, err := tx.ExecContext(ctx, `\n\t\t\t\tINSERT INTO users (\n\t\t\t\t\tname, \n\t\t\t\t\temail, \n\t\t\t\t\tprimary_public_key, \n\t\t\t\t\trecovery_public_key, \n\t\t\t\t\tsuper_user, \n\t\t\t\t\tauth_level, \n\t\t\t\t\tweight,\n\t\t\t\t\tuser_set\n\t\t\t\t\t) VALUES (?, ?, ?, ?, ?, ?, ?, 
?)`,\n\t\tuser.Name,\n\t\tuser.Email,\n\t\tuser.PrimaryPublicKey,\n\t\tuser.RecoveryPublicKey,\n\t\tuser.SuperUser,\n\t\tuser.AuthLevel,\n\t\tuser.Weight,\n\t\tuser.Set,\n\t)\n\treturn err\n}", "func (p *Profile) AfterCreate(scope *gorm.Scope) (err error) {\n\tfmt.Println(\"After Create\")\n\treturn\n}", "func Create(w http.ResponseWriter, r *http.Request) {\n\n\taccountDecoder := json.NewDecoder(r.Body)\n\tvar accData Account\n\terr := accountDecoder.Decode(&accData)\n\tif err != nil {\n\t\tlog.Fatalln(\"error:\", err)\n\t}\n\taccData.CreateAccount()\n\tfmt.Fprintf(w, \"Account added successfully\")\n}", "func (a *App) CreateEntity(w http.ResponseWriter, r *http.Request) {\n\tvar e Entity\n\te.Uuid = uuid.NewV4().String()\n\tdecoder := json.NewDecoder(r.Body)\n\tif err := decoder.Decode(&e); err != nil {\n\t\trespondWithError(w, http.StatusBadRequest, errors.New(\"invalid request payload\"))\n\t\treturn\n\t}\n\tdefer func() { _ = r.Body.Close() }()\n\n\tif err := e.createEntity(a.DB); err != nil {\n\t\tlog.Print(err)\n\t\trespondWithError(w, http.StatusInternalServerError, err)\n\t\treturn\n\t}\n\n\trespondWithJSON(w, http.StatusCreated, e)\n}", "func CreateInitialEngineerProfile() {\n\tEngineers = make([]Engineer, 0)\n\tengineer := Engineer{\n\t\tUsername: \"masud\",\n\t\tFirstName: \"Masudur\",\n\t\tLastName: \"Rahman\",\n\t\tCity: \"Madaripur\",\n\t\tDivision: \"Dhaka\",\n\t\tPosition: \"Software Engineer\",\n\t}\n\tEngineers = append(Engineers, engineer)\n\n\tengineer = Engineer{\n\t\tUsername: \"fahim\",\n\t\tFirstName: \"Fahim\",\n\t\tLastName: \"Abrar\",\n\t\tCity: \"Chittagong\",\n\t\tDivision: \"Chittagong\",\n\t\tPosition: \"Software Engineer\",\n\t}\n\tEngineers = append(Engineers, engineer)\n\n\tengineer = Engineer{\n\t\tUsername: \"tahsin\",\n\t\tFirstName: \"Tahsin\",\n\t\tLastName: \"Rahman\",\n\t\tCity: \"Chittagong\",\n\t\tDivision: \"Chittagong\",\n\t\tPosition: \"Software Engineer\",\n\t}\n\tEngineers = append(Engineers, engineer)\n\n\tengineer 
= Engineer{\n\t\tUsername: \"jenny\",\n\t\tFirstName: \"Jannatul\",\n\t\tLastName: \"Ferdows\",\n\t\tCity: \"Chittagong\",\n\t\tDivision: \"Chittagong\",\n\t\tPosition: \"Software Engineer\",\n\t}\n\tEngineers = append(Engineers, engineer)\n\n\tif exist, _ := engine.IsTableExist(new(Engineer)); !exist {\n\t\tif err := engine.CreateTables(new(Engineer)); err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\t}\n\n\tsession := engine.NewSession()\n\tdefer session.Close()\n\n\tif err := session.Begin(); err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\n\tfor _, user := range Engineers {\n\t\tif _, err := session.Insert(&user); err != nil {\n\t\t\tif err = session.Rollback(); err != nil {\n\t\t\t\tlog.Fatalln(err)\n\t\t\t}\n\t\t}\n\t}\n\tif err := session.Commit(); err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\n\tauthUser[\"masud\"] = \"pass\"\n\tauthUser[\"admin\"] = \"admin\"\n\n}", "func (u *User) Create() error {\n\tif handler == nil {\n\t\treturn errHandlerNotSet\n\t}\n\tpossible := handler.NewRecord(u)\n\tif possible {\n\t\tif err := handler.Create(u).Error; err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func (c *PatientofphysicianClient) Create() *PatientofphysicianCreate {\n\tmutation := newPatientofphysicianMutation(c.config, OpCreate)\n\treturn &PatientofphysicianCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}\n}", "func callCreate(repo interface{}, methodName string, ctx context.Context, tenant string, modelEntity interface{}) error {\n\targs := []reflect.Value{reflect.ValueOf(ctx)}\n\tif len(tenant) > 0 {\n\t\targs = append(args, reflect.ValueOf(tenant))\n\t}\n\targs = append(args, reflect.ValueOf(modelEntity))\n\tresults := reflect.ValueOf(repo).MethodByName(methodName).Call(args)\n\tif len(results) != 1 {\n\t\tpanic(\"Create should return one argument\")\n\t}\n\tresult := results[0].Interface()\n\tif result == nil {\n\t\treturn nil\n\t}\n\terr, ok := result.(error)\n\tif !ok {\n\t\tpanic(\"Expected result to be an error\")\n\t}\n\treturn 
err\n}", "func (u *UserHandler) Create(c *fiber.Ctx) error {\n\tuser := models.User{}\n\terr := c.BodyParser(&user)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = u.Repo.Create(user)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn c.Status(fiber.StatusOK).JSON(user)\n}", "func (m *Manager) CreateAuditLogDetail(ald *AuditLogDetail) error {\n\tnow := time.Now()\n\tald.CreatedAt = now\n\n\tfunc(in interface{}) {\n\t\tif ii, ok := in.(initializer.Simple); ok {\n\t\t\tii.Initialize()\n\t\t}\n\t}(ald)\n\n\treturn m.GetWDbMap().Insert(ald)\n}", "func (a *App) AuthCreate100(w http.ResponseWriter, r *http.Request) {\n\tservice := \"auth/create\"\n\tAuthAndCallAPI(w, r, service, \"post\", \"v100\")\n}", "func CreateEmployee(w http.ResponseWriter, r *http.Request) {\r\n\tcookie, err := r.Cookie(\"token\")\r\n\tif err != nil {\r\n\t\tif err == http.ErrNoCookie {\r\n\t\t\tw.WriteHeader(http.StatusUnauthorized)\r\n\t\t\treturn\r\n\t\t}\r\n\t\tw.WriteHeader(http.StatusBadRequest)\r\n\t\treturn\r\n\t}\r\n\r\n\ttokenStr := cookie.Value\r\n\r\n\tclaims := &Claims{}\r\n\r\n\ttkn, err := jwt.ParseWithClaims(tokenStr, claims,\r\n\t\tfunc(t *jwt.Token) (interface{}, error) {\r\n\t\t\treturn jwtKey, nil\r\n\t\t})\r\n\r\n\tif err != nil {\r\n\t\tif err == jwt.ErrSignatureInvalid {\r\n\t\t\tw.WriteHeader(http.StatusUnauthorized)\r\n\t\t\treturn\r\n\t\t}\r\n\t\tw.WriteHeader(http.StatusBadRequest)\r\n\t\treturn\r\n\t}\r\n\r\n\tif !tkn.Valid {\r\n\t\tw.WriteHeader(http.StatusUnauthorized)\r\n\t\treturn\r\n\t}\r\n\r\n\tdb := createConnection()\r\n\r\n\tvar emp models.Employee\r\n\terrr := json.NewDecoder(r.Body).Decode(&emp)\r\n\tif errr != nil {\r\n\t\tlog.Panic(\"Unable to decode the request body\")\r\n\t}\r\n\tinsertDynStmt := `insert into \"employee\"( \"empname\",\"emppro\") values($1, $2) returning id`\r\n\tid := 0\r\n\terrr = db.QueryRow(insertDynStmt, emp.EmpName, emp.EmpPRO).Scan(&id)\r\n\tif errr != nil {\r\n\t\tlog.Panic(\"Unable to execute the query\")\r\n\t}\r\n\tres := 
JsonResponse{\r\n\t\tID: int64(id),\r\n\t\tMessage: \"Row created\",\r\n\t}\r\n\tjson.NewEncoder(w).Encode(res)\r\n\r\n}", "func newUserLogin(c *gin.Context){\n\tvar userInfo models.UserInfo\n\tc.BindJSON(&userInfo)\n\n\n fmt.Printf(\"\\n user existring %+v %+v\", userInfo.Name, userInfo.MobileNumber)\n existingUser, existingUserErr := db.GetUserInfo(userInfo.Name, userInfo.MobileNumber)\n if existingUserErr == nil {\n fmt.Printf(\"\\n user existring\")\n payload := models.PayloadLogin{\n \t\tToken: existingUser.Token,\n \t\tUserInfo: existingUser}\n \tutils.SendSuccess(c, payload)\n return\n }\n fmt.Printf(\"\\n error %+v\",existingUserErr )\n userInfo.Id = gocql.UUID.String(gocql.TimeUUID())\n userInfo.IsDisabled = false\n userInfo.Token = utils.CreateToken(userInfo.Id)\n createUserErr := db.CreateUser(userInfo)\n if createUserErr != nil {\n fmt.Printf(\"\\n newUserLogin %+v\", createUserErr)\n utils.SendError(c, 200, utils.APP_ERROR_SERVER)\n return\n }\n\t// Create session token for the user token\n\tpayload := models.PayloadLogin{\n\t\tToken: userInfo.Token,\n\t\tUserInfo: userInfo}\n\tutils.SendSuccess(c, payload)\n}", "func Create(v validator.Validator, ps profile.Service) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tvar prof profile.Profile\n\t\terr := json.NewDecoder(r.Body).Decode(&prof)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"%s: %v\", CreateErrCode, err)\n\t\t\tresp := &je.Response{\n\t\t\t\tCode: CreateErrCode,\n\t\t\t\tMessage: err.Error(),\n\t\t\t}\n\t\t\tje.Error(r, w, resp, http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\n\t\t// validate profile\n\t\tok, fieldErrors := v.Struct(prof)\n\t\tif !ok {\n\t\t\tresp := &je.Response{\n\t\t\t\tCode: CreateBadDataCode,\n\t\t\t\tMessage: CreateBadDataCode,\n\t\t\t\tAdditional: fieldErrors,\n\t\t\t}\n\t\t\tje.Error(r, w, resp, http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\n\t\t// override the account ID with the session accountID\n\t\tsession := 
r.Context().Value(\"Session\").(*token.Session)\n\t\tprof.AccountID = session.AccountID\n\n\t\tp, err := ps.Create(prof)\n\t\tif err != nil {\n\t\t\tresp := &je.Response{\n\t\t\t\tCode: CreateErrCode,\n\t\t\t\tMessage: err.Error(),\n\t\t\t}\n\t\t\tje.Error(r, w, resp, profile.ServiceToHTTPErrorMap(err))\n\t\t\treturn\n\t\t}\n\n\t\t// return created profile\n\t\tw.WriteHeader(http.StatusCreated) // must write status header before NewEcoder closes body\n\t\terr = json.NewEncoder(w).Encode(p)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"%s: %v\", CreateErrCode, err)\n\t\t\tresp := &je.Response{\n\t\t\t\tCode: CreateErrCode,\n\t\t\t\tMessage: err.Error(),\n\t\t\t}\n\t\t\tje.Error(r, w, resp, http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\n\t\tlog.Printf(\"successfully created profile for ID %s\", prof.ID)\n\t\treturn\n\t}\n}", "func CreateOrUpdateChat (db *store.InMemory) func (resp http.ResponseWriter, req *http.Request, params httprouter.Params) {\n return NotImplemented\n}", "func createPerson(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tvar person Person\n\t_ = json.NewDecoder(r.Body).Decode(&person)\n\tpeople = append(people, person)\n\tjson.NewEncoder(w).Encode(person)\n\n}", "func (c *UserRepoImpl) Create(user *model.User) (*model.User, error) {\n\tif err := c.db.Table(\"user\").Save(&user).Error; err != nil {\n\t\tfmt.Errorf(\"Having error : %w\", err)\n\t\tlogrus.Error(err)\n\t\treturn nil, errors.New(\"add user data : error\")\n\t}\n\treturn user, nil\n}" ]
[ "0.62478477", "0.61608213", "0.60926765", "0.58868814", "0.5844858", "0.5783213", "0.5739187", "0.57295036", "0.5607786", "0.55578446", "0.5516983", "0.5479335", "0.5433335", "0.5418112", "0.5392498", "0.5370654", "0.5341402", "0.5338584", "0.5335343", "0.53119457", "0.53099215", "0.52835894", "0.52714974", "0.52655464", "0.52465504", "0.52433187", "0.5232905", "0.5208293", "0.5204899", "0.52042896", "0.5200776", "0.5191798", "0.5173864", "0.5170674", "0.5163448", "0.5154335", "0.51235783", "0.51229596", "0.5120859", "0.50996107", "0.50963527", "0.5095895", "0.50799215", "0.50795", "0.5078746", "0.5074648", "0.5074474", "0.5072551", "0.50717485", "0.5066825", "0.50610185", "0.5058797", "0.50554067", "0.5047297", "0.5041428", "0.5035808", "0.5033703", "0.5027199", "0.50056875", "0.5002481", "0.50004", "0.5000307", "0.49959457", "0.49951538", "0.49926284", "0.4988217", "0.49830547", "0.49792534", "0.4976642", "0.4974577", "0.49712718", "0.4967315", "0.4966975", "0.49579346", "0.49502277", "0.49480948", "0.49454254", "0.49448636", "0.4926743", "0.49237832", "0.49160454", "0.49078766", "0.49066246", "0.49050796", "0.48956794", "0.48925278", "0.48904175", "0.48891842", "0.48845193", "0.48782575", "0.4870999", "0.48672947", "0.48622456", "0.48555112", "0.48542395", "0.4853435", "0.48528308", "0.48523113", "0.48522937", "0.48496172" ]
0.81025505
0
DefaultReadHealthMenstruationPersonalInfo executes a basic gorm read call
func DefaultReadHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) { if in == nil { return nil, errors1.NilArgumentError } ormObj, err := in.ToORM(ctx) if err != nil { return nil, err } if ormObj.Id == 0 { return nil, errors1.EmptyIdError } if hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadApplyQuery); ok { if db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil { return nil, err } } if db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationPersonalInfoORM{}); err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadFind); ok { if db, err = hook.BeforeReadFind(ctx, db); err != nil { return nil, err } } ormResponse := HealthMenstruationPersonalInfoORM{} if err = db.Where(&ormObj).First(&ormResponse).Error; err != nil { return nil, err } if hook, ok := interface{}(&ormResponse).(HealthMenstruationPersonalInfoORMWithAfterReadFind); ok { if err = hook.AfterReadFind(ctx, db); err != nil { return nil, err } } pbResponse, err := ormResponse.ToPB(ctx) return &pbResponse, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultListHealthMenstruationPersonalInfo(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationPersonalInfo, error) {\n\tin := HealthMenstruationPersonalInfo{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationPersonalInfoORM{}, &HealthMenstruationPersonalInfo{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationPersonalInfoORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationPersonalInfo{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func DefaultReadHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif 
hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationDailyEntryORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationDailyEntryORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationDailyEntryORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultCreateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultReadProfile(ctx context.Context, in *Profile, db *gorm1.DB) (*Profile, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, 
err\n\t}\n\tif ormObj.Id == \"\" {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(ProfileORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &ProfileORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(ProfileORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := ProfileORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(ProfileORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultPatchHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationPersonalInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationPersonalInfo(ctx, &HealthMenstruationPersonalInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil 
{\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationPersonalInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationPersonalInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationPersonalInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func DefaultReadContact(ctx 
context.Context, in *Contact, db *gorm.DB) (*Contact, error) {\n\tif in == nil {\n\t\treturn nil, errors.New(\"Nil argument to DefaultReadContact\")\n\t}\n\tormParams, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\taccountID, err := auth.GetAccountID(ctx, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tormParams.AccountID = accountID\n\tormResponse := ContactORM{}\n\tif err = db.Set(\"gorm:auto_preload\", true).Where(&ormParams).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (c *UserRepoImpl) Read(id int) (*model.User, error) {\n\tuser := new(model.User)\n\n\tif err := c.db.Table(\"user\").Where(\"user_id = ?\", id).First(&user).Error; err != nil {\n\t\tlogrus.Error(err)\n\t\treturn nil, errors.New(\"get user data : error \")\n\t}\n\n\treturn user, nil\n}", "func (m *User) Read(fields ...string) error {\n\to := orm.NewOrm()\n\treturn o.Read(m, fields...)\n}", "func (m *User) Read(fields ...string) error {\n\to := orm.NewOrm()\n\treturn o.Read(m, fields...)\n}", "func DefaultReadContact(ctx context.Context, in *Contact, db *gorm.DB) (*Contact, error) {\n\tif in == nil {\n\t\treturn nil, errors.New(\"Nil argument to DefaultReadContact\")\n\t}\n\tormParams, err := ConvertContactToORM(*in)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tormResponse := ContactORM{}\n\tif err = db.Set(\"gorm:auto_preload\", true).Where(&ormParams).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse, err := ConvertContactFromORM(ormResponse)\n\treturn &pbResponse, err\n}", "func (m *ContactsDefaultServer) CustomRead(ctx context.Context, req *ReadContactRequest) (*ReadContactResponse, error) {\n\tres, err := DefaultReadContact(ctx, &Contact{Id: req.GetId()}, m.DB)\n\tif err != nil {\n\t\tst := status.Newf(codes.Internal, \"Unable to read contact. 
Error %v\", err)\n\t\tst, _ = st.WithDetails(errdetails.New(codes.Internal, \"CustomRead\", \"Custom error message\"))\n\t\tst, _ = st.WithDetails(errdetails.New(codes.Internal, \"CustomRead\", \"Another custom error message\"))\n\t\treturn nil, st.Err()\n\t}\n\treturn &ReadContactResponse{Result: res}, nil\n}", "func GetHealth(w http.ResponseWriter, r *http.Request, db *sqlx.DB) {\n\tparams := mux.Vars(r)\n\n\thealth := []Health{}\n\n\tvar err error\n\n\tsession, err := store.Get(r, \"auth\")\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Convert our session data into an instance of User\n\tuser := User{}\n\tuser, _ = session.Values[\"user\"].(User)\n\n\tif user.Username != \"\" && user.AccessLevel == \"admin\" {\n\t\tif _, ok := params[\"id\"]; ok {\n\t\t\terr = db.Select(&health, \"SELECT id, username, ts, variable, value \"+\n\t\t\t\t\"FROM public.health \"+\n\t\t\t\t\"WHERE id = $1 \", params[\"id\"])\n\t\t} else if _, ok = params[\"ts\"]; ok {\n\t\t\terr = db.Select(&health, \"SELECT id, username, ts, variable, value \"+\n\t\t\t\t\"FROM public.health \"+\n\t\t\t\t\"WHERE ts = $1 \", params[\"ts\"])\n\t\t} else if _, ok = params[\"variable\"]; ok {\n\t\t\terr = db.Select(&health, \"SELECT id, username, ts, variable, value \"+\n\t\t\t\t\"FROM public.health \"+\n\t\t\t\t\"WHERE variable = $1 \", params[\"variable\"])\n\t\t} else {\n\t\t\terr = db.Select(&health, \"SELECT id, username, ts, variable, value \"+\n\t\t\t\t\"FROM public.health \")\n\t\t}\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\t\tw.WriteHeader(http.StatusOK)\n\n\t\tif err := json.NewEncoder(w).Encode(health); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t} else {\n\t\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\t\tw.WriteHeader(http.StatusOK)\n\n\t\tif err := json.NewEncoder(w).Encode(\"access denied\"); err != nil 
{\n\t\t\tpanic(err)\n\t\t}\n\t}\n\n\tlogRequest(r)\n}", "func (C *Commander) GetData(writer http.ResponseWriter, request *http.Request) {\n\tvar error model.Error\n\tdb := database.DbConn()\n\tdefer func() {\n\t\terr := db.Close()\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t}()\n\tif strings.Contains(Role, \"program manager\") == true {\n\t\tvar Offset int\n\t\tPages := request.URL.Query()[\"Pages\"]\n\t\tfmt.Println(Pages)\n\t\tif Pages[0] != \"\" {\n\t\t\tlimit, err := strconv.Atoi(request.URL.Query().Get(\"limit\"))\n\t\t\tif limit != 10 && limit != 20 && limit != 50 {\n\t\t\t\twriter.Header().Set(\"Content-Type\", \"application/json\")\n\t\t\t\twriter.WriteHeader(http.StatusBadRequest)\n\t\t\t\terror.Message = \"Incorrect Limit Value\"\n\t\t\t\tjson.NewEncoder(writer).Encode(error)\n\t\t\t\treturn\n\t\t\t}\n\t\t\ti1, _ := strconv.Atoi(Pages[0])\n\t\t\tfmt.Println(i1)\n\t\t\tOffset = 10 * i1\n\t\t\tcount, _ := db.Query(\"SELECT COUNT(Id) FROM sub_project_manager WHERE sub_project_id in (SELECT id FROM sub_project WHERE project_id in (SELECT id FROM project WHERE program_manager_id in (SELECT id FROM program_manager where program_manager_email = ?)))\", UserName)\n\t\t\tdefer func() {\n\t\t\t\terr := count.Close()\n\t\t\t\tif err != nil {\n\t\t\t\t\tpanic(err.Error())\n\t\t\t\t}\n\t\t\t}()\n\t\t\tGetManagerDetails, err := db.Query(\"call GetAllManagerDetailsData(?, ?, ?)\", UserName, Offset, limit)\n\t\t\tif err != nil {\n\t\t\t\tWriteLogFile(err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tdefer func() {\n\t\t\t\terr := GetManagerDetails.Close()\n\t\t\t\tif err != nil {\n\t\t\t\t\tpanic(err.Error())\n\t\t\t\t}\n\t\t\t}()\n\t\t\tvar Total int\n\t\t\tvar ManagerDetailData model.Project\n\t\t\tvar ManagerDetailsData []model.Project\n\t\t\tfor GetManagerDetails.Next() {\n\t\t\t\tGetManagerDetails.Scan(&ManagerDetailData.ProjectName, &ManagerDetailData.SubProjectName, &ManagerDetailData.ManagerName, &ManagerDetailData.ManagerEmailID, 
&ManagerDetailData.Id)\n\t\t\t\tManagerDetailsData = append(ManagerDetailsData, ManagerDetailData)\n\t\t\t}\n\t\t\tif count.Next() != false {\n\t\t\t\tcount.Scan(&Total)\n\t\t\t} else {\n\t\t\t\tTotal = 0\n\t\t\t}\n\t\t\tvar PaginationFormat model.Pagination\n\t\t\tPaginationFormat.TotalData = Total\n\t\t\tPaginationFormat.Limit = limit\n\t\t\tPaginationFormat.Data = ManagerDetailsData\n\t\t\tx1 := Total / limit\n\t\t\tx := Total % limit\n\t\t\tif x == 0 {\n\t\t\t\tPaginationFormat.TotalPages = x1\n\t\t\t} else {\n\t\t\t\tPaginationFormat.TotalPages = x1 + 1\n\t\t\t}\n\t\t\tx, _ = strconv.Atoi(Pages[0])\n\t\t\tif PaginationFormat.TotalPages != 0 {\n\t\t\t\tx1 = x + 1\n\t\t\t}\n\t\t\tPaginationFormat.Page = x1\n\t\t\tsetupResponse(&writer, request)\n\t\t\twriter.Header().Set(\"Content-Type\", \"application/json\")\n\t\t\twriter.WriteHeader(http.StatusOK)\n\t\t\tjson.NewEncoder(writer).Encode(PaginationFormat)\n\t\t} else {\n\t\t\twriter.Header().Set(\"Content-Type\", \"application/json\")\n\t\t\twriter.WriteHeader(http.StatusBadRequest)\n\t\t\terror.Message = \"Incorrect Page Value\"\n\t\t\tjson.NewEncoder(writer).Encode(error)\n\t\t\treturn\n\n\t\t}\n\t} else {\n\t\twriter.WriteHeader(http.StatusNotFound)\n\t}\n}", "func (d *DepartmentRepoImpl) Read(deptno int32) (models.Department, error) {\n\tdept := models.Department{}\n\tgetDepartment := d.DB.Table(\"departments\").Where(\"id = ?\", deptno).Find(&dept)\n\tif getDepartment.Error != nil {\n\t\tfmt.Println(\"[Repo Error] :\", getDepartment.Error)\n\t\treturn dept, getDepartment.Error\n\t}\n\n\treturn dept, nil\n}", "func Read(w http.ResponseWriter, r *http.Request) {\n\n\t//use above functions to parse string and find user\n\tuName := GetQueryString(w, r)\n\n\tuser, findErr := userOps.FindUserByUsername(uName)\n\n\t//handle err\n\tif findErr != nil {\n\t\tw.WriteHeader(404)\n\t\tw.Write([]byte(\"Not found\"))\n\t\treturn\n\t}\n\n\t//Success, 200 response\n\tw.WriteHeader(200)\n\tw.Write([]byte(user.Username))\n}", 
"func (self *RegisObjManager) LoadPersonalChatLogObj(id string) *RedisPersonalChatLogObj {\n\tvalue, ok := self.Load(id)\n\tif ok {\n\t\treturn value.(*RedisPersonalChatLogObj)\n\t}\n\treturn nil\n}", "func (m *Master) ReadInfoFromFile(filepath string) error {\n\t// open file\n\tfile, err := os.OpenFile(filepath, os.O_RDONLY|os.O_CREATE, 0666)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// close file\n\tdefer file.Close()\n\n\t// check file size\n\t// stats, err := file.Stat()\n\t// if err != nil {\n\t// \treturn err\n\t// }\n\t// fileSize := stats.Size()\n\t// buf := make([]byte, fileSize)\n\n\t// read file\n\tdataLenBytes := make([]byte, 8)\n\tuserTypeBytes := make([]byte, 1)\n\tfor {\n\t\t// data len\n\t\tvar dataLen int64 = 0 // data len\n\t\tn, err := io.ReadFull(file, dataLenBytes)\n\t\tif err != nil || n != 8 {\n\t\t\tbreak\n\t\t}\n\t\tbytesBuffer := bytes.NewBuffer(dataLenBytes)\n\t\tbinary.Read(bytesBuffer, binary.LittleEndian, &dataLen)\n\t\t// fmt.Println(\"data len =\", dataLen)\n\n\t\t// user type\n\t\tn, err = io.ReadFull(file, userTypeBytes)\n\t\tif err != nil || n != 1 {\n\t\t\tbreak\n\t\t}\n\t\t// fmt.Println(\"user type =\", userTypeBytes[0])\n\n\t\t// data\n\t\tfile.Seek(-1, 1) // 回退一个字节,用户类型\n\t\tdataBytes := make([]byte, dataLen)\n\t\tn, err = io.ReadFull(file, dataBytes)\n\t\tif err != nil || int64(n) != dataLen {\n\t\t\tbreak\n\t\t}\n\n\t\tswitch model.UserType(userTypeBytes[0]) {\n\t\tcase model.TypeTeacher:\n\t\t\ts := &model.Teacher{}\n\t\t\tif err := s.UnSerialize(dataBytes); err != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\t// s.DisplayInfo()\n\t\t\tm.allUserInfo[model.AllUserType[0]][s.ID] = s\n\t\tcase model.TypeStudent:\n\t\t\tt := &model.Student{}\n\t\t\tif err := t.UnSerialize(dataBytes); err != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\t// t.DisplayInfo()\n\t\t\tm.allUserInfo[model.AllUserType[1]][t.ID] = t\n\t\t}\n\t}\n\t// fmt.Println(\"file size =\", fileSize, \",read size =\", n)\n\n\treturn nil\n}", "func (r *UserRead) show(q 
*msg.Request, mr *msg.Result) {\n\tvar (\n\t\tuserID, userName string\n\t\tfirstName, lastName string\n\t\tmailAddr, team string\n\t\tdictID, dictName, createdBy string\n\t\tcreatedAt time.Time\n\t\temployeeNr int\n\t\tisActive, isSystem, isDeleted bool\n\t\terr error\n\t)\n\n\tif err = r.stmtShow.QueryRow(\n\t\tq.User.ID,\n\t).Scan(\n\t\t&userID,\n\t\t&userName,\n\t\t&firstName,\n\t\t&lastName,\n\t\t&employeeNr,\n\t\t&mailAddr,\n\t\t&isActive,\n\t\t&isSystem,\n\t\t&isDeleted,\n\t\t&team,\n\t\t&dictID,\n\t\t&dictName,\n\t\t&createdBy,\n\t\t&createdAt,\n\t); err == sql.ErrNoRows {\n\t\tmr.NotFound(err, q.Section)\n\t\treturn\n\t} else if err != nil {\n\t\tmr.ServerError(err, q.Section)\n\t\treturn\n\t}\n\n\tmr.User = append(mr.User, proto.User{\n\t\tID: userID,\n\t\tUserName: userName,\n\t\tFirstName: firstName,\n\t\tLastName: lastName,\n\t\tEmployeeNumber: strconv.Itoa(employeeNr),\n\t\tMailAddress: mailAddr,\n\t\tIsActive: isActive,\n\t\tIsSystem: isSystem,\n\t\tIsDeleted: isDeleted,\n\t\tTeamID: team,\n\t\tDetails: &proto.UserDetails{\n\t\t\tCreation: &proto.DetailsCreation{\n\t\t\t\tCreatedAt: createdAt.Format(msg.RFC3339Milli),\n\t\t\t\tCreatedBy: createdBy,\n\t\t\t},\n\t\t\tDictionaryID: dictID,\n\t\t\tDictionaryName: dictName,\n\t\t},\n\t})\n\tmr.OK()\n}", "func (c *AllergyIntoleranceController) Read(ctx *app.ReadAllergyIntoleranceContext) error {\n\t//var sqlParams []interface{}\n\tx := json.RawMessage{}\n\n\tabsPath, _ := filepath.Abs(\"bundle.json\")\n\tfmt.Println(absPath)\n\tfile, err := os.Open(absPath)\n\tif err == nil {\n\t\t_ = json.NewDecoder(file).Decode(&x)\n\t}\n\tfile.Close()\n\n\t//getJson(\"http://nprogram.azurewebsites.net/Patient/1?_format=json\", patient)\n\t//getJson(\"http://localhost:3001\", patient)\n\t// //getJson(\"https://open-ic.epic.com/FHIR/api/FHIR/DSTU2/Patient/Tbt3KuCY0B5PSrJvCu2j-PlK.aiHsu2xUjUM8bWpetXoB\", patient)\n\t//_, err = json.NewDecoder(file).Decode(patient)\n\t//, err := 
cc.Get(\"http://fhirtest.uhn.ca/baseDstu2/Patient/EXexample\")\n\t//r, err := cc.Get(\"http://nprogram.azurewebsites.net/Patient/1?_format=json\")\n\n\t// if err != nil {\n\t// \tfmt.Println(\"A timeout error occured\")\n\t// \tos.Exit(3)\n\t// }\n\t// defer r.Body.Close()\n\n\t// body, _ := ioutil.ReadAll(r.Body)\n\n\t// isJSON := IsJSON(string(body))\n\t// if isJSON == true {\n\t// \tfmt.Println(\"Yesssssssssssssssssssssssssssssssssssssssssssssssss\")\n\t// \t_ = json.Unmarshal(body, &x)\n\t// } else {\n\t// \tfmt.Println(\"NOoooooooooooooooooooooooooooooooooooooooooooo\")\n\t// \t_ = xml.Unmarshal(body, &x)\n\t// }\n\n\t//err = json.Unmarshal([]byte(r), &f)\n\n\t// //b, _ := json.Marshal(body)\n\t// return json.Unmarshal(body, &target)\n\t//fmt.Printf(\"%s\", x)\n\n\tfmt.Println()\n\t//main_rt := gjson.Get(string(x), \"resourceType\")\n\t//sub_rt := gjson.Get(string(x), \"entry.#.resource.resourceType\")\n\t// m, ok := gjson.Parse(string(x)).Value().(map[string]interface{})\n\t// if !ok {\n\t// \t// not a map\n\t// }\n\n\t//value1 := gjson.Get(string(x), \"*\")\n\t//value2 := gjson.Get(string(x), \"entry.#.resource.code.coding.#.display\")\n\t//value2 := gjson.Get(string(x), \"entry.#.resource.component.#.valueQuantity.value\")\n\t//value2 := gjson.Get(string(x), \"entry.#.resource.component.#.code.coding.#.display\")\n\n\t//value3 := gjson.Get(string(x), \"identifier\")\n\t//fmt.Println(\"value *:\", value1.String())\n\n\t//fmt.Println(\"resource:\", value11.String())\n\t//fmt.Println(\"entry:\", value2.String())\n\t//fmt.Println(\"value:\", value3.String())\n\t//result := gjson.Get(string(x), \"entry.#.resource\")\n\n\t// //results := gjson.GetMany(string(x), \"entry.#.resource.code.coding.#.display\", \"entry.#.resource.component.#.code.coding.#.display\", \"entry.#.resource.component.#.valueQuantity.Value\")\n\t// results := gjson.GetMany(string(x), \"entry.#.resource.component.#.code.coding.#.display\", 
\"entry.#.resource.component.#.valueQuantity.value\")\n\n\t//_ = json.Unmarshal(x, &patient)\n\t//_, _ = json.Marshal(string(x))\n\n\t//_ = json.NewDecoder(r.Body).Decode(&x)\n\t//fmt.Println(results)\n\t//fmt.Printf(\"Indentifer:\", x.Matches[0].Ad, \"\\n\")\n\t//fmt.Println(\"Address:\", x.Address)\n\t//fmt.Println(\"Telecom:\", x.Telecom)\n\t//fmt.Println(\"CareProvider:\", x.CareProvider)\n\t//fmt.Println(\"Name:\", x.Name)\n\t//fmt.Printf(\"Contact:\", x.Contact)\n\n\t//fmt.Println(\"x is equal to:\", x.Address[0].Text)\n\n\t// res := &app.Patient{}\n\t// res.Active = patient.Active\n\t// res.BirthDate = patient.BirthDate\n\t// res.Gender = patient.Gender\n\t// //res.Telecom = patient.Telecom\n\t//res.Address=patient.Address\n\t//res.Address=\n\n\t//json.Marshal(patient)\n\n\t// \ttype int64array []int64\n\n\t// func (a int64array) Value() (driver.Value, error) {\n\t// // Format a in PostgreSQL's array input format {1,2,3} and return it as as string or []byte.\n\t// }\n\t//_, _ = json.Marshal(x)\n\n\t//g, _ := json.Marshal(x)\n\t//fmt.Println(string(g))\n\t// _, err = stmt.Exec(uuid.NewV4().String(), string(g))\n\t// if err != nil {\n\t// \tfmt.Println(err.Error())\n\t// \tfmt.Println(\"Error with db\")\n\t// }\n\n\t//json.Marshal(x)\n\n\t// t := app.Observation{}\n\t// s := reflect.ValueOf(&t).Elem()\n\t// typeOfT := s.Type()\n\n\t// for i := 0; i < s.NumField(); i++ {\n\t// \t//f := s.Field(i)\n\t// \t//fmt.Printf(\"%d: %s %s = %v\\n\", i,\n\t// \t//typeOfT.Field(i).Name, f.Type(), f.Interface())\n\t// \t//test2 := fmt.Sprintf(\"%T\", s.Field(i))\n\t// \ttest2 := fmt.Sprintf(\"%T\", s.Field(i))\n\n\t// \tfmt.Println(reflect.TypeOf(test2).Kind())\n\t// \t//fmt.Println(*test2)\n\n\t// \ttest := fmt.Sprintf(\"%s\", strings.ToLower(typeOfT.Field(i).Name))\n\t// \tfmt.Println(test)\n\n\t// \tvalue := gjson.Get(string(x), test)\n\t// \tfmt.Println()\n\t// \tfmt.Println(\"value *:\", value.String())\n\n\t// }\n\n\t//res := &app.AllergyIntoleranceMedia{}\n\treturn 
nil\n\t//return ctx.OK(res)\n\n}", "func DefaultListHealthMenstruationDailyEntry(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationDailyEntry, error) {\n\tin := HealthMenstruationDailyEntry{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationDailyEntryORM{}, &HealthMenstruationDailyEntry{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationDailyEntryORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationDailyEntry{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func readUser(r *request.Request) (request.Response, error) {\n\tif r.Context.UserID != 0 {\n\t\tuser, err := users.ListUsers(r.Request.Context(), users.ListArgs{}, users.ListFilter{AccountID: []int32{r.Context.UserID}})\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"could not retrieve current user: %v\", err)\n\t\t} else if len(user) == 1 
{\n\t\t\tr.Context.User = user[0]\n\t\t} else {\n\t\t\treturn nil, fmt.Errorf(\"did not find current user %v in database\", r.Context.UserID)\n\t\t}\n\t}\n\treturn nil, nil\n}", "func (h *provider) Read(ctx wfContext.Context, v *value.Value, act types.Action) error {\n\tobj := new(unstructured.Unstructured)\n\tif err := v.UnmarshalTo(obj); err != nil {\n\t\treturn err\n\t}\n\tkey, err := client.ObjectKeyFromObject(obj)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif key.Namespace == \"\" {\n\t\tkey.Namespace = \"default\"\n\t}\n\tif err := h.cli.Get(context.Background(), key, obj); err != nil {\n\t\treturn err\n\t}\n\treturn v.FillObject(obj.Object, \"result\")\n}", "func (s *AutograderService) GetUser(ctx context.Context, in *pb.Void) (*pb.User, error) {\n\tusr, err := s.getCurrentUser(ctx)\n\tif err != nil {\n\t\ts.logger.Errorf(\"GetUser failed: authentication error: %w\", err)\n\t\treturn nil, ErrInvalidUserInfo\n\t}\n\tdbUsr, err := s.db.GetUserWithEnrollments(usr.GetID())\n\tif err != nil {\n\t\ts.logger.Errorf(\"GetUser failed to get user with enrollments: %w \", err)\n\t}\n\treturn dbUsr, nil\n\n}", "func (m *ApplicationModule) Read(fields ...string) error {\n\to := orm.NewOrm()\n\treturn o.Read(m, fields...)\n}", "func Read(email string) (*structs.User, error) {\n\tuser := &structs.User{}\n\n\trow, err := dot.QueryRow(db, selectEmailQuery, email)\n\n\t// Scan => take data\n\tif err := row.Scan(&user.Id, &user.Sub, &user.GivenName, &user.FamilyName, &user.Profile, &user.Picture, &user.Email, &user.EmailVerified, &user.Gender, &user.Address, &user.Phone); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn nil, errors.NotFound(email, err.Error())\n\t\t}\n\n\t\treturn nil, errors.InternalServerError(email, err.Error())\n\t}\n\n\treturn user, err\n}", "func (e *Enchant) LoadPersonalDict(name string) {\n\tcName := C.CString(name)\n\tdefer C.free(unsafe.Pointer(cName))\n\n\tif e.dict != nil {\n\t\tC.enchant_broker_free_dict(e.broker, e.dict)\n\t}\n\n\te.dict 
= C.enchant_broker_request_pwl_dict(e.broker, cName)\n}", "func DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx context.Context, patchee *HealthMenstruationPersonalInfo, patcher *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"PeriodLengthInDays\" {\n\t\t\tpatchee.PeriodLengthInDays = patcher.PeriodLengthInDays\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CycleLengthInDays\" {\n\t\t\tpatchee.CycleLengthInDays = patcher.CycleLengthInDays\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func (p *Personal) All(ctx context.Context) (*[]PersonalData, error) {\n\tusrs, err := p.DB.All(ctx)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"could not select all personal data\")\n\t}\n\treturn usrs, nil\n}", "func (m *HealthMenstruationPersonalInfoORM) ToPB(ctx context.Context) (HealthMenstruationPersonalInfo, error) {\n\tto := HealthMenstruationPersonalInfo{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil 
{\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tto.PeriodLengthInDays = m.PeriodLengthInDays\n\tto.CycleLengthInDays = m.CycleLengthInDays\n\tif posthook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func GetPerson(db *sql.DB) {}", "func (rf *Raft) readPersist(data []byte) { //??????????\r\n\t// Your code here.\r\n\t// Example:\r\n\t// r := bytes.NewBuffer(data)\r\n\t// d := gob.NewDecoder(r)\r\n\t// d.Decode(&rf.xxx)\r\n\t// d.Decode(&rf.yyy)\r\n\tif(data == nil || len(data) < 1) { //not containing any state\r\n\t\treturn\r\n\t}\r\n}", "func read(res http.ResponseWriter, req *http.Request) {\n\trows, err := db.Query(`SELECT * FROM customer;`)\n\tcheck(err)\n\tdefer rows.Close()\n\tvar name string\n\tfor rows.Next() {\n\t\terr = rows.Scan(&name)\n\t\tcheck(err)\n\t\tfmt.Fprintln(res, \"The records are\", name)\n\t}\n}", "func (ps *Store) Read(ctx context.Context, key datastore.Key, entity datastore.Entity) error {\n\tc := GetCon(ctx)\n\temd := entity.GetEntityMetadata()\n\titer := c.Query(getJSONSelect(emd.GetName(), emd.GetIDColumnName()), key).Iter()\n\tvar json string\n\tvalid := iter.Scan(&json)\n\tif !valid {\n\t\treturn common.NewError(datastore.EntityNotFound, fmt.Sprintf(\"%v not found with id = %v\", emd.GetName(), key))\n\t}\n\tdatastore.FromJSON(json, entity)\n\tif err := iter.Close(); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (m *DeviceHealth) GetAdditionalData()(map[string]any) {\n val , err := m.backingStore.Get(\"additionalData\")\n if err != nil {\n panic(err)\n }\n if val == nil {\n var value = make(map[string]any);\n m.SetAdditionalData(value);\n }\n return val.(map[string]any)\n}", "func GetUser(c *gin.Context) {\n\tnID := c.Param(\"user_id\")\n\tdb := dbConn()\n\tselDB, err := db.Query(\"CALL read_user(?)\", nID)\n\tif err != nil 
{\n\t\tpanic(err.Error)\n\t}\n\n\tuser := User{}\n\tusers := []User{}\n\tfor selDB.Next() {\n\t\tvar id, username, useremail, fname, lname, password, passwordchange, passwordexpired, lastlogon, accountlocked string\n\t\terr = selDB.Scan(&id, &username, &useremail, &fname, &lname, &password, &passwordchange, &passwordexpired, &lastlogon, &accountlocked)\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t\tc.JSON(500, gin.H{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t})\n\t\t}\n\t\tuser.ID = id\n\t\tuser.UserName = username\n\t\tuser.UserEmail = useremail\n\t\tuser.FName = fname\n\t\tuser.LName = lname\n\t\tuser.Password = password\n\t\tuser.PasswordChange = passwordchange\n\t\tuser.PasswordExpired = passwordexpired\n\t\tuser.LastLogon = lastlogon\n\t\tuser.AccountLocked = accountlocked\n\t\tiid, err := strconv.Atoi(id)\n\t\tif err != nil {\n\t\t\tpanic(err.Error)\n\t\t}\n\t\tselDB02, err := db.Query(\"CALL read_access_userid(?)\", iid)\n\t\tif err != nil {\n\t\t\tpanic(err.Error)\n\t\t}\n\t\taccess := Access{}\n\t\taccessList := []Access{}\n\t\tfor selDB02.Next() {\n\t\t\tvar accessid, userid, courtid, caseaccess, personaccess, accountingaccess, juryaccess, attorneyaccess, configaccess, securitylevel, sealedcase string\n\t\t\terr := selDB02.Scan(&accessid, &userid, &courtid, &caseaccess, &personaccess, &accountingaccess, &juryaccess, &attorneyaccess, &configaccess, &securitylevel, &sealedcase)\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(err)\n\t\t\t\tc.JSON(500, gin.H{\n\t\t\t\t\t\"error\": err.Error(),\n\t\t\t\t})\n\t\t\t}\n\t\t\taccess.AccessID = accessid\n\t\t\taccess.IDUser = userid\n\t\t\taccess.IDCourt = courtid\n\t\t\taccess.CaseAccess = caseaccess\n\t\t\taccess.PersonAccess = personaccess\n\t\t\taccess.AccountingAccess = accountingaccess\n\t\t\taccess.JuryAccess = juryaccess\n\t\t\taccess.AttorneyAccess = attorneyaccess\n\t\t\taccess.ConfigAccess = configaccess\n\t\t\taccess.SecurityLevel = securitylevel\n\t\t\taccess.SealedCase = sealedcase\n\t\t\taccessList = 
append(accessList, access)\n\t\t}\n\t\tuser.AccessList = accessList\n\t\tusers = append(users, user)\n\t}\n\n\tc.JSON(200, gin.H{\n\t\t\"result\": users,\n\t})\n\n\tdefer db.Close()\n}", "func readFromPublic(username string) (*PublicInfo, error) {\n\tlink := \"http://localhost:8080/v1/accounts/\" + username\n\tresp, err := http.Get(link)\n\tif err != nil {\n\t\tlog.Println(\"account does not exists\", err)\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\tbodyBytes, err := ioutil.ReadAll(resp.Body)\n\tvar pageData PublicInfo\n\terr = json.Unmarshal(bodyBytes, &pageData)\n\treturn &pageData, err\n}", "func (m *CustomDomainManager) Read(id string, opts ...RequestOption) (c *CustomDomain, err error) {\n\terr = m.Request(\"GET\", m.URI(\"custom-domains\", id), &c, opts...)\n\treturn\n}", "func (repository *Datastore)GetProfile(username string)(*user.Person,error){\n\tperson := newUser() //initialize user.Person and will used to store profile info\n\tquery := `SELECT * FROM userRepository WHERE username = ?`\n\terr := repository.Db.Get(&person, query, username) //get person profile details\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &person, nil\n}", "func (t *SimpleChaincode) readuser(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar name, jsonResp string\n\tvar err error\n\n\tif len(args) != 1 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. 
Expecting name of the var to query\")\n\t}\n\n\tname = args[0]\n\tvalAsbytes, err := stub.GetState(name) //get the key value from chaincode state\n\tif err != nil {\n\t\tjsonResp = \"{\\\"Error\\\":\\\"Failed to get state for \" + name + \"\\\"}\"\n\t\treturn nil, errors.New(jsonResp)\n\t}\n\n\treturn valAsbytes, nil //send it onward\n}", "func (k *xyzProvider) Read(ctx context.Context, req *pulumirpc.ReadRequest) (*pulumirpc.ReadResponse, error) {\n\turn := resource.URN(req.GetUrn())\n\tty := urn.Type()\n\tif ty != \"xyz:index:Random\" {\n\t\treturn nil, fmt.Errorf(\"Unknown resource type '%s'\", ty)\n\t}\n\treturn nil, status.Error(codes.Unimplemented, \"Read is not yet implemented for 'xyz:index:Random'\")\n}", "func DefaultReadEmail(ctx context.Context, in *Email, db *gorm.DB) (*Email, error) {\n\tif in == nil {\n\t\treturn nil, errors.New(\"Nil argument to DefaultReadEmail\")\n\t}\n\tormParams, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tormResponse := EmailORM{}\n\tif err = db.Set(\"gorm:auto_preload\", true).Where(&ormParams).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func main() {\n //name := \"Petter\"\n email := \"[email protected]\"\n\n l, err := orm.FindLoginByEmailMapper(email)\n\n fmt.Println(err)\n fmt.Println(l)\n fmt.Println(\"id: \", l.ID)\n fmt.Println(\"name: \", l.Name)\n fmt.Println(\"email\", l.Email)\n\n}", "func Read(client *mongo.Client, firstname string) (*[]Person, error) {\r\n\tvar m bson.M\r\n\r\n\tpersons := make([]Person, 0, 10)\r\n\r\n\tpersonCollection := client.Database(\"MyApp\").Collection(\"person\")\r\n\tctx, _ := context.WithTimeout(context.Background(), 10*time.Second)\r\n\tfilterCursor, err := personCollection.Find(ctx, bson.M{\"firstname\": firstname})\r\n\r\n\tif err != nil {\r\n\t\treturn nil, err\r\n\t}\r\n\r\n\terr = filterCursor.All(ctx, &persons)\r\n\tif err != nil {\r\n\t\treturn nil, 
err\r\n\t}\r\n\r\n\tbsonBytes, _ := bson.Marshal(m)\r\n\tbson.Unmarshal(bsonBytes, &persons)\r\n\r\n\treturn &persons, err\r\n}", "func GetAllPerson(c *gin.Context) {\n\tperson, _ := models.LoadPeople()\n\tc.JSON(http.StatusOK, person)\n\n}", "func (rf *Raft) readPersist(data []byte) {\n\tif data == nil || len(data) < 1 { // bootstrap without any state?\n\t\treturn\n\t}\n\t// Your code here (2C).\n\t// Example:\n\tr := bytes.NewBuffer(data)\n\td := labgob.NewDecoder(r)\n\n\tstate := &StateInfo{}\n\n\tif err := d.Decode(state); err != nil {\n\t\tpanic(err)\n\t}\n\trf.currentTerm = state.CurrentTerm\n\trf.commitIndex = state.CommitIndex\n\trf.lastApplied = state.LastApplied\n\trf.log = state.Logs\n\tDPrintf(\"rf [me %v] read stateInfo: %#v\", rf.me, state)\n}", "func (c *UserRepoImpl) ReadAll() ([]*model.User, error) {\n\tuserList := make([]*model.User, 0)\n\tif err := c.db.Table(\"user\").Find(&userList).Error; err != nil {\n\t\tlogrus.Error(err)\n\t\treturn nil, errors.New(\"get user list data : error \")\n\t}\n\treturn userList, nil\n}", "func DefaultPatchSetHealthMenstruationPersonalInfo(ctx context.Context, objects []*HealthMenstruationPersonalInfo, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationPersonalInfo, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationPersonalInfo, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationPersonalInfo(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func (t *SimpleChaincode) readStudent(APIstub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar name, jsonResp string\n\tvar err error\n\n\tif len(args) <= 0 {\n\t\treturn shim.Error(\"Incorrect number of arguments. 
Expecting name of the name to query\")\n\t}\n\n\tname = args[0]\n\tvalAsbytes, err := APIstub.GetState(name)\n\tif err != nil {\n\t\tjsonResp = \"{\\\"Error\\\":\\\"Failed to get state for \" + name + \"\\\"}\"\n\t\treturn shim.Error(jsonResp)\n\t} else if valAsbytes == nil {\n\t\tjsonResp = \"{\\\"Error\\\":\\\"Student does not exist: \" + name + \"\\\"}\"\n\t\treturn shim.Error(jsonResp)\n\t}\n\treturn shim.Success(valAsbytes)\n}", "func showPersonInfo(r result) {\n\tif r.Error != nil {\n\t\tlog.Printf(\"Response Error because err: %v\\n\", r.Error)\n\t\treturn\n\t}\n\n\tif len(r.Person.Results) == 0 {\n\t\tlog.Printf(\"Response Error: Person info not found\\n\")\n\t\treturn\n\t}\n\n\tinfo := r.Person.Results[0]\n\tfmt.Printf(\"[%v] Email: %v \\n\", r.Latency, info.Email)\n}", "func (m *OrganizationManager) ReadByName(name string, opts ...RequestOption) (o *Organization, err error) {\n\terr = m.Request(\"GET\", m.URI(\"organizations\", \"name\", name), &o, opts...)\n\treturn\n}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here (2C).\n\t// Example:\n\t// r := bytes.NewBuffer(data)\n\t// d := gob.NewDecoder(r)\n\t// d.Decode(&rf.xxx)\n\t// d.Decode(&rf.yyy)\n\tif data == nil || len(data) < 1 { // bootstrap without any state?\n\t\treturn\n\t}\n}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here (2C).\n\t// Example:\n\t// r := bytes.NewBuffer(data)\n\t// d := gob.NewDecoder(r)\n\t// d.Decode(&rf.xxx)\n\t// d.Decode(&rf.yyy)\n\tif data == nil || len(data) < 1 { // bootstrap without any state?\n\t\treturn\n\t}\n}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here (2C).\n\t// Example:\n\t// r := bytes.NewBuffer(data)\n\t// d := gob.NewDecoder(r)\n\t// d.Decode(&rf.xxx)\n\t// d.Decode(&rf.yyy)\n\tif data == nil || len(data) < 1 { // bootstrap without any state?\n\t\treturn\n\t}\n}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here (2C).\n\t// Example:\n\t// r := bytes.NewBuffer(data)\n\t// d := 
gob.NewDecoder(r)\n\t// d.Decode(&rf.xxx)\n\t// d.Decode(&rf.yyy)\n\tif data == nil || len(data) < 1 { // bootstrap without any state?\n\t\treturn\n\t}\n}", "func (esc *ExtendedSimpleContract) Read(ctx utils.CustomTransactionContextInterface, key string) (string, error) {\n\texisting := ctx.GetCallData()\n\n\tif existing == nil {\n\t\treturn \"\", fmt.Errorf(\"Cannot read world state pair with key %s. Does not exist\", key)\n\t}\n\n\treturn string(existing), nil\n}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here.\n\t// Example:\n\t// r := bytes.NewBuffer(data)\n\t// d := gob.NewDecoder(r)\n\t// d.Decode(&rf.xxx)\n\t// d.Decode(&rf.yyy)\n\tr := bytes.NewBuffer(data)\n\td := gob.NewDecoder(r)\n\td.Decode(&rf.currentTerm)\n\td.Decode(&rf.votedFor)\n\td.Decode(&rf.log)\n}", "func (store TodoStore) Read(_ sqlx.Queryer, filters ...gtimer.TodoFilter) (gtimer.Todos, error) {\n\tvar todo gtimer.Todo\n\tfor _, filter := range filters {\n\t\tfilter(&todo)\n\t}\n\tif todo.ID != \"\" {\n\t\ttodo, err := store.Get(todo.ID)\n\t\tif err != nil {\n\t\t\treturn gtimer.Todos{}, err\n\t\t}\n\t\treturn gtimer.Todos{todo}, err\n\t}\n\tif todo.Status != \"\" {\n\t\treturn store.ByStatus(todo.Status)\n\t}\n\treturn store.All()\n}", "func NewPersonal(db DBProvider) (*Personal, error) {\n\treturn &Personal{\n\t\tDB: db,\n\t}, nil\n}", "func DefaultListUserInfo(ctx context.Context, db *gorm.DB) ([]*UserInfo, error) {\n\tin := UserInfo{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm1.ApplyCollectionOperators(ctx, db, &UserInfoORM{}, &UserInfo{}, nil, nil, nil, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db); err != nil 
{\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []UserInfoORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*UserInfo{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func (DBConnection *MariaDBPlugin) GetUser(UserID uint64) (interfaces.UserInformation, error) {\n\tqueryArray := []interface{}{}\n\tsqlQuery := \"SELECT Name, CreationTime, Disabled, Permissions FROM Users WHERE ID = ?\"\n\tqueryArray = append(queryArray, UserID)\n\n\t//First Query the main information\n\tvar Name string\n\tvar NCreationTime mysql.NullTime\n\tvar CreationTime time.Time\n\tvar Disabled bool\n\tvar Permissions uint64\n\terr := DBConnection.DBHandle.QueryRow(sqlQuery, queryArray...).Scan(&Name, &NCreationTime, &Disabled, &Permissions)\n\tif err != nil {\n\t\treturn interfaces.UserInformation{}, err\n\t}\n\n\treturn interfaces.UserInformation{ID: UserID, Name: Name, CreationTime: CreationTime, Disabled: Disabled, Permissions: interfaces.UserPermission(Permissions)}, nil\n}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here.\n\t// Example:\n\t// r := bytes.NewBuffer(data)\n\t// d := gob.NewDecoder(r)\n\t// d.Decode(&rf.xxx)\n\t// d.Decode(&rf.yyy)\n\tr := bytes.NewBuffer(data)\n\td := gob.NewDecoder(r)\n\td.Decode(&rf.log)\n\td.Decode(&rf.votedFor)\n\td.Decode(&rf.raftData)\n\td.Decode(&rf.currentTerm)\n\td.Decode(&rf.commitIndex)\n\td.Decode(&rf.lastApplied)\n}", "func ReadBusinessPropertiess(rows *sql.Rows, a *BusinessProperties) error {\n\treturn rows.Scan(&a.BPID, &a.BID, &a.Name, &a.Data, &a.FLAGS, 
&a.CreateTS, &a.CreateBy, &a.LastModTime, &a.LastModBy)\n}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here.\n\t// Example:\n\tr := bytes.NewBuffer(data)\n\td := gob.NewDecoder(r)\n\trf.mu.Lock()\n\td.Decode(&rf.CurrentTerm)\n\td.Decode(&rf.VotedFor)\n\td.Decode(&rf.Log)\n\trf.mu.Unlock()\n}", "func TestRead(t *testing.T) {\r\n\tpersonsFiltered, err := Read(client, firstname)\r\n\tif err != nil {\r\n\t\tt.Fatalf(\"Failed read test :%s\", err)\r\n\t}\r\n\r\n\tfor _, value := range *personsFiltered {\r\n\t\tif value.ID == _id {\r\n\t\t\tt.Log(\"person exists :\", value.ID)\r\n\t\t\tbreak\r\n\t\t} else {\r\n\t\t\tt.Fatalf(\"Failed read test. \")\r\n\t\t}\r\n\t}\r\n}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here.\n\t// Example:\n\tr := bytes.NewBuffer(data)\n\td := gob.NewDecoder(r)\n\td.Decode(&rf.currentTerm)\n\td.Decode(&rf.votedFor)\n\td.Decode(&rf.logs)\n}", "func (rf *Raft) readPersist(data []byte) {\n\tif data == nil || len(data) < 1 { // bootstrap without any state?\n\t\treturn\n\t}\n\n\t// Your code here (2C).\n\t// Example:\n\tr := bytes.NewBuffer(data)\n\td := labgob.NewDecoder(r)\n\n\tif d.Decode(&rf.term) != nil {\n\t\tpanic(\"fail on read persist term\")\n\t}\n\n\tif d.Decode(&rf.votedFor) != nil {\n\t\tpanic(\"fail on read persist votedFor\")\n\t}\n\n\tif d.Decode(&rf.logEntries) != nil {\n\t\tpanic(\"fail on read persist logEntries\")\n\t}\n\n\tDPrintf(\"%v read-persist term=%d voted-for=%d log-entries=%d\", rf.raftInfo(), rf.term, rf.votedFor, len(rf.logEntries))\n}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here.\n\t// Example:\n\tr := bytes.NewBuffer(data)\n\td := gob.NewDecoder(r)\n\td.Decode(&rf.currentTerm)\n\td.Decode(&rf.voteFor)\n\td.Decode(&rf.log)\n}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here (2C).\n\t// Example:\n\trf.mu.Lock()\n defer rf.mu.Unlock()\n if data == nil || len(data) < 1 { // bootstrap without any state?\n\t\treturn\n\t}\n\t r := 
bytes.NewBuffer(data)\n\t d := gob.NewDecoder(r)\n\t d.Decode(&rf.currentTerm)\n\t d.Decode(&rf.votedFor)\n\t d.Decode(&rf.log)\n}", "func (m *metaObject) readMetadata(pth string) ([]byte, error) {\n\trdr, err := m.meta.Get(m.contexter(), pth)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn ioutil.ReadAll(rdr)\n}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here.\n\t// Example:\n\t// r := bytes.NewBuffer(data)\n\t// d := gob.NewDecoder(r)\n\t// d.Decode(&rf.xxx)\n\t// d.Decode(&rf.yyy)\n\n\tif data == nil {\n\t\treturn\n\t}\n\tr := bytes.NewBuffer(data)\n\td := gob.NewDecoder(r)\n\td.Decode(&rf.currentTerm)\n\td.Decode(&rf.votesAcquired)\n\td.Decode(&rf.log)\n}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here.\n\t// Example:\n\t// r := bytes.NewBuffer(data)\n\t// d := gob.NewDecoder(r)\n\t// d.Decode(&rf.xxx)\n\t// d.Decode(&rf.yyy)\n\n\tr := bytes.NewBuffer(data)\n\td := gob.NewDecoder(r)\n\td.Decode(&rf.currentTerm)\n\td.Decode(&rf.voteFor)\n\td.Decode(&rf.log)\n}", "func getLoan(l *models.Loan, db *gorm.DB) error {\n\terr := db.Select(\"id,created_at,updated_at,initial_value,interest,quota,balance,cod_loan_state,cod_client,cod_collection,cod_user\").First(l).GetErrors()\n\tif len(err) != 0 {\n\t\treturn errors.New(\"no se encuentra\")\n\t}\n\treturn nil\n}", "func (m *DBMem) Read(id int) (Person, error) {\n m.RLock()\n defer m.RUnlock()\n\n if len(m.data) < id {\n\t\tfmt.Println(\"ID is out of range\")\n\t\treturn Person{}, errors.New(\"ID is out of range\")\n\t}\n \n v, ok := m.data[id]\n if !ok {\n return Person{}, errors.New(\"ID not found\")\n }\n\n return v, nil\n}", "func (m *HealthMenstruationPersonalInfo) ToORM(ctx context.Context) (HealthMenstruationPersonalInfoORM, error) {\n\tto := HealthMenstruationPersonalInfoORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, 
err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tto.ProfileId = m.ProfileId\n\tto.PeriodLengthInDays = m.PeriodLengthInDays\n\tto.CycleLengthInDays = m.CycleLengthInDays\n\tif posthook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (b *OGame) GetUserInfos() ogame.UserInfos {\n\treturn b.WithPriority(taskRunner.Normal).GetUserInfos()\n}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here (2C).\n\tif data == nil || len(data) < 1 { // bootstrap without any state?\n\t\treturn\n\t}\n\tr := bytes.NewBuffer(data)\n\td := gob.NewDecoder(r)\n\td.Decode(&rf.voteFor)\n\td.Decode(&rf.currentTerm)\n\td.Decode(&rf.log)\n}", "func (model *TodoerModel) Read(id int) Todoer {\n\tvar todoer = Todoer{}\n\n\tconnect(func(db *sql.DB) {\n\t\trows, err := db.Query(\"select * from todoer where id = ?\", id)\n\n\t\tif err != nil {\n\t\t\tlog.Panic(err)\n\t\t}\n\n\t\tdefer rows.Close()\n\n\t\tfor rows.Next() {\n\t\t\terr := rows.Scan(&todoer.ID, &todoer.Username, &todoer.CreatedAt, &todoer.ModifiedAt)\n\t\t\tif err != nil {\n\t\t\t\tlog.Panic(err)\n\t\t\t}\n\t\t}\n\n\t\terr = rows.Err()\n\n\t\tif err != nil {\n\t\t\tlog.Panic(err)\n\t\t}\n\t})\n\n\treturn todoer\n}", "func (rf *Raft) readPersist(data []byte) {\n\trf.Debug(dPersist, \"reading persisted state\")\n\tif data == nil || len(data) < 1 { // bootstrap without any state?\n\t\trf.logs = []LogEntry{}\n\t\treturn\n\t}\n\t// Your code here (2C).\n\tr := bytes.NewBuffer(data)\n\td := labgob.NewDecoder(r)\n\tvar currentTerm int\n\tvar votedFor int\n\tvar logs []LogEntry\n\tif d.Decode(&currentTerm) != nil 
||\n\t\td.Decode(&votedFor) != nil ||\n\t\td.Decode(&logs) != nil {\n\t\tlog.Panic(\"Panic decoding persistent state\")\n\t} else {\n\t\trf.currentTerm = currentTerm\n\t\trf.votedFor = votedFor\n\t\trf.logs = logs\n\t}\n}", "func (m *TeamworkSoftwareUpdateHealth) GetAdditionalData()(map[string]any) {\n val , err := m.backingStore.Get(\"additionalData\")\n if err != nil {\n panic(err)\n }\n if val == nil {\n var value = make(map[string]any);\n m.SetAdditionalData(value);\n }\n return val.(map[string]any)\n}", "func (t *MedChain) getAllHospital(stub shim.ChaincodeStubInterface, args []string) peer.Response {\n\t\t// ==== Input sanitation ====\n\t\tfmt.Println(\"- start getAllHospital\")\n\n\t\tAssetType := \"Hospital\"\n\n\t\tqueryString := fmt.Sprintf(\"SELECT valueJson FROM <STATE> WHERE json_extract(valueJson, '$.AssetType') = '%s'\", AssetType)\n\t\t\n\t\tqueryResults, err := getQueryResultForQueryString(stub, queryString)\n\t\t\n\t\tif err != nil {\n\t\t\treturn shim.Error(err.Error())\n\t\t}\n\t\t\n\t\treturn shim.Success(queryResults)\n\t}", "func (rf *Raft) readPersist(data []byte) {\n\t// Your code here.\n\t// Example:\n\tr := bytes.NewBuffer(data)\n\td := gob.NewDecoder(r)\n\td.Decode(&rf.currentTerm)\n\td.Decode(&rf.votedFor)\n\td.Decode(&rf.log)\n\td.Decode(&rf.lastIncludedIndex)\n}", "func (gameController GameController) Read(context *gin.Context) {\r\n\tvar game models.Game\r\n\tif err := gameController.Database.Preload(\"Players\").Preload(\"Turns\").First(&game, context.Param(\"game_id\")).Error; err != nil {\r\n\t\tcontext.JSON(http.StatusNotFound, gin.H{\"error\": \"Record not found!\"})\r\n\t\treturn\r\n\t}\r\n\tcontext.JSON(http.StatusOK, game)\r\n}", "func (s *UserService) MyInfo(ctx context.Context, options ...MyInfoOption) (*User, *Response, error) {\n\treq, err := s.client.NewRequest(http.MethodGet, \"users/@me\")\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tq := req.URL.Query()\n\tfor _, o := range options 
{\n\t\to.myInfoApply(&q)\n\t}\n\treq.URL.RawQuery = q.Encode()\n\n\tu := new(User)\n\tresp, err := s.client.Do(ctx, req, u)\n\tif err != nil {\n\t\treturn nil, resp, err\n\t}\n\n\treturn u, resp, nil\n}", "func (m *OrganizationManager) Read(id string, opts ...RequestOption) (o *Organization, err error) {\n\terr = m.Request(\"GET\", m.URI(\"organizations\", id), &o, opts...)\n\treturn\n}", "func getFullProfile(client *http.Client, optional ...string) (FacebookPublicProfile, error) {\n\turl := getAPIUrl(\"/me?fields=name,locale,age_range,gender\")\n\tif len(optional) == 1 {\n\t\turl = optional[0]\n\t}\n\tresp, err := client.Get(url)\n\tdefer resp.Body.Close()\n\tdecoder := json.NewDecoder(resp.Body)\n\tvar profile FacebookPublicProfile\n\terr = decoder.Decode(&profile)\n\tif err != nil {\n\t\treturn FacebookPublicProfile{}, err\n\t}\n\tif resp.StatusCode != 200 {\n\t\treturn FacebookPublicProfile{}, errors.New(\"Unathorized. Check token.\")\n\t}\n\treturn profile, nil\n}", "func (rf *Raft) readPersist(data []byte) {\n\tif data == nil || len(data) < 1 { // bootstrap without any state?\n\t\treturn\n\t}\n\t// Your code here (2C).\n\tr := bytes.NewBuffer(data)\n\td := gob.NewDecoder(r)\n\td.Decode(&rf.currentTerm)\n\td.Decode(&rf.votedFor)\n\td.Decode(&rf.logs)\n}", "func ForPersonalLoans(httpCrawlService func(string, string, int) ([]byte, common.CustomError), baseURL string, page int, accumulator []personalloan.Entity) (*[]personalloan.Entity, common.CustomError) {\n\n\tfmt.Println(\"Start crawl personal loans for\", baseURL, page)\n\n\tbody, crawlErr := httpCrawlService(baseURL, \"products-services/v1/personal-loans\", page)\n\n\tif crawlErr != nil {\n\t\tfmt.Println(crawlErr)\n\t}\n\n\tjsonData := &personalLoanJSON{}\n\n\tmetaInfo := &MetaInfoJSON{}\n\tjson.Unmarshal(body, &metaInfo)\n\n\tjsonUnmarshallErr := json.Unmarshal(body, &jsonData)\n\n\tif jsonUnmarshallErr != nil {\n\t\tfmt.Printf(\"Error crawl personal loans for %s %s %s\", baseURL, strconv.Itoa(page), 
jsonUnmarshallErr)\n\t\treturn nil, common.NewInternalServerError(\"Unable to unmarshall data\", jsonUnmarshallErr)\n\t}\n\n\titems := accumulator\n\n\tfor i := range jsonData.Data.Brand.Companies {\n\t\tcompany := jsonData.Data.Brand.Companies[i]\n\t\tresult := company.PersonalLoans\n\t\titems = append(items, result...)\n\t}\n\n\tif metaInfo.Meta.TotalPages > page {\n\t\treturn ForPersonalLoans(httpCrawlService, baseURL, page+1, items)\n\t}\n\n\tfmt.Println(\"End crawl personal loans for\", baseURL, page)\n\n\treturn &items, nil\n\n}", "func (repo *Repository) Read(ctx context.Context, claims auth.Claims, req UserAccountReadRequest) (*UserAccount, error) {\n\tspan, ctx := tracer.StartSpanFromContext(ctx, \"internal.user_account.Read\")\n\tdefer span.Finish()\n\n\t// Validate the request.\n\tv := webcontext.Validator()\n\terr := v.Struct(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Filter base select query by ID\n\tquery := selectQuery()\n\tquery.Where(query.And(\n\t\tquery.Equal(\"user_id\", req.UserID),\n\t\tquery.Equal(\"account_id\", req.AccountID)))\n\n\tres, err := find(ctx, claims, repo.DbConn, query, []interface{}{}, req.IncludeArchived)\n\tif err != nil {\n\t\treturn nil, err\n\t} else if res == nil || len(res) == 0 {\n\t\terr = errors.WithMessagef(ErrNotFound, \"entry for user %s account %s not found\", req.UserID, req.AccountID)\n\t\treturn nil, err\n\t}\n\tu := res[0]\n\n\treturn u, nil\n}", "func (o *Object) readMetaData(ctx context.Context) (err error) {\n\tif !o.modTime.IsZero() {\n\t\treturn nil\n\t}\n\t// Last resort\n\treturn o.readEntryAndSetMetadata(ctx)\n}", "func (p *Personal) One(ctx context.Context, id string) (*PersonalData, error) {\n\tusr, err := p.DB.One(ctx, id)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"could not select one personal data\")\n\t}\n\treturn usr, nil\n}", "func (_UserCrud *UserCrudCallerSession) GetUser(userAddress common.Address) (struct {\n\tUserEmail string\n\tUserAge *big.Int\n\tIndex 
*big.Int\n}, error) {\n\treturn _UserCrud.Contract.GetUser(&_UserCrud.CallOpts, userAddress)\n}", "func (m MariaDB) One(ctx context.Context, id string) (entity.PersonalData, error) {\n\tsqlQuery := \"SELECT * FROM person WHERE id = ?\"\n\tvar p personalData\n\n\trow := m.Person.QueryRowContext(ctx, sqlQuery, id)\n\terr := row.Scan(&p.ID, &p.Name, &p.LastName, &p.Phone, &p.Email, &p.YearOfBirth)\n\tif err != nil {\n\t\treturn entity.PersonalData{}, errors.Wrap(err, \"could not scan row\")\n\t}\n\n\treturn p.transmit(), nil\n}", "func (rf *Raft) readPersist(data []byte) {\n if data == nil || len(data) < 1 { // bootstrap without any state?\n return\n }\n r := bytes.NewBuffer(data)\n d := labgob.NewDecoder(r)\n var currentTerm int\n var votedFor int\n var log []Entry\n var snapshottedIndex int\n\n if d.Decode(&currentTerm) != nil {\n DErrPrintf(\"read currentTerm error\")\n return\n }\n if d.Decode(&votedFor) != nil {\n DErrPrintf(\"read votedFor error\")\n return\n }\n if d.Decode(&log) != nil {\n DErrPrintf(\"read log entries error\")\n return\n }\n\n if d.Decode(&snapshottedIndex) != nil {\n DErrPrintf(\"read snapshottedIndex error\")\n return\n }\n\n rf.currentTerm = currentTerm\n rf.votedFor = votedFor\n rf.log = log\n rf.snapshottedIndex = snapshottedIndex\n rf.commitIndex = snapshottedIndex\n rf.lastApplied = snapshottedIndex\n DLCPrintf(\"Read Server(%d) state(currentTerm=%d, votedFor=%d, logLength=%d) from persister done\", rf.me, rf.currentTerm, rf.votedFor, len(rf.log))\n}", "func UserReadByToken(c *gin.Context) {\n\tuserFound := session.GetUserByToken(c.Query(\"token\"))\n\tif userFound.ID == 0 {\n\t\tc.JSON(422, gin.H{\n\t\t\t\"error\": \"not found\",\n\t\t})\n\t} else {\n\t\tc.JSON(200, gin.H{\n\t\t\t\"data\": userFound,\n\t\t})\n\t}\n}", "func (p *UserStoreClient) GetUser(ctx context.Context, authenticationToken string) (r *User, err error) {\n var _args13 UserStoreGetUserArgs\n _args13.AuthenticationToken = authenticationToken\n var _result14 
UserStoreGetUserResult\n if err = p.Client_().Call(ctx, \"getUser\", &_args13, &_result14); err != nil {\n return\n }\n switch {\n case _result14.UserException!= nil:\n return r, _result14.UserException\n case _result14.SystemException!= nil:\n return r, _result14.SystemException\n }\n\n return _result14.GetSuccess(), nil\n}", "func (t *SimpleChaincode) query(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar dni string // Entities\n\tvar err error\n\tfmt.Println(\"Metodo consultar\")\n\tif len(args) != 1 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting name of the person to query\")\n\t}\n\n\tdni = args[0]\n\n\t// Get the state from the ledger\n\tstoredValue, err := stub.GetState(dni)\n\tif err != nil {\n\t\tjsonResp := \"{\\\"Error\\\":\\\"Failed to get state for \" + dni + \"\\\"}\"\n\t\treturn shim.Error(jsonResp)\n\t}\n\n\treturn shim.Success(storedValue)\n}", "func (e Employee) GetInformation() Employee {\n\n\tif runtime.GOOS == \"windows\" {\n\t\tfmt.Print(\"Please Provide Your Personal Information.\")\n\t\t_, _ = reader.ReadString('\\n')\n\t} else {\n\t\tfmt.Print(\"Please Provide Your Personal Information.\\n\")\n\t}\n\tfmt.Print(\"\\nYour Fullname: \")\n\te.Name, _ = reader.ReadString('\\n')\n\n\tfmt.Print(\"Your Current Address: \")\n\te.Address, _ = reader.ReadString('\\n')\n\n\tfmt.Print(\"Your Contact #: \")\n\te.ContactNumber, _ = reader.ReadString('\\n')\n\n\tfmt.Print(\"Your Email Address: \")\n\te.EmailAddress, _ = reader.ReadString('\\n')\n\n\tfmt.Print(\"Company's Name / Employer's Name: \")\n\te.Company, _ = reader.ReadString('\\n')\n\n\tfmt.Print(\"Office's Address / Location: \")\n\te.CompanyAddress, _ = reader.ReadString('\\n')\n\n\tfmt.Print(\"Your Current Position: \")\n\te.Position, _ = reader.ReadString('\\n')\n\n\tfmt.Print(\"No. 
of Years of Stay: \")\n\te.YrsOfStay, _ = reader.ReadString('\\n')\n\n\tfmt.Print(\"Your Director's Name (Including Salutations): \")\n\te.DirectorName, _ = reader.ReadString('\\n')\n\n\tfmt.Print(\"Your Resignation's Effectivity Date: \")\n\te.EffectiveDate, _ = reader.ReadString('\\n')\n\n\te = e.CleanInfo()\n\n\te.Validate()\n\n\treturn e\n}", "func (sh *SimHandlerState) readSiminfo(c *mango.Context) *SimInfo {\n\tsiminfo := NewSimInfo(c.RouteParams[\"sim_id\"])\n\tobjUpdater := sh.ListHandlerState.FileManager.Get(siminfo.Filepath())\n\terr := objUpdater.Read(siminfo)\n\tif err != nil {\n\t\tc.Error(err.Error(), http.StatusInternalServerError)\n\t\treturn nil\n\t}\n\treturn siminfo\n}", "func (c *productsServicesInterface) GetPersonalLoans(id string, page int) ([]personalloan.Entity, *subentities.Pagination, common.CustomError) {\n\treturn c.personalLoanService.FindByInstitution(id, page)\n}" ]
[ "0.6508357", "0.6502102", "0.592265", "0.55418086", "0.5489267", "0.5150009", "0.5091859", "0.5085893", "0.50467646", "0.50467646", "0.5016166", "0.498452", "0.49298856", "0.49238548", "0.48962358", "0.4871131", "0.4861762", "0.4857159", "0.48419893", "0.4817505", "0.4797317", "0.47963715", "0.4781821", "0.47694114", "0.47678044", "0.4744087", "0.47388536", "0.4737602", "0.47373936", "0.4718095", "0.4711589", "0.4701952", "0.468709", "0.46817222", "0.46706542", "0.46643656", "0.4660306", "0.46602857", "0.46560505", "0.46497944", "0.4647095", "0.46424717", "0.46371058", "0.46129483", "0.46045145", "0.46015546", "0.45877683", "0.45811814", "0.4565936", "0.45487854", "0.45448998", "0.45415306", "0.45415306", "0.45415306", "0.45415306", "0.4538954", "0.45364928", "0.45364603", "0.4535242", "0.45351353", "0.45291063", "0.45273378", "0.45209852", "0.45203117", "0.45176604", "0.45144606", "0.45144096", "0.45037606", "0.4502188", "0.45017287", "0.44986454", "0.4496086", "0.4494659", "0.44944745", "0.44920227", "0.44915265", "0.44914788", "0.44913852", "0.44797808", "0.44766578", "0.44741523", "0.4472308", "0.4468513", "0.44618624", "0.44529867", "0.44517663", "0.44511926", "0.44420078", "0.4437715", "0.44376287", "0.4437513", "0.44371185", "0.4434899", "0.4434711", "0.44200188", "0.44168034", "0.44058895", "0.44055843", "0.44049045", "0.44030318" ]
0.82387173
0
DefaultStrictUpdateHealthMenstruationPersonalInfo clears first level 1:many children and then executes a gorm update call
func DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) { if in == nil { return nil, fmt.Errorf("Nil argument to DefaultStrictUpdateHealthMenstruationPersonalInfo") } ormObj, err := in.ToORM(ctx) if err != nil { return nil, err } lockedRow := &HealthMenstruationPersonalInfoORM{} db.Model(&ormObj).Set("gorm:query_option", "FOR UPDATE").Where("id=?", ormObj.Id).First(lockedRow) if hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateCleanup); ok { if db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil { return nil, err } } if hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateSave); ok { if db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil { return nil, err } } if err = db.Save(&ormObj).Error; err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterStrictUpdateSave); ok { if err = hook.AfterStrictUpdateSave(ctx, db); err != nil { return nil, err } } pbResponse, err := ormObj.ToPB(ctx) if err != nil { return nil, err } return &pbResponse, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultPatchHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationPersonalInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationPersonalInfo(ctx, &HealthMenstruationPersonalInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationPersonalInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultListHealthMenstruationPersonalInfo(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationPersonalInfo, error) {\n\tin := HealthMenstruationPersonalInfo{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil 
{\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationPersonalInfoORM{}, &HealthMenstruationPersonalInfo{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationPersonalInfoORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationPersonalInfo{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx context.Context, patchee *HealthMenstruationPersonalInfo, patcher *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" 
{\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"PeriodLengthInDays\" {\n\t\t\tpatchee.PeriodLengthInDays = patcher.PeriodLengthInDays\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CycleLengthInDays\" {\n\t\t\tpatchee.CycleLengthInDays = patcher.CycleLengthInDays\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultPatchSetHealthMenstruationPersonalInfo(ctx context.Context, objects []*HealthMenstruationPersonalInfo, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationPersonalInfo, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationPersonalInfo, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationPersonalInfo(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func (duo *DoctorinfoUpdateOne) ClearEducationlevel() *DoctorinfoUpdateOne {\n\tduo.mutation.ClearEducationlevel()\n\treturn duo\n}", "func DefaultReadHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, 
&HealthMenstruationPersonalInfoORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationPersonalInfoORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationPersonalInfoORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (du *DoctorinfoUpdate) ClearEducationlevel() *DoctorinfoUpdate {\n\tdu.mutation.ClearEducationlevel()\n\treturn du\n}", "func DefaultStrictUpdateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationDailyEntry\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationDailyEntryORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != 
nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func DefaultCreateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (puo *ProductUpdateOne) ClearPersonal() *ProductUpdateOne {\n\tpuo.mutation.ClearPersonal()\n\treturn puo\n}", "func (pu *ProductUpdate) ClearPersonal() *ProductUpdate {\n\tpu.mutation.ClearPersonal()\n\treturn pu\n}", "func (client ModelClient) UpdateHierarchicalEntityResponder(resp *http.Response) (result OperationStatus, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func DefaultPatchHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationDailyEntry\n\tvar err error\n\tif hook, ok := 
interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationDailyEntry(ctx, &HealthMenstruationDailyEntry{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationDailyEntry(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationDailyEntryWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func (m *HealthMenstruationPersonalInfo) ToORM(ctx context.Context) (HealthMenstruationPersonalInfoORM, error) {\n\tto := HealthMenstruationPersonalInfoORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, 
err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tto.ProfileId = m.ProfileId\n\tto.PeriodLengthInDays = m.PeriodLengthInDays\n\tto.CycleLengthInDays = m.CycleLengthInDays\n\tif posthook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (client ModelClient) UpdateHierarchicalEntityChildResponder(resp *http.Response) (result OperationStatus, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (uuo *UserUpdateOne) ClearChildren() *UserUpdateOne {\n\tuuo.mutation.ClearChildren()\n\treturn uuo\n}", "func (o CMFAdminMenuSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfAdminMenuPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_admin_menu` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfAdminMenuPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != 
nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfAdminMenu slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfAdminMenu\")\n\t}\n\treturn rowsAff, nil\n}", "func (o CMFUserSuperSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfUserSuperPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_user_super` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfUserSuperPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfUserSuper slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfUserSuper\")\n\t}\n\treturn rowsAff, nil\n}", "func (m MariaDB) Update(ctx context.Context, ep entity.PersonalData) (int64, error) {\n\tp := receive(ep)\n\tsqlQuery := \"UPDATE person SET name=?, last_name=?, phone=?, email=?, year_od_birth=? 
where id= ?\"\n\n\trslt, err := m.Person.ExecContext(ctx, sqlQuery, p.Name, p.LastName, p.Phone, p.Email, p.YearOfBirth, p.ID)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"could not update data\")\n\t}\n\tcount, err := rslt.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"rows are not affected\")\n\t}\n\treturn count, nil\n}", "func (client ModelClient) UpdateHierarchicalEntityChildSender(req *http.Request) (*http.Response, error) {\n\treturn autorest.SendWithSender(client, req,\n\t\tautorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))\n}", "func (db *DataBase) UpdatePlayerPersonalInfo(userID int32, user *models.UserPrivateInfo) (err error) {\n\tvar (\n\t\tconfirmedUser *models.UserPrivateInfo\n\t\ttx *sql.Tx\n\t)\n\n\tif tx, err = db.Db.Begin(); err != nil {\n\t\treturn\n\t}\n\tdefer tx.Rollback()\n\n\tif confirmedUser, err = db.getPrivateInfo(tx, userID); err != nil {\n\t\treturn\n\t}\n\n\tconfirmedUser.Update(user)\n\n\tif err = db.updatePlayerPersonalInfo(tx, user); err != nil {\n\t\treturn\n\t}\n\n\terr = tx.Commit()\n\treturn\n}", "func (hs *HealthStatusInfo) UpdateHealthInfo(details bool, registeredESS uint32, storedObjects uint32) {\n\ths.lock()\n\tdefer hs.unLock()\n\n\tHealthUsageInfo.RegisteredESS = registeredESS\n\tHealthUsageInfo.StoredObjects = storedObjects\n\n\tDBHealth.DBStatus = Green\n\ttimeSinceLastError := uint64(0)\n\tif DBHealth.DBReadFailures != 0 || DBHealth.DBWriteFailures != 0 {\n\t\ttimeSinceLastError = uint64(time.Since(DBHealth.lastReadWriteErrorTime).Seconds())\n\t\tDBHealth.TimeSinceLastReadWriteError = timeSinceLastError\n\t}\n\tif DBHealth.DisconnectedFromDB {\n\t\tDBHealth.DBStatus = Red\n\t} else if DBHealth.DBReadFailures != 0 || DBHealth.DBWriteFailures != 0 {\n\t\tif timeSinceLastError < uint64(Configuration.ResendInterval*12) {\n\t\t\tDBHealth.DBStatus = Red\n\t\t} else if timeSinceLastError < uint64(Configuration.ResendInterval*60) 
{\n\t\t\tDBHealth.DBStatus = Yellow\n\t\t}\n\t}\n\n\tMQTTHealth.MQTTConnectionStatus = Green\n\tif Configuration.CommunicationProtocol != HTTPProtocol {\n\t\ttimeSinceLastSubError := uint64(0)\n\t\tif MQTTHealth.SubscribeFailures != 0 {\n\t\t\ttimeSinceLastSubError = uint64(time.Since(MQTTHealth.lastSubscribeErrorTime).Seconds())\n\t\t\tMQTTHealth.TimeSinceLastSubscribeError = timeSinceLastSubError\n\t\t}\n\t\ttimeSinceLastPubError := uint64(0)\n\t\tif MQTTHealth.PublishFailures != 0 {\n\t\t\ttimeSinceLastPubError = uint64(time.Since(MQTTHealth.lastPublishErrorTime).Seconds())\n\t\t\tMQTTHealth.TimeSinceLastPublishError = timeSinceLastPubError\n\t\t}\n\t\tif MQTTHealth.DisconnectedFromMQTTBroker {\n\t\t\tMQTTHealth.MQTTConnectionStatus = Red\n\t\t} else {\n\t\t\tif MQTTHealth.SubscribeFailures != 0 {\n\t\t\t\tif timeSinceLastSubError < uint64(Configuration.ResendInterval*12) {\n\t\t\t\t\tMQTTHealth.MQTTConnectionStatus = Red\n\t\t\t\t} else if timeSinceLastSubError < uint64(Configuration.ResendInterval*60) {\n\t\t\t\t\tMQTTHealth.MQTTConnectionStatus = Yellow\n\t\t\t\t}\n\t\t\t}\n\t\t\tif MQTTHealth.PublishFailures != 0 && MQTTHealth.MQTTConnectionStatus == Green &&\n\t\t\t\ttimeSinceLastPubError < uint64(Configuration.ResendInterval*12) {\n\t\t\t\tMQTTHealth.MQTTConnectionStatus = Yellow\n\t\t\t}\n\t\t}\n\t}\n\n\tif DBHealth.DBStatus == Red || MQTTHealth.MQTTConnectionStatus == Red {\n\t\ths.HealthStatus = Red\n\t} else if DBHealth.DBStatus == Yellow || MQTTHealth.MQTTConnectionStatus == Yellow {\n\t\ths.HealthStatus = Yellow\n\t} else {\n\t\ths.HealthStatus = Green\n\t}\n\n\ths.UpTime = uint64(time.Since(hs.startTime).Seconds())\n\n\tif !details {\n\t\treturn\n\t}\n\n\tif Configuration.CommunicationProtocol != HTTPProtocol {\n\t\tMQTTHealth.LastDisconnectFromBrokerDuration = hs.GetLastDisconnectFromBrokerDuration()\n\t}\n\tDBHealth.LastDisconnectFromDBDuration = hs.GetLastDisconnectFromDBDuration()\n}", "func (o TreeSlice) UpdateAll(ctx context.Context, exec 
boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), treePrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"trees\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, treePrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in tree slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all tree\")\n\t}\n\treturn rowsAff, nil\n}", "func (client ModelClient) UpdateHierarchicalEntitySender(req *http.Request) (*http.Response, error) {\n\treturn autorest.SendWithSender(client, req,\n\t\tautorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))\n}", "func DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx context.Context, patchee *HealthMenstruationDailyEntry, patcher *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == 
nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Day\" {\n\t\t\tpatchee.Day = patcher.Day\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"IntensityPercentage\" {\n\t\t\tpatchee.IntensityPercentage = patcher.IntensityPercentage\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Type\" {\n\t\t\tpatchee.Type = patcher.Type\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Manual\" {\n\t\t\tpatchee.Manual = patcher.Manual\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"BasedOnPrediction\" {\n\t\t\tpatchee.BasedOnPrediction = patcher.BasedOnPrediction\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func (storage *HippoStorage) UpdateChild(hashKey string) {\n\tvar (\n\t\thas bool\n\t)\n\t_, has = storage.verified.Load(hashKey)\n\tif !has {\n\t\treturn\n\t}\n\tchild, has := storage.child.Load(hashKey)\n\tif !has {\n\t\tblock, _ := storage.Get(hashKey)\n\t\tstorage.TryUpdateMaxLevel(block.GetLevel())\n\t\treturn\n\t}\n\tchildList := child.([]string)\n\tfor _, childHash := range childList {\n\t\tstorage.UpdateVerified(childHash)\n\t\tstorage.UpdateChild(childHash)\n\t}\n\n\treturn\n}", "func (q cmfUserSuperQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for cmf_user_super\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, 
errors.Wrap(err, \"models: unable to retrieve rows affected for cmf_user_super\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (m *HealthMenstruationPersonalInfoORM) ToPB(ctx context.Context) (HealthMenstruationPersonalInfo, error) {\n\tto := HealthMenstruationPersonalInfo{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tto.PeriodLengthInDays = m.PeriodLengthInDays\n\tto.CycleLengthInDays = m.CycleLengthInDays\n\tif posthook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (q cmfAdminMenuQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for cmf_admin_menu\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for cmf_admin_menu\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (t *MedChain) updateHospital(stub shim.ChaincodeStubInterface, args []string) peer.Response {\n\t\t// ==== Input sanitation ====\n\t\tfmt.Println(\"- start updateHospital\")\n\n\t\t// check if all the args are send\n\t\tif len(args) != 4 {\n\t\t\treturn shim.Error(\"Incorrect number of arguments, Required 4 arguments\")\n\t\t}\n\n\t\t// check if the args are empty\n\t\tfor i := 0; i < len(args); i++ {\n\t\t\tif len(args[i]) <= 0 
{\n\t\t\t\treturn shim.Error(\"argument \"+ string(i+1) + \" must be a non-empty string\")\n\t\t\t}\n\t\t}\n\n\t\tgetAssetAsBytes, errT := stub.GetState(args[0])\n\n\t\tif errT != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Error : Cannot find Hospital %s\" , errT))\n\t\t}\n\n\t\tif getAssetAsBytes == nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Cannot find asset Hospital with ID %s\" , args[0]))\n\t\t}\n\n\t\tvar obj = Hospital{}\n\n\t\tjson.Unmarshal(getAssetAsBytes, &obj)\n\t\tobj.HospitalName = args[1]\n\t\tobj.HospitalAddress = args[2]\n\t\tobj.HospitalPhone = args[3]\n\t\tcomAssetAsBytes, errMarshal := json.Marshal(obj)\n\n\t\tif errMarshal != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Marshal Error: %s\", errMarshal))\n\t\t}\n\n\t\terrPut := stub.PutState(obj.Hospital_ID, comAssetAsBytes)\n\n\t\tif errPut != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Failed to update Hospital with ID %s\", args[0]))\n\t\t}\n\n\t\tfmt.Println(\"Hospital asset with ID %s was updated \\n %v\", args[0], obj)\n\n\t\treturn shim.Success(comAssetAsBytes)\n\t}", "func DefaultPatchSetHealthMenstruationDailyEntry(ctx context.Context, objects []*HealthMenstruationDailyEntry, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationDailyEntry, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationDailyEntry, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationDailyEntry(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func DefaultStrictUpdateUserInfo(ctx context.Context, in *UserInfo, db *gorm.DB) (*UserInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateUserInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != 
nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &UserInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func (u updateCachedUploadRevision) apply(data *journalPersist) {\n\tc := data.CachedRevisions[u.Revision.ParentID.String()]\n\tc.Revision = u.Revision\n\tif u.SectorIndex == len(c.MerkleRoots) {\n\t\tc.MerkleRoots = append(c.MerkleRoots, u.SectorRoot)\n\t} else if u.SectorIndex < len(c.MerkleRoots) {\n\t\tc.MerkleRoots[u.SectorIndex] = u.SectorRoot\n\t} else {\n\t\t// Shouldn't happen. 
TODO: Add correct error handling.\n\t}\n\tdata.CachedRevisions[u.Revision.ParentID.String()] = c\n}", "func (e *Department) Reload() error { return ent.ReloadEnt(e) }", "func DefaultListHealthMenstruationDailyEntry(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationDailyEntry, error) {\n\tin := HealthMenstruationDailyEntry{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationDailyEntryORM{}, &HealthMenstruationDailyEntry{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationDailyEntryORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationDailyEntry{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func (l *LifeSystem) clearEntitiesStatus() {\n\tfor _, entity := range l.entities {\n\t\tentity.updated = false\n\t}\n}", "func (guo *GroupUpdateOne) ClearInfo() *GroupUpdateOne {\n\tguo.mutation.ClearInfo()\n\treturn 
guo\n}", "func (o CMFFamiliesPolicySlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfFamiliesPolicyPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_families_policies` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfFamiliesPolicyPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfFamiliesPolicy slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfFamiliesPolicy\")\n\t}\n\treturn rowsAff, nil\n}", "func (m *AreaMutation) ClearLevel() {\n\tm.clearedlevel = true\n}", "func (m *AreaMutation) ClearLevel() {\n\tm.clearedlevel = true\n}", "func (hc *LegacyHealthCheckImpl) updateHealth(ts *LegacyTabletStats, conn queryservice.QueryService) {\n\t// Unconditionally send the received update at the end.\n\tdefer func() {\n\t\tif hc.listener != nil {\n\t\t\thc.listener.StatsUpdate(ts)\n\t\t}\n\t}()\n\n\thc.mu.Lock()\n\tth, ok := hc.addrToHealth[ts.Key]\n\tif !ok {\n\t\t// This 
can happen on delete because the entry is removed first,\n\t\t// or if LegacyHealthCheckImpl has been closed.\n\t\thc.mu.Unlock()\n\t\treturn\n\t}\n\toldts := th.latestTabletStats\n\tth.latestTabletStats = *ts\n\tth.conn = conn\n\thc.mu.Unlock()\n\n\t// In the case where a tablet changes type (but not for the\n\t// initial message), we want to log it, and maybe advertise it too.\n\tif oldts.Target.TabletType != topodatapb.TabletType_UNKNOWN && oldts.Target.TabletType != ts.Target.TabletType {\n\t\t// Log and maybe notify\n\t\tlog.Infof(\"HealthCheckUpdate(Type Change): %v, tablet: %s, target %+v => %+v, reparent time: %v\",\n\t\t\toldts.Name, topotools.TabletIdent(oldts.Tablet), topotools.TargetIdent(oldts.Target), topotools.TargetIdent(ts.Target), ts.TabletExternallyReparentedTimestamp)\n\t\tif hc.listener != nil && hc.sendDownEvents {\n\t\t\toldts.Up = false\n\t\t\thc.listener.StatsUpdate(&oldts)\n\t\t}\n\n\t\t// Track how often a tablet gets promoted to master. It is used for\n\t\t// comparing against the variables in go/vtgate/buffer/variables.go.\n\t\tif oldts.Target.TabletType != topodatapb.TabletType_MASTER && ts.Target.TabletType == topodatapb.TabletType_MASTER {\n\t\t\thcMasterPromotedCounters.Add([]string{ts.Target.Keyspace, ts.Target.Shard}, 1)\n\t\t}\n\t}\n}", "func (q authUserUserPermissionQuery) UpdateAllP(cols M) {\n\tif err := q.UpdateAll(cols); err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n}", "func DefaultReadHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationDailyEntryORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationDailyEntryORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationDailyEntryORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (t *HeathCare_Chaincode) modifyMedicalData(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tfmt.Println(\"\\n=============== start modifyMedicalData function ===============\")\n\tstart := time.Now()\n\ttime.Sleep(time.Second)\n\n\tvar jsonResp string\n\n\tif len(args) != 8 {\n\t\treturn shim.Error(\"expecting 4 argument\")\n\t}\n\n\t//define identity of query-er and new value of medical record\n\tuserid := args[0]\n\tpatientid := args[1]\n\tlocation := args[2]\n\tcollection := args[3]\n\n\tnewPersonalIdentificationInformation := args[4]\n\tnewMedicalHistory := args[5]\n\tnewFamilyMedicalHistory := args[6]\n\tnewMedicationHistory := args[7]\n\tnewTreatmentHistory := args[8]\n\tnewMedicalDirectives := args[9]\n\ttimeQuery := time.Now().String()\n\n\t//get user identity before query\n\tuserIdentityAsBytes, errUserIdentityAsByte := stub.GetPrivateData(collection, userid)\n\tif errUserIdentityAsByte != nil {\n\t\treturn shim.Error(\"cannot get user identity\")\n\t} else if userIdentityAsBytes == nil {\n\t\treturn shim.Error(\"user does not exist\")\n\t}\n\n\t//create query object with purpose: modify\n\tobjectType := \"Query\"\n\tquery := &Query{objectType, userid, patientid, location, timeQuery, 
\"modify\"}\n\tqueryAsByte, errQueryAsByte := json.Marshal(query)\n\tif errQueryAsByte != nil {\n\t\treturn shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//save to database\n\terrQueryAsByte = stub.PutPrivateData(\"modifyCollection\", userid, queryAsByte)\n\tif errQueryAsByte != nil {\n\t\treturn shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//create index key\n\tindexName := \"userid~patientid\"\n\tqueryIndexKey, errQueryIndexKey := stub.CreateCompositeKey(indexName, []string{query.UserID, query.PatientID, query.Location, query.Purpose})\n\tif errQueryIndexKey != nil {\n\t\treturn shim.Error(errQueryIndexKey.Error())\n\t}\n\n\t//save index\n\tvalue := []byte{0x00}\n\tstub.PutPrivateData(\"modifyCollection\", queryIndexKey, value)\n\n\t//get medical record data\n\tmedicalRecordAsBytes, errMedicalRecordAsByte := stub.GetPrivateData(\"MedicalRecordCollection\", patientid)\n\tif errMedicalRecordAsByte != nil {\n\t\tjsonResp = \"{\\\"Error\\\":\\\"Failed to get state for \" + patientid + \": \" + errMedicalRecordAsByte.Error() + \"\\\"}\"\n\t\treturn shim.Error(jsonResp)\n\t} else if errMedicalRecordAsByte == nil {\n\t\treturn shim.Error(\"patient's data does not exist\")\n\t}\n\n\t//convert data of patient to json\n\tmedicalRecord := &MedicalRecord{}\n\terrMedicalRecordAsByte = json.Unmarshal(medicalRecordAsBytes, medicalRecord)\n\n\t//change data\n\tmedicalRecord.PersonalIdentificationInformation = newPersonalIdentificationInformation\n\tmedicalRecord.MedicalHistory = newMedicalHistory\n\tmedicalRecord.FamilyMedicalHistory = newFamilyMedicalHistory\n\tmedicalRecord.MedicationHistory = newMedicationHistory\n\tmedicalRecord.TreatmentHistory = newTreatmentHistory\n\tmedicalRecord.MedicalDirectives = newMedicalDirectives\n\n\t//convert new medical record data to byte\n\tnewMedicalRecordAsByte, errNewMedicalRecordAsByte := json.Marshal(medicalRecord)\n\n\t//store new data\n\terrNewMedicalRecordAsByte = stub.PutPrivateData(\"MedicalRecordCollection\", patientid, 
newMedicalRecordAsByte)\n\tif errNewMedicalRecordAsByte != nil {\n\t\treturn shim.Error(\"cannot save new medical record's data\")\n\t}\n\n\tend := time.Now()\n\telapsed := time.Since(start)\n\tfmt.Println(\"function modifyMedicalData\")\n\tfmt.Println(\"time start: \", start.String())\n\tfmt.Println(\"time end: \", end.String())\n\tfmt.Println(\"time execute: \", elapsed.String())\n\tfmt.Println(\"=============== end modifyMedicalData function ===============\")\n\n\treturn shim.Success(nil)\n}", "func (hc *HospitalCreate) Save(ctx context.Context) (*Hospital, error) {\n\tif _, ok := hc.mutation.Hospital(); !ok {\n\t\treturn nil, &ValidationError{Name: \"hospital\", err: errors.New(\"ent: missing required field \\\"hospital\\\"\")}\n\t}\n\tvar (\n\t\terr error\n\t\tnode *Hospital\n\t)\n\tif len(hc.hooks) == 0 {\n\t\tnode, err = hc.sqlSave(ctx)\n\t} else {\n\t\tvar mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {\n\t\t\tmutation, ok := m.(*HospitalMutation)\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected mutation type %T\", m)\n\t\t\t}\n\t\t\thc.mutation = mutation\n\t\t\tnode, err = hc.sqlSave(ctx)\n\t\t\tmutation.done = true\n\t\t\treturn node, err\n\t\t})\n\t\tfor i := len(hc.hooks) - 1; i >= 0; i-- {\n\t\t\tmut = hc.hooks[i](mut)\n\t\t}\n\t\tif _, err := mut.Mutate(ctx, hc.mutation); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn node, err\n}", "func (q treeQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for trees\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for trees\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (o AuthUserUserPermissionSlice) UpdateAllP(exec boil.Executor, cols M) {\n\tif err := 
o.UpdateAll(exec, cols); err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n}", "func (uuo *UserUpdateOne) ClearPersonalEmail() *UserUpdateOne {\n\tuuo.mutation.ClearPersonalEmail()\n\treturn uuo\n}", "func (o CMFUserExperienceLogSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfUserExperienceLogPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_user_experience_log` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfUserExperienceLogPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfUserExperienceLog slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfUserExperienceLog\")\n\t}\n\treturn rowsAff, nil\n}", "func (m *Manager) UpdateAuditLogDetail(ald *AuditLogDetail) error {\n\n\tald.UpdatedAt = mysql.NullTime{Valid: true, Time: time.Now()}\n\n\tfunc(in interface{}) {\n\t\tif ii, ok := in.(initializer.Simple); ok {\n\t\t\tii.Initialize()\n\t\t}\n\t}(ald)\n\n\t_, err := 
m.GetWDbMap().Update(ald)\n\treturn err\n}", "func (self GroupModel) UpdateAll(db Database) GroupModel {\n for idx, page := range self {\n self[idx] = page.Update(db)\n }\n return self\n}", "func (e *EntitySystem) Update() {\n\t//\tfor _, v := range e.Entities {\n\t//\t\tv.Update()\n\t//\t}\n}", "func (o *Tree) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\ttreeUpdateCacheMut.RLock()\n\tcache, cached := treeUpdateCache[key]\n\ttreeUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\ttreeAllColumns,\n\t\t\ttreePrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update trees, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"trees\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, treePrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(treeType, treeMapping, append(wl, treePrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update trees row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for trees\")\n\t}\n\n\tif !cached 
{\n\t\ttreeUpdateCacheMut.Lock()\n\t\ttreeUpdateCache[key] = cache\n\t\ttreeUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "func (o *CMFAdminMenu) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tcmfAdminMenuUpdateCacheMut.RLock()\n\tcache, cached := cmfAdminMenuUpdateCache[key]\n\tcmfAdminMenuUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tcmfAdminMenuAllColumns,\n\t\t\tcmfAdminMenuPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update cmf_admin_menu, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `cmf_admin_menu` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, cmfAdminMenuPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(cmfAdminMenuType, cmfAdminMenuMapping, append(wl, cmfAdminMenuPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update cmf_admin_menu row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for cmf_admin_menu\")\n\t}\n\n\tif !cached 
{\n\t\tcmfAdminMenuUpdateCacheMut.Lock()\n\t\tcmfAdminMenuUpdateCache[key] = cache\n\t\tcmfAdminMenuUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "func (honest *Honest) flushUpdates() {\n\n\thonest.blockUpdates = honest.blockUpdates[:0]\n}", "func (o *CMFFamiliesPolicy) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\to.UpdatedAt = currTime\n\t}\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tcmfFamiliesPolicyUpdateCacheMut.RLock()\n\tcache, cached := cmfFamiliesPolicyUpdateCache[key]\n\tcmfFamiliesPolicyUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tcmfFamiliesPolicyAllColumns,\n\t\t\tcmfFamiliesPolicyPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update cmf_families_policies, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `cmf_families_policies` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, cmfFamiliesPolicyPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(cmfFamiliesPolicyType, cmfFamiliesPolicyMapping, append(wl, cmfFamiliesPolicyPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, 
errors.Wrap(err, \"models: unable to update cmf_families_policies row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for cmf_families_policies\")\n\t}\n\n\tif !cached {\n\t\tcmfFamiliesPolicyUpdateCacheMut.Lock()\n\t\tcmfFamiliesPolicyUpdateCache[key] = cache\n\t\tcmfFamiliesPolicyUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "func (tree *Tree) persist(root *caching.Node, parent *caching.Node, updateOps chan *caching.Node) {\n\t//if it is a leaf its size is already given\n\tfor _, child := range root.Children {\n\t\tnode, err := tree.GetNode(child.Name)\n\t\t//TODO: handle this somehow\n\t\tif err != nil {\n\t\t}\n\t\ttree.persist(node, root, updateOps)\n\t}\n\tupdateOps <- root\n}", "func (o *CMFUserSuper) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tcmfUserSuperUpdateCacheMut.RLock()\n\tcache, cached := cmfUserSuperUpdateCache[key]\n\tcmfUserSuperUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tcmfUserSuperAllColumns,\n\t\t\tcmfUserSuperPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update cmf_user_super, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `cmf_user_super` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, cmfUserSuperPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(cmfUserSuperType, cmfUserSuperMapping, append(wl, cmfUserSuperPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, 
err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update cmf_user_super row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for cmf_user_super\")\n\t}\n\n\tif !cached {\n\t\tcmfUserSuperUpdateCacheMut.Lock()\n\t\tcmfUserSuperUpdateCache[key] = cache\n\t\tcmfUserSuperUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "func (q shelfQuery) UpdateAllP(cols M) {\n\tif err := q.UpdateAll(cols); err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n}", "func (client ModelClient) UpdateHierarchicalEntityRoleResponder(resp *http.Response) (result OperationStatus, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (q cmfUserExperienceLogQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for cmf_user_experience_log\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for cmf_user_experience_log\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (o *CMFAdminMenu) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error 
{\n\tif o == nil {\n\t\treturn errors.New(\"models: no cmf_admin_menu provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(cmfAdminMenuColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLCMFAdminMenuUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tcmfAdminMenuUpsertCacheMut.RLock()\n\tcache, cached := cmfAdminMenuUpsertCache[key]\n\tcmfAdminMenuUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tcmfAdminMenuAllColumns,\n\t\t\tcmfAdminMenuColumnsWithDefault,\n\t\t\tcmfAdminMenuColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tcmfAdminMenuAllColumns,\n\t\t\tcmfAdminMenuPrimaryKeyColumns,\n\t\t)\n\n\t\tif !updateColumns.IsNone() && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert cmf_admin_menu, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"`cmf_admin_menu`\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `cmf_admin_menu` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, 
ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(cmfAdminMenuType, cmfAdminMenuMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(cmfAdminMenuType, cmfAdminMenuMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for cmf_admin_menu\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = uint(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == cmfAdminMenuMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(cmfAdminMenuType, cmfAdminMenuMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to retrieve unique values for cmf_admin_menu\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.retQuery)\n\t\tfmt.Fprintln(writer, nzUniqueCols...)\n\t}\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values 
for cmf_admin_menu\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tcmfAdminMenuUpsertCacheMut.Lock()\n\t\tcmfAdminMenuUpsertCache[key] = cache\n\t\tcmfAdminMenuUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (o OktaPlugin) EntityUpdate(e pb.Entity) (pb.Entity, error) {\n\toktaID := getEntityOktaID(e)\n\tif oktaID == \"\" {\n\t\treturn e, nil\n\t}\n\n\tuser, _, err := o.c.User.GetUser(oktaID)\n\tif err != nil {\n\t\tappLogger.Warn(\"No user with OktaID\", \"id\", oktaID, \"error\", err)\n\t\treturn e, nil\n\t}\n\n\tname := e.GetMeta().GetLegalName()\n\tnameParts := strings.SplitN(name, \" \", 2)\n\n\tnewProfile := *user.Profile\n\tif len(nameParts) == 2 {\n\t\tnewProfile[\"firstName\"] = nameParts[0]\n\t\tnewProfile[\"lastName\"] = nameParts[1]\n\t}\n\tnewProfile[\"employeeNumber\"] = e.GetNumber()\n\tnewProfile[\"displayName\"] = e.GetMeta().GetDisplayName()\n\n\tupdatedUser := &okta.User{\n\t\tProfile: &newProfile,\n\t}\n\t_, _, err = o.c.User.UpdateUser(oktaID, *updatedUser, nil)\n\tif err != nil {\n\t\tappLogger.Warn(\"Error updating Okta user\", \"error\", err)\n\t\treturn e, nil\n\t}\n\treturn e, nil\n}", "func (o CurrentChartDataMinutelySlice) UpdateAll(exec boil.Executor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), currentChartDataMinutelyPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"current_chart_data_minutely\\\" SET %s WHERE 
%s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, currentChartDataMinutelyPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.Exec(sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in currentChartDataMinutely slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all currentChartDataMinutely\")\n\t}\n\treturn rowsAff, nil\n}", "func (duo *DocUpdateOne) ClearChildren() *DocUpdateOne {\n\tduo.mutation.ClearChildren()\n\treturn duo\n}", "func (p *Proof) Update(u Update) error {\n\th := p.Leaf\n\n\tfor i := 0; i <= len(p.Steps); i++ {\n\t\tif len(u.u.roots) > i && u.u.roots[i] != nil && *u.u.roots[i] == h {\n\t\t\tp.Steps = p.Steps[:i]\n\t\t\treturn nil\n\t\t}\n\t\tvar step ProofStep\n\t\tif s, ok := u.updated[h]; ok {\n\t\t\tstep = s\n\t\t\tp.Steps = append(p.Steps[:i], step)\n\t\t} else if i == len(p.Steps) {\n\t\t\tbreak\n\t\t} else {\n\t\t\tstep = p.Steps[i]\n\t\t}\n\t\th = u.u.parent(h, step)\n\t}\n\n\treturn ErrInvalid\n}", "func (puo *PatientrecordUpdateOne) ClearMedicalrecordstaff() *PatientrecordUpdateOne {\n\tpuo.mutation.ClearMedicalrecordstaff()\n\treturn puo\n}", "func (q cmfFamiliesPolicyQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for cmf_families_policies\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for cmf_families_policies\")\n\t}\n\n\treturn rowsAff, nil\n}", "func 
updateLoan(l *models.Loan, db *gorm.DB) error {\n\tomitList := []string{\"id\", \"initial_value\", \"interest\", \"quota\", \"cod_client\", \"cod_collection\", \"cod_user\", \"deleted_at\"}\n\terr := db.Model(l).Omit(omitList...).Save(l).Error\n\treturn err\n}", "func (o TenantSlice) UpdateAllP(ctx context.Context, exec boil.ContextExecutor, cols M) int64 {\n\trowsAff, err := o.UpdateAll(ctx, exec, cols)\n\tif err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n\n\treturn rowsAff\n}", "func (uu *UserUpdate) ClearChildren() *UserUpdate {\n\tuu.mutation.ClearChildren()\n\treturn uu\n}", "func (self GroupModel) Update(page int, db Database) GroupModel {\n if len(self) > page {\n self[page] = self[page].Update(db)\n }\n return self\n}", "func (m *CompetenceMutation) ResetLevel() {\n\tm.level = nil\n\tm.addlevel = nil\n}", "func UpdateExceptionalChildById(id int, child_name string, class int, somatotype int8, allergen string, student_id int) (err error) {\n\to := orm.NewOrm()\n\terr = o.Begin()\n\texceptionalChild := ExceptionalChild{Id: id}\n\tif err = o.Read(&exceptionalChild); err == nil {\n\t\tif child_name != \"\" {\n\t\t\texceptionalChild.ChildName = child_name\n\t\t}\n\n\t\tif class != 0 {\n\t\t\texceptionalChild.Class = class\n\t\t}\n\n\t\tif somatotype != 0 {\n\t\t\texceptionalChild.Somatotype = somatotype\n\t\t}\n\n\t\tif allergen != \"\" {\n\t\t\texceptionalChild.Allergen = allergen\n\t\t}\n\n\t\texceptionalChild.UpdatedAt = time.Now().Format(\"2006-01-02 15:04:05\")\n\n\t\tif student_id != 0 {\n\t\t\texceptionalChild.StudentId = student_id\n\t\t}\n\n\t\tif _, err := o.Update(&exceptionalChild); err == nil {\n\t\t\to.Commit()\n\t\t\treturn err\n\t\t} else {\n\t\t\to.Rollback()\n\t\t\treturn err\n\t\t}\n\t}\n\to.Rollback()\n\treturn err\n}", "func (q authUserUserPermissionQuery) UpdateAll(cols M) error {\n\tqueries.SetUpdate(q.Query, cols)\n\n\t_, err := q.Query.Exec()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to update all for 
auth_user_user_permissions\")\n\t}\n\n\treturn nil\n}", "func updateLoanPayment(lp *models.LoanPayment, db *gorm.DB) error {\n\tomitList := []string{\"id\", \"cod_collection\", \"cod_user\", \"cod_loan\", \"deleted_at\"}\n\terr := db.Model(lp).Omit(omitList...).Save(lp).Error\n\treturn err\n}", "func DefaultStrictUpdateProfile(ctx context.Context, in *Profile, db *gorm1.DB) (*Profile, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateProfile\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &ProfileORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(ProfileORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tfilterGroups := GroupORM{}\n\tif ormObj.Id == \"\" {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tfilterGroups.ProfileId = new(string)\n\t*filterGroups.ProfileId = ormObj.Id\n\tif err = db.Where(filterGroups).Delete(GroupORM{}).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(ProfileORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(ProfileORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func (t *HeathCare_Chaincode) modifyPatientInformation(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tfmt.Println(\"\\n=============== start modifyPatientInformation function ===============\")\n\tstart := 
time.Now()\n\ttime.Sleep(time.Second)\n\n\tvar jsonResp string\n\n\tif len(args) != 8 {\n\t\treturn shim.Error(\"expecting 8 argument\")\n\t}\n\n\tuserid := args[0]\n\tpatientid := args[1]\n\tlocation := args[2]\n\tcollection := args[3]\n\n\tnewInsuranceCard := args[4]\n\tnewCurrentMedicationInformation := args[5]\n\tnewRelatedMedicalRecords := args[6]\n\tnewmakeNoteOfAppointmentDate := args[7]\n\ttimeQuery := time.Now().String()\n\n\t//get user identity before query\n\tuserIdentityAsBytes, errUserIdentityAsByte := stub.GetPrivateData(collection, userid)\n\tif errUserIdentityAsByte != nil {\n\t\treturn shim.Error(\"cannot get user identity\")\n\t} else if userIdentityAsBytes == nil {\n\t\treturn shim.Error(\"user does not exist\")\n\t}\n\n\tobjectType := \"Query\"\n\tquery := &Query{objectType, userid, patientid, location, timeQuery, \"modify\"}\n\tqueryAsByte, errQueryAsByte := json.Marshal(query)\n\tif errQueryAsByte != nil {\n\t\treturn shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//save to database\n\terrQueryAsByte = stub.PutPrivateData(\"modifyCollection\", userid, queryAsByte)\n\tif errQueryAsByte != nil {\n\t\treturn shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//create index key\n\tindexName := \"userid~patientid\"\n\tqueryIndexKey, errQueryIndexKey := stub.CreateCompositeKey(indexName, []string{query.UserID, query.PatientID, query.Location, query.Purpose})\n\tif errQueryIndexKey != nil {\n\t\treturn shim.Error(errQueryIndexKey.Error())\n\t}\n\n\t//save index\n\tvalue := []byte{0x00}\n\tstub.PutPrivateData(\"modifyCollection\", queryIndexKey, value)\n\n\t//get data\n\tpatientAsBytes, errPatientAsByte := stub.GetPrivateData(\"PatientInformationCollection\", patientid)\n\tif errPatientAsByte != nil {\n\t\tjsonResp = \"{\\\"Error\\\":\\\"Failed to get state for \" + patientid + \": \" + errPatientAsByte.Error() + \"\\\"}\"\n\t\treturn shim.Error(jsonResp)\n\t} else if errPatientAsByte == nil {\n\t\treturn shim.Error(\"patient's data does not 
exist\")\n\t}\n\n\t//convert data of patient to json\n\tpatient := &PatientInformation{}\n\terrPatientAsByte = json.Unmarshal(patientAsBytes, patient)\n\n\t//change data\n\tpatient.InsuranceCard = newInsuranceCard\n\tpatient.CurrentMedicationInformation = newCurrentMedicationInformation\n\tpatient.RelatedMedicalRecords = newRelatedMedicalRecords\n\tpatient.MakeNoteOfAppointmentDate = newmakeNoteOfAppointmentDate\n\n\tpatientAsByte, errPatientAsByte := json.Marshal(patient)\n\n\terrPatientAsByte = stub.PutPrivateData(\"PatientInformationCollection\", patientid, patientAsByte)\n\tif errPatientAsByte != nil {\n\t\treturn shim.Error(\"cannot patient's data\")\n\t}\n\n\tend := time.Now()\n\telapsed := time.Since(start)\n\tfmt.Println(\"function modifyPatientInformation\")\n\tfmt.Println(\"time start: \", start.String())\n\tfmt.Println(\"time end: \", end.String())\n\tfmt.Println(\"time execute: \", elapsed.String())\n\tfmt.Println(\"=============== end modifyPatientInformation function ===============\")\n\n\treturn shim.Success(nil)\n}", "func (auo *AntenatalinformationUpdateOne) ClearPersonnel() *AntenatalinformationUpdateOne {\n\tauo.mutation.ClearPersonnel()\n\treturn auo\n}", "func (q shelfQuery) UpdateAll(cols M) error {\n\tqueries.SetUpdate(q.Query, cols)\n\n\t_, err := q.Query.Exec()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to update all for shelf\")\n\t}\n\n\treturn nil\n}", "func (m *RoomMutation) ClearRoominfo() {\n\tm.clearedroominfo = true\n}", "func (oumuo *OrgUnitMemberUpdateOne) Save(ctx context.Context) (*OrgUnitMember, error) {\n\tvar (\n\t\terr error\n\t\tnode *OrgUnitMember\n\t)\n\tif err := oumuo.defaults(); err != nil {\n\t\treturn nil, err\n\t}\n\tif len(oumuo.hooks) == 0 {\n\t\tif err = oumuo.check(); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tnode, err = oumuo.sqlSave(ctx)\n\t} else {\n\t\tvar mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {\n\t\t\tmutation, ok := 
m.(*OrgUnitMemberMutation)\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected mutation type %T\", m)\n\t\t\t}\n\t\t\tif err = oumuo.check(); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\toumuo.mutation = mutation\n\t\t\tnode, err = oumuo.sqlSave(ctx)\n\t\t\tmutation.done = true\n\t\t\treturn node, err\n\t\t})\n\t\tfor i := len(oumuo.hooks) - 1; i >= 0; i-- {\n\t\t\tif oumuo.hooks[i] == nil {\n\t\t\t\treturn nil, fmt.Errorf(\"ent: uninitialized hook (forgotten import ent/runtime?)\")\n\t\t\t}\n\t\t\tmut = oumuo.hooks[i](mut)\n\t\t}\n\t\tif _, err := mut.Mutate(ctx, oumuo.mutation); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn node, err\n}", "func (uuo *UserUpdateOne) ClearBelongToOrgUnitMembers() *UserUpdateOne {\n\tuuo.mutation.ClearBelongToOrgUnitMembers()\n\treturn uuo\n}", "func (m *TeamworkSoftwareUpdateHealth) SetAdditionalData(value map[string]any)() {\n err := m.GetBackingStore().Set(\"additionalData\", value)\n if err != nil {\n panic(err)\n }\n}", "func (q phenotypepropQuery) UpdateAll(cols M) error {\n\tqueries.SetUpdate(q.Query, cols)\n\n\t_, err := q.Query.Exec()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"chado: unable to update all for phenotypeprop\")\n\t}\n\n\treturn nil\n}", "func (o NotificationSlice) UpdateAll(exec boil.Executor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), notificationPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE 
\\\"notification\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, notificationPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.Exec(sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in notification slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all notification\")\n\t}\n\treturn rowsAff, nil\n}", "func (muo *MedicalrecordstaffUpdateOne) Save(ctx context.Context) (*Medicalrecordstaff, error) {\n\n\tvar (\n\t\terr error\n\t\tnode *Medicalrecordstaff\n\t)\n\tif len(muo.hooks) == 0 {\n\t\tnode, err = muo.sqlSave(ctx)\n\t} else {\n\t\tvar mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {\n\t\t\tmutation, ok := m.(*MedicalrecordstaffMutation)\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected mutation type %T\", m)\n\t\t\t}\n\t\t\tmuo.mutation = mutation\n\t\t\tnode, err = muo.sqlSave(ctx)\n\t\t\tmutation.done = true\n\t\t\treturn node, err\n\t\t})\n\t\tfor i := len(muo.hooks) - 1; i >= 0; i-- {\n\t\t\tmut = muo.hooks[i](mut)\n\t\t}\n\t\tif _, err := mut.Mutate(ctx, muo.mutation); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn node, err\n}", "func (q foreignLegalResourceQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for ForeignLegalResources\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for 
ForeignLegalResources\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (p *Peer) internalFlush() (uint64, bool, error) {\n\tp.mutex.Lock()\n\tdefer p.mutex.Unlock()\n\treq := p.server.createInternalAppendEntriesRequest(p.prevLogIndex)\n\treturn p.sendFlushRequest(req)\n}", "func (o AuthUserUserPermissionSlice) UpdateAll(exec boil.Executor, cols M) error {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), authUserUserPermissionPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\n\t\t\"UPDATE `auth_user_user_permissions` SET %s WHERE (`id`) IN (%s)\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.Placeholders(dialect.IndexPlaceholders, len(o)*len(authUserUserPermissionPrimaryKeyColumns), len(colNames)+1, len(authUserUserPermissionPrimaryKeyColumns)),\n\t)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\t_, err := exec.Exec(sql, args...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to update all in authUserUserPermission slice\")\n\t}\n\n\treturn nil\n}", "func HandleUpdatePerson(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tid := r.URL.Query().Get(\"id\")\n\tif id == \"\" {\n\t\thttp.Error(w, \"id parameter is not found\", http.StatusBadRequest)\n\t\treturn\n\t}\n\tfmt.Println(id)\n\treqBody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"Body Read Error 
: %v\", err), http.StatusInternalServerError)\n\t}\n\tvar body map[string]string\n\terr = json.Unmarshal(reqBody, &body)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"Request Body parse error : %v\", err), http.StatusBadRequest)\n\t\treturn\n\t}\n\tfmt.Printf(body[\"name\"])\n\tcols := \"\"\n\n\tfor key, val := range body {\n\t\tcols = cols + key + \"=\" + \"'\" + val + \"'\" + \",\"\n\t}\n\tf := cols[:len(cols)-1]\n\n\t_, err = Db.Exec(fmt.Sprintf(\"UPDATE people SET %s where people.id=?\", f), id)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"ERROR in deleting person %s\", err.Error()), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tvar person db.Person\n\tres, err := Db.Query(\"SELECT * FROM people where id=?\", id)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"ERROR in updating person %s\", err.Error()), http.StatusBadRequest)\n\t\treturn\n\t}\n\tdefer res.Close()\n\tfor res.Next() {\n\t\terr = res.Scan(&person.Id, &person.Name, &person.City, &person.ContactNo, &person.PhotoUrl)\n\t\tif err != nil {\n\t\t\thttp.Error(w, fmt.Sprintf(\"ERROR in updating person %s\", err.Error()), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t}\n\tjson.NewEncoder(w).Encode(person)\n}", "func (o ShelfSlice) UpdateAllP(exec boil.Executor, cols M) {\n\tif err := o.UpdateAll(exec, cols); err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n}", "func (auo *AdministratorUpdateOne) Save(ctx context.Context) (*Administrator, error) {\n\tvar (\n\t\terr error\n\t\tnode *Administrator\n\t)\n\tif len(auo.hooks) == 0 {\n\t\tif err = auo.check(); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tnode, err = auo.sqlSave(ctx)\n\t} else {\n\t\tvar mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {\n\t\t\tmutation, ok := m.(*AdministratorMutation)\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected mutation type %T\", m)\n\t\t\t}\n\t\t\tif err = auo.check(); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tauo.mutation = 
mutation\n\t\t\tnode, err = auo.sqlSave(ctx)\n\t\t\tmutation.done = true\n\t\t\treturn node, err\n\t\t})\n\t\tfor i := len(auo.hooks) - 1; i >= 0; i-- {\n\t\t\tmut = auo.hooks[i](mut)\n\t\t}\n\t\tif _, err := mut.Mutate(ctx, auo.mutation); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn node, err\n}", "func (o TenantSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"dbmodel: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), tenantPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `tenants` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, tenantPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"dbmodel: unable to update all in tenant slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"dbmodel: unable to retrieve rows affected all in update all tenant\")\n\t}\n\treturn rowsAff, nil\n}", "func (p *Personal) Update(ctx context.Context, document *PersonalData) (int64, error) {\n\treturn p.DB.Update(ctx, document)\n}", "func (q tenantQuery) UpdateAllP(ctx context.Context, exec boil.ContextExecutor, cols M) int64 
{\n\trowsAff, err := q.UpdateAll(ctx, exec, cols)\n\tif err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n\n\treturn rowsAff\n}", "func (d *Dao) MergeUpInfo(mid int64) (err error) {\n\tvar (\n\t\tctx = context.Background()\n\t\tparams = url.Values{}\n\t\treq = &http.Request{}\n\t\tid int64\n\t\tres struct {\n\t\t\tCode int\n\t\t\tData model.UpUserInfoRes\n\t\t}\n\t)\n\terr = d.db.QueryRow(ctx, \"select mid from user_base where mid = ?\", mid).Scan(&id)\n\tif err == nil {\n\t\tlog.Infow(ctx, \"log\", \"already has mid in user_base\", \"mid\", mid)\n\t\treturn\n\t}\n\tif err == sql.ErrNoRows {\n\t\tparams.Set(\"mid\", strconv.FormatInt(mid, 10))\n\t\treq, err = d.HTTPClient.NewRequest(\"GET\", d.c.URLs[\"account\"], \"\", params)\n\t\tif err != nil {\n\t\t\tlog.Error(\"MergeUpInfo error(%v)\", err)\n\t\t\treturn\n\t\t}\n\t\tif err = d.HTTPClient.Do(ctx, req, &res); err != nil {\n\t\t\tlog.Error(\"MergeUpInfo http req failed ,err:%v\", err)\n\t\t\treturn\n\t\t}\n\t\tres := res.Data\n\t\tvar sex int\n\t\tswitch res.Sex {\n\t\tcase \"男\":\n\t\t\tsex = 1\n\t\tcase \"女\":\n\t\t\tsex = 2\n\t\tdefault:\n\t\t\tsex = 3\n\t\t}\n\t\t_, err = d.db.Exec(ctx,\n\t\t\t\"insert into user_base (mid,uname,face,sex,user_type,complete_degree)values(?,?,?,?,?,?)\",\n\t\t\tres.MID,\n\t\t\tres.Name,\n\t\t\tres.Face,\n\t\t\tsex,\n\t\t\tmodel.UserTypeUp,\n\t\t\t0)\n\t\tif err != nil {\n\t\t\tlog.Error(\"MergeUpInfo insert upinfo failed,err:%v\", err)\n\t\t\treturn\n\t\t}\n\t} else {\n\t\tlog.Error(\"MergeUpInfo query sql failed,err:%v\", err)\n\t}\n\tif err = d.db.QueryRow(ctx, \"select id from user_statistics where mid = ?\", mid).Scan(&id); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\tif _, err = d.db.Exec(ctx, \"insert into user_statistics (mid) values (?)\", mid); err != nil {\n\t\t\t\tlog.Error(\"init insert user_statistics failed,err:%v\", err)\n\t\t\t}\n\t\t} else {\n\t\t\tlog.Error(\"init query user_statistics failed,err:%v\", err)\n\t\t}\n\t}\n\treturn\n}", "func (uuo 
*UserUpdateOne) ClearPermissions() *UserUpdateOne {\n\tuuo.mutation.ClearPermissions()\n\treturn uuo\n}", "func syncRelatedFieldInfo() {\n\tfor _, mi := range modelRegistry.registryByName {\n\t\tfor _, fi := range mi.fields.registryByName {\n\t\t\tif !fi.related() {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tnewFI := *mi.getRelatedFieldInfo(fi.relatedPath)\n\t\t\tnewFI.name = fi.name\n\t\t\tnewFI.json = fi.json\n\t\t\tnewFI.relatedPath = fi.relatedPath\n\t\t\tnewFI.stored = fi.stored\n\t\t\tnewFI.mi = mi\n\t\t\tnewFI.noCopy = true\n\t\t\t*fi = newFI\n\t\t}\n\t}\n}", "func (pu *PatientrecordUpdate) ClearMedicalrecordstaff() *PatientrecordUpdate {\n\tpu.mutation.ClearMedicalrecordstaff()\n\treturn pu\n}" ]
[ "0.5985215", "0.5647233", "0.5622791", "0.56116664", "0.55547845", "0.5524893", "0.5467763", "0.5339883", "0.5237392", "0.5117131", "0.49089316", "0.4896554", "0.48413637", "0.47866955", "0.47696465", "0.47048986", "0.4685417", "0.46649095", "0.46379796", "0.45873073", "0.4586094", "0.45839667", "0.45713478", "0.45683596", "0.4529119", "0.4523018", "0.4493798", "0.44857076", "0.44797397", "0.44688842", "0.4464216", "0.4441868", "0.44368774", "0.44357392", "0.44334593", "0.44294465", "0.44272453", "0.4408384", "0.44054338", "0.44054338", "0.440298", "0.44016337", "0.43833682", "0.43752134", "0.43637285", "0.43409175", "0.4338649", "0.43299648", "0.43289533", "0.43158352", "0.4311604", "0.43110216", "0.4307064", "0.42847812", "0.42794266", "0.42746156", "0.42709193", "0.42704472", "0.42633682", "0.42593148", "0.42425993", "0.42312574", "0.42300755", "0.4211286", "0.4208756", "0.42033556", "0.42025372", "0.42016336", "0.42015845", "0.42013714", "0.41961473", "0.41844943", "0.41793337", "0.4172414", "0.4167528", "0.41659173", "0.41603348", "0.41564324", "0.41557682", "0.4152555", "0.41508478", "0.41458085", "0.4143731", "0.41423902", "0.41397816", "0.41290015", "0.41229904", "0.41226727", "0.41204318", "0.4120226", "0.41201848", "0.41176945", "0.41171426", "0.4115284", "0.41135246", "0.41134825", "0.41103238", "0.41094837", "0.4109475", "0.41023204" ]
0.66284704
0
DefaultPatchHealthMenstruationPersonalInfo executes a basic gorm update call with patch behavior
func DefaultPatchHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) { if in == nil { return nil, errors1.NilArgumentError } var pbObj HealthMenstruationPersonalInfo var err error if hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchRead); ok { if db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil { return nil, err } } pbReadRes, err := DefaultReadHealthMenstruationPersonalInfo(ctx, &HealthMenstruationPersonalInfo{Id: in.GetId()}, db) if err != nil { return nil, err } pbObj = *pbReadRes if hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchApplyFieldMask); ok { if db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil { return nil, err } } if _, err := DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx, &pbObj, in, updateMask, "", db); err != nil { return nil, err } if hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchSave); ok { if db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil { return nil, err } } pbResponse, err := DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx, &pbObj, db) if err != nil { return nil, err } if hook, ok := interface{}(pbResponse).(HealthMenstruationPersonalInfoWithAfterPatchSave); ok { if err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil { return nil, err } } return pbResponse, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationPersonalInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationPersonalInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func DefaultPatchSetHealthMenstruationPersonalInfo(ctx context.Context, objects []*HealthMenstruationPersonalInfo, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationPersonalInfo, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationPersonalInfo, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationPersonalInfo(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = 
append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx context.Context, patchee *HealthMenstruationPersonalInfo, patcher *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"PeriodLengthInDays\" {\n\t\t\tpatchee.PeriodLengthInDays = patcher.PeriodLengthInDays\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CycleLengthInDays\" {\n\t\t\tpatchee.CycleLengthInDays = patcher.CycleLengthInDays\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultPatchHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationDailyEntry\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationDailyEntry(ctx, &HealthMenstruationDailyEntry{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := 
interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationDailyEntry(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationDailyEntryWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultReadHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationPersonalInfoORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationPersonalInfoORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok 
:= interface{}(&ormResponse).(HealthMenstruationPersonalInfoORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultCreateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultListHealthMenstruationPersonalInfo(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationPersonalInfo, error) {\n\tin := HealthMenstruationPersonalInfo{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationPersonalInfoORM{}, &HealthMenstruationPersonalInfo{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil 
{\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationPersonalInfoORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationPersonalInfo{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func (handler *profileHandler) Patch(ctx context.Context, req *proto.ProfilePatchRequest, rsp *proto.ProfileData) (err error) {\n\tprofileInstance := handler.getProfileInstance(req.GetId())\n\tprofileInstance.SetAvatar(req.Avatar)\n\terr = profileInstance.SetLocation(req.Location)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tprofileInstance.SetSkype(req.Skype)\n\terr = profileInstance.Save()\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\thandler.setProfileDataToResponse(profileInstance, rsp)\n\n\treturn nil\n}", "func (t *MedChain) updateHospital(stub shim.ChaincodeStubInterface, args []string) peer.Response {\n\t\t// ==== Input sanitation ====\n\t\tfmt.Println(\"- start updateHospital\")\n\n\t\t// check if all the args are send\n\t\tif len(args) != 4 {\n\t\t\treturn shim.Error(\"Incorrect number of arguments, Required 4 arguments\")\n\t\t}\n\n\t\t// check if the args are empty\n\t\tfor i := 0; i < len(args); i++ {\n\t\t\tif len(args[i]) <= 0 {\n\t\t\t\treturn shim.Error(\"argument \"+ string(i+1) + \" must be a non-empty string\")\n\t\t\t}\n\t\t}\n\n\t\tgetAssetAsBytes, errT := stub.GetState(args[0])\n\n\t\tif errT != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Error : Cannot find Hospital %s\" , errT))\n\t\t}\n\n\t\tif getAssetAsBytes == nil {\n\t\t\treturn 
shim.Error(fmt.Sprintf(\"Cannot find asset Hospital with ID %s\" , args[0]))\n\t\t}\n\n\t\tvar obj = Hospital{}\n\n\t\tjson.Unmarshal(getAssetAsBytes, &obj)\n\t\tobj.HospitalName = args[1]\n\t\tobj.HospitalAddress = args[2]\n\t\tobj.HospitalPhone = args[3]\n\t\tcomAssetAsBytes, errMarshal := json.Marshal(obj)\n\n\t\tif errMarshal != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Marshal Error: %s\", errMarshal))\n\t\t}\n\n\t\terrPut := stub.PutState(obj.Hospital_ID, comAssetAsBytes)\n\n\t\tif errPut != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Failed to update Hospital with ID %s\", args[0]))\n\t\t}\n\n\t\tfmt.Println(\"Hospital asset with ID %s was updated \\n %v\", args[0], obj)\n\n\t\treturn shim.Success(comAssetAsBytes)\n\t}", "func DefaultPatchUserInfo(ctx context.Context, in *UserInfo, updateMask *field_mask.FieldMask, db *gorm.DB) (*UserInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar pbObj UserInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(UserInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadUserInfo(ctx, &UserInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(UserInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskUserInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(UserInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateUserInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := 
interface{}(pbResponse).(UserInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func (t *HeathCare_Chaincode) modifyMedicalData(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tfmt.Println(\"\\n=============== start modifyMedicalData function ===============\")\n\tstart := time.Now()\n\ttime.Sleep(time.Second)\n\n\tvar jsonResp string\n\n\tif len(args) != 8 {\n\t\treturn shim.Error(\"expecting 4 argument\")\n\t}\n\n\t//define identity of query-er and new value of medical record\n\tuserid := args[0]\n\tpatientid := args[1]\n\tlocation := args[2]\n\tcollection := args[3]\n\n\tnewPersonalIdentificationInformation := args[4]\n\tnewMedicalHistory := args[5]\n\tnewFamilyMedicalHistory := args[6]\n\tnewMedicationHistory := args[7]\n\tnewTreatmentHistory := args[8]\n\tnewMedicalDirectives := args[9]\n\ttimeQuery := time.Now().String()\n\n\t//get user identity before query\n\tuserIdentityAsBytes, errUserIdentityAsByte := stub.GetPrivateData(collection, userid)\n\tif errUserIdentityAsByte != nil {\n\t\treturn shim.Error(\"cannot get user identity\")\n\t} else if userIdentityAsBytes == nil {\n\t\treturn shim.Error(\"user does not exist\")\n\t}\n\n\t//create query object with purpose: modify\n\tobjectType := \"Query\"\n\tquery := &Query{objectType, userid, patientid, location, timeQuery, \"modify\"}\n\tqueryAsByte, errQueryAsByte := json.Marshal(query)\n\tif errQueryAsByte != nil {\n\t\treturn shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//save to database\n\terrQueryAsByte = stub.PutPrivateData(\"modifyCollection\", userid, queryAsByte)\n\tif errQueryAsByte != nil {\n\t\treturn shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//create index key\n\tindexName := \"userid~patientid\"\n\tqueryIndexKey, errQueryIndexKey := stub.CreateCompositeKey(indexName, []string{query.UserID, query.PatientID, query.Location, query.Purpose})\n\tif errQueryIndexKey 
!= nil {\n\t\treturn shim.Error(errQueryIndexKey.Error())\n\t}\n\n\t//save index\n\tvalue := []byte{0x00}\n\tstub.PutPrivateData(\"modifyCollection\", queryIndexKey, value)\n\n\t//get medical record data\n\tmedicalRecordAsBytes, errMedicalRecordAsByte := stub.GetPrivateData(\"MedicalRecordCollection\", patientid)\n\tif errMedicalRecordAsByte != nil {\n\t\tjsonResp = \"{\\\"Error\\\":\\\"Failed to get state for \" + patientid + \": \" + errMedicalRecordAsByte.Error() + \"\\\"}\"\n\t\treturn shim.Error(jsonResp)\n\t} else if errMedicalRecordAsByte == nil {\n\t\treturn shim.Error(\"patient's data does not exist\")\n\t}\n\n\t//convert data of patient to json\n\tmedicalRecord := &MedicalRecord{}\n\terrMedicalRecordAsByte = json.Unmarshal(medicalRecordAsBytes, medicalRecord)\n\n\t//change data\n\tmedicalRecord.PersonalIdentificationInformation = newPersonalIdentificationInformation\n\tmedicalRecord.MedicalHistory = newMedicalHistory\n\tmedicalRecord.FamilyMedicalHistory = newFamilyMedicalHistory\n\tmedicalRecord.MedicationHistory = newMedicationHistory\n\tmedicalRecord.TreatmentHistory = newTreatmentHistory\n\tmedicalRecord.MedicalDirectives = newMedicalDirectives\n\n\t//convert new medical record data to byte\n\tnewMedicalRecordAsByte, errNewMedicalRecordAsByte := json.Marshal(medicalRecord)\n\n\t//store new data\n\terrNewMedicalRecordAsByte = stub.PutPrivateData(\"MedicalRecordCollection\", patientid, newMedicalRecordAsByte)\n\tif errNewMedicalRecordAsByte != nil {\n\t\treturn shim.Error(\"cannot save new medical record's data\")\n\t}\n\n\tend := time.Now()\n\telapsed := time.Since(start)\n\tfmt.Println(\"function modifyMedicalData\")\n\tfmt.Println(\"time start: \", start.String())\n\tfmt.Println(\"time end: \", end.String())\n\tfmt.Println(\"time execute: \", elapsed.String())\n\tfmt.Println(\"=============== end modifyMedicalData function ===============\")\n\n\treturn shim.Success(nil)\n}", "func DefaultStrictUpdateHealthMenstruationDailyEntry(ctx context.Context, 
in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationDailyEntry\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationDailyEntryORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func (service *EmployeeService) PatchEmployeeDetails(employeeID string, employeeDetails models.Employee) error {\n\tcollection := service.mongoClient.Database(DbName).Collection(CollectionName)\n\tupdatesToBePerformed := bson.M{}\n\tupdatesToBePerformed[\"employeeid\"] = employeeID\n\tif employeeDetails.Department != nil {\n\t\tupdatesToBePerformed[\"department\"] = employeeDetails.Department\n\t}\n\n\tif employeeDetails.Name != nil {\n\t\tupdatesToBePerformed[\"name\"] = employeeDetails.Name\n\t}\n\n\tif employeeDetails.Skills != nil {\n\t\tupdatesToBePerformed[\"skills\"] = employeeDetails.Skills\n\t}\n\n\tif employeeDetails.Address != nil {\n\t\taddress := models.Address{}\n\t\tif employeeDetails.Address.City != nil 
{\n\t\t\taddress.City = employeeDetails.Address.City\n\t\t}\n\n\t\tif employeeDetails.Address.Country != nil {\n\t\t\taddress.Country = employeeDetails.Address.Country\n\t\t}\n\n\t\tif employeeDetails.Address.DoorNo != nil {\n\t\t\taddress.DoorNo = employeeDetails.Address.DoorNo\n\t\t}\n\n\t\tif employeeDetails.Address.State != nil {\n\t\t\taddress.State = employeeDetails.Address.State\n\t\t}\n\n\t\tupdatesToBePerformed[\"address\"] = address\n\t}\n\n\tif employeeDetails.Status != nil {\n\t\tupdatesToBePerformed[\"status\"] = employeeDetails.Status\n\t}\n\n\t// consolidatedMap(&updatesToBePerformed, employeeDetails)\n\n\tresult, err := collection.UpdateOne(\n\t\tcontext.Background(),\n\t\tbson.M{\"employeeid\": employeeID},\n\t\tbson.M{\n\t\t\t\"$set\": updatesToBePerformed,\n\t\t})\n\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\tfmt.Println(result)\n\n\treturn nil\n}", "func DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx context.Context, patchee *HealthMenstruationDailyEntry, patcher *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Day\" {\n\t\t\tpatchee.Day = patcher.Day\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"IntensityPercentage\" {\n\t\t\tpatchee.IntensityPercentage = patcher.IntensityPercentage\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Type\" {\n\t\t\tpatchee.Type = 
patcher.Type\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Manual\" {\n\t\t\tpatchee.Manual = patcher.Manual\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"BasedOnPrediction\" {\n\t\t\tpatchee.BasedOnPrediction = patcher.BasedOnPrediction\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultPatchSetHealthMenstruationDailyEntry(ctx context.Context, objects []*HealthMenstruationDailyEntry, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationDailyEntry, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationDailyEntry, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationDailyEntry(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func (t *HeathCare_Chaincode) modifyPatientInformation(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tfmt.Println(\"\\n=============== start modifyPatientInformation function ===============\")\n\tstart := time.Now()\n\ttime.Sleep(time.Second)\n\n\tvar jsonResp string\n\n\tif len(args) != 8 {\n\t\treturn shim.Error(\"expecting 8 argument\")\n\t}\n\n\tuserid := args[0]\n\tpatientid := args[1]\n\tlocation := args[2]\n\tcollection := args[3]\n\n\tnewInsuranceCard := args[4]\n\tnewCurrentMedicationInformation := args[5]\n\tnewRelatedMedicalRecords := args[6]\n\tnewmakeNoteOfAppointmentDate := args[7]\n\ttimeQuery := time.Now().String()\n\n\t//get user identity before query\n\tuserIdentityAsBytes, errUserIdentityAsByte := stub.GetPrivateData(collection, userid)\n\tif errUserIdentityAsByte != nil {\n\t\treturn shim.Error(\"cannot get user identity\")\n\t} else if userIdentityAsBytes == nil {\n\t\treturn shim.Error(\"user does not 
exist\")\n\t}\n\n\tobjectType := \"Query\"\n\tquery := &Query{objectType, userid, patientid, location, timeQuery, \"modify\"}\n\tqueryAsByte, errQueryAsByte := json.Marshal(query)\n\tif errQueryAsByte != nil {\n\t\treturn shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//save to database\n\terrQueryAsByte = stub.PutPrivateData(\"modifyCollection\", userid, queryAsByte)\n\tif errQueryAsByte != nil {\n\t\treturn shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//create index key\n\tindexName := \"userid~patientid\"\n\tqueryIndexKey, errQueryIndexKey := stub.CreateCompositeKey(indexName, []string{query.UserID, query.PatientID, query.Location, query.Purpose})\n\tif errQueryIndexKey != nil {\n\t\treturn shim.Error(errQueryIndexKey.Error())\n\t}\n\n\t//save index\n\tvalue := []byte{0x00}\n\tstub.PutPrivateData(\"modifyCollection\", queryIndexKey, value)\n\n\t//get data\n\tpatientAsBytes, errPatientAsByte := stub.GetPrivateData(\"PatientInformationCollection\", patientid)\n\tif errPatientAsByte != nil {\n\t\tjsonResp = \"{\\\"Error\\\":\\\"Failed to get state for \" + patientid + \": \" + errPatientAsByte.Error() + \"\\\"}\"\n\t\treturn shim.Error(jsonResp)\n\t} else if errPatientAsByte == nil {\n\t\treturn shim.Error(\"patient's data does not exist\")\n\t}\n\n\t//convert data of patient to json\n\tpatient := &PatientInformation{}\n\terrPatientAsByte = json.Unmarshal(patientAsBytes, patient)\n\n\t//change data\n\tpatient.InsuranceCard = newInsuranceCard\n\tpatient.CurrentMedicationInformation = newCurrentMedicationInformation\n\tpatient.RelatedMedicalRecords = newRelatedMedicalRecords\n\tpatient.MakeNoteOfAppointmentDate = newmakeNoteOfAppointmentDate\n\n\tpatientAsByte, errPatientAsByte := json.Marshal(patient)\n\n\terrPatientAsByte = stub.PutPrivateData(\"PatientInformationCollection\", patientid, patientAsByte)\n\tif errPatientAsByte != nil {\n\t\treturn shim.Error(\"cannot patient's data\")\n\t}\n\n\tend := time.Now()\n\telapsed := time.Since(start)\n\tfmt.Println(\"function 
modifyPatientInformation\")\n\tfmt.Println(\"time start: \", start.String())\n\tfmt.Println(\"time end: \", end.String())\n\tfmt.Println(\"time execute: \", elapsed.String())\n\tfmt.Println(\"=============== end modifyPatientInformation function ===============\")\n\n\treturn shim.Success(nil)\n}", "func DefaultStrictUpdateUserInfo(ctx context.Context, in *UserInfo, db *gorm.DB) (*UserInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateUserInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &UserInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func (c *Client) ModifyHealthCheck(args *ModifyHealthCheckArgs) (*ModifyHealthCheckResponse, error) {\n\tresponse := ModifyHealthCheckResponse{}\n\terr := c.Invoke(\"ModifyHealthCheck\", args, &response)\n\tif err == nil {\n\t\treturn &response, nil\n\t}\n\treturn nil, err\n}", "func (a *HyperflexApiService) PatchHyperflexClusterProfile(ctx context.Context, moid string) ApiPatchHyperflexClusterProfileRequest {\n\treturn ApiPatchHyperflexClusterProfileRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: 
moid,\n\t}\n}", "func (db *DataBase) UpdatePlayerPersonalInfo(userID int32, user *models.UserPrivateInfo) (err error) {\n\tvar (\n\t\tconfirmedUser *models.UserPrivateInfo\n\t\ttx *sql.Tx\n\t)\n\n\tif tx, err = db.Db.Begin(); err != nil {\n\t\treturn\n\t}\n\tdefer tx.Rollback()\n\n\tif confirmedUser, err = db.getPrivateInfo(tx, userID); err != nil {\n\t\treturn\n\t}\n\n\tconfirmedUser.Update(user)\n\n\tif err = db.updatePlayerPersonalInfo(tx, user); err != nil {\n\t\treturn\n\t}\n\n\terr = tx.Commit()\n\treturn\n}", "func (m MariaDB) Update(ctx context.Context, ep entity.PersonalData) (int64, error) {\n\tp := receive(ep)\n\tsqlQuery := \"UPDATE person SET name=?, last_name=?, phone=?, email=?, year_od_birth=? where id= ?\"\n\n\trslt, err := m.Person.ExecContext(ctx, sqlQuery, p.Name, p.LastName, p.Phone, p.Email, p.YearOfBirth, p.ID)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"could not update data\")\n\t}\n\tcount, err := rslt.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"rows are not affected\")\n\t}\n\treturn count, nil\n}", "func UpdatePerson(c *gin.Context) {\n // Get the person to be updated\n var person models.Person\n if err := models.DB.First(&person, \"id = ?\", c.Param(\"id\")).Error; err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n // Validate input\n var input UpdatePersonInput\n if err := c.ShouldBindJSON(&input); err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n models.DB.Model(&person).Updates(input)\n\n c.JSON(http.StatusOK, gin.H{\"data\": person})\n}", "func (m *CompaniesItemCompanyInformationCompanyInformationItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CompanyInformationable, requestConfiguration 
*CompaniesItemCompanyInformationCompanyInformationItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CompanyInformationable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateCompanyInformationFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CompanyInformationable), nil\n}", "func EditLocalRep(c *gin.Context) {\n\tuserGUID, _ := c.GetQuery(\"user_guid\")\n\trepGUID, _ := c.GetQuery(\"rep_guid\")\n\teditTask, _ := c.GetQuery(\"editTask\")\n\tc.Header(\"Content-Type\", \"application/json\")\n\ttargetRepIndex := -1\n\tif editTask == \"add\" {\n\t\t// TODO: create map of maps\n\t\tuserReps[userGUID] = append(userReps[userGUID], repGUID)\n\t} else if editTask == \"remove\" {\n\t\ttempUserRepList := userReps[userGUID]\n\t\tfor i, value := range tempUserRepList {\n\t\t\tif value == repGUID {\n\t\t\t\ttargetRepIndex = i\n\t\t\t}\n\t\t}\n\t\tif targetRepIndex != -1 {\n\t\t\tuserReps[userGUID] = append(tempUserRepList[:targetRepIndex], tempUserRepList[targetRepIndex+1:]...)\n\t\t}\n\t} else {\n\t\tfmt.Println(\"edit Rep: provided invalid option\")\n\t\t// log.Info(\"edit Rep: provided invalid option\")\n\t}\n\n\tuserRepUpdate := models.UserRepUpdate{\n\t\tUserGUID: userGUID,\n\t\tRepGUID: repGUID,\n\t\tAction: 
editTask,\n\t}\n\n\tuserRepUpdateResponse, _ := json.Marshal(userRepUpdate)\n\n\tfmt.Println(string(userRepUpdateResponse))\n\n\tif cfg.Kafka.EnableKafka {\n\t\terr := writer.WriteMessages(context.Background(), kafka.Message{\n\t\t\t//Key: []byte(repGUID),\n\t\t\tValue: []byte(userRepUpdateResponse),\n\t\t})\n\t\tif err != nil {\n\t\t\tpanic(\"could not write kafka message \" + err.Error())\n\t\t}\n\t}\n\n\tmsg := map[string]interface{}{\"Status\": \"Ok\", \"user_guid\": userGUID, \"users_rep_list\": userReps[userGUID]}\n\tc.JSON(http.StatusOK, msg)\n}", "func updatePerson(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-type\", \"application/json\")\n\n\tvar person Person\n\tfname := person.Fname\n\tlname := person.Lname\n\temail := person.Email\n\tpword := person.Pword\n\tid := person.Id\n\n\tstmt, err := db.Prepare(\"UPDATE person SET fname=?, lname=?, email=?, pword=?, id=? WHERE id =?\")\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tresult, err := stmt.Exec(fname, lname, email, pword, id)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\t_, err = result.RowsAffected()\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n}", "func (m *DeviceLocalCredentialInfoItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.DeviceLocalCredentialInfoable, requestConfiguration *DeviceLocalCredentialInfoItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.DeviceLocalCredentialInfoable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": 
i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateDeviceLocalCredentialInfoFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.DeviceLocalCredentialInfoable), nil\n}", "func updatePerson(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tfmt.Println(\"Update HIT\")\n\tparams := mux.Vars(r)\n\tstmt, err := db.Prepare(\"UPDATE Persons SET pAge = ? WHERE pName = ?\")\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tvar per Person\n\tjson.Unmarshal(body, &per)\n\tage := per.Age\n\t_, err = stmt.Exec(age, params[\"name\"])\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\tfmt.Fprintf(w, \"Person with Name = %s was updated\", params[\"name\"])\n}", "func DefaultApplyFieldMaskUserInfo(ctx context.Context, patchee *UserInfo, patcher *UserInfo, updateMask *field_mask.FieldMask, prefix string, db *gorm.DB) (*UserInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar err error\n\tvar updatedCreatedAt bool\n\tvar updatedUpdatedAt bool\n\tfor i, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UserId\" {\n\t\t\tpatchee.UserId = patcher.UserId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"LastName\" {\n\t\t\tpatchee.LastName = patcher.LastName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"FirstName\" {\n\t\t\tpatchee.FirstName = patcher.FirstName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Period\" {\n\t\t\tpatchee.Period = 
patcher.Period\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"DepartmentId\" {\n\t\t\tpatchee.DepartmentId = patcher.DepartmentId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"JobId\" {\n\t\t\tpatchee.JobId = patcher.JobId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"EnrollmentFlg\" {\n\t\t\tpatchee.EnrollmentFlg = patcher.EnrollmentFlg\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"AdminFlg\" {\n\t\t\tpatchee.AdminFlg = patcher.AdminFlg\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedCreatedAt && strings.HasPrefix(f, prefix+\"CreatedAt.\") {\n\t\t\tif patcher.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"CreatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.CreatedAt, patchee.CreatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tupdatedCreatedAt = true\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUpdatedAt && strings.HasPrefix(f, prefix+\"UpdatedAt.\") {\n\t\t\tif patcher.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UpdatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.UpdatedAt, patchee.UpdatedAt, childMask); err != nil {\n\t\t\t\treturn nil, 
nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tupdatedUpdatedAt = true\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func patchPi(w http.ResponseWriter, r *http.Request) {\n\t// Get pi name from request\n\tvars := mux.Vars(r)\n\tname := vars[\"piname\"]\n\n\t// Retrieve pi object from data store\n\tc := appengine.NewContext(r)\n\tq := datastore.NewQuery(piListKind).Filter(\"name =\", name)\n\tt := q.Run(c)\n\tvar pi Pi\n\t_, err := t.Next(&pi)\n\tif err == datastore.Done {\n\t\thttp.Error(w, \"404 Not found\", http.StatusNotFound)\n\t\treturn\n\t}\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Set Pi object property\n\tr.ParseForm()\n\n\t// Updating the name is not allowed\n\tformName := r.Form.Get(\"name\")\n\tif len(formName) != 0 {\n\t\thttp.Error(w, \"404 Not found\", http.StatusNotFound)\n\t\treturn\n\t}\n\tip := r.Form.Get(\"ip\")\n\tif len(ip) != 0 {\n\t\tpi.Ip = ip\n\t}\n\tlastSeen := r.Form.Get(\"lastSeen\")\n\tif len(lastSeen) != 0 {\n\t\tpi.LastSeen = lastSeen\n\t}\n\tpingCount := r.Form.Get(\"pingCount\")\n\tif len(pingCount) != 0 {\n\t\tpi.PingCount, _ = strconv.Atoi(r.Form.Get(\"pingCount\"))\n\t}\n\n\t//\tfmt.Fprint(w, \"name \", , \"\\n\")\n\tfmt.Fprint(w, \"pingCount \", r.Form.Get(\"pingCount\"), \" \", pi.PingCount, \"\\n\")\n\n\t// Store pi object in data store\n\t_, err = datastore.Put(c, datastore.NewKey(c, piListKind, name, 0, nil), &pi)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\thttp.Error(w, \"200 OK\", http.StatusOK)\n\treturn\n}", "func (p *Person) updateNormal(firstname string) {\n\t// This * get the content of pointer p\n\t// *p gets a variable sitting in RAM, that has a type Person\n\t// and update its firstname to a new named specified by the function parameter.\n\t(*p).firstname = firstname\n}", 
"func (t *IPDCChaincode) invoke_update_status_with_modification_check(stub shim.ChaincodeStubInterface, args []string, map_specification map[string]interface{}) pb.Response {\r\n\r\n\tfmt.Println(\"***********Entering invoke_update_status_with_modification_check***********\")\r\n\r\n\tif len(args) < 2 {\r\n\r\n\t\tfmt.Println(\"Error: Incorrect number of arguments\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error: Incorrect number of arguments\")\r\n\t}\r\n\r\n\tvar record_specification_input map[string]interface{}\r\n\r\n\tvar err error\r\n\r\n\terr = json.Unmarshal([]byte(args[0]), &record_specification_input)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error in format of record.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error in format of record.\")\r\n\t}\r\n\r\n\tadditional_json, ok := map_specification[\"additional_json\"]\r\n\r\n\tif ok {\r\n\r\n\t\tadditional_json_data, ok1 := additional_json.(map[string]interface{})\r\n\r\n\t\tif ok1 {\r\n\r\n\t\t\tfor spec, _ := range additional_json_data {\r\n\r\n\t\t\t\trecord_specification_input[spec] = additional_json_data[spec]\r\n\t\t\t}\r\n\t\t} else {\r\n\t\t\tfmt.Println(\"Error: Invalid additional JSON fields in specification\")\r\n\r\n\t\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\t\treturn shim.Error(\"Error: Invalid additional JSON fields in specification\")\r\n\t\t}\r\n\t}\r\n\r\n\tvar keys_map interface{}\r\n\r\n\tvar specs map[string]interface{}\r\n\r\n\tkeys_map, error_keys_map := t.get_keys_map(stub, record_specification_input)\r\n\r\n\tif error_keys_map != nil {\r\n\r\n\t\tfmt.Println(error_keys_map.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn 
shim.Error(error_keys_map.Error())\r\n\t}\r\n\r\n\tspecs, ok = keys_map.(map[string]interface{})\r\n\r\n\tif !ok {\r\n\r\n\t\tfmt.Println(\"Error: Invalid keys_map specification.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error: Invalid keys_map specification.\")\r\n\t}\r\n\r\n\tif specs[\"primary_key\"] == nil {\r\n\r\n\t\tfmt.Println(\"Error: There is no primary key specification.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error : There is no primary key specification.\")\r\n\t}\r\n\r\n\tvar pk_spec []interface{}\r\n\r\n\tpk_spec, ok = specs[\"primary_key\"].([]interface{})\r\n\r\n\tif !ok {\r\n\r\n\t\tfmt.Println(\"Error in Primary key specification.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error in Primary key specification.\")\r\n\t}\r\n\r\n\tkey, err_key := t.createInterfacePrimaryKey(record_specification_input, pk_spec)\r\n\r\n\tif err_key != nil {\r\n\r\n\t\tfmt.Println(err_key.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(err_key.Error())\r\n\r\n\t}\r\n\r\n\tvar valAsBytes []byte\r\n\r\n\tvalAsBytes, err = stub.GetState(key)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error: Failed to get state: \" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error: Failed to get state: \" + err.Error())\r\n\r\n\t} else if valAsBytes == nil {\r\n\r\n\t\tfmt.Println(\"Error: No value for primary key : \" + key)\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error: No value for key\")\r\n\r\n\t}\r\n\r\n\tvar 
record_specification map[string]interface{}\r\n\r\n\terr = json.Unmarshal([]byte(valAsBytes), &record_specification)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error in format of record\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error in format of record\")\r\n\r\n\t}\r\n\r\n\tvar check int\r\n\r\n\tcheck, err = t.Isfieldsmodified(record_specification_input, record_specification, map_specification)\r\n\r\n\tif check != 0 {\r\n\r\n\t\tfmt.Println(\"Status Update Failed due to error in modification check. \" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Status Update Failed due to error in modification check. \" + err.Error())\r\n\t}\r\n\r\n\terr_del := t.delete_composite_keys(stub, specs, record_specification, key)\r\n\r\n\tif err_del != nil {\r\n\r\n\t\tfmt.Println(\"Error in deleting composite keys\" + err_del.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error in deleting composite keys\" + err_del.Error())\r\n\r\n\t}\r\n\r\n\tvar to_be_updated_map map[string]interface{}\r\n\r\n\terr = json.Unmarshal([]byte(args[1]), &to_be_updated_map)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error in format of update map.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error in format of update map.\")\r\n\r\n\t}\r\n\r\n\tfor spec, spec_val := range to_be_updated_map {\r\n\r\n\t\tvar spec_val_string, spec_ok = spec_val.(string)\r\n\r\n\t\tif !spec_ok {\r\n\r\n\t\t\tfmt.Println(\"Error: Unable to parse value of status update\")\r\n\r\n\t\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\t\treturn shim.Error(\"Error: Unable to parse 
value of status update\")\r\n\r\n\t\t}\r\n\r\n\t\tvar val_check, val_err = t.updatestatusvaliditycheck(spec, spec_val_string, map_specification)\r\n\r\n\t\tif val_check != 0 {\r\n\r\n\t\t\tfmt.Println(val_err.Error())\r\n\r\n\t\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\t\treturn shim.Error(val_err.Error())\r\n\t\t}\r\n\r\n\t\trecord_specification[spec] = spec_val_string\r\n\t}\r\n\r\n\tvar concatenated_record_json []byte\r\n\r\n\tconcatenated_record_json, err = json.Marshal(record_specification)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error: Unable to Marshal Concatenated Record to JSON \" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error: Unable to Marshal Concatenated Record to JSON \" + err.Error())\r\n\t}\r\n\r\n\terr = stub.PutState(key, []byte(concatenated_record_json))\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error: Failed to put state : \" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error: Failed to put state : \" + err.Error())\r\n\t}\r\n\r\n\terr = t.create_composite_keys(stub, specs, record_specification, key)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error in creating composite keys\" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error in creating composite keys\" + err.Error())\r\n\t}\r\n\r\n\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\treturn shim.Success(nil)\r\n\r\n}", "func (m *TeamworkRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserTeamworkable, requestConfiguration 
*TeamworkRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserTeamworkable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateUserTeamworkFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserTeamworkable), nil\n}", "func (srv *UsersService) PatchHandler(ctx *gin.Context) {\n\tlogger := srv.logger.New(\"action\", \"PatchHandler\")\n\tuser := GetRequestedUser(ctx)\n\tif user == nil {\n\t\t// Returns a \"404 StatusNotFound\" response\n\t\tsrv.ResponseService.NotFound(ctx)\n\t\treturn\n\t}\n\n\t// Checks if the query entry is valid\n\tform := &validators.PatchUser{}\n\tif err := ctx.ShouldBindJSON(form); err != nil {\n\t\tsrv.ResponseService.ValidatorErrorResponse(ctx, responses.UnprocessableEntity, err)\n\t\treturn\n\t}\n\n\tcurrentUser := GetCurrentUser(ctx)\n\tif currentUser.UID == user.UID ||\n\t\tcurrentUser.RoleName == \"root\" ||\n\t\tcurrentUser.RoleName == \"admin\" {\n\n\t\tif form.FirstName != nil {\n\t\t\tuser.FirstName = *form.FirstName\n\t\t}\n\t\tif form.LastName != nil {\n\t\t\tuser.LastName = *form.LastName\n\t\t}\n\t\tif form.Nickname != nil {\n\t\t\tuser.Nickname = *form.Nickname\n\t\t}\n\n\t\trepo := srv.Repository.GetUsersRepository()\n\t\told, err := repo.FindByUID(user.UID)\n\t\tif err != nil 
{\n\t\t\tlogger.Error(\"cannot find user\", \"err\", err)\n\t\t\tsrv.ResponseService.NotFound(ctx)\n\t\t\treturn\n\t\t}\n\n\t\t_, err = repo.Update(user)\n\t\tif err != nil {\n\t\t\tlogger.Error(\"cannot update user\", \"err\", err)\n\t\t\tsrv.ResponseService.Error(ctx, responses.CanNotUpdateUser, \"Can't update a user\")\n\t\t\treturn\n\t\t}\n\n\t\tif currentUser.UID != user.UID &&\n\t\t\t(currentUser.RoleName == \"admin\" || currentUser.RoleName == \"root\") {\n\t\t\tsrv.SystemLogsService.LogModifyUserProfileAsync(old, user, currentUser.UID)\n\t\t}\n\t}\n\n\t// Returns a \"204 StatusNoContent\" response\n\tctx.JSON(http.StatusNoContent, nil)\n}", "func (s *Service) patchHalResource(ctx context.Context, resourceName, url string, r interface{}, pf patchFunction) ([]byte, *status.Status) {\n\n\tvar body []byte\n\tvar code int\n\tvar err error\n\n\tb := new(bytes.Buffer)\n\tjson.NewEncoder(b).Encode(r)\n\n\tbody, code, err = pf(ctx, url, b, \"application/json\")\n\tif err != nil {\n\t\tlog.WithFields(event.Fields{\n\t\t\t\"resourceName\": resourceName,\n\t\t\t\"code\": code,\n\t\t\t\"url\": url,\n\t\t}).Error(\"Can not patch HAL resource: \" + err.Error())\n\t\treturn []byte{}, status.NewStatus(body, code, \"Can not modify resource \"+resourceName)\n\t}\n\n\t// A PATCH request should return a value in range of [200,300[\n\tif code < http.StatusOK || code >= http.StatusMultipleChoices {\n\t\tlog.WithFields(event.Fields{\n\t\t\t\"resourceName\": resourceName,\n\t\t\t\"code\": code,\n\t\t\t\"url\": url,\n\t\t}).Error(\"Can not patch HAL resource\")\n\t\treturn []byte{}, status.NewStatus(body, code, \"Can not modify resource \"+resourceName)\n\t}\n\treturn body, nil\n}", "func (m *PrivilegedSignupStatusItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable, requestConfiguration 
*PrivilegedSignupStatusItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreatePrivilegedSignupStatusFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable), nil\n}", "func UpdateEmployee(c *gin.Context) {\r\n var employee model.Employee\r\n id := c.Params.ByName(\"id\")\r\n err := model.GetEmployeeByID(&employee, id)\r\n if err != nil {\r\n c.JSON(http.StatusNotFound, employee)\r\n }\r\n c.BindJSON(&employee)\r\n err = model.UpdateEmployee(&employee, id)\r\n if err != nil {\r\n c.AbortWithStatus(http.StatusNotFound)\r\n } else {\r\n c.JSON(http.StatusOK, employee)\r\n }\r\n}", "func (t *MedChain) updateHospitalToPatient(stub shim.ChaincodeStubInterface, args []string) peer.Response {\n\t\t// ==== Input sanitation ====\n\t\tfmt.Println(\"- start updateHospitalToPatient\")\n\n\t\t// check if all the args are send\n\t\tif len(args) != 5 {\n\t\t\treturn shim.Error(\"Incorrect number of arguments, Required 5 arguments\")\n\t\t}\n\n\t\t// check if the args are empty\n\t\tfor i := 0; i < len(args); i++ {\n\t\t\tif len(args[i]) <= 0 {\n\t\t\t\treturn shim.Error(\"argument \"+ string(i+1) + \" must be a non-empty 
string\")\n\t\t\t}\n\t\t}\n\n\t\tgetAssetAsBytes, errT := stub.GetState(args[0])\n\n\t\tif errT != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Error : Cannot find HospitalToPatient %s\" , errT))\n\t\t}\n\n\t\tif getAssetAsBytes == nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Cannot find asset HospitalToPatient with ID %s\" , args[0]))\n\t\t}\n\n\t\tvar obj = HospitalToPatient{}\n\n\t\tjson.Unmarshal(getAssetAsBytes, &obj)\n\t\tobj.PatientID = args[1]\n\t\tobj.PatientName = args[2]\n\t\tobj.HospitalID = args[3]\n\t\tobj.HospitalName = args[4]\n\t\tcomAssetAsBytes, errMarshal := json.Marshal(obj)\n\n\t\tif errMarshal != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Marshal Error: %s\", errMarshal))\n\t\t}\n\n\t\terrPut := stub.PutState(obj.HospitalToPatient_ID, comAssetAsBytes)\n\n\t\tif errPut != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Failed to update HospitalToPatient with ID %s\", args[0]))\n\t\t}\n\n\t\tfmt.Println(\"HospitalToPatient asset with ID %s was updated \\n %v\", args[0], obj)\n\n\t\treturn shim.Success(comAssetAsBytes)\n\t}", "func (c *Controller) patchPlatform(r *web.Request) (*web.Response, error) {\n\tplatformID := r.PathParams[reqPlatformID]\n\tctx := r.Context()\n\tlog.C(ctx).Debugf(\"Updating platform with id %s\", platformID)\n\n\tplatform, err := c.PlatformStorage.Get(ctx, platformID)\n\tif err != nil {\n\t\treturn nil, util.HandleStorageError(err, \"platform\")\n\t}\n\n\tcreatedAt := platform.CreatedAt\n\n\tif err := util.BytesToObject(r.Body, platform); err != nil {\n\t\treturn nil, err\n\t}\n\n\tplatform.ID = platformID\n\tplatform.CreatedAt = createdAt\n\tplatform.UpdatedAt = time.Now().UTC()\n\n\tif err := c.PlatformStorage.Update(ctx, platform); err != nil {\n\t\treturn nil, util.HandleStorageError(err, \"platform\")\n\t}\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn util.NewJSONResponse(http.StatusOK, platform)\n}", "func HandleUpdatePerson(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", 
\"application/json\")\n\tid := r.URL.Query().Get(\"id\")\n\tif id == \"\" {\n\t\thttp.Error(w, \"id parameter is not found\", http.StatusBadRequest)\n\t\treturn\n\t}\n\tfmt.Println(id)\n\treqBody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"Body Read Error : %v\", err), http.StatusInternalServerError)\n\t}\n\tvar body map[string]string\n\terr = json.Unmarshal(reqBody, &body)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"Request Body parse error : %v\", err), http.StatusBadRequest)\n\t\treturn\n\t}\n\tfmt.Printf(body[\"name\"])\n\tcols := \"\"\n\n\tfor key, val := range body {\n\t\tcols = cols + key + \"=\" + \"'\" + val + \"'\" + \",\"\n\t}\n\tf := cols[:len(cols)-1]\n\n\t_, err = Db.Exec(fmt.Sprintf(\"UPDATE people SET %s where people.id=?\", f), id)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"ERROR in deleting person %s\", err.Error()), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tvar person db.Person\n\tres, err := Db.Query(\"SELECT * FROM people where id=?\", id)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"ERROR in updating person %s\", err.Error()), http.StatusBadRequest)\n\t\treturn\n\t}\n\tdefer res.Close()\n\tfor res.Next() {\n\t\terr = res.Scan(&person.Id, &person.Name, &person.City, &person.ContactNo, &person.PhotoUrl)\n\t\tif err != nil {\n\t\t\thttp.Error(w, fmt.Sprintf(\"ERROR in updating person %s\", err.Error()), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t}\n\tjson.NewEncoder(w).Encode(person)\n}", "func (c *kuberhealthyChecks) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result KuberhealthyCheck, err error) {\n\tresult = KuberhealthyCheck{}\n\terr = c.client.Patch(pt).\n\t\tNamespace(c.ns).\n\t\tResource(\"khchecks\").\n\t\tSubResource(subresources...).\n\t\tName(name).\n\t\tBody(data).\n\t\tDo(context.TODO()).\n\t\tInto(&result)\n\treturn\n}", "func (m *DBMockedObject) Update(ctx context.Context, document entity.PersonalData) (int64, error) 
{\n\targs := m.Called(ctx, document)\n\treturn int64(args.Int(0)), args.Error(1)\n}", "func (client LabClient) PatchResourceResponder(resp *http.Response) (result Lab, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func Patch(path string, fn http.HandlerFunc, c ...alice.Constructor) {\n\trecord(\"PATCH\", path)\n\n\tinfoMutex.Lock()\n\tr.PATCH(path, Handler(alice.New(c...).ThenFunc(fn)))\n\tinfoMutex.Unlock()\n}", "func patchLocomotive(w http.ResponseWriter, r *http.Request) {\n\tvar document Locomotive\n\n\t//\tObtener la base de datos y colección a utilizar.-\n\tparams := mux.Vars(r)\n\tdatabaseName := params[\"databaseName\"]\n\tcollectionName := params[\"collectionName\"]\n\n\t//\tObtener el modelo a filtrar.-\n\tmodel := params[\"model\"]\n\n\t//\tConfigurar los filtros.-\n\tfilter := make(map[string]interface{})\n\tif model != \"\" {\n\t\tfilter[\"model\"] = model\n\t}\n\n\t//\tDecodificar el documento json recibido y dejarlo en la variable de tipo struct.-\n\t_ = json.NewDecoder(r.Body).Decode(&document)\n\n\t//\tConfigurar los updates.-\n\tupdate := make(map[string]interface{})\n\tif document.PowerType != \"\" {\n\t\tupdate[\"powertype\"] = document.PowerType\n\t}\n\tif document.Builder != \"\" {\n\t\tupdate[\"builder\"] = document.Builder\n\t}\n\tif document.BuildDate != \"\" {\n\t\tupdate[\"builddate\"] = document.BuildDate\n\t}\n\tif document.WheelSystem != \"\" {\n\t\tupdate[\"wheelsystem\"] = document.WheelSystem\n\t}\n\tif document.MaximunSpeed > 0 {\n\t\tupdate[\"maximunspeed\"] = document.MaximunSpeed\n\t}\n\tif document.PowerOutputHP > 0 {\n\t\tupdate[\"poweroutputhp\"] = document.PowerOutputHP\n\t}\n\n\t//\tActualizar la locomotora.-\n\tupdatedCount, err := UpdateDocument(databaseName, collectionName, filter, update)\n\tif 
err != nil {\n\t\thttputility.GetJsonResponseMessage(w, \"patchLocomotive: \"+err.Error())\n\t} else {\n\t\tif updatedCount == 0 {\n\t\t\thttputility.GetJsonResponseMessage(w, \"patchLocomotive: No se encontró ningun documento a actualizar en la Base de Datos (MongoDB).\")\n\t\t} else {\n\t\t\tif updatedCount == 1 {\n\t\t\t\thttputility.GetJsonResponseMessage(w, \"patchLocomotive: Se actualizó correctamente el documento de la Base de Datos (MongoDB).\")\n\t\t\t} else {\n\t\t\t\thttputility.GetJsonResponseMessage(w, \"patchLocomotive: Se actualizaron correctamente \"+string(updatedCount)+\" documentos de la Base de Datos (MongoDB).\")\n\t\t\t}\n\t\t}\n\t}\n}", "func (me *PROTECTIONJOBS_IMPL) UpdateProtectionJob (\r\n body *models.ProtectionJobRequest,\r\n id int64) (*models.ProtectionJob, error) {\r\n//validating required parameters\r\n if (body == nil){\r\n return nil,errors.New(\"The parameter 'body' is a required parameter and cannot be nil.\")\r\n} //the endpoint path uri\r\n _pathUrl := \"/public/protectionJobs/{id}\"\r\n\r\n //variable to hold errors\r\n var err error = nil\r\n //process optional template parameters\r\n _pathUrl, err = apihelper.AppendUrlWithTemplateParameters(_pathUrl, map[string]interface{} {\r\n \"id\" : id,\r\n })\r\n if err != nil {\r\n //error in template param handling\r\n return nil, err\r\n }\r\n\r\n //the base uri for api requests\r\n _queryBuilder := configuration.GetBaseURI(configuration.DEFAULT_HOST,me.config);\r\n\r\n //prepare query string for API call\r\n _queryBuilder = _queryBuilder + _pathUrl\r\n\r\n //validate and preprocess url\r\n _queryBuilder, err = apihelper.CleanUrl(_queryBuilder)\r\n if err != nil {\r\n //error in url validation or cleaning\r\n return nil, err\r\n }\r\n if me.config.AccessToken() == nil {\r\n return nil, errors.New(\"Access Token not set. 
Please authorize the client using client.Authorize()\");\r\n }\r\n //prepare headers for the outgoing request\r\n headers := map[string]interface{} {\r\n \"user-agent\" : \"cohesity-Go-sdk-6.2.0\",\r\n \"accept\" : \"application/json\",\r\n \"content-type\" : \"application/json; charset=utf-8\",\r\n \"Authorization\" : fmt.Sprintf(\"%s %s\",*me.config.AccessToken().TokenType, *me.config.AccessToken().AccessToken),\r\n }\r\n\r\n //prepare API request\r\n _request := unirest.Put(_queryBuilder, headers, body)\r\n //and invoke the API call request to fetch the response\r\n _response, err := unirest.AsString(_request,me.config.SkipSSL());\r\n if err != nil {\r\n //error in API invocation\r\n return nil, err\r\n }\r\n\r\n //error handling using HTTP status codes\r\n if (_response.Code == 0) {\r\n err = apihelper.NewAPIError(\"Error\", _response.Code, _response.RawBody)\r\n } else if (_response.Code < 200) || (_response.Code > 206) { //[200,206] = HTTP OK\r\n err = apihelper.NewAPIError(\"HTTP Response Not OK\", _response.Code, _response.RawBody)\r\n }\r\n if(err != nil) {\r\n //error detected in status code validation\r\n return nil, err\r\n }\r\n\r\n //returning the response\r\n var retVal *models.ProtectionJob = &models.ProtectionJob{}\r\n err = json.Unmarshal(_response.RawBody, &retVal)\r\n\r\n if err != nil {\r\n //error in parsing\r\n return nil, err\r\n }\r\n return retVal, nil\r\n\r\n}", "func (a *HyperflexApiService) PatchHyperflexNodeProfile(ctx context.Context, moid string) ApiPatchHyperflexNodeProfileRequest {\n\treturn ApiPatchHyperflexNodeProfileRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func Patch() echo.HandlerFunc {\n\treturn func(context echo.Context) error {\n\t\tuserID := context.Param(\"userID\")\n\t\tsession := context.Get(\"session\").(*session.Session)\n\t\tif session == nil || (session.UserID != userID && !session.IsAdmin) {\n\t\t\tlog.Printf(\"ERROR: unauthorized attempt to modify account %s by user with session 
%+v\", userID, session)\n\t\t\treturn context.JSON(http.StatusUnauthorized, \"\")\n\t\t}\n\n\t\tuser, err := FindByID(userID)\n\n\t\tif err != nil || user == nil {\n\t\t\treturn context.JSON(http.StatusInternalServerError, errors.New(\"Cannot load user with ID %s\"))\n\t\t}\n\n\t\tpreviousIsAdmin := user.IsAdmin\n\n\t\terr = context.Bind(&user)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Cannot bind user %v\", err)\n\t\t\treturn context.JSON(http.StatusBadRequest, errors.New(\"Cannot decode request body\"))\n\t\t}\n\t\tif user.IsAdmin != previousIsAdmin && !session.IsAdmin {\n\t\t\tlog.Printf(\"ERROR: unauthorized attempt to give admin rights to account %s by user with session %+v\", user.Email, session)\n\t\t\treturn context.JSON(http.StatusUnauthorized, \"\")\n\t\t}\n\n\t\tsaveErr := Save(user)\n\t\tif saveErr != nil {\n\t\t\tlog.Printf(\"Cannot update user %v\", user.ID)\n\t\t\treturn context.JSON(http.StatusInternalServerError, errors.New(\"Cannot update user \"+user.ID))\n\t\t}\n\t\tuser.Hash = \"\" // never leak the hash\n\t\treturn context.JSON(http.StatusOK, user)\n\t}\n}", "func (m *TeamworkSoftwareUpdateHealth) SetAdditionalData(value map[string]any)() {\n err := m.GetBackingStore().Set(\"additionalData\", value)\n if err != nil {\n panic(err)\n }\n}", "func (m *ItemOnlineMeetingsItemRegistrationRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationable, requestConfiguration *ItemOnlineMeetingsItemRegistrationRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": 
i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateMeetingRegistrationFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationable), nil\n}", "func updatePerson(w http.ResponseWriter, r *http.Request) {\r\n\tw.Header().Set(\"Content-Type\", \"application/json\")\r\n\tparams := mux.Vars(r)\r\n\tuuid, err := primitive.ObjectIDFromHex(params[\"uuid\"])\r\n\tif err != nil {\r\n\t\tlog.Fatal(err)\r\n\t}\r\n\r\n\tcollection := models.ConnectDB()\r\n\r\n\tvar oldPerson models.Person\r\n\terr = collection.FindOne(context.TODO(), bson.M{\"_id\": uuid}).Decode(&oldPerson)\r\n\tif err != nil {\r\n\t\thttp.Error(w, http.StatusText(500), 500)\r\n\t\treturn\r\n\t}\r\n\tvar person models.Person\r\n\t_ = json.NewDecoder(r.Body).Decode(&person)\r\n\r\n\tSurvived := r.FormValue(\"survived\")\r\n\tPassengerClass := r.FormValue(\"passengerClass \")\r\n\tName := r.FormValue(\"name\")\r\n\tSex := r.FormValue(\"sex\")\r\n\tAge := r.FormValue(\"age\")\r\n\tSiblingsOrSpousesAboard := r.FormValue(\"siblingsOrSpousesAboard\")\r\n\tParentsOrChildrenAboard := r.FormValue(\"parentsOrChildrenAboard\")\r\n\tFare := r.FormValue(\"fare\")\r\n\r\n\tif Survived == \" \" {\r\n\t\tperson.Survived = oldPerson.Survived\r\n\t}\r\n\r\n\tif PassengerClass == \" \" {\r\n\t\tperson.PassengerClass = oldPerson.PassengerClass\r\n\t}\r\n\r\n\tif Name == \"\" {\r\n\t\tperson.Name = oldPerson.Name\r\n\t}\r\n\r\n\tif Sex == \" \" {\r\n\t\tperson.Sex = oldPerson.Sex\r\n\t}\r\n\r\n\tif Age == \" \" {\r\n\t\tperson.Age = 
oldPerson.Age\r\n\t}\r\n\r\n\tif SiblingsOrSpousesAboard == \" \" {\r\n\t\tperson.SiblingsOrSpousesAboard = oldPerson.SiblingsOrSpousesAboard\r\n\t}\r\n\r\n\tif ParentsOrChildrenAboard == \" \" {\r\n\t\tperson.ParentsOrChildrenAboard = oldPerson.ParentsOrChildrenAboard\r\n\t}\r\n\r\n\tif Fare == \" \" {\r\n\t\tperson.Fare = oldPerson.Fare\r\n\t}\r\n\r\n\tobjectDataToUpdate := bson.M{\r\n\t\t\"$set\": bson.M{\r\n\t\t\"survived\": person.Survived,\r\n\t\t\"passengerClass\": person.PassengerClass,\r\n\t\t\"name\": person.Name,\r\n\t\t\"sex\": person.Sex,\r\n\t\t\"age\": person.Age,\r\n\t\t\"siblingsOrSpousesAboard\": person.SiblingsOrSpousesAboard,\r\n\t\t\"parentsOrChildrenAboard\": person.ParentsOrChildrenAboard,\r\n\t\t\"fare\": person.Fare,\t\t\r\n\t\t},\r\n\t}\r\n\r\n\tobjectToUpdate, err := collection.UpdateOne(context.TODO(), bson.M{\"_id\": uuid}, objectDataToUpdate)\r\n\tif err != nil {\r\n\t\thttp.Error(w, http.StatusText(500), 500)\r\n\t\treturn\r\n\t}\r\n\tjson.NewEncoder(w).Encode(objectToUpdate.ModifiedCount)\r\n}", "func (r *CompanyInformationRequest) Update(ctx context.Context, reqObj *CompanyInformation) error {\n\treturn r.JSONRequest(ctx, \"PATCH\", \"\", reqObj, nil)\n}", "func (m *HealthMenstruationPersonalInfoORM) ToPB(ctx context.Context) (HealthMenstruationPersonalInfo, error) {\n\tto := HealthMenstruationPersonalInfo{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tto.PeriodLengthInDays = m.PeriodLengthInDays\n\tto.CycleLengthInDays = m.CycleLengthInDays\n\tif posthook, ok 
:= interface{}(m).(HealthMenstruationPersonalInfoWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (c *Client) ModifySubAppIdInfo(request *ModifySubAppIdInfoRequest) (response *ModifySubAppIdInfoResponse, err error) {\n if request == nil {\n request = NewModifySubAppIdInfoRequest()\n }\n response = NewModifySubAppIdInfoResponse()\n err = c.Send(request, response)\n return\n}", "func (a *HyperflexApiService) PatchHyperflexHealthCheckDefinition(ctx context.Context, moid string) ApiPatchHyperflexHealthCheckDefinitionRequest {\n\treturn ApiPatchHyperflexHealthCheckDefinitionRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (m *Monocular) UpdateMetadata(info *interfaces.Info, userGUID string, echoContext echo.Context) {\n}", "func updateVolunteer(c *gin.Context) {\n\n\tvar vol Volunteer\n\temail := c.Params.ByName(\"email\")\n\n\t//Checks json data\n\tif err := c.BindJSON(&vol); err != nil {\n\t\tcreateBadRequestResponse(c, err)\n\t\treturn\n\t}\n\n\t//Gets volunteer from database\n\tvar oldvol Volunteer\n\tif err := db.Where(\"email = ?\", email).First(&oldvol).Error; err != nil {\n\t\tcreateNotFoundResponse(c)\n\t\treturn\n\t}\n\t//Sets data which could not be changed (I am still not sure if url path have to be with :email )\n\tvol.ID = oldvol.ID\n\tvol.Email = oldvol.Email\n\tif vol.Password == \"\" {\n\t\tvol.Password = oldvol.Password\n\t}\n\n\t//Checks if data belongs to the user\n\tif !volunteerAuth(c, &vol) {\n\t\treturn\n\t}\n\n\t//Saves Volunteer to the database\n\tif err := db.Save(&vol).Error; err != nil {\n\t\tcreateStatusConflictResponse(c)\n\t\treturn\n\t}\n\t//change password in auth map\n\tauthMap[vol.Email] = vol.Password\n\tc.JSON(200, vol)\n\n}", "func (me *CHARGES_IMPL) UpdateChargeMetadata (\r\n chargeId string,\r\n body *models_pkg.ChargesMetadataRequest,\r\n idempotencyKey *string) (*models_pkg.ChargesMetadataResponse, error) {\r\n //the endpoint path uri\r\n _pathUrl := 
\"/Charges/{charge_id}/metadata\"\r\n\r\n //variable to hold errors\r\n var err error = nil\r\n //process optional template parameters\r\n _pathUrl, err = apihelper_pkg.AppendUrlWithTemplateParameters(_pathUrl, map[string]interface{} {\r\n \"charge_id\" : chargeId,\r\n })\r\n if err != nil {\r\n //error in template param handling\r\n return nil, err\r\n }\r\n\r\n //the base uri for api requests\r\n _queryBuilder := configuration_pkg.BASEURI;\r\n\r\n //prepare query string for API call\r\n _queryBuilder = _queryBuilder + _pathUrl\r\n\r\n //validate and preprocess url\r\n _queryBuilder, err = apihelper_pkg.CleanUrl(_queryBuilder)\r\n if err != nil {\r\n //error in url validation or cleaning\r\n return nil, err\r\n }\r\n //prepare headers for the outgoing request\r\n headers := map[string]interface{} {\r\n \"user-agent\" : \"MundiSDK - Go 2.4.5\",\r\n \"accept\" : \"application/json\",\r\n \"content-type\" : \"application/json; charset=utf-8\",\r\n \"Content-Type\" : \"application/json\",\r\n \"idempotency-key\" : apihelper_pkg.ToString(idempotencyKey, \"\"),\r\n }\r\n\r\n //prepare API request\r\n _request := unirest.PatchWithAuth(_queryBuilder, headers, body, me.config.BasicAuthUserName(), me.config.BasicAuthPassword())\r\n //and invoke the API call request to fetch the response\r\n _response, err := unirest.AsString(_request,false);\r\n if err != nil {\r\n //error in API invocation\r\n return nil, err\r\n }\r\n\r\n //error handling using HTTP status codes\r\n if (_response.Code == 400) {\r\n err = apihelper_pkg.NewAPIError(\"Invalid request\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 401) {\r\n err = apihelper_pkg.NewAPIError(\"Invalid API key\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 404) {\r\n err = apihelper_pkg.NewAPIError(\"An informed resource was not found\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 412) {\r\n err = apihelper_pkg.NewAPIError(\"Business validation error\", 
_response.Code, _response.RawBody)\r\n } else if (_response.Code == 422) {\r\n err = apihelper_pkg.NewAPIError(\"Contract validation error\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 500) {\r\n err = apihelper_pkg.NewAPIError(\"Internal server error\", _response.Code, _response.RawBody)\r\n } else if (_response.Code < 200) || (_response.Code > 206) { //[200,206] = HTTP OK\r\n err = apihelper_pkg.NewAPIError(\"HTTP Response Not OK\", _response.Code, _response.RawBody)\r\n }\r\n if(err != nil) {\r\n //error detected in status code validation\r\n return nil, err\r\n }\r\n\r\n //returning the response\r\n var retVal *models_pkg.ChargesMetadataResponse = &models_pkg.ChargesMetadataResponse{}\r\n err = json.Unmarshal(_response.RawBody, &retVal)\r\n\r\n if err != nil {\r\n //error in parsing\r\n return nil, err\r\n }\r\n return retVal, nil\r\n\r\n}", "func (m *FeatureRolloutPolicyItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.FeatureRolloutPolicyable, requestConfiguration *FeatureRolloutPolicyItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.FeatureRolloutPolicyable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateFeatureRolloutPolicyFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return 
res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.FeatureRolloutPolicyable), nil\n}", "func handlePatchRequest(w http.ResponseWriter, e *models.Endpoint, r *http.Request, entity entities.Entity, h *func() (interface{}, error)) {\n\tw.Header().Add(\"Access-Control-Allow-Origin\", \"*\")\n\tif !checkContentType(w, r) {\n\t\treturn\n\t}\n\n\tbyteData, _ := ioutil.ReadAll(r.Body)\n\terr := entity.ParseEntity(byteData)\n\tif err != nil {\n\t\tsendError(w, []error{err})\n\t\treturn\n\t}\n\n\thandle := *h\n\tdata, err2 := handle()\n\tif err2 != nil {\n\t\tsendError(w, []error{err2})\n\t\treturn\n\t}\n\n\tw.Header().Add(\"Location\", entity.GetSelfLink())\n\n\tsendJSONResponse(w, http.StatusOK, data, nil)\n}", "func Patch(path string, fn http.HandlerFunc, c ...alice.Constructor) {\n\tinfoMutex.Lock()\n\trecord(\"PATCH\", path)\n\tr.Patch(path, alice.New(c...).ThenFunc(fn).(http.HandlerFunc))\n\tinfoMutex.Unlock()\n}", "func (me *INVOICES_IMPL) UpdateInvoiceMetadata (\r\n invoiceId string,\r\n body *models_pkg.InvoicesMetadataRequest,\r\n idempotencyKey *string) (*models_pkg.InvoicesMetadataResponse, error) {\r\n //the endpoint path uri\r\n _pathUrl := \"/invoices/{invoice_id}/metadata\"\r\n\r\n //variable to hold errors\r\n var err error = nil\r\n //process optional template parameters\r\n _pathUrl, err = apihelper_pkg.AppendUrlWithTemplateParameters(_pathUrl, map[string]interface{} {\r\n \"invoice_id\" : invoiceId,\r\n })\r\n if err != nil {\r\n //error in template param handling\r\n return nil, err\r\n }\r\n\r\n //the base uri for api requests\r\n _queryBuilder := configuration_pkg.BASEURI;\r\n\r\n //prepare query string for API call\r\n _queryBuilder = _queryBuilder + _pathUrl\r\n\r\n //validate and preprocess url\r\n _queryBuilder, err = apihelper_pkg.CleanUrl(_queryBuilder)\r\n if err != nil {\r\n //error in url validation or cleaning\r\n return nil, err\r\n }\r\n //prepare headers for the outgoing request\r\n headers := map[string]interface{} 
{\r\n \"user-agent\" : \"MundiSDK - Go 2.4.5\",\r\n \"accept\" : \"application/json\",\r\n \"content-type\" : \"application/json; charset=utf-8\",\r\n \"Content-Type\" : \"application/json\",\r\n \"idempotency-key\" : apihelper_pkg.ToString(idempotencyKey, \"\"),\r\n }\r\n\r\n //prepare API request\r\n _request := unirest.PatchWithAuth(_queryBuilder, headers, body, me.config.BasicAuthUserName(), me.config.BasicAuthPassword())\r\n //and invoke the API call request to fetch the response\r\n _response, err := unirest.AsString(_request,false);\r\n if err != nil {\r\n //error in API invocation\r\n return nil, err\r\n }\r\n\r\n //error handling using HTTP status codes\r\n if (_response.Code == 400) {\r\n err = apihelper_pkg.NewAPIError(\"Invalid request\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 401) {\r\n err = apihelper_pkg.NewAPIError(\"Invalid API key\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 404) {\r\n err = apihelper_pkg.NewAPIError(\"An informed resource was not found\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 412) {\r\n err = apihelper_pkg.NewAPIError(\"Business validation error\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 422) {\r\n err = apihelper_pkg.NewAPIError(\"Contract validation error\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 500) {\r\n err = apihelper_pkg.NewAPIError(\"Internal server error\", _response.Code, _response.RawBody)\r\n } else if (_response.Code < 200) || (_response.Code > 206) { //[200,206] = HTTP OK\r\n err = apihelper_pkg.NewAPIError(\"HTTP Response Not OK\", _response.Code, _response.RawBody)\r\n }\r\n if(err != nil) {\r\n //error detected in status code validation\r\n return nil, err\r\n }\r\n\r\n //returning the response\r\n var retVal *models_pkg.InvoicesMetadataResponse = &models_pkg.InvoicesMetadataResponse{}\r\n err = json.Unmarshal(_response.RawBody, &retVal)\r\n\r\n if err != nil {\r\n //error 
in parsing\r\n return nil, err\r\n }\r\n return retVal, nil\r\n\r\n}", "func patchPrincipal(ctx context.Context, tx *Tx, patch *api.PrincipalPatch) (*api.Principal, error) {\n\tset, args := []string{\"updater_id = ?\"}, []interface{}{patch.UpdaterId}\n\tif v := patch.Name; v != nil {\n\t\tset, args = append(set, \"name = ?\"), append(args, *v)\n\t}\n\tif v := patch.PasswordHash; v != nil {\n\t\tset, args = append(set, \"password_hash = ?\"), append(args, *v)\n\t}\n\n\targs = append(args, patch.ID)\n\n\t// Execute update query with RETURNING.\n\trow, err := tx.QueryContext(ctx, `\n\t\tUPDATE principal\n\t\tSET `+strings.Join(set, \", \")+`\n\t\tWHERE id = ?\n\t\tRETURNING id, creator_id, created_ts, updater_id, updated_ts, type, name, email, password_hash\n\t`,\n\t\targs...,\n\t)\n\tif err != nil {\n\t\treturn nil, FormatError(err)\n\t}\n\tdefer row.Close()\n\n\tif row.Next() {\n\t\tvar principal api.Principal\n\t\tif err := row.Scan(\n\t\t\t&principal.ID,\n\t\t\t&principal.CreatorId,\n\t\t\t&principal.CreatedTs,\n\t\t\t&principal.UpdaterId,\n\t\t\t&principal.UpdatedTs,\n\t\t\t&principal.Type,\n\t\t\t&principal.Name,\n\t\t\t&principal.Email,\n\t\t\t&principal.PasswordHash,\n\t\t); err != nil {\n\t\t\treturn nil, FormatError(err)\n\t\t}\n\n\t\treturn &principal, nil\n\t}\n\n\treturn nil, &common.Error{Code: common.NotFound, Err: fmt.Errorf(\"principal ID not found: %d\", patch.ID)}\n}", "func (r *CompaniesService) Patch(name string, company *Company) *CompaniesPatchCall {\n\tc := &CompaniesPatchCall{s: r.s, urlParams_: make(gensupport.URLParams)}\n\tc.name = name\n\tc.company = company\n\treturn c\n}", "func (m *WindowsFeatureUpdateProfilesWindowsFeatureUpdateProfileItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.WindowsFeatureUpdateProfileable, requestConfiguration 
*WindowsFeatureUpdateProfilesWindowsFeatureUpdateProfileItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.WindowsFeatureUpdateProfileable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateWindowsFeatureUpdateProfileFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.WindowsFeatureUpdateProfileable), nil\n}", "func Patch() int {\n\treturn patch\n}", "func (c *Client) patch(rawURL string, authenticate bool, expectedStatus int, in interface{}, out interface{}) error {\n\terr := c.do(rawURL, \"PATCH\", authenticate, expectedStatus, in, out)\n\treturn errio.Error(err)\n}", "func HelmReposModify(c *gin.Context) {\n\tlog := logger.WithFields(logrus.Fields{\"tag\": \"HelmReposModify\"})\n\tlog.Info(\"modify helm repository\")\n\n\tclusterName, ok := GetCommonClusterNameFromRequest(c)\n\tif ok != true {\n\t\treturn\n\t}\n\n\trepoName := c.Param(\"name\")\n\tlog.Debugln(\"repoName:\", repoName)\n\n\tvar newRepo *repo.Entry\n\terr := c.BindJSON(&newRepo)\n\tif err != nil {\n\t\tlog.Errorf(\"Error parsing request: %s\", err.Error())\n\t\tc.JSON(http.StatusBadRequest, htype.ErrorResponse{\n\t\t\tCode: http.StatusBadRequest,\n\t\t\tMessage: \"error parsing request\",\n\t\t\tError: 
err.Error(),\n\t\t})\n\t\treturn\n\t}\n\n\terrModify := helm.ReposModify(clusterName, repoName, newRepo)\n\tif errModify != nil {\n\t\tif errModify == helm.ErrRepoNotFound {\n\t\t\tc.JSON(http.StatusNotFound, htype.ErrorResponse{\n\t\t\t\tCode: http.StatusNotFound,\n\t\t\t\tError: errModify.Error(),\n\t\t\t\tMessage: \"repo not found\",\n\t\t\t})\n\t\t\treturn\n\n\t\t}\n\t\tlog.Error(\"Error during helm repo modified.\", errModify.Error())\n\t\tc.JSON(http.StatusBadRequest, htype.ErrorResponse{\n\t\t\tCode: http.StatusBadRequest,\n\t\t\tError: errModify.Error(),\n\t\t\tMessage: \"repo modification failed\",\n\t\t})\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, htype.StatusResponse{\n\t\tStatus: http.StatusOK,\n\t\tMessage: \"resource modified successfully\",\n\t\tName: repoName})\n\treturn\n}", "func (client JobClient) UpdateResponder(resp *http.Response) (result JobResourceDescription, err error) {\n err = autorest.Respond(\n resp,\n azure.WithErrorUnlessStatusCode(http.StatusOK,http.StatusCreated,http.StatusAccepted),\n autorest.ByUnmarshallingJSON(&result),\n autorest.ByClosing())\n result.Response = autorest.Response{Response: resp}\n return\n }", "func Modify(jsonIn []byte) ([]byte, error) {\n\tvar s modify\n\n\tif err := json.Unmarshal(jsonIn, &s); err != nil {\n\t\treturn jsonStatusError(err)\n\t}\n\n\tif err := validateAdmin(s.Name, s.Password); err != nil {\n\t\tlog.Printf(\"Error validating admin status of %s: %s\", s.Name, err)\n\t\treturn jsonStatusError(err)\n\t}\n\n\tif _, ok := passvault.GetRecord(s.ToModify); !ok {\n\t\treturn jsonStatusError(errors.New(\"Record to modify missing\"))\n\t}\n\n\tif s.Name == s.ToModify {\n\t\treturn jsonStatusError(errors.New(\"Cannot modify own record\"))\n\t}\n\n\tvar err error\n\tswitch s.Command {\n\tcase \"delete\":\n\t\terr = passvault.DeleteRecord(s.ToModify)\n\tcase \"revoke\":\n\t\terr = passvault.RevokeRecord(s.ToModify)\n\tcase \"admin\":\n\t\terr = passvault.MakeAdmin(s.ToModify)\n\tdefault:\n\t\treturn 
jsonStatusError(errors.New(\"Unknown command\"))\n\t}\n\n\tif err != nil {\n\t\treturn jsonStatusError(err)\n\t} else {\n\t\treturn jsonStatusOk()\n\t}\n}", "func patch(newObj runtime.Object, existingObj runtime.Object, c client.Client) error {\n\tnewObjJSON, _ := apijson.Marshal(newObj)\n\tkey, _ := client.ObjectKeyFromObject(newObj)\n\t_, isUnstructured := newObj.(runtime.Unstructured)\n\t_, isCRD := newObj.(*apiextv1beta1.CustomResourceDefinition)\n\n\tif isUnstructured || isCRD || isKudoType(newObj) {\n\t\t// strategic merge patch is not supported for these types, falling back to merge patch\n\t\terr := c.Patch(context.TODO(), newObj, client.ConstantPatch(types.MergePatchType, newObjJSON))\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to apply merge patch to object %s/%s: %w\", key.Name, key.Name, err)\n\t\t}\n\t} else {\n\t\terr := c.Patch(context.TODO(), existingObj, client.ConstantPatch(types.StrategicMergePatchType, newObjJSON))\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to apply StrategicMergePatch to object %s/%s: %w\", key.Namespace, key.Name, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (w *Worker) Patch(c *http.Client, url string, data interface{}, bind interface{}) (int, error) {\n\tbs, err := json.Marshal(data)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treq, err := http.NewRequest(\"PATCH\", url, bytes.NewReader(bs))\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\tres, err := c.Do(req)\n\tif err != nil {\n\t\tif res != nil {\n\t\t\tioutil.ReadAll(res.Body)\n\t\t\tres.Body.Close()\n\t\t}\n\t\treturn 0, err\n\t}\n\tdefer res.Body.Close()\n\terr = json.NewDecoder(res.Body).Decode(bind)\n\tif res.StatusCode == http.StatusNoContent || bind == nil {\n\t\treturn res.StatusCode, nil\n\t}\n\treturn res.StatusCode, err\n}", "func (s *Server) HandleUpdatePerson(c *gin.Context) {\n\tqueryTeamID := c.Param(\"team-id\")\n\tteamID, err := strconv.ParseInt(queryTeamID, 10, 64)\n\tif err != 
nil {\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tqueryPersonID := c.Param(\"person-id\")\n\tpersonID, err := strconv.ParseInt(queryPersonID, 10, 64)\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\texists, err := s.teamExists(c.Request.Context(), teamID)\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\tif !exists {\n\t\tc.AbortWithStatusJSON(http.StatusNotFound, gin.H{\"error\": \"Team not found.\"})\n\t\treturn\n\t}\n\n\tbinding := struct {\n\t\tFirstName string `json:\"first_name\"`\n\t\tLastName string `json:\"last_name\"`\n\t\tEmail string `json:\"email\"`\n\t\tTeamID int64 `json:\"team_id\"`\n\t}{}\n\terr = c.BindJSON(&binding)\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\t// Check if the person exists in the specified team\n\tgetPersonArgs := db.GetPersonParams{\n\t\tID: personID,\n\t\tTeamID: teamID,\n\t}\n\tperson, err := s.peopleService.GetPerson(c.Request.Context(), getPersonArgs)\n\tif err != nil {\n\t\tif errors.Is(err, sql.ErrNoRows) {\n\t\t\tc.AbortWithStatusJSON(http.StatusNotFound, gin.H{\n\t\t\t\t\"error\": \"Person not found in the specified team.\",\n\t\t\t})\n\t\t\treturn\n\t\t}\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tupdatePersonArgs := db.UpdatePersonParams{\n\t\tFirstName: binding.FirstName,\n\t\tLastName: binding.LastName,\n\t\tEmail: binding.Email,\n\t\tTeamID: binding.TeamID,\n\t\tID: personID,\n\t}\n\n\tperson, err = s.peopleService.UpdatePerson(c.Request.Context(), updatePersonArgs)\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, gin.H{\"data\": person})\n}", 
"func WrapUpdateMe(h Handler, w http.ResponseWriter, r *http.Request) {\n\tvar aUpdateUser Profile\n\n\tif r.Body == nil {\n\t\thttp.Error(w, \"Parameter 'update_user' expected in body, but got no body\", http.StatusBadRequest)\n\t\treturn\n\t}\n\t{\n\t\tvar err error\n\t\tr.Body = http.MaxBytesReader(w, r.Body, 1024*1024)\n\t\tbody, err := ioutil.ReadAll(r.Body)\n\t\tif err != nil {\n\t\t\thttp.Error(w, \"Body unreadable: \"+err.Error(), http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\n\t\terr = ValidateAgainstProfileSchema(body)\n\t\tif err != nil {\n\t\t\thttp.Error(w, \"Failed to validate against schema: \"+err.Error(), http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\n\t\terr = json.Unmarshal(body, &aUpdateUser)\n\t\tif err != nil {\n\t\t\thttp.Error(w, \"Error JSON-decoding body parameter 'update_user': \"+err.Error(),\n\t\t\t\thttp.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t}\n\n\th.UpdateMe(w,\n\t\tr,\n\t\taUpdateUser)\n}", "func (m *VirtualEndpointUserSettingsCloudPcUserSettingItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CloudPcUserSettingable, requestConfiguration *VirtualEndpointUserSettingsCloudPcUserSettingItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CloudPcUserSettingable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateCloudPcUserSettingFromDiscriminatorValue, 
errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CloudPcUserSettingable), nil\n}", "func (m *TeamTemplatesItemDefinitionsItemTeamDefinitionPhotoRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ProfilePhotoable, requestConfiguration *TeamTemplatesItemDefinitionsItemTeamDefinitionPhotoRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ProfilePhotoable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateProfilePhotoFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ProfilePhotoable), nil\n}", "func HandleModify(w http.ResponseWriter, r *http.Request) {\r\n\tdefer r.Body.Close()\r\n\tfmt.Println(\"In handleModify\")\r\n\tvar ab AddressBook\r\n\tif r.Method != http.MethodPost {\r\n\t\terr := fmt.Sprintf(\"Method %s not supported on this action %s\\n\", r.Method, r.URL.Path)\r\n\t\tfmt.Printf(\"%s\", err)\r\n\t\thttp.Error(w, err, http.StatusMethodNotAllowed)\r\n\t\treturn\r\n\t}\r\n\r\n\tif err := json.NewDecoder(r.Body).Decode(&ab); err != nil {\r\n\t\tmsg := fmt.Sprintf(\"Error %s while decoding json\\n\", 
err.Error())\r\n\t\tfmt.Printf(\"%s\", msg)\r\n\t\thttp.Error(w, msg, http.StatusBadRequest)\r\n\t\treturn\r\n\t}\r\n\r\n\tif ab.FirstName == \"\" {\r\n\t\tmsg := \"Name not provided as part of the query\\n\"\r\n\t\tfmt.Printf(\"%s\", msg)\r\n\t\thttp.Error(w, msg, http.StatusNotFound)\r\n\t\treturn\r\n\t}\r\n\r\n\tMutex.RLock()\r\n\tdefer Mutex.RUnlock()\r\n\tif len(AddrBook) == 0 {\r\n\t\tmsg := fmt.Sprintf(\"Address book is empty nothing to modify\\n\")\r\n\t\tfmt.Printf(\"%s\\n\", msg)\r\n\t\thttp.Error(w, msg, http.StatusNotFound)\r\n\t\treturn\r\n\t}\r\n\r\n\tif abTmp, ok := AddrBook[ab.FirstName]; !ok {\r\n\t\tmsg := fmt.Sprintf(\"%s not found in the Address book, nothing to modify\\n\", abTmp.FirstName)\r\n\t\tfmt.Printf(\"%s\", msg)\r\n\t\thttp.Error(w, msg, http.StatusNotFound)\r\n\t\treturn\r\n\t}\r\n\tabTmp, _ := AddrBook[ab.FirstName]\r\n\tif ab.LastName != \"\" && abTmp.LastName != ab.LastName {\r\n\t\tabTmp.LastName = ab.LastName\r\n\t}\r\n\tif ab.Email != \"\" && abTmp.Email != ab.Email {\r\n\t\tabTmp.Email = ab.Email\r\n\t}\r\n\tif ab.PhoneNumber != 0 && abTmp.PhoneNumber != ab.PhoneNumber {\r\n\t\tabTmp.PhoneNumber = ab.PhoneNumber\r\n\t}\r\n\tAddrBook[ab.FirstName] = abTmp\r\n\tmsg := fmt.Sprintf(\"Modified name %s present in the address book\\n\", ab.FirstName)\r\n\tfmt.Printf(\"%s\", msg)\r\n\thttp.Error(w, msg, http.StatusOK)\r\n}", "func (t *SimpleChaincode) pat_invoke(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tfmt.Println(\"########### Patient invoke ###########\")\n\n\tif len(args) < 2 {\n\t\treturn shim.Error(\"The number of arguments is insufficient.\")\n\t}\n\n\t// Changing details of Patient by Accepting Key and Value\n\n\tif args[1] == \"changePat\" && len(args) == 4 {\n\n\t\tpatAsBytes, _ := stub.GetState(args[2])\n\t\tpat := Pat{}\n\n\t\tjson.Unmarshal(patAsBytes, &pat)\n\t\tpat.Owner = args[3]\n\n\t\tpatAsBytes, _ = json.Marshal(pat)\n\t\tstub.PutState(args[2], patAsBytes)\n\n\t\t// Notify listeners that an 
event \"eventInvoke\" have been executed (check line 19 in the file invoke.go)\n\n\t\terr := stub.SetEvent(\"eventChangePat\", []byte{})\n\t\tif err != nil {\n\t\t\treturn shim.Error(err.Error())\n\t\t}\n\n\t\treturn shim.Success(nil)\n\t}\n\n\n\t //Updating all fields of record\n\n\tif args[1] == \"updateRecord\" && len(args) == 4 {\n\t\tfmt.Println(\"Update All\")\n\t\tvar newPat Pat\n\t\tjson.Unmarshal([]byte(args[3]), &newPat)\n\t\tvar pat = Pat{Name: newPat.Name, Id: newPat.Id, Quality: newPat.Quality, Owner: newPat.Owner}\n\t\tpatAsBytes, _ := json.Marshal(pat)\n\n\t\t// Updating Record\n\n\t\tstub.PutState(args[2], patAsBytes)\n\n\t\t// Notify listeners that an event \"eventInvoke\" have been executed (check line 19 in the file invoke.go)\n\n\t\terr := stub.SetEvent(\"eventUpdateRecords\", []byte{})\n\t\tif err != nil {\n\t\t\treturn shim.Error(err.Error())\n\t\t}\n\n\t\treturn shim.Success(nil)\n\t}\n\n\t// If the arguments given don’t match any function, we return an error\n\n\treturn shim.Error(\"Unknown invoke action, check the second argument.\")\n}", "func (client ModelClient) UpdateHierarchicalEntityResponder(resp *http.Response) (result OperationStatus, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (client ApplicationsClient) PatchResponder(resp *http.Response) (result autorest.Response, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent),\n\t\tautorest.ByClosing())\n\tresult.Response = resp\n\treturn\n}", "func (ctl Controller) Patch(ctx *gin.Context) {\n\n\tMethodNotAllowedJSON(ctx)\n}", "func (t *IPDCChaincode) invoke_update_status(stub shim.ChaincodeStubInterface, args []string, map_specification map[string]interface{}) pb.Response 
{\r\n\r\n\tfmt.Println(\"***********Entering invoke_update_status***********\")\r\n\r\n\tif len(args) < 2 {\r\n\r\n\t\tfmt.Println(\"Error: Incorrect number of arguments\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(\"Error: Incorrect number of arguments\")\r\n\t}\r\n\r\n\tvar record_specification map[string]interface{}\r\n\r\n\tvar err error\r\n\r\n\terr = json.Unmarshal([]byte(args[0]), &record_specification)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error in format of record.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(\"Error in format of record.\")\r\n\t}\r\n\r\n\tadditional_json, ok := map_specification[\"additional_json\"]\r\n\r\n\tif ok {\r\n\r\n\t\tadditional_json_data, ok1 := additional_json.(map[string]interface{})\r\n\r\n\t\tif ok1 {\r\n\r\n\t\t\tfor spec, _ := range additional_json_data {\r\n\r\n\t\t\t\trecord_specification[spec] = additional_json_data[spec]\r\n\t\t\t}\r\n\t\t} else {\r\n\t\t\tfmt.Println(\"Invalid additional JSON fields in specification\")\r\n\r\n\t\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\t\treturn shim.Error(\"Invalid additional JSON fields in specification\")\r\n\t\t}\r\n\t}\r\n\r\n\tvar keys_map interface{}\r\n\r\n\tvar specs map[string]interface{}\r\n\r\n\tkeys_map, error_keys_map := t.get_keys_map(stub, record_specification)\r\n\r\n\tif error_keys_map != nil {\r\n\r\n\t\tfmt.Println(error_keys_map.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(error_keys_map.Error())\r\n\t}\r\n\r\n\tspecs, ok = keys_map.(map[string]interface{})\r\n\r\n\tif !ok {\r\n\r\n\t\tfmt.Println(\"Invalid keys_map specification.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(\"Invalid keys_map specification.\")\r\n\t}\r\n\r\n\tif 
specs[\"primary_key\"] == nil {\r\n\r\n\t\tfmt.Println(\"There is no primary key specification.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(\"Error : There is no primary key specification.\")\r\n\t}\r\n\r\n\tvar pk_spec []interface{}\r\n\r\n\tpk_spec, ok = specs[\"primary_key\"].([]interface{})\r\n\r\n\tif !ok {\r\n\r\n\t\tfmt.Println(\"Error in Primary key specification.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(\"Error in Primary key specification.\")\r\n\t}\r\n\r\n\tkey, err_key := t.createInterfacePrimaryKey(record_specification, pk_spec)\r\n\r\n\tif err_key != nil {\r\n\r\n\t\tfmt.Println(err_key.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(err_key.Error())\r\n\r\n\t}\r\n\r\n\tvar valAsBytes []byte\r\n\r\n\tvalAsBytes, err = stub.GetState(key)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error: Failed to get state for primary key. \" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(\"Error: Failed to get state for primary key. 
\" + err.Error())\r\n\r\n\t} else if valAsBytes == nil {\r\n\r\n\t\tfmt.Println(\"Error: No value for key : \" + key)\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(\"Error: No value for primary key.\")\r\n\r\n\t}\r\n\r\n\terr = json.Unmarshal([]byte(valAsBytes), &record_specification)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error in format of Blockchain record\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(\"Error in format of Blockchain record\")\r\n\r\n\t}\r\n\r\n\terr_del := t.delete_composite_keys(stub, specs, record_specification, key)\r\n\r\n\tif err_del != nil {\r\n\r\n\t\tfmt.Println(\"Error while deleting composite keys: \" + err_del.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(\"Error while deleting composite keys: \" + err_del.Error())\r\n\r\n\t}\r\n\r\n\tvar to_be_updated_map map[string]interface{}\r\n\r\n\terr = json.Unmarshal([]byte(args[1]), &to_be_updated_map)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error in format of update map\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(\"Error in format of update map\")\r\n\r\n\t}\r\n\r\n\tfor spec, spec_val := range to_be_updated_map {\r\n\r\n\t\tvar spec_val_string, spec_ok = spec_val.(string)\r\n\r\n\t\tif !spec_ok {\r\n\r\n\t\t\tfmt.Println(\"Unable to parse value of status update\")\r\n\r\n\t\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\t\treturn shim.Error(\"Unable to parse value of status update\")\r\n\r\n\t\t}\r\n\r\n\t\tvar val_check, val_err = t.updatestatusvaliditycheck(spec, spec_val_string, map_specification)\r\n\r\n\t\tif val_check != 0 {\r\n\r\n\t\t\tfmt.Println(val_err.Error())\r\n\r\n\t\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\t\treturn 
shim.Error(val_err.Error())\r\n\t\t}\r\n\r\n\t\trecord_specification[spec] = spec_val_string\r\n\t}\r\n\r\n\tvar concatenated_record_json []byte\r\n\r\n\tconcatenated_record_json, err = json.Marshal(record_specification)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error: Unable to Marshal Concatenated Record to JSON \" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(\"Error: Unable to Marshal Concatenated Record to JSON \" + err.Error())\r\n\t}\r\n\r\n\terr = stub.PutState(key, []byte(concatenated_record_json))\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Failed to put state : \" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(\"Failed to put state : \" + err.Error())\r\n\t}\r\n\r\n\terr = t.create_composite_keys(stub, specs, record_specification, key)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Received error while creating composite keys\" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\t\treturn shim.Error(\"Received error while creating composite keys\" + err.Error())\r\n\t}\r\n\r\n\tfmt.Println(\"***********Exiting invoke_update_status***********\")\r\n\r\n\treturn shim.Success(nil)\r\n\r\n}", "func (m *RiskyUsersItemHistoryRiskyUserHistoryItemItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RiskyUserHistoryItemable, requestConfiguration *RiskyUsersItemHistoryRiskyUserHistoryItemItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RiskyUserHistoryItemable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": 
ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateRiskyUserHistoryItemFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RiskyUserHistoryItemable), nil\n}", "func TestPatchUserService (t *testing.T){\n\terr := PatchUserService(user_01.SocialNumber, mongoDB.User{Name:new_name_user_01})\n\tassert.Equal(t, 200, err.HTTPStatus)\n}", "func (client HTTPSuccessClient) Patch200(booleanValue *bool) (result autorest.Response, err error) {\n req, err := client.Patch200Preparer(booleanValue)\n if err != nil {\n return result, autorest.NewErrorWithError(err, \"httpinfrastructuregroup.HTTPSuccessClient\", \"Patch200\", nil , \"Failure preparing request\")\n }\n\n resp, err := client.Patch200Sender(req)\n if err != nil {\n result.Response = resp\n return result, autorest.NewErrorWithError(err, \"httpinfrastructuregroup.HTTPSuccessClient\", \"Patch200\", resp, \"Failure sending request\")\n }\n\n result, err = client.Patch200Responder(resp)\n if err != nil {\n err = autorest.NewErrorWithError(err, \"httpinfrastructuregroup.HTTPSuccessClient\", \"Patch200\", resp, \"Failure responding to request\")\n }\n\n return\n}", "func (s *Service) UpdatePersonInformation(c context.Context, personID ulid.ULID, newInfo *Person) (*Person, error) {\n\treturn &Person{}, nil\n}", "func (a *HyperflexApiService) PatchHyperflexClusterProfileExecute(r ApiPatchHyperflexClusterProfileRequest) (*HyperflexClusterProfile, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles 
[]formFile\n\t\tlocalVarReturnValue *HyperflexClusterProfile\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexClusterProfile\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/ClusterProfiles/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexClusterProfile == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexClusterProfile is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexClusterProfile\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn 
localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = 
v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func DefaultPatchProfile(ctx context.Context, in *Profile, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*Profile, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj Profile\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(ProfileWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadProfile(ctx, &Profile{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(ProfileWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskProfile(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(ProfileWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateProfile(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := 
interface{}(pbResponse).(ProfileWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func UpdateConfession(w http.ResponseWriter, r *http.Request) {\n\tfmt.Fprintf(w, \"user updated\")\n}", "func (h *Handler) updateDeveloperAttributes(c *gin.Context) handlerResponse {\n\n\tvar receivedAttributes struct {\n\t\tAttributes types.Attributes `json:\"attribute\"`\n\t}\n\tif err := c.ShouldBindJSON(&receivedAttributes); err != nil {\n\t\treturn handleBadRequest(err)\n\t}\n\tif err := h.service.Developer.UpdateAttributes(c.Param(developerParameter),\n\t\treceivedAttributes.Attributes, h.who(c)); err != nil {\n\t\treturn handleError(err)\n\t}\n\treturn handleOKAttributes(receivedAttributes.Attributes)\n}", "func (m *EntitlementManagementRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable, requestConfiguration *EntitlementManagementRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateRbacApplicationFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return 
res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable), nil\n}", "func (m *OnlineMeetingsItemRegistrationCustomQuestionsMeetingRegistrationQuestionItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationQuestionable, requestConfiguration *OnlineMeetingsItemRegistrationCustomQuestionsMeetingRegistrationQuestionItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationQuestionable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateMeetingRegistrationQuestionFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationQuestionable), nil\n}", "func updateHandler(w http.ResponseWriter, r *http.Request) {\n\t////\n\t// handle only pull request\n\t//\n\tif r.Header.Get(\"X-Github-Event\") != \"pull_request\" {\n\t\treturn\n\t}\n\n\tvar pr pullRequestModel\n\tif err := json.NewDecoder(r.Body).Decode(&pr); err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\tif pr.Action != \"opened\" {\n\t\treturn\n\t}\n\n\t//\n\t////\n\n\t////\n\t// check if the pr just opened has step.yml in it\n\t//\n\texists, err := isPRHasStepYML(fmt.Sprintf(\"%d\", pr.Number))\n\tif err != nil 
{\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\tif !exists {\n\t\treturn\n\t}\n\n\t//\n\t////\n\tif strings.Contains(pr.PullRequest.Body, fmt.Sprintf(\"https://%s/tag?pr=%d\", hostBaseURL, pr.Number)) {\n\t\treturn\n\t}\n\n\t////\n\t// updating the PR's initial comment section: append badge as first element\n\t//\n\n\tapiURL := fmt.Sprintf(\"https://api.github.com/repos/bitrise-io/bitrise-steplib/pulls/%d\", pr.Number)\n\tbadgeContent := fmt.Sprintf(\"![TagCheck](https://%s/tag?pr=%d)\\r\\n\\r\\n\", hostBaseURL, pr.Number)\n\tnewBody := map[string]interface{}{\n\t\t\"body\": badgeContent + pr.PullRequest.Body,\n\t}\n\n\t// convert new body message to json\n\tb, err := json.Marshal(newBody)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\t// call authenticated PATCH request\n\tc := http.Client{}\n\treq, err := http.NewRequest(\"PATCH\", apiURL, bytes.NewReader(b))\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\treq.SetBasicAuth(os.Getenv(\"GITHUB_USER\"), os.Getenv(\"GITHUB_ACCESS_TOKEN\"))\n\t_, err = c.Do(req)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\t//\n\t////\n}", "func (m *TeamTemplatesItemDefinitionsItemTeamDefinitionPermissionGrantsResourceSpecificPermissionGrantItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ResourceSpecificPermissionGrantable, requestConfiguration *TeamTemplatesItemDefinitionsItemTeamDefinitionPermissionGrantsResourceSpecificPermissionGrantItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ResourceSpecificPermissionGrantable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": 
i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateResourceSpecificPermissionGrantFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ResourceSpecificPermissionGrantable), nil\n}", "func patchAPIUserHandler(w http.ResponseWriter, r *http.Request, _ map[string]string) {\n\tuserName := sessionHandler.GetUserName(r)\n\tuserID, err := getUserID(userName)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tdecoder := json.NewDecoder(r.Body)\n\tvar json JSONUser\n\terr = decoder.Decode(&json)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\t// Make sure user id is over 0\n\tif json.ID < 1 {\n\t\thttp.Error(w, \"Wrong user id.\", http.StatusInternalServerError)\n\t\treturn\n\t} else if userID != json.ID { // Make sure the authenticated user is only changing his/her own data. 
TODO: Make sure the user is admin when multiple users have been introduced\n\t\thttp.Error(w, \"You don't have permission to change this data.\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\t// Get old user data to compare\n\ttempUser, err := database.RetrieveUser(json.ID)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\t// Make sure user email is provided\n\tif json.Email == \"\" {\n\t\tjson.Email = string(tempUser.Email)\n\t}\n\t// Make sure user name is provided\n\tif json.Name == \"\" {\n\t\tjson.Name = string(tempUser.Name)\n\t}\n\t// Make sure user slug is provided\n\tif json.Slug == \"\" {\n\t\tjson.Slug = tempUser.Slug\n\t}\n\t// Check if new name is already taken\n\tif json.Name != string(tempUser.Name) {\n\t\t_, err = database.RetrieveUserByName([]byte(json.Name))\n\t\tif err == nil {\n\t\t\t// The new user name is already taken. Assign the old name.\n\t\t\t// TODO: Return error that will be displayed in the admin interface.\n\t\t\tjson.Name = string(tempUser.Name)\n\t\t}\n\t}\n\t// Check if new slug is already taken\n\tif json.Slug != tempUser.Slug {\n\t\t_, err = database.RetrieveUserBySlug(json.Slug)\n\t\tif err == nil {\n\t\t\t// The new user slug is already taken. 
Assign the old slug.\n\t\t\t// TODO: Return error that will be displayed in the admin interface.\n\t\t\tjson.Slug = tempUser.Slug\n\t\t}\n\t}\n\tuser := structure.User{ID: json.ID, Name: []byte(json.Name), Slug: json.Slug, Email: []byte(json.Email), Image: []byte(json.Image), Cover: []byte(json.Cover), Bio: []byte(json.Bio), Website: []byte(json.Website), Location: []byte(json.Location)}\n\terr = methods.UpdateUser(&user, userID)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tif json.Password != \"\" && (json.Password == json.PasswordRepeated) { // Update password if a new one was submitted\n\t\tencryptedPassword, err := authentication.EncryptPassword(json.Password)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\terr = database.UpdateUserPassword(user.ID, encryptedPassword, date.GetCurrentTime(), json.ID)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t}\n\t// Check if the user name was changed. 
If so, update the session cookie to the new user name.\n\tif json.Name != string(tempUser.Name) {\n\t\tlogInUser(json.Name, w)\n\t}\n\tw.WriteHeader(http.StatusOK)\n\tw.Write([]byte(\"User settings updated!\"))\n\treturn\n}", "func PatchMethod(e *echo.Group, newFn newInstanceFn, pFn patchFn) {\n\te.PATCH(\"/:id\", func(c echo.Context) error {\n\t\tvar id int\n\n\t\tif err := Param(\"id\").InPath().Int(c, &id); err != nil {\n\t\t\treturn BadRequest(err)\n\t\t}\n\t\tresource := newFn()\n\t\tif err := c.Bind(resource); err != nil {\n\t\t\treturn BadRequest(err)\n\t\t}\n\n\t\tif err := pFn(resource); err != nil {\n\t\t\treturn InternalServerError(err)\n\t\t}\n\n\t\treturn c.NoContent(http.StatusNoContent)\n\t})\n\n}", "func UpdatePerson(c *gin.Context) {\n\tvar person models.Person\n\tvar address models.Address\n\terr := c.ShouldBindJSON(&person)\n\t// if person does not exist, return error\n\tpersonAux, errAux := models.LoadPersonByID(fmt.Sprint(person.ID))\n\n\tif personAux.ID == 0 || personAux.IsDel == 1 {\n\t\tc.JSON(400, gin.H{\n\t\t\t\"error\": \"cant update person who does not exist\",\n\t\t})\n\t\treturn\n\t}\n\n\tif err != nil {\n\t\tc.JSON(400, gin.H{\n\t\t\t\"error\": \"cannot bind JSON: \" + err.Error(),\n\t\t})\n\t\treturn\n\t}\n\tperson, err = models.UpdatePerson(person, address)\n\tif err != nil && errAux != nil {\n\t\tc.JSON(400, gin.H{\n\t\t\t\"error\": \"cannot bind update: \" + err.Error(),\n\t\t})\n\t\treturn\n\t}\n\tc.JSON(200, person)\n}", "func (a *HyperflexApiService) PatchHyperflexSoftwareDistributionEntry(ctx context.Context, moid string) ApiPatchHyperflexSoftwareDistributionEntryRequest {\n\treturn ApiPatchHyperflexSoftwareDistributionEntryRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (m *ReportsRequestBuilder) Patch(ctx context.Context, body i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable, requestConfiguration 
*ReportsRequestBuilderPatchRequestConfiguration)(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.CreateReportsFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable), nil\n}", "func (m *TermStoreRequestBuilder) Patch(ctx context.Context, body ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable, requestConfiguration *TermStoreRequestBuilderPatchRequestConfiguration)(ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.CreateStoreFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n 
return res.(ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable), nil\n}", "func (client HTTPSuccessClient) Patch202(booleanValue *bool) (result autorest.Response, err error) {\n req, err := client.Patch202Preparer(booleanValue)\n if err != nil {\n return result, autorest.NewErrorWithError(err, \"httpinfrastructuregroup.HTTPSuccessClient\", \"Patch202\", nil , \"Failure preparing request\")\n }\n\n resp, err := client.Patch202Sender(req)\n if err != nil {\n result.Response = resp\n return result, autorest.NewErrorWithError(err, \"httpinfrastructuregroup.HTTPSuccessClient\", \"Patch202\", resp, \"Failure sending request\")\n }\n\n result, err = client.Patch202Responder(resp)\n if err != nil {\n err = autorest.NewErrorWithError(err, \"httpinfrastructuregroup.HTTPSuccessClient\", \"Patch202\", resp, \"Failure responding to request\")\n }\n\n return\n}" ]
[ "0.6955901", "0.6920714", "0.67520493", "0.6015795", "0.5904203", "0.58468133", "0.57761717", "0.54057676", "0.5382812", "0.5315544", "0.53136206", "0.52500725", "0.52303386", "0.52146894", "0.52062315", "0.5170349", "0.51479495", "0.5147029", "0.5141474", "0.5135123", "0.5125511", "0.5101121", "0.5083942", "0.50726545", "0.5071128", "0.5069812", "0.5028236", "0.5021061", "0.5008922", "0.50086683", "0.49922538", "0.4952382", "0.49497375", "0.49471214", "0.49394295", "0.49302003", "0.4927359", "0.49163014", "0.4908727", "0.49025458", "0.4899358", "0.48945987", "0.4876046", "0.48607317", "0.48504463", "0.4836716", "0.48353332", "0.48252985", "0.481998", "0.4809417", "0.4804645", "0.4800944", "0.4797264", "0.4792788", "0.4792489", "0.47860497", "0.478159", "0.47810772", "0.47760323", "0.47752503", "0.47674626", "0.4754302", "0.47509095", "0.47458446", "0.4716117", "0.47151", "0.4713531", "0.47055367", "0.46995384", "0.469039", "0.4684577", "0.46818817", "0.46809295", "0.46798444", "0.46700087", "0.46693328", "0.46646535", "0.466007", "0.46519968", "0.46488938", "0.46465868", "0.4643667", "0.46427062", "0.46426105", "0.4639943", "0.46376058", "0.46366644", "0.46347108", "0.46333006", "0.4631847", "0.46289334", "0.4620733", "0.46137637", "0.46098557", "0.4608375", "0.46075356", "0.46068078", "0.46022654", "0.4601006", "0.45931968" ]
0.7722697
0
DefaultPatchSetHealthMenstruationPersonalInfo executes a bulk gorm update call with patch behavior
func DefaultPatchSetHealthMenstruationPersonalInfo(ctx context.Context, objects []*HealthMenstruationPersonalInfo, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationPersonalInfo, error) { if len(objects) != len(updateMasks) { return nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects)) } results := make([]*HealthMenstruationPersonalInfo, 0, len(objects)) for i, patcher := range objects { pbResponse, err := DefaultPatchHealthMenstruationPersonalInfo(ctx, patcher, updateMasks[i], db) if err != nil { return nil, err } results = append(results, pbResponse) } return results, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultPatchHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationPersonalInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationPersonalInfo(ctx, &HealthMenstruationPersonalInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationPersonalInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationPersonalInfo\")\n\t}\n\tormObj, err := 
in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationPersonalInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx context.Context, patchee *HealthMenstruationPersonalInfo, patcher *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"PeriodLengthInDays\" {\n\t\t\tpatchee.PeriodLengthInDays = 
patcher.PeriodLengthInDays\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CycleLengthInDays\" {\n\t\t\tpatchee.CycleLengthInDays = patcher.CycleLengthInDays\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultListHealthMenstruationPersonalInfo(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationPersonalInfo, error) {\n\tin := HealthMenstruationPersonalInfo{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationPersonalInfoORM{}, &HealthMenstruationPersonalInfo{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationPersonalInfoORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationPersonalInfo{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func DefaultReadHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) 
(*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationPersonalInfoORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationPersonalInfoORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationPersonalInfoORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultCreateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := 
ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultPatchHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationDailyEntry\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationDailyEntry(ctx, &HealthMenstruationDailyEntry{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationDailyEntry(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationDailyEntryWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultPatchSetHealthMenstruationDailyEntry(ctx context.Context, objects []*HealthMenstruationDailyEntry, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationDailyEntry, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, 
fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationDailyEntry, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationDailyEntry(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func (t *HeathCare_Chaincode) modifyMedicalData(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tfmt.Println(\"\\n=============== start modifyMedicalData function ===============\")\n\tstart := time.Now()\n\ttime.Sleep(time.Second)\n\n\tvar jsonResp string\n\n\tif len(args) != 8 {\n\t\treturn shim.Error(\"expecting 4 argument\")\n\t}\n\n\t//define identity of query-er and new value of medical record\n\tuserid := args[0]\n\tpatientid := args[1]\n\tlocation := args[2]\n\tcollection := args[3]\n\n\tnewPersonalIdentificationInformation := args[4]\n\tnewMedicalHistory := args[5]\n\tnewFamilyMedicalHistory := args[6]\n\tnewMedicationHistory := args[7]\n\tnewTreatmentHistory := args[8]\n\tnewMedicalDirectives := args[9]\n\ttimeQuery := time.Now().String()\n\n\t//get user identity before query\n\tuserIdentityAsBytes, errUserIdentityAsByte := stub.GetPrivateData(collection, userid)\n\tif errUserIdentityAsByte != nil {\n\t\treturn shim.Error(\"cannot get user identity\")\n\t} else if userIdentityAsBytes == nil {\n\t\treturn shim.Error(\"user does not exist\")\n\t}\n\n\t//create query object with purpose: modify\n\tobjectType := \"Query\"\n\tquery := &Query{objectType, userid, patientid, location, timeQuery, \"modify\"}\n\tqueryAsByte, errQueryAsByte := json.Marshal(query)\n\tif errQueryAsByte != nil {\n\t\treturn shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//save to database\n\terrQueryAsByte = stub.PutPrivateData(\"modifyCollection\", userid, queryAsByte)\n\tif errQueryAsByte != nil {\n\t\treturn 
shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//create index key\n\tindexName := \"userid~patientid\"\n\tqueryIndexKey, errQueryIndexKey := stub.CreateCompositeKey(indexName, []string{query.UserID, query.PatientID, query.Location, query.Purpose})\n\tif errQueryIndexKey != nil {\n\t\treturn shim.Error(errQueryIndexKey.Error())\n\t}\n\n\t//save index\n\tvalue := []byte{0x00}\n\tstub.PutPrivateData(\"modifyCollection\", queryIndexKey, value)\n\n\t//get medical record data\n\tmedicalRecordAsBytes, errMedicalRecordAsByte := stub.GetPrivateData(\"MedicalRecordCollection\", patientid)\n\tif errMedicalRecordAsByte != nil {\n\t\tjsonResp = \"{\\\"Error\\\":\\\"Failed to get state for \" + patientid + \": \" + errMedicalRecordAsByte.Error() + \"\\\"}\"\n\t\treturn shim.Error(jsonResp)\n\t} else if errMedicalRecordAsByte == nil {\n\t\treturn shim.Error(\"patient's data does not exist\")\n\t}\n\n\t//convert data of patient to json\n\tmedicalRecord := &MedicalRecord{}\n\terrMedicalRecordAsByte = json.Unmarshal(medicalRecordAsBytes, medicalRecord)\n\n\t//change data\n\tmedicalRecord.PersonalIdentificationInformation = newPersonalIdentificationInformation\n\tmedicalRecord.MedicalHistory = newMedicalHistory\n\tmedicalRecord.FamilyMedicalHistory = newFamilyMedicalHistory\n\tmedicalRecord.MedicationHistory = newMedicationHistory\n\tmedicalRecord.TreatmentHistory = newTreatmentHistory\n\tmedicalRecord.MedicalDirectives = newMedicalDirectives\n\n\t//convert new medical record data to byte\n\tnewMedicalRecordAsByte, errNewMedicalRecordAsByte := json.Marshal(medicalRecord)\n\n\t//store new data\n\terrNewMedicalRecordAsByte = stub.PutPrivateData(\"MedicalRecordCollection\", patientid, newMedicalRecordAsByte)\n\tif errNewMedicalRecordAsByte != nil {\n\t\treturn shim.Error(\"cannot save new medical record's data\")\n\t}\n\n\tend := time.Now()\n\telapsed := time.Since(start)\n\tfmt.Println(\"function modifyMedicalData\")\n\tfmt.Println(\"time start: \", start.String())\n\tfmt.Println(\"time 
end: \", end.String())\n\tfmt.Println(\"time execute: \", elapsed.String())\n\tfmt.Println(\"=============== end modifyMedicalData function ===============\")\n\n\treturn shim.Success(nil)\n}", "func DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx context.Context, patchee *HealthMenstruationDailyEntry, patcher *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Day\" {\n\t\t\tpatchee.Day = patcher.Day\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"IntensityPercentage\" {\n\t\t\tpatchee.IntensityPercentage = patcher.IntensityPercentage\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Type\" {\n\t\t\tpatchee.Type = patcher.Type\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Manual\" {\n\t\t\tpatchee.Manual = patcher.Manual\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"BasedOnPrediction\" {\n\t\t\tpatchee.BasedOnPrediction = patcher.BasedOnPrediction\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func (m *CompaniesItemCompanyInformationCompanyInformationItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CompanyInformationable, requestConfiguration 
*CompaniesItemCompanyInformationCompanyInformationItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CompanyInformationable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateCompanyInformationFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CompanyInformationable), nil\n}", "func DefaultStrictUpdateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationDailyEntry\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationDailyEntryORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func (service *EmployeeService) PatchEmployeeDetails(employeeID string, employeeDetails models.Employee) error {\n\tcollection := service.mongoClient.Database(DbName).Collection(CollectionName)\n\tupdatesToBePerformed := bson.M{}\n\tupdatesToBePerformed[\"employeeid\"] = employeeID\n\tif employeeDetails.Department != nil {\n\t\tupdatesToBePerformed[\"department\"] = employeeDetails.Department\n\t}\n\n\tif employeeDetails.Name != nil {\n\t\tupdatesToBePerformed[\"name\"] = employeeDetails.Name\n\t}\n\n\tif employeeDetails.Skills != nil {\n\t\tupdatesToBePerformed[\"skills\"] = employeeDetails.Skills\n\t}\n\n\tif employeeDetails.Address != nil {\n\t\taddress := models.Address{}\n\t\tif employeeDetails.Address.City != nil {\n\t\t\taddress.City = employeeDetails.Address.City\n\t\t}\n\n\t\tif employeeDetails.Address.Country != nil {\n\t\t\taddress.Country = employeeDetails.Address.Country\n\t\t}\n\n\t\tif employeeDetails.Address.DoorNo != nil {\n\t\t\taddress.DoorNo = employeeDetails.Address.DoorNo\n\t\t}\n\n\t\tif employeeDetails.Address.State != nil {\n\t\t\taddress.State = employeeDetails.Address.State\n\t\t}\n\n\t\tupdatesToBePerformed[\"address\"] = address\n\t}\n\n\tif employeeDetails.Status != nil {\n\t\tupdatesToBePerformed[\"status\"] = employeeDetails.Status\n\t}\n\n\t// consolidatedMap(&updatesToBePerformed, employeeDetails)\n\n\tresult, err := collection.UpdateOne(\n\t\tcontext.Background(),\n\t\tbson.M{\"employeeid\": employeeID},\n\t\tbson.M{\n\t\t\t\"$set\": updatesToBePerformed,\n\t\t})\n\n\tif err != nil 
{\n\t\tfmt.Println(err)\n\t}\n\n\tfmt.Println(result)\n\n\treturn nil\n}", "func (t *HeathCare_Chaincode) modifyPatientInformation(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tfmt.Println(\"\\n=============== start modifyPatientInformation function ===============\")\n\tstart := time.Now()\n\ttime.Sleep(time.Second)\n\n\tvar jsonResp string\n\n\tif len(args) != 8 {\n\t\treturn shim.Error(\"expecting 8 argument\")\n\t}\n\n\tuserid := args[0]\n\tpatientid := args[1]\n\tlocation := args[2]\n\tcollection := args[3]\n\n\tnewInsuranceCard := args[4]\n\tnewCurrentMedicationInformation := args[5]\n\tnewRelatedMedicalRecords := args[6]\n\tnewmakeNoteOfAppointmentDate := args[7]\n\ttimeQuery := time.Now().String()\n\n\t//get user identity before query\n\tuserIdentityAsBytes, errUserIdentityAsByte := stub.GetPrivateData(collection, userid)\n\tif errUserIdentityAsByte != nil {\n\t\treturn shim.Error(\"cannot get user identity\")\n\t} else if userIdentityAsBytes == nil {\n\t\treturn shim.Error(\"user does not exist\")\n\t}\n\n\tobjectType := \"Query\"\n\tquery := &Query{objectType, userid, patientid, location, timeQuery, \"modify\"}\n\tqueryAsByte, errQueryAsByte := json.Marshal(query)\n\tif errQueryAsByte != nil {\n\t\treturn shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//save to database\n\terrQueryAsByte = stub.PutPrivateData(\"modifyCollection\", userid, queryAsByte)\n\tif errQueryAsByte != nil {\n\t\treturn shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//create index key\n\tindexName := \"userid~patientid\"\n\tqueryIndexKey, errQueryIndexKey := stub.CreateCompositeKey(indexName, []string{query.UserID, query.PatientID, query.Location, query.Purpose})\n\tif errQueryIndexKey != nil {\n\t\treturn shim.Error(errQueryIndexKey.Error())\n\t}\n\n\t//save index\n\tvalue := []byte{0x00}\n\tstub.PutPrivateData(\"modifyCollection\", queryIndexKey, value)\n\n\t//get data\n\tpatientAsBytes, errPatientAsByte := stub.GetPrivateData(\"PatientInformationCollection\", 
patientid)\n\tif errPatientAsByte != nil {\n\t\tjsonResp = \"{\\\"Error\\\":\\\"Failed to get state for \" + patientid + \": \" + errPatientAsByte.Error() + \"\\\"}\"\n\t\treturn shim.Error(jsonResp)\n\t} else if errPatientAsByte == nil {\n\t\treturn shim.Error(\"patient's data does not exist\")\n\t}\n\n\t//convert data of patient to json\n\tpatient := &PatientInformation{}\n\terrPatientAsByte = json.Unmarshal(patientAsBytes, patient)\n\n\t//change data\n\tpatient.InsuranceCard = newInsuranceCard\n\tpatient.CurrentMedicationInformation = newCurrentMedicationInformation\n\tpatient.RelatedMedicalRecords = newRelatedMedicalRecords\n\tpatient.MakeNoteOfAppointmentDate = newmakeNoteOfAppointmentDate\n\n\tpatientAsByte, errPatientAsByte := json.Marshal(patient)\n\n\terrPatientAsByte = stub.PutPrivateData(\"PatientInformationCollection\", patientid, patientAsByte)\n\tif errPatientAsByte != nil {\n\t\treturn shim.Error(\"cannot patient's data\")\n\t}\n\n\tend := time.Now()\n\telapsed := time.Since(start)\n\tfmt.Println(\"function modifyPatientInformation\")\n\tfmt.Println(\"time start: \", start.String())\n\tfmt.Println(\"time end: \", end.String())\n\tfmt.Println(\"time execute: \", elapsed.String())\n\tfmt.Println(\"=============== end modifyPatientInformation function ===============\")\n\n\treturn shim.Success(nil)\n}", "func EditLocalRep(c *gin.Context) {\n\tuserGUID, _ := c.GetQuery(\"user_guid\")\n\trepGUID, _ := c.GetQuery(\"rep_guid\")\n\teditTask, _ := c.GetQuery(\"editTask\")\n\tc.Header(\"Content-Type\", \"application/json\")\n\ttargetRepIndex := -1\n\tif editTask == \"add\" {\n\t\t// TODO: create map of maps\n\t\tuserReps[userGUID] = append(userReps[userGUID], repGUID)\n\t} else if editTask == \"remove\" {\n\t\ttempUserRepList := userReps[userGUID]\n\t\tfor i, value := range tempUserRepList {\n\t\t\tif value == repGUID {\n\t\t\t\ttargetRepIndex = i\n\t\t\t}\n\t\t}\n\t\tif targetRepIndex != -1 {\n\t\t\tuserReps[userGUID] = 
append(tempUserRepList[:targetRepIndex], tempUserRepList[targetRepIndex+1:]...)\n\t\t}\n\t} else {\n\t\tfmt.Println(\"edit Rep: provided invalid option\")\n\t\t// log.Info(\"edit Rep: provided invalid option\")\n\t}\n\n\tuserRepUpdate := models.UserRepUpdate{\n\t\tUserGUID: userGUID,\n\t\tRepGUID: repGUID,\n\t\tAction: editTask,\n\t}\n\n\tuserRepUpdateResponse, _ := json.Marshal(userRepUpdate)\n\n\tfmt.Println(string(userRepUpdateResponse))\n\n\tif cfg.Kafka.EnableKafka {\n\t\terr := writer.WriteMessages(context.Background(), kafka.Message{\n\t\t\t//Key: []byte(repGUID),\n\t\t\tValue: []byte(userRepUpdateResponse),\n\t\t})\n\t\tif err != nil {\n\t\t\tpanic(\"could not write kafka message \" + err.Error())\n\t\t}\n\t}\n\n\tmsg := map[string]interface{}{\"Status\": \"Ok\", \"user_guid\": userGUID, \"users_rep_list\": userReps[userGUID]}\n\tc.JSON(http.StatusOK, msg)\n}", "func (m MariaDB) Update(ctx context.Context, ep entity.PersonalData) (int64, error) {\n\tp := receive(ep)\n\tsqlQuery := \"UPDATE person SET name=?, last_name=?, phone=?, email=?, year_od_birth=? 
where id= ?\"\n\n\trslt, err := m.Person.ExecContext(ctx, sqlQuery, p.Name, p.LastName, p.Phone, p.Email, p.YearOfBirth, p.ID)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"could not update data\")\n\t}\n\tcount, err := rslt.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"rows are not affected\")\n\t}\n\treturn count, nil\n}", "func DefaultPatchSetUserInfo(ctx context.Context, objects []*UserInfo, updateMasks []*field_mask.FieldMask, db *gorm.DB) ([]*UserInfo, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*UserInfo, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchUserInfo(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func (m *TeamworkSoftwareUpdateHealth) SetAdditionalData(value map[string]any)() {\n err := m.GetBackingStore().Set(\"additionalData\", value)\n if err != nil {\n panic(err)\n }\n}", "func (m *DeviceLocalCredentialInfoItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.DeviceLocalCredentialInfoable, requestConfiguration *DeviceLocalCredentialInfoItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.DeviceLocalCredentialInfoable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := 
m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateDeviceLocalCredentialInfoFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.DeviceLocalCredentialInfoable), nil\n}", "func (m *TeamworkRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserTeamworkable, requestConfiguration *TeamworkRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserTeamworkable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateUserTeamworkFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserTeamworkable), nil\n}", "func (m *TeamTemplatesItemDefinitionsItemTeamDefinitionPermissionGrantsResourceSpecificPermissionGrantItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ResourceSpecificPermissionGrantable, requestConfiguration 
*TeamTemplatesItemDefinitionsItemTeamDefinitionPermissionGrantsResourceSpecificPermissionGrantItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ResourceSpecificPermissionGrantable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateResourceSpecificPermissionGrantFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ResourceSpecificPermissionGrantable), nil\n}", "func (m *ItemOnlineMeetingsItemRegistrationRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationable, requestConfiguration *ItemOnlineMeetingsItemRegistrationRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := 
m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateMeetingRegistrationFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationable), nil\n}", "func (m *PrivilegedSignupStatusItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable, requestConfiguration *PrivilegedSignupStatusItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreatePrivilegedSignupStatusFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable), nil\n}", "func (o CMFUserSuperSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, 
len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfUserSuperPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_user_super` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfUserSuperPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfUserSuper slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfUserSuper\")\n\t}\n\treturn rowsAff, nil\n}", "func HandleUpdatePerson(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tid := r.URL.Query().Get(\"id\")\n\tif id == \"\" {\n\t\thttp.Error(w, \"id parameter is not found\", http.StatusBadRequest)\n\t\treturn\n\t}\n\tfmt.Println(id)\n\treqBody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"Body Read Error : %v\", err), http.StatusInternalServerError)\n\t}\n\tvar body map[string]string\n\terr = json.Unmarshal(reqBody, &body)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"Request Body parse error : %v\", err), http.StatusBadRequest)\n\t\treturn\n\t}\n\tfmt.Printf(body[\"name\"])\n\tcols := \"\"\n\n\tfor key, val := range body {\n\t\tcols = cols + key + \"=\" + \"'\" + val + \"'\" + \",\"\n\t}\n\tf := cols[:len(cols)-1]\n\n\t_, err = Db.Exec(fmt.Sprintf(\"UPDATE people SET %s 
where people.id=?\", f), id)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"ERROR in deleting person %s\", err.Error()), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tvar person db.Person\n\tres, err := Db.Query(\"SELECT * FROM people where id=?\", id)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"ERROR in updating person %s\", err.Error()), http.StatusBadRequest)\n\t\treturn\n\t}\n\tdefer res.Close()\n\tfor res.Next() {\n\t\terr = res.Scan(&person.Id, &person.Name, &person.City, &person.ContactNo, &person.PhotoUrl)\n\t\tif err != nil {\n\t\t\thttp.Error(w, fmt.Sprintf(\"ERROR in updating person %s\", err.Error()), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t}\n\tjson.NewEncoder(w).Encode(person)\n}", "func (t *MedChain) updateHospital(stub shim.ChaincodeStubInterface, args []string) peer.Response {\n\t\t// ==== Input sanitation ====\n\t\tfmt.Println(\"- start updateHospital\")\n\n\t\t// check if all the args are send\n\t\tif len(args) != 4 {\n\t\t\treturn shim.Error(\"Incorrect number of arguments, Required 4 arguments\")\n\t\t}\n\n\t\t// check if the args are empty\n\t\tfor i := 0; i < len(args); i++ {\n\t\t\tif len(args[i]) <= 0 {\n\t\t\t\treturn shim.Error(\"argument \"+ string(i+1) + \" must be a non-empty string\")\n\t\t\t}\n\t\t}\n\n\t\tgetAssetAsBytes, errT := stub.GetState(args[0])\n\n\t\tif errT != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Error : Cannot find Hospital %s\" , errT))\n\t\t}\n\n\t\tif getAssetAsBytes == nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Cannot find asset Hospital with ID %s\" , args[0]))\n\t\t}\n\n\t\tvar obj = Hospital{}\n\n\t\tjson.Unmarshal(getAssetAsBytes, &obj)\n\t\tobj.HospitalName = args[1]\n\t\tobj.HospitalAddress = args[2]\n\t\tobj.HospitalPhone = args[3]\n\t\tcomAssetAsBytes, errMarshal := json.Marshal(obj)\n\n\t\tif errMarshal != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Marshal Error: %s\", errMarshal))\n\t\t}\n\n\t\terrPut := stub.PutState(obj.Hospital_ID, comAssetAsBytes)\n\n\t\tif errPut 
!= nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Failed to update Hospital with ID %s\", args[0]))\n\t\t}\n\n\t\tfmt.Println(\"Hospital asset with ID %s was updated \\n %v\", args[0], obj)\n\n\t\treturn shim.Success(comAssetAsBytes)\n\t}", "func (m *VirtualEndpointUserSettingsCloudPcUserSettingItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CloudPcUserSettingable, requestConfiguration *VirtualEndpointUserSettingsCloudPcUserSettingItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CloudPcUserSettingable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateCloudPcUserSettingFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CloudPcUserSettingable), nil\n}", "func updatePerson(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-type\", \"application/json\")\n\n\tvar person Person\n\tfname := person.Fname\n\tlname := person.Lname\n\temail := person.Email\n\tpword := person.Pword\n\tid := person.Id\n\n\tstmt, err := db.Prepare(\"UPDATE person SET fname=?, lname=?, email=?, pword=?, id=? 
WHERE id =?\")\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tresult, err := stmt.Exec(fname, lname, email, pword, id)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\t_, err = result.RowsAffected()\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n}", "func (o CMFFamiliesPolicySlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfFamiliesPolicyPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_families_policies` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfFamiliesPolicyPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfFamiliesPolicy slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfFamiliesPolicy\")\n\t}\n\treturn rowsAff, nil\n}", "func DefaultApplyFieldMaskUserInfo(ctx context.Context, patchee *UserInfo, patcher *UserInfo, updateMask *field_mask.FieldMask, prefix string, db *gorm.DB) (*UserInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, 
nil\n\t} else if patchee == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar err error\n\tvar updatedCreatedAt bool\n\tvar updatedUpdatedAt bool\n\tfor i, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UserId\" {\n\t\t\tpatchee.UserId = patcher.UserId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"LastName\" {\n\t\t\tpatchee.LastName = patcher.LastName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"FirstName\" {\n\t\t\tpatchee.FirstName = patcher.FirstName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Period\" {\n\t\t\tpatchee.Period = patcher.Period\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"DepartmentId\" {\n\t\t\tpatchee.DepartmentId = patcher.DepartmentId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"JobId\" {\n\t\t\tpatchee.JobId = patcher.JobId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"EnrollmentFlg\" {\n\t\t\tpatchee.EnrollmentFlg = patcher.EnrollmentFlg\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"AdminFlg\" {\n\t\t\tpatchee.AdminFlg = patcher.AdminFlg\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedCreatedAt && strings.HasPrefix(f, prefix+\"CreatedAt.\") {\n\t\t\tif patcher.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"CreatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.CreatedAt, patchee.CreatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tupdatedCreatedAt = true\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUpdatedAt && strings.HasPrefix(f, 
prefix+\"UpdatedAt.\") {\n\t\t\tif patcher.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UpdatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.UpdatedAt, patchee.UpdatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tupdatedUpdatedAt = true\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func (m *FeatureRolloutPolicyItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.FeatureRolloutPolicyable, requestConfiguration *FeatureRolloutPolicyItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.FeatureRolloutPolicyable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateFeatureRolloutPolicyFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return 
res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.FeatureRolloutPolicyable), nil\n}", "func (puo *ProductUpdateOne) SetPersonal(p *Personal) *ProductUpdateOne {\n\treturn puo.SetPersonalID(p.ID)\n}", "func (handler *profileHandler) Patch(ctx context.Context, req *proto.ProfilePatchRequest, rsp *proto.ProfileData) (err error) {\n\tprofileInstance := handler.getProfileInstance(req.GetId())\n\tprofileInstance.SetAvatar(req.Avatar)\n\terr = profileInstance.SetLocation(req.Location)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tprofileInstance.SetSkype(req.Skype)\n\terr = profileInstance.Save()\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\thandler.setProfileDataToResponse(profileInstance, rsp)\n\n\treturn nil\n}", "func (db *DataBase) UpdatePlayerPersonalInfo(userID int32, user *models.UserPrivateInfo) (err error) {\n\tvar (\n\t\tconfirmedUser *models.UserPrivateInfo\n\t\ttx *sql.Tx\n\t)\n\n\tif tx, err = db.Db.Begin(); err != nil {\n\t\treturn\n\t}\n\tdefer tx.Rollback()\n\n\tif confirmedUser, err = db.getPrivateInfo(tx, userID); err != nil {\n\t\treturn\n\t}\n\n\tconfirmedUser.Update(user)\n\n\tif err = db.updatePlayerPersonalInfo(tx, user); err != nil {\n\t\treturn\n\t}\n\n\terr = tx.Commit()\n\treturn\n}", "func (o AuthUserUserPermissionSlice) UpdateAllP(exec boil.Executor, cols M) {\n\tif err := o.UpdateAll(exec, cols); err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n}", "func (m *RiskyUsersItemHistoryRiskyUserHistoryItemItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RiskyUserHistoryItemable, requestConfiguration *RiskyUsersItemHistoryRiskyUserHistoryItemItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RiskyUserHistoryItemable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateRiskyUserHistoryItemFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RiskyUserHistoryItemable), nil\n}", "func (m *OnlineMeetingsItemRegistrationCustomQuestionsMeetingRegistrationQuestionItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationQuestionable, requestConfiguration *OnlineMeetingsItemRegistrationCustomQuestionsMeetingRegistrationQuestionItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationQuestionable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateMeetingRegistrationQuestionFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return 
res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationQuestionable), nil\n}", "func HandleModify(w http.ResponseWriter, r *http.Request) {\r\n\tdefer r.Body.Close()\r\n\tfmt.Println(\"In handleModify\")\r\n\tvar ab AddressBook\r\n\tif r.Method != http.MethodPost {\r\n\t\terr := fmt.Sprintf(\"Method %s not supported on this action %s\\n\", r.Method, r.URL.Path)\r\n\t\tfmt.Printf(\"%s\", err)\r\n\t\thttp.Error(w, err, http.StatusMethodNotAllowed)\r\n\t\treturn\r\n\t}\r\n\r\n\tif err := json.NewDecoder(r.Body).Decode(&ab); err != nil {\r\n\t\tmsg := fmt.Sprintf(\"Error %s while decoding json\\n\", err.Error())\r\n\t\tfmt.Printf(\"%s\", msg)\r\n\t\thttp.Error(w, msg, http.StatusBadRequest)\r\n\t\treturn\r\n\t}\r\n\r\n\tif ab.FirstName == \"\" {\r\n\t\tmsg := \"Name not provided as part of the query\\n\"\r\n\t\tfmt.Printf(\"%s\", msg)\r\n\t\thttp.Error(w, msg, http.StatusNotFound)\r\n\t\treturn\r\n\t}\r\n\r\n\tMutex.RLock()\r\n\tdefer Mutex.RUnlock()\r\n\tif len(AddrBook) == 0 {\r\n\t\tmsg := fmt.Sprintf(\"Address book is empty nothing to modify\\n\")\r\n\t\tfmt.Printf(\"%s\\n\", msg)\r\n\t\thttp.Error(w, msg, http.StatusNotFound)\r\n\t\treturn\r\n\t}\r\n\r\n\tif abTmp, ok := AddrBook[ab.FirstName]; !ok {\r\n\t\tmsg := fmt.Sprintf(\"%s not found in the Address book, nothing to modify\\n\", abTmp.FirstName)\r\n\t\tfmt.Printf(\"%s\", msg)\r\n\t\thttp.Error(w, msg, http.StatusNotFound)\r\n\t\treturn\r\n\t}\r\n\tabTmp, _ := AddrBook[ab.FirstName]\r\n\tif ab.LastName != \"\" && abTmp.LastName != ab.LastName {\r\n\t\tabTmp.LastName = ab.LastName\r\n\t}\r\n\tif ab.Email != \"\" && abTmp.Email != ab.Email {\r\n\t\tabTmp.Email = ab.Email\r\n\t}\r\n\tif ab.PhoneNumber != 0 && abTmp.PhoneNumber != ab.PhoneNumber {\r\n\t\tabTmp.PhoneNumber = ab.PhoneNumber\r\n\t}\r\n\tAddrBook[ab.FirstName] = abTmp\r\n\tmsg := fmt.Sprintf(\"Modified name %s present in the address book\\n\", ab.FirstName)\r\n\tfmt.Printf(\"%s\", msg)\r\n\thttp.Error(w, msg, 
http.StatusOK)\r\n}", "func (m *StoreItemRequestBuilder) Patch(ctx context.Context, body ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable, requestConfiguration *StoreItemRequestBuilderPatchRequestConfiguration)(ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.CreateStoreFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable), nil\n}", "func DefaultStrictUpdateUserInfo(ctx context.Context, in *UserInfo, db *gorm.DB) (*UserInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateUserInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &UserInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil 
{\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func UpdateEmployee(c *gin.Context) {\r\n var employee model.Employee\r\n id := c.Params.ByName(\"id\")\r\n err := model.GetEmployeeByID(&employee, id)\r\n if err != nil {\r\n c.JSON(http.StatusNotFound, employee)\r\n }\r\n c.BindJSON(&employee)\r\n err = model.UpdateEmployee(&employee, id)\r\n if err != nil {\r\n c.AbortWithStatus(http.StatusNotFound)\r\n } else {\r\n c.JSON(http.StatusOK, employee)\r\n }\r\n}", "func (o EmployeeSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), employeePrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"employee\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, employeePrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update 
all in employee slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all employee\")\n\t}\n\treturn rowsAff, nil\n}", "func (c *Client) ModifyHealthCheck(args *ModifyHealthCheckArgs) (*ModifyHealthCheckResponse, error) {\n\tresponse := ModifyHealthCheckResponse{}\n\terr := c.Invoke(\"ModifyHealthCheck\", args, &response)\n\tif err == nil {\n\t\treturn &response, nil\n\t}\n\treturn nil, err\n}", "func (m *ManagedTenantsManagementActionsManagementActionItemRequestBuilder) Patch(ctx context.Context, body i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagementActionable, requestConfiguration *ManagedTenantsManagementActionsManagementActionItemRequestBuilderPatchRequestConfiguration)(i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagementActionable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.CreateManagementActionFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagementActionable), nil\n}", "func (t *IPDCChaincode) invoke_update_status_with_modification_check(stub shim.ChaincodeStubInterface, args []string, map_specification map[string]interface{}) pb.Response {\r\n\r\n\tfmt.Println(\"***********Entering 
invoke_update_status_with_modification_check***********\")\r\n\r\n\tif len(args) < 2 {\r\n\r\n\t\tfmt.Println(\"Error: Incorrect number of arguments\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error: Incorrect number of arguments\")\r\n\t}\r\n\r\n\tvar record_specification_input map[string]interface{}\r\n\r\n\tvar err error\r\n\r\n\terr = json.Unmarshal([]byte(args[0]), &record_specification_input)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error in format of record.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error in format of record.\")\r\n\t}\r\n\r\n\tadditional_json, ok := map_specification[\"additional_json\"]\r\n\r\n\tif ok {\r\n\r\n\t\tadditional_json_data, ok1 := additional_json.(map[string]interface{})\r\n\r\n\t\tif ok1 {\r\n\r\n\t\t\tfor spec, _ := range additional_json_data {\r\n\r\n\t\t\t\trecord_specification_input[spec] = additional_json_data[spec]\r\n\t\t\t}\r\n\t\t} else {\r\n\t\t\tfmt.Println(\"Error: Invalid additional JSON fields in specification\")\r\n\r\n\t\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\t\treturn shim.Error(\"Error: Invalid additional JSON fields in specification\")\r\n\t\t}\r\n\t}\r\n\r\n\tvar keys_map interface{}\r\n\r\n\tvar specs map[string]interface{}\r\n\r\n\tkeys_map, error_keys_map := t.get_keys_map(stub, record_specification_input)\r\n\r\n\tif error_keys_map != nil {\r\n\r\n\t\tfmt.Println(error_keys_map.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(error_keys_map.Error())\r\n\t}\r\n\r\n\tspecs, ok = keys_map.(map[string]interface{})\r\n\r\n\tif !ok {\r\n\r\n\t\tfmt.Println(\"Error: Invalid keys_map specification.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting 
invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error: Invalid keys_map specification.\")\r\n\t}\r\n\r\n\tif specs[\"primary_key\"] == nil {\r\n\r\n\t\tfmt.Println(\"Error: There is no primary key specification.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error : There is no primary key specification.\")\r\n\t}\r\n\r\n\tvar pk_spec []interface{}\r\n\r\n\tpk_spec, ok = specs[\"primary_key\"].([]interface{})\r\n\r\n\tif !ok {\r\n\r\n\t\tfmt.Println(\"Error in Primary key specification.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error in Primary key specification.\")\r\n\t}\r\n\r\n\tkey, err_key := t.createInterfacePrimaryKey(record_specification_input, pk_spec)\r\n\r\n\tif err_key != nil {\r\n\r\n\t\tfmt.Println(err_key.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(err_key.Error())\r\n\r\n\t}\r\n\r\n\tvar valAsBytes []byte\r\n\r\n\tvalAsBytes, err = stub.GetState(key)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error: Failed to get state: \" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error: Failed to get state: \" + err.Error())\r\n\r\n\t} else if valAsBytes == nil {\r\n\r\n\t\tfmt.Println(\"Error: No value for primary key : \" + key)\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error: No value for key\")\r\n\r\n\t}\r\n\r\n\tvar record_specification map[string]interface{}\r\n\r\n\terr = json.Unmarshal([]byte(valAsBytes), &record_specification)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error in format of 
record\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error in format of record\")\r\n\r\n\t}\r\n\r\n\tvar check int\r\n\r\n\tcheck, err = t.Isfieldsmodified(record_specification_input, record_specification, map_specification)\r\n\r\n\tif check != 0 {\r\n\r\n\t\tfmt.Println(\"Status Update Failed due to error in modification check. \" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Status Update Failed due to error in modification check. \" + err.Error())\r\n\t}\r\n\r\n\terr_del := t.delete_composite_keys(stub, specs, record_specification, key)\r\n\r\n\tif err_del != nil {\r\n\r\n\t\tfmt.Println(\"Error in deleting composite keys\" + err_del.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error in deleting composite keys\" + err_del.Error())\r\n\r\n\t}\r\n\r\n\tvar to_be_updated_map map[string]interface{}\r\n\r\n\terr = json.Unmarshal([]byte(args[1]), &to_be_updated_map)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error in format of update map.\")\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error in format of update map.\")\r\n\r\n\t}\r\n\r\n\tfor spec, spec_val := range to_be_updated_map {\r\n\r\n\t\tvar spec_val_string, spec_ok = spec_val.(string)\r\n\r\n\t\tif !spec_ok {\r\n\r\n\t\t\tfmt.Println(\"Error: Unable to parse value of status update\")\r\n\r\n\t\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\t\treturn shim.Error(\"Error: Unable to parse value of status update\")\r\n\r\n\t\t}\r\n\r\n\t\tvar val_check, val_err = t.updatestatusvaliditycheck(spec, spec_val_string, map_specification)\r\n\r\n\t\tif val_check != 0 
{\r\n\r\n\t\t\tfmt.Println(val_err.Error())\r\n\r\n\t\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\t\treturn shim.Error(val_err.Error())\r\n\t\t}\r\n\r\n\t\trecord_specification[spec] = spec_val_string\r\n\t}\r\n\r\n\tvar concatenated_record_json []byte\r\n\r\n\tconcatenated_record_json, err = json.Marshal(record_specification)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error: Unable to Marshal Concatenated Record to JSON \" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error: Unable to Marshal Concatenated Record to JSON \" + err.Error())\r\n\t}\r\n\r\n\terr = stub.PutState(key, []byte(concatenated_record_json))\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error: Failed to put state : \" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error: Failed to put state : \" + err.Error())\r\n\t}\r\n\r\n\terr = t.create_composite_keys(stub, specs, record_specification, key)\r\n\r\n\tif err != nil {\r\n\r\n\t\tfmt.Println(\"Error in creating composite keys\" + err.Error())\r\n\r\n\t\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\t\treturn shim.Error(\"Error in creating composite keys\" + err.Error())\r\n\t}\r\n\r\n\tfmt.Println(\"***********Exiting invoke_update_status_with_modification_check***********\")\r\n\r\n\treturn shim.Success(nil)\r\n\r\n}", "func (pu *ProductUpdate) SetPersonal(p *Personal) *ProductUpdate {\n\treturn pu.SetPersonalID(p.ID)\n}", "func (o TenantSlice) UpdateAllP(ctx context.Context, exec boil.ContextExecutor, cols M) int64 {\n\trowsAff, err := o.UpdateAll(ctx, exec, cols)\n\tif err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n\n\treturn rowsAff\n}", "func (m *TeamTemplatesItemDefinitionsItemTeamDefinitionPhotoRequestBuilder) 
Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ProfilePhotoable, requestConfiguration *TeamTemplatesItemDefinitionsItemTeamDefinitionPhotoRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ProfilePhotoable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateProfilePhotoFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ProfilePhotoable), nil\n}", "func (svc record) procUpdate(invokerID uint64, m *types.Module, upd *types.Record, old *types.Record) *types.RecordValueErrorSet {\n\t// Mark all values as updated (new)\n\tupd.Values.SetUpdatedFlag(true)\n\n\t// First sanitization\n\t//\n\t// Before values are merged with existing data and\n\t// sent to automation scripts (if any)\n\t// we need to make sure it does not get sanitized data\n\tupd.Values = svc.sanitizer.Run(m, upd.Values)\n\n\t// Copy values to updated record\n\t// to make sure nobody slips in something we do not want\n\tupd.CreatedAt = old.CreatedAt\n\tupd.CreatedBy = old.CreatedBy\n\tupd.UpdatedAt = nowPtr()\n\tupd.UpdatedBy = invokerID\n\tupd.DeletedAt = old.DeletedAt\n\tupd.DeletedBy = old.DeletedBy\n\n\t// Merge new (updated) values with old ones\n\t// This way we get list of updated, stale and 
deleted values\n\t// that we can selectively update in the repository\n\tupd.Values = old.Values.Merge(upd.Values)\n\n\tif upd.OwnedBy == 0 {\n\t\tif old.OwnedBy > 0 {\n\t\t\t// Owner not set/send in the payload\n\t\t\t//\n\t\t\t// Fallback to old owner (if set)\n\t\t\tupd.OwnedBy = old.OwnedBy\n\t\t} else {\n\t\t\t// If od owner is not set, make current user\n\t\t\t// the owner of the record\n\t\t\tupd.OwnedBy = invokerID\n\t\t}\n\t}\n\n\t// Run validation of the updated records\n\treturn svc.validator.Run(m, upd)\n}", "func (o ForeignLegalResourceSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), foreignLegalResourcePrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"ForeignLegalResources\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, foreignLegalResourcePrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in foreignLegalResource slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows 
affected all in update all foreignLegalResource\")\n\t}\n\treturn rowsAff, nil\n}", "func UpdatePerson(c *gin.Context) {\n // Get the person to be updated\n var person models.Person\n if err := models.DB.First(&person, \"id = ?\", c.Param(\"id\")).Error; err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n // Validate input\n var input UpdatePersonInput\n if err := c.ShouldBindJSON(&input); err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n models.DB.Model(&person).Updates(input)\n\n c.JSON(http.StatusOK, gin.H{\"data\": person})\n}", "func (m *ManagedEBooksManagedEBookItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ManagedEBookable, requestConfiguration *ManagedEBooksManagedEBookItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ManagedEBookable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateManagedEBookFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ManagedEBookable), nil\n}", "func DefaultPatchUserInfo(ctx context.Context, in *UserInfo, updateMask *field_mask.FieldMask, db *gorm.DB) (*UserInfo, error) {\n\tif in == nil {\n\t\treturn nil, 
errors.NilArgumentError\n\t}\n\tvar pbObj UserInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(UserInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadUserInfo(ctx, &UserInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(UserInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskUserInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(UserInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateUserInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(UserInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func (m *ReportsRequestBuilder) Patch(ctx context.Context, body i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable, requestConfiguration *ReportsRequestBuilderPatchRequestConfiguration)(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": 
i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.CreateReportsFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable), nil\n}", "func (m *HealthMenstruationPersonalInfoORM) ToPB(ctx context.Context) (HealthMenstruationPersonalInfo, error) {\n\tto := HealthMenstruationPersonalInfo{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tto.PeriodLengthInDays = m.PeriodLengthInDays\n\tto.CycleLengthInDays = m.CycleLengthInDays\n\tif posthook, ok := interface{}(m).(HealthMenstruationPersonalInfoWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (o NotificationSlice) UpdateAll(exec boil.Executor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := 
queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), notificationPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"notification\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, notificationPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.Exec(sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in notification slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all notification\")\n\t}\n\treturn rowsAff, nil\n}", "func UpdatePerson(response http.ResponseWriter, request *http.Request){\n\tresponse.Header().Set(\"Content-type\", \"application/json\")\n\tparams := mux.Vars(request)\n\tid, _ := strconv.Atoi(params[\"id\"])\n\tfor index, item := range models.People {\n\t\tif item.ID == id {\n\t\t\tmodels.People = append(models.People[:index], models.People[index+1:]...)\n\n\t\t\tvar person models.Person\n\n\t\t\t_ = json.NewDecoder(request.Body).Decode(person)\n\t\t\tperson.ID = id\n\t\t\tmodels.People = append(models.People, person)\n\t\t\tjson.NewEncoder(response).Encode(&person) \n\t\t\treturn\n\t\t}\n\t}\n\tjson.NewEncoder(response).Encode(models.People)\n}", "func (m *TermStoreRequestBuilder) Patch(ctx context.Context, body ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable, requestConfiguration *TermStoreRequestBuilderPatchRequestConfiguration)(ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.CreateStoreFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable), nil\n}", "func patchLocomotive(w http.ResponseWriter, r *http.Request) {\n\tvar document Locomotive\n\n\t//\tObtener la base de datos y colección a utilizar.-\n\tparams := mux.Vars(r)\n\tdatabaseName := params[\"databaseName\"]\n\tcollectionName := params[\"collectionName\"]\n\n\t//\tObtener el modelo a filtrar.-\n\tmodel := params[\"model\"]\n\n\t//\tConfigurar los filtros.-\n\tfilter := make(map[string]interface{})\n\tif model != \"\" {\n\t\tfilter[\"model\"] = model\n\t}\n\n\t//\tDecodificar el documento json recibido y dejarlo en la variable de tipo struct.-\n\t_ = json.NewDecoder(r.Body).Decode(&document)\n\n\t//\tConfigurar los updates.-\n\tupdate := make(map[string]interface{})\n\tif document.PowerType != \"\" {\n\t\tupdate[\"powertype\"] = document.PowerType\n\t}\n\tif document.Builder != \"\" {\n\t\tupdate[\"builder\"] = document.Builder\n\t}\n\tif document.BuildDate != \"\" {\n\t\tupdate[\"builddate\"] = document.BuildDate\n\t}\n\tif document.WheelSystem != \"\" {\n\t\tupdate[\"wheelsystem\"] = document.WheelSystem\n\t}\n\tif document.MaximunSpeed > 0 {\n\t\tupdate[\"maximunspeed\"] = document.MaximunSpeed\n\t}\n\tif document.PowerOutputHP > 0 {\n\t\tupdate[\"poweroutputhp\"] = document.PowerOutputHP\n\t}\n\n\t//\tActualizar la locomotora.-\n\tupdatedCount, err := UpdateDocument(databaseName, 
collectionName, filter, update)\n\tif err != nil {\n\t\thttputility.GetJsonResponseMessage(w, \"patchLocomotive: \"+err.Error())\n\t} else {\n\t\tif updatedCount == 0 {\n\t\t\thttputility.GetJsonResponseMessage(w, \"patchLocomotive: No se encontró ningun documento a actualizar en la Base de Datos (MongoDB).\")\n\t\t} else {\n\t\t\tif updatedCount == 1 {\n\t\t\t\thttputility.GetJsonResponseMessage(w, \"patchLocomotive: Se actualizó correctamente el documento de la Base de Datos (MongoDB).\")\n\t\t\t} else {\n\t\t\t\thttputility.GetJsonResponseMessage(w, \"patchLocomotive: Se actualizaron correctamente \"+string(updatedCount)+\" documentos de la Base de Datos (MongoDB).\")\n\t\t\t}\n\t\t}\n\t}\n}", "func (m *WindowsFeatureUpdateProfilesWindowsFeatureUpdateProfileItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.WindowsFeatureUpdateProfileable, requestConfiguration *WindowsFeatureUpdateProfilesWindowsFeatureUpdateProfileItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.WindowsFeatureUpdateProfileable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateWindowsFeatureUpdateProfileFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return 
res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.WindowsFeatureUpdateProfileable), nil\n}", "func (o CMFUserExperienceLogSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfUserExperienceLogPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_user_experience_log` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfUserExperienceLogPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfUserExperienceLog slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfUserExperienceLog\")\n\t}\n\treturn rowsAff, nil\n}", "func (m *EntitlementManagementRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable, requestConfiguration *EntitlementManagementRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable, error) {\n 
requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateRbacApplicationFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable), nil\n}", "func (o AuthUserSlice) UpdateAllP(exec boil.Executor, cols M) {\n\tif err := o.UpdateAll(exec, cols); err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n}", "func (m *ThreatSubmissionEmailThreatsEmailThreatSubmissionItemRequestBuilder) Patch(ctx context.Context, body i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.EmailThreatSubmissionable, requestConfiguration *ThreatSubmissionEmailThreatsEmailThreatSubmissionItemRequestBuilderPatchRequestConfiguration)(i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.EmailThreatSubmissionable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, 
i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.CreateEmailThreatSubmissionFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.EmailThreatSubmissionable), nil\n}", "func (m *TeamsAppItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TeamsAppable, requestConfiguration *TeamsAppItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TeamsAppable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateTeamsAppFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TeamsAppable), nil\n}", "func (a *HyperflexApiService) PatchHyperflexClusterProfile(ctx context.Context, moid string) ApiPatchHyperflexClusterProfileRequest {\n\treturn ApiPatchHyperflexClusterProfileRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (client LabClient) PatchResourceResponder(resp *http.Response) (result Lab, err error) {\n\terr = 
autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func patchPi(w http.ResponseWriter, r *http.Request) {\n\t// Get pi name from request\n\tvars := mux.Vars(r)\n\tname := vars[\"piname\"]\n\n\t// Retrieve pi object from data store\n\tc := appengine.NewContext(r)\n\tq := datastore.NewQuery(piListKind).Filter(\"name =\", name)\n\tt := q.Run(c)\n\tvar pi Pi\n\t_, err := t.Next(&pi)\n\tif err == datastore.Done {\n\t\thttp.Error(w, \"404 Not found\", http.StatusNotFound)\n\t\treturn\n\t}\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Set Pi object property\n\tr.ParseForm()\n\n\t// Updating the name is not allowed\n\tformName := r.Form.Get(\"name\")\n\tif len(formName) != 0 {\n\t\thttp.Error(w, \"404 Not found\", http.StatusNotFound)\n\t\treturn\n\t}\n\tip := r.Form.Get(\"ip\")\n\tif len(ip) != 0 {\n\t\tpi.Ip = ip\n\t}\n\tlastSeen := r.Form.Get(\"lastSeen\")\n\tif len(lastSeen) != 0 {\n\t\tpi.LastSeen = lastSeen\n\t}\n\tpingCount := r.Form.Get(\"pingCount\")\n\tif len(pingCount) != 0 {\n\t\tpi.PingCount, _ = strconv.Atoi(r.Form.Get(\"pingCount\"))\n\t}\n\n\t//\tfmt.Fprint(w, \"name \", , \"\\n\")\n\tfmt.Fprint(w, \"pingCount \", r.Form.Get(\"pingCount\"), \" \", pi.PingCount, \"\\n\")\n\n\t// Store pi object in data store\n\t_, err = datastore.Put(c, datastore.NewKey(c, piListKind, name, 0, nil), &pi)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\thttp.Error(w, \"200 OK\", http.StatusOK)\n\treturn\n}", "func (m *ItemSettingsRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.UserSettingsable, requestConfiguration 
*ItemSettingsRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.UserSettingsable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateUserSettingsFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.UserSettingsable), nil\n}", "func (client HTTPSuccessClient) Patch202(booleanValue *bool) (result autorest.Response, err error) {\n req, err := client.Patch202Preparer(booleanValue)\n if err != nil {\n return result, autorest.NewErrorWithError(err, \"httpinfrastructuregroup.HTTPSuccessClient\", \"Patch202\", nil , \"Failure preparing request\")\n }\n\n resp, err := client.Patch202Sender(req)\n if err != nil {\n result.Response = resp\n return result, autorest.NewErrorWithError(err, \"httpinfrastructuregroup.HTTPSuccessClient\", \"Patch202\", resp, \"Failure sending request\")\n }\n\n result, err = client.Patch202Responder(resp)\n if err != nil {\n err = autorest.NewErrorWithError(err, \"httpinfrastructuregroup.HTTPSuccessClient\", \"Patch202\", resp, \"Failure responding to request\")\n }\n\n return\n}", "func (m *SiteItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Siteable, requestConfiguration 
*SiteItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Siteable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateSiteFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Siteable), nil\n}", "func (o AuthUserUserPermissionSlice) UpdateAll(exec boil.Executor, cols M) error {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), authUserUserPermissionPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\n\t\t\"UPDATE `auth_user_user_permissions` SET %s WHERE (`id`) IN (%s)\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.Placeholders(dialect.IndexPlaceholders, len(o)*len(authUserUserPermissionPrimaryKeyColumns), len(colNames)+1, len(authUserUserPermissionPrimaryKeyColumns)),\n\t)\n\n\tif 
boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\t_, err := exec.Exec(sql, args...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to update all in authUserUserPermission slice\")\n\t}\n\n\treturn nil\n}", "func (o SmallblogSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), smallblogPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `smallblog` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, smallblogPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in smallblog slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all smallblog\")\n\t}\n\treturn rowsAff, nil\n}", "func (srv *UsersService) PatchHandler(ctx *gin.Context) {\n\tlogger := srv.logger.New(\"action\", \"PatchHandler\")\n\tuser := GetRequestedUser(ctx)\n\tif user == nil {\n\t\t// Returns a \"404 StatusNotFound\" 
response\n\t\tsrv.ResponseService.NotFound(ctx)\n\t\treturn\n\t}\n\n\t// Checks if the query entry is valid\n\tform := &validators.PatchUser{}\n\tif err := ctx.ShouldBindJSON(form); err != nil {\n\t\tsrv.ResponseService.ValidatorErrorResponse(ctx, responses.UnprocessableEntity, err)\n\t\treturn\n\t}\n\n\tcurrentUser := GetCurrentUser(ctx)\n\tif currentUser.UID == user.UID ||\n\t\tcurrentUser.RoleName == \"root\" ||\n\t\tcurrentUser.RoleName == \"admin\" {\n\n\t\tif form.FirstName != nil {\n\t\t\tuser.FirstName = *form.FirstName\n\t\t}\n\t\tif form.LastName != nil {\n\t\t\tuser.LastName = *form.LastName\n\t\t}\n\t\tif form.Nickname != nil {\n\t\t\tuser.Nickname = *form.Nickname\n\t\t}\n\n\t\trepo := srv.Repository.GetUsersRepository()\n\t\told, err := repo.FindByUID(user.UID)\n\t\tif err != nil {\n\t\t\tlogger.Error(\"cannot find user\", \"err\", err)\n\t\t\tsrv.ResponseService.NotFound(ctx)\n\t\t\treturn\n\t\t}\n\n\t\t_, err = repo.Update(user)\n\t\tif err != nil {\n\t\t\tlogger.Error(\"cannot update user\", \"err\", err)\n\t\t\tsrv.ResponseService.Error(ctx, responses.CanNotUpdateUser, \"Can't update a user\")\n\t\t\treturn\n\t\t}\n\n\t\tif currentUser.UID != user.UID &&\n\t\t\t(currentUser.RoleName == \"admin\" || currentUser.RoleName == \"root\") {\n\t\t\tsrv.SystemLogsService.LogModifyUserProfileAsync(old, user, currentUser.UID)\n\t\t}\n\t}\n\n\t// Returns a \"204 StatusNoContent\" response\n\tctx.JSON(http.StatusNoContent, nil)\n}", "func (me *INVOICES_IMPL) UpdateInvoiceMetadata (\r\n invoiceId string,\r\n body *models_pkg.InvoicesMetadataRequest,\r\n idempotencyKey *string) (*models_pkg.InvoicesMetadataResponse, error) {\r\n //the endpoint path uri\r\n _pathUrl := \"/invoices/{invoice_id}/metadata\"\r\n\r\n //variable to hold errors\r\n var err error = nil\r\n //process optional template parameters\r\n _pathUrl, err = apihelper_pkg.AppendUrlWithTemplateParameters(_pathUrl, map[string]interface{} {\r\n \"invoice_id\" : invoiceId,\r\n })\r\n if err != nil 
{\r\n //error in template param handling\r\n return nil, err\r\n }\r\n\r\n //the base uri for api requests\r\n _queryBuilder := configuration_pkg.BASEURI;\r\n\r\n //prepare query string for API call\r\n _queryBuilder = _queryBuilder + _pathUrl\r\n\r\n //validate and preprocess url\r\n _queryBuilder, err = apihelper_pkg.CleanUrl(_queryBuilder)\r\n if err != nil {\r\n //error in url validation or cleaning\r\n return nil, err\r\n }\r\n //prepare headers for the outgoing request\r\n headers := map[string]interface{} {\r\n \"user-agent\" : \"MundiSDK - Go 2.4.5\",\r\n \"accept\" : \"application/json\",\r\n \"content-type\" : \"application/json; charset=utf-8\",\r\n \"Content-Type\" : \"application/json\",\r\n \"idempotency-key\" : apihelper_pkg.ToString(idempotencyKey, \"\"),\r\n }\r\n\r\n //prepare API request\r\n _request := unirest.PatchWithAuth(_queryBuilder, headers, body, me.config.BasicAuthUserName(), me.config.BasicAuthPassword())\r\n //and invoke the API call request to fetch the response\r\n _response, err := unirest.AsString(_request,false);\r\n if err != nil {\r\n //error in API invocation\r\n return nil, err\r\n }\r\n\r\n //error handling using HTTP status codes\r\n if (_response.Code == 400) {\r\n err = apihelper_pkg.NewAPIError(\"Invalid request\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 401) {\r\n err = apihelper_pkg.NewAPIError(\"Invalid API key\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 404) {\r\n err = apihelper_pkg.NewAPIError(\"An informed resource was not found\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 412) {\r\n err = apihelper_pkg.NewAPIError(\"Business validation error\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 422) {\r\n err = apihelper_pkg.NewAPIError(\"Contract validation error\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 500) {\r\n err = apihelper_pkg.NewAPIError(\"Internal server error\", _response.Code, 
_response.RawBody)\r\n } else if (_response.Code < 200) || (_response.Code > 206) { //[200,206] = HTTP OK\r\n err = apihelper_pkg.NewAPIError(\"HTTP Response Not OK\", _response.Code, _response.RawBody)\r\n }\r\n if(err != nil) {\r\n //error detected in status code validation\r\n return nil, err\r\n }\r\n\r\n //returning the response\r\n var retVal *models_pkg.InvoicesMetadataResponse = &models_pkg.InvoicesMetadataResponse{}\r\n err = json.Unmarshal(_response.RawBody, &retVal)\r\n\r\n if err != nil {\r\n //error in parsing\r\n return nil, err\r\n }\r\n return retVal, nil\r\n\r\n}", "func (m *DBMockedObject) Update(ctx context.Context, document entity.PersonalData) (int64, error) {\n\targs := m.Called(ctx, document)\n\treturn int64(args.Int(0)), args.Error(1)\n}", "func (a *api) h_PUT_persons_persId(c *gin.Context) {\n\tpersId := c.Param(\"persId\")\n\ta.logger.Debug(\"PUT /persons/\", persId)\n\tvar p Person\n\tif a.errorResponse(c, bindAppJson(c, &p)) {\n\t\treturn\n\t}\n\n\taCtx := a.getAuthContext(c)\n\tif a.errorResponse(c, aCtx.AuthZCamAccess(ptr2int64(p.CamId, 0), auth.AUTHZ_LEVEL_OU)) {\n\t\treturn\n\t}\n\n\tp.Id = persId\n\tmp, err := a.person2mperson(&p)\n\tif a.errorResponse(c, err) {\n\t\treturn\n\t}\n\tif a.errorResponse(c, a.Dc.UpdatePerson(mp)) {\n\t\treturn\n\t}\n\tc.Status(http.StatusNoContent)\n}", "func (o TenantSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"dbmodel: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), 
tenantPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `tenants` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, tenantPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"dbmodel: unable to update all in tenant slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"dbmodel: unable to retrieve rows affected all in update all tenant\")\n\t}\n\treturn rowsAff, nil\n}", "func (a *BulkApiService) PatchBulkExport(ctx context.Context, moid string) ApiPatchBulkExportRequest {\n\treturn ApiPatchBulkExportRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (o RecipeLipidSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), recipeLipidPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"recipe_lipid\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, recipeLipidPrimaryKeyColumns, 
len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in recipeLipid slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all recipeLipid\")\n\t}\n\treturn rowsAff, nil\n}", "func (me *PROTECTIONJOBS_IMPL) UpdateProtectionJob (\r\n body *models.ProtectionJobRequest,\r\n id int64) (*models.ProtectionJob, error) {\r\n//validating required parameters\r\n if (body == nil){\r\n return nil,errors.New(\"The parameter 'body' is a required parameter and cannot be nil.\")\r\n} //the endpoint path uri\r\n _pathUrl := \"/public/protectionJobs/{id}\"\r\n\r\n //variable to hold errors\r\n var err error = nil\r\n //process optional template parameters\r\n _pathUrl, err = apihelper.AppendUrlWithTemplateParameters(_pathUrl, map[string]interface{} {\r\n \"id\" : id,\r\n })\r\n if err != nil {\r\n //error in template param handling\r\n return nil, err\r\n }\r\n\r\n //the base uri for api requests\r\n _queryBuilder := configuration.GetBaseURI(configuration.DEFAULT_HOST,me.config);\r\n\r\n //prepare query string for API call\r\n _queryBuilder = _queryBuilder + _pathUrl\r\n\r\n //validate and preprocess url\r\n _queryBuilder, err = apihelper.CleanUrl(_queryBuilder)\r\n if err != nil {\r\n //error in url validation or cleaning\r\n return nil, err\r\n }\r\n if me.config.AccessToken() == nil {\r\n return nil, errors.New(\"Access Token not set. 
Please authorize the client using client.Authorize()\");\r\n }\r\n //prepare headers for the outgoing request\r\n headers := map[string]interface{} {\r\n \"user-agent\" : \"cohesity-Go-sdk-6.2.0\",\r\n \"accept\" : \"application/json\",\r\n \"content-type\" : \"application/json; charset=utf-8\",\r\n \"Authorization\" : fmt.Sprintf(\"%s %s\",*me.config.AccessToken().TokenType, *me.config.AccessToken().AccessToken),\r\n }\r\n\r\n //prepare API request\r\n _request := unirest.Put(_queryBuilder, headers, body)\r\n //and invoke the API call request to fetch the response\r\n _response, err := unirest.AsString(_request,me.config.SkipSSL());\r\n if err != nil {\r\n //error in API invocation\r\n return nil, err\r\n }\r\n\r\n //error handling using HTTP status codes\r\n if (_response.Code == 0) {\r\n err = apihelper.NewAPIError(\"Error\", _response.Code, _response.RawBody)\r\n } else if (_response.Code < 200) || (_response.Code > 206) { //[200,206] = HTTP OK\r\n err = apihelper.NewAPIError(\"HTTP Response Not OK\", _response.Code, _response.RawBody)\r\n }\r\n if(err != nil) {\r\n //error detected in status code validation\r\n return nil, err\r\n }\r\n\r\n //returning the response\r\n var retVal *models.ProtectionJob = &models.ProtectionJob{}\r\n err = json.Unmarshal(_response.RawBody, &retVal)\r\n\r\n if err != nil {\r\n //error in parsing\r\n return nil, err\r\n }\r\n return retVal, nil\r\n\r\n}", "func (m *ManagedAppRegistrationItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ManagedAppRegistrationable, requestConfiguration *ManagedAppRegistrationItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ManagedAppRegistrationable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateManagedAppRegistrationFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ManagedAppRegistrationable), nil\n}", "func BulkUpdateContact(r *http.Request, ctx types.Context) contract.Response {\n\trequest := &contract.BulkUpdateContactRequest{}\n\tresponse := &contract.BulkUpdateContactResponse{}\n\n\tif err := json.NewDecoder(r.Body).Decode(request); err != nil {\n\t\tlogger.Get().Errorf(\"Unable to parse request body. Err : %v\", err)\n\t\ter := error.ErrBadRequestInvalidBody(err)\n\t\treturn util.FailureResponse(ctx, response, er.HTTPCode, er)\n\t}\n\n\tfor _, c := range request.Contacts {\n\t\tcontact := &c\n\t\tif err := validater.Of(contact).Validate(); err != nil {\n\t\t\tlogger.Get().Errorf(\"Validation error. 
Err : %v\", err)\n\t\t\treturn util.FailureResponse(ctx, response, err.HTTPCode, err)\n\t\t}\n\t}\n\n\tif err := core.BulkUpdateContact(ctx, request, response); err != nil {\n\t\treturn util.FailureResponse(ctx, response, err.HTTPCode, err)\n\t}\n\n\treturn util.SuccessResponse(ctx, response, http.StatusCreated)\n}", "func (m *ApplicationSignInDetailedSummaryApplicationSignInDetailedSummaryItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ApplicationSignInDetailedSummaryable, requestConfiguration *ApplicationSignInDetailedSummaryApplicationSignInDetailedSummaryItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ApplicationSignInDetailedSummaryable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateApplicationSignInDetailedSummaryFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ApplicationSignInDetailedSummaryable), nil\n}", "func (m *TeamItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Teamable, requestConfiguration *TeamItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Teamable, error) {\n requestInfo, err := 
m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateTeamFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Teamable), nil\n}", "func updatePerson(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tfmt.Println(\"Update HIT\")\n\tparams := mux.Vars(r)\n\tstmt, err := db.Prepare(\"UPDATE Persons SET pAge = ? 
WHERE pName = ?\")\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tvar per Person\n\tjson.Unmarshal(body, &per)\n\tage := per.Age\n\t_, err = stmt.Exec(age, params[\"name\"])\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\tfmt.Fprintf(w, \"Person with Name = %s was updated\", params[\"name\"])\n}", "func (client JobClient) UpdateResponder(resp *http.Response) (result JobResourceDescription, err error) {\n err = autorest.Respond(\n resp,\n azure.WithErrorUnlessStatusCode(http.StatusOK,http.StatusCreated,http.StatusAccepted),\n autorest.ByUnmarshallingJSON(&result),\n autorest.ByClosing())\n result.Response = autorest.Response{Response: resp}\n return\n }", "func (m *TeamworkSoftwareUpdateHealth) SetCompanyPortalSoftwareUpdateStatus(value TeamworkSoftwareUpdateStatusable)() {\n err := m.GetBackingStore().Set(\"companyPortalSoftwareUpdateStatus\", value)\n if err != nil {\n panic(err)\n }\n}", "func (o FriendshipSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), friendshipPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `friendship` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, friendshipPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) 
{\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in friendship slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all friendship\")\n\t}\n\treturn rowsAff, nil\n}", "func (o ClaimInListSlice) UpdateAllP(exec boil.Executor, cols M) {\n\terr := o.UpdateAll(exec, cols)\n\tif err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n}", "func (o CMFAdminMenuSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfAdminMenuPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_admin_menu` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfAdminMenuPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfAdminMenu slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, 
errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfAdminMenu\")\n\t}\n\treturn rowsAff, nil\n}", "func (m *ItemPhotoRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ProfilePhotoable, requestConfiguration *ItemPhotoRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ProfilePhotoable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateProfilePhotoFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ProfilePhotoable), nil\n}", "func (o PremiumCodeSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), premiumCodePrimaryKeyMapping)\n\t\targs = append(args, 
pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"premium_codes\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, premiumCodePrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in premiumCode slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all premiumCode\")\n\t}\n\treturn rowsAff, nil\n}", "func (m *TenantStatusRequestBuilder) Patch(ctx context.Context, body i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.TenantStatusable, requestConfiguration *TenantStatusRequestBuilderPatchRequestConfiguration)(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.TenantStatusable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.CreateTenantStatusFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.TenantStatusable), nil\n}", "func (m 
*UserFlowLanguagePageItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserFlowLanguagePageable, requestConfiguration *UserFlowLanguagePageItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserFlowLanguagePageable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateUserFlowLanguagePageFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserFlowLanguagePageable), nil\n}", "func (client HTTPSuccessClient) Patch200(booleanValue *bool) (result autorest.Response, err error) {\n req, err := client.Patch200Preparer(booleanValue)\n if err != nil {\n return result, autorest.NewErrorWithError(err, \"httpinfrastructuregroup.HTTPSuccessClient\", \"Patch200\", nil , \"Failure preparing request\")\n }\n\n resp, err := client.Patch200Sender(req)\n if err != nil {\n result.Response = resp\n return result, autorest.NewErrorWithError(err, \"httpinfrastructuregroup.HTTPSuccessClient\", \"Patch200\", resp, \"Failure sending request\")\n }\n\n result, err = client.Patch200Responder(resp)\n if err != nil {\n err = autorest.NewErrorWithError(err, \"httpinfrastructuregroup.HTTPSuccessClient\", \"Patch200\", resp, \"Failure responding to request\")\n }\n\n return\n}", "func 
(m *ItemSitesItemAnalyticsItemActivityStatsItemActivityStatItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ItemActivityStatable, requestConfiguration *ItemSitesItemAnalyticsItemActivityStatsItemActivityStatItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ItemActivityStatable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateItemActivityStatFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ItemActivityStatable), nil\n}", "func (m *ExactMatchDataStoresExactMatchDataStoreItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ExactMatchDataStoreable, requestConfiguration *ExactMatchDataStoresExactMatchDataStoreItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ExactMatchDataStoreable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": 
i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateExactMatchDataStoreFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ExactMatchDataStoreable), nil\n}", "func (m *OnlineMeetingInfo) SetAdditionalData(value map[string]any)() {\n err := m.GetBackingStore().Set(\"additionalData\", value)\n if err != nil {\n panic(err)\n }\n}", "func (m *User) SetPeople(value []Personable)() {\n m.people = value\n}" ]
[ "0.73712814", "0.68489635", "0.67874825", "0.6128026", "0.5711539", "0.5652175", "0.56520283", "0.5566631", "0.531773", "0.5257406", "0.5192818", "0.51048386", "0.50799334", "0.50211227", "0.4973502", "0.495612", "0.49225268", "0.48665044", "0.48514032", "0.4844714", "0.48313862", "0.4830478", "0.4793071", "0.4792159", "0.47867763", "0.4774733", "0.47635353", "0.4762021", "0.47599232", "0.47589865", "0.4756209", "0.47486022", "0.4739003", "0.47281945", "0.47168058", "0.4711996", "0.4706448", "0.47047845", "0.46979553", "0.46959293", "0.46947232", "0.46808952", "0.4679722", "0.46679652", "0.4664411", "0.46553522", "0.4648938", "0.46473116", "0.46456534", "0.4638421", "0.46322945", "0.46164823", "0.46090096", "0.46075472", "0.46065015", "0.46005312", "0.45962244", "0.4584698", "0.4582003", "0.45717284", "0.45714712", "0.4570428", "0.4559427", "0.45545945", "0.45507422", "0.4547406", "0.4547258", "0.4539511", "0.45362163", "0.45265812", "0.45256948", "0.45252904", "0.4525259", "0.45239672", "0.45220938", "0.4521129", "0.4520057", "0.45108375", "0.4506389", "0.45028207", "0.45025688", "0.44983417", "0.4496532", "0.4493466", "0.44896656", "0.4489295", "0.44852275", "0.4484733", "0.44818506", "0.44791925", "0.44768992", "0.44768426", "0.44767293", "0.44736987", "0.4472264", "0.44698074", "0.44685453", "0.44668272", "0.44624093", "0.4459183" ]
0.7332748
1
DefaultApplyFieldMaskHealthMenstruationPersonalInfo patches an pbObject with patcher according to a field mask.
func DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx context.Context, patchee *HealthMenstruationPersonalInfo, patcher *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) { if patcher == nil { return nil, nil } else if patchee == nil { return nil, errors1.NilArgumentError } var err error for _, f := range updateMask.Paths { if f == prefix+"Id" { patchee.Id = patcher.Id continue } if f == prefix+"CreatedAt" { patchee.CreatedAt = patcher.CreatedAt continue } if f == prefix+"UpdatedAt" { patchee.UpdatedAt = patcher.UpdatedAt continue } if f == prefix+"ProfileId" { patchee.ProfileId = patcher.ProfileId continue } if f == prefix+"PeriodLengthInDays" { patchee.PeriodLengthInDays = patcher.PeriodLengthInDays continue } if f == prefix+"CycleLengthInDays" { patchee.CycleLengthInDays = patcher.CycleLengthInDays continue } } if err != nil { return nil, err } return patchee, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultPatchSetHealthMenstruationPersonalInfo(ctx context.Context, objects []*HealthMenstruationPersonalInfo, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationPersonalInfo, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationPersonalInfo, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationPersonalInfo(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func DefaultPatchHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationPersonalInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationPersonalInfo(ctx, &HealthMenstruationPersonalInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationPersonalInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultApplyFieldMaskUserInfo(ctx context.Context, patchee *UserInfo, patcher *UserInfo, updateMask *field_mask.FieldMask, prefix string, db *gorm.DB) (*UserInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar err error\n\tvar updatedCreatedAt bool\n\tvar updatedUpdatedAt bool\n\tfor i, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UserId\" {\n\t\t\tpatchee.UserId = patcher.UserId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"LastName\" {\n\t\t\tpatchee.LastName = patcher.LastName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"FirstName\" {\n\t\t\tpatchee.FirstName = patcher.FirstName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Period\" {\n\t\t\tpatchee.Period = patcher.Period\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"DepartmentId\" {\n\t\t\tpatchee.DepartmentId = patcher.DepartmentId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"JobId\" {\n\t\t\tpatchee.JobId = patcher.JobId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"EnrollmentFlg\" {\n\t\t\tpatchee.EnrollmentFlg = patcher.EnrollmentFlg\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"AdminFlg\" {\n\t\t\tpatchee.AdminFlg = patcher.AdminFlg\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedCreatedAt && strings.HasPrefix(f, prefix+\"CreatedAt.\") {\n\t\t\tif patcher.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := 
&field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"CreatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.CreatedAt, patchee.CreatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tupdatedCreatedAt = true\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUpdatedAt && strings.HasPrefix(f, prefix+\"UpdatedAt.\") {\n\t\t\tif patcher.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UpdatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.UpdatedAt, patchee.UpdatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tupdatedUpdatedAt = true\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx context.Context, patchee *HealthMenstruationDailyEntry, patcher *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = 
patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Day\" {\n\t\t\tpatchee.Day = patcher.Day\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"IntensityPercentage\" {\n\t\t\tpatchee.IntensityPercentage = patcher.IntensityPercentage\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Type\" {\n\t\t\tpatchee.Type = patcher.Type\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Manual\" {\n\t\t\tpatchee.Manual = patcher.Manual\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"BasedOnPrediction\" {\n\t\t\tpatchee.BasedOnPrediction = patcher.BasedOnPrediction\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultApplyFieldMaskProfile(ctx context.Context, patchee *Profile, patcher *Profile, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*Profile, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Notes\" {\n\t\t\tpatchee.Notes = patcher.Notes\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"FirstName\" {\n\t\t\tpatchee.FirstName = patcher.FirstName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"LastName\" {\n\t\t\tpatchee.LastName = patcher.LastName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"PrimaryEmail\" {\n\t\t\tpatchee.PrimaryEmail = 
patcher.PrimaryEmail\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Groups\" {\n\t\t\tpatchee.Groups = patcher.Groups\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfilePictureUrl\" {\n\t\t\tpatchee.ProfilePictureUrl = patcher.ProfilePictureUrl\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultPatchSetUserInfo(ctx context.Context, objects []*UserInfo, updateMasks []*field_mask.FieldMask, db *gorm.DB) ([]*UserInfo, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*UserInfo, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchUserInfo(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func WithOverrideFieldMask(d *presenceInterceptorOptionsDecorator) {\n\td.overrideFieldMask = true\n}", "func DefaultPatchUserInfo(ctx context.Context, in *UserInfo, updateMask *field_mask.FieldMask, db *gorm.DB) (*UserInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar pbObj UserInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(UserInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadUserInfo(ctx, &UserInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(UserInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskUserInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(UserInfoWithBeforePatchSave); ok 
{\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateUserInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(UserInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultApplyFieldMaskIntPoint(ctx context.Context, patchee *IntPoint, patcher *IntPoint, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*IntPoint, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors.New(\"Patchee inputs to DefaultApplyFieldMaskIntPoint must be non-nil\")\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"X\" {\n\t\t\tpatchee.X = patcher.X\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Y\" {\n\t\t\tpatchee.Y = patcher.Y\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultPatchSetHealthMenstruationDailyEntry(ctx context.Context, objects []*HealthMenstruationDailyEntry, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationDailyEntry, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationDailyEntry, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationDailyEntry(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func DefaultPatchSetProfile(ctx context.Context, objects []*Profile, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*Profile, 
error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*Profile, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchProfile(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func DefaultPatchHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationDailyEntry\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationDailyEntry(ctx, &HealthMenstruationDailyEntry{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationDailyEntry(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationDailyEntryWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, 
in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultApplyFieldMaskPeriod(ctx context.Context, patchee *Period, patcher *Period, updateMask *field_mask.FieldMask, prefix string, db *gorm.DB) (*Period, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar err error\n\tvar updatedCreatedAt bool\n\tvar updatedUpdatedAt bool\n\tfor i, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Period\" {\n\t\t\tpatchee.Period = patcher.Period\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedCreatedAt && strings.HasPrefix(f, prefix+\"CreatedAt.\") {\n\t\t\tif patcher.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"CreatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.CreatedAt, patchee.CreatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tupdatedCreatedAt = true\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUpdatedAt && strings.HasPrefix(f, prefix+\"UpdatedAt.\") {\n\t\t\tif patcher.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UpdatedAt.\"); trimPath != 
updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.UpdatedAt, patchee.UpdatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tupdatedUpdatedAt = true\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationPersonalInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationPersonalInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func DefaultApplyFieldMaskComment(ctx context.Context, patchee *Comment, patcher *Comment, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*Comment, error) {\n\tif 
patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tvar updatedCreatedAt bool\n\tvar updatedUpdatedAt bool\n\tvar updatedDeletedAt bool\n\tvar updatedBoardId bool\n\tvar updatedPostId bool\n\tvar updatedContentId bool\n\tvar updatedUserid bool\n\tvar updatedUsername bool\n\tvar updatedNickname bool\n\tvar updatedEmail bool\n\tvar updatedPassword bool\n\tvar updatedUrl bool\n\tvar updatedUseHtml bool\n\tvar updatedUseSecret bool\n\tfor i, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedCreatedAt && strings.HasPrefix(f, prefix+\"CreatedAt.\") {\n\t\t\tif patcher.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"CreatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.CreatedAt, patchee.CreatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tupdatedCreatedAt = true\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUpdatedAt && strings.HasPrefix(f, prefix+\"UpdatedAt.\") {\n\t\t\tif patcher.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UpdatedAt.\"); trimPath != updateMask.Paths[j] 
{\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.UpdatedAt, patchee.UpdatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tupdatedUpdatedAt = true\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedDeletedAt && strings.HasPrefix(f, prefix+\"DeletedAt.\") {\n\t\t\tif patcher.DeletedAt == nil {\n\t\t\t\tpatchee.DeletedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.DeletedAt == nil {\n\t\t\t\tpatchee.DeletedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"DeletedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.DeletedAt, patchee.DeletedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"DeletedAt\" {\n\t\t\tupdatedDeletedAt = true\n\t\t\tpatchee.DeletedAt = patcher.DeletedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedBoardId && strings.HasPrefix(f, prefix+\"BoardId.\") {\n\t\t\tif patcher.BoardId == nil {\n\t\t\t\tpatchee.BoardId = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.BoardId == nil {\n\t\t\t\tpatchee.BoardId = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"BoardId.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.BoardId, patchee.BoardId, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"BoardId\" {\n\t\t\tupdatedBoardId = true\n\t\t\tpatchee.BoardId = 
patcher.BoardId\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedPostId && strings.HasPrefix(f, prefix+\"PostId.\") {\n\t\t\tif patcher.PostId == nil {\n\t\t\t\tpatchee.PostId = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.PostId == nil {\n\t\t\t\tpatchee.PostId = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"PostId.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.PostId, patchee.PostId, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"PostId\" {\n\t\t\tupdatedPostId = true\n\t\t\tpatchee.PostId = patcher.PostId\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedContentId && strings.HasPrefix(f, prefix+\"ContentId.\") {\n\t\t\tif patcher.ContentId == nil {\n\t\t\t\tpatchee.ContentId = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.ContentId == nil {\n\t\t\t\tpatchee.ContentId = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"ContentId.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.ContentId, patchee.ContentId, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"ContentId\" {\n\t\t\tupdatedContentId = true\n\t\t\tpatchee.ContentId = patcher.ContentId\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUserid && strings.HasPrefix(f, prefix+\"Userid.\") {\n\t\t\tif patcher.Userid == nil {\n\t\t\t\tpatchee.Userid = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.Userid == nil {\n\t\t\t\tpatchee.Userid = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := 
&field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"Userid.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.Userid, patchee.Userid, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"Userid\" {\n\t\t\tupdatedUserid = true\n\t\t\tpatchee.Userid = patcher.Userid\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUsername && strings.HasPrefix(f, prefix+\"Username.\") {\n\t\t\tif patcher.Username == nil {\n\t\t\t\tpatchee.Username = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.Username == nil {\n\t\t\t\tpatchee.Username = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"Username.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.Username, patchee.Username, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"Username\" {\n\t\t\tupdatedUsername = true\n\t\t\tpatchee.Username = patcher.Username\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedNickname && strings.HasPrefix(f, prefix+\"Nickname.\") {\n\t\t\tif patcher.Nickname == nil {\n\t\t\t\tpatchee.Nickname = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.Nickname == nil {\n\t\t\t\tpatchee.Nickname = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"Nickname.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.Nickname, 
patchee.Nickname, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"Nickname\" {\n\t\t\tupdatedNickname = true\n\t\t\tpatchee.Nickname = patcher.Nickname\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedEmail && strings.HasPrefix(f, prefix+\"Email.\") {\n\t\t\tif patcher.Email == nil {\n\t\t\t\tpatchee.Email = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.Email == nil {\n\t\t\t\tpatchee.Email = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"Email.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.Email, patchee.Email, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"Email\" {\n\t\t\tupdatedEmail = true\n\t\t\tpatchee.Email = patcher.Email\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedPassword && strings.HasPrefix(f, prefix+\"Password.\") {\n\t\t\tif patcher.Password == nil {\n\t\t\t\tpatchee.Password = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.Password == nil {\n\t\t\t\tpatchee.Password = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"Password.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.Password, patchee.Password, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"Password\" {\n\t\t\tupdatedPassword = true\n\t\t\tpatchee.Password = patcher.Password\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUrl && strings.HasPrefix(f, prefix+\"Url.\") {\n\t\t\tif patcher.Url == nil {\n\t\t\t\tpatchee.Url = 
nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.Url == nil {\n\t\t\t\tpatchee.Url = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"Url.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.Url, patchee.Url, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"Url\" {\n\t\t\tupdatedUrl = true\n\t\t\tpatchee.Url = patcher.Url\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUseHtml && strings.HasPrefix(f, prefix+\"UseHtml.\") {\n\t\t\tif patcher.UseHtml == nil {\n\t\t\t\tpatchee.UseHtml = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UseHtml == nil {\n\t\t\t\tpatchee.UseHtml = &wrappers.BoolValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UseHtml.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.UseHtml, patchee.UseHtml, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UseHtml\" {\n\t\t\tupdatedUseHtml = true\n\t\t\tpatchee.UseHtml = patcher.UseHtml\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUseSecret && strings.HasPrefix(f, prefix+\"UseSecret.\") {\n\t\t\tif patcher.UseSecret == nil {\n\t\t\t\tpatchee.UseSecret = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UseSecret == nil {\n\t\t\t\tpatchee.UseSecret = &wrappers.BoolValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UseSecret.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = 
append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.UseSecret, patchee.UseSecret, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UseSecret\" {\n\t\t\tupdatedUseSecret = true\n\t\t\tpatchee.UseSecret = patcher.UseSecret\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpVoteCount\" {\n\t\t\tpatchee.UpVoteCount = patcher.UpVoteCount\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"DownVoteCount\" {\n\t\t\tpatchee.DownVoteCount = patcher.DownVoteCount\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func (m *DeviceLocalCredentialInfoItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.DeviceLocalCredentialInfoable, requestConfiguration *DeviceLocalCredentialInfoItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.DeviceLocalCredentialInfoable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateDeviceLocalCredentialInfoFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.DeviceLocalCredentialInfoable), nil\n}", "func DefaultCreateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db 
*gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (m *TeamworkRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserTeamworkable, requestConfiguration *TeamworkRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserTeamworkable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateUserTeamworkFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserTeamworkable), nil\n}", "func DefaultListHealthMenstruationPersonalInfo(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p 
*query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationPersonalInfo, error) {\n\tin := HealthMenstruationPersonalInfo{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationPersonalInfoORM{}, &HealthMenstruationPersonalInfo{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationPersonalInfoORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationPersonalInfo{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func (m *ShiftPreferencesRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ShiftPreferencesable, requestConfiguration *ShiftPreferencesRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ShiftPreferencesable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateShiftPreferencesFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ShiftPreferencesable), nil\n}", "func (m *TeamTemplatesItemDefinitionsItemTeamDefinitionPermissionGrantsResourceSpecificPermissionGrantItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ResourceSpecificPermissionGrantable, requestConfiguration *TeamTemplatesItemDefinitionsItemTeamDefinitionPermissionGrantsResourceSpecificPermissionGrantItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ResourceSpecificPermissionGrantable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateResourceSpecificPermissionGrantFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return 
res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ResourceSpecificPermissionGrantable), nil\n}", "func (m *CompaniesItemCompanyInformationCompanyInformationItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CompanyInformationable, requestConfiguration *CompaniesItemCompanyInformationCompanyInformationItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CompanyInformationable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateCompanyInformationFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CompanyInformationable), nil\n}", "func (m *TeamTemplatesItemDefinitionsItemTeamDefinitionPhotoRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ProfilePhotoable, requestConfiguration *TeamTemplatesItemDefinitionsItemTeamDefinitionPhotoRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ProfilePhotoable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateProfilePhotoFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ProfilePhotoable), nil\n}", "func DefaultReadHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationPersonalInfoORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationPersonalInfoORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationPersonalInfoORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := 
ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (fieldMask UpdateAlertingPolicyRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (c *AppsModulesPatchCall) Mask(mask string) *AppsModulesPatchCall {\n\tc.urlParams_.Set(\"mask\", mask)\n\treturn c\n}", "func mutate(newObj runtime.Object) (admission.PatchOps, error) {\n\torphan := newObj.(*longhorn.Orphan)\n\tvar patchOps admission.PatchOps\n\n\tpatchOp, err := common.GetLonghornFinalizerPatchOpIfNeeded(orphan)\n\tif err != nil {\n\t\terr := errors.Wrapf(err, \"failed to get finalizer patch for orphan %v\", orphan.Name)\n\t\treturn nil, werror.NewInvalidError(err.Error(), \"\")\n\t}\n\tif patchOp != \"\" {\n\t\tpatchOps = append(patchOps, patchOp)\n\t}\n\n\treturn patchOps, nil\n}", "func (fieldMask BatchGetAlertingPoliciesResponse_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (fieldMask CreateAlertingPolicyRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func TestFieldMaskFromRequestBodyWithDescriptor(t *testing.T) {\n\tif testing.Short() {\n\t\tt.Skip()\n\t\treturn\n\t}\n\n\t_, md := descriptor.ForMessage(new(examplepb.NonStandardMessage))\n\tjsonInput := `{\"id\":\"foo\", \"thing\":{\"subThing\":{\"sub_value\":\"bar\"}}}`\n\texpected := newFieldMask(\"id\", \"thing.subThing.sub_value\")\n\n\tactual, err := runtime.FieldMaskFromRequestBody(bytes.NewReader([]byte(jsonInput)), md)\n\tif !fieldMasksEqual(actual, expected) {\n\t\tt.Errorf(\"want %v; got %v\", fieldMaskString(expected), fieldMaskString(actual))\n\t}\n\tif err != nil {\n\t\tt.Errorf(\"err %v\", err)\n\t}\n}", "func (x *fastReflection_QueryAccountInfoRequest) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch fd.FullName() {\n\tcase 
\"cosmos.auth.v1beta1.QueryAccountInfoRequest.address\":\n\t\tpanic(fmt.Errorf(\"field address of message cosmos.auth.v1beta1.QueryAccountInfoRequest is not mutable\"))\n\tdefault:\n\t\tif fd.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.auth.v1beta1.QueryAccountInfoRequest\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.auth.v1beta1.QueryAccountInfoRequest does not contain field %s\", fd.FullName()))\n\t}\n}", "func DefaultPatchProfile(ctx context.Context, in *Profile, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*Profile, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj Profile\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(ProfileWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadProfile(ctx, &Profile{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(ProfileWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskProfile(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(ProfileWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateProfile(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(ProfileWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func (fieldMask WatchAlertingPolicyResponse_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := 
fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (fieldMask GetAlertingPolicyRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (m *FeatureRolloutPolicyItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.FeatureRolloutPolicyable, requestConfiguration *FeatureRolloutPolicyItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.FeatureRolloutPolicyable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateFeatureRolloutPolicyFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.FeatureRolloutPolicyable), nil\n}", "func apply(m proto.Message, ops ...interface{}) proto.Message {\n\tmr := m.ProtoReflect()\n\tmd := mr.Descriptor()\n\tfor _, op := range ops {\n\t\tswitch op := op.(type) {\n\t\tcase setField:\n\t\t\tfd := md.Fields().ByNumber(op.num)\n\t\t\tmr.Set(fd, protoreflect.ValueOf(op.val))\n\t\tcase setUnknown:\n\t\t\tmr.SetUnknown(op.raw)\n\t\tcase setExtension:\n\t\t\tmr.Set(op.typ.TypeDescriptor(), protoreflect.ValueOf(op.val))\n\t\t}\n\t}\n\treturn m\n}", "func (fieldMask BatchGetAlertingPoliciesRequest_FieldMask) Marshal() ([]byte, 
error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (m *ItemPhotoRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ProfilePhotoable, requestConfiguration *ItemPhotoRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ProfilePhotoable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateProfilePhotoFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ProfilePhotoable), nil\n}", "func (fieldMask SearchAlertingPoliciesResponse_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (m *ReportsRequestBuilder) Patch(ctx context.Context, body i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable, requestConfiguration *ReportsRequestBuilderPatchRequestConfiguration)(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": 
i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.CreateReportsFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable), nil\n}", "func patch(newObj runtime.Object, existingObj runtime.Object, c client.Client) error {\n\tnewObjJSON, _ := apijson.Marshal(newObj)\n\tkey, _ := client.ObjectKeyFromObject(newObj)\n\t_, isUnstructured := newObj.(runtime.Unstructured)\n\t_, isCRD := newObj.(*apiextv1beta1.CustomResourceDefinition)\n\n\tif isUnstructured || isCRD || isKudoType(newObj) {\n\t\t// strategic merge patch is not supported for these types, falling back to merge patch\n\t\terr := c.Patch(context.TODO(), newObj, client.ConstantPatch(types.MergePatchType, newObjJSON))\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to apply merge patch to object %s/%s: %w\", key.Name, key.Name, err)\n\t\t}\n\t} else {\n\t\terr := c.Patch(context.TODO(), existingObj, client.ConstantPatch(types.StrategicMergePatchType, newObjJSON))\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to apply StrategicMergePatch to object %s/%s: %w\", key.Namespace, key.Name, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (m *PrivilegedSignupStatusItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable, requestConfiguration *PrivilegedSignupStatusItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, 
body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreatePrivilegedSignupStatusFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable), nil\n}", "func (fieldMask WatchAlertingPoliciesResponse_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (fieldMask ListAlertingPoliciesResponse_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (a *HyperflexApiService) PatchHyperflexFeatureLimitInternal(ctx context.Context, moid string) ApiPatchHyperflexFeatureLimitInternalRequest {\n\treturn ApiPatchHyperflexFeatureLimitInternalRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (m *AssignmentDefaultsRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.EducationAssignmentDefaultsable, requestConfiguration *AssignmentDefaultsRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.EducationAssignmentDefaultsable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateEducationAssignmentDefaultsFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.EducationAssignmentDefaultsable), nil\n}", "func (m *ConditionalAccessRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable, requestConfiguration *ConditionalAccessRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateConditionalAccessRootFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable), nil\n}", "func mutate(newObj runtime.Object) (admission.PatchOps, error) 
{\n\tsupportBundle := newObj.(*longhorn.SupportBundle)\n\tvar patchOps admission.PatchOps\n\n\tpatchOp, err := common.GetLonghornFinalizerPatchOpIfNeeded(supportBundle)\n\tif err != nil {\n\t\terr := errors.Wrapf(err, \"failed to get finalizer patch for supportBundle %v\", supportBundle.Name)\n\t\treturn nil, werror.NewInvalidError(err.Error(), \"\")\n\t}\n\tif patchOp != \"\" {\n\t\tpatchOps = append(patchOps, patchOp)\n\t}\n\n\treturn patchOps, nil\n}", "func (fieldMask WatchAlertingPolicyRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (fuo *FriendshipUpdateOne) Modify(modifiers ...func(u *sql.UpdateBuilder)) *FriendshipUpdateOne {\n\tfuo.modifiers = append(fuo.modifiers, modifiers...)\n\treturn fuo\n}", "func (m *ConditionalAccessRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable, requestConfiguration *ConditionalAccessRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateConditionalAccessRootFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return 
res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable), nil\n}", "func (fieldMask SearchAlertingPoliciesRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (m *ConditionalAccessRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable, requestConfiguration *ConditionalAccessRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateConditionalAccessRootFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable), nil\n}", "func (x *fastReflection_QueryModuleAccountsRequest) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch fd.FullName() {\n\tdefault:\n\t\tif fd.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.auth.v1beta1.QueryModuleAccountsRequest\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.auth.v1beta1.QueryModuleAccountsRequest does not contain field %s\", fd.FullName()))\n\t}\n}", "func (m *DirectoryRequestBuilder) Patch(ctx context.Context, body 
iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable, requestConfiguration *DirectoryRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateRbacApplicationFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable), nil\n}", "func (x *fastReflection_Bech32PrefixRequest) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch fd.FullName() {\n\tdefault:\n\t\tif fd.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.auth.v1beta1.Bech32PrefixRequest\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.auth.v1beta1.Bech32PrefixRequest does not contain field %s\", fd.FullName()))\n\t}\n}", "func (m *TeamItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Teamable, requestConfiguration *TeamItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Teamable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateTeamFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Teamable), nil\n}", "func (c *PropertiesGoogleAdsLinksPatchCall) UpdateMask(updateMask string) *PropertiesGoogleAdsLinksPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (fieldMask GetMonitoredResourceDescriptorRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (m *ItemJoinedTeamsItemPrimaryChannelMembersConversationMemberItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConversationMemberable, requestConfiguration *ItemJoinedTeamsItemPrimaryChannelMembersConversationMemberItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConversationMemberable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, 
requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateConversationMemberFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConversationMemberable), nil\n}", "func (mi *MessageInfo) makeKnownFieldsFunc(si structInfo) {\n\tmi.fields = map[pref.FieldNumber]*fieldInfo{}\n\tfor i := 0; i < mi.PBType.Descriptor().Fields().Len(); i++ {\n\t\tfd := mi.PBType.Descriptor().Fields().Get(i)\n\t\tfs := si.fieldsByNumber[fd.Number()]\n\t\tvar fi fieldInfo\n\t\tswitch {\n\t\tcase fd.ContainingOneof() != nil:\n\t\t\tfi = fieldInfoForOneof(fd, si.oneofsByName[fd.ContainingOneof().Name()], si.oneofWrappersByNumber[fd.Number()])\n\t\tcase fd.IsMap():\n\t\t\tfi = fieldInfoForMap(fd, fs)\n\t\tcase fd.IsList():\n\t\t\tfi = fieldInfoForList(fd, fs)\n\t\tcase fd.Kind() == pref.MessageKind || fd.Kind() == pref.GroupKind:\n\t\t\tfi = fieldInfoForMessage(fd, fs)\n\t\tdefault:\n\t\t\tfi = fieldInfoForScalar(fd, fs)\n\t\t}\n\t\tmi.fields[fd.Number()] = &fi\n\t}\n\n\tmi.oneofs = map[pref.Name]*oneofInfo{}\n\tfor i := 0; i < mi.PBType.Descriptor().Oneofs().Len(); i++ {\n\t\tod := mi.PBType.Descriptor().Oneofs().Get(i)\n\t\tmi.oneofs[od.Name()] = makeOneofInfo(od, si.oneofsByName[od.Name()], si.oneofWrappersByType)\n\t}\n}", "func (fieldMask Ping_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (m *VirtualEndpointUserSettingsCloudPcUserSettingItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CloudPcUserSettingable, requestConfiguration *VirtualEndpointUserSettingsCloudPcUserSettingItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CloudPcUserSettingable, error) {\n requestInfo, err := 
m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateCloudPcUserSettingFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CloudPcUserSettingable), nil\n}", "func (m *TeamsAppItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TeamsAppable, requestConfiguration *TeamsAppItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TeamsAppable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateTeamsAppFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TeamsAppable), nil\n}", "func (fieldMask 
WatchAlertingPoliciesRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (c *Patch) Apply(cp, cd runtime.Object, only ...PatchType) error {\n\tif c.filterPatch(only...) {\n\t\treturn nil\n\t}\n\n\tswitch c.Type {\n\tcase PatchTypeFromCompositeFieldPath:\n\t\treturn c.applyFromFieldPathPatch(cp, cd)\n\tcase PatchTypeToCompositeFieldPath:\n\t\treturn c.applyFromFieldPathPatch(cd, cp)\n\tcase PatchTypeCombineFromComposite:\n\t\treturn c.applyCombineFromVariablesPatch(cp, cd)\n\tcase PatchTypeCombineToComposite:\n\t\treturn c.applyCombineFromVariablesPatch(cd, cp)\n\tcase PatchTypePatchSet:\n\t\t// Already resolved - nothing to do.\n\t}\n\treturn errors.Errorf(errFmtInvalidPatchType, c.Type)\n}", "func (fieldMask DeleteAlertingPolicyRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (conn *DB) WithPersonalInfoViewApprovals(manager *User) *DB {\n\treturn conn.Joins(\"LEFT JOIN LATERAL ? 
AS personal_info_view_approvals ON 1\",\n\t\tNewDataStore(conn.New()).ActiveGroupAncestors().ManagedByUser(manager).\n\t\t\tJoins(`\n\t\t\t\tJOIN groups_groups_active\n\t\t\t\t\tON groups_groups_active.parent_group_id = groups_ancestors_active.child_group_id AND\n\t\t\t\t\t groups_groups_active.personal_info_view_approved`).\n\t\t\tWhere(\"groups_groups_active.child_group_id = users.group_id\").\n\t\t\tSelect(\"1 AS approved\").\n\t\t\tLimit(1).\n\t\t\tSubQuery())\n}", "func (fieldMask ListAlertingPoliciesRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (m *EntitlementManagementRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable, requestConfiguration *EntitlementManagementRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateRbacApplicationFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable), nil\n}", "func (fieldMask ListMonitoredResourceDescriptorsResponse_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := 
fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (x *fastReflection_QueryAccountRequest) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch fd.FullName() {\n\tcase \"cosmos.auth.v1beta1.QueryAccountRequest.address\":\n\t\tpanic(fmt.Errorf(\"field address of message cosmos.auth.v1beta1.QueryAccountRequest is not mutable\"))\n\tdefault:\n\t\tif fd.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.auth.v1beta1.QueryAccountRequest\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.auth.v1beta1.QueryAccountRequest does not contain field %s\", fd.FullName()))\n\t}\n}", "func (p localPatchList) patch(f *funcProto, soff int) {\n\tfor _, l := range p {\n\t\tf.localVars[l].sPC = int32(len(f.code) + soff)\n\t}\n}", "func (x *fastReflection_MsgUpdateParamsResponse) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch fd.FullName() {\n\tdefault:\n\t\tif fd.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.distribution.v1beta1.MsgUpdateParamsResponse\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.distribution.v1beta1.MsgUpdateParamsResponse does not contain field %s\", fd.FullName()))\n\t}\n}", "func (m *FileRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.AgreementFileable, requestConfiguration *FileRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.AgreementFileable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": 
ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateAgreementFileFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.AgreementFileable), nil\n}", "func (*FieldMask) Descriptor() ([]byte, []int) {\n\treturn file_google_protobuf_types_known_field_mask_proto_rawDescGZIP(), []int{0}\n}", "func (_Token *TokenCaller) PendingMelterAdmin(opts *bind.CallOpts) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _Token.contract.Call(opts, out, \"pendingMelterAdmin\")\n\treturn *ret0, err\n}", "func (fieldMask Distribution_BucketOptions_Linear_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (x *fastReflection_QueryModuleAccountByNameRequest) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch fd.FullName() {\n\tcase \"cosmos.auth.v1beta1.QueryModuleAccountByNameRequest.name\":\n\t\tpanic(fmt.Errorf(\"field name of message cosmos.auth.v1beta1.QueryModuleAccountByNameRequest is not mutable\"))\n\tdefault:\n\t\tif fd.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.auth.v1beta1.QueryModuleAccountByNameRequest\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.auth.v1beta1.QueryModuleAccountByNameRequest does not contain field %s\", fd.FullName()))\n\t}\n}", "func (m *WindowsFeatureUpdateProfilesWindowsFeatureUpdateProfileItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.WindowsFeatureUpdateProfileable, requestConfiguration 
*WindowsFeatureUpdateProfilesWindowsFeatureUpdateProfileItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.WindowsFeatureUpdateProfileable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateWindowsFeatureUpdateProfileFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.WindowsFeatureUpdateProfileable), nil\n}", "func (c *OrganizationsDatacollectorsPatchCall) UpdateMask(updateMask string) *OrganizationsDatacollectorsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (p *Patch) Patch() {\n\tp.patched = true\n\tif p.funcInfo != nil {\n\t\tp.applyFunc()\n\t} else if p.varInfo != nil {\n\t\tp.applyVar()\n\t}\n}", "func (c *kuberhealthyChecks) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result KuberhealthyCheck, err error) {\n\tresult = KuberhealthyCheck{}\n\terr = c.client.Patch(pt).\n\t\tNamespace(c.ns).\n\t\tResource(\"khchecks\").\n\t\tSubResource(subresources...).\n\t\tName(name).\n\t\tBody(data).\n\t\tDo(context.TODO()).\n\t\tInto(&result)\n\treturn\n}", "func (fieldMask UpdateAlertingPolicyRequest_CAS_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (x 
*fastReflection_Bech32PrefixResponse) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch fd.FullName() {\n\tcase \"cosmos.auth.v1beta1.Bech32PrefixResponse.bech32_prefix\":\n\t\tpanic(fmt.Errorf(\"field bech32_prefix of message cosmos.auth.v1beta1.Bech32PrefixResponse is not mutable\"))\n\tdefault:\n\t\tif fd.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.auth.v1beta1.Bech32PrefixResponse\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.auth.v1beta1.Bech32PrefixResponse does not contain field %s\", fd.FullName()))\n\t}\n}", "func (x *fastReflection_QueryAccountAddressByIDResponse) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch fd.FullName() {\n\tcase \"cosmos.auth.v1beta1.QueryAccountAddressByIDResponse.account_address\":\n\t\tpanic(fmt.Errorf(\"field account_address of message cosmos.auth.v1beta1.QueryAccountAddressByIDResponse is not mutable\"))\n\tdefault:\n\t\tif fd.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.auth.v1beta1.QueryAccountAddressByIDResponse\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.auth.v1beta1.QueryAccountAddressByIDResponse does not contain field %s\", fd.FullName()))\n\t}\n}", "func (m *AssignmentDefaultsRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.EducationAssignmentDefaultsable, requestConfiguration *AssignmentDefaultsRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = 
\"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (c *OrganizationsSecurityProfilesPatchCall) UpdateMask(updateMask string) *OrganizationsSecurityProfilesPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (fieldMask ConnectResponse_ResumeResponse_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (m *ShiftPreferencesRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ShiftPreferencesable, requestConfiguration *ShiftPreferencesRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (i PreserveFields) ApplyToHelper(opts *PatchOptions) {\n\topts.preserveFields = i\n}", "func (m *ChatItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Chatable, requestConfiguration 
*ChatItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Chatable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateChatFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Chatable), nil\n}", "func (b *Builder) PatchOffset(patchedOffset, patch dwarf.Offset) {\n\tinfoBytes := b.info.Bytes()\n\tbuf := new(bytes.Buffer)\n\tbinary.Write(buf, binary.LittleEndian, patch)\n\tcopy(infoBytes[patchedOffset:], buf.Bytes())\n}", "func (service *EmployeeService) PatchEmployeeDetails(employeeID string, employeeDetails models.Employee) error {\n\tcollection := service.mongoClient.Database(DbName).Collection(CollectionName)\n\tupdatesToBePerformed := bson.M{}\n\tupdatesToBePerformed[\"employeeid\"] = employeeID\n\tif employeeDetails.Department != nil {\n\t\tupdatesToBePerformed[\"department\"] = employeeDetails.Department\n\t}\n\n\tif employeeDetails.Name != nil {\n\t\tupdatesToBePerformed[\"name\"] = employeeDetails.Name\n\t}\n\n\tif employeeDetails.Skills != nil {\n\t\tupdatesToBePerformed[\"skills\"] = employeeDetails.Skills\n\t}\n\n\tif employeeDetails.Address != nil {\n\t\taddress := models.Address{}\n\t\tif employeeDetails.Address.City != nil {\n\t\t\taddress.City = employeeDetails.Address.City\n\t\t}\n\n\t\tif 
employeeDetails.Address.Country != nil {\n\t\t\taddress.Country = employeeDetails.Address.Country\n\t\t}\n\n\t\tif employeeDetails.Address.DoorNo != nil {\n\t\t\taddress.DoorNo = employeeDetails.Address.DoorNo\n\t\t}\n\n\t\tif employeeDetails.Address.State != nil {\n\t\t\taddress.State = employeeDetails.Address.State\n\t\t}\n\n\t\tupdatesToBePerformed[\"address\"] = address\n\t}\n\n\tif employeeDetails.Status != nil {\n\t\tupdatesToBePerformed[\"status\"] = employeeDetails.Status\n\t}\n\n\t// consolidatedMap(&updatesToBePerformed, employeeDetails)\n\n\tresult, err := collection.UpdateOne(\n\t\tcontext.Background(),\n\t\tbson.M{\"employeeid\": employeeID},\n\t\tbson.M{\n\t\t\t\"$set\": updatesToBePerformed,\n\t\t})\n\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\tfmt.Println(result)\n\n\treturn nil\n}", "func (fieldMask ListMonitoredResourceDescriptorsRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (a *APIPatchingApplicator) Apply(ctx context.Context, o client.Object, ao ...ApplyOption) error {\n\tif o.GetNamespace() == \"\" {\n\t\to.SetNamespace(\"default\")\n\t}\n\n\tm, ok := o.(metav1.Object)\n\tif !ok {\n\t\treturn errors.New(\"cannot access object metadata\")\n\t}\n\n\tif m.GetName() == \"\" && m.GetGenerateName() != \"\" {\n\t\treturn errors.Wrap(a.client.Create(ctx, o), \"cannot create object\")\n\t}\n\n\tdesired := o.DeepCopyObject()\n\n\terr := a.client.Get(ctx, types.NamespacedName{Name: m.GetName(), Namespace: m.GetNamespace()}, o)\n\tif kerrors.IsNotFound(err) {\n\t\t// TODO: Apply ApplyOptions here too?\n\t\treturn errors.Wrap(a.client.Create(ctx, o), \"cannot create object\")\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"cannot get object\")\n\t}\n\n\tfor _, fn := range ao {\n\t\tif err := fn(ctx, o, desired); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// TODO: Allow callers to override the kind of patch used.\n\treturn 
errors.Wrap(a.client.Patch(ctx, o, &patch{desired.(client.Object)}), \"cannot patch object\")\n}", "func (handler *profileHandler) Patch(ctx context.Context, req *proto.ProfilePatchRequest, rsp *proto.ProfileData) (err error) {\n\tprofileInstance := handler.getProfileInstance(req.GetId())\n\tprofileInstance.SetAvatar(req.Avatar)\n\terr = profileInstance.SetLocation(req.Location)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tprofileInstance.SetSkype(req.Skype)\n\terr = profileInstance.Save()\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\thandler.setProfileDataToResponse(profileInstance, rsp)\n\n\treturn nil\n}", "func (p *Permission) GetPatch() (map[string]interface{}, error) {\n\tpatch := make(map[string]interface{})\n\n\tif p.Owner != \"\" {\n\t\tpatch[\"owner\"] = p.Owner\n\t}\n\tif p.Username != \"\" {\n\t\treturn nil, errors.NewUnsupportedPatchError(\"permission\", \"username\")\n\t}\n\tif p.Password != \"\" {\n\t\treturn nil, errors.NewUnsupportedPatchError(\"permission\", \"password\")\n\t}\n\tif p.Creator != \"\" {\n\t\treturn nil, errors.NewUnsupportedPatchError(\"permission\", \"creator\")\n\t}\n\tif p.Categories != nil {\n\t\tpatch[\"categories\"] = p.Categories\n\t\tif p.ACLs != nil {\n\t\t\tif err := p.ValidateACLs(p.ACLs...); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tpatch[\"acls\"] = p.ACLs\n\t\t} else {\n\t\t\tpatch[\"acls\"] = category.ACLsFor(p.Categories...)\n\t\t}\n\t}\n\tif p.Ops != nil {\n\t\tpatch[\"ops\"] = p.Ops\n\t}\n\tif p.Indices != nil {\n\t\tpatch[\"indices\"] = p.Indices\n\t}\n\tif p.Sources != nil {\n\t\tif err := validateSources(p.Sources); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpatch[\"sources\"] = p.Sources\n\t}\n\tif p.Referers != nil {\n\t\tif err := validateReferers(p.Referers); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpatch[\"referers\"] = p.Referers\n\t}\n\tif p.CreatedAt != \"\" {\n\t\treturn nil, errors.NewUnsupportedPatchError(\"permission\", \"created_at\")\n\t}\n\tif p.TTL.String() != \"0s\" 
{\n\t\tpatch[\"ttl\"] = p.TTL\n\t}\n\t// Cannot patch individual limits to 0\n\tif p.Limits != nil {\n\t\tlimits := make(map[string]interface{})\n\t\tif p.Limits.IPLimit != 0 {\n\t\t\tlimits[\"ip_limit\"] = p.Limits.IPLimit\n\t\t}\n\t\tif p.Limits.DocsLimit != 0 {\n\t\t\tlimits[\"docs_limit\"] = p.Limits.DocsLimit\n\t\t}\n\t\tif p.Limits.SearchLimit != 0 {\n\t\t\tlimits[\"search_limit\"] = p.Limits.SearchLimit\n\t\t}\n\t\tif p.Limits.IndicesLimit != 0 {\n\t\t\tlimits[\"indices_limit\"] = p.Limits.IndicesLimit\n\t\t}\n\t\tif p.Limits.CatLimit != 0 {\n\t\t\tlimits[\"cat_limit\"] = p.Limits.CatLimit\n\t\t}\n\t\tif p.Limits.ClustersLimit != 0 {\n\t\t\tlimits[\"clusters_limit\"] = p.Limits.ClustersLimit\n\t\t}\n\t\tif p.Limits.MiscLimit != 0 {\n\t\t\tlimits[\"misc_limit\"] = p.Limits.MiscLimit\n\t\t}\n\t\tpatch[\"limits\"] = limits\n\t}\n\tif p.Description != \"\" {\n\t\tpatch[\"description\"] = p.Description\n\t}\n\n\treturn patch, nil\n}", "func (x *fastReflection_QueryAccountInfoResponse) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {\n\tswitch fd.FullName() {\n\tcase \"cosmos.auth.v1beta1.QueryAccountInfoResponse.info\":\n\t\tif x.Info == nil {\n\t\t\tx.Info = new(BaseAccount)\n\t\t}\n\t\treturn protoreflect.ValueOfMessage(x.Info.ProtoReflect())\n\tdefault:\n\t\tif fd.IsExtension() {\n\t\t\tpanic(fmt.Errorf(\"proto3 declared messages do not support extensions: cosmos.auth.v1beta1.QueryAccountInfoResponse\"))\n\t\t}\n\t\tpanic(fmt.Errorf(\"message cosmos.auth.v1beta1.QueryAccountInfoResponse does not contain field %s\", fd.FullName()))\n\t}\n}", "func (m *ItemTermStoreGroupsGroupItemRequestBuilder) Patch(ctx context.Context, body ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Groupable, requestConfiguration *ItemTermStoreGroupsGroupItemRequestBuilderPatchRequestConfiguration)(ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Groupable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, 
requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.CreateGroupFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Groupable), nil\n}" ]
[ "0.68944365", "0.6709619", "0.6301576", "0.6023577", "0.59261936", "0.58738303", "0.53895783", "0.52678984", "0.5183263", "0.51596296", "0.5065274", "0.5012141", "0.4974999", "0.49678084", "0.4946724", "0.4827448", "0.47386423", "0.4725287", "0.4687354", "0.46094146", "0.4591762", "0.45882094", "0.4581174", "0.45793536", "0.4566458", "0.45374462", "0.45334655", "0.45280072", "0.45277336", "0.4522703", "0.45006236", "0.44746363", "0.44665933", "0.44659188", "0.4454249", "0.4444732", "0.44239536", "0.44227827", "0.44149968", "0.43944463", "0.43862182", "0.43824562", "0.43819728", "0.43758243", "0.43746436", "0.4360057", "0.43571395", "0.43526757", "0.4351507", "0.4350739", "0.43407062", "0.43377578", "0.43333185", "0.43227232", "0.43178746", "0.43165824", "0.43148056", "0.4304304", "0.4300212", "0.42990708", "0.42989364", "0.42904413", "0.42841616", "0.4282886", "0.42822704", "0.4276233", "0.4275292", "0.42654148", "0.4258195", "0.42561385", "0.42512712", "0.4248263", "0.42452478", "0.42444107", "0.42426383", "0.42407385", "0.42388323", "0.42372733", "0.42340967", "0.42337698", "0.4226868", "0.42214304", "0.42178166", "0.42169988", "0.42151117", "0.41996422", "0.4197058", "0.4193142", "0.41915902", "0.41901252", "0.41895583", "0.4187222", "0.4181302", "0.41767234", "0.4176093", "0.41745922", "0.4173161", "0.4171399", "0.4166181", "0.4162787" ]
0.7750504
0
DefaultListHealthMenstruationPersonalInfo executes a gorm list call
func DefaultListHealthMenstruationPersonalInfo(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationPersonalInfo, error) { in := HealthMenstruationPersonalInfo{} ormObj, err := in.ToORM(ctx) if err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListApplyQuery); ok { if db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil { return nil, err } } db, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationPersonalInfoORM{}, &HealthMenstruationPersonalInfo{}, f, s, p, fs) if err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListFind); ok { if db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil { return nil, err } } db = db.Where(&ormObj) db = db.Order("id") ormResponse := []HealthMenstruationPersonalInfoORM{} if err := db.Find(&ormResponse).Error; err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterListFind); ok { if err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil { return nil, err } } pbResponse := []*HealthMenstruationPersonalInfo{} for _, responseEntry := range ormResponse { temp, err := responseEntry.ToPB(ctx) if err != nil { return nil, err } pbResponse = append(pbResponse, &temp) } return pbResponse, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultReadHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationPersonalInfoORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationPersonalInfoORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationPersonalInfoORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultListHealthMenstruationDailyEntry(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationDailyEntry, error) {\n\tin := HealthMenstruationDailyEntry{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationDailyEntryORM{}, &HealthMenstruationDailyEntry{}, 
f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationDailyEntryORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationDailyEntry{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func DefaultListUserInfo(ctx context.Context, db *gorm.DB) ([]*UserInfo, error) {\n\tin := UserInfo{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm1.ApplyCollectionOperators(ctx, db, &UserInfoORM{}, &UserInfo{}, nil, nil, nil, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []UserInfoORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := 
[]*UserInfo{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func DefaultCreateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultPatchSetHealthMenstruationPersonalInfo(ctx context.Context, objects []*HealthMenstruationPersonalInfo, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationPersonalInfo, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationPersonalInfo, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationPersonalInfo(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func DefaultPatchHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, 
errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationPersonalInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationPersonalInfo(ctx, &HealthMenstruationPersonalInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationPersonalInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func (h *User) List(w http.ResponseWriter, r *http.Request) {\n\tlimit, offset := utils.GetPaginationParams(r.URL.Query())\n\tresp, err := h.Storage.GetUserList(limit, offset)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tR.JSON500(w)\n\t\treturn\n\t}\n\n\tif len(resp) < 1 {\n\t\tR.JSON404(w)\n\t\treturn\n\t}\n\n\tR.JSON200(w, resp)\n}", "func (us UserService) List(dto dto.GeneralListDto) ([]model.User, int64) {\n\treturn userDao.List(dto)\n}", "func (srv *UsersService) ListHandler(ctx *gin.Context) {\n\tlogger := srv.logger.New(\"action\", 
\"ListHandler\")\n\n\tcurrentUser := GetCurrentUser(ctx)\n\n\tlimitQuery := ctx.DefaultQuery(\"limit\", \"10\")\n\tpageQuery := ctx.DefaultQuery(\"page\", \"1\")\n\tparams := ctx.Request.URL.Query()\n\n\tvar adminsRoleIncluded = false\n\n\troles := params[\"filter[role_name]\"]\n\tif len(roles) > 0 {\n\t\tfor key, role := range roles {\n\t\t\t// remove root from role names if user is not root\n\t\t\t// only root can see root users\n\t\t\tif role == models.RoleRoot && currentUser.RoleName != models.RoleRoot {\n\t\t\t\tcopy(roles[key:], roles[key+1:])\n\t\t\t\troles[len(roles)-1] = \"\"\n\t\t\t\troles = roles[:len(roles)-1]\n\t\t\t}\n\t\t\tif role == models.RoleRoot || role == models.RoleAdmin {\n\t\t\t\tadminsRoleIncluded = true\n\t\t\t}\n\t\t}\n\t} else {\n\t\tadminsRoleIncluded = true\n\t}\n\n\tvar hasPerm bool\n\tif adminsRoleIncluded {\n\t\thasPerm = srv.PermissionsService.CanViewAdminProfile(currentUser.UID)\n\t} else {\n\t\thasPerm = srv.PermissionsService.CanViewUserProfile(currentUser.UID)\n\t}\n\n\tif !hasPerm {\n\t\tsrv.ResponseService.Forbidden(ctx)\n\t\treturn\n\t}\n\n\tquery := srv.Repository.GetUsersRepository().Filter(params)\n\n\tpagination, err := srv.Repository.GetUsersRepository().Paginate(query, pageQuery, limitQuery, serializers.NewUsers())\n\tif err != nil {\n\t\tlogger.Error(\"сan't load list of user\", \"error\", err)\n\t\t// Returns a \"400 StatusBadRequest\" response\n\t\tsrv.ResponseService.Error(ctx, responses.CannotRetrieveCollection, \"Can't load list of users\")\n\t\treturn\n\t}\n\n\t// Returns a \"200 OK\" response\n\tsrv.ResponseService.OkResponse(ctx, pagination)\n}", "func (h *Handler) list() http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tentities, err := h.UserDAO.FetchAll(r.Context())\n\t\tswitch {\n\t\tcase errors.Is(err, errorx.ErrNoUser):\n\t\t\tmsg := &errorMessage{\n\t\t\t\tMessage: fmt.Sprintf(\"no users exist\"),\n\t\t\t}\n\t\t\tresponse.JSON(w, http.StatusNotFound, 
msg)\n\t\t\treturn\n\t\tcase err != nil:\n\t\t\tmsg := &errorMessage{\n\t\t\t\tError: err.Error(),\n\t\t\t\tMessage: \"user datastore error\",\n\t\t\t}\n\t\t\tresponse.JSON(w, http.StatusInternalServerError, msg)\n\t\t\treturn\n\t\tdefault:\n\t\t\tresponse.JSON(w, http.StatusOK, entities)\n\t\t}\n\t}\n}", "func (client PermissionsClient) ListByBillingProfileResponder(resp *http.Response) (result PermissionsListResult, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (h WorkloadHandler) List(ctx *gin.Context) {\n}", "func (UserService) List(ctx context.Context, gdto dto.GeneralListDto) ([]model.User, int64) {\n\tcols := \"*\"\n\tgdto.Q, cols = dataPermService.DataPermFilter(ctx, \"users\", gdto)\n\treturn userDao.List(gdto, cols)\n}", "func (hh *HealthCheckHandler) List(w http.ResponseWriter, r *http.Request) {\n\tqueryParams := r.URL.Query()\n\tpage, err := strconv.Atoi(queryParams[\"page\"][0])\n\tif err != nil {\n\t\thttp.Error(w, marshalError(err.Error()), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tlist := hh.db.List()\n\tsort.Sort(models.HealthChecks(list))\n\tstart, end := paginate(page, 10, len(list))\n\tpaginated := list[start:end]\n\n\tres := &models.HealthCheckList{\n\t\tItems: paginated,\n\t\tTotal: len(list),\n\t\tPage: page,\n\t\tSize: 10,\n\t}\n\n\tb, err := json.Marshal(res)\n\tif err != nil {\n\t\thttp.Error(w, marshalError(err.Error()), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tw.Write(b)\n}", "func genericListHandler(options CrudOptions) uhttp.Handler {\n\tvar middlewares []uhttp.Middleware\n\tif options.ListPermission != nil {\n\t\tmiddlewares = []uhttp.Middleware{uauth.AuthJWT()}\n\t}\n\treturn 
uhttp.NewHandler(\n\t\tuhttp.WithPreProcess(options.ListPreprocess),\n\t\tuhttp.WithMiddlewares(middlewares...),\n\t\tuhttp.WithRequiredGet(options.ListRequiredGet),\n\t\tuhttp.WithOptionalGet(options.ListOptionalGet),\n\t\tuhttp.WithGet(func(r *http.Request, ret *int) interface{} {\n\t\t\t// Sanity check: ListOthersPermission can only be set if ListPermission is set\n\t\t\tif options.ListPermission == nil && options.ListOthersPermission != nil {\n\t\t\t\t*ret = http.StatusInternalServerError\n\t\t\t\treturn map[string]string{\"err\": \"Configuration problem: ListOthersPermission can only be set if ListPermission is set.\"}\n\t\t\t}\n\n\t\t\t// Check permissions\n\t\t\tvar limitToUser *uauth.User\n\t\t\tvar tmpUser *uauth.User\n\t\t\tvar err error\n\t\t\tif options.ListPermission != nil {\n\t\t\t\t// Return nothing, if listPermission is required but the user does not have it\n\t\t\t\ttmpUser, err = uauth.UserFromRequest(r)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"Could not get user (%s)\", err)\n\t\t\t\t}\n\n\t\t\t\tif !tmpUser.CheckPermission(*options.ListPermission) {\n\t\t\t\t\treturn fmt.Errorf(\"User does not have the required permission: %s\", *options.ListPermission)\n\t\t\t\t}\n\n\t\t\t\t// Limit results if ListOthersPermission is required but the user does not have it\n\t\t\t\tif options.ListOthersPermission != nil {\n\t\t\t\t\tif !tmpUser.CheckPermission(*options.ListOthersPermission) {\n\t\t\t\t\t\tlimitToUser = tmpUser\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Load\n\t\t\tobjsFromDb, err := options.ModelService.List(limitToUser != nil, r.Context())\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t// Render Response\n\t\t\treturn objsFromDb\n\t\t}),\n\t)\n}", "func (rm *RequestManager) ListHandler(w http.ResponseWriter, r *http.Request) {\n\tperson := &rm.Person;\n\tif r.Method != \"GET\" {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t}\n\n\tlogger.log(\"GET /list \" + r.URL.Path)\n\n\tdata, err := 
person.GetList(w)\n\tif(err == nil) {\n\t\tjson.NewEncoder(w).Encode(&data)\n\t}\n}", "func (h *Handlers) ListHandler(w http.ResponseWriter, r *http.Request) {\n\tif r.Method == \"POST\" {\n\t\tfirstnameInput := strings.Trim(r.FormValue(\"firstname\"), \" \")\n\t\tlastnameInput := strings.Trim(r.FormValue(\"lastname\"), \" \")\n\n\t\tif h.ValidInput.MatchString(firstnameInput) && h.ValidInput.MatchString(lastnameInput) {\n\t\t\tperson := entity.Person{\n\t\t\t\tFirstname: firstnameInput,\n\t\t\t\tLastname: lastnameInput,\n\t\t\t}\n\t\t\th.DBClient.InsertPerson(person)\n\t\t}\n\n\t\thttp.Redirect(w, r, \"/\", http.StatusMovedPermanently)\n\t}\n\n\tpersons, err := h.DBClient.GetPersons()\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tfmt.Println(err)\n\t}\n\n\terr = h.ListTemplate.ExecuteTemplate(w, \"layout\", struct{ Persons []entity.Person }{persons})\n\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tfmt.Println(err)\n\t}\n}", "func DefaultListProfile(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*Profile, error) {\n\tin := Profile{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(ProfileORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &ProfileORM{}, &Profile{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(ProfileORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []ProfileORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := 
interface{}(&ormObj).(ProfileORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*Profile{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func LitUsersUnderHim(w http.ResponseWriter, r *http.Request) { \n w.Header().Set(\"Content-Type\", \"text/html; charset=utf-8\")\n t, err := template.ParseFiles(\"templates/listUsersUnderHim.html\")\n\n userDetails := getSession(r)\n\n AuthorizePages(w,r)\n if err != nil {\n fmt.Println(err) // Ugly debug output\n w.WriteHeader(http.StatusInternalServerError) // Proper HTTP response\n return\n }\n \n if err != nil {\n fmt.Println(err)\n }\n var userList []helpers.User\n var listLen int\n var failedMessage string\n var isShow bool = false\n\n userList = dbquery.GetUserByMngrList(userDetails.UserId)\n listLen = len(userList);\n\n if listLen == 0 {\n isShow = true\n failedMessage = \"Currently you are not assigned for any User\"\n } \n\n t.Execute(w, AllUsersResponse{Users: userList, ListLen: listLen, FailedMessage: failedMessage, IsShow: isShow}) \n}", "func userList(w http.ResponseWriter, r *http.Request) {}", "func (u *User) List(ctx context.Context, w http.ResponseWriter, r *http.Request, params map[string]string) error {\n\tctx, span := trace.StartSpan(ctx, \"handlers.User.List\")\n\tdefer span.End()\n\n\tusers, err := user.List(ctx, u.db)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn web.Respond(ctx, w, users, http.StatusOK)\n}", "func DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx context.Context, patchee *HealthMenstruationPersonalInfo, patcher *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, 
nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"PeriodLengthInDays\" {\n\t\t\tpatchee.PeriodLengthInDays = patcher.PeriodLengthInDays\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CycleLengthInDays\" {\n\t\t\tpatchee.CycleLengthInDays = patcher.CycleLengthInDays\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func list(s *discordgo.Session, m *discordgo.MessageCreate) error {\n\tch, err := s.State.Channel(m.ChannelID)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tmembers, err := getChamberMembers(s, ch)\n\tif err != nil {\n\t\tif err == ERR_NOT_A_CHAMBER {\n\t\t\t_, err = s.ChannelMessageSend(m.ChannelID, err.Error())\n\t\t}\n\t\treturn err\n\t}\n\n\tmessage := ch.Mention() + \" members:\\n\\n\"\n\tfor _, member := range members {\n\t\tuser := member.User\n\t\tmessage += user.Username + \"#\" + user.Discriminator + \"\\n\"\n\t}\n\n\t_, err = s.ChannelMessageSend(m.ChannelID, message)\n\treturn err\n}", "func listHandler(w http.ResponseWriter, user datastore.User, apiCall bool) {\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\n\terr := auth.CheckUserPermissions(user, datastore.Admin, apiCall)\n\tif err != nil {\n\t\tresponse.FormatStandardResponse(false, response.ErrorAuth.Code, \"\", \"\", w)\n\t\treturn\n\t}\n\n\tkeypairs, err := datastore.Environ.DB.ListAllowedKeypairs(user)\n\tif err != nil {\n\t\tresponse.FormatStandardResponse(false, response.ErrorFetchKeypairs.Code, \"\", 
err.Error(), w)\n\t\treturn\n\t}\n\n\t// Return successful JSON response with the list of keypairs\n\tw.WriteHeader(http.StatusOK)\n\tformatListResponse(true, \"\", \"\", \"\", keypairs, w)\n}", "func DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationPersonalInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationPersonalInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func ForPersonalLoans(httpCrawlService func(string, string, int) ([]byte, common.CustomError), baseURL string, page int, accumulator []personalloan.Entity) (*[]personalloan.Entity, common.CustomError) {\n\n\tfmt.Println(\"Start crawl personal loans for\", baseURL, page)\n\n\tbody, crawlErr := httpCrawlService(baseURL, \"products-services/v1/personal-loans\", page)\n\n\tif crawlErr != nil {\n\t\tfmt.Println(crawlErr)\n\t}\n\n\tjsonData := 
&personalLoanJSON{}\n\n\tmetaInfo := &MetaInfoJSON{}\n\tjson.Unmarshal(body, &metaInfo)\n\n\tjsonUnmarshallErr := json.Unmarshal(body, &jsonData)\n\n\tif jsonUnmarshallErr != nil {\n\t\tfmt.Printf(\"Error crawl personal loans for %s %s %s\", baseURL, strconv.Itoa(page), jsonUnmarshallErr)\n\t\treturn nil, common.NewInternalServerError(\"Unable to unmarshall data\", jsonUnmarshallErr)\n\t}\n\n\titems := accumulator\n\n\tfor i := range jsonData.Data.Brand.Companies {\n\t\tcompany := jsonData.Data.Brand.Companies[i]\n\t\tresult := company.PersonalLoans\n\t\titems = append(items, result...)\n\t}\n\n\tif metaInfo.Meta.TotalPages > page {\n\t\treturn ForPersonalLoans(httpCrawlService, baseURL, page+1, items)\n\t}\n\n\tfmt.Println(\"End crawl personal loans for\", baseURL, page)\n\n\treturn &items, nil\n\n}", "func (h *accountHandler) List(ctx context.Context, req *api.Request, rsp *api.Response) error {\n\tlog.Info(\"Received Example.Call request\")\n\n\t// parse values from the get request\n\tlimitStr, ok := req.Get[\"limit\"]\n\n\tif !ok || len(limitStr.Values) == 0 {\n\t\treturn errors.BadRequest(\"go.micro.api.account\", \"no content\")\n\t}\n\n\tlimit, _ := strconv.Atoi(limitStr.Values[0])\n\t// make request\n\tresponse, err := h.userSrvClient.List(ctx, &userPB.UserListQuery{\n\t\tLimit: &wrappers.UInt32Value{Value: uint32(limit)},\n\t\tPage: &wrappers.UInt32Value{Value: 1},\n\t})\n\tif err != nil {\n\t\treturn errors.InternalServerError(\"go.micro.api.account.call\", err.Error())\n\t}\n\tlog.Info(response)\n\n\t// set response status\n\trsp.StatusCode = 200\n\n\t// respond with some json\n\tb, _ := json.Marshal(response)\n\n\t// set json body\n\trsp.Body = string(b)\n\n\treturn nil\n}", "func (gs *GreetingService) List(c endpoints.Context, r *GreetingsListReq) (*GreetingsList, error) {\n\tif r.Limit <= 0 {\n\t\tr.Limit = 10\n\t}\n\n\tq := datastore.NewQuery(\"Greeting\").Limit(r.Limit)\n\tgreets := make([]*Greeting, 0, r.Limit)\n\tkeys, err := q.GetAll(c, 
&greets)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor i, k := range keys {\n\t\tgreets[i].Key = k\n\t}\n\treturn &GreetingsList{greets}, nil\n}", "func (up *userProvider) List(ctx context.Context) ([]models.User, error) {\n\tusers, err := up.userStore.List(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn users, nil\n}", "func listPeopleOnActivity(echoReq *alexa.EchoRequest, col *mgo.Collection, user *User) *alexa.EchoResponse {\n\tmsg := \"\"\n\n\t//get activity name from request\n\tactivityName, errActivity := echoReq.GetSlotValue(\"activity\")\n\n\t//if there was an error getting the activity slot value\n\tif errActivity != nil {\n\t\tlog.Println(\"error\")\n\t\tmsg = \"There was an error with your activity name\"\n\t\techoResp := alexa.NewEchoResponse().OutputSpeech(msg).EndSession(false)\n\t\treturn echoResp\n\t}\n\n\t//get index of specific activity\n\tactivityIndex := getActivityIndex(user.Activities, activityName)\n\n\tswitch len(user.Activities[activityIndex].People) {\n\n\tcase 0:\n\t\tmsg = \"There is no one currently assigned to \" + activityName\n\n\tcase 1:\n\t\tmsg = user.Activities[activityIndex].People[0] + \" is the only one assigned to \" + activityName\n\n\tdefault:\n\t\tpeopleLen := strconv.Itoa(len(user.Activities[activityIndex].People))\n\t\tmsg = \"You have \" + peopleLen + \" people on this activity \"\n\n\t\t//loop through all people on an activity and formulate a response message\n\t\tfor index, person := range user.Activities[activityIndex].People {\n\t\t\tif index == len(user.Activities[activityIndex].People)-1 {\n\t\t\t\tmsg = msg + \" and \" + person + \" \"\n\t\t\t} else {\n\t\t\t\tmsg = msg + person + \" \"\n\t\t\t}\n\t\t}\n\t}\n\n\t//return a response with message of people on a specfiic activity\n\techoResp := alexa.NewEchoResponse().OutputSpeech(msg).EndSession(false)\n\treturn echoResp\n\n}", "func GetPeople() models.People { return people }", "func ListAllUsers(w http.ResponseWriter, r *http.Request) 
{\n\tdefer func() {\n\t\tif err := recover(); err != nil {\n\t\t\thelper.WriteHTTPResponse(r.Context(), w, http.StatusBadRequest, \"Mohomaaf ...dsb\", nil, nil)\n\t\t}\n\t}()\n\n\tfLog := userMgmtLogger.WithField(\"func\", \"ListAllUsers\").WithField(\"RequestID\", r.Context().Value(constants.RequestID)).WithField(\"path\", r.URL.Path).WithField(\"method\", r.Method)\n\n\tiauthctx := r.Context().Value(constants.HansipAuthentication)\n\tif iauthctx == nil {\n\t\thelper.WriteHTTPResponse(r.Context(), w, http.StatusUnauthorized, \"You are not authorized to access this resource\", nil, nil)\n\t\treturn\n\t}\n\n\tfLog.Trace(\"Listing Users\")\n\tpageRequest, err := helper.NewPageRequestFromRequest(r)\n\tif err != nil {\n\t\tfLog.Errorf(\"helper.NewPageRequestFromRequest got %s\", err.Error())\n\t\thelper.WriteHTTPResponse(r.Context(), w, http.StatusBadRequest, err.Error(), nil, nil)\n\t\treturn\n\t}\n\tusers, page, err := UserRepo.ListUser(r.Context(), pageRequest)\n\tif err != nil {\n\t\tfLog.Errorf(\"UserRepo.ListUser got %s\", err.Error())\n\t\thelper.WriteHTTPResponse(r.Context(), w, http.StatusInternalServerError, err.Error(), nil, nil)\n\t\treturn\n\t}\n\tsusers := make([]*SimpleUser, len(users))\n\tfor i, v := range users {\n\t\tsusers[i] = &SimpleUser{\n\t\t\tRecID: v.RecID,\n\t\t\tEmail: v.Email,\n\t\t\tEnabled: v.Enabled,\n\t\t\tSuspended: v.Suspended,\n\t\t}\n\t}\n\tret := make(map[string]interface{})\n\tret[\"users\"] = susers\n\tret[\"page\"] = page\n\thelper.WriteHTTPResponse(r.Context(), w, http.StatusOK, \"List of all user paginated\", nil, ret)\n}", "func List(w http.ResponseWriter, r *http.Request) {\n\tauthUser, err := auth.GetUserFromJWT(w, r)\n\tif err != nil {\n\t\tresponse.FormatStandardResponse(false, \"error-auth\", \"\", err.Error(), w)\n\t\treturn\n\t}\n\n\tlistHandler(w, authUser, false)\n}", "func (gs *GreetingService) List(c endpoints.Context, r *GreetingsListReq) (*GreetingsList, error) {\n\tif r.Limit <= 0 {\n\t\tr.Limit = 10\n\t}\n\n\tq 
:= datastore.NewQuery(\"Greeting\").Order(\"-Date\").Limit(r.Limit)\n\tgreets := make([]*Greeting, 0, r.Limit)\n\tkeys, err := q.GetAll(c, &greets)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor i, k := range keys {\n\t\tgreets[i].Key = k\n\t}\n\treturn &GreetingsList{greets}, nil\n}", "func (p *PersonServer) ListPerson(query *higrpc.PersonQuery, stream higrpc.PersonRoute_ListPersonServer) error {\n\tfmt.Println(\"PersonServer::ListPerson is called. id :\", query.Id, \", name :\", query.Name)\n\n\t// ignore query fields, only send hard coded responses\n\tfor i := 0; i < 5; i++ {\n\t\t_ = stream.Send(&higrpc.PersonResponse{\n\t\t\tId: int64(i),\n\t\t\tName: \"Person\" + strconv.Itoa(i),\n\t\t\tAge: int32(i),\n\t\t})\n\t}\n\treturn nil\n}", "func GetPeople(db *gorm.DB) func(c echo.Context) error {\n return func(c echo.Context) error {\n // get user\n if user, err := GetUser(c, db); err == nil {\n people := []models.User{}\n // get search key\n key := c.QueryParam(\"key\")\n if len(key) > 0 {\n key := \"%\" + key + \"%\"\n // search for other people but user\n db.Where(\"user_name LIKE ? OR email LIKE ? OR first_name LIKE ? 
OR last_name LIKE ?\", key, key, key, key).\n Not(\"id = ?\", user.ID).\n Find(&people)\n }\n return c.JSON(http.StatusOK, people)\n } else {\n return c.JSON(http.StatusBadRequest, map[string]string{\"message\": err.Error()})\n }\n }\n}", "func ViewListOtherManagers(w http.ResponseWriter, r *http.Request) { \n w.Header().Set(\"Content-Type\", \"text/html; charset=utf-8\")\n t, err := template.ParseFiles(\"templates/viewListOtherManagers.html\")\n\n userDetails := getSession(r)\n\n AuthorizePages(w,r)\n if err != nil {\n fmt.Println(err) // Ugly debug output\n w.WriteHeader(http.StatusInternalServerError) // Proper HTTP response\n return\n }\n \n if err != nil {\n fmt.Println(err)\n }\n var managerList []helpers.User\n var listLen int\n var failedMessage string\n var isShow bool = false\n\n managerList = dbquery.GetManagerList()\n listLen = len(managerList);\n\n var managerList1 []helpers.User\n\n for i := 0; i < listLen; i++ {\n if managerList[i].UserId != userDetails.UserId {\n managerList1 = append(managerList1, helpers.User{\n FirstName: managerList[i].FirstName,\n LastName: managerList[i].LastName,\n UserId: managerList[i].UserId,\n })\n }\n }\n if listLen == 0 {\n isShow = true\n failedMessage = \"Currently you are not assigned for any User\"\n } \n\n t.Execute(w, AllUsersResponse{Users: managerList1, ListLen: listLen, FailedMessage: failedMessage, IsShow: isShow}) \n}", "func (db database) list(w http.ResponseWriter, req *http.Request) {\n\n\tif err := itemList.Execute(w, db); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "func GetAllPerson(c *gin.Context) {\n\tperson, _ := models.LoadPeople()\n\tc.JSON(http.StatusOK, person)\n\n}", "func List(c echo.Context) error {\n\t// TODO: check authorized\n\tctx := ServerContext(c)\n\n\titem := reflect.New(ctx.Type).Interface().(gruff.ArangoObject)\n\n\tparams := item.DefaultQueryParameters()\n\tparams = params.Merge(GetListParametersFromRequest(c))\n\n\tuserID := ActiveUserID(c, ctx)\n\tfilters := gruff.BindVars{}\n\tvar 
query string\n\tif userID != \"\" && gruff.IsVersionedModel(ctx.Type) {\n\t\tfilters[\"creator\"] = userID\n\t\tquery = gruff.DefaultListQueryForUser(item, params)\n\t} else {\n\t\tquery = gruff.DefaultListQuery(item, params)\n\t}\n\n\titems := []interface{}{}\n\tif err := gruff.FindArangoObjects(ctx, query, filters, &items); err != nil {\n\t\treturn AddError(ctx, c, err)\n\t}\n\n\tctx.Payload[\"results\"] = items\n\treturn c.JSON(http.StatusOK, ctx.Payload)\n}", "func (core *Plugin) listAdmins(c cmd.Context) (string, slack.PostMessageParameters) {\n\tnoParams := slack.PostMessageParameters{}\n\tmembers := model.Members{}\n\tif err := core.Bot.DAL.GetAdmins(&members); err != nil {\n\t\tlog.WithError(err).Error(\"failed to get admins\")\n\t\treturn \"Failed to get admins\", noParams\n\t}\n\tnames := \"\"\n\tfor _, member := range members {\n\t\tnames += member.Name + \"\\n\"\n\t}\n\treturn names, noParams\n}", "func listProfiles(ctx context.Context, _ []string) error {\n\tm, err := cmdutils.LoadManager(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Printf(\"%15s %s\\n\", \"ID\", \"NAME\")\n\tfor _, p := range m.Profiles() {\n\t\tfmt.Printf(\"%15s %s\\n\", p.Id, p.Name)\n\t}\n\n\treturn nil\n}", "func DefaultReadHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationDailyEntryORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := 
interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationDailyEntryORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationDailyEntryORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (ps *PersonService) List(ids []int, opts ...FuncOption) ([]*Person, error) {\n\turl, err := ps.client.multiURL(PersonEndpoint, ids, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar p []*Person\n\n\terr = ps.client.get(url, &p)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn p, nil\n}", "func (client JobClient) ListByAccountResponder(resp *http.Response) (result JobResourceDescriptionList, err error) {\n err = autorest.Respond(\n resp,\n azure.WithErrorUnlessStatusCode(http.StatusOK),\n autorest.ByUnmarshallingJSON(&result),\n autorest.ByClosing())\n result.Response = autorest.Response{Response: resp}\n return\n }", "func AllLogManagementsGet(c *gin.Context) {\n\t// 分表注释下面两行代码\n\tmeta := model.TableMetaFromQuery(c)\n\tginutils.WriteGinJSON(c, http.StatusOK, model.AllLogManagements(meta))\n\t// 分表取消注释下面三行代码\n\t// meta := model.TableMetaFromQuery(c, \"suffix\")\n\t// suffix := c.Query(\"suffix\")\n\t// ginutils.WriteGinJSON(c, http.StatusOK, model.AllLogManagements(meta, suffix))\n}", "func (m *Mgr) List(ctx context.Context) ([]*User, error) {\n\tm.mu.Lock()\n\tdefer m.mu.Unlock()\n\n\tusers, err := m.list(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// hide passwords\n\tfor _, u := range users {\n\t\tu.Password = \"\"\n\t}\n\treturn users, nil\n}", "func (s *Server) HandleListPeople(c *gin.Context) {\n\tqueryTeamID := 
c.Param(\"team-id\")\n\n\tteamID, err := strconv.ParseInt(queryTeamID, 10, 64)\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\texists, err := s.teamExists(c.Request.Context(), teamID)\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\tif !exists {\n\t\tc.AbortWithStatusJSON(http.StatusNotFound, gin.H{\"error\": \"Team not found.\"})\n\t\treturn\n\t}\n\n\tpeople, err := s.peopleService.ListPeople(c.Request.Context(), teamID)\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, gin.H{\"data\": people})\n}", "func (h UserHTTP) List(w http.ResponseWriter, r *http.Request) {\n\tlistRequest := listRequestDecoder(r)\n\tusers, err := h.svc.ListUsers(r.Context(), listRequest)\n\tif err != nil {\n\t\th.logger.With(r.Context()).Errorf(\"list users error : %s\", err)\n\t\trender.Render(w, r, e.BadRequest(err, \"bad request\"))\n\t\treturn\n\t}\n\trender.Respond(w, r, users)\n}", "func (p *Personal) All(ctx context.Context) (*[]PersonalData, error) {\n\tusrs, err := p.DB.All(ctx)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"could not select all personal data\")\n\t}\n\treturn usrs, nil\n}", "func GetAllPeople(w http.ResponseWriter, r *http.Request) {\n\tpeople := defaultPeople()\n\tresponseJSON(w, 200, people)\n}", "func (m *publicUser) GetUserList(c *gin.Context) (int, interface{}) {\n\tuser := plugins.CurrentPlugin(c, m.config.LoginVersion)\n\tuserList, err := user.GetUserList(c, m.config.ConfigMap)\n\trspBody := metadata.LonginSystemUserListResult{}\n\tif nil != err {\n\t\trspBody.Code = common.CCErrCommHTTPDoRequestFailed\n\t\trspBody.ErrMsg = err.Error()\n\t\trspBody.Result = false\n\t}\n\trspBody.Result = true\n\trspBody.Data = userList\n\treturn 200, rspBody\n}", "func (s *business) List(where 
repository.Example, order string, limit int32, page int32) (*repository.ExampleList, *response.Error) {\n\ts.logger = s.loggerClone\n\ts.logger.SugaredLogger = s.logger.With(\"method\", \"List\")\n\n\t// Pagination\n\tif limit == 0 {\n\t\tlimit = viper.GetInt32(\"PAGE_LIMIT\")\n\t}\n\tif page <= 0 {\n\t\tpage = 1\n\t}\n\toffset := limit * (page - 1)\n\n\t// Get data list\n\texamples, count, err := s.repository.ListWhere(where, order, limit, offset)\n\tif err != nil {\n\t\ts.logger.Errorw(\"list data error\", \"error\", err)\n\t\treturn nil, response.NewErrorFromCode(errorcode.GetDataError)\n\t}\n\texampleList := repository.NewExampleList(examples, count)\n\treturn exampleList, nil\n}", "func ViewManagerAndUsers(w http.ResponseWriter, r *http.Request) {\n AuthorizePages(w,r) // Restrict Unauthorized User \n tmpl, err := template.ParseFiles(\"templates/viewManagersAndUsers.html\")\n if err != nil {\n fmt.Println(err)\n }\n\n var managerList []helpers.User\n var userList []helpers.User\n\n managerList = dbquery.GetManagerList()\n\n userId := UserIds{\n ManagerId: r.FormValue(\"managerId\"),\n }\n \n var isShow bool = false\n var noDataMessage string\n var listLen int\n \n if userId.ManagerId != \"Select\" && userId.ManagerId != \"\" {\n userList = dbquery.GetUserByMngrList(userId.ManagerId)\n listLen = len(userList);\n } else {\n isShow = true\n noDataMessage = \"Please select Manager\"\n }\n\n if (listLen == 0 && (userId.ManagerId != \"Select\" && userId.ManagerId != \"\")) {\n isShow = true\n noDataMessage = \"There are no users for this Manager\"\n }\n\n AuthorizePages(w,r) // Restrict Unauthorized User\n \n tmpl.Execute(w, AllUsersResponse{ListLen: listLen, Managers: managerList, Users: userList, IsShow: isShow, FailedMessage: noDataMessage})\n}", "func (t *MedChain) getAllHospital(stub shim.ChaincodeStubInterface, args []string) peer.Response {\n\t\t// ==== Input sanitation ====\n\t\tfmt.Println(\"- start getAllHospital\")\n\n\t\tAssetType := 
\"Hospital\"\n\n\t\tqueryString := fmt.Sprintf(\"SELECT valueJson FROM <STATE> WHERE json_extract(valueJson, '$.AssetType') = '%s'\", AssetType)\n\t\t\n\t\tqueryResults, err := getQueryResultForQueryString(stub, queryString)\n\t\t\n\t\tif err != nil {\n\t\t\treturn shim.Error(err.Error())\n\t\t}\n\t\t\n\t\treturn shim.Success(queryResults)\n\t}", "func (m *DBMockedObject) All(ctx context.Context) ([]entity.PersonalData, error) {\n\targs := m.Called(ctx)\n\treturn args.Get(0).([]entity.PersonalData), args.Error(1)\n}", "func Index(w http.ResponseWriter, r *http.Request) {\r\n\tdb := dbconn()\r\n\tselDB, err := db.Query(\"SELECT * FROM person ORDER BY firstname DESC\")\r\n\tif err != nil {\r\n\t\tpanic(err.Error())\r\n\t}\r\n\tper := person{}\r\n\tres := []person{}\r\n\tfor selDB.Next() {\r\n\t\tvar firstname, lastname, bloodgroup string\r\n\t\tvar age int\r\n\t\terr = selDB.Scan(&firstname, &lastname, &age, &bloodgroup)\r\n\t\tif err != nil {\r\n\t\t\tpanic(err.Error())\r\n\t\t}\r\n\t\tper.firstname = firstname\r\n\t\tper.lastname = lastname\r\n\t\tper.age = age\r\n\t\tper.bloodgroup = bloodgroup\r\n\t\tres = append(res, per)\r\n\t}\r\n\ttmpl.ExecuteTemplate(w, \"Index\", res)\r\n\tdefer db.Close()\r\n}", "func (client PermissionsClient) ListByCustomerResponder(resp *http.Response) (result PermissionsListResult, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func FetchList() (*List, error) {\n db := OpenDb()\n list := new(List) \n err := db.View(func(tx *bolt.Tx) error {\n b := tx.Bucket([]byte(\"Fika\"))\n err := b.ForEach(func(k, v []byte) error {\n var fetchedPerson Person\n err := json.Unmarshal(v, &fetchedPerson)\n \n if err != nil {\n return err\n }\n list.AddPerson(&fetchedPerson)\n return nil\n })\n \n if err != nil {\n return err\n }\n \n return nil\n })\n \n if 
err != nil {\n return nil, err\n }\n CloseDb(db)\n return list, nil\n}", "func provisionerList(w http.ResponseWriter, r *http.Request, t auth.Token) (err error) {\n\tallowed := permission.Check(t, permission.PermClusterRead)\n\tif !allowed {\n\t\treturn permission.ErrUnauthorized\n\t}\n\tprovs, err := provision.Registry()\n\tif err != nil {\n\t\treturn err\n\t}\n\tinfo := make([]provisionerInfo, len(provs))\n\tfor i, p := range provs {\n\t\tinfo[i].Name = p.GetName()\n\t\tif clusterProv, ok := p.(cluster.ClusteredProvisioner); ok {\n\t\t\tinfo[i].ClusterHelp = clusterProv.ClusterHelp()\n\t\t}\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\treturn json.NewEncoder(w).Encode(info)\n}", "func HomePageList(c *server.Context) error {\n\tvar (\n\t\terr error\n\t\tres []ware.BriefInfo\n\t\tidReq struct {\n\t\t\tLastID uint32 `json:\"last_id\"`\n\t\t}\n\t)\n\n\terr = c.JSONBody(&idReq)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInvalidParam, nil)\n\t}\n\n\terr = c.Validate(idReq)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInvalidParam, nil)\n\t}\n\n\tconn, err := mysql.Pool.Get()\n\tdefer mysql.Pool.Release(conn)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrMysql, nil)\n\t}\n\n\tres, err = ware.Service.HomePageList(conn, idReq.LastID)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrMysql, nil)\n\t}\n\n\treturn core.WriteStatusAndDataJSON(c, constants.ErrSucceed, res)\n}", "func List(ctx echo.Context) error {\n\tvar res []*userResponse\n\terr := db.Model(&User{}).Where(\"type = ?\", ctx.QueryParams().Get(\"type\")).Scan(&res).Error\n\tif err != nil {\n\t\treturn ctx.JSON(http.StatusBadRequest, &response{Code: 1})\n\t}\n\treturn ctx.JSON(http.StatusOK, &response{\n\t\tCode: 0,\n\t\tData: res,\n\t})\n}", "func PersonsIndex(c *gin.Context) {\r\n\tvar 
lis []models.Person\r\n\r\n\tdb, _ := c.Get(\"db\")\r\n\tconn := db.(gorm.DB)\r\n\r\n\t// Migrate the schema\r\n\tconn.AutoMigrate(&models.Person{})\r\n\r\n\tconn.Find(&lis)\r\n\tc.JSON(http.StatusOK, gin.H{\r\n\t\t\"msg\": \"thank you\",\r\n\t\t\"r\": lis,\r\n\t})\r\n\r\n}", "func (srv *SecurityQuestionService) ListHandler(ctx *gin.Context) {\n\tquestions, err := srv.Repository.GetSecurityQuestionRepository().GetAll()\n\tif err != nil {\n\t\t// Returns a \"404 StatusNotFound\" response\n\t\tsrv.ResponseService.NotFound(ctx)\n\t\treturn\n\t}\n\n\t// Returns a \"200 OK\" response\n\tsrv.ResponseService.OkResponse(ctx, questions)\n\treturn\n}", "func List() {\n\terr := ListCmd.Parse(os.Args[2:])\n\tif err != nil || internal.Help {\n\t\tListCmd.Usage()\n\t\tos.Exit(0)\n\t}\n\n\tconfigurator, err := config.NewConfigurator()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tlist, err := configurator.GetCollaborators()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tsort.Slice(list, func(i, j int) bool {\n\t\treturn config.Less(list[i], list[j])\n\t})\n\n\ttw := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0x0)\n\tfor _, collab := range list {\n\t\tline := fmt.Sprintf(\"\\t%s\\t<%s>\", collab.Name, collab.Email)\n\t\tif (collab.Alias != collab.Name) {\n\t\t\tline = fmt.Sprintf(\"%s:%s\", collab.Alias, line)\n\t\t}\n\t\tfmt.Fprintln(tw, line)\n\t}\n\ttw.Flush()\n}", "func List(modelIns interface{}, paramCreators ...CriteriaCreator) gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\n\t\ttotal, data := ListHandlerWithoutServe(modelIns, c, paramCreators...)\n\n\t\tc.JSON(200, gin.H{\n\t\t\t\"total\": total,\n\t\t\t\"data\": data,\n\t\t})\n\t}\n}", "func List(c *gin.Context){\n\tlimitStr := c.Query(\"limit\")\n\tlimit, err := strconv.Atoi(limitStr)\n\tif err != nil {\n\t\tlimit = 0\n\t}\n\tres, err := list(limit)\n\tif err != nil {\n\t\tresponese.Error(c, err, nil)\n\t\treturn\n\t}\n\tresponese.Success(c, \"successed\", res)\n}", "func (hpSrv *HomePageServ) List() {\n\tvar 
(\n\t\tarticelMd []serializer.ArticleModel\n\t)\n\tconf.MYSQL_CONNECT.Order(\"created_at desc\").Find(&articelMd)\n\tif hpSrv.Limit == 0 {\n\t\thpSrv.Limit = 4\n\t}\n\thpSrv.setArticleSet(articelMd)\n\thpSrv.pageCount = setPageCount(len(articelMd), hpSrv.Limit)\n\thpSrv.setPage()\n}", "func (r *UserRead) list(q *msg.Request, mr *msg.Result) {\n\tvar (\n\t\tuserID, userName string\n\t\trows *sql.Rows\n\t\terr error\n\t)\n\n\tif rows, err = r.stmtList.Query(); err != nil {\n\t\tmr.ServerError(err, q.Section)\n\t\treturn\n\t}\n\n\tfor rows.Next() {\n\t\tif err = rows.Scan(\n\t\t\t&userID,\n\t\t\t&userName,\n\t\t); err != nil {\n\t\t\trows.Close()\n\t\t\tmr.ServerError(err, q.Section)\n\t\t\treturn\n\t\t}\n\t\tmr.User = append(mr.User, proto.User{\n\t\t\tID: userID,\n\t\t\tUserName: userName,\n\t\t})\n\t}\n\tif err = rows.Err(); err != nil {\n\t\tmr.ServerError(err, q.Section)\n\t\treturn\n\t}\n\tmr.OK()\n}", "func (userAuthorizationServerObject *userAuthorizationEngineServerObjectStruct) sqlListUsersAuthorizedCompanies(userAuthorizedCompaniesRequest *userAuthorizationEngine_grpc_api.UserAuthorizedCompaniesRequest) *userAuthorizationEngine_grpc_api.UserAuthorizedCompaniesResponse {\n\tvar err error\n\tvar returnMessage *userAuthorizationEngine_grpc_api.UserAuthorizedCompaniesResponse\n\n\t// SQl for 'List users authorized accounts'\n\tsqlText := \"SELECT Company \"\n\tsqlText += \"FROM AuthorizedCompany \"\n\tsqlText += \"WHERE \"\n\tsqlText += \"UserName = '\" + userAuthorizedCompaniesRequest.UserId + \"' AND \"\n\tsqlText += \"ORDER BY Company \"\n\n\t// Execute a sql quesry\n\tsqlResponseRows, err := userAuthorizationServerObject.sqlDbObject.Query(sqlText)\n\tif err != nil {\n\t\tuserAuthorizationServerObject.logger.WithFields(logrus.Fields{\n\t\t\t\"Id\": \"6c93ed23-02a0-454c-8975-49906677b83c\",\n\t\t\t\"err.Error()\": err.Error(),\n\t\t\t\"sqlText\": sqlText,\n\t\t}).Warning(\"Couldn't execute sql-query\")\n\n\t\t// Create return message\n\t\treturnMessage = 
&userAuthorizationEngine_grpc_api.UserAuthorizedCompaniesResponse{\n\t\t\tUserId: userAuthorizedCompaniesRequest.UserId,\n\t\t\tAcknack: false,\n\t\t\tComments: \"Error While executing SQL\",\n\t\t\tCompanies: nil,\n\t\t}\n\t\treturn returnMessage\n\n\t} else {\n\n\t\t// Success in executing sqlStatement\n\t\tuserAuthorizationServerObject.logger.WithFields(logrus.Fields{\n\t\t\t\"Id\": \"0d3417ef-c952-4ffd-aed4-e7bb2fd4066a\",\n\t\t\t\"sqlResponseRows\": sqlResponseRows,\n\t\t}).Debug(\"Success in executing sql for 'List users authorized companies'\")\n\n\t\t// Extract data from SQL results and create response object\n\t\tvar companiesList []*userAuthorizationEngine_grpc_api.Company\n\t\tvar Company string\n\n\t\t// Iterate and fetch the records from result cursor\n\t\tfor sqlResponseRows.Next() {\n\t\t\tsqlResponseRows.Scan(&Company)\n\t\t\tconvertedCompany := &userAuthorizationEngine_grpc_api.Company{Company: Company}\n\t\t\tcompaniesList = append(companiesList, convertedCompany)\n\t\t}\n\n\t\t// Create return message\n\t\treturnMessage = &userAuthorizationEngine_grpc_api.UserAuthorizedCompaniesResponse{\n\t\t\tUserId: userAuthorizedCompaniesRequest.UserId,\n\t\t\tAcknack: true,\n\t\t\tComments: \"\",\n\t\t\tCompanies: companiesList,\n\t\t}\n\t}\n\n\treturn returnMessage\n}", "func ShowCurrentUserList() {\n\tul := &define.UserList\n\tShowUserList(ul)\n}", "func (u *UsersController) List(ctx *gin.Context) {\n\tcriteria := u.buildCriteria(ctx)\n\n\tvar listAsAdmin bool\n\tif isTatAdmin(ctx) {\n\t\tlistAsAdmin = true\n\t} else {\n\t\tuser, e := PreCheckUser(ctx)\n\t\tif e != nil {\n\t\t\tctx.AbortWithError(http.StatusInternalServerError, e)\n\t\t\treturn\n\t\t}\n\t\tlistAsAdmin = user.CanListUsersAsAdmin\n\t}\n\tcount, users, err := userDB.ListUsers(criteria, listAsAdmin)\n\tif err != nil {\n\t\tctx.AbortWithError(http.StatusInternalServerError, err)\n\t\treturn\n\t}\n\tout := &tat.UsersJSON{\n\t\tCount: count,\n\t\tUsers: users,\n\t}\n\tctx.JSON(http.StatusOK, 
out)\n}", "func Index(w http.ResponseWriter, r *http.Request) {\r\n\t\r\n\tdb := Database.Dbconn()\r\n\tselDB, err := db.Query(\"SELECT * FROM employee.dbo.employee\")\r\n\tif err != nil {\r\n\t\tpanic(err.Error())\r\n\t}\r\n\r\n\tper := persona{}\r\n\tres := []persona{}\r\n\r\n\tfor selDB.Next() {\r\n\t\tvar id string\r\n\t\tvar name string\r\n\t\tvar location string\r\n\r\n\t\terr = selDB.Scan(&id, &name, &location)\r\n\t\tif err != nil {\r\n\t\t\tpanic(err.Error())\r\n\t\t}\r\n\t\tper.Id = id\r\n\t\tper.Name = name\r\n\t\tper.Location = location\r\n\r\n\t\tres = append(res, per)\r\n\t}\r\n\ttmpl.ExecuteTemplate(w, \"Index\", res)\r\n\tdefer db.Close()\r\n}", "func (m MariaDB) All(ctx context.Context) ([]entity.PersonalData, error) {\n\tsqlQuery := fmt.Sprintf(\"SELECT * FROM person\")\n\tvar p personalData\n\tvar persons []entity.PersonalData\n\trows, err := m.Person.QueryContext(ctx, sqlQuery)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"could not make query\")\n\t}\n\tdefer rows.Close()\n\tfor rows.Next() {\n\t\terr = rows.Scan(&p.ID, &p.Name, &p.LastName, &p.Phone, &p.Email, &p.YearOfBirth)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"could not scan rows\")\n\t\t}\n\t\tpersons = append(persons, p.transmit())\n\t}\n\tif err = rows.Err(); err != nil {\n\t\treturn nil, errors.Wrap(err, \"rows error\")\n\t}\n\treturn persons, nil\n}", "func (client UsageDetailsClient) ListByDepartmentResponder(resp *http.Response) (result UsageDetailsListResult, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (s *EmployeesEndpoint) List(ctx context.Context, division int, all bool, o *api.ListOptions) ([]*Employees, error) {\n\tvar entities []*Employees\n\tu, _ := 
s.client.ResolvePathWithDivision(\"/api/v1/{division}/payroll/Employees\", division) // #nosec\n\tapi.AddListOptionsToURL(u, o)\n\n\tif all {\n\t\terr := s.client.ListRequestAndDoAll(ctx, u.String(), &entities)\n\t\treturn entities, err\n\t}\n\t_, _, err := s.client.NewRequestAndDo(ctx, \"GET\", u.String(), nil, &entities)\n\treturn entities, err\n}", "func (client *Client) GetUserInfos(allDomains bool, domainName string) ([]UserFullDataWrapper, error) {\n\tquery, _ := json.Marshal(InfoListData{\n\t\tAllDomains: allDomains,\n\t\tDomainName: domainName,\n\t})\n\n\tresRaw, err := client.ListClients([]byte(query))\n\tif err != nil {\n\t\treturn []UserFullDataWrapper{}, err\n\t}\n\n\tresUnmarshaled := []UserFullDataWrapper{}\n\tif err := json.Unmarshal(resRaw, &resUnmarshaled); err != nil {\n\t\treturn []UserFullDataWrapper{}, err\n\t}\n\n\treturn resUnmarshaled, err\n}", "func (client PermissionsClient) ListByInvoiceSectionsResponder(resp *http.Response) (result PermissionsListResult, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (r repository) List(ctx context.Context, list *ListUsersRequest) ([]model.User, error) {\n\tusers := make([]model.User, 0)\n\toffset := (list.Page - 1) * list.Limit\n\terr := r.db.Select(&users, ListUsersSQL, offset, list.Limit)\n\tif err != nil {\n\t\tr.logger.Errorf(\"Failed to select users %s\", err)\n\t\treturn nil, err\n\t}\n\treturn users, nil\n}", "func (s *Service) List(c context.Context, req *user.ListReq) (*user.ListResp, error) {\n\tif err := req.Validate(); err != nil {\n\t\treturn nil, err\n\t}\n\n\tu := s.auth.GetUser(c)\n\n\tlimit, offset := query.Paginate(req.Limit, req.Page)\n\n\tusers, err := s.udb.List(\n\t\ts.dbcl.WithContext(c),\n\t\tquery.ForTenant(u, req.TenantId),\n\t\tlimit,\n\t\toffset,\n\t)\n\n\tif err != nil 
{\n\t\treturn nil, err\n\t}\n\n\tvar pu []*user.Resp\n\tfor _, v := range users {\n\t\tpu = append(pu, v.Proto())\n\t}\n\n\treturn &user.ListResp{Users: pu}, nil\n}", "func (client AppsClient) ListResponder(resp *http.Response) (result ListApplicationInfoResponse, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result.Value),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func getLoanList(ls *[]models.Loan, db *gorm.DB) error {\n\tvar l models.Loan\n\tif len(*ls) == 1 {\n\t\tl = (*ls)[0]\n\t}\n\twhere := \"\"\n\tif l.CodCollection != 0 {\n\t\twhere = fmt.Sprintf(\"cod_collection = %v\", l.CodCollection)\n\t}\n\tif l.CodClient != 0 {\n\t\twhere = fmt.Sprintf(\"cod_client = %v\", l.CodClient)\n\t}\n\tif where != \"\" && l.CodLoanState != 0 {\n\t\twhere += fmt.Sprintf(\" and cod_loan_state = %v\", l.CodLoanState)\n\t}\n\terr := db.Where(where).Select(\"id,created_at,updated_at,initial_value,interest,quota,balance,cod_loan_state,cod_client\").Find(ls).GetErrors()\n\tif len(err) != 0 {\n\t\treturn errors.New(\"no se encuentra\")\n\t}\n\treturn nil\n}", "func List(memoryStorage *PassgenStorage) {\n\tif len(memoryStorage.Storage) == 0 {\n\t\thelpers.NegativePrintf(\"\\nThere is no item in your storage\\n\\n\")\n\t\treturn\n\t}\n\n\ttable := tablewriter.NewWriter(os.Stdout)\n\ttable.SetHeader([]string{\"Application Name\", \"User Name\", \"Password\"})\n\n\tfor _, v := range memoryStorage.Storage {\n\t\ttable.Append([]string{v.AppName, v.UserName, v.Password})\n\t}\n\ttable.Render()\n}", "func (v AdminsResource) List(c buffalo.Context) error {\n\t// Get the DB connection from the context\n\ttx, ok := c.Value(\"tx\").(*pop.Connection)\n\tif !ok {\n\t\treturn errors.WithStack(errors.New(\"no transaction found\"))\n\t}\n\n\tadmins := &models.Admins{}\n\n\t// Paginate results. 
Params \"page\" and \"per_page\" control pagination.\n\t// Default values are \"page=1\" and \"per_page=20\".\n\tq := tx.PaginateFromParams(c.Params())\n\n\t// Retrieve all Admins from the DB\n\tif err := q.All(admins); err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\n\t// Add the paginator to the context so it can be used in the template.\n\tc.Set(\"pagination\", q.Paginator)\n\n\treturn c.Render(200, r.JSON(admins))\n}", "func (uc UsersController) List(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {\n\tfmt.Fprintf(w, \"UsersList\")\n}", "func listActivities(echoReq *alexa.EchoRequest, user *User) *alexa.EchoResponse {\n\tmsg := \"\"\n\n\t//switch depending on number of activities user has\n\tswitch len(user.Activities) {\n\n\tcase 0:\n\t\tmsg = \"You have no activities currently\"\n\n\tcase 1:\n\t\tmsg = \"You have one activity \" + user.Activities[0].Name\n\n\tdefault:\n\t\tactivitiesLen := strconv.Itoa(len(user.Activities))\n\t\tmsg = \"You have \" + activitiesLen + \" activities \"\n\n\t\t//go through all activites and formulate a response message\n\t\tfor index, activity := range user.Activities {\n\t\t\tif index == len(user.Activities)-1 {\n\t\t\t\tmsg = msg + \" and \" + user.Activities[index].Name + \" \"\n\t\t\t} else {\n\t\t\t\tmsg = msg + activity.Name + \" \"\n\t\t\t}\n\t\t}\n\t}\n\n\t//return a response with message of listed activities\n\techoResp := alexa.NewEchoResponse().OutputSpeech(msg).EndSession(false)\n\treturn echoResp\n}", "func (c *productsServicesInterface) GetPersonalLoans(id string, page int) ([]personalloan.Entity, *subentities.Pagination, common.CustomError) {\n\treturn c.personalLoanService.FindByInstitution(id, page)\n}", "func (m *manager) List(ctx context.Context, query *q.Query) (models.Users, error) {\n\tquery = q.MustClone(query)\n\tif query.Sorting == \"\" {\n\t\tquery.Sorting = \"username\"\n\t}\n\n\texcludeAdmin := true\n\tfor key := range query.Keywords {\n\t\tstr := strings.ToLower(key)\n\t\tif str == 
\"user_id__in\" {\n\t\t\texcludeAdmin = false\n\t\t\tbreak\n\t\t} else if str == \"user_id\" {\n\t\t\texcludeAdmin = false\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif excludeAdmin {\n\t\t// Exclude admin account when not filter by UserIDs, see https://github.com/goharbor/harbor/issues/2527\n\t\tquery.Keywords[\"user_id__gt\"] = 1\n\t}\n\n\treturn m.dao.List(ctx, query)\n}", "func (retUser) List(ctx context.Context, db *sqlx.DB) ([]User, error) {\n\tctx, span := global.Tracer(\"service\").Start(ctx, \"internal.data.retrieve.user.list\")\n\tdefer span.End()\n\n\tusers := []User{}\n\tconst q = `SELECT * FROM users`\n\n\tif err := db.SelectContext(ctx, &users, q); err != nil {\n\t\treturn nil, errors.Wrap(err, \"selecting users\")\n\t}\n\n\treturn users, nil\n}", "func (pubManager PublicationManager) List(page, pageNum int) func() (Publication, error) {\n\n\tvar rows *sql.Rows\n\tvar err error\n\tdriver, _ := config.GetDatabase(config.Config.FrontendServer.Database)\n\tif driver == \"mssql\" {\n\t\trows, err = pubManager.dbList.Query(pageNum*page, page)\n\t} else {\n\t\trows, err = pubManager.dbList.Query(page, pageNum*page)\n\t}\n\tif err != nil {\n\t\treturn func() (Publication, error) { return Publication{}, err }\n\t}\n\n\treturn func() (Publication, error) {\n\t\tvar pub Publication\n\t\tvar err error\n\t\tif rows.Next() {\n\t\t\terr = rows.Scan(&pub.ID, &pub.UUID, &pub.Title, &pub.Status)\n\t\t} else {\n\t\t\trows.Close()\n\t\t\terr = ErrNotFound\n\t\t}\n\t\treturn pub, err\n\t}\n}", "func AdminList(w http.ResponseWriter, data interface{}) {\n\trender(tpAdminList, w, data)\n}", "func (client UsageDetailsClient) ListByManagementGroupResponder(resp *http.Response) (result UsageDetailsListResult, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func 
getLoanPaymentList(lps *[]models.LoanPayment, db *gorm.DB) error {\n\tvar lp models.LoanPayment\n\tif len(*lps) == 1 {\n\t\tlp = (*lps)[0]\n\t}\n\twhere := fmt.Sprintf(\"cod_collection = %v\", lp.CodCollection)\n\tif lp.CodLoan != 0 {\n\t\twhere = fmt.Sprintf(\"cod_loan = %v\", lp.CodLoan)\n\t}\n\tif lp.CodUser != 0 {\n\t\twhere += fmt.Sprintf(\" and cod_user = %v\", lp.CodUser)\n\t}\n\terr := db.Where(where).Select(\"id,updated_at,cod_loan,cash,cod_user\").Find(lps).GetErrors()\n\tif len(err) != 0 {\n\t\treturn errors.New(\"no se encuentra\")\n\t}\n\treturn nil\n}", "func (hc *Hailconfig) List() error {\n\tcols, _ := consolesize.GetConsoleSize()\n\tmaxLenAlias := 25\n\tmaxLenCommand := 80\n\tmaxLenDescription := 25\n\tif cols > 10 {\n\t\tmaxLenAlias = cols/4 - 5\n\t\tmaxLenCommand = cols / 2\n\t\tmaxLenDescription = cols/4 - 5\n\t}\n\n\tt := table.NewWriter()\n\tt.SetOutputMirror(os.Stdout)\n\tt.AppendHeader(table.Row{\"Alias\", \"Command\", \"Description\"})\n\tt.SetColumnConfigs([]table.ColumnConfig{\n\t\t{\n\t\t\tName: \"Alias\",\n\t\t\tWidthMin: 5,\n\t\t\tWidthMax: maxLenAlias,\n\t\t},\n\t\t{\n\t\t\tName: \"Command\",\n\t\t\tWidthMin: 10,\n\t\t\tWidthMax: maxLenCommand,\n\t\t}, {\n\t\t\tName: \"Description\",\n\t\t\tWidthMin: 5,\n\t\t\tWidthMax: maxLenDescription,\n\t\t},\n\t})\n\t//t.SetAllowedRowLength(90)\n\tfor alias, script := range hc.Scripts {\n\t\tt.AppendRow([]interface{}{alias, script.Command, script.Description})\n\t\tt.AppendSeparator()\n\t}\n\tt.Render()\n\treturn nil\n}", "func getSpecificPersons(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tfmt.Println(\"Get Specific HIT\")\n\tparams := mux.Vars(r)\n\tresult, err := db.Query(\"SELECT pAge,pName FROM Persons WHERE pAge >= ?\", params[\"age\"])\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tdefer result.Close()\n\tvar pers []Person\n\tfor result.Next() {\n\t\tvar per Person\n\t\terr := result.Scan(&per.Age, &per.Name)\n\t\tif err != nil 
{\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tpers = append(pers, per)\n\t}\n\tjson.NewEncoder(w).Encode(pers)\n}", "func (client PermissionsClient) ListByBillingAccountResponder(resp *http.Response) (result PermissionsListResult, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (r *Resource) getAllHandler(c *gin.Context) {\n // fetch all from database\n meals, err := r.db.GetAllMenuMeals()\n if err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n // return result as JSON\n c.JSON(http.StatusOK, meals)\n}", "func (r *LoggingRepository) List(ctx context.Context, teamID, userID string) ([]*model.Member, error) {\n\tstart := time.Now()\n\trecords, err := r.upstream.List(ctx, teamID, userID)\n\n\tlogger := r.logger.With().\n\t\tStr(\"request\", r.requestID(ctx)).\n\t\tStr(\"method\", \"list\").\n\t\tDur(\"duration\", time.Since(start)).\n\t\tStr(\"team\", teamID).\n\t\tStr(\"user\", userID).\n\t\tLogger()\n\n\tif err != nil {\n\t\tlogger.Warn().\n\t\t\tErr(err).\n\t\t\tMsg(\"failed to fetch members\")\n\t} else {\n\t\tlogger.Debug().\n\t\t\tMsg(\"\")\n\t}\n\n\treturn records, err\n}", "func List(ctx context.Context) ([]meta.SimpleTreeNode, error) {\n\tvar managerService = services.NewManagerService()\n\tvar data, err = managerService.MenuList()\n\treturn data, err\n}", "func (aaa *UserAchievementsService) PublicListUserAchievements(input *user_achievements.PublicListUserAchievementsParams) (*achievementclientmodels.ModelsPaginatedUserAchievementResponse, error) {\n\ttoken, err := aaa.TokenRepository.GetToken()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tok, badRequest, unauthorized, notFound, internalServerError, err := aaa.Client.UserAchievements.PublicListUserAchievements(input, client.BearerToken(*token.AccessToken))\n\tif 
badRequest != nil {\n\t\treturn nil, badRequest\n\t}\n\tif unauthorized != nil {\n\t\treturn nil, unauthorized\n\t}\n\tif notFound != nil {\n\t\treturn nil, notFound\n\t}\n\tif internalServerError != nil {\n\t\treturn nil, internalServerError\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn ok.GetPayload(), nil\n}" ]
[ "0.66679907", "0.62190366", "0.6153077", "0.59601074", "0.5529576", "0.54876083", "0.54529107", "0.53328735", "0.53084356", "0.5276146", "0.5210479", "0.5182747", "0.51378894", "0.51373833", "0.51039445", "0.5103527", "0.5103344", "0.5102155", "0.5064931", "0.5045218", "0.50413835", "0.50163555", "0.50063264", "0.49829236", "0.49692866", "0.4961069", "0.4959318", "0.4946596", "0.49445292", "0.4929917", "0.49165195", "0.49161023", "0.49138248", "0.4911301", "0.49069887", "0.49049574", "0.48979262", "0.48944563", "0.48909584", "0.4857625", "0.48550412", "0.48492557", "0.48458785", "0.48190162", "0.48149642", "0.48093504", "0.4809098", "0.47951135", "0.47950354", "0.47930846", "0.47881842", "0.47843853", "0.47762424", "0.47676426", "0.47635606", "0.47308332", "0.4727782", "0.47239056", "0.47234347", "0.47202873", "0.47185832", "0.4714808", "0.47035894", "0.47024292", "0.4698194", "0.46932617", "0.46815398", "0.4679357", "0.46791977", "0.46741998", "0.4671231", "0.46710113", "0.46652558", "0.46621376", "0.46611065", "0.46578142", "0.46496254", "0.46488565", "0.46448624", "0.4644287", "0.46424508", "0.4641455", "0.46408495", "0.4637506", "0.46323854", "0.4624224", "0.4619987", "0.46185598", "0.46131632", "0.46081093", "0.46078888", "0.4605971", "0.46032703", "0.4602507", "0.46018225", "0.45982665", "0.45980096", "0.4594926", "0.45928058", "0.4591224" ]
0.821063
0
DefaultCreateHealthMenstruationDailyEntry executes a basic gorm create call
func DefaultCreateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) { if in == nil { return nil, errors1.NilArgumentError } ormObj, err := in.ToORM(ctx) if err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeCreate_); ok { if db, err = hook.BeforeCreate_(ctx, db); err != nil { return nil, err } } if err = db.Create(&ormObj).Error; err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterCreate_); ok { if err = hook.AfterCreate_(ctx, db); err != nil { return nil, err } } pbResponse, err := ormObj.ToPB(ctx) return &pbResponse, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultCreateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultReadHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationDailyEntryORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationDailyEntryORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationDailyEntryORMWithAfterReadFind); ok {\n\t\tif err = 
hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultListHealthMenstruationDailyEntry(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationDailyEntry, error) {\n\tin := HealthMenstruationDailyEntry{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationDailyEntryORM{}, &HealthMenstruationDailyEntry{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationDailyEntryORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationDailyEntry{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func DefaultPatchHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, 
errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationDailyEntry\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationDailyEntry(ctx, &HealthMenstruationDailyEntry{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationDailyEntry(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationDailyEntryWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultStrictUpdateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationDailyEntry\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationDailyEntryORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := 
interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func NewCreateMailerEntryDefault(code int) *CreateMailerEntryDefault {\n\tif code <= 0 {\n\t\tcode = 500\n\t}\n\n\treturn &CreateMailerEntryDefault{\n\t\t_statusCode: code,\n\t}\n}", "func DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx context.Context, patchee *HealthMenstruationDailyEntry, patcher *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Day\" {\n\t\t\tpatchee.Day = patcher.Day\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"IntensityPercentage\" {\n\t\t\tpatchee.IntensityPercentage = 
patcher.IntensityPercentage\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Type\" {\n\t\t\tpatchee.Type = patcher.Type\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Manual\" {\n\t\t\tpatchee.Manual = patcher.Manual\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"BasedOnPrediction\" {\n\t\t\tpatchee.BasedOnPrediction = patcher.BasedOnPrediction\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func (s *Service) CreateEntry(ctx context.Context, in *pb.CreateEntryRequest) (*pb.Entry, error) {\n\tcurrentUser, err := s.getCurrentUser(ctx)\n\tif err != nil {\n\t\treturn nil, status.Errorf(codes.PermissionDenied, \"Authentication failed\")\n\t}\n\n\tvar year int\n\terr = s.db.Get(&year, \"select year from calendars where id = ?\", in.GetCalendarId())\n\tif err == sql.ErrNoRows {\n\t\treturn nil, status.Errorf(codes.NotFound, \"Calendar not found\")\n\t}\n\tif err != nil {\n\t\treturn nil, xerrors.Errorf(\"Failed query to fetch calendar: %w\", err)\n\t}\n\n\tday := in.GetDay()\n\tif day < 1 || day > 25 {\n\t\treturn nil, status.Errorf(codes.InvalidArgument, \"Invalid day: %d\", day)\n\t}\n\n\tlastID, err := s.insertEntry(currentUser.ID, in.GetCalendarId(), day)\n\tif err != nil {\n\t\treturn nil, xerrors.Errorf(\"Failed to insert entry: %w\", err)\n\t}\n\n\tvar entryID int64\n\terr = s.db.Get(&entryID, \"select id from entries where id = ?\", lastID)\n\tif err != nil {\n\t\treturn nil, xerrors.Errorf(\"Failed query to fetch entry: %w\", err)\n\t}\n\n\treturn &pb.Entry{Id: entryID}, nil\n}", "func DefaultPatchSetHealthMenstruationDailyEntry(ctx context.Context, objects []*HealthMenstruationDailyEntry, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationDailyEntry, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationDailyEntry, 0, len(objects))\n\tfor i, patcher := range 
objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationDailyEntry(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func (m *HealthMenstruationDailyEntry) ToORM(ctx context.Context) (HealthMenstruationDailyEntryORM, error) {\n\tto := HealthMenstruationDailyEntryORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.Day = &t\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = int32(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *HealthMenstruationDailyEntryORM) ToPB(ctx context.Context) (HealthMenstruationDailyEntry, error) {\n\tto := HealthMenstruationDailyEntry{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = 
ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tif to.Day, err = ptypes1.TimestampProto(*m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = HealthMenstruationDailyEntry_Type(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (dbHandler *Handler) CreateEntry(userID uint, text string, ts time.Time) (api.Entry, error) {\n\tentry := api.Entry{UserID: userID, Text: text}\n\tif !ts.IsZero() {\n\t\tentry.CreatedAt = ts\n\t\tentry.UpdatedAt = ts\n\t}\n\n\tdb := dbHandler.DB.Create(&entry)\n\tif db.Error != nil {\n\t\treturn entry, errors.WrapWithDetails(db.Error, \"cannot create entry\", \"userID\", userID)\n\t}\n\n\treturn entry, nil\n}", "func CreateMeeting(c *gin.Context) {\n // Validate input\n var input CreateMeetingInput\n if err := c.ShouldBindJSON(&input); err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n // Create meeting\n meeting := models.Meeting{CreatedBy: input.CreatedBy, Title: input.Title, Description: input.Description, StartDate: input.StartDate, EndDate: input.EndDate, Location: input.Location}\n models.DB.Create(&meeting)\n\n c.JSON(http.StatusOK, gin.H{\"data\": meeting})\n}", "func (m *stashModel) createTodayNote(day time.Time) (*stashModel, tea.Cmd) {\n\treturn m, func() tea.Msg {\n\t\tif entries, err := fsPlugin.ListAll(); err == nil {\n\t\t\t// if the most recent entry isnt the same as our expected filename, create a new entry for today\n\t\t\texpectedFilename := day.Format(fs.StorageFilenameFormat)\n\t\t\tif len(entries) == 0 || (len(entries) > 0 && entries[0].Metadata.CreationTimestamp.Format(fs.StorageFilenameFormat) != expectedFilename) {\n\t\t\t\t_, err := 
fsPlugin.CreateOrUpdateNote(&v1.Note{\n\t\t\t\t\tMetadata: v1.NoteMetadata{\n\t\t\t\t\t\tAuthor: m.User.Username,\n\t\t\t\t\t\tTitle: TitleFromTime(day, m.config.StartWorkHours, m.config.EndWorkHours),\n\t\t\t\t\t\tTags: DefaultTagsForTime(day, m.config.HolidayTags, m.config.WorkdayTags, m.config.WeekendTags),\n\t\t\t\t\t\tLabels: map[string]string{},\n\t\t\t\t\t},\n\t\t\t\t\tContent: m.config.EntryTemplate,\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn errMsg{fmt.Errorf(\"unable to create new entry: %w\", err)}\n\t\t\t\t}\n\t\t\t\t// TODO: we should not need to reload the whole collection, but I dunno how to make this work otherwise\n\t\t\t\treturn m.ReloadNoteCollectionCmd()\n\t\t\t} else {\n\t\t\t\treturn m.newStatusMessage(statusMessage{\n\t\t\t\t\tstatus: normalStatusMessage,\n\t\t\t\t\tmessage: fmt.Sprintf(\"Entry %s already exists\", expectedFilename),\n\t\t\t\t})\n\t\t\t}\n\t\t} else {\n\t\t\treturn errMsg{fmt.Errorf(\"unable to list entries: %w\", err)}\n\t\t}\n\t}\n}", "func CreateStatisticDefinition(settings *playfab.Settings, postData *CreateStatisticDefinitionRequestModel, entityToken string) (*EmptyResponseModel, error) {\n if entityToken == \"\" {\n return nil, playfab.NewCustomError(\"entityToken should not be an empty string\", playfab.ErrorGeneric)\n }\n b, errMarshal := json.Marshal(postData)\n if errMarshal != nil {\n return nil, playfab.NewCustomError(errMarshal.Error(), playfab.ErrorMarshal)\n }\n\n sourceMap, err := playfab.Request(settings, b, \"/Statistic/CreateStatisticDefinition\", \"X-EntityToken\", entityToken)\n if err != nil {\n return nil, err\n }\n \n result := &EmptyResponseModel{}\n\n config := mapstructure.DecoderConfig{\n DecodeHook: playfab.StringToDateTimeHook,\n Result: result,\n }\n \n decoder, errDecoding := mapstructure.NewDecoder(&config)\n if errDecoding != nil {\n return nil, playfab.NewCustomError(errDecoding.Error(), playfab.ErrorDecoding)\n }\n \n errDecoding = decoder.Decode(sourceMap)\n if errDecoding != 
nil {\n return nil, playfab.NewCustomError(errDecoding.Error(), playfab.ErrorDecoding)\n }\n\n return result, nil\n}", "func (r *RPC) CreationUpThirtyDayStat(c context.Context, arg *model.ArgMid, res *[]*model.ThirtyDayArticle) (err error) {\n\t*res, err = r.s.UpThirtyDayStat(c, arg.Mid)\n\treturn\n}", "func (dao *OHLCVDao) Create(tick *types.Tick) error {\n\terr := db.Create(dao.dbName, dao.collectionName, tick)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn err\n\t}\n\treturn nil\n}", "func create(entity domain.ValidableEntity, ctx *gin.Context) {\n\terr := ctx.BindJSON(entity)\n\tif IsError(err, ctx) {\n\t\treturn\n\t}\n\terr = domain.Save(entity)\n\tif IsError(err, ctx) {\n\t\treturn\n\t}\n\tctx.JSON(200, entity)\n}", "func (p *Loan) Create(w http.ResponseWriter, r *http.Request) {\n\tpost := models.Loan{}\n\tjson.NewDecoder(r.Body).Decode(&post)\n\n\t// Loan DATA\n\tAmount := post.Amount\n\tTenor := post.Tenor\n\tRequestDate, _ := time.Parse(\"2006-01-02\", post.Date)\n\n\t// GET Interest\n\tTempInterest, _ := p.repo.GetInterest(r.Context(), Tenor)\n\tInterest := TempInterest[0].Interest\n\n\tTempAmountInstallment := Amount / Tenor\n\tTempAmountInterest := float64(Amount) * Interest / 100\n\n\ttotalInstallment := float64(TempAmountInstallment) + TempAmountInterest\n\n\t// Loan CODE\n\tDateTimeNow := time.Now()\n\tTempDate := DateTimeNow.Format(\"01-02-2006\")\n\tTempTime := DateTimeNow.Format(\"15:04:05\")\n\n\tLoanCode := \"LOAN\" + strings.Replace(string(TempDate), \"-\", \"\", -1) + strings.Replace(string(TempTime), \":\", \"\", -1)\n\n\tfor i := 1; i <= Tenor; i++ {\n\t\t// fmt.Println(\"============================================================\")\n\t\t// fmt.Println(\"== Temp Amount Installment \t: \", TempAmountInstallment)\n\t\t// fmt.Println(\"== Temp Amount Interest\t\t: \", TempAmountInterest)\n\t\t// fmt.Println(\"== Total Installment\t\t: \", totalInstallment)\n\t\t// fmt.Println(\"== Request Date\t\t\t: \", RequestDate.AddDate(0, i, 
0).Format(\"2006-01-02\"))\n\t\t// fmt.Println(\"============================================================\")\n\n\t\tnewID, err := p.repo.InsertInstallment(r.Context(), LoanCode, float64(TempAmountInstallment), TempAmountInterest, totalInstallment, i, RequestDate.AddDate(0, i, 0).Format(\"2006-01-02\"))\n\n\t\tif err != nil {\n\t\t\trespondWithErrorLoan(w, http.StatusInternalServerError, \"Server Error\")\n\t\t}\n\n\t\tfmt.Println(\"Success insert installment : ID => \", newID)\n\t}\n\tpayload, _ := p.repo.GetInstallmentByLoanCode(r.Context(), LoanCode)\n\n\trespondwithJSONLoan(w, http.StatusCreated, payload)\n}", "func (adapter *GORMAdapter) Create(entity interface{}) orm.Result {\n\treturn orm.Result{\n\t\tError: adapter.db.Create(entity).Error,\n\t}\n}", "func (r *HealthResource) Create(item HealthConfig) error {\n\tif err := r.c.ModQuery(\"POST\", BasePath+HealthEndpoint, item); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (HealthMenstruationDailyEntryORM) TableName() string {\n\treturn \"health_menstruation_daily_entries\"\n}", "func (a *App) CreateEntity(w http.ResponseWriter, r *http.Request) {\n\tvar e Entity\n\te.Uuid = uuid.NewV4().String()\n\tdecoder := json.NewDecoder(r.Body)\n\tif err := decoder.Decode(&e); err != nil {\n\t\trespondWithError(w, http.StatusBadRequest, errors.New(\"invalid request payload\"))\n\t\treturn\n\t}\n\tdefer func() { _ = r.Body.Close() }()\n\n\tif err := e.createEntity(a.DB); err != nil {\n\t\tlog.Print(err)\n\t\trespondWithError(w, http.StatusInternalServerError, err)\n\t\treturn\n\t}\n\n\trespondWithJSON(w, http.StatusCreated, e)\n}", "func (h *StatsHandlers) createStatsRecord(c *gin.Context) {\n\n\tfreeMemory, err := strconv.ParseUint(c.PostForm(\"freeMemory\"), 10, 64)\n\tif err != nil {\n\t\tfmt.Println(\"Couldn't read free memory\")\n\t\tpanic(err)\n\t}\n\n\tuptime, err := strconv.ParseUint(c.PostForm(\"uptime\"), 10, 64)\n\tif err != nil {\n\t\tfmt.Println(\"Couldn't read 
uptime\")\n\t\tpanic(err)\n\t}\n\n\ttemperature, err := strconv.ParseFloat(c.PostForm(\"temp\"), 64)\n\tif err != nil {\n\t\tfmt.Println(\"Couldn't read temp\")\n\t\tpanic(err)\n\t}\n\n\tcpuTemp, err := strconv.ParseFloat(c.PostForm(\"cpuTemp\"), 64)\n\tif err != nil {\n\t\tfmt.Println(\"Couldn't read cpu temp\")\n\t\tpanic(err)\n\t}\n\n\tambientTemp, err := strconv.ParseFloat(c.PostForm(\"ambientTemp\"), 64)\n\tif err != nil {\n\t\tfmt.Println(\"Couldn't read ambient temp\")\n\t\tpanic(err)\n\t}\n\n\thumidity, err := strconv.ParseFloat(c.PostForm(\"humidity\"), 64)\n\tif err != nil {\n\t\tfmt.Println(\"Couldn't read humidity\")\n\t\tpanic(err)\n\t}\n\n\tdb, ok := c.MustGet(\"databaseConn\").(*gorm.DB)\n\tif !ok {\n\t\treturn\n\t}\n\n\tnewStats := models.Stats{\n\t\tFreeMemory: freeMemory,\n\t\tUptime: uptime,\n\t\tTemperature: temperature,\n\t\tAmbientTemperature: ambientTemp,\n\t\tCPUTemperature: cpuTemp,\n\t\tHumidity: humidity}\n\n\tfmt.Println(newStats)\n\tdb.Save(&newStats)\n\n\tc.Status(http.StatusOK)\n}", "func CreateTimeEntry(newEntry models.TimeEntry) uint {\n\t// Connect to the DB\n\tdbConn := ConnectDB()\n\n\t// sp_time_insert stored procedure returns the id of the new time entry\n\t// so call QueryRow with a prepared statement to just grab this value\n\t// QueryRow returns a pointer to an instance of sql.Row with a single value\n\trow := dbConn.QueryRow(`call sp_time_insert($1, $2, $3, $4, $5, 0)`,\n\t\tnewEntry.User.Id,\n\t\tnewEntry.Organisation.Id,\n\t\tnewEntry.Comments,\n\t\tnewEntry.Value,\n\t\tnewEntry.ValueType)\n\n\t// Parse the new time entry id returned by the db in the row variable\n\t// and handle any returned errors\n\terr := row.Scan(&newEntry.Id)\n\thelpers.HandleError(err)\n\n\t// Insert any tags linked to the time entry\n\tinsertTags(&newEntry.Tags, newEntry.Id, newEntry.User.Id)\n\n\t// Insert any repo items for the new time entry\n\tinsertRepoItem(&newEntry.RepoItems, newEntry.Id)\n\n\t// Return the time entries new id\n\treturn 
newEntry.Id\n}", "func PostEntryNew(w http.ResponseWriter, req *http.Request, _ httprouter.Params) {\n\tif isLoggedIn(w, req) {\n\t\tq := req.URL.Query()\n\t\tbox, _ := strconv.Atoi(q[\"box\"][0])\n\t\tpacket, _ := strconv.Atoi(q[\"packet\"][0])\n\t\tid, _ := strconv.Atoi(q[\"product\"][0])\n\t\tje := opdatabase.JournalEntry{\n\t\t\tID: 0,\n\t\t\tLabour: q[\"labour\"][0],\n\t\t\tDate: q[\"date\"][0],\n\t\t\tBox: box,\n\t\t\tPacket: packet,\n\t\t\tProductID: id,\n\t\t}\n\t\tgo model.UpdateLabourNames(je.Labour, je.Date, labours)\n\t\tmodel.CreateJournalEntry(je)\n\t\tres := Response{\n\t\t\t301,\n\t\t\tResponse{20, \", \"},\n\t\t}\n\t\tp, err := json.Marshal(res)\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t}\n\t\tio.WriteString(w, string(p))\n\n\t}\n}", "func CreateMenu(w http.ResponseWriter, r *http.Request) {\n\tdefer r.Body.Close()\n\tvar menu Menu\n\tif err := json.NewDecoder(r.Body).Decode(&menu); err != nil {\n\t\tRespondWithError(w, http.StatusBadRequest, \"Invalid request payload\")\n\t\treturn\n\t}\n\tmenu.ID = bson.NewObjectId()\n\n\t// set Date for every entry\n\tdays := []string{\"Mon\", \"Tue\", \"Wed\", \"Thr\", \"Fri\", \"Sat\", \"Sun\"}\n\tdates := WholeWeekDates(time.Now().AddDate(0, 0, 7))\n\n\tfor i := range days {\n\t\treflect.ValueOf(&menu.MessUP).Elem().FieldByName(days[i]).FieldByName(\"Date\").Set(reflect.ValueOf(dates[i]))\n\t\treflect.ValueOf(&menu.MessDown).Elem().FieldByName(days[i]).FieldByName(\"Date\").Set(reflect.ValueOf(dates[i]))\n\t}\n\n\tif err := mdao.Insert(menu); err != nil {\n\t\tRespondWithError(w, http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\n\tRespondWithJSON(w, http.StatusCreated, menu)\n}", "func (c *ConsulDB) CreateEntry(key string, value string) error {\n\tkv := c.consulClient.KV()\n\n\tp := &consulapi.KVPair{Key: key, Value: []byte(value)}\n\n\t_, err := kv.Put(p, nil)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func DefaultCreateUserInfo(ctx context.Context, in *UserInfo, 
db *gorm.DB) (*UserInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (m *UserExtModel) Create(ctx context.Context, kv query.KV) (int64, error) {\n\n\tif _, ok := kv[\"created_at\"]; !ok {\n\t\tkv[\"created_at\"] = time.Now()\n\t}\n\n\tif _, ok := kv[\"updated_at\"]; !ok {\n\t\tkv[\"updated_at\"] = time.Now()\n\t}\n\n\tsqlStr, params := m.query.Table(m.tableName).ResolveInsert(kv)\n\n\tres, err := m.db.ExecContext(ctx, sqlStr, params...)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn res.LastInsertId()\n}", "func (repo *Repository) Create(ctx context.Context, claims auth.Claims, req CreateRequest, now time.Time) (*Expenditure, error) {\n\tspan, ctx := tracer.StartSpanFromContext(ctx, \"internal.expenditure.Create\")\n\tdefer span.Finish()\n\tif claims.Audience == \"\" {\n\t\treturn nil, errors.WithStack(ErrForbidden)\n\t}\n\n\t// Admin users can update branch they have access to.\n\tif !claims.HasRole(auth.RoleAdmin) {\n\t\treturn nil, errors.WithStack(ErrForbidden)\n\t}\n\n\t// Validate the request.\n\tv := webcontext.Validator()\n\terr := v.StructCtx(ctx, req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// If now empty set it to the current time.\n\tif now.IsZero() {\n\t\tnow = time.Now()\n\t}\n\n\t// Always store the time as UTC.\n\tnow = now.UTC()\n\t// Postgres truncates times to milliseconds when storing. 
We and do the same\n\t// here so the value we return is consistent with what we store.\n\tnow = now.Truncate(time.Millisecond)\n\n\tsalesRep, err := models.Users(models.UserWhere.PhoneNumber.EQ(req.SalesRepPhoneNumber)).One(ctx, repo.DbConn)\n\tif err != nil {\n\t\tif err.Error() == sql.ErrNoRows.Error() {\n\t\t\treturn nil, errors.New(\"Invalid phone number\")\n\t\t}\n\t\treturn nil, err\n\t}\n\tm := models.RepsExpense{\n\t\tID: uuid.NewRandom().String(),\n\t\tSalesRepID: salesRep.ID,\n\t\tDate: now.Unix(),\n\t\tAmount: req.Amount,\n\t\tReason: req.Reason,\n\t}\n\n\tif err := m.Insert(ctx, repo.DbConn, boil.Infer()); err != nil {\n\t\treturn nil, errors.WithMessage(err, \"Insert expenditure failed\")\n\t}\n\n\treturn &Expenditure{\n\t\tID: m.ID,\n\t\tSalesRepID: req.SalesRepPhoneNumber,\n\t\tDate: now,\n\t\tAmount: req.Amount,\n\t\tReason: req.Reason,\n\t}, nil\n}", "func (e *Exhibition) Create() error {\n\tif err := e.Validate(); err != nil {\n\t\treturn err\n\t}\n\tb := e.GetByteId()\n\t_, err := db.Exec(`\n\t\tINSERT INTO\n\t\t\texhibition\n\t\t\t(id, _byteid, gallery_id, title, description, date_range)\n\t\tVALUES\n\t\t\t($1, $2, $3, $4, $5, $6)\n\t`, e.Id, b, e.GalleryId, e.Title, e.Description, e.DateRange.Format())\n\treturn err\n}", "func makeEntry(title string, status string, action string, e string) {\n\tquery, err := db.Prepare(\"INSERT INTO vlog (VideoTitle, DownloadStatus, Activity , ErrorMsg) VALUES (?, ?, ?, ?)\")\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\tquery.Exec(title, status, action, e)\n}", "func createGameDayReport(date string) (*gameDayReport, error) {\n\tmatches, err := findMatchesByGameDateID(date)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif len(matches) == 0 {\n\t\treturn nil, fmt.Errorf(\"no matches found for date %s\", date)\n\t}\n\n\treportGames := make(map[int64]gameReport)\n\tfor _, game := range matches {\n\t\tgameReport := gameReport{\n\t\t\tHomeTeam: game.HomeTeam,\n\t\t\tAwayTeam: game.AwayTeam,\n\t\t\tVenue: 
game.Venue,\n\t\t\tDate: game.StartDate,\n\t\t}\n\n\t\treportGames[game.ID] = gameReport\n\t}\n\n\treport := gameDayReport{\n\t\tID: date,\n\t\tGames: reportGames,\n\t\tDeadline: matches[0].StartDate,\n\t\tEvaluated: false,\n\t}\n\n\terr = upsertGameDayReport(report)\n\treturn &report, err\n}", "func (c *Cruder) create(request *CrudRequest) {\n\tvalues := request.GetValues()\n\tmodelFields := c.model.GetFields()\n\n\tif len(values) == len(modelFields) {\n\t\tformattedValues := make([]interface{}, len(values))\n\n\t\tsort.Sort(ByFieldValueName(values))\n\n\t\tfor i, value := range values {\n\t\t\tformattedValues[i] = value.Value\n\t\t}\n\n\t\t_, err := c.createStatement.Exec(formattedValues...)\n\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t} else {\n\t\tlog.Fatal(\"total columns in create statement doesn't match total columns in table\")\n\t}\n\n}", "func TestCreate(t *testing.T) {\n\tassert := assert.New(t)\n\tacctDefEntry := new(AccountDefEntry)\n\tacctDefEBuilder := &AccountDefEntryBuilder{\n\t\taccountDefEntry: acctDefEntry,\n\t}\n\tacctDE := acctDefEBuilder.Create()\n\tassert.Equal(acctDefEntry, acctDE)\n}", "func (hh *HealthCheckHandler) Create(w http.ResponseWriter, r *http.Request) {\n\treq := &models.CreateHealthCheckRequest{}\n\tif err := json.NewDecoder(r.Body).Decode(req); err != nil {\n\t\thttp.Error(w, marshalError(err.Error()), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif req.Endpoint == \"\" {\n\t\thttp.Error(w, marshalError(\"empty healthcheck endpoint\"), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif _, err := url.ParseRequestURI(req.Endpoint); err != nil {\n\t\thttp.Error(w, marshalError(\"invalid URL\"), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\thc, err := models.NewHealthCheck(req.Endpoint)\n\tif err != nil {\n\t\thttp.Error(w, marshalError(err.Error()), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tif err := hh.db.Create(hc); err != nil {\n\t\thttp.Error(w, marshalError(err.Error()), 
http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tresp := &models.CreateHealthCheckResponse{\n\t\tID: hc.ID,\n\t\tEndpoint: hc.Endpoint,\n\t}\n\n\tb, err := json.Marshal(resp)\n\tif err != nil {\n\t\thttp.Error(w, marshalError(err.Error()), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tw.Write(b)\n}", "func (h *Handlers) CreateRecord(c *gin.Context) {\n\tproject := c.Param(\"project\")\n\tprojectIndex := &ProjectIndex{}\n\tc.BindJSON(projectIndex)\n\n\terr := h.db.AddProjectData(project, projectIndex.Data, projectIndex.Meta)\n\tif err != nil {\n\t\ttErr := h.db.TranslateError(err)\n\t\tc.JSON(tErr.Code, tErr.Error())\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusCreated, gin.H{})\n}", "func (m *UserModel) Create(ctx context.Context, kv query.KV) (int64, error) {\n\n\tif _, ok := kv[\"created_at\"]; !ok {\n\t\tkv[\"created_at\"] = time.Now()\n\t}\n\n\tif _, ok := kv[\"updated_at\"]; !ok {\n\t\tkv[\"updated_at\"] = time.Now()\n\t}\n\n\tsqlStr, params := m.query.Table(m.tableName).ResolveInsert(kv)\n\n\tres, err := m.db.ExecContext(ctx, sqlStr, params...)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn res.LastInsertId()\n}", "func (s *serviceOutlite) EntityCreate(input *schemas.SchemaOutlet) (*models.ModelOutlet, schemas.SchemaDatabaseError) {\n\tvar outlet schemas.SchemaOutlet\n\toutlet.Name = input.Name\n\toutlet.Phone = input.Phone\n\toutlet.Address = input.Address\n\toutlet.MerchatID = input.MerchatID\n\n\tres, err := s.outlet.EntityCreate(&outlet)\n\treturn res, err\n}", "func (db *Database) CreateApointment(date string, time string, name string) (string, error) {\n\tuuid := uuid.New().String()\n\n\tif db.TimeIsTaken(date, time) {\n\t\treturn \"\", errors.New(\"time is already taken\")\n\t}\n\n\tdb.Taken[uuid] = Appointment{\n\t\tDate: date,\n\t\tTime: time,\n\t\tName: name,\n\t}\n\n\treturn uuid, nil\n}", "func (handler *TemperatureHandler) Create(c *gin.Context) {\n\tform := forms.NewStoresTemperature()\n\n\tif c.BindJSON(form) == nil {\n\t\tservice := 
services.NewTemperatureService(handler.MetricRepository)\n\t\tc.JSON(http.StatusOK, service.StoresTemperature(form))\n\t}\n}", "func (r *RootRepository) Create(ent Entity) error {\n\tif err := ent.Validate(); err != nil {\n\t\treturn err\n\t}\n\tif err := r.DB.Create(ent).Error; err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func createDefaultUser() error {\n\tdb, err := gorm.Open(\"sqlite3\", dbPath)\n\tdefer db.Close()\n\tif err != nil {\n\t\treturn err\n\t}\n\tregisteredValues := 0\n\tdb.Find(&user{}).Count(&registeredValues)\n\tif registeredValues == 0 {\n\t\thashedPassword, _ := bcrypt.GenerateFromPassword([]byte(\"admin\"), 14)\n\t\tdb.Create(&user{\n\t\t\tUsername: \"admin\",\n\t\t\tHashedPassword: hashedPassword,\n\t\t})\n\t}\n\treturn nil\n}", "func DefaultCreatePeriod(ctx context.Context, in *Period, db *gorm.DB) (*Period, error) {\n\tif in == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(PeriodORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(PeriodORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (r *HousingTypology) Create(db *sql.DB) error {\n\treturn db.QueryRow(`INSERT INTO housing_typology(name) VALUES($1) RETURNING id`,\n\t\t&r.Name).Scan(&r.ID)\n}", "func (s service) Create(ctx context.Context, email, component, environment, message string, data map[string]string) (*models.Event, error) {\n\tval, _ := json.Marshal(data)\n\te := &models.Event{\n\t\tEmail: email,\n\t\tComponent: component,\n\t\tEnvironment: environment,\n\t\tMessage: message,\n\t\tData: 
datatypes.JSON([]byte(val)),\n\t}\n\tevent, err := e.Create(s.DB)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn event, nil\n}", "func (vrsConnection *VRSConnection) CreateEntity(info EntityInfo) error {\n\n\tif len(info.UUID) == 0 {\n\t\treturn fmt.Errorf(\"Uuid absent\")\n\t}\n\n\tif len(info.Name) == 0 {\n\t\treturn fmt.Errorf(\"Name absent\")\n\t}\n\n\t// The Nuage_VM_Table has separate columns for enterprise and user.\n\t// Hence make a copy of the metadata and delete these keys.\n\tvar metadata map[string]string\n\tif info.Metadata != nil {\n\t\tmetadata = make(map[string]string)\n\t\tfor k, v := range info.Metadata {\n\t\t\tmetadata[string(k)] = v\n\t\t}\n\t}\n\t//delete(metadata, string(entity.MetadataKeyEnterprise))\n\tdelete(metadata, string(entity.MetadataKeyUser))\n\n\tnuageVMTableRow := ovsdb.NuageVMTableRow{\n\t\tType: int(info.Type),\n\t\tVMName: info.Name,\n\t\tVMUuid: info.UUID,\n\t\tDomain: info.Domain,\n\t\tNuageUser: info.Metadata[entity.MetadataKeyUser],\n\t\tNuageEnterprise: info.Metadata[entity.MetadataKeyEnterprise],\n\t\tMetadata: metadata,\n\t\tPorts: info.Ports,\n\t\tEvent: int(entity.EventCategoryDefined),\n\t\tEventType: int(entity.EventDefinedAdded),\n\t\tState: int(entity.Running),\n\t\tReason: int(entity.RunningUnknown),\n\t}\n\n\tif info.Events != nil {\n\t\tnuageVMTableRow.Event = int(info.Events.EntityEventCategory)\n\t\tnuageVMTableRow.EventType = int(info.Events.EntityEventType)\n\t\tnuageVMTableRow.State = int(info.Events.EntityState)\n\t\tnuageVMTableRow.Reason = int(info.Events.EntityReason)\n\t}\n\n\tif err := vrsConnection.vmTable.InsertRow(vrsConnection.ovsdbClient, &nuageVMTableRow); err != nil {\n\t\treturn fmt.Errorf(\"Problem adding entity info to VRS %v\", err)\n\t}\n\n\treturn nil\n}", "func DefaultCreateIntPoint(ctx context.Context, in *IntPoint, db *gorm1.DB) (*IntPoint, error) {\n\tif in == nil {\n\t\treturn nil, errors.New(\"Nil argument to DefaultCreateIntPoint\")\n\t}\n\tormObj, err := 
in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (s *business) Create(example repository.Example) *response.Error {\n\ts.logger = s.loggerClone\n\ts.logger.SugaredLogger = s.logger.With(\"method\", \"Create\")\n\n\texample.ExampleID = util.GetUUID()\n\texample.Status = \"Pending\"\n\texample.CreateTime = time.Now()\n\n\terr := s.repository.CreateExample(example)\n\tif err != nil {\n\t\ts.logger.Errorw(\"create data error\", \"error\", err)\n\t\treturn response.NewErrorFromCode(errorcode.CreateDataError)\n\t}\n\n\treturn nil\n}", "func Create(ctx *gin.Context, data interface{}) {\n\tctx.JSON(http.StatusCreated, gin.H{\"code\": merrors.ErrSuccess, \"data\": nil})\n\treturn\n}", "func CreateLog(contextName string, env string) map[string]interface{} {\n\tmessage := fmt.Sprint(\"Creating log in Logentries for \", contextName, \" \", env, \"\\n\")\n\tlog.Println(message)\n\n\tlogName := fmt.Sprint(strings.Title(contextName), \" \", strings.Title(env))\n\n\tpostData := map[string]interface{}{\n\t\t\"log\": map[string]interface{}{\n\t\t\t\"name\": logName,\n\t\t\t\"structures\": [1]string{\n\t\t\t\tconfig.Reader.LogentriesHerokuLogStructureID,\n\t\t\t},\n\t\t\t\"user_data\": map[string]string{},\n\t\t\t\"source_type\": \"token\",\n\t\t\t\"tokens\": [0]string{},\n\t\t\t\"logsets_info\": [1]map[string]string{\n\t\t\t\tmap[string]string{\n\t\t\t\t\t\"id\": config.Reader.LogentriesLogsetKey,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\turl := fmt.Sprint(config.Reader.LogentriesAPIEndpoint, \"/management/logs\")\n\tresp := httphelper.ExecutePostReqAndParseResp(postData, url, client)\n\treturn resp.(map[string]interface{})\n}", "func (m *PasswordResetModel) Create(ctx context.Context, kv query.KV) (int64, error) {\n\n\tif _, ok := kv[\"created_at\"]; !ok {\n\t\tkv[\"created_at\"] = time.Now()\n\t}\n\n\tsqlStr, params := 
m.query.Table(m.tableName).ResolveInsert(kv)\n\n\tres, err := m.db.ExecContext(ctx, sqlStr, params...)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn res.LastInsertId()\n}", "func (rh *RestHandler) Create(w http.ResponseWriter, r *http.Request) {\n\terr := r.ParseForm()\n\tif err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"unable to parse form values\")\n\t\treturn\n\t}\n\n\tjob := model.Job{}\n\tif r.FormValue(\"partner_id\") == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"missing partner_id value\")\n\t\treturn\n\t}\n\tjob.PartnerID, err = strconv.ParseInt(r.FormValue(\"partner_id\"), 10, 64)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"invalid partner_id value\")\n\t\treturn\n\t}\n\tif r.FormValue(\"category_id\") == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"missing category_id value\")\n\t\treturn\n\t}\n\tjob.CategoryID, err = strconv.ParseInt(r.FormValue(\"category_id\"), 10, 64)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadRequest, errors.Wrap(err, \"invalid category_id value\").Error())\n\t\treturn\n\t}\n\tjob.Title = strings.TrimSpace(r.FormValue(\"title\"))\n\tif job.Title == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"missing or empty title given\")\n\t\treturn\n\t}\n\n\tif r.FormValue(\"expires_at\") == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"missing expires_at value\")\n\t\treturn\n\t}\n\tnow := time.Now()\n\tjob.ExpiresAt, err = time.ParseInLocation(model.DateFormat, r.FormValue(\"expires_at\"), now.Location())\n\tif err != nil || job.ExpiresAt.IsZero() {\n\t\twriteError(w, http.StatusBadRequest, errors.Wrap(err, \"invalid expiration date\").Error())\n\t\treturn\n\t}\n\t//Times are parsed without hour, so whe have to add the hours until de end of the day\n\tjob.ExpiresAt = job.ExpiresAt.Add(23*time.Hour + 59*time.Minute + 59*time.Second)\n\tif job.ExpiresAt.Before(now) {\n\t\twriteError(w, http.StatusBadRequest, \"job already expired\")\n\t\treturn\n\t}\n\treq := 
model.RequestCreate{}\n\treq.Job = job\n\n\tencreq, err := crypt.EncryptRequest(rh.cipher, req)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\n\t_, err = rh.backend.Create(context.Background(), encreq)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\n\twriteResponse(w, http.StatusCreated, nil)\n}", "func (d *DiaryAPI) CreateNewDiaryHandler(w http.ResponseWriter, r *http.Request) {\n\tfmt.Println(\"/createNewDiary\")\n\n\tvar diary Diary\n\tcurrentTime := time.Now()\n\tcreatedDate := currentTime.Format(layoutISO)\n\tdiary.UpdatedAt = createdDate\n\tdiary.CreatedAt = createdDate\n\terr := json.NewDecoder(r.Body).Decode(&diary)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\td.isAuthorized(w, r)\n\tdefer r.Body.Close()\n\n\tfmt.Println(diary.Id)\n\tif _, err := d.DiaryService.createNewDiary(diary); err != nil {\n\t\trespondWithError(w, http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\trespondWithJSON(w, http.StatusOK, diary)\n}", "func (h *Hotel) Create(a *config.AppContext) error {\n\treturn a.Db.Create(h).Error\n}", "func (s *TimeEntriesService) Create(te *TimeEntry) (*TimeEntry, error) {\n\tu := \"time_entries\"\n\ttec := &TimeEntryCreate{te}\n\treq, err := s.client.NewRequest(\"POST\", u, tec)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdata := new(TimeEntryResponse)\n\t_, err = s.client.Do(req, data)\n\n\treturn data.Data, err\n}", "func (s *Postgres) Create(metric Metric) (Metric, error) {\n\t_, err := s.db.Model(&metric).\n\t\tReturning(\"*\").\n\t\tInsert()\n\n\tif err != nil {\n\t\treturn Metric{},\n\t\t\terrors.Wrapf(err, \"failed to store metric %s\", metric.Name)\n\t}\n\n\treturn metric, nil\n}", "func hNewEvent(c echo.Context) error {\n\tvar e httpError\n\n\tif (len(c.FormValue(\"code\")) == 0) || (len(c.FormValue(\"title\")) == 0) {\n\t\treturn c.JSON(http.StatusNotAcceptable, 
\"input information is not valid\")\n\t}\n\tuserCODE := c.FormValue(\"code\")\n\n\t// read from token user id\n\tvar tokenUserID int64\n\ttokenUserID = 1\n\n\tu, errGet := blog.GetUserByCode(tokenUserID, userCODE)\n\tif errGet != nil {\n\t\te.TheError = \"user code \" + userCODE + \" not found.\"\n\t\treturn c.JSON(http.StatusNotFound, e)\n\t}\n\tvar ev Event\n\tev.OpenedByUserID = u.ID\n\tev.Contents = c.FormValue(\"content\")\n\tev.Title = c.FormValue(\"title\")\n\n\terrAdd := blog.AddEvent(&ev)\n\tif errAdd != nil {\n\t\te.TheError = errAdd.Error()\n\t\treturn c.JSON(http.StatusInternalServerError, e)\n\t}\n\tfname, errUpload := lowlevelUploadFile(c, u.ID, ev.ID)\n\tif errUpload != nil {\n\t\te.TheError = \"could not upload file: \" + errUpload.Error()\n\t\treturn c.JSON(http.StatusInternalServerError, e)\n\t}\n\te.TheError = \"OK\" + \" - \" + fname\n\treturn c.JSON(http.StatusOK, e)\n}", "func (t TalentRepositoryImpl) Create(ctx context.Context, talent entity.Talent) (int64, error) {\n\tquery, args, err := squirrel.Insert(\"talent\").Columns(\n\t\t\"name\",\n\t).Values(talent.Name).ToSql()\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\tres, err := t.db.ExecContext(ctx, query, args...)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\tid, err := res.LastInsertId()\n\tlog.Printf(\"success insert talent : %s, with id %d\", talent.Name, id)\n\n\treturn id, nil\n}", "func (t *HeathCare_Chaincode) createMedicalRecord(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tfmt.Println(\"\\n=============== start createMedicalRecord function ===============\")\n\tstart := time.Now()\n\ttime.Sleep(time.Second)\n\n\tif len(args) != 7 {\n\t\treturn shim.Error(\"there must be 7 argument\")\n\t}\n\n\tfor i := 0; i < len(args); i++ {\n\t\tif len(args[i]) == 0 {\n\t\t\treturn shim.Error(\"argument \" + strconv.Itoa(i+1) + \" must be declare\")\n\t\t}\n\t}\n\tpatientId := args[0]\n\tpersonalIdentificationInformation := args[1]\n\tmedicalHistory := 
args[2]\n\tfamilyMedicalHistory := args[3]\n\tmedicationHistory := args[4]\n\ttreatmentHistory := args[5]\n\tmedicalDirectives := args[6]\n\n\t//convert variable to json\n\tobjectType := \"MedicalRecord\"\n\tmedialRecord := &MedicalRecord{objectType, patientId, personalIdentificationInformation,\n\t\tmedicalHistory, familyMedicalHistory, medicationHistory,\n\t\ttreatmentHistory, medicalDirectives}\n\n\t//convert data to byte\n\tMedicalRecordAsByte, errMedicalRecordAsByte := json.Marshal(medialRecord)\n\tif errMedicalRecordAsByte != nil {\n\t\treturn shim.Error(errMedicalRecordAsByte.Error())\n\t}\n\n\t//save to database\n\terrMedicalRecordAsByte = stub.PutPrivateData(\"MedicalRecordCollection\", patientId, MedicalRecordAsByte)\n\tif errMedicalRecordAsByte != nil {\n\t\treturn shim.Error(errMedicalRecordAsByte.Error())\n\t}\n\n\t//create index key\n\tindexName := \"id\"\n\tmedicalRecordIndexKey, errMedicalRecordIndexKey := stub.CreateCompositeKey(indexName, []string{medialRecord.ID, medialRecord.PersonalIdentificationInformation, medialRecord.MedicalHistory, medialRecord.FamilyMedicalHistory, medialRecord.MedicationHistory, medialRecord.TreatmentHistory, medialRecord.MedicalDirectives})\n\tif errMedicalRecordIndexKey != nil {\n\t\treturn shim.Error(errMedicalRecordIndexKey.Error())\n\t}\n\n\t//save index\n\tvalue := []byte{0x00}\n\tstub.PutPrivateData(\"MedicalRecordCollection\", medicalRecordIndexKey, value)\n\n\tend := time.Now()\n\telapsed := time.Since(start)\n\n\tfmt.Println(\"\\nfunction createMedicalRecord\")\n\tfmt.Printf(\"time start: %s\", start.String())\n\tfmt.Printf(\"time end: %s\", end.String())\n\tfmt.Println(\"time execute: \", elapsed.String())\n\tfmt.Println(\"=============== end createMedicalRecord function ===============\")\n\treturn shim.Success(nil)\n}", "func (a *HyperflexApiService) CreateHyperflexSoftwareDistributionEntry(ctx context.Context) ApiCreateHyperflexSoftwareDistributionEntryRequest {\n\treturn 
ApiCreateHyperflexSoftwareDistributionEntryRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "func (g *Group) NewEntry() (*Entry, error) {\n\tid, err := uuids.New4(g.db.rand)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\te := &Entry{UUID: id, db: g.db}\n\tg.entries = append(g.entries, e)\n\tg.db.entries = append(g.db.entries, e)\n\treturn e, nil\n}", "func (s *Store) Create(c *gin.Context) {\n\n}", "func (u *InformationRepository) Create(model *models.Information) error {\n\tquery := u.InformationTable().Create(model)\n\tif err := query.Error; err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (t *HeathCare_Chaincode) createHospitalFees(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tfmt.Println(\"\\n=============== start createHospitalFees function ===============\")\n\tstart := time.Now()\n\ttime.Sleep(time.Second)\n\n\t//check length of data\n\tif len(args) != 3 {\n\t\treturn shim.Error(\"expecting 3 argument\")\n\t}\n\n\t//define data variable\n\tid := args[0]\n\tpatientName := args[1]\n\taccount := args[2]\n\tdateOfService := args[3]\n\tpatientService := args[4]\n\tprimaryInsuranceBilled := args[5]\n\tsecondaryInsuranceBilled := args[6]\n\tpharmacy := args[7]\n\troom := args[8]\n\tamountDue := args[9]\n\n\tObjectType := \"HospitalFees\"\n\thospitalFees := &HospitalFees{ObjectType, id, patientName, account, dateOfService,\n\t\tpatientService, primaryInsuranceBilled, secondaryInsuranceBilled, pharmacy,\n\t\troom, amountDue}\n\n\t//marshal delivery to byte\n\thospitalFeesAsByte, errHospitalFeesAsByte := json.Marshal(hospitalFees)\n\tif errHospitalFeesAsByte != nil {\n\t\treturn shim.Error(\"cannot marshal pharmacy's data\")\n\t}\n\n\t//put data to ledger\n\terrHospitalFeesAsByte = stub.PutPrivateData(\"HospitalFeesCollection\", id, hospitalFeesAsByte)\n\tif errHospitalFeesAsByte != nil {\n\t\treturn shim.Error(\"cannot put private data of pharmacy\")\n\t}\n\n\t//create index key\n\tindexKey := 
\"id~patient_name\"\n\thospitalFeesIndexKey, errHospitalFeesIndexKey := stub.CreateCompositeKey(indexKey, []string{hospitalFees.ID, hospitalFees.PatientName, hospitalFees.Account, hospitalFees.DateOfService, hospitalFees.PatientService, hospitalFees.PrimaryInsuranceBilled, hospitalFees.SecondaryInsuranceBilled, hospitalFees.Pharmacy, hospitalFees.Room, hospitalFees.AmountDue})\n\tif errHospitalFeesIndexKey != nil {\n\t\treturn shim.Error(\"cannot create index key of delivery\")\n\t}\n\n\t//save key\n\tvalue := []byte{0x00}\n\tstub.PutPrivateData(\"HospitalFeesCollection\", hospitalFeesIndexKey, value)\n\n\tend := time.Now()\n\telapsed := time.Since(start)\n\n\tfmt.Println(\"\\nfunction createHospitalFees\")\n\tfmt.Println(\"time start: \", start.String())\n\tfmt.Println(\"time end: \", end.String())\n\tfmt.Println(\"time execute: \", elapsed.String())\n\tfmt.Println(\"=============== end createHospitalFees function ===============\")\n\ttime.Sleep(time.Second)\n\n\treturn shim.Success(nil)\n}", "func CreatePerson(c *gin.Context) {\n // Validate input\n var input CreatePersonInput\n if err := c.ShouldBindJSON(&input); err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n // Create person\n person := models.Person{CreatedBy: input.CreatedBy, FirstName: input.FirstName, LastName: input.LastName, Email: input.Email, Phone: input.Phone, Birthday: input.Birthday, Title: input.Title, Department: input.Department}\n models.DB.Create(&person)\n\n c.JSON(http.StatusOK, gin.H{\"data\": person})\n}", "func (fs EntityWatcherFuncs) EntityCreated(e Entity, t Type) { fs.Created(e, t) }", "func (r *repositorySupplier) EntityCreate(input *schemas.SchemaSupplier) (*models.ModelSupplier, schemas.SchemaDatabaseError) {\n\tvar supplier models.ModelSupplier\n\tphone, _ := strconv.ParseUint(input.Phone, 10, 64)\n\tsupplier.Name = input.Name\n\tsupplier.Phone = phone\n\tsupplier.Address = input.Address\n\n\terr := make(chan 
schemas.SchemaDatabaseError, 1)\n\n\tdb := r.db.Model(&supplier)\n\n\tcheckSupplierName := db.Debug().First(&supplier, \"name = ?\", supplier.Name)\n\n\tif checkSupplierName.RowsAffected > 0 {\n\t\terr <- schemas.SchemaDatabaseError{\n\t\t\tCode: http.StatusConflict,\n\t\t\tType: \"error_create_01\",\n\t\t}\n\t\treturn &supplier, <-err\n\t}\n\n\tcheckSupplierPhone := db.Debug().First(&supplier, \"phone = ?\", supplier.Phone)\n\n\tif checkSupplierPhone.RowsAffected > 0 {\n\t\terr <- schemas.SchemaDatabaseError{\n\t\t\tCode: http.StatusConflict,\n\t\t\tType: \"error_create_02\",\n\t\t}\n\t\treturn &supplier, <-err\n\t}\n\n\taddSupplier := db.Debug().Create(&supplier).Commit()\n\n\tif addSupplier.RowsAffected < 1 {\n\t\terr <- schemas.SchemaDatabaseError{\n\t\t\tCode: http.StatusForbidden,\n\t\t\tType: \"error_create_03\",\n\t\t}\n\t\treturn &supplier, <-err\n\t}\n\n\terr <- schemas.SchemaDatabaseError{}\n\treturn &supplier, <-err\n}", "func (h *Hook) newEntry(entry *logrus.Entry) *logrus.Entry {\n\tdata := map[string]interface{}{}\n\n\tfor k, v := range h.Options {\n\t\tdata[k] = v\n\t}\n\tfor k, v := range entry.Data {\n\t\tdata[k] = v\n\t}\n\treturn &logrus.Entry{\n\t\tLogger: entry.Logger,\n\t\tTime: entry.Time,\n\t\tLevel: entry.Level,\n\t\tData: data,\n\t\tMessage: entry.Message,\n\t}\n}", "func (l *log) create(value []byte) *Entry {\n\tentry := &Entry{\n\t\tIndex: len(l.entries),\n\t\tValue: value,\n\t}\n\tl.entries = append(l.entries, entry)\n\treturn entry\n}", "func Create(record Record) {\r\n\tdb.QueryRow(\"insert into records values($1,$2,$3,$4,$5)\", record.Uuid, record.Mail, record.Seq, record.Pssm, record.Result)\r\n}", "func CreateSearchRecord(s SearchFields) error {\n\t_, err := db.Query(`INSERT INTO queries(data, created_at, updated_at) VALUES($1,'now()',now())`, s)\n\treturn err\n}", "func CreateDinner(date string, venue string, hostName string, attendeeNames []string) (model.Dinner, error) {\n\tdateTime, err := time.Parse(model.DateFormat, 
date)\n\tif err != nil {\n\t\treturn model.Dinner{}, err\n\t}\n\n\trow, err := database.InsertDinner(dateTime, venue, hostName)\n\tif err != nil {\n\t\treturn model.Dinner{}, err\n\t}\n\n\tdinner, err := model.NewDinnerFromMap(row)\n\tif err != nil {\n\t\treturn dinner, err\n\t}\n\n\tdinner.Attended, err = database.InsertGuests(dinner.ID, attendeeNames)\n\treturn dinner, err\n}", "func (dao *DAOName) Create(m *ReferenceModel) error {\n\tif err := dao.db.Create(m).Error; err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func CreateDefaultUser(username, password string) (err error) {\n\n err = checkInit()\n if err != nil {\n return\n }\n\n var users = data[\"users\"].(map[string]interface{})\n // Check if the default user exists\n if len(users) > 0 {\n err = createError(001)\n return\n }\n\n var defaults = defaultsForNewUser(username, password)\n users[defaults[\"_id\"].(string)] = defaults\n saveDatabase(data)\n\n return\n}", "func NewEntry(env env.Core, logger *logrus.Logger) *logrus.Entry {\n\tlogger.AddHook(&payloadHook{\n\t\tpayload: &AuditPayload{},\n\t\tenv: env,\n\t})\n\n\treturn logrus.NewEntry(logger)\n}", "func (s *Store) createEntry(URL, remoteAddr string) (string, error) {\n\tid := generateRandomString(s.idLength)\n\texists := s.checkExistence(id)\n\tif !exists {\n\t\traw, err := json.Marshal(Entry{\n\t\t\tURL: URL,\n\t\t\tRemoteAddr: remoteAddr,\n\t\t\tCreatedOn: time.Now(),\n\t\t})\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\treturn id, s.createEntryRaw([]byte(id), raw)\n\t}\n\treturn \"\", errors.New(\"entry already exists\")\n}", "func StartCreateEntryCall(m telemetry.Metrics) *telemetry.CallCounter {\n\treturn telemetry.StartCall(m, telemetry.RegistrationAPI, telemetry.Entry, telemetry.Create)\n}", "func CreateMilestoneData(advance int, interval string, logger *log.Logger, api string) map[string]Milestone {\n\ttoday := time.Now().Local()\n\tmilestones := map[string]Milestone{}\n\tswitch interval {\n\tcase \"daily\":\n\t\tfor i := 0; 
i < advance; i++ {\n\t\t\tvar m Milestone\n\t\t\tvar dueDate string\n\t\t\ttitle := today.AddDate(0, 0, i).Format(\"2006-01-02\")\n\t\t\tswitch api {\n\t\t\tcase \"gitlab\":\n\t\t\t\tdueDate = today.AddDate(0, 0, i).Format(\"2006-01-02\")\n\t\t\tcase \"github\":\n\t\t\t\tdueDate = today.AddDate(0, 0, i).Format(time.RFC3339)\n\t\t\t}\n\t\t\tm.Title = title\n\t\t\tm.DueDate = dueDate\n\t\t\tmilestones[title] = m\n\t\t}\n\tcase \"weekly\":\n\t\tfor i := 0; i < advance; i++ {\n\t\t\tvar m Milestone\n\t\t\tvar dueDate string\n\t\t\tlastDay := LastDayWeek(today)\n\t\t\tyear, week := lastDay.ISOWeek()\n\t\t\ttitle := strconv.Itoa(year) + \"-w\" + strconv.Itoa(week)\n\t\t\tswitch api {\n\t\t\tcase \"gitlab\":\n\t\t\t\tdueDate = lastDay.Format(\"2006-01-02\")\n\t\t\tcase \"github\":\n\t\t\t\tdueDate = lastDay.Format(time.RFC3339)\n\t\t\t}\n\t\t\tm.Title = title\n\t\t\tm.DueDate = dueDate\n\t\t\tmilestones[title] = m\n\t\t\ttoday = lastDay.AddDate(0, 0, 7)\n\t\t}\n\tcase \"monthly\":\n\t\tfor i := 0; i < advance; i++ {\n\t\t\tvar m Milestone\n\t\t\tvar dueDate string\n\t\t\tdate := today.AddDate(0, i, 0)\n\t\t\tlastDay := LastDayMonth(date.Year(), int(date.Month()), time.UTC)\n\t\t\ttitle := date.Format(\"2006-01\")\n\t\t\tswitch api {\n\t\t\tcase \"gitlab\":\n\t\t\t\tdueDate = lastDay.Format(\"2006-01-02\")\n\t\t\tcase \"github\":\n\t\t\t\tdueDate = lastDay.Format(time.RFC3339)\n\t\t\t}\n\t\t\tm.Title = title\n\t\t\tm.DueDate = dueDate\n\t\t\tmilestones[title] = m\n\t\t}\n\tdefault:\n\t\tlogger.Println(\"Error: Incorrect interval\")\n\t\treturn milestones\n\t}\n\n\treturn milestones\n}", "func handleAppInstanceStatusCreate(ctxArg interface{}, key string,\n\tstatusArg interface{}) {\n\tctx := ctxArg.(*zedmanagerContext)\n\tpublishAppInstanceSummary(ctx)\n}", "func (this *EntityManager) createEntity(parent Entity, attributeName string, entity Entity) (Entity, *CargoEntities.Error) {\r\n\r\n\t// Set the entity values here.\r\n\ttypeName := entity.GetTypeName() // Set the type 
name if not already set...\r\n\tentity.SetParentLnk(attributeName)\r\n\tentity.SetParentUuid(parent.GetUuid())\r\n\r\n\t// Here I will set the entity on the cache...\r\n\tthis.setEntity(entity)\r\n\r\n\tstoreId := typeName[0:strings.Index(typeName, \".\")]\r\n\tprototype, _ := GetServer().GetEntityManager().getEntityPrototype(typeName, storeId)\r\n\r\n\t// Now entity are quadify I will save it in the graph store.\r\n\tstore := GetServer().GetDataManager().getDataStore(storeId)\r\n\r\n\t// I will create the entity.\r\n\t_, err := store.Create(\"\", []interface{}{entity})\r\n\tif err != nil {\r\n\t\tcargoError := NewError(Utility.FileLine(), ENTITY_CREATION_ERROR, SERVER_ERROR_CODE, err)\r\n\t\treturn nil, cargoError\r\n\t}\r\n\r\n\t//LogInfo(\"---> create entity \", entity.GetUuid())\r\n\tentity.SetNeedSave(false)\r\n\r\n\t// also save it parent.\r\n\tif parent != nil {\r\n\t\t// Now I will save it in the datastore.\r\n\t\t// I will set the entity parent.\r\n\t\tcargoError := this.setParent(parent, entity)\r\n\t\tif cargoError != nil {\r\n\t\t\treturn nil, cargoError\r\n\t\t}\r\n\t} else {\r\n\t\tcargoError := NewError(Utility.FileLine(), ENTITY_CREATION_ERROR, SERVER_ERROR_CODE, errors.New(\"parent must not be nil when createEntity is call.\"))\r\n\t\treturn nil, cargoError\r\n\t}\r\n\r\n\t// The event data...\r\n\teventData := make([]*MessageData, 2)\r\n\tmsgData0 := new(MessageData)\r\n\tmsgData0.Name = \"entity\"\r\n\tif reflect.TypeOf(entity).String() == \"*Server.DynamicEntity\" {\r\n\t\tmsgData0.Value = entity.(*DynamicEntity).getValues()\r\n\t} else {\r\n\t\tmsgData0.Value = entity\r\n\t}\r\n\teventData[0] = msgData0\r\n\r\n\tmsgData1 := new(MessageData)\r\n\tmsgData1.Name = \"prototype\"\r\n\tmsgData1.Value = prototype\r\n\teventData[1] = msgData1\r\n\r\n\tevt, _ := NewEvent(NewEntityEvent, EntityEvent, eventData)\r\n\tGetServer().GetEventManager().BroadcastEvent(evt)\r\n\r\n\t// Set it childs...\r\n\tthis.saveChilds(entity, prototype)\r\n\r\n\treturn 
entity, nil\r\n}", "func Create(ctx *gin.Context) {\n\n}", "func CreatePsychologist(dbase *gorm.DB, w http.ResponseWriter, r *http.Request) {\n\tuser := &db.Psychologist{}\n\terr := json.NewDecoder(r.Body).Decode(&user)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t_ = json.NewEncoder(w).Encode(utils.ErrorResponse{\n\t\t\tCode: http.StatusInternalServerError,\n\t\t\tMessage: \"An error occurred\",\n\t\t})\n\t\treturn\n\t}\n\n\tuser.Password = utils.HashPassword(user.Password, w)\n\tif user.Password == \"\" {\n\t\treturn\n\t}\n\n\trs := dbase.Create(&user)\n\tif rs.Error != nil {\n\t\tlog.Println(rs)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tlog.Println(json.NewEncoder(w).Encode(utils.ErrorResponse{\n\t\t\tCode: http.StatusInternalServerError,\n\t\t\tMessage: \"Could not create your account. Please try again later\",\n\t\t}))\n\t\treturn\n\t}\n\n\t// body := struct {\n\t// \tName string\n\t// \tLink string\n\t// }{\n\t// \tName: fmt.Sprintf(\"%s %s\", user.FirstName, user.LastName),\n\t// \tLink: \"https://google.com\",\n\t// }\n\n\t// go func(dbase *gorm.DB, email string, subject string, HTMLTemp string, body interface{}) {\n\t// \terr := utils.SendEmail(dbase, email, subject, HTMLTemp, body)\n\t// \tif err != nil {\n\t// \t\tlog.Println(err)\n\t// \t\t_ = json.NewEncoder(w).Encode(err.Error())\n\t// \t\treturn\n\t// \t}\n\t// }(dbase, user.Email, \"Welcome\", \"templates/email/confirm.html\", body)\n\n\tw.WriteHeader(http.StatusCreated)\n\tlog.Println(json.NewEncoder(w).Encode(user))\n}", "func LogCreate(entity interface{}, logger pqdep.Logger) error {\n\treturn createFunc(queryFuncWrapper(logger), entity)\n}", "func HealthHandler(w http.ResponseWriter, r *http.Request) {\n\tlog.Println(\"--- RECEIVED HEALTH DATA FROM HEALTH PULSE ---\")\n\n\tauth, httpErr := auth.CheckAuth(w, r)\n\tif !auth {\n\t\thttp.Error(w, httpErr.Status, httpErr.StatusCode)\n\t\treturn\n\t}\n\n\tvar healthSample 
health.Data\n\tdata, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tlog.Println(string(data))\n\n\terr = json.Unmarshal(data, &healthSample)\n\tif err != nil {\n\t\tlog.Errorf(\"Error while unmarshaling incoming health data: %s\", err)\n\t\tlog.Println(string(data))\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tc := utils.MONGOSESSION.DB(\"healthDB\").C(\"healthData\")\n\tvar results []dailyData\n\terr = c.Find(bson.M{\"date\": healthSample.Date}).All(&results)\n\n\tif err != nil {\n\t\tlog.Errorf(\"Error while finding health data entries: %s\", err)\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Calculate Min and Max heart rate\n\tminMaxHR := calcMinMaxDailyHeartRate(healthSample)\n\thealthSample.MinMaxHeartRate = minMaxHR\n\n\t// If there is no entry for the current day create one with the current sample\n\tif len(results) == 0 {\n\t\terr = c.Insert(&dailyData{Date: healthSample.Date, Data: healthSample})\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Error while inserting health data entries: %s\", err)\n\t\t\treturn\n\t\t}\n\t\tlog.Infof(\"LOGGED ENTRY %s\", healthSample.Date)\n\t\treturn\n\t}\n\n\t// If there is an entry for the current day, update the entry with the current sample\n\tcolQuerier := bson.M{\"date\": healthSample.Date}\n\tchange := bson.M{\"$set\": bson.M{\"date\": healthSample.Date, \"data\": healthSample}}\n\terr = c.Update(colQuerier, change)\n\tif err != nil {\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tlog.Infof(\"UPDATING ENTRY %s\", healthSample.Date)\n}", "func (db *Datastore) Create(txn transaction.Transaction) error {\n\tfmt.Println(`Creating txn with data:`, txn)\n\t// 
your DB operations to transactions ...\n\treturn nil\n}", "func (dshree DeployedServiceHealthReportExpiredEvent) AsStatefulReplicaHealthReportCreatedEvent() (*StatefulReplicaHealthReportCreatedEvent, bool) {\n\treturn nil, false\n}", "func (dahree DeployedApplicationHealthReportExpiredEvent) AsStatefulReplicaHealthReportCreatedEvent() (*StatefulReplicaHealthReportCreatedEvent, bool) {\n\treturn nil, false\n}", "func (r *Repository) create(user *domain.UserInfoModel) error {\n\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancel()\n\n\tquery := \"INSERT INTO users (namee, email, password) VALUES ($1, $2, $3)\"\n\tstmt, err := r.db.PrepareContext(ctx, query)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer stmt.Close()\n\n\tqueryStart := time.Now().Nanosecond() / 1000\n\t_, err = stmt.ExecContext(ctx, user.Name, user.Email, user.PassWord)\n\tif err != nil {\n\t\treturn err\n\t}\n\tqueryEnd := time.Now().Nanosecond() / 1000\n\texecutionTime := queryEnd - queryStart\n\terr = r.insertTimeSpent(\"Create\", executionTime)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nil\n}", "func (m *Manager) CreateAuditLogDetail(ald *AuditLogDetail) error {\n\tnow := time.Now()\n\tald.CreatedAt = now\n\n\tfunc(in interface{}) {\n\t\tif ii, ok := in.(initializer.Simple); ok {\n\t\t\tii.Initialize()\n\t\t}\n\t}(ald)\n\n\treturn m.GetWDbMap().Insert(ald)\n}", "func CreateDefault() {\n\texistingAdmin, err := FindByEmail(config.AdminEmail)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tif existingAdmin != nil && !existingAdmin.IsAdmin {\n\t\tlog.Fatalf(\"FATAL: User with email %v exists but has no admin rights.\", config.AdminEmail)\n\t}\n\n\tif existingAdmin == nil {\n\t\tadmin, err := New(config.AdminEmail, \"Admin\", \"Admin\", config.AdminPassword)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tadmin.IsAdmin = true\n\t\terr = Save(&admin)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Cannot create admin user with email %v. 
Error: %v\", config.AdminEmail, err.Error())\n\t\t}\n\t\treturn\n\t}\n\tif config.ServerDebug() {\n\t\tlog.Println(\"Admin account found.\")\n\t}\n\n\t//Create the test user if we are not in production\n\tif !config.ProductionMode {\n\t\texistingTestUser, err := FindByEmail(config.TestUserEmail)\n\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t}\n\n\t\tif existingTestUser == nil {\n\t\t\ttestuser, err := New(config.TestUserEmail, \"TestUser\", \"TestUser\", \"xaFqJDeJldIEcdfZS\")\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(err)\n\t\t\t}\n\t\t\terr = Save(&testuser)\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatalf(\"Cannot create user with email %v. Error: %v\", config.TestUserEmail, err.Error())\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tlog.Println(\"Test account found.\")\n\t}\n}", "func (orm *JallyORM) Create(q Query) error {\n\treturn orm.Query(q.Create()).Exec()\n}", "func Create(dst interface{}) {\n\tif datastore != nil {\n\t\tdatastore.Create(dst)\n\t} else {\n\t\tlog.Printf(\"No database configured, not creating %v\", dst)\n\t}\n}", "func (s Seeder) Create() string {\n\treturn fmt.Sprintf(`{ \"name\": \"building-%s\",\"address\": \"address::%s\",\"floors\": [\"floor-%s\",\"floor-%s\"] }`,\n\t\tfake.DigitsN(12),\n\t\tfake.DigitsN(15),\n\t\tfake.DigitsN(5),\n\t\tfake.DigitsN(5),\n\t)\n}", "func (s service) Create(ctx context.Context, req CreateTemperatureRequest) (Temperature, error) {\n\tif err := req.Validate(); err != nil {\n\t\treturn Temperature{}, err\n\t}\n\tnow := time.Now()\n\ttemperature := entity.Temperature{\n\t\tCityID: req.CityID,\n\t\tMin: *req.Min,\n\t\tMax: *req.Max,\n\t\tCreatedAt: now,\n\t}\n\terr := s.repo.Create(ctx, &temperature)\n\tif err != nil {\n\t\treturn Temperature{}, err\n\t}\n\treturn s.Get(ctx, temperature.ID)\n}", "func (epc *EntryPointCreate) defaults() {\n\tif _, ok := epc.mutation.CreateTime(); !ok {\n\t\tv := entrypoint.DefaultCreateTime()\n\t\tepc.mutation.SetCreateTime(v)\n\t}\n\tif _, ok := epc.mutation.UpdateTime(); !ok {\n\t\tv := 
entrypoint.DefaultUpdateTime()\n\t\tepc.mutation.SetUpdateTime(v)\n\t}\n}", "func (app *builder) Now() (Entry, error) {\n\tif app.hash == nil {\n\t\treturn nil, errors.New(\"the hash is mandatory in order to build an Entry instance\")\n\t}\n\n\tif app.name == \"\" {\n\t\treturn nil, errors.New(\"the name is mandatory in order to build an Entry instance\")\n\t}\n\n\tif app.trx == nil {\n\t\treturn nil, errors.New(\"the []Transaction are mandatory in order to build an Entry instance\")\n\t}\n\n\tif len(app.trx) <= 0 {\n\t\treturn nil, errors.New(\"there must be at least 1 Transaction in order to build an Entry instance\")\n\t}\n\n\timmutable, err := app.immutableBuilder.Create().WithHash(*app.hash).CreatedOn(app.createdOn).Now()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif app.description != \"\" {\n\t\treturn createEntryWithDescription(immutable, app.name, app.trx, app.description), nil\n\t}\n\n\treturn createEntry(immutable, app.name, app.trx), nil\n}", "func NewEntry() *Entry {\n return &Entry{\n Active: true,\n Passwords: make([]string, 0),\n Tags: make([]string, 0),\n }\n}", "func (s *Store) createEntryRaw(key, value []byte) error {\n\terr := s.db.Update(func(tx *bolt.Tx) error {\n\t\tbucket := tx.Bucket(s.bucketName)\n\t\traw := bucket.Get(key)\n\t\tif raw != nil {\n\t\t\treturn errors.New(\"entry value is not empty\")\n\t\t}\n\t\terr := bucket.Put(key, value)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"could not put data into bucket\")\n\t\t}\n\t\treturn nil\n\t})\n\treturn err\n}" ]
[ "0.63600236", "0.6293879", "0.62592554", "0.6080919", "0.5723222", "0.56579703", "0.5514651", "0.5431401", "0.5408429", "0.5314862", "0.5306656", "0.53030455", "0.5234485", "0.5054075", "0.5029634", "0.5025869", "0.49344203", "0.49251345", "0.49183023", "0.4915359", "0.48764566", "0.4872175", "0.48679247", "0.48644716", "0.48519567", "0.48451415", "0.4843055", "0.48014605", "0.4780552", "0.47777832", "0.47707486", "0.4706567", "0.46964854", "0.4692998", "0.46922708", "0.46769688", "0.4669539", "0.46620753", "0.46486086", "0.4646952", "0.46445516", "0.4644405", "0.46404353", "0.4639725", "0.4637852", "0.46362162", "0.4631003", "0.4625511", "0.46184465", "0.46162915", "0.46147147", "0.46095768", "0.4604026", "0.4584052", "0.4583571", "0.45716333", "0.45665944", "0.45609552", "0.45547497", "0.4551468", "0.45504436", "0.4544844", "0.4543102", "0.45425645", "0.4540948", "0.45337883", "0.45302826", "0.45263305", "0.45259175", "0.45140797", "0.45139202", "0.45051208", "0.45024168", "0.44994932", "0.44975936", "0.44955668", "0.44855902", "0.44831857", "0.44778004", "0.44648945", "0.44536936", "0.44456917", "0.444551", "0.44451222", "0.44427195", "0.44421124", "0.4440579", "0.44396448", "0.4439486", "0.4432992", "0.44299334", "0.44295862", "0.442718", "0.44268373", "0.4422683", "0.44196203", "0.4417509", "0.44174048", "0.44173548", "0.4415921" ]
0.8274689
0
DefaultReadHealthMenstruationDailyEntry executes a basic gorm read call
func DefaultReadHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) { if in == nil { return nil, errors1.NilArgumentError } ormObj, err := in.ToORM(ctx) if err != nil { return nil, err } if ormObj.Id == 0 { return nil, errors1.EmptyIdError } if hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadApplyQuery); ok { if db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil { return nil, err } } if db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationDailyEntryORM{}); err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadFind); ok { if db, err = hook.BeforeReadFind(ctx, db); err != nil { return nil, err } } ormResponse := HealthMenstruationDailyEntryORM{} if err = db.Where(&ormObj).First(&ormResponse).Error; err != nil { return nil, err } if hook, ok := interface{}(&ormResponse).(HealthMenstruationDailyEntryORMWithAfterReadFind); ok { if err = hook.AfterReadFind(ctx, db); err != nil { return nil, err } } pbResponse, err := ormResponse.ToPB(ctx) return &pbResponse, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultListHealthMenstruationDailyEntry(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationDailyEntry, error) {\n\tin := HealthMenstruationDailyEntry{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationDailyEntryORM{}, &HealthMenstruationDailyEntry{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationDailyEntryORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationDailyEntry{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func DefaultReadHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := 
interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationPersonalInfoORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationPersonalInfoORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationPersonalInfoORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultCreateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultPatchHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, 
errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationDailyEntry\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationDailyEntry(ctx, &HealthMenstruationDailyEntry{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationDailyEntry(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationDailyEntryWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultStrictUpdateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationDailyEntry\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationDailyEntryORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := 
interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx context.Context, patchee *HealthMenstruationDailyEntry, patcher *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Day\" {\n\t\t\tpatchee.Day = patcher.Day\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"IntensityPercentage\" {\n\t\t\tpatchee.IntensityPercentage = patcher.IntensityPercentage\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Type\" {\n\t\t\tpatchee.Type = patcher.Type\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Manual\" {\n\t\t\tpatchee.Manual = 
patcher.Manual\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"BasedOnPrediction\" {\n\t\t\tpatchee.BasedOnPrediction = patcher.BasedOnPrediction\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultPatchSetHealthMenstruationDailyEntry(ctx context.Context, objects []*HealthMenstruationDailyEntry, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationDailyEntry, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationDailyEntry, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationDailyEntry(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func GetData(w http.ResponseWriter, r *http.Request) {\n\tresult := dailyData{}\n\tc := utils.MONGOSESSION.DB(\"healthDB\").C(\"healthData\")\n\tc.Find(bson.M{\"date\": utils.GetDate(time.Now())}).One(&result)\n\tb, _ := json.Marshal(result)\n\tfmt.Fprintf(w, string(b))\n}", "func GetEntryAtDate(date interface{}, entry string) interface{} {\n\tisRequestedFieldEntryValid := false\n\tresult := dailyData{}\n\n\tc := utils.MONGOSESSION.DB(\"healthDB\").C(\"healthData\")\n\tc.Find(bson.M{\"date\": date}).One(&result)\n\tif entry != \"\" {\n\t\tavailableFields, _ := reflections.Fields(result.Data)\n\n\t\t// Check if the requested field entry is available at the requested date\n\t\tfor _, field := range availableFields {\n\t\t\tif entry == field {\n\t\t\t\tisRequestedFieldEntryValid = true\n\t\t\t}\n\t\t}\n\n\t\tif !isRequestedFieldEntryValid {\n\t\t\treturn nil\n\t\t}\n\n\t\trequestedData, err := reflections.GetField(result.Data, entry)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"%s\", err)\n\t\t}\n\t\treturn requestedData\n\t}\n\treturn result\n}", "func (lsm 
*lsm) Read(key string, ts uint64) (*Entry, error) {\n\tfor _, level := range lsm.levels {\n\t\tentry, err := level.Find(key, ts)\n\t\tif err != nil {\n\t\t\tswitch err.(type) {\n\t\t\tcase *ErrKeyNotFound:\n\t\t\t\tcontinue\n\t\t\tdefault:\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t} else {\n\t\t\treturn entry, nil\n\t\t}\n\t}\n\treturn nil, newErrKeyNotFound()\n}", "func HealthHandler(w http.ResponseWriter, r *http.Request) {\n\tlog.Println(\"--- RECEIVED HEALTH DATA FROM HEALTH PULSE ---\")\n\n\tauth, httpErr := auth.CheckAuth(w, r)\n\tif !auth {\n\t\thttp.Error(w, httpErr.Status, httpErr.StatusCode)\n\t\treturn\n\t}\n\n\tvar healthSample health.Data\n\tdata, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tlog.Println(string(data))\n\n\terr = json.Unmarshal(data, &healthSample)\n\tif err != nil {\n\t\tlog.Errorf(\"Error while unmarshaling incoming health data: %s\", err)\n\t\tlog.Println(string(data))\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tc := utils.MONGOSESSION.DB(\"healthDB\").C(\"healthData\")\n\tvar results []dailyData\n\terr = c.Find(bson.M{\"date\": healthSample.Date}).All(&results)\n\n\tif err != nil {\n\t\tlog.Errorf(\"Error while finding health data entries: %s\", err)\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Calculate Min and Max heart rate\n\tminMaxHR := calcMinMaxDailyHeartRate(healthSample)\n\thealthSample.MinMaxHeartRate = minMaxHR\n\n\t// If there is no entry for the current day create one with the current sample\n\tif len(results) == 0 {\n\t\terr = c.Insert(&dailyData{Date: healthSample.Date, Data: healthSample})\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Error while inserting health data entries: %s\", 
err)\n\t\t\treturn\n\t\t}\n\t\tlog.Infof(\"LOGGED ENTRY %s\", healthSample.Date)\n\t\treturn\n\t}\n\n\t// If there is an entry for the current day, update the entry with the current sample\n\tcolQuerier := bson.M{\"date\": healthSample.Date}\n\tchange := bson.M{\"$set\": bson.M{\"date\": healthSample.Date, \"data\": healthSample}}\n\terr = c.Update(colQuerier, change)\n\tif err != nil {\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tlog.Infof(\"UPDATING ENTRY %s\", healthSample.Date)\n}", "func DefaultListHealthMenstruationPersonalInfo(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationPersonalInfo, error) {\n\tin := HealthMenstruationPersonalInfo{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationPersonalInfoORM{}, &HealthMenstruationPersonalInfo{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationPersonalInfoORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationPersonalInfo{}\n\tfor _, responseEntry := 
range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func (e *ExpenseModel) ReadAll(filter interface{}) ([]Expense, error) {\n\tvar expenses []Expense\n\tcollection := e.db.Client.Database(e.db.DBName).Collection(\"expenses\")\n\tlog.Printf(\"filter: %v\\n\", filter)\n\t// sort the entries based on the `date` field\n\topts := options.FindOptions{}\n\topts.SetSort(bson.D{{\"date\", -1}})\n\tcur, err := collection.Find(context.TODO(), filter, &opts)\n\tif err != nil {\n\t\tlog.Printf(\"ERROR FINDING DATA: %v\\n\", err)\n\t\treturn expenses, err\n\t}\n\tfor cur.Next(context.TODO()) {\n\t\tvar expense Expense\n\t\terr = cur.Decode(&expense)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error on Decoding the document: %v\\n\", err)\n\t\t}\n\t\texpenses = append(expenses, expense)\n\t}\n\tlog.Printf(\"documentReturned: %v\\n\", expenses)\n\treturn expenses, nil\n}", "func (ps *Store) Read(ctx context.Context, key datastore.Key, entity datastore.Entity) error {\n\tc := GetCon(ctx)\n\temd := entity.GetEntityMetadata()\n\titer := c.Query(getJSONSelect(emd.GetName(), emd.GetIDColumnName()), key).Iter()\n\tvar json string\n\tvalid := iter.Scan(&json)\n\tif !valid {\n\t\treturn common.NewError(datastore.EntityNotFound, fmt.Sprintf(\"%v not found with id = %v\", emd.GetName(), key))\n\t}\n\tdatastore.FromJSON(json, entity)\n\tif err := iter.Close(); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (m *HealthMenstruationDailyEntry) ToORM(ctx context.Context) (HealthMenstruationDailyEntryORM, error) {\n\tto := HealthMenstruationDailyEntryORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = 
ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.Day = &t\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = int32(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (store TodoStore) Read(_ sqlx.Queryer, filters ...gtimer.TodoFilter) (gtimer.Todos, error) {\n\tvar todo gtimer.Todo\n\tfor _, filter := range filters {\n\t\tfilter(&todo)\n\t}\n\tif todo.ID != \"\" {\n\t\ttodo, err := store.Get(todo.ID)\n\t\tif err != nil {\n\t\t\treturn gtimer.Todos{}, err\n\t\t}\n\t\treturn gtimer.Todos{todo}, err\n\t}\n\tif todo.Status != \"\" {\n\t\treturn store.ByStatus(todo.Status)\n\t}\n\treturn store.All()\n}", "func (m *HealthMenstruationDailyEntryORM) ToPB(ctx context.Context) (HealthMenstruationDailyEntry, error) {\n\tto := HealthMenstruationDailyEntry{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tif to.Day, err = ptypes1.TimestampProto(*m.Day); err != nil {\n\t\t\treturn to, 
err\n\t\t}\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = HealthMenstruationDailyEntry_Type(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (h *Handler) FetchDailyTimeSeries(w http.ResponseWriter, r *http.Request, params httprouter.Params) error {\n\tstockSymbol := params.ByName(\"stock\")\n\n\tstock, err := h.Stocky.Postgres.GetOrCreateStock(stockSymbol)\n\tif err != nil {\n\t\tError(w, err)\n\t\treturn err\n\t}\n\n\tredisKey := fmt.Sprintf(\"stocky_%s\", stockSymbol)\n\t_, err = h.Stocky.Redis.Get(redisKey)\n\tif err == redis.Nil {\n\t\terr = h.updateDaily(stockSymbol, redisKey, stock.ID)\n\t\tif err != nil {\n\t\t\tError(w, err)\n\t\t\treturn err\n\t\t}\n\t} else if err != nil {\n\t\tError(w, err)\n\t\treturn err\n\t}\n\n\tstockDailySeries, err := h.Stocky.Postgres.FetchDailySeriesByStock(stock.ID)\n\tif err != nil {\n\t\tError(w, err)\n\t\treturn err\n\t}\n\n\tOK(w, entity.NewStockDailyResponse(stock, stockDailySeries), \"\")\n\treturn nil\n}", "func (model *TodoerModel) Read(id int) Todoer {\n\tvar todoer = Todoer{}\n\n\tconnect(func(db *sql.DB) {\n\t\trows, err := db.Query(\"select * from todoer where id = ?\", id)\n\n\t\tif err != nil {\n\t\t\tlog.Panic(err)\n\t\t}\n\n\t\tdefer rows.Close()\n\n\t\tfor rows.Next() {\n\t\t\terr := rows.Scan(&todoer.ID, &todoer.Username, &todoer.CreatedAt, &todoer.ModifiedAt)\n\t\t\tif err != nil {\n\t\t\t\tlog.Panic(err)\n\t\t\t}\n\t\t}\n\n\t\terr = rows.Err()\n\n\t\tif err != nil {\n\t\t\tlog.Panic(err)\n\t\t}\n\t})\n\n\treturn todoer\n}", "func GetHealth(w http.ResponseWriter, r *http.Request, db *sqlx.DB) {\n\tparams := mux.Vars(r)\n\n\thealth := []Health{}\n\n\tvar err error\n\n\tsession, err := store.Get(r, \"auth\")\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), 
http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Convert our session data into an instance of User\n\tuser := User{}\n\tuser, _ = session.Values[\"user\"].(User)\n\n\tif user.Username != \"\" && user.AccessLevel == \"admin\" {\n\t\tif _, ok := params[\"id\"]; ok {\n\t\t\terr = db.Select(&health, \"SELECT id, username, ts, variable, value \"+\n\t\t\t\t\"FROM public.health \"+\n\t\t\t\t\"WHERE id = $1 \", params[\"id\"])\n\t\t} else if _, ok = params[\"ts\"]; ok {\n\t\t\terr = db.Select(&health, \"SELECT id, username, ts, variable, value \"+\n\t\t\t\t\"FROM public.health \"+\n\t\t\t\t\"WHERE ts = $1 \", params[\"ts\"])\n\t\t} else if _, ok = params[\"variable\"]; ok {\n\t\t\terr = db.Select(&health, \"SELECT id, username, ts, variable, value \"+\n\t\t\t\t\"FROM public.health \"+\n\t\t\t\t\"WHERE variable = $1 \", params[\"variable\"])\n\t\t} else {\n\t\t\terr = db.Select(&health, \"SELECT id, username, ts, variable, value \"+\n\t\t\t\t\"FROM public.health \")\n\t\t}\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\t\tw.WriteHeader(http.StatusOK)\n\n\t\tif err := json.NewEncoder(w).Encode(health); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t} else {\n\t\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\t\tw.WriteHeader(http.StatusOK)\n\n\t\tif err := json.NewEncoder(w).Encode(\"access denied\"); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}\n\n\tlogRequest(r)\n}", "func (tb *tableManager) read(keyIn uint64) (*Block, error) {\n\tentry, err := tb.getEntry(keyIn)\n\tif err != nil {\n\t\tlog.Println(\"Could not obtain entry.\")\n\t\treturn nil, errors.New(\"Could not obtain entry.\")\n\t}\n\tif entry.flags&flagRemove != 0 {\n\t\t// dataBase should be able to tell if a dirtyKey is marked\n\t\t// for removal so it can write it as removed in log.\n\t\treturn nil, nil\n\t}\n\ttb.updateLRUCacheHead(entry)\n\treturn entry.block, nil\n}", "func (s *Tplink) GetDailyStats(month, 
year int) (SysInfo, error) {\n\tvar (\n\t\tpayload dailyStats\n\t\tjsonResp SysInfo\n\t)\n\n\tpayload.Emeter.GetDaystat.Month = month\n\tpayload.Emeter.GetDaystat.Year = year\n\n\tj, _ := json.Marshal(payload)\n\n\tdata := encrypt(string(j))\n\tresp, err := send(s.Host, data)\n\tif err != nil {\n\t\treturn jsonResp, err\n\t}\n\n\tif err := json.Unmarshal([]byte(decrypt(resp)), &jsonResp); err != nil {\n\t\treturn jsonResp, err\n\t}\n\treturn jsonResp, nil\n}", "func (e *ExpenseModel) ReadOne(filter interface{}) (Expense, error) {\n\tvar expense Expense\n\tcollection := e.db.Client.Database(e.db.DBName).Collection(\"expenses\")\n\tdocumentReturned := collection.FindOne(context.TODO(), filter)\n\tdocumentReturned.Decode(&expense)\n\treturn expense, nil\n}", "func (o *Object) readMetaData(ctx context.Context) (err error) {\n\tif !o.modTime.IsZero() {\n\t\treturn nil\n\t}\n\t// Last resort\n\treturn o.readEntryAndSetMetadata(ctx)\n}", "func (d dynamo) Get(date time.Time, mealType string, details bool) (Meal, error) {\n\tsess, err := session.NewSession(&aws.Config{Region: aws.String(region)})\n\tif err != nil {\n\t\tfmt.Println(\"Error creating AWS session\")\n\t\treturn Meal{}, err\n\t}\n\tsvc := dynamodb.New(sess)\n\tresult, err := svc.GetItem(&dynamodb.GetItemInput{\n\t\tTableName: aws.String(\"meal\"),\n\t\tKey: map[string]*dynamodb.AttributeValue{\n\t\t\t\"id\": {\n\t\t\t\tS: aws.String(fmt.Sprintf(\"%s-%s\", date.Format(time.RFC3339), mealType)),\n\t\t\t},\n\t\t},\n\t})\n\tif err != nil {\n\t\tfmt.Println(\"error getting meal: \", err)\n\t\treturn Meal{}, err\n\t}\n\tm := Meal{}\n\terr = dynamodbattribute.UnmarshalMap(result.Item, &m)\n\tif err != nil {\n\t\tfmt.Println(\"error while unmarshalling: \", err)\n\t\treturn Meal{}, err\n\t}\n\treturn m, nil\n}", "func (wal *WalDB) ReadAll(snapshot *raftpb.Snapshot) (id *consensus.RaftIdentity, state *raftpb.HardState, ents []raftpb.Entry, err error) {\n\tif id, err = wal.GetIdentity(); err != nil {\n\t\treturn nil, 
state, ents, err\n\t}\n\n\tstate, err = wal.GetHardState()\n\tif err != nil {\n\t\treturn id, state, ents, ErrWalGetHardState\n\t}\n\n\tcommitIdx := state.Commit\n\tlastIdx, err := wal.GetRaftEntryLastIdx()\n\tif err != nil {\n\t\treturn id, state, ents, ErrWalGetLastIdx\n\t}\n\n\tvar snapIdx, snapTerm uint64\n\tif snapshot != nil {\n\t\tsnapIdx = snapshot.Metadata.Index\n\t\tsnapTerm = snapshot.Metadata.Term\n\t}\n\n\tlogger.Info().Uint64(\"snapidx\", snapIdx).Uint64(\"snapterm\", snapTerm).Uint64(\"commit\", commitIdx).Uint64(\"last\", lastIdx).Msg(\"read all entries of wal\")\n\n\tstart := snapIdx + 1\n\n\tfor i := start; i <= lastIdx; i++ {\n\t\twalEntry, err := wal.GetRaftEntry(i)\n\t\t// if snapshot is nil, initial confchange entry isn't saved to db\n\t\tif err != nil {\n\t\t\tlogger.Error().Err(err).Uint64(\"idx\", i).Msg(\"failed to get raft entry\")\n\t\t\treturn id, state, nil, err\n\t\t}\n\n\t\tif walEntry.Term < snapTerm {\n\t\t\tlogger.Error().Str(\"wal\", walEntry.ToString()).Err(ErrWalEntryTooLowTerm).Msg(\"invalid wal entry\")\n\t\t\treturn id, state, nil, ErrWalEntryTooLowTerm\n\t\t}\n\n\t\traftEntry, err := wal.convertWalToRaft(walEntry)\n\t\tif err != nil {\n\t\t\treturn id, state, nil, err\n\t\t}\n\n\t\tlogger.Debug().Str(\"walentry\", walEntry.ToString()).Msg(\"read wal entry\")\n\t\tents = append(ents, *raftEntry)\n\t}\n\n\treturn id, state, ents, nil\n}", "func hostRead(d *schema.ResourceData, m interface{}, params zabbix.Params) error {\n\tapi := m.(*zabbix.API)\n\n\tlog.Debug(\"Lookup of host with params %#v\", params)\n\n\thosts, err := api.HostsGet(params)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif len(hosts) < 1 {\n\t\td.SetId(\"\")\n\t\treturn nil\n\t}\n\tif len(hosts) > 1 {\n\t\treturn errors.New(\"multiple hosts found\")\n\t}\n\thost := hosts[0]\n\n\tlog.Debug(\"Got host: %+v\", host)\n\n\td.SetId(host.HostID)\n\td.Set(\"name\", host.Name)\n\td.Set(\"host\", host.Host)\n\td.Set(\"proxyid\", host.ProxyID)\n\td.Set(\"enabled\", 
host.Status == 0)\n\td.Set(\"inventory_mode\", HINV_LOOKUP_REV[host.InventoryMode])\n\n\td.Set(\"interface\", flattenHostInterfaces(host, d, m))\n\td.Set(\"templates\", flattenTemplateIds(host.ParentTemplateIDs))\n\td.Set(\"inventory\", flattenInventory(host))\n\td.Set(\"groups\", flattenHostGroupIds(host.GroupIds))\n\td.Set(\"macro\", flattenMacros(host.UserMacros))\n\td.Set(\"tag\", flattenTags(host.Tags))\n\n\treturn nil\n}", "func DefaultReadContact(ctx context.Context, in *Contact, db *gorm.DB) (*Contact, error) {\n\tif in == nil {\n\t\treturn nil, errors.New(\"Nil argument to DefaultReadContact\")\n\t}\n\tormParams, err := ConvertContactToORM(*in)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tormResponse := ContactORM{}\n\tif err = db.Set(\"gorm:auto_preload\", true).Where(&ormParams).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse, err := ConvertContactFromORM(ormResponse)\n\treturn &pbResponse, err\n}", "func LoadDailyMoney() {\n\tglobalInfo := path.Join(GetDesignerDir(), \"juewei.csv\")\n\tcsvcfg.LoadCSVConfig(globalInfo, &globalinfoCfg)\n}", "func executeReadingQuery(query *arangolite.Query) (*Ingredient, error) {\n\tvar result []Ingredient\n\n\trawResult, err := config.DB().Run(query)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tmarshallErr := json.Unmarshal(rawResult, &result)\n\tif marshallErr != nil {\n\t\treturn nil, marshallErr\n\t}\n\tif len(result) > 0 {\n\t\treturn &result[0], nil\n\t}\n\treturn nil, nil\n}", "func dataHostRead(d *schema.ResourceData, m interface{}) error {\n\tparams := zabbix.Params{\n\t\t\"selectInterfaces\": \"extend\",\n\t\t\"selectParentTemplates\": \"extend\",\n\t\t\"selectGroups\": \"extend\",\n\t\t\"selectMacros\": \"extend\",\n\t\t\"selectTags\": \"extend\",\n\t\t\"selectInventory\": \"extend\",\n\t\t\"filter\": map[string]interface{}{},\n\t}\n\n\tlookups := []string{\"host\", \"hostid\", \"name\"}\n\tfor _, k := range lookups {\n\t\tif v, ok := d.GetOk(k); ok 
{\n\t\t\tparams[\"filter\"].(map[string]interface{})[k] = v\n\t\t}\n\t}\n\n\tif len(params[\"filter\"].(map[string]interface{})) < 1 {\n\t\treturn errors.New(\"no host lookup attribute\")\n\t}\n\tlog.Debug(\"performing data lookup with params: %#v\", params)\n\n\treturn hostRead(d, m, params)\n}", "func DefaultReadEmail(ctx context.Context, in *Email, db *gorm.DB) (*Email, error) {\n\tif in == nil {\n\t\treturn nil, errors.New(\"Nil argument to DefaultReadEmail\")\n\t}\n\tormParams, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tormResponse := EmailORM{}\n\tif err = db.Set(\"gorm:auto_preload\", true).Where(&ormParams).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultCreateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (handler WeatherReadingHandler) Index(c *gin.Context) {\n\tweatherReadings := []m.WeatherReading{}\t\n\tvar query = handler.db\n\n\tstartParam,startParamExist := c.GetQuery(\"start\")\n\tlimitParam,limitParamExist := c.GetQuery(\"limit\")\n\n\t//start param exist\n\tif startParamExist {\n\t\tstart,_ := strconv.Atoi(startParam)\n\t\tif start != 0 {\n\t\t\tquery = 
query.Offset(start).Order(\"created_at asc\")\t\t\n\t\t} else {\n\t\t\tquery = query.Offset(0).Order(\"created_at desc\")\n\t\t}\n\t} \n\n\t//limit param exist\n\tif limitParamExist {\n\t\tlimit,_ := strconv.Atoi(limitParam)\n\t\tquery = query.Limit(limit)\n\t} else {\n\t\tquery = query.Limit(10)\n\t}\n\n\tquery.Order(\"created_at desc\").Find(&weatherReadings)\n\tc.JSON(http.StatusOK,weatherReadings)\n\treturn\n}", "func (c *IloClient) GetAggHealthDataDell(model string) ([]HealthList, error) {\n\n\tif strings.ToLower(model) == \"r730xd\" {\n\n\t\treturn nil, nil\n\n\t} else if strings.ToLower(model) == \"r740xd\" {\n\t\turl := c.Hostname + \"/redfish/v1/UpdateService/FirmwareInventory\"\n\n\t\tresp, _, _, err := queryData(c, \"GET\", url, nil)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tvar (\n\t\t\tx MemberCountDell\n\t\t\t_healthdata []HealthList\n\t\t)\n\n\t\tjson.Unmarshal(resp, &x)\n\n\t\tfor i := range x.Members {\n\t\t\tr, _ := regexp.Compile(\"Installed\")\n\t\t\tif r.MatchString(x.Members[i].OdataId) == true {\n\t\t\t\t_url := c.Hostname + x.Members[i].OdataId\n\t\t\t\tresp, _, _, err := queryData(c, \"GET\", _url, nil)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\n\t\t\t\tvar y FirmwareDataDell\n\n\t\t\t\tjson.Unmarshal(resp, &y)\n\n\t\t\t\thealthData := HealthList{\n\t\t\t\t\tName: y.Name,\n\t\t\t\t\tState: y.Status.State,\n\t\t\t\t\tHealth: y.Status.Health,\n\t\t\t\t}\n\n\t\t\t\t_healthdata = append(_healthdata, healthData)\n\n\t\t\t}\n\t\t}\n\n\t\treturn _healthdata, nil\n\t}\n\treturn nil, nil\n}", "func DefaultReadContact(ctx context.Context, in *Contact, db *gorm.DB) (*Contact, error) {\n\tif in == nil {\n\t\treturn nil, errors.New(\"Nil argument to DefaultReadContact\")\n\t}\n\tormParams, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\taccountID, err := auth.GetAccountID(ctx, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tormParams.AccountID = accountID\n\tormResponse := 
ContactORM{}\n\tif err = db.Set(\"gorm:auto_preload\", true).Where(&ormParams).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (at *AnswerTypeDAO) ReadOne(id int32) {}", "func (c *UserRepoImpl) Read(id int) (*model.User, error) {\n\tuser := new(model.User)\n\n\tif err := c.db.Table(\"user\").Where(\"user_id = ?\", id).First(&user).Error; err != nil {\n\t\tlogrus.Error(err)\n\t\treturn nil, errors.New(\"get user data : error \")\n\t}\n\n\treturn user, nil\n}", "func (d *DepartmentRepoImpl) Read(deptno int32) (models.Department, error) {\n\tdept := models.Department{}\n\tgetDepartment := d.DB.Table(\"departments\").Where(\"id = ?\", deptno).Find(&dept)\n\tif getDepartment.Error != nil {\n\t\tfmt.Println(\"[Repo Error] :\", getDepartment.Error)\n\t\treturn dept, getDepartment.Error\n\t}\n\n\treturn dept, nil\n}", "func (self *Client) ReadMetric(t MetricType, id string, o ...Modifier) ([]*Datapoint, error) {\n\to = prepend(o, self.Url(\"GET\", TypeEndpoint(t), SingleMetricEndpoint(id), DataEndpoint()))\n\n\tr, err := self.Send(o...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdefer r.Body.Close()\n\n\tif r.StatusCode == http.StatusOK {\n\t\tb, err := ioutil.ReadAll(r.Body)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// Check for GaugeBucketpoint and so on for the rest.. 
uh\n\t\tdp := []*Datapoint{}\n\t\tif b != nil {\n\t\t\tif err = json.Unmarshal(b, &dp); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\treturn dp, nil\n\t} else if r.StatusCode > 399 {\n\t\treturn nil, self.parseErrorResponse(r)\n\t}\n\n\treturn nil, nil\n}", "func (r *Resource) getAllHandler(c *gin.Context) {\n // fetch all from database\n meals, err := r.db.GetAllMenuMeals()\n if err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n // return result as JSON\n c.JSON(http.StatusOK, meals)\n}", "func ReadLedgerEntries(rows *sql.Rows, a *LedgerEntry) error {\n\treturn rows.Scan(&a.LEID, &a.BID, &a.JID, &a.JAID, &a.LID, &a.RAID, &a.RID, &a.TCID, &a.Dt, &a.Amount, &a.Comment, &a.CreateTS, &a.CreateBy, &a.LastModTime, &a.LastModBy)\n}", "func getLoan(l *models.Loan, db *gorm.DB) error {\n\terr := db.Select(\"id,created_at,updated_at,initial_value,interest,quota,balance,cod_loan_state,cod_client,cod_collection,cod_user\").First(l).GetErrors()\n\tif len(err) != 0 {\n\t\treturn errors.New(\"no se encuentra\")\n\t}\n\treturn nil\n}", "func (l *Logger) Read(n int) ([]LogEntry, error) {\n\tentries, err := l.client.readLog(n)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tids := make(map[int64]*LogEntry)\n\tlogEntries := make([]LogEntry, 0, len(entries))\n\tfor _, le := range entries {\n\t\tentry, ok := ids[le.ID]\n\t\tif !ok {\n\t\t\tnewEntry := LogEntry{\n\t\t\t\tTime: time.Unix(le.Time, 0).UTC(),\n\t\t\t\tRemoteAddr: le.RemoteAddr,\n\t\t\t\tHijacked: le.Hijacked,\n\t\t\t\tQtype: le.Qtype,\n\t\t\t\tQuestion: le.Question,\n\t\t\t}\n\t\t\tlogEntries = append(logEntries, newEntry)\n\t\t\tentry = &logEntries[len(logEntries)-1]\n\t\t\tids[le.ID] = entry\n\t\t}\n\t\tif le.Answer != \"\" {\n\t\t\tentry.Answers = append(entry.Answers, le.Answer)\n\t\t}\n\t}\n\treturn logEntries, nil\n}", "func (d *Dao) GetAllDayExpenseInfo(c context.Context, beginDate time.Time, ctype, from, limit int) (infos []*model.BudgetDayStatistics, err 
error) {\n\trows, err := d.rddb.Query(c, _getAllDayExpenseSQL, beginDate, ctype, from, limit)\n\tif err != nil {\n\t\tlog.Error(\"dao.GetAllDayExpenseInfo query error(%v)\", err)\n\t\treturn\n\t}\n\tdefer rows.Close()\n\tfor rows.Next() {\n\t\ta := &model.BudgetDayStatistics{}\n\t\tif err = rows.Scan(&a.DayExpense, &a.UpCount, &a.AvCount, &a.UpAvgExpense, &a.AvAvgExpense, &a.TotalExpense, &a.Date); err != nil {\n\t\t\tlog.Error(\"dao.GetAllDayExpenseInfo scan error(%v)\", err)\n\t\t\treturn\n\t\t}\n\t\tinfos = append(infos, a)\n\t}\n\terr = rows.Err()\n\treturn\n}", "func (r *record) read() (err error) {\n\treply, err := sched0.cache.Get(prefixTask + r.ID)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = json.Unmarshal([]byte(reply), r)\n\treturn\n}", "func (self *botStats) load(t db.Table, index int) error {\n\tkey := fmt.Sprintf(\"%s-%2d\",botStatsRecordKey,index)\n\terr := t.Get(key,self)\n\tif len(self.Rows) == 0 {\n\t\tself.newRow()\n\t}\n\treturn err\n}", "func (c *Client) ReadMetric(t MetricType, id string, o ...Modifier) ([]*Datapoint, error) {\n\to = prepend(o, c.Url(\"GET\", TypeEndpoint(t), SingleMetricEndpoint(id), DataEndpoint()))\n\n\tr, err := c.Send(o...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdefer r.Body.Close()\n\n\tif r.StatusCode == http.StatusOK {\n\t\tb, err := ioutil.ReadAll(r.Body)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// Check for GaugeBucketpoint and so on for the rest.. 
uh\n\t\tdp := []*Datapoint{}\n\t\tif b != nil {\n\t\t\tif err = json.Unmarshal(b, &dp); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\treturn dp, nil\n\t} else if r.StatusCode > 399 {\n\t\treturn nil, c.parseErrorResponse(r)\n\t}\n\n\treturn nil, nil\n}", "func (s *CqlStore) Read(id ID) (Item, error) {\n\tvar item Item\n\tsts, err := s.History(id, 1)\n\tif err != nil {\n\t\treturn item, err\n\t}\n\tif len(sts) == 1 && sts[0].Status == \"ALIVE\" {\n\t\titem = sts[0].Item\n\t}\n\treturn item, nil\n}", "func DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationPersonalInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationPersonalInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func (dao *DAOName) Read(m *ReferenceModel) ([]ReferenceModel, error) {\n\tretVal := []ReferenceModel{}\n\tif err := 
dao.db.Where(m).Find(&retVal).Error; err != nil {\n\t\treturn nil, err\n\t}\n\treturn retVal, nil\n}", "func (mgr *EntryManager) GetOne(entryID int) *Entry {\n\tdb, err := sql.Open(\"postgres\", mgr.ConnStr)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tdefer db.Close()\n\n\tqueryStr := `\n\t\tSELECT title, date_posted, tags \n\t\tFROM entries \n\t\tWHERE id = $1;\n\t`\n\n\t// Create a \"prepared\" SQL statement context\n\tstmt, err := db.Prepare(queryStr)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn nil\n\t}\n\tdefer stmt.Close()\n\n\t// Fetch Entry record\n\tvar (\n\t\ttitle, tagsText string\n\t\tdatePosted time.Time\n\t)\n\terr = stmt.QueryRow(entryID).Scan(&title, &datePosted, &tagsText)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn nil\n\t}\n\n\t// Populate Entry record\n\treturn &Entry{\n\t\tID: entryID,\n\t\tTitle: title,\n\t\tDatePosted: datePosted,\n\t\tTags: strings.Split(tagsText, \",\"),\n\t}\n}", "func (mgr *EntryManager) GetAll() []*Entry {\n\tvar entries []*Entry\n\n\tdb, err := sql.Open(\"postgres\", mgr.ConnStr)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tdefer db.Close()\n\n\t// Generate a Rows iterator from a SQL query\n\tqueryStr := \"SELECT id, title, date_posted, tags FROM entries ORDER BY id;\"\n\trows, err := db.Query(queryStr)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn nil\n\t}\n\tdefer rows.Close()\n\n\t// Iterate over rows and populate Entry records\n\tfor rows.Next() {\n\t\tvar (\n\t\t\tentryID int\n\t\t\ttitle, tagsText string\n\t\t\tdatePosted time.Time\n\t\t)\n\n\t\terr = rows.Scan(&entryID, &title, &datePosted, &tagsText)\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t\treturn nil\n\t\t}\n\n\t\tentries = append(entries, &Entry{\n\t\t\tID: entryID,\n\t\t\tTitle: title,\n\t\t\tDatePosted: datePosted,\n\t\t\tTags: strings.Split(tagsText, \",\"),\n\t\t},\n\t\t)\n\t}\n\n\treturn entries\n}", "func (HealthMenstruationDailyEntryORM) TableName() string {\n\treturn \"health_menstruation_daily_entries\"\n}", 
"func (m *MonkeyWrench) Read(table string, keys []spanner.KeySet, columns []string) ([]*spanner.Row, error) {\n\t// Default to all keys.\n\tvar spannerKeys = spanner.AllKeys()\n\n\t// If we have some specified keys, use those instead.\n\tif len(keys) > 0 {\n\t\tspannerKeys = spanner.KeySets(keys...)\n\t}\n\n\t// Execute the query.\n\titer := m.Client.Single().Read(m.Context, table, spannerKeys, columns)\n\treturn getResultSlice(iter)\n}", "func ReadExpenses(rows *sql.Rows, a *Expense) error {\n\treturn rows.Scan(&a.EXPID, &a.RPEXPID, &a.BID, &a.RID, &a.RAID, &a.Amount, &a.Dt, &a.AcctRule, &a.ARID, &a.FLAGS, &a.Comment, &a.CreateTS, &a.CreateBy, &a.LastModTime, &a.LastModBy)\n}", "func (repo *Repository) Read(ctx context.Context, claims auth.Claims, req ChecklistReadRequest) (*Checklist, error) {\n\tspan, ctx := tracer.StartSpanFromContext(ctx, \"internal.checklist.Read\")\n\tdefer span.Finish()\n\n\t// Validate the request.\n\tv := webcontext.Validator()\n\terr := v.Struct(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Filter base select query by id\n\tquery := sqlbuilder.NewSelectBuilder()\n\tquery.Where(query.Equal(\"id\", req.ID))\n\n\tres, err := find(ctx, claims, repo.DbConn, query, []interface{}{}, req.IncludeArchived)\n\tif err != nil {\n\t\treturn nil, err\n\t} else if res == nil || len(res) == 0 {\n\t\terr = errors.WithMessagef(ErrNotFound, \"checklist %s not found\", req.ID)\n\t\treturn nil, err\n\t}\n\n\tu := res[0]\n\treturn u, nil\n}", "func DefaultReadProfile(ctx context.Context, in *Profile, db *gorm1.DB) (*Profile, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == \"\" {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(ProfileORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = 
gorm2.ApplyFieldSelection(ctx, db, nil, &ProfileORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(ProfileORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := ProfileORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(ProfileORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (c *Log) ReadSingle(module, version string) (eventlog.Event, error) {\n\treturn eventlog.Event{}, errors.New(\"TODO: implement\")\n}", "func (dao *OHLCVDao) GetAll() ([]types.Tick, error) {\n\tvar response []types.Tick\n\terr := db.Get(dao.dbName, dao.collectionName, bson.M{}, 0, 0, &response)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn nil, err\n\t}\n\n\treturn response, nil\n}", "func (r DasboardRepository) GetData(q validator.DashboardRequest) (*model.Dashboard, error) {\n\tnow := time.Now()\n\tif q.MonthStart.IsZero() {\n\t\tq.MonthStart = time.Date(now.Year(), time.January, 1, 0, 0, 0, 0, time.Local)\n\t}\n\tif q.MonthEnd.IsZero() {\n\t\tq.MonthEnd = time.Date(now.Year(), time.December, 31, 0, 0, 0, 0, time.Local)\n\t}\n\n\tsql := `select x.yearmonth,x.type,sum(x.count) count,sum(x.alerts) alerts,sum(x.news) news,sum(x.bigger) bigger\n\tfrom (\n\t\tselect q.yearmonth,q.type,count(*) count,sum(q.alerts) alerts,0 news,0 bigger\n\t\tfrom (\n\t\t\tselect to_char(a.created_at, 'YYYY-MM') yearmonth,a.type,(\n\t\t\t\tselect count(*) from alert_user where alert_user.alert_id=a.id\n\t\t\t) alerts\n\t\t\tfrom alert a\n\t\t) q\n\t\t group by q.yearmonth,q.type\n\t\tunion all\n\t\tselect to_char(c.created_at, 'YYYY-MM') yearmonth,4,0,0,count(*),0\n\t\tfrom customer c\n\t\tgroup by yearmonth\n\t\tunion all\n\t\tselect 
to_char(u.created_at, 'YYYY-MM') yearmonth,3,0,0,count(*),0\n\t\tfrom \"user\" u\n\t\tgroup by yearmonth\n\t\tunion all\n\t\tselect to_char(p.created_at, 'YYYY-MM') yearmonth,1,0,0,count(*),0\n\t\tfrom public_agent p\n\t\tgroup by yearmonth\n\t\tunion all\n\t\tselect z.yearmonth,1,0,0,0,count(*)\n\t\tfrom (\n\t\t\tselect to_char(a.created_at, 'YYYY-MM') yearmonth,a.type,(\n\t\t\t\tselect count(*) from alert_user where alert_user.alert_id=a.id\n\t\t\t) alerts\n\t\t\tfrom alert a\n\t\t\twhere a.type=2 and not a.public_agent_id is null\n\t\t) z\n\t\t group by z.yearmonth,z.type\n\t) x\n\twhere x.yearmonth >= ? and x.yearmonth <= ?\n\tgroup by x.yearmonth,x.type\n\torder by x.yearmonth,x.type`\n\tresults, err := r.DB.Query(sql, q.MonthStart.Format(YearMonthFormat), q.MonthEnd.Format(YearMonthFormat))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tsqlTotal := `select 4 \"type\",count(*) count\n\tfrom customer c\n\tunion all\n\tselect 3,count(*)\n\tfrom \"user\" u\n\tunion all\n\tselect 1,count(*)\n\tfrom public_agent p`\n\n\ttotals, err := r.DB.Query(sqlTotal)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\td := builder.DashboardFromDB(results, totals)\n\n\treturn d, nil\n}", "func ReadExpense(row *sql.Row, a *Expense) error {\n\terr := row.Scan(&a.EXPID, &a.RPEXPID, &a.BID, &a.RID, &a.RAID, &a.Amount, &a.Dt, &a.AcctRule, &a.ARID, &a.FLAGS, &a.Comment, &a.CreateTS, &a.CreateBy, &a.LastModTime, &a.LastModBy)\n\tSkipSQLNoRowsError(&err)\n\treturn err\n}", "func (r *REST) defaultOnRead(obj runtime.Object) {\n\tswitch s := obj.(type) {\n\tcase *api.PersistentVolumeClaim:\n\t\tr.defaultOnReadPvc(s)\n\tcase *api.PersistentVolumeClaimList:\n\t\tr.defaultOnReadPvcList(s)\n\tdefault:\n\t\t// This was not an object we can default. 
This is not an error, as the\n\t\t// caching layer can pass through here, too.\n\t}\n}", "func (migration *Migration) RunReadQuery(query string, args ...interface{}) (map[string][]string, error) {\n\tif args == nil {\n\t\tglog.Infof(\"mig_id=%d: Running query '%s'.\", migration.Id, query)\n\t} else {\n\t\tglog.Infof(\"mig_id=%d: Running query '%s' (args: %v).\", migration.Id, query, args)\n\t}\n\tresponse, err := migration.DbClient.QueryReturnColumnDict(query, args...)\n\tif err != nil {\n\t\tglog.Errorf(\"mig_id=%d: Query '%s' failed (error: %s).\", migration.Id, query, err)\n\t\treturn nil, NewErrQueryFailed(query, err)\n\t}\n\tglog.Infof(\"mig_id=%d: Query response was '%v'\", migration.Id, response)\n\treturn response, nil\n}", "func (o *Object) readEntry(ctx context.Context) (*files.FileMetadata, error) {\n\treturn o.fs.getFileMetadata(ctx, o.remotePath())\n}", "func (c *CacheTable) Read(key interface{}) {\n\n}", "func (cc *ConfigClient) ReadEntry(entryKey string, defaultVal string) (string, error) {\n\n\tif cc.shutdownInvoked {\n\t\treturn defaultVal, errors.New(\"all connections have been closed via Close()\")\n\t}\n\n\trsp, err := cc.etcd.Get(context.Background(), cc.expandKey(entryKey))\n\tif err != nil {\n\t\treturn defaultVal, err\n\t}\n\n\tif len(rsp.Kvs) == 0 {\n\t\treturn defaultVal, nil\n\t}\n\n\treturn string(rsp.Kvs[0].Value), nil\n}", "func (d *Dao) ReadHumiture() (resp interface{}, err error) {\n\treq := SensorHumitureUnit.Request()\n\toutput, err := send(d, req.Bytes())\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn resp, err\n\t}\n\thumidity := binary.BigEndian.Uint16(output[1:3])\n\ttemperature := binary.BigEndian.Uint16(output[3:6])\n\tresp = []float64{dividedByTen(humidity), dividedByTen(temperature)}\n\treturn resp, nil\n}", "func (s *Source) ReadHealth() (settings settings.Health, err error) {\n\tfor _, source := range s.sources {\n\t\tsettingsFromSource, err := source.ReadHealth()\n\t\tif err != nil {\n\t\t\treturn settings, 
fmt.Errorf(\"reading from %s: %w\", source, err)\n\t\t}\n\t\tsettings.MergeWith(settingsFromSource)\n\t}\n\tsettings.SetDefaults()\n\n\terr = settings.Validate()\n\tif err != nil {\n\t\treturn settings, err\n\t}\n\n\treturn settings, nil\n}", "func (repo *SingleStoryRepository) FindAllStoriesForLoggedUser(userId uuid.UUID) []model.SingleStory {\n\tvar stories []model.SingleStory\n\trepo.Database.Select(\"*\").Where(\"user_id = ? and is_deleted = ?\", userId, false).Find(&stories)\n\n\tfor i:=0; i< len(stories); i++{\n\t\tif time.Now().After(stories[i].CreationDate.Add(24 * time.Hour)){\n\t\t\t// PASSED TIME SHOULD SET STORY AS EXPIRED\n\t\t\t//stories[i].IsExpired = true\n\t\t\trepo.Database.Model(&model.SingleStory{}).Where(\"id = ?\", stories[i].ID).Update(\"is_expired\", true)\n\t\t\trepo.Database.Model(&model.Story{}).Where(\"id = ?\", stories[i].ID).Update(\"is_expired\", true)\n\t\t}\n\t}\n\n\treturn stories\n}", "func (c *ConsulDB) ReadEntry(key string) (string, bool, error) {\n\n\tkv := c.consulClient.KV()\n\n\tpair, _, err := kv.Get(key, nil)\n\n\tif pair == nil {\n\t\treturn string(\"No value found for ID: \" + key), false, err\n\t}\n\treturn string(pair.Value), true, err\n}", "func (d *Dao) GetDayTotalExpenseInfo(c context.Context, date time.Time, ctype int) (totalExpense int64, err error) {\n\terr = d.rddb.QueryRow(c, _getDayTotalExpenseSQL, date, ctype).Scan(&totalExpense)\n\treturn\n}", "func GetDailyReport(w http.ResponseWriter, req *http.Request, _ httprouter.Params) {\n\tif isLoggedIn(w, req) {\n\t\tt := template.Must(template.ParseGlob(\"views/components/navbar.comp\"))\n\t\tt.ParseFiles(\"views/report.html\")\n\t\tdata := struct {\n\t\t\tU User\n\t\t}{\n\t\t\tcurrentUser,\n\t\t}\n\t\tt.ExecuteTemplate(w, \"report.html\", data)\n\t}\n}", "func getEntityByUuid(uuid string) (map[string]interface{}, error) {\r\n\r\n\ttypeName := strings.Split(uuid, \"%\")[0]\r\n\tstoreId := typeName[0:strings.Index(typeName, \".\")]\r\n\r\n\tvar query 
EntityQuery\r\n\tquery.TYPENAME = typeName\r\n\tquery.TypeName = typeName\r\n\tquery.Fields = []string{}\r\n\tquery.Query = typeName + `.UUID==\"` + uuid + `\"`\r\n\r\n\tstore := GetServer().GetDataManager().getDataStore(storeId)\r\n\tqueryStr, _ := json.Marshal(query)\r\n\tvalues, err := store.Read(string(queryStr), []interface{}{}, []interface{}{})\r\n\r\n\tif err == nil {\r\n\t\t// init it child values.\r\n\t\tinitChilds(values[0][0].(map[string]interface{}))\r\n\t\t// return the resulting map.\r\n\t\treturn values[0][0].(map[string]interface{}), nil\r\n\t}\r\n\r\n\treturn nil, err\r\n}", "func (c *AllergyIntoleranceController) Read(ctx *app.ReadAllergyIntoleranceContext) error {\n\t//var sqlParams []interface{}\n\tx := json.RawMessage{}\n\n\tabsPath, _ := filepath.Abs(\"bundle.json\")\n\tfmt.Println(absPath)\n\tfile, err := os.Open(absPath)\n\tif err == nil {\n\t\t_ = json.NewDecoder(file).Decode(&x)\n\t}\n\tfile.Close()\n\n\t//getJson(\"http://nprogram.azurewebsites.net/Patient/1?_format=json\", patient)\n\t//getJson(\"http://localhost:3001\", patient)\n\t// //getJson(\"https://open-ic.epic.com/FHIR/api/FHIR/DSTU2/Patient/Tbt3KuCY0B5PSrJvCu2j-PlK.aiHsu2xUjUM8bWpetXoB\", patient)\n\t//_, err = json.NewDecoder(file).Decode(patient)\n\t//, err := cc.Get(\"http://fhirtest.uhn.ca/baseDstu2/Patient/EXexample\")\n\t//r, err := cc.Get(\"http://nprogram.azurewebsites.net/Patient/1?_format=json\")\n\n\t// if err != nil {\n\t// \tfmt.Println(\"A timeout error occured\")\n\t// \tos.Exit(3)\n\t// }\n\t// defer r.Body.Close()\n\n\t// body, _ := ioutil.ReadAll(r.Body)\n\n\t// isJSON := IsJSON(string(body))\n\t// if isJSON == true {\n\t// \tfmt.Println(\"Yesssssssssssssssssssssssssssssssssssssssssssssssss\")\n\t// \t_ = json.Unmarshal(body, &x)\n\t// } else {\n\t// \tfmt.Println(\"NOoooooooooooooooooooooooooooooooooooooooooooo\")\n\t// \t_ = xml.Unmarshal(body, &x)\n\t// }\n\n\t//err = json.Unmarshal([]byte(r), &f)\n\n\t// //b, _ := json.Marshal(body)\n\t// return 
json.Unmarshal(body, &target)\n\t//fmt.Printf(\"%s\", x)\n\n\tfmt.Println()\n\t//main_rt := gjson.Get(string(x), \"resourceType\")\n\t//sub_rt := gjson.Get(string(x), \"entry.#.resource.resourceType\")\n\t// m, ok := gjson.Parse(string(x)).Value().(map[string]interface{})\n\t// if !ok {\n\t// \t// not a map\n\t// }\n\n\t//value1 := gjson.Get(string(x), \"*\")\n\t//value2 := gjson.Get(string(x), \"entry.#.resource.code.coding.#.display\")\n\t//value2 := gjson.Get(string(x), \"entry.#.resource.component.#.valueQuantity.value\")\n\t//value2 := gjson.Get(string(x), \"entry.#.resource.component.#.code.coding.#.display\")\n\n\t//value3 := gjson.Get(string(x), \"identifier\")\n\t//fmt.Println(\"value *:\", value1.String())\n\n\t//fmt.Println(\"resource:\", value11.String())\n\t//fmt.Println(\"entry:\", value2.String())\n\t//fmt.Println(\"value:\", value3.String())\n\t//result := gjson.Get(string(x), \"entry.#.resource\")\n\n\t// //results := gjson.GetMany(string(x), \"entry.#.resource.code.coding.#.display\", \"entry.#.resource.component.#.code.coding.#.display\", \"entry.#.resource.component.#.valueQuantity.Value\")\n\t// results := gjson.GetMany(string(x), \"entry.#.resource.component.#.code.coding.#.display\", \"entry.#.resource.component.#.valueQuantity.value\")\n\n\t//_ = json.Unmarshal(x, &patient)\n\t//_, _ = json.Marshal(string(x))\n\n\t//_ = json.NewDecoder(r.Body).Decode(&x)\n\t//fmt.Println(results)\n\t//fmt.Printf(\"Indentifer:\", x.Matches[0].Ad, \"\\n\")\n\t//fmt.Println(\"Address:\", x.Address)\n\t//fmt.Println(\"Telecom:\", x.Telecom)\n\t//fmt.Println(\"CareProvider:\", x.CareProvider)\n\t//fmt.Println(\"Name:\", x.Name)\n\t//fmt.Printf(\"Contact:\", x.Contact)\n\n\t//fmt.Println(\"x is equal to:\", x.Address[0].Text)\n\n\t// res := &app.Patient{}\n\t// res.Active = patient.Active\n\t// res.BirthDate = patient.BirthDate\n\t// res.Gender = patient.Gender\n\t// //res.Telecom = 
patient.Telecom\n\t//res.Address=patient.Address\n\t//res.Address=\n\n\t//json.Marshal(patient)\n\n\t// \ttype int64array []int64\n\n\t// func (a int64array) Value() (driver.Value, error) {\n\t// // Format a in PostgreSQL's array input format {1,2,3} and return it as as string or []byte.\n\t// }\n\t//_, _ = json.Marshal(x)\n\n\t//g, _ := json.Marshal(x)\n\t//fmt.Println(string(g))\n\t// _, err = stmt.Exec(uuid.NewV4().String(), string(g))\n\t// if err != nil {\n\t// \tfmt.Println(err.Error())\n\t// \tfmt.Println(\"Error with db\")\n\t// }\n\n\t//json.Marshal(x)\n\n\t// t := app.Observation{}\n\t// s := reflect.ValueOf(&t).Elem()\n\t// typeOfT := s.Type()\n\n\t// for i := 0; i < s.NumField(); i++ {\n\t// \t//f := s.Field(i)\n\t// \t//fmt.Printf(\"%d: %s %s = %v\\n\", i,\n\t// \t//typeOfT.Field(i).Name, f.Type(), f.Interface())\n\t// \t//test2 := fmt.Sprintf(\"%T\", s.Field(i))\n\t// \ttest2 := fmt.Sprintf(\"%T\", s.Field(i))\n\n\t// \tfmt.Println(reflect.TypeOf(test2).Kind())\n\t// \t//fmt.Println(*test2)\n\n\t// \ttest := fmt.Sprintf(\"%s\", strings.ToLower(typeOfT.Field(i).Name))\n\t// \tfmt.Println(test)\n\n\t// \tvalue := gjson.Get(string(x), test)\n\t// \tfmt.Println()\n\t// \tfmt.Println(\"value *:\", value.String())\n\n\t// }\n\n\t//res := &app.AllergyIntoleranceMedia{}\n\treturn nil\n\t//return ctx.OK(res)\n\n}", "func (entity *MilitaryHistory) Get(context *pg.DB, account int64) (int, error) {\n\tentity.AccountID = account\n\n\toptions := &orm.CreateTableOptions{\n\t\tTemp: false,\n\t\tIfNotExists: true,\n\t}\n\n\tvar err error\n\tif err = context.CreateTable(&MilitaryHistory{}, options); err != nil {\n\t\treturn entity.ID, err\n\t}\n\n\tif entity.ID != 0 {\n\t\terr = context.Select(entity)\n\t}\n\n\tif entity.HasServedID != 0 {\n\t\tif _, err := entity.HasServed.Get(context, account); err != nil {\n\t\t\treturn entity.ID, err\n\t\t}\n\t}\n\n\tif entity.ListID != 0 {\n\t\tif _, err := entity.List.Get(context, account); err != nil {\n\t\t\treturn entity.ID, 
err\n\t\t}\n\t}\n\n\treturn entity.ID, err\n}", "func resourceHostRead(d *schema.ResourceData, m interface{}) error {\n\tlog.Debug(\"Lookup of hostgroup with id %s\", d.Id())\n\n\treturn hostRead(d, m, zabbix.Params{\n\t\t\"selectInterfaces\": \"extend\",\n\t\t\"selectParentTemplates\": \"extend\",\n\t\t\"selectGroups\": \"extend\",\n\t\t\"selectMacros\": \"extend\",\n\t\t\"selectTags\": \"extend\",\n\t\t\"selectInventory\": \"extend\",\n\t\t\"hostids\": d.Id(),\n\t})\n}", "func (h *provider) Read(ctx wfContext.Context, v *value.Value, act types.Action) error {\n\tobj := new(unstructured.Unstructured)\n\tif err := v.UnmarshalTo(obj); err != nil {\n\t\treturn err\n\t}\n\tkey, err := client.ObjectKeyFromObject(obj)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif key.Namespace == \"\" {\n\t\tkey.Namespace = \"default\"\n\t}\n\tif err := h.cli.Get(context.Background(), key, obj); err != nil {\n\t\treturn err\n\t}\n\treturn v.FillObject(obj.Object, \"result\")\n}", "func resourceRepositoryRead(d *schema.ResourceData, meta interface{}) error {\n\treturn nil\n}", "func DefaultPatchHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationPersonalInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationPersonalInfo(ctx, &HealthMenstruationPersonalInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn 
nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationPersonalInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func (t *DbService) Read(request *ReadRequest) (*ReadResponse, error) {\n\trsp := &ReadResponse{}\n\treturn rsp, t.client.Call(\"db\", \"Read\", request, rsp)\n}", "func (this *dataStore) Read(queryName string, dest, arg interface{}) (error) {\r\n\r\n\tif stmt, err := this.namedStmt(queryName, dest); err != nil {\r\n\t\treturn err\r\n\t} else if stmt.query.Command != `select` {\r\n\t\treturn fmt.Errorf(`invalid SQL command for Read: %s`, stmt.query.Command)\r\n\t} else if stmt.query.MultiRow == true {\r\n\t\treturn stmt.Select(dest, arg)\r\n\t} else {\r\n\t\treturn stmt.Get(dest, arg)\r\n\t}\r\n}", "func (o *Object) readEntryAndSetMetadata(ctx context.Context) error {\n\t// Last resort set time from client\n\tif !o.modTime.IsZero() {\n\t\treturn nil\n\t}\n\tentry, err := o.readEntry(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn o.setMetadataFromEntry(entry)\n}", "func GetLogOne(c *gin.Context) {\n\tid := c.GetInt(\"id\")\n\tfmt.Println(id)\n\n\tlogs, _ := dao.GetLogsFirst(fmt.Sprintf(\"id = %d\", id), []string{\"id desc\"})\n\n\tresponse.JsonResponse(c, response.SUCCESS, logs)\n}", "func (w *Wug) GetRawHourlyTenDay(query *Query) ([]byte, error) {\n\treturn w.Get(HourTenDay, query)\n}", "func (p *GORMPersistence) 
GetData(pk tm.PersistenceKey) tm.Data {\n\tvar dataRecord ConversationData\n\tp.DB.Where(pk).Attrs(ConversationData{Data: datatypes.JSONMap{}}).FirstOrCreate(&dataRecord)\n\treturn dataRecord.Data\n}", "func (d *InfluxDevops) AvgMemAvailableDayByHour(q *Query) {\n\tinterval := d.AllInterval.RandWindow(24 * time.Hour)\n\n\tv := url.Values{}\n\tv.Set(\"db\", d.DatabaseName)\n\tv.Set(\"q\", fmt.Sprintf(\"SELECT mean(available) from mem where time >= '%s' and time < '%s' group by time(1h)\", interval.StartString(), interval.EndString()))\n\n\thumanLabel := []byte(\"Influx avg mem, all hosts, rand 1d by 1h\")\n\tq.HumanLabel = humanLabel\n\tq.HumanDescription = []byte(fmt.Sprintf(\"%s: %s\", humanLabel, interval.StartString()))\n\tq.Method = []byte(\"GET\")\n\tq.Path = []byte(fmt.Sprintf(\"/query?%s\", v.Encode()))\n\tq.Body = nil\n}", "func (s *storage) getEntry(index uint64, e *entry) error {\n\tb, err := s.log.Get(index)\n\tif err == log.ErrNotFound {\n\t\treturn err\n\t} else if err != nil {\n\t\tpanic(opError(err, \"Log.Get(%d)\", index))\n\t}\n\tif err = e.decode(bytes.NewReader(b)); err != nil {\n\t\tpanic(opError(err, \"log.Get(%d).decode()\", index))\n\t}\n\tif e.index != index {\n\t\tpanic(opError(fmt.Errorf(\"got %d, want %d\", e.index, index), \"log.Get(%d).index: \", index))\n\t}\n\treturn nil\n}", "func (list *APTAuditList) fetchOne(client *network.S3Head, key string) {\n\tclient.Head(key)\n\tif client.ErrorMessage != \"\" {\n\t\tfmt.Fprintln(os.Stderr, client.ErrorMessage)\n\t\tlist.flagError()\n\t\treturn\n\t}\n\tstrRecord := \"\"\n\tvar err error\n\trecord := client.StoredFile()\n\tif list.format == \"json\" {\n\t\tstrRecord, err = record.ToJson()\n\t} else {\n\t\tstrRecord, err = record.ToCSV(list.csvDelimiter)\n\t}\n\tif err != nil {\n\t\tfmt.Fprintln(os.Stderr, \"[Key\", key, \"]\", err.Error())\n\t\tlist.flagError()\n\t} else {\n\t\tlist.addResult(strRecord)\n\t}\n}", "func (i *InfluxDAO) QueryForSensorReadings(accountID, sensorID string, startTime, 
endTime int64) (*QueryForSensorReadingsResults, error) {\n\tresults := &QueryForSensorReadingsResults{accountID, sensorID, make([]*MinimalReading, 0)}\n\n\tres, err := i.queryDB(fmt.Sprintf(\"SELECT * from %s where sensor_id = '%s' and account_id = '%s' and time >= '%s' and time <= '%s' order by time desc\", sensorMeasurementsTableName, sensorID, accountID, time.Unix(startTime, 0).Format(time.RFC3339), time.Unix(endTime, 0).Format(time.RFC3339)))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif len(res) != 1 || res[0].Series == nil || len(res[0].Series) == 0 {\n\t\treturn results, nil\n\t}\n\n\trow := res[0].Series[0]\n\n\tfor _, rowValues := range row.Values {\n\t\trowReading := &MinimalReading{\n\t\t\tMeasurements: make([]Measurement, 0),\n\t\t}\n\t\tfor k, v := range rowValues {\n\t\t\tvalueName := row.Columns[k]\n\t\t\tif strings.Contains(valueName, \"time\") {\n\t\t\t\tvar timestamp time.Time\n\t\t\t\ttimestamp, err = time.Parse(time.RFC3339, v.(string))\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn results, err\n\t\t\t\t}\n\t\t\t\trowReading.Timestamp = timestamp.Unix()\n\t\t\t} else if strings.Contains(valueName, \"temperature\") {\n\t\t\t\tvar temperatureValue float64\n\t\t\t\ttemperatureValue, err = v.(json.Number).Float64()\n\t\t\t\tif err == nil {\n\t\t\t\t\trowReading.Measurements = append(rowReading.Measurements, Measurement{\n\t\t\t\t\t\tName: valueName,\n\t\t\t\t\t\tUnit: \"Celsius\",\n\t\t\t\t\t\tValue: temperatureValue,\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t} else if strings.Contains(valueName, \"humidity\") {\n\t\t\t\tvar humidityValue float64\n\t\t\t\thumidityValue, err = v.(json.Number).Float64()\n\t\t\t\tif err == nil {\n\t\t\t\t\trowReading.Measurements = append(rowReading.Measurements, Measurement{\n\t\t\t\t\t\tName: valueName,\n\t\t\t\t\t\tUnit: \"%\",\n\t\t\t\t\t\tValue: humidityValue,\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t} else if strings.Contains(valueName, \"soil_moisture\") {\n\t\t\t\tvar soilMoistureValue float64\n\t\t\t\tsoilMoistureValue, err = 
v.(json.Number).Float64()\n\t\t\t\tif err == nil {\n\t\t\t\t\trowReading.Measurements = append(rowReading.Measurements, Measurement{\n\t\t\t\t\t\tName: valueName,\n\t\t\t\t\t\tUnit: \"VWC\",\n\t\t\t\t\t\tValue: soilMoistureValue,\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tresults.Readings = append(results.Readings, rowReading)\n\t}\n\treturn results, err\n}", "func (q elasticClient) Fire(entry *logrus.Entry) (err error) {\n\t// Reflect on eventType and Key on their native type.\n\tentryStr, ok := entry.Data[\"EventType\"].(string)\n\tif !ok {\n\t\treturn nil\n\t}\n\tkeyStr, ok := entry.Data[\"Key\"].(string)\n\tif !ok {\n\t\treturn nil\n\t}\n\n\tswitch q.params.Format {\n\tcase formatNamespace:\n\t\t// If event matches as delete, we purge the previous index.\n\t\tif eventMatch(entryStr, []string{\"s3:ObjectRemoved:*\"}) {\n\t\t\t_, err = q.Client.Delete().Index(q.params.Index).\n\t\t\t\tType(\"event\").Id(keyStr).Do(context.Background())\n\t\t\tbreak\n\t\t} // else we update elastic index or create a new one.\n\t\t_, err = q.Client.Index().Index(q.params.Index).\n\t\t\tType(\"event\").\n\t\t\tBodyJson(map[string]interface{}{\n\t\t\t\t\"Records\": entry.Data[\"Records\"],\n\t\t\t}).Id(keyStr).Do(context.Background())\n\tcase formatAccess:\n\t\t// eventTime is taken from the first entry in the\n\t\t// records.\n\t\tevents, ok := entry.Data[\"Records\"].([]NotificationEvent)\n\t\tif !ok {\n\t\t\treturn esErrFunc(\"Unable to extract event time due to conversion error of entry.Data[\\\"Records\\\"]=%v\", entry.Data[\"Records\"])\n\t\t}\n\t\tvar eventTime time.Time\n\t\teventTime, err = time.Parse(timeFormatAMZ, events[0].EventTime)\n\t\tif err != nil {\n\t\t\treturn esErrFunc(\"Unable to parse event time \\\"%s\\\": %v\",\n\t\t\t\tevents[0].EventTime, err)\n\t\t}\n\t\t// Extract event time in milliseconds for Elasticsearch.\n\t\teventTimeStr := fmt.Sprintf(\"%d\", eventTime.UnixNano()/1000000)\n\t\t_, err = 
q.Client.Index().Index(q.params.Index).Type(\"event\").\n\t\t\tTimestamp(eventTimeStr).\n\t\t\tBodyJson(map[string]interface{}{\n\t\t\t\t\"Records\": entry.Data[\"Records\"],\n\t\t\t}).Do(context.Background())\n\t}\n\tif err != nil {\n\t\treturn esErrFunc(\"Error inserting/deleting entry: %v\", err)\n\t}\n\treturn nil\n}", "func read(ctx *cli.Context) error {\n\tif err := initStore(ctx); err != nil {\n\t\treturn err\n\t}\n\tif ctx.Args().Len() < 1 {\n\t\treturn errors.New(\"Key arg is required\")\n\t}\n\n\tenv, err := util.GetEnv(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// get the namespace\n\tns, err := namespace.Get(env.Name)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\topts := []store.ReadOption{\n\t\tstore.ReadFrom(ns, ctx.String(\"table\")),\n\t}\n\tif ctx.Bool(\"prefix\") {\n\t\topts = append(opts, store.ReadPrefix())\n\t}\n\tif ctx.Bool(\"suffix\") {\n\t\topts = append(opts, store.ReadSuffix())\n\t}\n\tif ctx.Uint(\"limit\") != 0 {\n\t\topts = append(opts, store.ReadLimit(ctx.Uint(\"limit\")))\n\t}\n\tif ctx.Uint(\"offset\") != 0 {\n\t\topts = append(opts, store.ReadLimit(ctx.Uint(\"offset\")))\n\t}\n\tif v := ctx.String(\"order\"); len(v) > 0 {\n\t\torder := store.OrderAsc\n\t\tif v == \"desc\" {\n\t\t\torder = store.OrderDesc\n\t\t}\n\t\topts = append(opts, store.ReadOrder(order))\n\t}\n\n\trecords, err := store.DefaultStore.Read(ctx.Args().First(), opts...)\n\tif err != nil {\n\t\tif err.Error() == \"not found\" {\n\t\t\treturn err\n\t\t}\n\t\treturn errors.Wrapf(err, \"Couldn't read %s from store\", ctx.Args().First())\n\t}\n\tswitch ctx.String(\"output\") {\n\tcase \"json\":\n\t\tjsonRecords, err := json.MarshalIndent(records, \"\", \" \")\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"failed marshalling JSON\")\n\t\t}\n\t\tfmt.Printf(\"%s\\n\", string(jsonRecords))\n\tdefault:\n\t\tif ctx.Bool(\"verbose\") {\n\t\t\tw := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0)\n\t\t\tfmt.Fprintf(w, \"%v \\t %v \\t %v\\n\", \"KEY\", \"VALUE\", 
\"EXPIRY\")\n\t\t\tfor _, r := range records {\n\t\t\t\tvar key, value, expiry string\n\t\t\t\tkey = r.Key\n\t\t\t\tif isPrintable(r.Value) {\n\t\t\t\t\tvalue = string(r.Value)\n\t\t\t\t} else {\n\t\t\t\t\tvalue = fmt.Sprintf(\"%#x\", r.Value[:20])\n\t\t\t\t}\n\t\t\t\tif r.Expiry == 0 {\n\t\t\t\t\texpiry = \"None\"\n\t\t\t\t} else {\n\t\t\t\t\texpiry = humanize.Time(time.Now().Add(r.Expiry))\n\t\t\t\t}\n\t\t\t\tfmt.Fprintf(w, \"%v \\t %v \\t %v\\n\", key, value, expiry)\n\t\t\t}\n\t\t\tw.Flush()\n\t\t\treturn nil\n\t\t}\n\t\tfor _, r := range records {\n\t\t\tfmt.Println(string(r.Value))\n\t\t}\n\t}\n\treturn nil\n}", "func getMedium(c cookoo.Context) (*Medium, error) {\n\tds, ok := c.HasDatasource(MediumDS)\n\tif !ok {\n\t\treturn nil, errors.New(\"Cannot find a Medium\")\n\t}\n\treturn ds.(*Medium), nil\n}", "func (db *MongoDBAccess) GetAll() ([]Record, error) {\n\tvar records []Record\n\tcursor, err := db.client.Database(db.database).Collection(\"days\").Find(context.Background(), bson.D{})\n\tif err != nil {\n\t\treturn []Record{}, err\n\t}\n\tdefer cursor.Close(context.Background())\n\tfor cursor.Next(context.Background()) {\n\t\tvar record Record\n\t\tif err = cursor.Decode(&record); err != nil {\n\t\t\treturn []Record{}, err\n\t\t}\n\t\trecords = append(records, record)\n\t}\n\treturn records, nil\n}", "func GetHealth(c *gin.Context) {\n\tservicer := c.MustGet(registry.ServiceKey).(registry.Servicer)\n\thealthCheckSearvice := servicer.NewHealthCheck()\n\n\tvar input model.HealthCheckSearchInput\n\n\terr := c.ShouldBindQuery(&input)\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\n\toutput, err := healthCheckSearvice.GetHealth(input.ID)\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, output)\n}", "func (self* userRestAPI) dailyStats(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", 
\"application/json\")\n\tjson,err := self.engine.SummaryStats()\n\tif err != nil {\n\t\tlogError(err)\n\t\thttp.Error(w, fmt.Sprintf(\"\\nFailed to retrieve daily stats with error '%s'\\n\",err), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tio.WriteString(w,json)\n}", "func calcMinMaxDailyHeartRate(data health.Data) health.MinMaxStruct {\n\tvar result health.MinMaxStruct\n\tmin := float64(0)\n\tmax := float64(0)\n\tfor _, entry := range data.HeartRate {\n\t\tif min == 0 || entry.Value < min {\n\t\t\tmin = entry.Value\n\t\t}\n\t\tif entry.Value > max {\n\t\t\tmax = entry.Value\n\t\t}\n\t}\n\n\tif len(data.HeartRate) > 0 {\n\t\tresult.StartDate = data.HeartRate[0].StartDate\n\t}\n\tresult.Min = min\n\tresult.Max = max\n\treturn result\n}", "func ReadLedgerEntry(row *sql.Row, a *LedgerEntry) error {\n\terr := row.Scan(&a.LEID, &a.BID, &a.JID, &a.JAID, &a.LID, &a.RAID, &a.RID, &a.TCID, &a.Dt, &a.Amount, &a.Comment, &a.CreateTS, &a.CreateBy, &a.LastModTime, &a.LastModBy)\n\tSkipSQLNoRowsError(&err)\n\treturn err\n}", "func (t Table) Read(d Data) ([]map[string]interface{}, error) {\n\tdb, err := openDB(t.Config)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer db.Close()\n\tvar rows *sql.Rows\n\tif d.KeyVal != nil {\n\t\trows, _ = db.Query(t.getReadStr(d), d.KeyVal)\n\t} else {\n\t\trows, _ = db.Query(t.getReadStr(d))\n\t}\n\tresult, err := rowsToJSON(rows)\n\tif err != nil {\n\t\treturn result, err\n\t}\n\treturn result, err\n}", "func (c InfluxDBClient) FetchAll(start int64, end int64) ([]EventModel, error) {\n\tlog.Printf(\"Fetch all events from now - %ds to now - %ds\", start, end)\n\n\tcmd := fmt.Sprintf(`SELECT * FROM %s\n\t\t\t\t\t\tWHERE time >= NOW() - %ds AND time <= NOW() - %ds`, eventsTableName, start, end)\n\n\tlog.Println(\"Query data with command\", cmd)\n\n\tresponse, err := c.queryDB(cmd)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn c.parseResponse(response)\n}" ]
[ "0.68675816", "0.65901476", "0.630962", "0.59756255", "0.57466877", "0.5386397", "0.5284462", "0.51186156", "0.5106763", "0.51045865", "0.50709534", "0.5024787", "0.49427426", "0.4860348", "0.48528224", "0.48139605", "0.47631463", "0.47389635", "0.46977806", "0.4647224", "0.46396595", "0.45886165", "0.45533726", "0.4542439", "0.45357236", "0.45272487", "0.45231503", "0.4522438", "0.45108384", "0.45066264", "0.4504384", "0.4501869", "0.4480825", "0.44681904", "0.44651276", "0.44479552", "0.4422654", "0.44091284", "0.4386573", "0.43848905", "0.43832177", "0.43826178", "0.4382609", "0.4376865", "0.4374041", "0.43672225", "0.43653736", "0.43652737", "0.43551975", "0.43507537", "0.4339964", "0.43267542", "0.43249694", "0.43198764", "0.43124595", "0.43116152", "0.43038264", "0.43012872", "0.4282357", "0.42806533", "0.4273617", "0.4265976", "0.42647353", "0.4234152", "0.4224212", "0.42214456", "0.42155313", "0.42101797", "0.4207076", "0.42023715", "0.41996062", "0.4195951", "0.41888684", "0.4186663", "0.41854426", "0.4173848", "0.41719136", "0.4167067", "0.41650018", "0.41568515", "0.41563004", "0.41551948", "0.41419265", "0.4134424", "0.41330174", "0.41307974", "0.4128899", "0.41241246", "0.41176128", "0.41142222", "0.41108495", "0.41094133", "0.41059837", "0.41001722", "0.40992358", "0.40971234", "0.40922868", "0.40892076", "0.40867007", "0.40808156" ]
0.83156854
0
DefaultStrictUpdateHealthMenstruationDailyEntry clears first level 1:many children and then executes a gorm update call
func DefaultStrictUpdateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) { if in == nil { return nil, fmt.Errorf("Nil argument to DefaultStrictUpdateHealthMenstruationDailyEntry") } ormObj, err := in.ToORM(ctx) if err != nil { return nil, err } lockedRow := &HealthMenstruationDailyEntryORM{} db.Model(&ormObj).Set("gorm:query_option", "FOR UPDATE").Where("id=?", ormObj.Id).First(lockedRow) if hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateCleanup); ok { if db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil { return nil, err } } if hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateSave); ok { if db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil { return nil, err } } if err = db.Save(&ormObj).Error; err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterStrictUpdateSave); ok { if err = hook.AfterStrictUpdateSave(ctx, db); err != nil { return nil, err } } pbResponse, err := ormObj.ToPB(ctx) if err != nil { return nil, err } return &pbResponse, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultPatchHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationDailyEntry\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationDailyEntry(ctx, &HealthMenstruationDailyEntry{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationDailyEntry(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationDailyEntryWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultPatchSetHealthMenstruationDailyEntry(ctx context.Context, objects []*HealthMenstruationDailyEntry, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationDailyEntry, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), 
len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationDailyEntry, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationDailyEntry(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func DefaultListHealthMenstruationDailyEntry(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationDailyEntry, error) {\n\tin := HealthMenstruationDailyEntry{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationDailyEntryORM{}, &HealthMenstruationDailyEntry{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationDailyEntryORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationDailyEntry{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func 
DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx context.Context, patchee *HealthMenstruationDailyEntry, patcher *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Day\" {\n\t\t\tpatchee.Day = patcher.Day\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"IntensityPercentage\" {\n\t\t\tpatchee.IntensityPercentage = patcher.IntensityPercentage\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Type\" {\n\t\t\tpatchee.Type = patcher.Type\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Manual\" {\n\t\t\tpatchee.Manual = patcher.Manual\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"BasedOnPrediction\" {\n\t\t\tpatchee.BasedOnPrediction = patcher.BasedOnPrediction\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultReadHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationDailyEntryORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationDailyEntryORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationDailyEntryORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultCreateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationPersonalInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := 
&HealthMenstruationPersonalInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func (m *HealthMenstruationDailyEntry) ToORM(ctx context.Context) (HealthMenstruationDailyEntryORM, error) {\n\tto := HealthMenstruationDailyEntryORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.Day = &t\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = int32(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = 
m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (hc *LegacyHealthCheckImpl) updateHealth(ts *LegacyTabletStats, conn queryservice.QueryService) {\n\t// Unconditionally send the received update at the end.\n\tdefer func() {\n\t\tif hc.listener != nil {\n\t\t\thc.listener.StatsUpdate(ts)\n\t\t}\n\t}()\n\n\thc.mu.Lock()\n\tth, ok := hc.addrToHealth[ts.Key]\n\tif !ok {\n\t\t// This can happen on delete because the entry is removed first,\n\t\t// or if LegacyHealthCheckImpl has been closed.\n\t\thc.mu.Unlock()\n\t\treturn\n\t}\n\toldts := th.latestTabletStats\n\tth.latestTabletStats = *ts\n\tth.conn = conn\n\thc.mu.Unlock()\n\n\t// In the case where a tablet changes type (but not for the\n\t// initial message), we want to log it, and maybe advertise it too.\n\tif oldts.Target.TabletType != topodatapb.TabletType_UNKNOWN && oldts.Target.TabletType != ts.Target.TabletType {\n\t\t// Log and maybe notify\n\t\tlog.Infof(\"HealthCheckUpdate(Type Change): %v, tablet: %s, target %+v => %+v, reparent time: %v\",\n\t\t\toldts.Name, topotools.TabletIdent(oldts.Tablet), topotools.TargetIdent(oldts.Target), topotools.TargetIdent(ts.Target), ts.TabletExternallyReparentedTimestamp)\n\t\tif hc.listener != nil && hc.sendDownEvents {\n\t\t\toldts.Up = false\n\t\t\thc.listener.StatsUpdate(&oldts)\n\t\t}\n\n\t\t// Track how often a tablet gets promoted to master. 
It is used for\n\t\t// comparing against the variables in go/vtgate/buffer/variables.go.\n\t\tif oldts.Target.TabletType != topodatapb.TabletType_MASTER && ts.Target.TabletType == topodatapb.TabletType_MASTER {\n\t\t\thcMasterPromotedCounters.Add([]string{ts.Target.Keyspace, ts.Target.Shard}, 1)\n\t\t}\n\t}\n}", "func update(rt *Runtime, r goengage.Fundraise, key string) {\n\tg := Stat{}\n\trt.DB.Where(\"id = ?\", key).First(&g)\n\tif g.CreatedDate == nil {\n\t\tg.ID = key\n\t\tt := time.Now()\n\t\tg.CreatedDate = &t\n\t\trt.DB.Create(&g)\n\t}\n\tfor _, t := range r.Transactions {\n\t\tg.AllCount++\n\t\tg.AllAmount = g.AllAmount + t.Amount\n\t\tif r.WasImported {\n\t\t\tg.OfflineCount++\n\t\t\tg.OfflineAmount += t.Amount\n\t\t} else {\n\t\t\tswitch r.DonationType {\n\t\t\tcase goengage.OneTime:\n\t\t\t\tg.OneTimeCount++\n\t\t\t\tg.OneTimeAmount += t.Amount\n\t\t\tcase goengage.Recurring:\n\t\t\t\tg.RecurringCount++\n\t\t\t\tg.RecurringAmount += t.Amount\n\t\t\t}\n\t\t\tswitch t.Type {\n\t\t\tcase goengage.Refund:\n\t\t\t\tg.RefundsCount++\n\t\t\t\tg.RefundsAmount += t.Amount\n\t\t\t}\n\t\t}\n\t\tg.Largest = math.Max(g.Largest, t.Amount)\n\t\tif t.Amount > 0.0 {\n\t\t\tif g.Smallest < 1.0 {\n\t\t\t\tg.Smallest = t.Amount\n\t\t\t} else {\n\t\t\t\tg.Smallest = math.Min(g.Smallest, t.Amount)\n\t\t\t}\n\t\t}\n\t\trt.DB.Model(&g).Updates(&g)\n\t}\n}", "func (client ModelClient) UpdateHierarchicalEntityChildResponder(resp *http.Response) (result OperationStatus, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (honest *Honest) flushUpdates() {\n\n\thonest.blockUpdates = honest.blockUpdates[:0]\n}", "func (client ModelClient) UpdateHierarchicalEntityResponder(resp *http.Response) (result OperationStatus, err error) {\n\terr = 
autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (du *DoctorinfoUpdate) ClearEducationlevel() *DoctorinfoUpdate {\n\tdu.mutation.ClearEducationlevel()\n\treturn du\n}", "func (m *HealthMenstruationDailyEntryORM) ToPB(ctx context.Context) (HealthMenstruationDailyEntry, error) {\n\tto := HealthMenstruationDailyEntry{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tif to.Day, err = ptypes1.TimestampProto(*m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = HealthMenstruationDailyEntry_Type(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (o *Tree) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\ttreeUpdateCacheMut.RLock()\n\tcache, cached := treeUpdateCache[key]\n\ttreeUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\ttreeAllColumns,\n\t\t\ttreePrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() 
{\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update trees, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"trees\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, treePrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(treeType, treeMapping, append(wl, treePrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update trees row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for trees\")\n\t}\n\n\tif !cached {\n\t\ttreeUpdateCacheMut.Lock()\n\t\ttreeUpdateCache[key] = cache\n\t\ttreeUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "func (u updateCachedUploadRevision) apply(data *journalPersist) {\n\tc := data.CachedRevisions[u.Revision.ParentID.String()]\n\tc.Revision = u.Revision\n\tif u.SectorIndex == len(c.MerkleRoots) {\n\t\tc.MerkleRoots = append(c.MerkleRoots, u.SectorRoot)\n\t} else if u.SectorIndex < len(c.MerkleRoots) {\n\t\tc.MerkleRoots[u.SectorIndex] = u.SectorRoot\n\t} else {\n\t\t// Shouldn't happen. 
TODO: Add correct error handling.\n\t}\n\tdata.CachedRevisions[u.Revision.ParentID.String()] = c\n}", "func (duo *DoctorinfoUpdateOne) ClearEducationlevel() *DoctorinfoUpdateOne {\n\tduo.mutation.ClearEducationlevel()\n\treturn duo\n}", "func (o *CurrentChartDataMinutely) Update(exec boil.Executor, columns boil.Columns) (int64, error) {\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\to.UpdatedAt = currTime\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tcurrentChartDataMinutelyUpdateCacheMut.RLock()\n\tcache, cached := currentChartDataMinutelyUpdateCache[key]\n\tcurrentChartDataMinutelyUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tcurrentChartDataMinutelyColumns,\n\t\t\tcurrentChartDataMinutelyPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update current_chart_data_minutely, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"current_chart_data_minutely\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, currentChartDataMinutelyPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(currentChartDataMinutelyType, currentChartDataMinutelyMapping, append(wl, currentChartDataMinutelyPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tvar result sql.Result\n\tresult, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update 
current_chart_data_minutely row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for current_chart_data_minutely\")\n\t}\n\n\tif !cached {\n\t\tcurrentChartDataMinutelyUpdateCacheMut.Lock()\n\t\tcurrentChartDataMinutelyUpdateCache[key] = cache\n\t\tcurrentChartDataMinutelyUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(exec)\n}", "func (q shelfQuery) UpdateAll(cols M) error {\n\tqueries.SetUpdate(q.Query, cols)\n\n\t_, err := q.Query.Exec()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to update all for shelf\")\n\t}\n\n\treturn nil\n}", "func (q holdenAtQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for HoldenAt\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for HoldenAt\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (q treeQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for trees\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for trees\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (client ModelClient) UpdateHierarchicalEntityChildSender(req *http.Request) (*http.Response, error) {\n\treturn autorest.SendWithSender(client, req,\n\t\tautorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))\n}", "func (duo *DocUpdateOne) ClearChildren() *DocUpdateOne 
{\n\tduo.mutation.ClearChildren()\n\treturn duo\n}", "func (uuo *UserUpdateOne) ClearChildren() *UserUpdateOne {\n\tuuo.mutation.ClearChildren()\n\treturn uuo\n}", "func (o CMFAdminMenuSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfAdminMenuPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_admin_menu` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfAdminMenuPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfAdminMenu slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfAdminMenu\")\n\t}\n\treturn rowsAff, nil\n}", "func (q currentChartDataMinutelyQuery) UpdateAll(exec boil.Executor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.Exec(exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for current_chart_data_minutely\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil 
{\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for current_chart_data_minutely\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (o TreeSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), treePrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"trees\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, treePrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in tree slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all tree\")\n\t}\n\treturn rowsAff, nil\n}", "func (q cmfAdminMenuQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for cmf_admin_menu\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, 
errors.Wrap(err, \"models: unable to retrieve rows affected for cmf_admin_menu\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (s *MyEntityManager) Update(dt float64) {\n}", "func (o CurrentChartDataMinutelySlice) UpdateAll(exec boil.Executor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), currentChartDataMinutelyPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"current_chart_data_minutely\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, currentChartDataMinutelyPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.Exec(sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in currentChartDataMinutely slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all currentChartDataMinutely\")\n\t}\n\treturn rowsAff, nil\n}", "func (d *dataUsageCache) deleteRecursive(h dataUsageHash) {\n\tif existing, ok := d.Cache[h.String()]; ok {\n\t\t// Delete first if there should be a loop.\n\t\tdelete(d.Cache, h.Key())\n\t\tfor child := range existing.Children {\n\t\t\td.deleteRecursive(dataUsageHash(child))\n\t\t}\n\t}\n}", "func (q 
smallblogQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for smallblog\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for smallblog\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (o *Smallblog) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tsmallblogUpdateCacheMut.RLock()\n\tcache, cached := smallblogUpdateCache[key]\n\tsmallblogUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tsmallblogAllColumns,\n\t\t\tsmallblogPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update smallblog, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `smallblog` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, smallblogPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(smallblogType, smallblogMapping, append(wl, smallblogPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to 
update smallblog row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for smallblog\")\n\t}\n\n\tif !cached {\n\t\tsmallblogUpdateCacheMut.Lock()\n\t\tsmallblogUpdateCache[key] = cache\n\t\tsmallblogUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "func (duo *DayUpdateOne) Save(ctx context.Context) (*Day, error) {\n\n\tvar (\n\t\terr error\n\t\tnode *Day\n\t)\n\tif len(duo.hooks) == 0 {\n\t\tnode, err = duo.sqlSave(ctx)\n\t} else {\n\t\tvar mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {\n\t\t\tmutation, ok := m.(*DayMutation)\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected mutation type %T\", m)\n\t\t\t}\n\t\t\tduo.mutation = mutation\n\t\t\tnode, err = duo.sqlSave(ctx)\n\t\t\tmutation.done = true\n\t\t\treturn node, err\n\t\t})\n\t\tfor i := len(duo.hooks) - 1; i >= 0; i-- {\n\t\t\tmut = duo.hooks[i](mut)\n\t\t}\n\t\tif _, err := mut.Mutate(ctx, duo.mutation); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn node, err\n}", "func DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx context.Context, patchee *HealthMenstruationPersonalInfo, patcher *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == 
prefix+\"PeriodLengthInDays\" {\n\t\t\tpatchee.PeriodLengthInDays = patcher.PeriodLengthInDays\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CycleLengthInDays\" {\n\t\t\tpatchee.CycleLengthInDays = patcher.CycleLengthInDays\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func (c *dbCache) flush() error {\n\tc.lastFlush = time.Now()\n\n\t// Sync the current write file associated with the block store. This is\n\t// necessary before writing the metadata to prevent the case where the\n\t// metadata contains information about a block which actually hasn't\n\t// been written yet in unexpected shutdown scenarios.\n\tif err := c.store.syncBlocks(); err != nil {\n\t\treturn err\n\t}\n\n\t// Nothing to do if there are no transactions to flush.\n\tif len(c.txLog) == 0 {\n\t\treturn nil\n\t}\n\n\t// Perform all leveldb updates using batches for atomicity.\n\tbatchLen := 0\n\tbatchTxns := 0\n\tbatch := new(leveldb.Batch)\n\tfor logTxNum, txLogEntries := range c.txLog {\n\t\t// Replay the transaction from the log into the current batch.\n\t\tfor _, logEntry := range txLogEntries {\n\t\t\tswitch logEntry.entryType {\n\t\t\tcase entryTypeUpdate:\n\t\t\t\tbatch.Put(logEntry.key, logEntry.value)\n\t\t\tcase entryTypeRemove:\n\t\t\t\tbatch.Delete(logEntry.key)\n\t\t\t}\n\t\t}\n\t\tbatchTxns++\n\n\t\t// Write and reset the current batch when the number of items in\n\t\t// it exceeds the the batch threshold or this is the last\n\t\t// transaction in the log.\n\t\tbatchLen += len(txLogEntries)\n\t\tif batchLen > batchThreshold || logTxNum == len(c.txLog)-1 {\n\t\t\tif err := c.ldb.Write(batch, nil); err != nil {\n\t\t\t\treturn convertErr(\"failed to write batch\", err)\n\t\t\t}\n\t\t\tbatch.Reset()\n\t\t\tbatchLen = 0\n\n\t\t\t// Clear the transactions that were written from the\n\t\t\t// log so the memory can be reclaimed.\n\t\t\tfor i := logTxNum - (batchTxns - 1); i <= logTxNum; i++ {\n\t\t\t\tc.txLog[i] = 
nil\n\t\t\t}\n\t\t\tbatchTxns = 0\n\t\t}\n\t}\n\tc.txLog = c.txLog[:]\n\n\t// Clear the cache since it has been flushed.\n\tc.cacheLock.Lock()\n\tc.cachedKeys = treap.NewImmutable()\n\tc.cachedRemove = treap.NewImmutable()\n\tc.cacheLock.Unlock()\n\n\treturn nil\n}", "func (self GroupModel) UpdateAll(db Database) GroupModel {\n for idx, page := range self {\n self[idx] = page.Update(db)\n }\n return self\n}", "func (o *CMFAdminMenu) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tcmfAdminMenuUpdateCacheMut.RLock()\n\tcache, cached := cmfAdminMenuUpdateCache[key]\n\tcmfAdminMenuUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tcmfAdminMenuAllColumns,\n\t\t\tcmfAdminMenuPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update cmf_admin_menu, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `cmf_admin_menu` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, cmfAdminMenuPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(cmfAdminMenuType, cmfAdminMenuMapping, append(wl, cmfAdminMenuPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update cmf_admin_menu 
row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for cmf_admin_menu\")\n\t}\n\n\tif !cached {\n\t\tcmfAdminMenuUpdateCacheMut.Lock()\n\t\tcmfAdminMenuUpdateCache[key] = cache\n\t\tcmfAdminMenuUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "func (e *EntitySystem) Update() {\n\t//\tfor _, v := range e.Entities {\n\t//\t\tv.Update()\n\t//\t}\n}", "func (e *Department) Reload() error { return ent.ReloadEnt(e) }", "func (storage *HippoStorage) UpdateChild(hashKey string) {\n\tvar (\n\t\thas bool\n\t)\n\t_, has = storage.verified.Load(hashKey)\n\tif !has {\n\t\treturn\n\t}\n\tchild, has := storage.child.Load(hashKey)\n\tif !has {\n\t\tblock, _ := storage.Get(hashKey)\n\t\tstorage.TryUpdateMaxLevel(block.GetLevel())\n\t\treturn\n\t}\n\tchildList := child.([]string)\n\tfor _, childHash := range childList {\n\t\tstorage.UpdateVerified(childHash)\n\t\tstorage.UpdateChild(childHash)\n\t}\n\n\treturn\n}", "func (u *usecase) Update() error {\n\t// Time execution\n\tstart := time.Now()\n\n\t// Creating context with timeout duration process\n\tctx, cancel := context.WithTimeout(context.Background(), 20*time.Minute)\n\tdefer cancel()\n\n\t// Get all archieve from scrapper repository\n\tarchieves, err := u.scrapperRepo.GetAllArchieve()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Create array (slice) of archieve object\n\tvar newArchieves []*model.Archieve\n\n\t// Check if archieve is exist in DB\n\tfor _, archieve := range archieves {\n\t\t// Get archieve from DB by archieve code\n\t\t_, err := u.mysqlRepo.GetArchieveByCode(ctx, archieve.Code)\n\n\t\t// if archieve not exist then add to newArchieve array (slice)\n\t\tif err == model.ErrDataNotFound {\n\t\t\t// Add archieve\n\t\t\tnewArchieves = append(newArchieves, archieve)\n\t\t\tlog.Printf(\"New archieve: %v\", archieve.Code)\n\t\t} else if err != nil && err != model.ErrDataNotFound 
{\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// Counter new journal from archieves\n\tvar totalNewJournal int\n\n\t// Get new archieves journals\n\tfor _, newArchieve := range newArchieves {\n\t\t// Get all journal from scrapper repository based on archieve\n\t\tjournals, err := u.scrapperRepo.GetAllJournalByArchieveObject(newArchieve)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// Append Journals into archieve object\n\t\tnewArchieve.Journals = journals\n\t\ttotalNewJournal += len(newArchieve.Journals)\n\t}\n\n\t// Check if there's new archieve then saved new archieve into DB\n\tif len(newArchieves) > 0 {\n\t\t// Insert new archieves into DB\n\t\tif err := u.mysqlRepo.BatchArchieves(ctx, newArchieves); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tlog.Printf(\"Added %v archieve and %v journal (%v)m\", len(newArchieves), totalNewJournal, time.Since(start).Minutes())\n\n\t// if there's no update then do nothing or finish pull data from archieve scrapper\n\treturn nil\n}", "func (o *HoldenAt) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tkey := makeCacheKey(columns, nil)\n\tholdenAtUpdateCacheMut.RLock()\n\tcache, cached := holdenAtUpdateCache[key]\n\tholdenAtUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tholdenAtAllColumns,\n\t\t\tholdenAtPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update HoldenAt, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"HoldenAt\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, holdenAtPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(holdenAtType, holdenAtMapping, append(wl, holdenAtPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := 
queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update HoldenAt row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for HoldenAt\")\n\t}\n\n\tif !cached {\n\t\tholdenAtUpdateCacheMut.Lock()\n\t\tholdenAtUpdateCache[key] = cache\n\t\tholdenAtUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, nil\n}", "func (m *mEntry) Flush(msg string) {\n\tvar r interface{}\n\t// Since this might be a Flush() call on panic level, we have to recover from each call.\n\t// Otherwise, the rest of the loggers will not flush\n\tfor i := range m.es {\n\t\tfunc() {\n\t\t\tdefer func() {\n\t\t\t\tr = recover()\n\t\t\t}()\n\t\t\tm.es[i].Flush(msg)\n\t\t}()\n\t}\n\t// If any panic call was recovered from, panic again, but only once\n\tif r != nil {\n\t\tpanic(r)\n\t}\n}", "func (l *LifeSystem) clearEntitiesStatus() {\n\tfor _, entity := range l.entities {\n\t\tentity.updated = false\n\t}\n}", "func (client ModelClient) UpdateHierarchicalEntitySender(req *http.Request) (*http.Response, error) {\n\treturn autorest.SendWithSender(client, req,\n\t\tautorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))\n}", "func DefaultPatchHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationPersonalInfo\n\tvar err error\n\tif hook, ok := 
interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationPersonalInfo(ctx, &HealthMenstruationPersonalInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationPersonalInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func (q cmfUserExperienceLogQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for cmf_user_experience_log\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for cmf_user_experience_log\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (q cmfUserSuperQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) 
{\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for cmf_user_super\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for cmf_user_super\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (o HoldenAtSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), holdenAtPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"HoldenAt\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, holdenAtPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in holdenAt slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all holdenAt\")\n\t}\n\treturn rowsAff, nil\n}", "func (o SmallblogSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := 
int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), smallblogPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `smallblog` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, smallblogPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in smallblog slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all smallblog\")\n\t}\n\treturn rowsAff, nil\n}", "func (o CMFUserExperienceLogSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := 
queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfUserExperienceLogPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_user_experience_log` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfUserExperienceLogPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfUserExperienceLog slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfUserExperienceLog\")\n\t}\n\treturn rowsAff, nil\n}", "func (u updateCachedDownloadRevision) apply(data *journalPersist) {\n\tc := data.CachedRevisions[u.Revision.ParentID.String()]\n\tc.Revision = u.Revision\n\tdata.CachedRevisions[u.Revision.ParentID.String()] = c\n}", "func (o CMFUserSuperSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfUserSuperPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_user_super` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, 
colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfUserSuperPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfUserSuper slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfUserSuper\")\n\t}\n\treturn rowsAff, nil\n}", "func (c *hllCache) Flush() {\n\tstart := time.Now()\n\tc.dirtyWriteCache.Purge()\n\tcost := time.Since(start)\n\tif cost > time.Millisecond*100 {\n\t\tdbLog.Infof(\"flush hll cache cost: %v\", cost)\n\t}\n}", "func (du *DayUpdate) Save(ctx context.Context) (int, error) {\n\n\tvar (\n\t\terr error\n\t\taffected int\n\t)\n\tif len(du.hooks) == 0 {\n\t\taffected, err = du.sqlSave(ctx)\n\t} else {\n\t\tvar mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {\n\t\t\tmutation, ok := m.(*DayMutation)\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected mutation type %T\", m)\n\t\t\t}\n\t\t\tdu.mutation = mutation\n\t\t\taffected, err = du.sqlSave(ctx)\n\t\t\tmutation.done = true\n\t\t\treturn affected, err\n\t\t})\n\t\tfor i := len(du.hooks) - 1; i >= 0; i-- {\n\t\t\tmut = du.hooks[i](mut)\n\t\t}\n\t\tif _, err := mut.Mutate(ctx, du.mutation); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\treturn affected, err\n}", "func (k Keeper) UpdateDailyPercent(ctx sdk.Context, addr sdk.AccAddress, coin coins.Coin) {\n\tbalance := k.BankKeeper.GetPosminableBalance(ctx, addr, coin)\n\n\tposmining := k.GetPosmining(ctx, addr, coin)\n\n\tnewDailyPercent := coin.GetDailyPercent(balance)\n\n\tif !posmining.DailyPercent.Equal(newDailyPercent) {\n\t\tposmining.DailyPercent = newDailyPercent\n\n\t\tk.SetPosmining(ctx, 
posmining, coin)\n\t}\n}", "func (q illnessQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for illness\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for illness\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (s *Storage) applyExpunge(ctx context.Context, conv types.UnboxConversationInfo,\n\tuid gregor1.UID, expunge chat1.Expunge) (*chat1.Expunge, Error) {\n\n\tconvID := conv.GetConvID()\n\ts.Debug(ctx, \"applyExpunge(%v, %v, %v)\", convID, uid, expunge.Upto)\n\n\tde := func(format string, args ...interface{}) {\n\t\ts.Debug(ctx, \"applyExpunge: \"+fmt.Sprintf(format, args...))\n\t}\n\n\trc := NewInsatiableResultCollector() // collect all messages\n\terr := s.engine.ReadMessages(ctx, rc, convID, uid, expunge.Upto-1, 0)\n\tswitch err.(type) {\n\tcase nil:\n\t\t// ok\n\tcase MissError:\n\t\tde(\"record-only delh: no local messages\")\n\t\terr := s.delhTracker.setMaxDeleteHistoryUpto(ctx, convID, uid, expunge.Upto)\n\t\tif err != nil {\n\t\t\tde(\"failed to store delh track: %v\", err)\n\t\t}\n\t\treturn nil, nil\n\tdefault:\n\t\treturn nil, err\n\t}\n\n\tvar allAssets []chat1.Asset\n\tvar writeback, allPurged []chat1.MessageUnboxed\n\tfor _, msg := range rc.Result() {\n\t\tmtype := msg.GetMessageType()\n\t\tif !chat1.IsDeletableByDeleteHistory(mtype) {\n\t\t\t// Skip message types that cannot be deleted this way\n\t\t\tcontinue\n\t\t}\n\t\tif !msg.IsValid() {\n\t\t\tde(\"skipping invalid msg: %v\", msg.DebugString())\n\t\t\tcontinue\n\t\t}\n\t\tmvalid := msg.Valid()\n\t\tif mvalid.MessageBody.IsNil() {\n\t\t\tcontinue\n\t\t}\n\t\t// METADATA and HEADLINE messages are only expunged if they are not the\n\t\t// latest max message.\n\t\tswitch mtype {\n\t\tcase 
chat1.MessageType_METADATA,\n\t\t\tchat1.MessageType_HEADLINE:\n\t\t\tmaxMsg, err := conv.GetMaxMessage(mtype)\n\t\t\tif err != nil {\n\t\t\t\tde(\"delh: %v, not expunging %v\", err, msg.DebugString())\n\t\t\t\tcontinue\n\t\t\t} else if maxMsg.MsgID == msg.GetMessageID() {\n\t\t\t\tde(\"delh: not expunging %v, latest max message\", msg.DebugString())\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tde(\"delh: expunging %v, non-max message\", msg.DebugString())\n\t\tdefault:\n\t\t}\n\n\t\tmvalid.ServerHeader.SupersededBy = expunge.Basis // Can be 0\n\t\tmsgPurged, assets := s.purgeMessage(mvalid)\n\t\tallPurged = append(allPurged, msg)\n\t\tallAssets = append(allAssets, assets...)\n\t\twriteback = append(writeback, msgPurged)\n\t}\n\n\t// queue asset deletions in the background\n\ts.assetDeleter.DeleteAssets(ctx, uid, convID, allAssets)\n\t// queue search index update in the background\n\tgo func(ctx context.Context) {\n\t\terr := s.G().Indexer.Remove(ctx, convID, allPurged)\n\t\tif err != nil {\n\t\t\ts.Debug(ctx, \"Error removing from indexer: %+v\", err)\n\t\t}\n\t}(globals.BackgroundChatCtx(ctx, s.G()))\n\n\tde(\"deleting %v messages\", len(writeback))\n\tif err = s.engine.WriteMessages(ctx, convID, uid, writeback); err != nil {\n\t\tde(\"write messages failed: %v\", err)\n\t\treturn nil, err\n\t}\n\n\terr = s.delhTracker.setDeletedUpto(ctx, convID, uid, expunge.Upto)\n\tif err != nil {\n\t\tde(\"failed to store delh track: %v\", err)\n\t}\n\n\treturn &expunge, nil\n}", "func (q rawVisitQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for raw_visits\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for raw_visits\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (d *dataUsageCache) 
replaceHashed(hash dataUsageHash, parent *dataUsageHash, e dataUsageEntry) {\n\tif d.Cache == nil {\n\t\td.Cache = make(map[string]dataUsageEntry, 100)\n\t}\n\td.Cache[hash.Key()] = e\n\tif parent != nil {\n\t\tp := d.Cache[parent.Key()]\n\t\tp.addChild(hash)\n\t\td.Cache[parent.Key()] = p\n\t}\n}", "func (m *metricRedisRdbChangesSinceLastSave) emit(metrics pmetric.MetricSlice) {\n\tif m.config.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}", "func (q automodRuleDatumQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for automod_rule_data\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for automod_rule_data\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (c *hllCache) onEvicted(rawKey interface{}, value interface{}) {\n\titem, ok := value.(*hllCacheItem)\n\tif !ok {\n\t\treturn\n\t}\n\titem.Lock()\n\tif item.deleting || !item.dirty {\n\t\titem.Unlock()\n\t\treturn\n\t}\n\ttsBuf := PutInt64(item.ts)\n\titem.dirty = false\n\tnewV, err := item.HLLToBytes()\n\toldCnt := atomic.LoadUint64(&item.cachedCount)\n\tht := item.hllType\n\titem.Unlock()\n\tif err != nil {\n\t\tdbLog.Warningf(\"failed to encode %v hll: %v\", rawKey, err.Error())\n\t\treturn\n\t}\n\tcachedKey := []byte(rawKey.(string))\n\ttable, key, err := convertRedisKeyToDBKVKey(cachedKey)\n\tif err != nil {\n\t\tdbLog.Warningf(\"key invalid %v : %v\", cachedKey, err.Error())\n\t\treturn\n\t}\n\n\ts := time.Now()\n\twb := c.db.rockEng.NewWriteBatch()\n\tdefer wb.Destroy()\n\toldV, _ := c.db.GetBytesNoLock(key)\n\n\tif c.db.cfg.EnableTableCounter && oldV == nil {\n\t\tc.db.IncrTableKeyCount(table, 1, wb)\n\t}\n\n\tif len(oldV) 
>= 1+8 {\n\t\toldV = oldV[:1+8]\n\t} else {\n\t\toldV = make([]byte, 8+1)\n\t}\n\toldV[0] = ht\n\tbinary.BigEndian.PutUint64(oldV[1:1+8], oldCnt)\n\toldV = append(oldV, newV...)\n\toldV = append(oldV, tsBuf...)\n\twb.Put(key, oldV)\n\tc.db.rockEng.Write(wb)\n\tcost := time.Since(s)\n\tslow.LogSlowDBWrite(cost, slow.NewSlowLogInfo(c.db.cfg.DataDir, string(key), \"flush pfadd\"))\n\tc.AddToReadCache(cachedKey, item)\n}", "func (q buildingQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"record: unable to update all for buildings\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"record: unable to retrieve rows affected for buildings\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (o *Shelf) Update(exec boil.Executor, whitelist ...string) error {\n\to.whitelist = whitelist\n\twhitelist = o.Whitelist()\n\n\to.operation = \"UPDATE\"\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn err\n\t}\n\tkey := makeCacheKey(whitelist, nil)\n\tshelfUpdateCacheMut.RLock()\n\tcache, cached := shelfUpdateCache[key]\n\tshelfUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := strmangle.UpdateColumnSet(shelfColumns, shelfPrimaryKeyColumns, whitelist)\n\t\tif len(wl) == 0 {\n\t\t\treturn errors.New(\"models: unable to update shelf, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `shelf` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, shelfPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(shelfType, shelfMapping, append(wl, shelfPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), 
cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\t_, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to update shelf row\")\n\t}\n\n\tif !cached {\n\t\tshelfUpdateCacheMut.Lock()\n\t\tshelfUpdateCache[key] = cache\n\t\tshelfUpdateCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpdateHooks(exec)\n}", "func (q notificationQuery) UpdateAll(exec boil.Executor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.Exec(exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for notification\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for notification\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (q rowerGroupQuery) UpdateAll(exec boil.Executor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.Exec(exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for rower_group\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for rower_group\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (d *dataUsageCache) flatten(root dataUsageEntry) dataUsageEntry {\n\tfor id := range root.Children {\n\t\te := d.Cache[id]\n\t\tif len(e.Children) > 0 {\n\t\t\te = d.flatten(e)\n\t\t}\n\t\troot.merge(e)\n\t}\n\troot.Children = nil\n\treturn root\n}", "func (q foreignLegalResourceQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for ForeignLegalResources\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != 
nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for ForeignLegalResources\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (q activityLogQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"dbmodel: unable to update all for activity_logs\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"dbmodel: unable to retrieve rows affected for activity_logs\")\n\t}\n\n\treturn rowsAff, nil\n}", "func FlushAt(e *events.Event, db Storer, tm time.Time) error {\n\ts := getSnapshot()\n\tdefer putSnapshot(s)\n\ts.Counters = e.Flush(s.Counters[:0])\n\tif len(s.Counters) == 0 {\n\t\treturn nil\n\t}\n\ts.Labels, s.Time = e.Labels, tm\n\tif err := db.Store(s); err != nil {\n\t\te.Merge(s.Counters)\n\t\treturn err\n\t}\n\treturn nil\n}", "func (l *LifeSystem) Update(dt float32) {\n\tl.since += dt\n\n\tif l.since > LifeStepInterval || l.isFirstTime {\n\t\tl.gen += 1\n\t\tl.clearEntitiesStatus()\n\t\tvar sys *common.RenderSystem\n\n\t\tfor _, system := range l.world.Systems() {\n\t\t\tswitch system.(type) {\n\t\t\tcase *common.RenderSystem:\n\t\t\t\tsys = system.(*common.RenderSystem)\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\tfor _, entity := range l.entities {\n\t\t\tif !l.isFirstTime {\n\t\t\t\tentity.updateStatus()\n\t\t\t}\n\n\t\t\tif entity.status {\n\t\t\t\tsys.AddByInterface(entity.AliveComponent)\n\t\t\t} else {\n\t\t\t\tsys.Remove(entity.AliveComponent.BasicEntity)\n\t\t\t}\n\t\t}\n\t\tl.since = 0\n\t}\n\n\tif l.isFirstTime {\n\t\tl.isFirstTime = false\n\t}\n}", "func (tc *LegacyTabletStatsCache) StatsUpdate(ts *LegacyTabletStats) {\n\tif ts.Target.TabletType != topodatapb.TabletType_MASTER &&\n\t\tts.Tablet.Alias.Cell != tc.cell &&\n\t\ttc.getAliasByCell(ts.Tablet.Alias.Cell) != tc.getAliasByCell(tc.cell) {\n\t\t// this is for a 
non-master tablet in a different cell and a different alias, drop it\n\t\treturn\n\t}\n\n\te := tc.getOrCreateEntry(ts.Target)\n\te.mu.Lock()\n\tdefer e.mu.Unlock()\n\n\t// Update our full map.\n\ttrivialNonMasterUpdate := false\n\tif existing, ok := e.all[ts.Key]; ok {\n\t\tif ts.Up {\n\t\t\t// We have an existing entry, and a new entry.\n\t\t\t// Remember if they are both good (most common case).\n\t\t\ttrivialNonMasterUpdate = existing.LastError == nil && existing.Serving && ts.LastError == nil &&\n\t\t\t\tts.Serving && ts.Target.TabletType != topodatapb.TabletType_MASTER && existing.TrivialStatsUpdate(ts)\n\n\t\t\t// We already have the entry, update the\n\t\t\t// values if necessary. (will update both\n\t\t\t// 'all' and 'healthy' as they use pointers).\n\t\t\tif !trivialNonMasterUpdate {\n\t\t\t\t*existing = *ts\n\t\t\t}\n\t\t} else {\n\t\t\t// We have an entry which we shouldn't. Remove it.\n\t\t\tdelete(e.all, ts.Key)\n\t\t}\n\t} else {\n\t\tif ts.Up {\n\t\t\t// Add the entry.\n\t\t\te.all[ts.Key] = ts\n\t\t} else {\n\t\t\t// We were told to remove an entry which we\n\t\t\t// didn't have anyway, nothing should happen.\n\t\t\treturn\n\t\t}\n\t}\n\n\t// Update our healthy list.\n\tvar allArray []*LegacyTabletStats\n\tif ts.Target.TabletType == topodatapb.TabletType_MASTER {\n\t\t// The healthy list is different for TabletType_MASTER: we\n\t\t// only keep the most recent one.\n\t\te.updateHealthyMapForMaster(ts)\n\t} else {\n\t\t// For non-master, if it is a trivial update,\n\t\t// we just skip everything else. We don't even update the\n\t\t// aggregate stats.\n\t\tif trivialNonMasterUpdate {\n\t\t\treturn\n\t\t}\n\n\t\t// Now we need to do some work. 
Recompute our healthy list.\n\t\tallArray = make([]*LegacyTabletStats, 0, len(e.all))\n\t\tfor _, s := range e.all {\n\t\t\tallArray = append(allArray, s)\n\t\t}\n\t\te.healthy = FilterLegacyStatsByReplicationLag(allArray)\n\t}\n}", "func (du *DocUpdate) ClearChildren() *DocUpdate {\n\tdu.mutation.ClearChildren()\n\treturn du\n}", "func (o ActivityLogSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"dbmodel: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activityLogPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"activity_logs\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activityLogPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"dbmodel: unable to update all in activityLog slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"dbmodel: unable to retrieve rows affected all in update all activityLog\")\n\t}\n\treturn rowsAff, nil\n}", "func (q rssAnnouncementQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := 
q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for rss_announcements\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for rss_announcements\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (uu *UserUpdate) ClearChildren() *UserUpdate {\n\tuu.mutation.ClearChildren()\n\treturn uu\n}", "func (stateObj *stateObject) updateRoot(db StateDatabase) {\n\t// If nothing changed, don't bother with hashing anything\n\tif stateObj.updateTrie(db) == nil {\n\t\treturn\n\t}\n\t// Track the amount of time wasted on hashing the storage trie\n\tif metrics.EnabledExpensive {\n\t\tdefer func(start time.Time) { stateObj.db.StorageHashes += time.Since(start) }(time.Now())\n\t}\n\tstateObj.data.Root = stateObj.trie.Hash()\n}", "func (m *EmployeeWorkingHoursMutation) ClearDay() {\n\tm.clearedday = true\n}", "func (o *AutomodRuleDatum) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tkey := makeCacheKey(columns, nil)\n\tautomodRuleDatumUpdateCacheMut.RLock()\n\tcache, cached := automodRuleDatumUpdateCache[key]\n\tautomodRuleDatumUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tautomodRuleDatumAllColumns,\n\t\t\tautomodRuleDatumPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update automod_rule_data, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"automod_rule_data\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, automodRuleDatumPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(automodRuleDatumType, automodRuleDatumMapping, 
append(wl, automodRuleDatumPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update automod_rule_data row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for automod_rule_data\")\n\t}\n\n\tif !cached {\n\t\tautomodRuleDatumUpdateCacheMut.Lock()\n\t\tautomodRuleDatumUpdateCache[key] = cache\n\t\tautomodRuleDatumUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, nil\n}", "func (o ShelfSlice) UpdateAll(exec boil.Executor, cols M) error {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), shelfPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\n\t\t\"UPDATE `shelf` SET %s WHERE (`id`) IN (%s)\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.Placeholders(dialect.IndexPlaceholders, len(o)*len(shelfPrimaryKeyColumns), len(colNames)+1, len(shelfPrimaryKeyColumns)),\n\t)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\t_, err := exec.Exec(sql, 
args...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to update all in shelf slice\")\n\t}\n\n\treturn nil\n}", "func (o *CMFAdminMenu) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no cmf_admin_menu provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(cmfAdminMenuColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLCMFAdminMenuUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tcmfAdminMenuUpsertCacheMut.RLock()\n\tcache, cached := cmfAdminMenuUpsertCache[key]\n\tcmfAdminMenuUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tcmfAdminMenuAllColumns,\n\t\t\tcmfAdminMenuColumnsWithDefault,\n\t\t\tcmfAdminMenuColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tcmfAdminMenuAllColumns,\n\t\t\tcmfAdminMenuPrimaryKeyColumns,\n\t\t)\n\n\t\tif !updateColumns.IsNone() && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert cmf_admin_menu, could not build update column list\")\n\t\t}\n\n\t\tret = 
strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"`cmf_admin_menu`\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `cmf_admin_menu` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(cmfAdminMenuType, cmfAdminMenuMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(cmfAdminMenuType, cmfAdminMenuMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for cmf_admin_menu\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = uint(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == cmfAdminMenuMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(cmfAdminMenuType, cmfAdminMenuMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to retrieve unique values for cmf_admin_menu\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := 
boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.retQuery)\n\t\tfmt.Fprintln(writer, nzUniqueCols...)\n\t}\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for cmf_admin_menu\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tcmfAdminMenuUpsertCacheMut.Lock()\n\t\tcmfAdminMenuUpsertCache[key] = cache\n\t\tcmfAdminMenuUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (s *EntityStorage) update() {\n\ts.outdated = false\n\ts.occupied = s.occupied[:0]\n\tl := len(s.vec)\n\tfor i := 0; i < l; i++ {\n\t\tif s.vec[i].occupied {\n\t\t\ts.occupied = append(s.occupied, i)\n\t\t}\n\t}\n}", "func (q dMessageEmbedQuery) UpdateAll(cols M) error {\n\tqueries.SetUpdate(q.Query, cols)\n\n\t_, err := q.Query.Exec()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to update all for d_message_embeds\")\n\t}\n\n\treturn nil\n}", "func (o *Illness) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tillnessUpdateCacheMut.RLock()\n\tcache, cached := illnessUpdateCache[key]\n\tillnessUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tillnessAllColumns,\n\t\t\tillnessPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update illness, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"illness\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, illnessPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = 
queries.BindMapping(illnessType, illnessMapping, append(wl, illnessPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update illness row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for illness\")\n\t}\n\n\tif !cached {\n\t\tillnessUpdateCacheMut.Lock()\n\t\tillnessUpdateCache[key] = cache\n\t\tillnessUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "func (p *statusUpdate) persistHealthyField(\n\tstate pb_task.TaskState,\n\treason string,\n\thealthy bool,\n\tnewRuntime *pb_task.RuntimeInfo) {\n\n\tswitch {\n\tcase util.IsPelotonStateTerminal(state):\n\t\t// Set healthy to INVALID for all terminal state\n\t\tnewRuntime.Healthy = pb_task.HealthState_INVALID\n\tcase state == pb_task.TaskState_RUNNING:\n\t\t// Only record the health check result when\n\t\t// the reason for the event is TASK_HEALTH_CHECK_STATUS_UPDATED\n\t\tif reason == mesos.TaskStatus_REASON_TASK_HEALTH_CHECK_STATUS_UPDATED.String() {\n\t\t\tnewRuntime.Reason = reason\n\t\t\tif healthy {\n\t\t\t\tnewRuntime.Healthy = pb_task.HealthState_HEALTHY\n\t\t\t\tp.metrics.TasksHealthyTotal.Inc(1)\n\t\t\t} else {\n\t\t\t\tnewRuntime.Healthy = pb_task.HealthState_UNHEALTHY\n\t\t\t\tp.metrics.TasksUnHealthyTotal.Inc(1)\n\t\t\t}\n\t\t}\n\t}\n}", "func (v *VersionData) UpdateCategoriesRecord(categoriesData CategoryUnitCollection) {\n\tts := GetMillionSecondTimestamp()\n\ttmp := SentencesUnitCollection{}\n\tfor _, c := range categoriesData {\n\t\tif unit, ok := 
v.Sentences.Find(c.Key); !ok || unit.Name != c.Name || unit.Path != c.Path {\n\t\t\ttmp = append(tmp, SentencesUnit{\n\t\t\t\tName: c.Name,\n\t\t\t\tKey: c.Key,\n\t\t\t\tPath: c.Path,\n\t\t\t\tTimestamp: ts,\n\t\t\t})\n\t\t} else {\n\t\t\ttmp = append(tmp, *unit)\n\t\t}\n\t}\n\tv.Sentences = tmp\n\tv.Categories.Timestamp = ts\n\tv.UpdatedAt = ts\n}", "func (o CMFBalanceChargeAdminSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfBalanceChargeAdminPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_balance_charge_admin` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfBalanceChargeAdminPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfBalanceChargeAdmin slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfBalanceChargeAdmin\")\n\t}\n\treturn rowsAff, nil\n}", "func (o RSSAnnouncementSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, 
error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), rssAnnouncementPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"rss_announcements\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, rssAnnouncementPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in rssAnnouncement slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all rssAnnouncement\")\n\t}\n\treturn rowsAff, nil\n}", "func (r *Registrations) normalize() {\n\tfor _, entry := range r.Entries {\n\t\tif entry.RegistrationContent != nil {\n\t\t\tentry.RegistrationContent.normalize()\n\t\t}\n\t}\n}", "func (m *ReportRoot) SetDailyPrintUsage(value []PrintUsageable)() {\n err := m.GetBackingStore().Set(\"dailyPrintUsage\", value)\n if err != nil {\n panic(err)\n }\n}", "func (hs *HealthStatusInfo) UpdateHealthInfo(details bool, registeredESS uint32, storedObjects uint32) {\n\ths.lock()\n\tdefer hs.unLock()\n\n\tHealthUsageInfo.RegisteredESS = registeredESS\n\tHealthUsageInfo.StoredObjects = 
storedObjects\n\n\tDBHealth.DBStatus = Green\n\ttimeSinceLastError := uint64(0)\n\tif DBHealth.DBReadFailures != 0 || DBHealth.DBWriteFailures != 0 {\n\t\ttimeSinceLastError = uint64(time.Since(DBHealth.lastReadWriteErrorTime).Seconds())\n\t\tDBHealth.TimeSinceLastReadWriteError = timeSinceLastError\n\t}\n\tif DBHealth.DisconnectedFromDB {\n\t\tDBHealth.DBStatus = Red\n\t} else if DBHealth.DBReadFailures != 0 || DBHealth.DBWriteFailures != 0 {\n\t\tif timeSinceLastError < uint64(Configuration.ResendInterval*12) {\n\t\t\tDBHealth.DBStatus = Red\n\t\t} else if timeSinceLastError < uint64(Configuration.ResendInterval*60) {\n\t\t\tDBHealth.DBStatus = Yellow\n\t\t}\n\t}\n\n\tMQTTHealth.MQTTConnectionStatus = Green\n\tif Configuration.CommunicationProtocol != HTTPProtocol {\n\t\ttimeSinceLastSubError := uint64(0)\n\t\tif MQTTHealth.SubscribeFailures != 0 {\n\t\t\ttimeSinceLastSubError = uint64(time.Since(MQTTHealth.lastSubscribeErrorTime).Seconds())\n\t\t\tMQTTHealth.TimeSinceLastSubscribeError = timeSinceLastSubError\n\t\t}\n\t\ttimeSinceLastPubError := uint64(0)\n\t\tif MQTTHealth.PublishFailures != 0 {\n\t\t\ttimeSinceLastPubError = uint64(time.Since(MQTTHealth.lastPublishErrorTime).Seconds())\n\t\t\tMQTTHealth.TimeSinceLastPublishError = timeSinceLastPubError\n\t\t}\n\t\tif MQTTHealth.DisconnectedFromMQTTBroker {\n\t\t\tMQTTHealth.MQTTConnectionStatus = Red\n\t\t} else {\n\t\t\tif MQTTHealth.SubscribeFailures != 0 {\n\t\t\t\tif timeSinceLastSubError < uint64(Configuration.ResendInterval*12) {\n\t\t\t\t\tMQTTHealth.MQTTConnectionStatus = Red\n\t\t\t\t} else if timeSinceLastSubError < uint64(Configuration.ResendInterval*60) {\n\t\t\t\t\tMQTTHealth.MQTTConnectionStatus = Yellow\n\t\t\t\t}\n\t\t\t}\n\t\t\tif MQTTHealth.PublishFailures != 0 && MQTTHealth.MQTTConnectionStatus == Green &&\n\t\t\t\ttimeSinceLastPubError < uint64(Configuration.ResendInterval*12) {\n\t\t\t\tMQTTHealth.MQTTConnectionStatus = Yellow\n\t\t\t}\n\t\t}\n\t}\n\n\tif DBHealth.DBStatus == Red || 
MQTTHealth.MQTTConnectionStatus == Red {\n\t\ths.HealthStatus = Red\n\t} else if DBHealth.DBStatus == Yellow || MQTTHealth.MQTTConnectionStatus == Yellow {\n\t\ths.HealthStatus = Yellow\n\t} else {\n\t\ths.HealthStatus = Green\n\t}\n\n\ths.UpTime = uint64(time.Since(hs.startTime).Seconds())\n\n\tif !details {\n\t\treturn\n\t}\n\n\tif Configuration.CommunicationProtocol != HTTPProtocol {\n\t\tMQTTHealth.LastDisconnectFromBrokerDuration = hs.GetLastDisconnectFromBrokerDuration()\n\t}\n\tDBHealth.LastDisconnectFromDBDuration = hs.GetLastDisconnectFromDBDuration()\n}", "func (o NotificationSlice) UpdateAll(exec boil.Executor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), notificationPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"notification\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, notificationPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.Exec(sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in notification slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all notification\")\n\t}\n\treturn 
rowsAff, nil\n}", "func (o *CompartimentoHistorico) UnsetUpd() {\n\to.Upd.Unset()\n}", "func (o *RSSAnnouncement) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tkey := makeCacheKey(columns, nil)\n\trssAnnouncementUpdateCacheMut.RLock()\n\tcache, cached := rssAnnouncementUpdateCache[key]\n\trssAnnouncementUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\trssAnnouncementAllColumns,\n\t\t\trssAnnouncementPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update rss_announcements, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"rss_announcements\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, rssAnnouncementPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(rssAnnouncementType, rssAnnouncementMapping, append(wl, rssAnnouncementPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update rss_announcements row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for rss_announcements\")\n\t}\n\n\tif !cached {\n\t\trssAnnouncementUpdateCacheMut.Lock()\n\t\trssAnnouncementUpdateCache[key] = 
cache\n\t\trssAnnouncementUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, nil\n}", "func (o AutomodRuleDatumSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), automodRuleDatumPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"automod_rule_data\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, automodRuleDatumPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in automodRuleDatum slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all automodRuleDatum\")\n\t}\n\treturn rowsAff, nil\n}", "func (t *TableCache) populate(tableUpdates ovsdb.TableUpdates) {\n\tt.cacheMutex.Lock()\n\tdefer t.cacheMutex.Unlock()\n\tfor table := range t.dbModel.Types() {\n\t\tupdates, ok := tableUpdates.Updates[table]\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\t\tvar tCache *RowCache\n\t\tif tCache, ok = t.cache[table]; !ok {\n\t\t\tt.cache[table] = newRowCache()\n\t\t\ttCache = 
t.cache[table]\n\t\t}\n\t\ttCache.mutex.Lock()\n\t\tfor uuid, row := range updates.Rows {\n\t\t\tif !reflect.DeepEqual(row.New, ovsdb.Row{}) {\n\t\t\t\tnewModel, err := t.createModel(table, &row.New, uuid)\n\t\t\t\tif err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t\tif existing, ok := tCache.cache[uuid]; ok {\n\t\t\t\t\tif !reflect.DeepEqual(newModel, existing) {\n\t\t\t\t\t\ttCache.cache[uuid] = newModel\n\t\t\t\t\t\toldModel, err := t.createModel(table, &row.Old, uuid)\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\tpanic(err)\n\t\t\t\t\t\t}\n\t\t\t\t\t\tt.eventProcessor.AddEvent(updateEvent, table, oldModel, newModel)\n\t\t\t\t\t}\n\t\t\t\t\t// no diff\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\ttCache.cache[uuid] = newModel\n\t\t\t\tt.eventProcessor.AddEvent(addEvent, table, nil, newModel)\n\t\t\t\tcontinue\n\t\t\t} else {\n\t\t\t\toldModel, err := t.createModel(table, &row.Old, uuid)\n\t\t\t\tif err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t\t// delete from cache\n\t\t\t\tdelete(tCache.cache, uuid)\n\t\t\t\tt.eventProcessor.AddEvent(deleteEvent, table, oldModel, nil)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\ttCache.mutex.Unlock()\n\t}\n}", "func (q tenantQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"dbmodel: unable to update all for tenants\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"dbmodel: unable to retrieve rows affected for tenants\")\n\t}\n\n\treturn rowsAff, nil\n}" ]
[ "0.6325353", "0.60747105", "0.6054701", "0.59975785", "0.58065385", "0.5620147", "0.54418504", "0.50306076", "0.49060896", "0.47815248", "0.47454336", "0.4728229", "0.47249833", "0.47200248", "0.47140735", "0.46733493", "0.46712738", "0.46654415", "0.4653481", "0.46406284", "0.46342558", "0.46295357", "0.46259916", "0.4616088", "0.45883068", "0.4577795", "0.45722157", "0.45480183", "0.45427206", "0.45283964", "0.45149377", "0.45134613", "0.45088157", "0.45050785", "0.4483675", "0.44623795", "0.44618136", "0.4453958", "0.44515485", "0.4449064", "0.4432145", "0.4418475", "0.44139844", "0.4408777", "0.44050318", "0.44035465", "0.4402882", "0.43957898", "0.43695548", "0.43628818", "0.4357074", "0.4351401", "0.43397585", "0.43396118", "0.43381852", "0.4315483", "0.4314298", "0.43136254", "0.43087494", "0.4301813", "0.4301727", "0.4290592", "0.42905027", "0.4280297", "0.42707503", "0.42705566", "0.42704806", "0.4270035", "0.42682332", "0.4262553", "0.4257568", "0.4254821", "0.42529306", "0.4244732", "0.4241827", "0.42403424", "0.42279086", "0.42218736", "0.4217232", "0.42095387", "0.42079636", "0.42051926", "0.4200568", "0.41935602", "0.41930246", "0.41823748", "0.4181491", "0.41797787", "0.41784242", "0.41753227", "0.41697842", "0.41682667", "0.41660923", "0.41649732", "0.41576633", "0.41564828", "0.41546154", "0.41521734", "0.41512814", "0.414986" ]
0.6900533
0
DefaultPatchHealthMenstruationDailyEntry executes a basic gorm update call with patch behavior
func DefaultPatchHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) { if in == nil { return nil, errors1.NilArgumentError } var pbObj HealthMenstruationDailyEntry var err error if hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchRead); ok { if db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil { return nil, err } } pbReadRes, err := DefaultReadHealthMenstruationDailyEntry(ctx, &HealthMenstruationDailyEntry{Id: in.GetId()}, db) if err != nil { return nil, err } pbObj = *pbReadRes if hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchApplyFieldMask); ok { if db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil { return nil, err } } if _, err := DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx, &pbObj, in, updateMask, "", db); err != nil { return nil, err } if hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchSave); ok { if db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil { return nil, err } } pbResponse, err := DefaultStrictUpdateHealthMenstruationDailyEntry(ctx, &pbObj, db) if err != nil { return nil, err } if hook, ok := interface{}(pbResponse).(HealthMenstruationDailyEntryWithAfterPatchSave); ok { if err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil { return nil, err } } return pbResponse, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultPatchSetHealthMenstruationDailyEntry(ctx context.Context, objects []*HealthMenstruationDailyEntry, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationDailyEntry, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationDailyEntry, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationDailyEntry(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func DefaultStrictUpdateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationDailyEntry\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationDailyEntryORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, 
err\n\t}\n\treturn &pbResponse, err\n}", "func DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx context.Context, patchee *HealthMenstruationDailyEntry, patcher *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Day\" {\n\t\t\tpatchee.Day = patcher.Day\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"IntensityPercentage\" {\n\t\t\tpatchee.IntensityPercentage = patcher.IntensityPercentage\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Type\" {\n\t\t\tpatchee.Type = patcher.Type\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Manual\" {\n\t\t\tpatchee.Manual = patcher.Manual\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"BasedOnPrediction\" {\n\t\t\tpatchee.BasedOnPrediction = patcher.BasedOnPrediction\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultListHealthMenstruationDailyEntry(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationDailyEntry, error) {\n\tin := HealthMenstruationDailyEntry{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil 
{\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationDailyEntryORM{}, &HealthMenstruationDailyEntry{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationDailyEntryORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationDailyEntry{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func DefaultPatchHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationPersonalInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationPersonalInfo(ctx, &HealthMenstruationPersonalInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err 
!= nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationPersonalInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultReadHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationDailyEntryORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationDailyEntryORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationDailyEntryORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultCreateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationPersonalInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationPersonalInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := 
interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx context.Context, patchee *HealthMenstruationPersonalInfo, patcher *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"PeriodLengthInDays\" {\n\t\t\tpatchee.PeriodLengthInDays = patcher.PeriodLengthInDays\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CycleLengthInDays\" {\n\t\t\tpatchee.CycleLengthInDays = patcher.CycleLengthInDays\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func (s *Service) Update(r *http.Request, args *UpdateEntryArgs, result *UpdateResponse) error {\n\t// Since there is no fixed data schema, we can update as we like, so be careful\n\tif args.UserID == \"\" {\n\t\tresult.Message = uidMissing\n\t\treturn nil\n\t}\n\tuuid := args.UUID\n\tif uuid != \"\" {\n\t\tcoll := s.Session.DB(MentatDatabase).C(args.UserID)\n\t\tentry := Entry{}\n\t\tmgoErr := coll.Find(bson.M{\"uuid\": uuid}).One(&entry)\n\t\tif mgoErr != nil {\n\t\t\tif mgoErr.Error() == MongoNotFound 
{\n\t\t\t\tresult.Message = \"No entry with provided UUID\"\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\ts.Log.Infof(\"mgo error: %s\", mgoErr)\n\t\t\tresult.Message = fmt.Sprintf(\"mgo error: %s\", mgoErr)\n\t\t\treturn nil\n\t\t}\n\t\t// TODO: maybe use reflection\n\t\tif args.Type != \"\" {\n\t\t\tentry.Type = args.Type\n\t\t}\n\t\tif args.Content != \"\" {\n\t\t\tentry.Content = args.Content\n\t\t}\n\t\tif len(args.Tags) > 0 {\n\t\t\tentry.Tags = args.Tags\n\t\t}\n\t\tif args.Scheduled != \"\" {\n\t\t\tscheduled, err := time.Parse(DatetimeLayout, args.Scheduled)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tentry.Scheduled = scheduled\n\t\t}\n\t\tif args.Deadline != \"\" {\n\t\t\tdeadline, err := time.Parse(DatetimeLayout, args.Deadline)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tentry.Deadline = deadline\n\t\t}\n\n\t\tif args.Priority != \"\" {\n\t\t\trexp, err := regexp.Compile(\"\\\\#[A-Z]$\")\n\t\t\tif err != nil {\n\t\t\t\tpanic(err) // sentinel, should fail, because such error is predictable\n\t\t\t}\n\t\t\tif rexp.Match([]byte(args.Priority)) {\n\t\t\t\tentry.Priority = args.Priority\n\t\t\t} else {\n\t\t\t\tresult.Message = \"Malformed priority value\"\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\n\t\tif args.TodoStatus != \"\" {\n\t\t\tentry.TodoStatus = strings.ToUpper(args.TodoStatus)\n\t\t}\n\t\tentry.ModifiedAt = time.Now()\n\t\t_, err := coll.Upsert(bson.M{\"uuid\": uuid}, entry)\n\t\tif err != nil {\n\t\t\tresult.Message = fmt.Sprintf(\"update failed: %s\", err)\n\t\t\treturn nil\n\t\t}\n\t\tresult.Message = \"updated\"\n\t\treturn nil\n\t}\n\tresult.Message = \"No UUID found, cannot proceed with updating\"\n\treturn nil\n}", "func DefaultPatchSetHealthMenstruationPersonalInfo(ctx context.Context, objects []*HealthMenstruationPersonalInfo, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationPersonalInfo, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, 
fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationPersonalInfo, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationPersonalInfo(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func (c *kuberhealthyChecks) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result KuberhealthyCheck, err error) {\n\tresult = KuberhealthyCheck{}\n\terr = c.client.Patch(pt).\n\t\tNamespace(c.ns).\n\t\tResource(\"khchecks\").\n\t\tSubResource(subresources...).\n\t\tName(name).\n\t\tBody(data).\n\t\tDo(context.TODO()).\n\t\tInto(&result)\n\treturn\n}", "func (c *cronFederatedHPAs) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.CronFederatedHPA, err error) {\n\tresult = &v1alpha1.CronFederatedHPA{}\n\terr = c.client.Patch(pt).\n\t\tNamespace(c.ns).\n\t\tResource(\"cronfederatedhpas\").\n\t\tName(name).\n\t\tSubResource(subresources...).\n\t\tVersionedParams(&opts, scheme.ParameterCodec).\n\t\tBody(data).\n\t\tDo(ctx).\n\t\tInto(result)\n\treturn\n}", "func (a *HyperflexApiService) PatchHyperflexSoftwareDistributionEntry(ctx context.Context, moid string) ApiPatchHyperflexSoftwareDistributionEntryRequest {\n\treturn ApiPatchHyperflexSoftwareDistributionEntryRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func update(rt *Runtime, r goengage.Fundraise, key string) {\n\tg := Stat{}\n\trt.DB.Where(\"id = ?\", key).First(&g)\n\tif g.CreatedDate == nil {\n\t\tg.ID = key\n\t\tt := time.Now()\n\t\tg.CreatedDate = &t\n\t\trt.DB.Create(&g)\n\t}\n\tfor _, t := range r.Transactions {\n\t\tg.AllCount++\n\t\tg.AllAmount = g.AllAmount + t.Amount\n\t\tif r.WasImported {\n\t\t\tg.OfflineCount++\n\t\t\tg.OfflineAmount += 
t.Amount\n\t\t} else {\n\t\t\tswitch r.DonationType {\n\t\t\tcase goengage.OneTime:\n\t\t\t\tg.OneTimeCount++\n\t\t\t\tg.OneTimeAmount += t.Amount\n\t\t\tcase goengage.Recurring:\n\t\t\t\tg.RecurringCount++\n\t\t\t\tg.RecurringAmount += t.Amount\n\t\t\t}\n\t\t\tswitch t.Type {\n\t\t\tcase goengage.Refund:\n\t\t\t\tg.RefundsCount++\n\t\t\t\tg.RefundsAmount += t.Amount\n\t\t\t}\n\t\t}\n\t\tg.Largest = math.Max(g.Largest, t.Amount)\n\t\tif t.Amount > 0.0 {\n\t\t\tif g.Smallest < 1.0 {\n\t\t\t\tg.Smallest = t.Amount\n\t\t\t} else {\n\t\t\t\tg.Smallest = math.Min(g.Smallest, t.Amount)\n\t\t\t}\n\t\t}\n\t\trt.DB.Model(&g).Updates(&g)\n\t}\n}", "func (o *Smallblog) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tsmallblogUpdateCacheMut.RLock()\n\tcache, cached := smallblogUpdateCache[key]\n\tsmallblogUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tsmallblogAllColumns,\n\t\t\tsmallblogPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update smallblog, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `smallblog` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, smallblogPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(smallblogType, smallblogMapping, append(wl, smallblogPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, 
values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update smallblog row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for smallblog\")\n\t}\n\n\tif !cached {\n\t\tsmallblogUpdateCacheMut.Lock()\n\t\tsmallblogUpdateCache[key] = cache\n\t\tsmallblogUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "func SchedulePatch(ctx context.Context, env evergreen.Environment, patchId string, version *model.Version, patchUpdateReq model.PatchUpdate) (int, error) {\n\tvar err error\n\tp, err := patch.FindOneId(patchId)\n\tif err != nil {\n\t\treturn http.StatusInternalServerError, errors.Wrapf(err, \"loading patch '%s'\", patchId)\n\t}\n\tif p == nil {\n\t\treturn http.StatusBadRequest, errors.Errorf(\"patch '%s' not found\", patchId)\n\t}\n\n\tif p.IsCommitQueuePatch() {\n\t\treturn http.StatusBadRequest, errors.New(\"can't schedule commit queue patch\")\n\t}\n\tprojectRef, err := model.FindMergedProjectRef(p.Project, p.Version, true)\n\tif err != nil {\n\t\treturn http.StatusInternalServerError, errors.Wrapf(err, \"finding project ref '%s' for version '%s'\", p.Project, p.Version)\n\t}\n\tif projectRef == nil {\n\t\treturn http.StatusInternalServerError, errors.Errorf(\"project '%s' for version '%s' not found\", p.Project, p.Version)\n\t}\n\n\tstatusCode, err := model.ConfigurePatch(ctx, env.Settings(), p, version, projectRef, patchUpdateReq)\n\tif err != nil {\n\t\treturn statusCode, err\n\t}\n\tif p.Version != \"\" { // if the version already exists, no more to do\n\t\treturn http.StatusOK, nil\n\t}\n\n\t// create a separate context from the one the caller has so that the caller\n\t// can't interrupt the db operations here\n\tnewCxt := context.Background()\n\t// Process additional patch trigger aliases added via 
UI.\n\t// Child patches created with the CLI --trigger-alias flag go through a separate flow, so ensure that new child patches are also created before the parent is finalized.\n\tif err := ProcessTriggerAliases(ctx, p, projectRef, env, patchUpdateReq.PatchTriggerAliases); err != nil {\n\t\treturn http.StatusInternalServerError, errors.Wrap(err, \"processing patch trigger aliases\")\n\t}\n\tif len(patchUpdateReq.PatchTriggerAliases) > 0 {\n\t\tp.Triggers.Aliases = patchUpdateReq.PatchTriggerAliases\n\t\tif err = p.SetTriggerAliases(); err != nil {\n\t\t\treturn http.StatusInternalServerError, errors.Wrapf(err, \"attaching trigger aliases '%s'\", p.Id.Hex())\n\t\t}\n\t}\n\t_, err = model.FinalizePatch(newCxt, p, p.GetRequester(), \"\")\n\tif err != nil {\n\t\treturn http.StatusInternalServerError, errors.Wrap(err, \"finalizing patch\")\n\t}\n\n\tif p.IsGithubPRPatch() {\n\t\tjob := NewGithubStatusUpdateJobForNewPatch(p.Id.Hex())\n\t\tif err := evergreen.GetEnvironment().LocalQueue().Put(newCxt, job); err != nil {\n\t\t\treturn http.StatusInternalServerError, errors.Wrap(err, \"adding GitHub status update job to queue\")\n\t\t}\n\t}\n\treturn http.StatusOK, nil\n}", "func updateHandler(w http.ResponseWriter, r *http.Request) {\n\t////\n\t// handle only pull request\n\t//\n\tif r.Header.Get(\"X-Github-Event\") != \"pull_request\" {\n\t\treturn\n\t}\n\n\tvar pr pullRequestModel\n\tif err := json.NewDecoder(r.Body).Decode(&pr); err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\tif pr.Action != \"opened\" {\n\t\treturn\n\t}\n\n\t//\n\t////\n\n\t////\n\t// check if the pr just opened has step.yml in it\n\t//\n\texists, err := isPRHasStepYML(fmt.Sprintf(\"%d\", pr.Number))\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\tif !exists {\n\t\treturn\n\t}\n\n\t//\n\t////\n\tif strings.Contains(pr.PullRequest.Body, fmt.Sprintf(\"https://%s/tag?pr=%d\", hostBaseURL, pr.Number)) {\n\t\treturn\n\t}\n\n\t////\n\t// updating the PR's initial comment section: append 
badge as first element\n\t//\n\n\tapiURL := fmt.Sprintf(\"https://api.github.com/repos/bitrise-io/bitrise-steplib/pulls/%d\", pr.Number)\n\tbadgeContent := fmt.Sprintf(\"![TagCheck](https://%s/tag?pr=%d)\\r\\n\\r\\n\", hostBaseURL, pr.Number)\n\tnewBody := map[string]interface{}{\n\t\t\"body\": badgeContent + pr.PullRequest.Body,\n\t}\n\n\t// convert new body message to json\n\tb, err := json.Marshal(newBody)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\t// call authenticated PATCH request\n\tc := http.Client{}\n\treq, err := http.NewRequest(\"PATCH\", apiURL, bytes.NewReader(b))\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\treq.SetBasicAuth(os.Getenv(\"GITHUB_USER\"), os.Getenv(\"GITHUB_ACCESS_TOKEN\"))\n\t_, err = c.Do(req)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\t//\n\t////\n}", "func Patching(c *gin.Context) {\n\n\tvar request Request\n\terr := c.BindJSON(&request)\n\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\tc.Writer.WriteHeader(400)\n\t\treturn\n\t}\n\n\te, err := db.GetRepo().Patch(c.Param(\"id\"), request.Value, request.Key)\n\tif err != nil && err.Error() == \"record not found\" {\n\t\tc.Writer.WriteHeader(404)\n\t\treturn\n\t}\n\tif err != nil {\n\t\tc.Writer.WriteHeader(500)\n\t\treturn\n\t}\n\n\tc.JSON(200, gin.H{\n\t\t\"id\": e.Uuid,\n\t\t\"value\": e.Value,\n\t\t\"key\": e.Key,\n\t})\n\treturn\n}", "func (c *globalThreatFeeds) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v3.GlobalThreatFeed, err error) {\n\tresult = &v3.GlobalThreatFeed{}\n\terr = c.client.Patch(pt).\n\t\tResource(\"globalthreatfeeds\").\n\t\tName(name).\n\t\tSubResource(subresources...).\n\t\tVersionedParams(&opts, scheme.ParameterCodec).\n\t\tBody(data).\n\t\tDo(ctx).\n\t\tInto(result)\n\treturn\n}", "func (m *ThreatSubmissionEmailThreatsEmailThreatSubmissionItemRequestBuilder) Patch(ctx context.Context, body 
i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.EmailThreatSubmissionable, requestConfiguration *ThreatSubmissionEmailThreatsEmailThreatSubmissionItemRequestBuilderPatchRequestConfiguration)(i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.EmailThreatSubmissionable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.CreateEmailThreatSubmissionFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.EmailThreatSubmissionable), nil\n}", "func (a *HyperflexApiService) PatchHyperflexHxdpVersion(ctx context.Context, moid string) ApiPatchHyperflexHxdpVersionRequest {\n\treturn ApiPatchHyperflexHxdpVersionRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (o *Notification) Update(exec boil.Executor, columns boil.Columns) (int64, error) {\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\to.UpdatedAt = currTime\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tnotificationUpdateCacheMut.RLock()\n\tcache, cached := notificationUpdateCache[key]\n\tnotificationUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tnotificationColumns,\n\t\t\tnotificationPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() 
{\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update notification, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"notification\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, notificationPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(notificationType, notificationMapping, append(wl, notificationPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tvar result sql.Result\n\tresult, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update notification row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for notification\")\n\t}\n\n\tif !cached {\n\t\tnotificationUpdateCacheMut.Lock()\n\t\tnotificationUpdateCache[key] = cache\n\t\tnotificationUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(exec)\n}", "func (d *defaultJobRepository) PatchJobStatus(ctxIn context.Context, jobPatcher JobPatch) error {\n _, span := trace.StartSpan(ctxIn, \"(*defaultJobRepository).PatchJobStatus\")\n defer span.End()\n\n job := &Job{\n Status: jobPatcher.Status,\n ForeignJobID: ForeignJobID{\n BigQueryID: jobPatcher.ForeignJobID.BigQueryID,\n CloudStorageID: jobPatcher.ForeignJobID.CloudStorageID,\n },\n EntityAudit: EntityAudit{\n UpdatedTimestamp: time.Now(),\n },\n }\n\n _, err := d.storageService.DB().Model(job).\n Column(\"status\", \"audit_updated_timestamp\", \"bigquery_extract_job_id\", 
\"cloudstorage_transfer_job_id\").\n Where(\"audit_deleted_timestamp IS NULL\").\n Where(\"id = ?\", jobPatcher.ID).\n Update()\n\n if err != nil {\n return fmt.Errorf(\"error during executing updating job statement: %s\", err)\n }\n\n return nil\n}", "func (m *CompaniesItemJournalsItemJournalLinesJournalLineItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.JournalLineable, requestConfiguration *CompaniesItemJournalsItemJournalLinesJournalLineItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.JournalLineable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateJournalLineFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.JournalLineable), nil\n}", "func DefaultApplyFieldMaskPeriod(ctx context.Context, patchee *Period, patcher *Period, updateMask *field_mask.FieldMask, prefix string, db *gorm.DB) (*Period, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar err error\n\tvar updatedCreatedAt bool\n\tvar updatedUpdatedAt bool\n\tfor i, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == 
prefix+\"Period\" {\n\t\t\tpatchee.Period = patcher.Period\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedCreatedAt && strings.HasPrefix(f, prefix+\"CreatedAt.\") {\n\t\t\tif patcher.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"CreatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.CreatedAt, patchee.CreatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tupdatedCreatedAt = true\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUpdatedAt && strings.HasPrefix(f, prefix+\"UpdatedAt.\") {\n\t\t\tif patcher.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UpdatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.UpdatedAt, patchee.UpdatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tupdatedUpdatedAt = true\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func patchAPIPostHandler(w http.ResponseWriter, r *http.Request, _ map[string]string) {\n\tuserName := sessionHandler.GetUserName(r)\n\tif 
userName != \"\" {\n\t\tuserID, err := getUserID(userName)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\t// Update post\n\t\tdecoder := json.NewDecoder(r.Body)\n\t\tvar json JSONPost\n\t\terr = decoder.Decode(&json)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\tvar postSlug string\n\t\t// Get current slug of post\n\t\tpost, err := database.RetrievePostByID(json.ID)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\tif json.Slug != post.Slug { // Check if user has submitted a custom slug\n\t\t\tpostSlug = slug.Generate(json.Slug, \"posts\")\n\t\t} else {\n\t\t\tpostSlug = post.Slug\n\t\t}\n\t\tcurrentTime := date.GetCurrentTime()\n\t\t*post = structure.Post{ID: json.ID, Title: []byte(json.Title), Slug: postSlug, Markdown: []byte(json.Markdown), HTML: conversion.GenerateHTMLFromMarkdown([]byte(json.Markdown)), IsFeatured: json.IsFeatured, IsPage: json.IsPage, IsPublished: json.IsPublished, MetaDescription: []byte(json.MetaDescription), Image: []byte(json.Image), Date: &currentTime, Tags: methods.GenerateTagsFromCommaString(json.Tags), Author: &structure.User{ID: userID}}\n\t\tnewlyPublished, err := methods.UpdatePost(post)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\tif newlyPublished {\n\t\t\tnotifications.Send(string(post.Title), \"https://svjaneo.com/\"+post.Slug)\n\t\t}\n\t\tw.WriteHeader(http.StatusOK)\n\t\tw.Write([]byte(\"Post updated!\"))\n\t\treturn\n\t}\n\thttp.Error(w, \"Not logged in!\", http.StatusInternalServerError)\n}", "func (o *Rental) Update(exec boil.Executor, whitelist ...string) error {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn err\n\t}\n\tkey := makeCacheKey(whitelist, nil)\n\trentalUpdateCacheMut.RLock()\n\tcache, cached := 
rentalUpdateCache[key]\n\trentalUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := strmangle.UpdateColumnSet(\n\t\t\trentalColumns,\n\t\t\trentalPrimaryKeyColumns,\n\t\t\twhitelist,\n\t\t)\n\n\t\tif len(whitelist) == 0 {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn errors.New(\"sqlboiler: unable to update rental, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `rental` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, rentalPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(rentalType, rentalMapping, append(wl, rentalPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\t_, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"sqlboiler: unable to update rental row\")\n\t}\n\n\tif !cached {\n\t\trentalUpdateCacheMut.Lock()\n\t\trentalUpdateCache[key] = cache\n\t\trentalUpdateCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpdateHooks(exec)\n}", "func (repo *Repository) Update(ctx context.Context, claims auth.Claims, req UpdateRequest, now time.Time) error {\n\tspan, ctx := tracer.StartSpanFromContext(ctx, \"internal.expenditure.Update\")\n\tdefer span.Finish()\n\n\tif claims.Audience == \"\" {\n\t\treturn errors.WithStack(ErrForbidden)\n\t}\n\t// Admin users can update branches they have access to.\n\tif !claims.HasRole(auth.RoleAdmin) {\n\t\treturn errors.WithStack(ErrForbidden)\n\t}\n\n\t// Validate the request.\n\tv := webcontext.Validator()\n\terr := v.Struct(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcols := models.M{}\n\tif req.Amount != nil {\n\t\tcols[models.RepsExpenseColumns.Amount] = 
*req.Amount\n\t}\n\tif req.Reason != nil {\n\t\tcols[models.RepsExpenseColumns.Reason] = *req.Reason\n\t}\n\tif len(cols) == 0 {\n\t\treturn nil\n\t}\n\n\t// If now empty set it to the current time.\n\tif now.IsZero() {\n\t\tnow = time.Now()\n\t}\n\n\t// Always store the time as UTC.\n\tnow = now.UTC()\n\t// Postgres truncates times to milliseconds when storing. We and do the same\n\t// here so the value we return is consistent with what we store.\n\tnow = now.Truncate(time.Millisecond)\n\n\tcols[models.BranchColumns.UpdatedAt] = now\n\n\t_, err = models.RepsExpenses(models.RepsExpenseWhere.ID.EQ(req.ID)).UpdateAll(ctx, repo.DbConn, cols)\n\n\treturn nil\n}", "func (a *HyperflexApiService) PatchHyperflexSoftwareDistributionEntryExecute(r ApiPatchHyperflexSoftwareDistributionEntryRequest) (*HyperflexSoftwareDistributionEntry, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexSoftwareDistributionEntry\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexSoftwareDistributionEntry\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/SoftwareDistributionEntries/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexSoftwareDistributionEntry == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexSoftwareDistributionEntry is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type 
header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexSoftwareDistributionEntry\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v 
Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (o *HoldenAt) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tkey := makeCacheKey(columns, 
nil)\n\tholdenAtUpdateCacheMut.RLock()\n\tcache, cached := holdenAtUpdateCache[key]\n\tholdenAtUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tholdenAtAllColumns,\n\t\t\tholdenAtPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update HoldenAt, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"HoldenAt\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, holdenAtPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(holdenAtType, holdenAtMapping, append(wl, holdenAtPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update HoldenAt row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for HoldenAt\")\n\t}\n\n\tif !cached {\n\t\tholdenAtUpdateCacheMut.Lock()\n\t\tholdenAtUpdateCache[key] = cache\n\t\tholdenAtUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, nil\n}", "func (res *Resource) Patch(storage store.Update) {\n\tres.HandleFuncC(\n\t\tpat.Patch(patID),\n\t\tfunc(ctx context.Context, w http.ResponseWriter, r *http.Request) {\n\t\t\tres.patchHandler(ctx, w, r, storage)\n\t\t},\n\t)\n\n\tres.addRoute(patch, patID)\n}", "func handlePatchRequest(w http.ResponseWriter, e *models.Endpoint, r *http.Request, entity entities.Entity, h *func() (interface{}, error)) {\n\tw.Header().Add(\"Access-Control-Allow-Origin\", 
\"*\")\n\tif !checkContentType(w, r) {\n\t\treturn\n\t}\n\n\tbyteData, _ := ioutil.ReadAll(r.Body)\n\terr := entity.ParseEntity(byteData)\n\tif err != nil {\n\t\tsendError(w, []error{err})\n\t\treturn\n\t}\n\n\thandle := *h\n\tdata, err2 := handle()\n\tif err2 != nil {\n\t\tsendError(w, []error{err2})\n\t\treturn\n\t}\n\n\tw.Header().Add(\"Location\", entity.GetSelfLink())\n\n\tsendJSONResponse(w, http.StatusOK, data, nil)\n}", "func (a *HyperflexApiService) PatchHyperflexServerFirmwareVersionEntry(ctx context.Context, moid string) ApiPatchHyperflexServerFirmwareVersionEntryRequest {\n\treturn ApiPatchHyperflexServerFirmwareVersionEntryRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (cc *LogController) Patch(c *gin.Context) {\n\trequest := &LogPatchRequest{}\n\tif err := c.ShouldBindJSON(request); err != nil {\n\t\tjsonAPIError(c, http.StatusUnprocessableEntity, err)\n\t\treturn\n\t}\n\n\tif request.Level == \"\" && request.SqlEnabled == nil {\n\t\tjsonAPIError(c, http.StatusBadRequest, fmt.Errorf(\"please set either logLevel or logSql as params in order to set the log level\"))\n\t\treturn\n\t}\n\n\tif request.Level != \"\" {\n\t\tvar ll zapcore.Level\n\t\terr := ll.UnmarshalText([]byte(request.Level))\n\t\tif err != nil {\n\t\t\tjsonAPIError(c, http.StatusBadRequest, err)\n\t\t\treturn\n\t\t}\n\t\tif err = cc.App.GetStore().Config.SetLogLevel(c.Request.Context(), ll.String()); err != nil {\n\t\t\tjsonAPIError(c, http.StatusInternalServerError, err)\n\t\t\treturn\n\t\t}\n\t}\n\n\tif request.SqlEnabled != nil {\n\t\tif err := cc.App.GetStore().Config.SetLogSQLStatements(c.Request.Context(), *request.SqlEnabled); err != nil {\n\t\t\tjsonAPIError(c, http.StatusInternalServerError, err)\n\t\t\treturn\n\t\t}\n\t\tcc.App.GetStore().SetLogging(*request.SqlEnabled)\n\t}\n\n\t// Set default logger with new configurations\n\tlogger.SetLogger(cc.App.GetStore().Config.CreateProductionLogger())\n\n\tresponse := &presenters.LogResource{\n\t\tJAID: 
presenters.JAID{\n\t\t\tID: \"log\",\n\t\t},\n\t\tLevel: cc.App.GetStore().Config.LogLevel().String(),\n\t\tSqlEnabled: cc.App.GetStore().Config.LogSQLStatements(),\n\t}\n\n\tjsonAPIResponse(c, response, \"log\")\n}", "func DefaultApplyFieldMaskUserInfo(ctx context.Context, patchee *UserInfo, patcher *UserInfo, updateMask *field_mask.FieldMask, prefix string, db *gorm.DB) (*UserInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar err error\n\tvar updatedCreatedAt bool\n\tvar updatedUpdatedAt bool\n\tfor i, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UserId\" {\n\t\t\tpatchee.UserId = patcher.UserId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"LastName\" {\n\t\t\tpatchee.LastName = patcher.LastName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"FirstName\" {\n\t\t\tpatchee.FirstName = patcher.FirstName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Period\" {\n\t\t\tpatchee.Period = patcher.Period\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"DepartmentId\" {\n\t\t\tpatchee.DepartmentId = patcher.DepartmentId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"JobId\" {\n\t\t\tpatchee.JobId = patcher.JobId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"EnrollmentFlg\" {\n\t\t\tpatchee.EnrollmentFlg = patcher.EnrollmentFlg\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"AdminFlg\" {\n\t\t\tpatchee.AdminFlg = patcher.AdminFlg\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedCreatedAt && strings.HasPrefix(f, prefix+\"CreatedAt.\") {\n\t\t\tif patcher.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"CreatedAt.\"); trimPath != 
updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.CreatedAt, patchee.CreatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tupdatedCreatedAt = true\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUpdatedAt && strings.HasPrefix(f, prefix+\"UpdatedAt.\") {\n\t\t\tif patcher.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UpdatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.UpdatedAt, patchee.UpdatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tupdatedUpdatedAt = true\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func ModifyVersionHandler(ctx context.Context, dataConnector data.Connector, patchID string, modifications VersionModifications) error {\n\tversion, err := dataConnector.FindVersionById(patchID)\n\tif err != nil {\n\t\treturn ResourceNotFound.Send(ctx, fmt.Sprintf(\"error finding version %s: %s\", patchID, err.Error()))\n\t}\n\tuser := MustHaveUser(ctx)\n\thttpStatus, err := ModifyVersion(*version, *user, nil, modifications)\n\tif err != nil {\n\t\treturn mapHTTPStatusToGqlError(ctx, httpStatus, err)\n\t}\n\n\tif evergreen.IsPatchRequester(version.Requester) {\n\t\t// restart is handled through graphql because we need the user to specify\n\t\t// which downstream tasks they want to 
restart\n\t\tif modifications.Action != Restart {\n\t\t\t//do the same for child patches\n\t\t\tp, err := patch.FindOneId(patchID)\n\t\t\tif err != nil {\n\t\t\t\treturn ResourceNotFound.Send(ctx, fmt.Sprintf(\"error finding patch %s: %s\", patchID, err.Error()))\n\t\t\t}\n\t\t\tif p == nil {\n\t\t\t\treturn ResourceNotFound.Send(ctx, fmt.Sprintf(\"patch '%s' not found \", patchID))\n\t\t\t}\n\t\t\tif p.IsParent() {\n\t\t\t\tfor _, childPatchId := range p.Triggers.ChildPatches {\n\t\t\t\t\tp, err := patch.FindOneId(childPatchId)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn ResourceNotFound.Send(ctx, fmt.Sprintf(\"error finding child patch %s: %s\", childPatchId, err.Error()))\n\t\t\t\t\t}\n\t\t\t\t\tif p == nil {\n\t\t\t\t\t\treturn ResourceNotFound.Send(ctx, fmt.Sprintf(\"child patch '%s' not found \", childPatchId))\n\t\t\t\t\t}\n\t\t\t\t\t// only modify the child patch if it is finalized\n\t\t\t\t\tif p.Version != \"\" {\n\t\t\t\t\t\terr = ModifyVersionHandler(ctx, dataConnector, childPatchId, modifications)\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\treturn errors.Wrap(mapHTTPStatusToGqlError(ctx, httpStatus, err), fmt.Sprintf(\"error modifying child patch '%s'\", patchID))\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t}\n\t\t\t}\n\n\t\t}\n\t}\n\n\treturn nil\n}", "func (m *HealthMenstruationDailyEntryORM) ToPB(ctx context.Context) (HealthMenstruationDailyEntry, error) {\n\tto := HealthMenstruationDailyEntry{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tif to.Day, err = 
ptypes1.TimestampProto(*m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = HealthMenstruationDailyEntry_Type(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func DefaultStrictUpdatePeriod(ctx context.Context, in *Period, db *gorm.DB) (*Period, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdatePeriod\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &PeriodORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(PeriodORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(PeriodORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(PeriodORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func TestUpdateCrashedTask(t *testing.T) {\n\trouter := setupRouter()\n\n\t// Create a task with updated information\n\t// Slack - #cobra => nil\n\t// Post - nil => http://example.com/\n\tvar task CrashedDBTask\n\ttask.App = \"gotest-voltron\"\n\ttask.Post = zero.StringFrom(\"http://example.com/\")\n\ttaskBytes, err := json.Marshal(task)\n\tassert.Nil(t, err, \"Converting from CrashedDBTask to JSON should not throw an 
error\")\n\n\treq, _ := http.NewRequest(\"PATCH\", \"/task/crashed\", bytes.NewBuffer(taskBytes))\n\tw := httptest.NewRecorder()\n\trouter.ServeHTTP(w, req)\n\n\tassert.Equal(t, http.StatusCreated, w.Code, \"HTTP response code for PATCH /task/crashed should be 201\")\n\n\t// Check that the new task exists and contains expected data\n\treq, _ = http.NewRequest(\"GET\", \"/task/crashed/gotest-voltron\", nil)\n\tw = httptest.NewRecorder()\n\trouter.ServeHTTP(w, req)\n\n\tassert.Equal(t, http.StatusOK, w.Code, \"HTTP response code for GET /task/crashed/:app should be 200\")\n\n\tvar returnedTask CrashedDBTask\n\terr = json.Unmarshal([]byte(w.Body.String()), &returnedTask)\n\n\t// API sets nil slack channels to #\n\ttask.Slack = zero.StringFrom(\"#\")\n\n\tassert.Nil(t, err, \"Converting from JSON to CrashedDBTask should not throw an error\")\n\tassert.Equal(t, returnedTask.App, task.App, \"Task app name should match\")\n\tassert.Equal(t, returnedTask.Slack, task.Slack, \"Task slack should match\")\n\tassert.Equal(t, returnedTask.Email, task.Email, \"Task email should match\")\n\tassert.Equal(t, returnedTask.Post, task.Post, \"Task post should match\")\n}", "func (a *HyperflexApiService) PatchHyperflexHealthCheckDefinition(ctx context.Context, moid string) ApiPatchHyperflexHealthCheckDefinitionRequest {\n\treturn ApiPatchHyperflexHealthCheckDefinitionRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (api *FoodRecipeAPI) partialRecipeUpdate(w http.ResponseWriter, req *http.Request) {\n\tdefer DrainBody(req)\n\tctx := req.Context()\n\n\tvars := mux.Vars(req)\n\tid := vars[\"id\"]\n\tlogData := log.Data{\"id\": id}\n\n\tvar errorObjects []*models.ErrorObject\n\n\tpatchJSON, recipePatches, err := patch.Get(ctx, req.Body)\n\tif err != nil {\n\t\terrorObjects = append(errorObjects, &models.ErrorObject{Error: err.Error()})\n\t\tErrorResponse(ctx, w, http.StatusBadRequest, &models.ErrorResponse{Errors: errorObjects})\n\t\treturn\n\t}\n\n\t// Validate 
patch request\n\tfor i, recipePatch := range *recipePatches {\n\t\tif err = recipePatch.Validate(nil); err != nil {\n\t\t\tif _, ok := err.(*validator.InvalidValidationError); ok {\n\t\t\t\terrorObjects = append(errorObjects, &models.ErrorObject{Error: errs.ErrInternalServer.Error()})\n\t\t\t\tErrorResponse(ctx, w, http.StatusInternalServerError, &models.ErrorResponse{Errors: errorObjects})\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tfor _, err := range err.(validator.ValidationErrors) {\n\t\t\t\terrorObjects = append(errorObjects, models.HandleValidationErrors(strconv.Itoa(i), err.ActualTag(), err.StructField(), err.Value().(string), err.Param()))\n\t\t\t}\n\t\t}\n\t}\n\tif len(errorObjects) > 0 {\n\t\tErrorResponse(ctx, w, http.StatusBadRequest, &models.ErrorResponse{Errors: errorObjects})\n\t\treturn\n\t}\n\n\t// apply patch against recipe resource\n\tp, err := jsonpatch.DecodePatch(patchJSON)\n\tif err != nil {\n\t\tlog.Error(ctx, \"patch recipe: unable to decode patch\", err)\n\t\terrorObjects = append(errorObjects, &models.ErrorObject{Error: err.Error()})\n\t\tErrorResponse(ctx, w, http.StatusBadRequest, &models.ErrorResponse{Errors: errorObjects})\n\t\treturn\n\t}\n\n\t// find current recipe doc\n\tvar recipe models.Recipe\n\n\tcollection := api.MongoClient.Database(\"food-recipes\").Collection(\"recipes\")\n\tif err = collection.FindOne(ctx, bson.M{\"_id\": id}).Decode(&recipe); err != nil {\n\t\tif err == mongo.ErrNoDocuments {\n\t\t\tlog.Warn(ctx, \"patch recipe: failed to find recipe\", log.FormatErrors([]error{err}), logData)\n\t\t\terrorObjects = append(errorObjects, &models.ErrorObject{Error: errs.ErrRecipeNotFound.Error()})\n\t\t\tErrorResponse(ctx, w, http.StatusNotFound, &models.ErrorResponse{Errors: errorObjects})\n\t\t\treturn\n\t\t}\n\n\t\tlog.Error(ctx, \"patch recipe: failed to find recipe, bad connection?\", err)\n\t\terrorObjects = append(errorObjects, &models.ErrorObject{Error: errs.ErrInternalServer.Error()})\n\t\tErrorResponse(ctx, w, 
http.StatusInternalServerError, &models.ErrorResponse{Errors: errorObjects})\n\t\treturn\n\t}\n\n\tb, err := json.Marshal(recipe)\n\tif err != nil {\n\t\tlog.Error(ctx, \"patch recipe: error returned from json marshal\", err, logData)\n\t\terrorObjects = append(errorObjects, &models.ErrorObject{Error: errs.ErrInternalServer.Error()})\n\t\tErrorResponse(ctx, w, http.StatusInternalServerError, &models.ErrorResponse{Errors: errorObjects})\n\t\treturn\n\t}\n\n\t// apply patch to existing recipe\n\tmodified, err := p.Apply(b)\n\tif err != nil {\n\t\tlog.Error(ctx, \"patch recipe: unable to apply patch to recipe\", err, logData)\n\t\terrorObjects = append(errorObjects, &models.ErrorObject{Error: err.Error()})\n\t\tErrorResponse(ctx, w, http.StatusBadRequest, &models.ErrorResponse{Errors: errorObjects})\n\t\treturn\n\t}\n\n\terr = json.Unmarshal(modified, &recipe)\n\tif err != nil {\n\t\tlog.Error(ctx, \"patch recipe: unmarshal modified recipe into recipe struct\", err, logData)\n\t\terrorObjects = append(errorObjects, &models.ErrorObject{Error: err.Error()})\n\t\tErrorResponse(ctx, w, http.StatusBadRequest, &models.ErrorResponse{Errors: errorObjects})\n\t\treturn\n\t}\n\n\t// store new recipe\n\tif _, err = collection.ReplaceOne(ctx, bson.M{\"_id\": id}, recipe); err != nil {\n\t\tif err == mongo.ErrNoDocuments {\n\t\t\tlog.Error(ctx, \"update recipe: failed to update recipe, recipe deos not exists\", err, logData)\n\t\t\terrorObjects = append(errorObjects, &models.ErrorObject{Error: errs.ErrRecipeNotFound.Error()})\n\t\t\tErrorResponse(ctx, w, http.StatusNotFound, &models.ErrorResponse{Errors: errorObjects})\n\t\t\treturn\n\t\t}\n\n\t\tlog.Error(ctx, \"update recipe: failed to insert recipe\", err, logData)\n\t\terrorObjects = append(errorObjects, &models.ErrorObject{Error: errs.ErrInternalServer.Error()})\n\t\tErrorResponse(ctx, w, http.StatusInternalServerError, &models.ErrorResponse{Errors: errorObjects})\n\t\treturn\n\t}\n\n\tw.Header().Set(\"Content-Type\", 
\"application/json\")\n\tw.WriteHeader(http.StatusOK)\n\n\tlog.Info(ctx, \"update recipe: request successful\", logData)\n}", "func (_m *FakeScheduleService) UpdateAlertRule(key models.AlertRuleKey) {\n\t_m.Called(key)\n}", "func patchResource(mapping *meta.RESTMapping, config *rest.Config, group string,\n\tversion string, namespace string, data []byte) error {\n\trestClient, err := getRESTClient(config, group, version)\n\tif err != nil {\n\t\treturn &kfapis.KfError{\n\t\t\tCode: int(kfapis.INVALID_ARGUMENT),\n\t\t\tMessage: fmt.Sprintf(\"patchResource error: %v\", err),\n\t\t}\n\t}\n\n\tif _, err = restClient.\n\t\tPatch(k8stypes.JSONPatchType).\n\t\tResource(mapping.Resource.Resource).\n\t\tNamespaceIfScoped(namespace, mapping.Scope.Name() == \"namespace\").\n\t\tBody(data).\n\t\tDo().\n\t\tGet(); err == nil {\n\t\treturn nil\n\t} else {\n\t\treturn &kfapis.KfError{\n\t\t\tCode: int(kfapis.INVALID_ARGUMENT),\n\t\t\tMessage: fmt.Sprintf(\"patchResource error: %v\", err),\n\t\t}\n\t}\n}", "func (client ModelClient) UpdateHierarchicalEntityResponder(resp *http.Response) (result OperationStatus, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (m *RiskyUsersItemHistoryRiskyUserHistoryItemItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RiskyUserHistoryItemable, requestConfiguration *RiskyUsersItemHistoryRiskyUserHistoryItemItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RiskyUserHistoryItemable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateRiskyUserHistoryItemFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RiskyUserHistoryItemable), nil\n}", "func (m *ExactMatchDataStoresExactMatchDataStoreItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ExactMatchDataStoreable, requestConfiguration *ExactMatchDataStoresExactMatchDataStoreItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ExactMatchDataStoreable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateExactMatchDataStoreFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ExactMatchDataStoreable), nil\n}", "func (o *RecipeLipid) 
Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\to.UpdatedAt = currTime\n\t}\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\trecipeLipidUpdateCacheMut.RLock()\n\tcache, cached := recipeLipidUpdateCache[key]\n\trecipeLipidUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\trecipeLipidAllColumns,\n\t\t\trecipeLipidPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update recipe_lipid, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"recipe_lipid\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, recipeLipidPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(recipeLipidType, recipeLipidMapping, append(wl, recipeLipidPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update recipe_lipid row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for recipe_lipid\")\n\t}\n\n\tif !cached {\n\t\trecipeLipidUpdateCacheMut.Lock()\n\t\trecipeLipidUpdateCache[key] = 
cache\n\t\trecipeLipidUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "func (me *CHARGES_IMPL) UpdateChargeDueDate (\r\n chargeId string,\r\n body *models_pkg.ChargesDueDateRequest,\r\n idempotencyKey *string) (*models_pkg.ChargesDueDateResponse, error) {\r\n //the endpoint path uri\r\n _pathUrl := \"/Charges/{charge_id}/due-date\"\r\n\r\n //variable to hold errors\r\n var err error = nil\r\n //process optional template parameters\r\n _pathUrl, err = apihelper_pkg.AppendUrlWithTemplateParameters(_pathUrl, map[string]interface{} {\r\n \"charge_id\" : chargeId,\r\n })\r\n if err != nil {\r\n //error in template param handling\r\n return nil, err\r\n }\r\n\r\n //the base uri for api requests\r\n _queryBuilder := configuration_pkg.BASEURI;\r\n\r\n //prepare query string for API call\r\n _queryBuilder = _queryBuilder + _pathUrl\r\n\r\n //validate and preprocess url\r\n _queryBuilder, err = apihelper_pkg.CleanUrl(_queryBuilder)\r\n if err != nil {\r\n //error in url validation or cleaning\r\n return nil, err\r\n }\r\n //prepare headers for the outgoing request\r\n headers := map[string]interface{} {\r\n \"user-agent\" : \"MundiSDK - Go 2.4.5\",\r\n \"accept\" : \"application/json\",\r\n \"content-type\" : \"application/json; charset=utf-8\",\r\n \"Content-Type\" : \"application/json\",\r\n \"idempotency-key\" : apihelper_pkg.ToString(idempotencyKey, \"\"),\r\n }\r\n\r\n //prepare API request\r\n _request := unirest.PatchWithAuth(_queryBuilder, headers, body, me.config.BasicAuthUserName(), me.config.BasicAuthPassword())\r\n //and invoke the API call request to fetch the response\r\n _response, err := unirest.AsString(_request,false);\r\n if err != nil {\r\n //error in API invocation\r\n return nil, err\r\n }\r\n\r\n //error handling using HTTP status codes\r\n if (_response.Code == 400) {\r\n err = apihelper_pkg.NewAPIError(\"Invalid request\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 401) {\r\n err = 
apihelper_pkg.NewAPIError(\"Invalid API key\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 404) {\r\n err = apihelper_pkg.NewAPIError(\"An informed resource was not found\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 412) {\r\n err = apihelper_pkg.NewAPIError(\"Business validation error\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 422) {\r\n err = apihelper_pkg.NewAPIError(\"Contract validation error\", _response.Code, _response.RawBody)\r\n } else if (_response.Code == 500) {\r\n err = apihelper_pkg.NewAPIError(\"Internal server error\", _response.Code, _response.RawBody)\r\n } else if (_response.Code < 200) || (_response.Code > 206) { //[200,206] = HTTP OK\r\n err = apihelper_pkg.NewAPIError(\"HTTP Response Not OK\", _response.Code, _response.RawBody)\r\n }\r\n if(err != nil) {\r\n //error detected in status code validation\r\n return nil, err\r\n }\r\n\r\n //returning the response\r\n var retVal *models_pkg.ChargesDueDateResponse = &models_pkg.ChargesDueDateResponse{}\r\n err = json.Unmarshal(_response.RawBody, &retVal)\r\n\r\n if err != nil {\r\n //error in parsing\r\n return nil, err\r\n }\r\n return retVal, nil\r\n\r\n}", "func (r *Route) Patch(h interface{}) *Route {\n\treturn r.Handle(toHandler(h), \"PATCH\")\n}", "func (srv *UsersService) PatchHandler(ctx *gin.Context) {\n\tlogger := srv.logger.New(\"action\", \"PatchHandler\")\n\tuser := GetRequestedUser(ctx)\n\tif user == nil {\n\t\t// Returns a \"404 StatusNotFound\" response\n\t\tsrv.ResponseService.NotFound(ctx)\n\t\treturn\n\t}\n\n\t// Checks if the query entry is valid\n\tform := &validators.PatchUser{}\n\tif err := ctx.ShouldBindJSON(form); err != nil {\n\t\tsrv.ResponseService.ValidatorErrorResponse(ctx, responses.UnprocessableEntity, err)\n\t\treturn\n\t}\n\n\tcurrentUser := GetCurrentUser(ctx)\n\tif currentUser.UID == user.UID ||\n\t\tcurrentUser.RoleName == \"root\" ||\n\t\tcurrentUser.RoleName == \"admin\" {\n\n\t\tif 
form.FirstName != nil {\n\t\t\tuser.FirstName = *form.FirstName\n\t\t}\n\t\tif form.LastName != nil {\n\t\t\tuser.LastName = *form.LastName\n\t\t}\n\t\tif form.Nickname != nil {\n\t\t\tuser.Nickname = *form.Nickname\n\t\t}\n\n\t\trepo := srv.Repository.GetUsersRepository()\n\t\told, err := repo.FindByUID(user.UID)\n\t\tif err != nil {\n\t\t\tlogger.Error(\"cannot find user\", \"err\", err)\n\t\t\tsrv.ResponseService.NotFound(ctx)\n\t\t\treturn\n\t\t}\n\n\t\t_, err = repo.Update(user)\n\t\tif err != nil {\n\t\t\tlogger.Error(\"cannot update user\", \"err\", err)\n\t\t\tsrv.ResponseService.Error(ctx, responses.CanNotUpdateUser, \"Can't update a user\")\n\t\t\treturn\n\t\t}\n\n\t\tif currentUser.UID != user.UID &&\n\t\t\t(currentUser.RoleName == \"admin\" || currentUser.RoleName == \"root\") {\n\t\t\tsrv.SystemLogsService.LogModifyUserProfileAsync(old, user, currentUser.UID)\n\t\t}\n\t}\n\n\t// Returns a \"204 StatusNoContent\" response\n\tctx.JSON(http.StatusNoContent, nil)\n}", "func Patch(path string, fn http.HandlerFunc, c ...alice.Constructor) {\n\trecord(\"PATCH\", path)\n\n\tinfoMutex.Lock()\n\tr.PATCH(path, Handler(alice.New(c...).ThenFunc(fn)))\n\tinfoMutex.Unlock()\n}", "func (s *Service) patchHalResource(ctx context.Context, resourceName, url string, r interface{}, pf patchFunction) ([]byte, *status.Status) {\n\n\tvar body []byte\n\tvar code int\n\tvar err error\n\n\tb := new(bytes.Buffer)\n\tjson.NewEncoder(b).Encode(r)\n\n\tbody, code, err = pf(ctx, url, b, \"application/json\")\n\tif err != nil {\n\t\tlog.WithFields(event.Fields{\n\t\t\t\"resourceName\": resourceName,\n\t\t\t\"code\": code,\n\t\t\t\"url\": url,\n\t\t}).Error(\"Can not patch HAL resource: \" + err.Error())\n\t\treturn []byte{}, status.NewStatus(body, code, \"Can not modify resource \"+resourceName)\n\t}\n\n\t// A PATCH request should return a value in range of [200,300[\n\tif code < http.StatusOK || code >= http.StatusMultipleChoices 
{\n\t\tlog.WithFields(event.Fields{\n\t\t\t\"resourceName\": resourceName,\n\t\t\t\"code\": code,\n\t\t\t\"url\": url,\n\t\t}).Error(\"Can not patch HAL resource\")\n\t\treturn []byte{}, status.NewStatus(body, code, \"Can not modify resource \"+resourceName)\n\t}\n\treturn body, nil\n}", "func updateFunc(cmd *cobra.Command, args []string) {\n\tdomain := cmd.Flag(\"domain\").Value.String()\n\t_type := cmd.Flag(\"type\").Value.String()\n\tstatus := cmd.Flag(\"status\").Value.String()\n\tline := cmd.Flag(\"line\").Value.String()\n\trr := cmd.Flag(\"rr\").Value.String()\n\tvalue := cmd.Flag(\"value\").Value.String()\n\tlid := cmd.Flag(\"line-id\").Value.String()\n\tid, _ := cmd.Flags().GetUint64(\"id\")\n\tttl, _ := cmd.Flags().GetUint64(\"ttl\")\n\tmx, _ := cmd.Flags().GetUint64(\"mx\")\n\tdid, _ := cmd.Flags().GetUint64(\"domain-id\")\n\tweight, _ := cmd.Flags().GetUint64(\"weight\")\n\n\tif domain == \"\" || id == 0 || _type == \"\" || value == \"\" || line == \"\" {\n\t\tpanic(\"Domain or RecordID or Type or Value or RecordLine is mandatory for this action.\")\n\t}\n\n\tresp, err := app.Client.UpdateRecord(\n\t\tdomain, rr, _type, value, status, line, lid,\n\t\tid, ttl, mx, did, weight,\n\t)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tdata, _ := convert.StringToJSONWithIndent(string(resp))\n\tfmt.Println(data)\n}", "func (m *ItemSitesItemAnalyticsItemActivityStatsItemActivityStatItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ItemActivityStatable, requestConfiguration *ItemSitesItemAnalyticsItemActivityStatsItemActivityStatItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ItemActivityStatable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": 
ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateItemActivityStatFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ItemActivityStatable), nil\n}", "func patch(newObj runtime.Object, existingObj runtime.Object, c client.Client) error {\n\tnewObjJSON, _ := apijson.Marshal(newObj)\n\tkey, _ := client.ObjectKeyFromObject(newObj)\n\t_, isUnstructured := newObj.(runtime.Unstructured)\n\t_, isCRD := newObj.(*apiextv1beta1.CustomResourceDefinition)\n\n\tif isUnstructured || isCRD || isKudoType(newObj) {\n\t\t// strategic merge patch is not supported for these types, falling back to merge patch\n\t\terr := c.Patch(context.TODO(), newObj, client.ConstantPatch(types.MergePatchType, newObjJSON))\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to apply merge patch to object %s/%s: %w\", key.Name, key.Name, err)\n\t\t}\n\t} else {\n\t\terr := c.Patch(context.TODO(), existingObj, client.ConstantPatch(types.StrategicMergePatchType, newObjJSON))\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to apply StrategicMergePatch to object %s/%s: %w\", key.Namespace, key.Name, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (m *MobileAppTroubleshootingEventsMobileAppTroubleshootingEventItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.MobileAppTroubleshootingEventable, requestConfiguration 
*MobileAppTroubleshootingEventsMobileAppTroubleshootingEventItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.MobileAppTroubleshootingEventable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateMobileAppTroubleshootingEventFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.MobileAppTroubleshootingEventable), nil\n}", "func (dn *Daemon) updateHypershift(oldConfig, newConfig *mcfgv1.MachineConfig, diff *machineConfigDiff) (retErr error) {\n\toldIgnConfig, err := ctrlcommon.ParseAndConvertConfig(oldConfig.Spec.Config.Raw)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"parsing old Ignition config failed: %w\", err)\n\t}\n\tnewIgnConfig, err := ctrlcommon.ParseAndConvertConfig(newConfig.Spec.Config.Raw)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"parsing new Ignition config failed: %w\", err)\n\t}\n\n\t// update files on disk that need updating\n\t// We should't skip the certificate write in HyperShift since it does not run the extra daemon process\n\tif err := dn.updateFiles(oldIgnConfig, newIgnConfig, false); err != nil {\n\t\treturn err\n\t}\n\n\tdefer func() {\n\t\tif retErr != nil {\n\t\t\tif err := dn.updateFiles(newIgnConfig, oldIgnConfig, false); err != nil {\n\t\t\t\terrs := 
kubeErrs.NewAggregate([]error{err, retErr})\n\t\t\t\tretErr = fmt.Errorf(\"error rolling back files writes: %w\", errs)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n\n\tif err := dn.updateSSHKeys(newIgnConfig.Passwd.Users, oldIgnConfig.Passwd.Users); err != nil {\n\t\treturn err\n\t}\n\n\tdefer func() {\n\t\tif retErr != nil {\n\t\t\tif err := dn.updateSSHKeys(newIgnConfig.Passwd.Users, oldIgnConfig.Passwd.Users); err != nil {\n\t\t\t\terrs := kubeErrs.NewAggregate([]error{err, retErr})\n\t\t\t\tretErr = fmt.Errorf(\"error rolling back SSH keys updates: %w\", errs)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n\n\tif dn.os.IsCoreOSVariant() {\n\t\tcoreOSDaemon := CoreOSDaemon{dn}\n\t\tif err := coreOSDaemon.applyOSChanges(*diff, oldConfig, newConfig); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tdefer func() {\n\t\t\tif retErr != nil {\n\t\t\t\tif err := coreOSDaemon.applyOSChanges(*diff, newConfig, oldConfig); err != nil {\n\t\t\t\t\terrs := kubeErrs.NewAggregate([]error{err, retErr})\n\t\t\t\t\tretErr = fmt.Errorf(\"error rolling back changes to OS: %w\", errs)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}()\n\t} else {\n\t\tklog.Info(\"updating the OS on non-CoreOS nodes is not supported\")\n\t}\n\n\tif err := UpdateTuningArgs(KernelTuningFile, CmdLineFile); err != nil {\n\t\treturn err\n\t}\n\n\tklog.Info(\"Successfully completed Hypershift config update\")\n\treturn nil\n}", "func (c *FakeHelms) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1alpha1.Helm, err error) {\n\tobj, err := c.Fake.\n\t\tInvokes(testing.NewPatchSubresourceAction(helmsResource, c.ns, name, pt, data, subresources...), &v1alpha1.Helm{})\n\n\tif obj == nil {\n\t\treturn nil, err\n\t}\n\treturn obj.(*v1alpha1.Helm), err\n}", "func (r *DeviceHealthScriptRunSummaryRequest) Update(ctx context.Context, reqObj *DeviceHealthScriptRunSummary) error {\n\treturn r.JSONRequest(ctx, \"PATCH\", \"\", reqObj, nil)\n}", "func (s *Store) UpdateSilencedEntry(ctx context.Context, 
silenced *corev2.Silenced) error {\n\tif err := silenced.Validate(); err != nil {\n\t\treturn &store.ErrNotValid{Err: err}\n\t}\n\n\tif silenced.ExpireAt == 0 && silenced.Expire > 0 {\n\t\tstart := time.Now()\n\t\tif silenced.Begin > 0 {\n\t\t\tstart = time.Unix(silenced.Begin, 0)\n\t\t}\n\t\tsilenced.ExpireAt = start.Add(time.Duration(silenced.Expire) * time.Second).Unix()\n\t}\n\n\tsilencedBytes, err := proto.Marshal(silenced)\n\tif err != nil {\n\t\treturn &store.ErrEncode{Err: err}\n\t}\n\tcmp := clientv3.Compare(clientv3.Version(getNamespacePath(silenced.Namespace)), \">\", 0)\n\treq := clientv3.OpPut(GetSilencedPath(ctx, silenced.Name), string(silencedBytes))\n\tvar res *clientv3.TxnResponse\n\terr = kvc.Backoff(ctx).Retry(func(n int) (done bool, err error) {\n\t\tres, err = s.client.Txn(ctx).If(cmp).Then(req).Commit()\n\t\treturn kvc.RetryRequest(n, err)\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\tif !res.Succeeded {\n\t\treturn &store.ErrNamespaceMissing{Namespace: silenced.Namespace}\n\t}\n\n\treturn nil\n}", "func (a *HyperflexApiService) PatchHyperflexServerFirmwareVersionEntryExecute(r ApiPatchHyperflexServerFirmwareVersionEntryRequest) (*HyperflexServerFirmwareVersionEntry, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexServerFirmwareVersionEntry\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexServerFirmwareVersionEntry\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/ServerFirmwareVersionEntries/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := 
url.Values{}\n\tif r.hyperflexServerFirmwareVersionEntry == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexServerFirmwareVersionEntry is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexServerFirmwareVersionEntry\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = 
a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := 
&GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (engine *Engine) PATCH(relativePath string, handlers ...HandlerFunc) IRoutes {\n\treturn engine.handle(http.MethodPatch, relativePath, handlers)\n}", "func (a *HyperflexApiService) PatchHyperflexHealthCheckDefinitionExecute(r ApiPatchHyperflexHealthCheckDefinitionRequest) (*HyperflexHealthCheckDefinition, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexHealthCheckDefinition\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexHealthCheckDefinition\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/HealthCheckDefinitions/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexHealthCheckDefinition == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexHealthCheckDefinition is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := 
selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexHealthCheckDefinition\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif 
localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (t *MedChain) updateHospital(stub shim.ChaincodeStubInterface, args []string) peer.Response {\n\t\t// ==== Input sanitation ====\n\t\tfmt.Println(\"- start updateHospital\")\n\n\t\t// check if all the args are send\n\t\tif len(args) != 4 {\n\t\t\treturn shim.Error(\"Incorrect number of arguments, Required 4 arguments\")\n\t\t}\n\n\t\t// check if the args are empty\n\t\tfor i := 0; i < len(args); i++ {\n\t\t\tif len(args[i]) <= 0 {\n\t\t\t\treturn shim.Error(\"argument \"+ string(i+1) + \" must be a non-empty 
string\")\n\t\t\t}\n\t\t}\n\n\t\tgetAssetAsBytes, errT := stub.GetState(args[0])\n\n\t\tif errT != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Error : Cannot find Hospital %s\" , errT))\n\t\t}\n\n\t\tif getAssetAsBytes == nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Cannot find asset Hospital with ID %s\" , args[0]))\n\t\t}\n\n\t\tvar obj = Hospital{}\n\n\t\tjson.Unmarshal(getAssetAsBytes, &obj)\n\t\tobj.HospitalName = args[1]\n\t\tobj.HospitalAddress = args[2]\n\t\tobj.HospitalPhone = args[3]\n\t\tcomAssetAsBytes, errMarshal := json.Marshal(obj)\n\n\t\tif errMarshal != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Marshal Error: %s\", errMarshal))\n\t\t}\n\n\t\terrPut := stub.PutState(obj.Hospital_ID, comAssetAsBytes)\n\n\t\tif errPut != nil {\n\t\t\treturn shim.Error(fmt.Sprintf(\"Failed to update Hospital with ID %s\", args[0]))\n\t\t}\n\n\t\tfmt.Println(\"Hospital asset with ID %s was updated \\n %v\", args[0], obj)\n\n\t\treturn shim.Success(comAssetAsBytes)\n\t}", "func (m *PrivilegedSignupStatusItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable, requestConfiguration *PrivilegedSignupStatusItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, 
ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreatePrivilegedSignupStatusFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable), nil\n}", "func (r *DeviceManagementReportScheduleRequest) Update(ctx context.Context, reqObj *DeviceManagementReportSchedule) error {\n\treturn r.JSONRequest(ctx, \"PATCH\", \"\", reqObj, nil)\n}", "func (r *ImpossibleTravelRiskEventRequest) Update(ctx context.Context, reqObj *ImpossibleTravelRiskEvent) error {\n\treturn r.JSONRequest(ctx, \"PATCH\", \"\", reqObj, nil)\n}", "func (m *HealthMenstruationDailyEntry) ToORM(ctx context.Context) (HealthMenstruationDailyEntryORM, error) {\n\tto := HealthMenstruationDailyEntryORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.Day = &t\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = int32(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (client LabClient) PatchResourceResponder(resp *http.Response) (result Lab, err error) {\n\terr = 
autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (w *Worker) Patch(c *http.Client, url string, data interface{}, bind interface{}) (int, error) {\n\tbs, err := json.Marshal(data)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treq, err := http.NewRequest(\"PATCH\", url, bytes.NewReader(bs))\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\tres, err := c.Do(req)\n\tif err != nil {\n\t\tif res != nil {\n\t\t\tioutil.ReadAll(res.Body)\n\t\t\tres.Body.Close()\n\t\t}\n\t\treturn 0, err\n\t}\n\tdefer res.Body.Close()\n\terr = json.NewDecoder(res.Body).Decode(bind)\n\tif res.StatusCode == http.StatusNoContent || bind == nil {\n\t\treturn res.StatusCode, nil\n\t}\n\treturn res.StatusCode, err\n}", "func (c *Controller) patchPlatform(r *web.Request) (*web.Response, error) {\n\tplatformID := r.PathParams[reqPlatformID]\n\tctx := r.Context()\n\tlog.C(ctx).Debugf(\"Updating platform with id %s\", platformID)\n\n\tplatform, err := c.PlatformStorage.Get(ctx, platformID)\n\tif err != nil {\n\t\treturn nil, util.HandleStorageError(err, \"platform\")\n\t}\n\n\tcreatedAt := platform.CreatedAt\n\n\tif err := util.BytesToObject(r.Body, platform); err != nil {\n\t\treturn nil, err\n\t}\n\n\tplatform.ID = platformID\n\tplatform.CreatedAt = createdAt\n\tplatform.UpdatedAt = time.Now().UTC()\n\n\tif err := c.PlatformStorage.Update(ctx, platform); err != nil {\n\t\treturn nil, util.HandleStorageError(err, \"platform\")\n\t}\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn util.NewJSONResponse(http.StatusOK, platform)\n}", "func (service *EmployeeService) PatchEmployeeDetails(employeeID string, employeeDetails models.Employee) error {\n\tcollection := 
service.mongoClient.Database(DbName).Collection(CollectionName)\n\tupdatesToBePerformed := bson.M{}\n\tupdatesToBePerformed[\"employeeid\"] = employeeID\n\tif employeeDetails.Department != nil {\n\t\tupdatesToBePerformed[\"department\"] = employeeDetails.Department\n\t}\n\n\tif employeeDetails.Name != nil {\n\t\tupdatesToBePerformed[\"name\"] = employeeDetails.Name\n\t}\n\n\tif employeeDetails.Skills != nil {\n\t\tupdatesToBePerformed[\"skills\"] = employeeDetails.Skills\n\t}\n\n\tif employeeDetails.Address != nil {\n\t\taddress := models.Address{}\n\t\tif employeeDetails.Address.City != nil {\n\t\t\taddress.City = employeeDetails.Address.City\n\t\t}\n\n\t\tif employeeDetails.Address.Country != nil {\n\t\t\taddress.Country = employeeDetails.Address.Country\n\t\t}\n\n\t\tif employeeDetails.Address.DoorNo != nil {\n\t\t\taddress.DoorNo = employeeDetails.Address.DoorNo\n\t\t}\n\n\t\tif employeeDetails.Address.State != nil {\n\t\t\taddress.State = employeeDetails.Address.State\n\t\t}\n\n\t\tupdatesToBePerformed[\"address\"] = address\n\t}\n\n\tif employeeDetails.Status != nil {\n\t\tupdatesToBePerformed[\"status\"] = employeeDetails.Status\n\t}\n\n\t// consolidatedMap(&updatesToBePerformed, employeeDetails)\n\n\tresult, err := collection.UpdateOne(\n\t\tcontext.Background(),\n\t\tbson.M{\"employeeid\": employeeID},\n\t\tbson.M{\n\t\t\t\"$set\": updatesToBePerformed,\n\t\t})\n\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\tfmt.Println(result)\n\n\treturn nil\n}", "func (c *configAuditReports) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.ConfigAuditReport, err error) {\n\tresult = &v1alpha1.ConfigAuditReport{}\n\terr = c.client.Patch(pt).\n\t\tNamespace(c.ns).\n\t\tResource(\"configauditreports\").\n\t\tName(name).\n\t\tSubResource(subresources...).\n\t\tVersionedParams(&opts, scheme.ParameterCodec).\n\t\tBody(data).\n\t\tDo(ctx).\n\t\tInto(result)\n\treturn\n}", "func (hc 
*LegacyHealthCheckImpl) updateHealth(ts *LegacyTabletStats, conn queryservice.QueryService) {\n\t// Unconditionally send the received update at the end.\n\tdefer func() {\n\t\tif hc.listener != nil {\n\t\t\thc.listener.StatsUpdate(ts)\n\t\t}\n\t}()\n\n\thc.mu.Lock()\n\tth, ok := hc.addrToHealth[ts.Key]\n\tif !ok {\n\t\t// This can happen on delete because the entry is removed first,\n\t\t// or if LegacyHealthCheckImpl has been closed.\n\t\thc.mu.Unlock()\n\t\treturn\n\t}\n\toldts := th.latestTabletStats\n\tth.latestTabletStats = *ts\n\tth.conn = conn\n\thc.mu.Unlock()\n\n\t// In the case where a tablet changes type (but not for the\n\t// initial message), we want to log it, and maybe advertise it too.\n\tif oldts.Target.TabletType != topodatapb.TabletType_UNKNOWN && oldts.Target.TabletType != ts.Target.TabletType {\n\t\t// Log and maybe notify\n\t\tlog.Infof(\"HealthCheckUpdate(Type Change): %v, tablet: %s, target %+v => %+v, reparent time: %v\",\n\t\t\toldts.Name, topotools.TabletIdent(oldts.Tablet), topotools.TargetIdent(oldts.Target), topotools.TargetIdent(ts.Target), ts.TabletExternallyReparentedTimestamp)\n\t\tif hc.listener != nil && hc.sendDownEvents {\n\t\t\toldts.Up = false\n\t\t\thc.listener.StatsUpdate(&oldts)\n\t\t}\n\n\t\t// Track how often a tablet gets promoted to master. 
It is used for\n\t\t// comparing against the variables in go/vtgate/buffer/variables.go.\n\t\tif oldts.Target.TabletType != topodatapb.TabletType_MASTER && ts.Target.TabletType == topodatapb.TabletType_MASTER {\n\t\t\thcMasterPromotedCounters.Add([]string{ts.Target.Keyspace, ts.Target.Shard}, 1)\n\t\t}\n\t}\n}", "func (m *ItemCalendarViewBookingAppointmentItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.BookingAppointmentable, requestConfiguration *ItemCalendarViewBookingAppointmentItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.BookingAppointmentable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateBookingAppointmentFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.BookingAppointmentable), nil\n}", "func DefaultPatchPeriod(ctx context.Context, in *Period, updateMask *field_mask.FieldMask, db *gorm.DB) (*Period, error) {\n\tif in == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar pbObj Period\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(PeriodWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := 
DefaultReadPeriod(ctx, &Period{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(PeriodWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskPeriod(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(PeriodWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdatePeriod(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(PeriodWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func (g *Group) PATCH(path string, h Handler, gases ...Gas) {\n\tg.Air.PATCH(g.Prefix+path, h, append(g.Gases, gases...)...)\n}", "func (c *Client) ModifyHealthCheck(args *ModifyHealthCheckArgs) (*ModifyHealthCheckResponse, error) {\n\tresponse := ModifyHealthCheckResponse{}\n\terr := c.Invoke(\"ModifyHealthCheck\", args, &response)\n\tif err == nil {\n\t\treturn &response, nil\n\t}\n\treturn nil, err\n}", "func (o *CurrentChartDataMinutely) Update(exec boil.Executor, columns boil.Columns) (int64, error) {\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\to.UpdatedAt = currTime\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tcurrentChartDataMinutelyUpdateCacheMut.RLock()\n\tcache, cached := currentChartDataMinutelyUpdateCache[key]\n\tcurrentChartDataMinutelyUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := 
columns.UpdateColumnSet(\n\t\t\tcurrentChartDataMinutelyColumns,\n\t\t\tcurrentChartDataMinutelyPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update current_chart_data_minutely, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"current_chart_data_minutely\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, currentChartDataMinutelyPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(currentChartDataMinutelyType, currentChartDataMinutelyMapping, append(wl, currentChartDataMinutelyPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tvar result sql.Result\n\tresult, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update current_chart_data_minutely row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for current_chart_data_minutely\")\n\t}\n\n\tif !cached {\n\t\tcurrentChartDataMinutelyUpdateCacheMut.Lock()\n\t\tcurrentChartDataMinutelyUpdateCache[key] = cache\n\t\tcurrentChartDataMinutelyUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(exec)\n}", "func (r *Route) Patch(h http.Handler) *Route {\n\tr.Add(\"PATCH\", h)\n\n\treturn r\n}", "func (client JobClient) UpdateResponder(resp *http.Response) (result JobResourceDescription, err error) {\n err = autorest.Respond(\n resp,\n 
azure.WithErrorUnlessStatusCode(http.StatusOK,http.StatusCreated,http.StatusAccepted),\n autorest.ByUnmarshallingJSON(&result),\n autorest.ByClosing())\n result.Response = autorest.Response{Response: resp}\n return\n }", "func (du *DayUpdate) Save(ctx context.Context) (int, error) {\n\n\tvar (\n\t\terr error\n\t\taffected int\n\t)\n\tif len(du.hooks) == 0 {\n\t\taffected, err = du.sqlSave(ctx)\n\t} else {\n\t\tvar mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {\n\t\t\tmutation, ok := m.(*DayMutation)\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected mutation type %T\", m)\n\t\t\t}\n\t\t\tdu.mutation = mutation\n\t\t\taffected, err = du.sqlSave(ctx)\n\t\t\tmutation.done = true\n\t\t\treturn affected, err\n\t\t})\n\t\tfor i := len(du.hooks) - 1; i >= 0; i-- {\n\t\t\tmut = du.hooks[i](mut)\n\t\t}\n\t\tif _, err := mut.Mutate(ctx, du.mutation); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\treturn affected, err\n}", "func (m *ReportsRequestBuilder) Patch(ctx context.Context, body i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable, requestConfiguration *ReportsRequestBuilderPatchRequestConfiguration)(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.CreateReportsFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, 
nil\n }\n return res.(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable), nil\n}", "func UpdateMeeting(c *gin.Context) {\n // Get the meeting to be updated\n var meeting models.Meeting\n if err := models.DB.First(&meeting, \"id = ?\", c.Param(\"id\")).Error; err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n // Validate input\n var input UpdateMeetingInput\n if err := c.ShouldBindJSON(&input); err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n models.DB.Model(&meeting).Updates(input)\n\n c.JSON(http.StatusOK, gin.H{\"data\": meeting})\n}", "func (k Keeper) UpdateDailyPercent(ctx sdk.Context, addr sdk.AccAddress, coin coins.Coin) {\n\tbalance := k.BankKeeper.GetPosminableBalance(ctx, addr, coin)\n\n\tposmining := k.GetPosmining(ctx, addr, coin)\n\n\tnewDailyPercent := coin.GetDailyPercent(balance)\n\n\tif !posmining.DailyPercent.Equal(newDailyPercent) {\n\t\tposmining.DailyPercent = newDailyPercent\n\n\t\tk.SetPosmining(ctx, posmining, coin)\n\t}\n}", "func DefaultStrictUpdateUserInfo(ctx context.Context, in *UserInfo, db *gorm.DB) (*UserInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateUserInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &UserInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := 
interface{}(&ormObj).(UserInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func (m *ManagedTenantsManagementActionsManagementActionItemRequestBuilder) Patch(ctx context.Context, body i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagementActionable, requestConfiguration *ManagedTenantsManagementActionsManagementActionItemRequestBuilderPatchRequestConfiguration)(i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagementActionable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.CreateManagementActionFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagementActionable), nil\n}", "func (o *RentalRower) Update(exec boil.Executor, columns boil.Columns) (int64, error) {\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\to.UpdatedAt = currTime\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\trentalRowerUpdateCacheMut.RLock()\n\tcache, cached := rentalRowerUpdateCache[key]\n\trentalRowerUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := 
columns.UpdateColumnSet(\n\t\t\trentalRowerColumns,\n\t\t\trentalRowerPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update rental_rowers, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"rental_rowers\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, rentalRowerPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(rentalRowerType, rentalRowerMapping, append(wl, rentalRowerPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tvar result sql.Result\n\tresult, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update rental_rowers row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for rental_rowers\")\n\t}\n\n\tif !cached {\n\t\trentalRowerUpdateCacheMut.Lock()\n\t\trentalRowerUpdateCache[key] = cache\n\t\trentalRowerUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(exec)\n}", "func (r *DeviceHealthScriptRequest) Update(ctx context.Context, reqObj *DeviceHealthScript) error {\n\treturn r.JSONRequest(ctx, \"PATCH\", \"\", reqObj, nil)\n}", "func (m *VirtualEventsWebinarsItemSessionsItemVirtualAppointmentRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.VirtualAppointmentable, requestConfiguration 
*VirtualEventsWebinarsItemSessionsItemVirtualAppointmentRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.VirtualAppointmentable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateVirtualAppointmentFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.VirtualAppointmentable), nil\n}", "func Patch(path string, fn http.HandlerFunc, c ...alice.Constructor) {\n\tinfoMutex.Lock()\n\trecord(\"PATCH\", path)\n\tr.Patch(path, alice.New(c...).ThenFunc(fn).(http.HandlerFunc))\n\tinfoMutex.Unlock()\n}", "func (client ModelClient) UpdateHierarchicalEntityChildResponder(resp *http.Response) (result OperationStatus, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func modifyAppConfigHandler(ctx *gin.Context) {\n log.Info(fmt.Sprintf(\"received request to modify config %s\", ctx.Param(\"appId\")))\n var request struct{Operation []map[string]interface{} `json:\"operation\" binding:\"required\"`}\n // parse config from request body\n if err := ctx.ShouldBind(&request); err != nil {\n 
log.Error(fmt.Errorf(\"unable to extract JSON Patch from body: %+v\", err))\n ctx.JSON(http.StatusBadRequest, gin.H{\n \"status_code\": http.StatusBadRequest, \"message\": \"Invalid JSON request body\"})\n return\n }\n // get app ID from path and convert to UUID\n appId, err := uuid.Parse(ctx.Param(\"appId\"))\n if err != nil {\n log.Error(fmt.Errorf(\"unable to app ID: %+v\", err))\n ctx.JSON(http.StatusBadRequest, gin.H{\n \"status_code\": http.StatusBadRequest, \"message\": \"Invalid app ID\"})\n return\n }\n\n // insert new config item into database\n db, _ := ctx.MustGet(\"db\").(*Persistence)\n current, err := db.GetConfigByAppId(appId)\n if err != nil {\n switch err {\n case ErrAppNotFound:\n log.Warn(fmt.Sprintf(\"cannot find config for app %s\", appId))\n ctx.JSON(http.StatusNotFound, gin.H{\n \"http_code\": http.StatusNotFound, \"message\": \"Cannot find config for app\"})\n default:\n log.Error(fmt.Errorf(\"unable to retrieve config from database: %+v\", err))\n ctx.JSON(http.StatusInternalServerError, gin.H{\n \"http_code\": http.StatusInternalServerError, \"message\": \"Internal server error\"})\n }\n return\n }\n\n // perform JSON patch on config\n updated, err := PatchConfig(current, request.Operation)\n if err != nil {\n switch err {\n case ErrInvalidJSONConfig, ErrInvalidPatch:\n log.Warn(fmt.Sprintf(\"cannot process JSON Patch %+v\", err))\n ctx.JSON(http.StatusBadRequest, gin.H{\n \"http_code\": http.StatusBadRequest, \"message\": \"Invalid JSON Patch Operation\"})\n default:\n log.Error(fmt.Errorf(\"unable to apply JSON Patch: %+v\", err))\n ctx.JSON(http.StatusInternalServerError, gin.H{\n \"http_code\": http.StatusInternalServerError, \"message\": \"Internal server error\"})\n }\n return\n }\n\n // update config in postgres database\n if err := db.UpdateConfigByAppId(appId, updated); err != nil {\n log.Error(fmt.Errorf(\"unable to updated config in database: %+v\", err))\n ctx.JSON(http.StatusInternalServerError, gin.H{\n \"http_code\": 
http.StatusInternalServerError, \"message\": \"Internal server error\"})\n return\n }\n ctx.JSON(http.StatusOK, gin.H{\n \"http_code\": http.StatusOK, \"message\": \"Successfully updated config\"})\n}", "func (e *Engine) PATCH(path string, handler Handler) {\n\te.registerRoute(http.MethodPatch, path, handler)\n}", "func (op *updateHealthCheckUpdateOperation) do(ctx context.Context, r *HealthCheck, c *Client) error {\n\t_, err := c.GetHealthCheck(ctx, r.urlNormalized())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tu, err := r.updateURL(c.Config.BasePath, \"update\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq, err := newUpdateHealthCheckUpdateRequest(ctx, r, c)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tc.Config.Logger.Infof(\"Created update: %#v\", req)\n\tbody, err := marshalUpdateHealthCheckUpdateRequest(c, req)\n\tif err != nil {\n\t\treturn err\n\t}\n\tresp, err := dcl.SendRequest(ctx, c.Config, \"PATCH\", u, bytes.NewBuffer(body), c.Config.RetryProvider)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar o operations.ComputeOperation\n\tif err := dcl.ParseResponse(resp.Response, &o); err != nil {\n\t\treturn err\n\t}\n\terr = o.Wait(ctx, c.Config, \"https://www.googleapis.com/compute/v1/\", \"GET\")\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (m *ManagedTenantsManagedTenantTicketingEndpointsManagedTenantTicketingEndpointItemRequestBuilder) Patch(ctx context.Context, body i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagedTenantTicketingEndpointable, requestConfiguration *ManagedTenantsManagedTenantTicketingEndpointsManagedTenantTicketingEndpointItemRequestBuilderPatchRequestConfiguration)(i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagedTenantTicketingEndpointable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.CreateManagedTenantTicketingEndpointFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagedTenantTicketingEndpointable), nil\n}", "func (svc record) procUpdate(invokerID uint64, m *types.Module, upd *types.Record, old *types.Record) *types.RecordValueErrorSet {\n\t// Mark all values as updated (new)\n\tupd.Values.SetUpdatedFlag(true)\n\n\t// First sanitization\n\t//\n\t// Before values are merged with existing data and\n\t// sent to automation scripts (if any)\n\t// we need to make sure it does not get sanitized data\n\tupd.Values = svc.sanitizer.Run(m, upd.Values)\n\n\t// Copy values to updated record\n\t// to make sure nobody slips in something we do not want\n\tupd.CreatedAt = old.CreatedAt\n\tupd.CreatedBy = old.CreatedBy\n\tupd.UpdatedAt = nowPtr()\n\tupd.UpdatedBy = invokerID\n\tupd.DeletedAt = old.DeletedAt\n\tupd.DeletedBy = old.DeletedBy\n\n\t// Merge new (updated) values with old ones\n\t// This way we get list of updated, stale and deleted values\n\t// that we can selectively update in the repository\n\tupd.Values = old.Values.Merge(upd.Values)\n\n\tif upd.OwnedBy == 0 {\n\t\tif old.OwnedBy > 0 {\n\t\t\t// Owner not set/send in the payload\n\t\t\t//\n\t\t\t// Fallback to old owner (if set)\n\t\t\tupd.OwnedBy = old.OwnedBy\n\t\t} else {\n\t\t\t// If od owner is not set, make current user\n\t\t\t// the owner of the record\n\t\t\tupd.OwnedBy = 
invokerID\n\t\t}\n\t}\n\n\t// Run validation of the updated records\n\treturn svc.validator.Run(m, upd)\n}", "func Patch() int {\n\treturn patch\n}", "func (m *ItemOnlineMeetingsItemRegistrationRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationable, requestConfiguration *ItemOnlineMeetingsItemRegistrationRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateMeetingRegistrationFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationable), nil\n}" ]
[ "0.7222593", "0.7203764", "0.71346027", "0.61198723", "0.611169", "0.61003435", "0.60970485", "0.5833735", "0.56815296", "0.5427646", "0.5337418", "0.5189905", "0.51254106", "0.5099041", "0.5030987", "0.5012633", "0.5009468", "0.49955863", "0.49036223", "0.490358", "0.48860726", "0.48621657", "0.4861886", "0.48563743", "0.48387185", "0.483586", "0.48273245", "0.48189846", "0.48164183", "0.48127258", "0.4807634", "0.47995535", "0.47881183", "0.47769138", "0.47615167", "0.4760432", "0.47491086", "0.47487918", "0.4740121", "0.4727007", "0.47220367", "0.47107896", "0.4708286", "0.4687348", "0.46809092", "0.46786997", "0.46737066", "0.46666843", "0.4663873", "0.46611065", "0.46582687", "0.46530327", "0.4639478", "0.4634715", "0.4633626", "0.46330494", "0.4627864", "0.4625547", "0.4623128", "0.4609028", "0.46066672", "0.46035686", "0.46028784", "0.46025014", "0.4600517", "0.45998132", "0.45845866", "0.45841023", "0.45790523", "0.45788875", "0.45787808", "0.45767757", "0.4576544", "0.4572145", "0.45694575", "0.45678172", "0.45649534", "0.45588264", "0.45578912", "0.45490512", "0.45349866", "0.45311055", "0.45233712", "0.45230585", "0.4514821", "0.45147046", "0.4514674", "0.4503778", "0.4501267", "0.4500857", "0.44984233", "0.4495083", "0.44936582", "0.4493436", "0.44929913", "0.44928384", "0.44897166", "0.4489202", "0.44878206", "0.4477194" ]
0.7921038
0
DefaultPatchSetHealthMenstruationDailyEntry executes a bulk gorm update call with patch behavior
func DefaultPatchSetHealthMenstruationDailyEntry(ctx context.Context, objects []*HealthMenstruationDailyEntry, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationDailyEntry, error) { if len(objects) != len(updateMasks) { return nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects)) } results := make([]*HealthMenstruationDailyEntry, 0, len(objects)) for i, patcher := range objects { pbResponse, err := DefaultPatchHealthMenstruationDailyEntry(ctx, patcher, updateMasks[i], db) if err != nil { return nil, err } results = append(results, pbResponse) } return results, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultPatchHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationDailyEntry\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationDailyEntry(ctx, &HealthMenstruationDailyEntry{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationDailyEntry(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationDailyEntryWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx context.Context, patchee *HealthMenstruationDailyEntry, patcher *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil 
{\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Day\" {\n\t\t\tpatchee.Day = patcher.Day\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"IntensityPercentage\" {\n\t\t\tpatchee.IntensityPercentage = patcher.IntensityPercentage\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Type\" {\n\t\t\tpatchee.Type = patcher.Type\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Manual\" {\n\t\t\tpatchee.Manual = patcher.Manual\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"BasedOnPrediction\" {\n\t\t\tpatchee.BasedOnPrediction = patcher.BasedOnPrediction\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultStrictUpdateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationDailyEntry\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationDailyEntryORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = 
hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func DefaultListHealthMenstruationDailyEntry(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationDailyEntry, error) {\n\tin := HealthMenstruationDailyEntry{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationDailyEntryORM{}, &HealthMenstruationDailyEntry{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationDailyEntryORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationDailyEntry{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func DefaultPatchHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationPersonalInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationPersonalInfo(ctx, &HealthMenstruationPersonalInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationPersonalInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument 
to DefaultStrictUpdateHealthMenstruationPersonalInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationPersonalInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func DefaultReadHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationDailyEntryORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn 
nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationDailyEntryORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationDailyEntryORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultPatchSetHealthMenstruationPersonalInfo(ctx context.Context, objects []*HealthMenstruationPersonalInfo, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationPersonalInfo, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationPersonalInfo, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationPersonalInfo(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func DefaultCreateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func 
DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx context.Context, patchee *HealthMenstruationPersonalInfo, patcher *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"PeriodLengthInDays\" {\n\t\t\tpatchee.PeriodLengthInDays = patcher.PeriodLengthInDays\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CycleLengthInDays\" {\n\t\t\tpatchee.CycleLengthInDays = patcher.CycleLengthInDays\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func (o SmallblogSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), smallblogPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `smallblog` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", 
\"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, smallblogPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in smallblog slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all smallblog\")\n\t}\n\treturn rowsAff, nil\n}", "func (o *Smallblog) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tsmallblogUpdateCacheMut.RLock()\n\tcache, cached := smallblogUpdateCache[key]\n\tsmallblogUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tsmallblogAllColumns,\n\t\t\tsmallblogPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update smallblog, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `smallblog` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, smallblogPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(smallblogType, smallblogMapping, append(wl, smallblogPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, 
values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update smallblog row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for smallblog\")\n\t}\n\n\tif !cached {\n\t\tsmallblogUpdateCacheMut.Lock()\n\t\tsmallblogUpdateCache[key] = cache\n\t\tsmallblogUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "func (a *BulkApiService) PatchBulkExport(ctx context.Context, moid string) ApiPatchBulkExportRequest {\n\treturn ApiPatchBulkExportRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (o HoldenAtSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), holdenAtPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"HoldenAt\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, holdenAtPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: 
unable to update all in holdenAt slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all holdenAt\")\n\t}\n\treturn rowsAff, nil\n}", "func (w *Workload) SanitizeBulkUpdate() {\n\n\t// All Workloads\n\tw.CreatedAt = \"\"\n\tw.CreatedBy = nil\n\tw.DeleteType = \"\"\n\tw.Deleted = nil\n\tw.DeletedAt = \"\"\n\tw.DeletedBy = nil\n\tw.UpdatedAt = \"\"\n\tw.UpdatedBy = nil\n\n\t// Managed workloads\n\tif w.Agent != nil && w.Agent.Status != nil {\n\t\tw.Hostname = \"\"\n\t\tw.Interfaces = nil\n\t\tw.Online = false\n\t\tw.OsDetail = \"\"\n\t\tw.OsID = \"\"\n\t\tw.PublicIP = \"\"\n\t\tw.Agent.Status = nil\n\t\tw.Services = nil\n\t\tw.Online = false\n\t}\n\n\t// Replace Labels with Hrefs\n\tnewLabels := []*Label{}\n\tfor _, l := range w.Labels {\n\t\tnewLabel := Label{Href: l.Href}\n\t\tnewLabels = append(newLabels, &newLabel)\n\t}\n\tw.Labels = newLabels\n}", "func (m *ItemSitesItemAnalyticsItemActivityStatsItemActivityStatItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ItemActivityStatable, requestConfiguration *ItemSitesItemAnalyticsItemActivityStatsItemActivityStatItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ItemActivityStatable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, 
iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateItemActivityStatFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ItemActivityStatable), nil\n}", "func (m *ThreatSubmissionEmailThreatsEmailThreatSubmissionItemRequestBuilder) Patch(ctx context.Context, body i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.EmailThreatSubmissionable, requestConfiguration *ThreatSubmissionEmailThreatsEmailThreatSubmissionItemRequestBuilderPatchRequestConfiguration)(i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.EmailThreatSubmissionable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.CreateEmailThreatSubmissionFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.EmailThreatSubmissionable), nil\n}", "func update(rt *Runtime, r goengage.Fundraise, key string) {\n\tg := Stat{}\n\trt.DB.Where(\"id = ?\", key).First(&g)\n\tif g.CreatedDate == nil {\n\t\tg.ID = key\n\t\tt := time.Now()\n\t\tg.CreatedDate = &t\n\t\trt.DB.Create(&g)\n\t}\n\tfor _, t := range r.Transactions {\n\t\tg.AllCount++\n\t\tg.AllAmount = g.AllAmount + t.Amount\n\t\tif r.WasImported 
{\n\t\t\tg.OfflineCount++\n\t\t\tg.OfflineAmount += t.Amount\n\t\t} else {\n\t\t\tswitch r.DonationType {\n\t\t\tcase goengage.OneTime:\n\t\t\t\tg.OneTimeCount++\n\t\t\t\tg.OneTimeAmount += t.Amount\n\t\t\tcase goengage.Recurring:\n\t\t\t\tg.RecurringCount++\n\t\t\t\tg.RecurringAmount += t.Amount\n\t\t\t}\n\t\t\tswitch t.Type {\n\t\t\tcase goengage.Refund:\n\t\t\t\tg.RefundsCount++\n\t\t\t\tg.RefundsAmount += t.Amount\n\t\t\t}\n\t\t}\n\t\tg.Largest = math.Max(g.Largest, t.Amount)\n\t\tif t.Amount > 0.0 {\n\t\t\tif g.Smallest < 1.0 {\n\t\t\t\tg.Smallest = t.Amount\n\t\t\t} else {\n\t\t\t\tg.Smallest = math.Min(g.Smallest, t.Amount)\n\t\t\t}\n\t\t}\n\t\trt.DB.Model(&g).Updates(&g)\n\t}\n}", "func (q holdenAtQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for HoldenAt\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for HoldenAt\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (o CurrentChartDataMinutelySlice) UpdateAll(exec boil.Executor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), currentChartDataMinutelyPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"current_chart_data_minutely\\\" SET %s WHERE 
%s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, currentChartDataMinutelyPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.Exec(sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in currentChartDataMinutely slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all currentChartDataMinutely\")\n\t}\n\treturn rowsAff, nil\n}", "func (o NotificationSlice) UpdateAll(exec boil.Executor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), notificationPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"notification\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, notificationPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.Exec(sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in notification slice\")\n\t}\n\n\trowsAff, err := 
result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all notification\")\n\t}\n\treturn rowsAff, nil\n}", "func (o *HoldenAt) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tkey := makeCacheKey(columns, nil)\n\tholdenAtUpdateCacheMut.RLock()\n\tcache, cached := holdenAtUpdateCache[key]\n\tholdenAtUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tholdenAtAllColumns,\n\t\t\tholdenAtPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update HoldenAt, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"HoldenAt\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, holdenAtPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(holdenAtType, holdenAtMapping, append(wl, holdenAtPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update HoldenAt row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for HoldenAt\")\n\t}\n\n\tif !cached {\n\t\tholdenAtUpdateCacheMut.Lock()\n\t\tholdenAtUpdateCache[key] = cache\n\t\tholdenAtUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, nil\n}", "func (q smallblogQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) 
{\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for smallblog\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for smallblog\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (c *kuberhealthyChecks) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result KuberhealthyCheck, err error) {\n\tresult = KuberhealthyCheck{}\n\terr = c.client.Patch(pt).\n\t\tNamespace(c.ns).\n\t\tResource(\"khchecks\").\n\t\tSubResource(subresources...).\n\t\tName(name).\n\t\tBody(data).\n\t\tDo(context.TODO()).\n\t\tInto(&result)\n\treturn\n}", "func (s *Service) Update(r *http.Request, args *UpdateEntryArgs, result *UpdateResponse) error {\n\t// Since there is no fixed data schema, we can update as we like, so be careful\n\tif args.UserID == \"\" {\n\t\tresult.Message = uidMissing\n\t\treturn nil\n\t}\n\tuuid := args.UUID\n\tif uuid != \"\" {\n\t\tcoll := s.Session.DB(MentatDatabase).C(args.UserID)\n\t\tentry := Entry{}\n\t\tmgoErr := coll.Find(bson.M{\"uuid\": uuid}).One(&entry)\n\t\tif mgoErr != nil {\n\t\t\tif mgoErr.Error() == MongoNotFound {\n\t\t\t\tresult.Message = \"No entry with provided UUID\"\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\ts.Log.Infof(\"mgo error: %s\", mgoErr)\n\t\t\tresult.Message = fmt.Sprintf(\"mgo error: %s\", mgoErr)\n\t\t\treturn nil\n\t\t}\n\t\t// TODO: maybe use reflection\n\t\tif args.Type != \"\" {\n\t\t\tentry.Type = args.Type\n\t\t}\n\t\tif args.Content != \"\" {\n\t\t\tentry.Content = args.Content\n\t\t}\n\t\tif len(args.Tags) > 0 {\n\t\t\tentry.Tags = args.Tags\n\t\t}\n\t\tif args.Scheduled != \"\" {\n\t\t\tscheduled, err := time.Parse(DatetimeLayout, args.Scheduled)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tentry.Scheduled = scheduled\n\t\t}\n\t\tif args.Deadline != \"\" {\n\t\t\tdeadline, err := 
time.Parse(DatetimeLayout, args.Deadline)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tentry.Deadline = deadline\n\t\t}\n\n\t\tif args.Priority != \"\" {\n\t\t\trexp, err := regexp.Compile(\"\\\\#[A-Z]$\")\n\t\t\tif err != nil {\n\t\t\t\tpanic(err) // sentinel, should fail, because such error is predictable\n\t\t\t}\n\t\t\tif rexp.Match([]byte(args.Priority)) {\n\t\t\t\tentry.Priority = args.Priority\n\t\t\t} else {\n\t\t\t\tresult.Message = \"Malformed priority value\"\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\n\t\tif args.TodoStatus != \"\" {\n\t\t\tentry.TodoStatus = strings.ToUpper(args.TodoStatus)\n\t\t}\n\t\tentry.ModifiedAt = time.Now()\n\t\t_, err := coll.Upsert(bson.M{\"uuid\": uuid}, entry)\n\t\tif err != nil {\n\t\t\tresult.Message = fmt.Sprintf(\"update failed: %s\", err)\n\t\t\treturn nil\n\t\t}\n\t\tresult.Message = \"updated\"\n\t\treturn nil\n\t}\n\tresult.Message = \"No UUID found, cannot proceed with updating\"\n\treturn nil\n}", "func (o ForeignLegalResourceSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), foreignLegalResourcePrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"ForeignLegalResources\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, foreignLegalResourcePrimaryKeyColumns, 
len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in foreignLegalResource slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all foreignLegalResource\")\n\t}\n\treturn rowsAff, nil\n}", "func (m *RiskyUsersItemHistoryRiskyUserHistoryItemItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RiskyUserHistoryItemable, requestConfiguration *RiskyUsersItemHistoryRiskyUserHistoryItemItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RiskyUserHistoryItemable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateRiskyUserHistoryItemFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RiskyUserHistoryItemable), nil\n}", "func (o CMFUserExperienceLogSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) 
== 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfUserExperienceLogPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_user_experience_log` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfUserExperienceLogPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfUserExperienceLog slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfUserExperienceLog\")\n\t}\n\treturn rowsAff, nil\n}", "func DefaultApplyFieldMaskPeriod(ctx context.Context, patchee *Period, patcher *Period, updateMask *field_mask.FieldMask, prefix string, db *gorm.DB) (*Period, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar err error\n\tvar updatedCreatedAt bool\n\tvar updatedUpdatedAt bool\n\tfor i, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Period\" {\n\t\t\tpatchee.Period = patcher.Period\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedCreatedAt && strings.HasPrefix(f, prefix+\"CreatedAt.\") {\n\t\t\tif 
patcher.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"CreatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.CreatedAt, patchee.CreatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tupdatedCreatedAt = true\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUpdatedAt && strings.HasPrefix(f, prefix+\"UpdatedAt.\") {\n\t\t\tif patcher.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UpdatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.UpdatedAt, patchee.UpdatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tupdatedUpdatedAt = true\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func (m *ManagedTenantsManagementActionsManagementActionItemRequestBuilder) Patch(ctx context.Context, body i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagementActionable, requestConfiguration 
*ManagedTenantsManagementActionsManagementActionItemRequestBuilderPatchRequestConfiguration)(i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagementActionable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.CreateManagementActionFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagementActionable), nil\n}", "func (m *ExactMatchDataStoresExactMatchDataStoreItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ExactMatchDataStoreable, requestConfiguration *ExactMatchDataStoresExactMatchDataStoreItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ExactMatchDataStoreable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, 
ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateExactMatchDataStoreFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ExactMatchDataStoreable), nil\n}", "func (o *CurrentChartDataMinutely) Update(exec boil.Executor, columns boil.Columns) (int64, error) {\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\to.UpdatedAt = currTime\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tcurrentChartDataMinutelyUpdateCacheMut.RLock()\n\tcache, cached := currentChartDataMinutelyUpdateCache[key]\n\tcurrentChartDataMinutelyUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tcurrentChartDataMinutelyColumns,\n\t\t\tcurrentChartDataMinutelyPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update current_chart_data_minutely, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"current_chart_data_minutely\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, currentChartDataMinutelyPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(currentChartDataMinutelyType, currentChartDataMinutelyMapping, append(wl, currentChartDataMinutelyPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tvar result sql.Result\n\tresult, err = exec.Exec(cache.query, 
values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update current_chart_data_minutely row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for current_chart_data_minutely\")\n\t}\n\n\tif !cached {\n\t\tcurrentChartDataMinutelyUpdateCacheMut.Lock()\n\t\tcurrentChartDataMinutelyUpdateCache[key] = cache\n\t\tcurrentChartDataMinutelyUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(exec)\n}", "func (m *CompaniesItemJournalsItemJournalLinesJournalLineItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.JournalLineable, requestConfiguration *CompaniesItemJournalsItemJournalLinesJournalLineItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.JournalLineable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateJournalLineFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.JournalLineable), nil\n}", "func (o RSSAnnouncementSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 
{\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), rssAnnouncementPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"rss_announcements\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, rssAnnouncementPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in rssAnnouncement slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all rssAnnouncement\")\n\t}\n\treturn rowsAff, nil\n}", "func (m *TermsAndConditionsItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TermsAndConditionsable, requestConfiguration *TermsAndConditionsItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TermsAndConditionsable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": 
ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateTermsAndConditionsFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TermsAndConditionsable), nil\n}", "func (ob *OrderBook) BatchUpdate() {\n\n}", "func (m *MobileAppTroubleshootingEventsMobileAppTroubleshootingEventItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.MobileAppTroubleshootingEventable, requestConfiguration *MobileAppTroubleshootingEventsMobileAppTroubleshootingEventItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.MobileAppTroubleshootingEventable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateMobileAppTroubleshootingEventFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.MobileAppTroubleshootingEventable), nil\n}", "func (o 
RecipeLipidSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), recipeLipidPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"recipe_lipid\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, recipeLipidPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in recipeLipid slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all recipeLipid\")\n\t}\n\treturn rowsAff, nil\n}", "func (c *globalThreatFeeds) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v3.GlobalThreatFeed, err error) {\n\tresult = &v3.GlobalThreatFeed{}\n\terr = c.client.Patch(pt).\n\t\tResource(\"globalthreatfeeds\").\n\t\tName(name).\n\t\tSubResource(subresources...).\n\t\tVersionedParams(&opts, scheme.ParameterCodec).\n\t\tBody(data).\n\t\tDo(ctx).\n\t\tInto(result)\n\treturn\n}", "func (o TenantSlice) UpdateAll(ctx 
context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"dbmodel: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), tenantPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `tenants` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, tenantPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"dbmodel: unable to update all in tenant slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"dbmodel: unable to retrieve rows affected all in update all tenant\")\n\t}\n\treturn rowsAff, nil\n}", "func (o CMFUserSuperSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := 
queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfUserSuperPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_user_super` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfUserSuperPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfUserSuper slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfUserSuper\")\n\t}\n\treturn rowsAff, nil\n}", "func (o *Rental) Update(exec boil.Executor, whitelist ...string) error {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn err\n\t}\n\tkey := makeCacheKey(whitelist, nil)\n\trentalUpdateCacheMut.RLock()\n\tcache, cached := rentalUpdateCache[key]\n\trentalUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := strmangle.UpdateColumnSet(\n\t\t\trentalColumns,\n\t\t\trentalPrimaryKeyColumns,\n\t\t\twhitelist,\n\t\t)\n\n\t\tif len(whitelist) == 0 {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn errors.New(\"sqlboiler: unable to update rental, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `rental` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, rentalPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(rentalType, rentalMapping, append(wl, rentalPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tvalues := 
queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\t_, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"sqlboiler: unable to update rental row\")\n\t}\n\n\tif !cached {\n\t\trentalUpdateCacheMut.Lock()\n\t\trentalUpdateCache[key] = cache\n\t\trentalUpdateCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpdateHooks(exec)\n}", "func (o AutomodRuleDatumSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), automodRuleDatumPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"automod_rule_data\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, automodRuleDatumPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in automodRuleDatum slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update 
all automodRuleDatum\")\n\t}\n\treturn rowsAff, nil\n}", "func DefaultListHealthMenstruationPersonalInfo(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationPersonalInfo, error) {\n\tin := HealthMenstruationPersonalInfo{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationPersonalInfoORM{}, &HealthMenstruationPersonalInfo{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationPersonalInfoORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationPersonalInfo{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func (m *HealthMenstruationDailyEntryORM) ToPB(ctx context.Context) (HealthMenstruationDailyEntry, error) {\n\tto := HealthMenstruationDailyEntry{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil 
{\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tif to.Day, err = ptypes1.TimestampProto(*m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = HealthMenstruationDailyEntry_Type(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (client ModelClient) UpdateHierarchicalEntityResponder(resp *http.Response) (result OperationStatus, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (c *cronFederatedHPAs) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.CronFederatedHPA, err error) {\n\tresult = &v1alpha1.CronFederatedHPA{}\n\terr = c.client.Patch(pt).\n\t\tNamespace(c.ns).\n\t\tResource(\"cronfederatedhpas\").\n\t\tName(name).\n\t\tSubResource(subresources...).\n\t\tVersionedParams(&opts, scheme.ParameterCodec).\n\t\tBody(data).\n\t\tDo(ctx).\n\t\tInto(result)\n\treturn\n}", "func (svc record) procUpdate(invokerID uint64, m *types.Module, upd *types.Record, old *types.Record) *types.RecordValueErrorSet {\n\t// Mark all values as updated (new)\n\tupd.Values.SetUpdatedFlag(true)\n\n\t// First sanitization\n\t//\n\t// Before values are merged with existing data and\n\t// sent to 
automation scripts (if any)\n\t// we need to make sure it does not get sanitized data\n\tupd.Values = svc.sanitizer.Run(m, upd.Values)\n\n\t// Copy values to updated record\n\t// to make sure nobody slips in something we do not want\n\tupd.CreatedAt = old.CreatedAt\n\tupd.CreatedBy = old.CreatedBy\n\tupd.UpdatedAt = nowPtr()\n\tupd.UpdatedBy = invokerID\n\tupd.DeletedAt = old.DeletedAt\n\tupd.DeletedBy = old.DeletedBy\n\n\t// Merge new (updated) values with old ones\n\t// This way we get list of updated, stale and deleted values\n\t// that we can selectively update in the repository\n\tupd.Values = old.Values.Merge(upd.Values)\n\n\tif upd.OwnedBy == 0 {\n\t\tif old.OwnedBy > 0 {\n\t\t\t// Owner not set/send in the payload\n\t\t\t//\n\t\t\t// Fallback to old owner (if set)\n\t\t\tupd.OwnedBy = old.OwnedBy\n\t\t} else {\n\t\t\t// If od owner is not set, make current user\n\t\t\t// the owner of the record\n\t\t\tupd.OwnedBy = invokerID\n\t\t}\n\t}\n\n\t// Run validation of the updated records\n\treturn svc.validator.Run(m, upd)\n}", "func (o *Notification) Update(exec boil.Executor, columns boil.Columns) (int64, error) {\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\to.UpdatedAt = currTime\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tnotificationUpdateCacheMut.RLock()\n\tcache, cached := notificationUpdateCache[key]\n\tnotificationUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tnotificationColumns,\n\t\t\tnotificationPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update notification, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"notification\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, 
wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, notificationPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(notificationType, notificationMapping, append(wl, notificationPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tvar result sql.Result\n\tresult, err = exec.Exec(cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update notification row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for notification\")\n\t}\n\n\tif !cached {\n\t\tnotificationUpdateCacheMut.Lock()\n\t\tnotificationUpdateCache[key] = cache\n\t\tnotificationUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(exec)\n}", "func DefaultApplyFieldMaskUserInfo(ctx context.Context, patchee *UserInfo, patcher *UserInfo, updateMask *field_mask.FieldMask, prefix string, db *gorm.DB) (*UserInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar err error\n\tvar updatedCreatedAt bool\n\tvar updatedUpdatedAt bool\n\tfor i, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UserId\" {\n\t\t\tpatchee.UserId = patcher.UserId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"LastName\" {\n\t\t\tpatchee.LastName = patcher.LastName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"FirstName\" {\n\t\t\tpatchee.FirstName = patcher.FirstName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Period\" {\n\t\t\tpatchee.Period = patcher.Period\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"DepartmentId\" 
{\n\t\t\tpatchee.DepartmentId = patcher.DepartmentId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"JobId\" {\n\t\t\tpatchee.JobId = patcher.JobId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"EnrollmentFlg\" {\n\t\t\tpatchee.EnrollmentFlg = patcher.EnrollmentFlg\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"AdminFlg\" {\n\t\t\tpatchee.AdminFlg = patcher.AdminFlg\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedCreatedAt && strings.HasPrefix(f, prefix+\"CreatedAt.\") {\n\t\t\tif patcher.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"CreatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.CreatedAt, patchee.CreatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tupdatedCreatedAt = true\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUpdatedAt && strings.HasPrefix(f, prefix+\"UpdatedAt.\") {\n\t\t\tif patcher.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UpdatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.UpdatedAt, patchee.UpdatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" 
{\n\t\t\tupdatedUpdatedAt = true\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func (o CMFAdminMenuSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfAdminMenuPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_admin_menu` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfAdminMenuPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfAdminMenu slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all cmfAdminMenu\")\n\t}\n\treturn rowsAff, nil\n}", "func (o EmployeeSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs 
:= make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), employeePrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"employee\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, employeePrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in employee slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all employee\")\n\t}\n\treturn rowsAff, nil\n}", "func (m *SiteItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Siteable, requestConfiguration *SiteItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Siteable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.Send(ctx, requestInfo, 
iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateSiteFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Siteable), nil\n}", "func (t *HeathCare_Chaincode) modifyMedicalData(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tfmt.Println(\"\\n=============== start modifyMedicalData function ===============\")\n\tstart := time.Now()\n\ttime.Sleep(time.Second)\n\n\tvar jsonResp string\n\n\tif len(args) != 8 {\n\t\treturn shim.Error(\"expecting 4 argument\")\n\t}\n\n\t//define identity of query-er and new value of medical record\n\tuserid := args[0]\n\tpatientid := args[1]\n\tlocation := args[2]\n\tcollection := args[3]\n\n\tnewPersonalIdentificationInformation := args[4]\n\tnewMedicalHistory := args[5]\n\tnewFamilyMedicalHistory := args[6]\n\tnewMedicationHistory := args[7]\n\tnewTreatmentHistory := args[8]\n\tnewMedicalDirectives := args[9]\n\ttimeQuery := time.Now().String()\n\n\t//get user identity before query\n\tuserIdentityAsBytes, errUserIdentityAsByte := stub.GetPrivateData(collection, userid)\n\tif errUserIdentityAsByte != nil {\n\t\treturn shim.Error(\"cannot get user identity\")\n\t} else if userIdentityAsBytes == nil {\n\t\treturn shim.Error(\"user does not exist\")\n\t}\n\n\t//create query object with purpose: modify\n\tobjectType := \"Query\"\n\tquery := &Query{objectType, userid, patientid, location, timeQuery, \"modify\"}\n\tqueryAsByte, errQueryAsByte := json.Marshal(query)\n\tif errQueryAsByte != nil {\n\t\treturn shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//save to database\n\terrQueryAsByte = stub.PutPrivateData(\"modifyCollection\", userid, queryAsByte)\n\tif errQueryAsByte != nil {\n\t\treturn shim.Error(errQueryAsByte.Error())\n\t}\n\n\t//create index key\n\tindexName := \"userid~patientid\"\n\tqueryIndexKey, errQueryIndexKey := stub.CreateCompositeKey(indexName, 
[]string{query.UserID, query.PatientID, query.Location, query.Purpose})\n\tif errQueryIndexKey != nil {\n\t\treturn shim.Error(errQueryIndexKey.Error())\n\t}\n\n\t//save index\n\tvalue := []byte{0x00}\n\tstub.PutPrivateData(\"modifyCollection\", queryIndexKey, value)\n\n\t//get medical record data\n\tmedicalRecordAsBytes, errMedicalRecordAsByte := stub.GetPrivateData(\"MedicalRecordCollection\", patientid)\n\tif errMedicalRecordAsByte != nil {\n\t\tjsonResp = \"{\\\"Error\\\":\\\"Failed to get state for \" + patientid + \": \" + errMedicalRecordAsByte.Error() + \"\\\"}\"\n\t\treturn shim.Error(jsonResp)\n\t} else if errMedicalRecordAsByte == nil {\n\t\treturn shim.Error(\"patient's data does not exist\")\n\t}\n\n\t//convert data of patient to json\n\tmedicalRecord := &MedicalRecord{}\n\terrMedicalRecordAsByte = json.Unmarshal(medicalRecordAsBytes, medicalRecord)\n\n\t//change data\n\tmedicalRecord.PersonalIdentificationInformation = newPersonalIdentificationInformation\n\tmedicalRecord.MedicalHistory = newMedicalHistory\n\tmedicalRecord.FamilyMedicalHistory = newFamilyMedicalHistory\n\tmedicalRecord.MedicationHistory = newMedicationHistory\n\tmedicalRecord.TreatmentHistory = newTreatmentHistory\n\tmedicalRecord.MedicalDirectives = newMedicalDirectives\n\n\t//convert new medical record data to byte\n\tnewMedicalRecordAsByte, errNewMedicalRecordAsByte := json.Marshal(medicalRecord)\n\n\t//store new data\n\terrNewMedicalRecordAsByte = stub.PutPrivateData(\"MedicalRecordCollection\", patientid, newMedicalRecordAsByte)\n\tif errNewMedicalRecordAsByte != nil {\n\t\treturn shim.Error(\"cannot save new medical record's data\")\n\t}\n\n\tend := time.Now()\n\telapsed := time.Since(start)\n\tfmt.Println(\"function modifyMedicalData\")\n\tfmt.Println(\"time start: \", start.String())\n\tfmt.Println(\"time end: \", end.String())\n\tfmt.Println(\"time execute: \", elapsed.String())\n\tfmt.Println(\"=============== end modifyMedicalData function ===============\")\n\n\treturn 
shim.Success(nil)\n}", "func (client ModelClient) PatchClosedListResponder(resp *http.Response) (result OperationStatus, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (du *DayUpdate) Save(ctx context.Context) (int, error) {\n\n\tvar (\n\t\terr error\n\t\taffected int\n\t)\n\tif len(du.hooks) == 0 {\n\t\taffected, err = du.sqlSave(ctx)\n\t} else {\n\t\tvar mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {\n\t\t\tmutation, ok := m.(*DayMutation)\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected mutation type %T\", m)\n\t\t\t}\n\t\t\tdu.mutation = mutation\n\t\t\taffected, err = du.sqlSave(ctx)\n\t\t\tmutation.done = true\n\t\t\treturn affected, err\n\t\t})\n\t\tfor i := len(du.hooks) - 1; i >= 0; i-- {\n\t\t\tmut = du.hooks[i](mut)\n\t\t}\n\t\tif _, err := mut.Mutate(ctx, du.mutation); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\treturn affected, err\n}", "func (o WeatherSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"db: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), weatherPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"prh\\\".\\\"weather\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, 
colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, weatherPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"db: unable to update all in weather slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"db: unable to retrieve rows affected all in update all weather\")\n\t}\n\treturn rowsAff, nil\n}", "func DefaultStrictUpdatePeriod(ctx context.Context, in *Period, db *gorm.DB) (*Period, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdatePeriod\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &PeriodORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(PeriodORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(PeriodORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(PeriodORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func (a *HyperflexApiService) PatchHyperflexSoftwareDistributionEntry(ctx context.Context, moid string) ApiPatchHyperflexSoftwareDistributionEntryRequest {\n\treturn 
ApiPatchHyperflexSoftwareDistributionEntryRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (o PostSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"orm: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), postPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"posts\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, postPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"orm: unable to update all in post slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"orm: unable to retrieve rows affected all in update all post\")\n\t}\n\treturn rowsAff, nil\n}", "func (a *HyperflexApiService) PatchHyperflexSoftwareDistributionEntryExecute(r ApiPatchHyperflexSoftwareDistributionEntryRequest) (*HyperflexSoftwareDistributionEntry, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexSoftwareDistributionEntry\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, 
\"HyperflexApiService.PatchHyperflexSoftwareDistributionEntry\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/SoftwareDistributionEntries/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexSoftwareDistributionEntry == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexSoftwareDistributionEntry is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexSoftwareDistributionEntry\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := 
ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v 
Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func batch(lullTime time.Duration, maxTime time.Duration, exists existenceChecker, ch chan notify.EventInfo) *Mod {\n\tadded := make(map[string]bool)\n\tremoved := make(map[string]bool)\n\tchanged := make(map[string]bool)\n\trenamed := make(map[string]bool)\n\t// Have we had a modification in the last lull\n\thadLullMod := false\n\tfor {\n\t\tselect {\n\t\tcase evt := <-ch:\n\t\t\thadLullMod = true\n\t\t\tLogger.SayAs(\"debug\", \"%s\", evt)\n\t\t\tswitch evt.Event() {\n\t\t\tcase notify.Create:\n\t\t\t\tadded[evt.Path()] = true\n\t\t\tcase notify.Remove:\n\t\t\t\tremoved[evt.Path()] = true\n\t\t\tcase notify.Write:\n\t\t\t\tchanged[evt.Path()] = true\n\t\t\tcase notify.Rename:\n\t\t\t\trenamed[evt.Path()] = true\n\t\t\t}\n\t\tcase <-time.After(lullTime):\n\t\t\t// Have we had a lull?\n\t\t\tif hadLullMod == false {\n\t\t\t\treturn mkmod(exists, added, removed, changed, renamed)\n\t\t\t}\n\t\t\thadLullMod = false\n\t\tcase <-time.After(maxTime):\n\t\t\treturn mkmod(exists, added, removed, changed, renamed)\n\t\t}\n\t}\n}", "func (client ReferenceDataSetsClient) UpdateResponder(resp *http.Response) (result ReferenceDataSetResource, err error) {\n\terr = 
autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (o *RecipeLipid) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\to.UpdatedAt = currTime\n\t}\n\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\trecipeLipidUpdateCacheMut.RLock()\n\tcache, cached := recipeLipidUpdateCache[key]\n\trecipeLipidUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\trecipeLipidAllColumns,\n\t\t\trecipeLipidPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update recipe_lipid, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"recipe_lipid\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, recipeLipidPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(recipeLipidType, recipeLipidMapping, append(wl, recipeLipidPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update recipe_lipid row\")\n\t}\n\n\trowsAff, err := 
result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for recipe_lipid\")\n\t}\n\n\tif !cached {\n\t\trecipeLipidUpdateCacheMut.Lock()\n\t\trecipeLipidUpdateCache[key] = cache\n\t\trecipeLipidUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "func (service *EmployeeService) PatchEmployeeDetails(employeeID string, employeeDetails models.Employee) error {\n\tcollection := service.mongoClient.Database(DbName).Collection(CollectionName)\n\tupdatesToBePerformed := bson.M{}\n\tupdatesToBePerformed[\"employeeid\"] = employeeID\n\tif employeeDetails.Department != nil {\n\t\tupdatesToBePerformed[\"department\"] = employeeDetails.Department\n\t}\n\n\tif employeeDetails.Name != nil {\n\t\tupdatesToBePerformed[\"name\"] = employeeDetails.Name\n\t}\n\n\tif employeeDetails.Skills != nil {\n\t\tupdatesToBePerformed[\"skills\"] = employeeDetails.Skills\n\t}\n\n\tif employeeDetails.Address != nil {\n\t\taddress := models.Address{}\n\t\tif employeeDetails.Address.City != nil {\n\t\t\taddress.City = employeeDetails.Address.City\n\t\t}\n\n\t\tif employeeDetails.Address.Country != nil {\n\t\t\taddress.Country = employeeDetails.Address.Country\n\t\t}\n\n\t\tif employeeDetails.Address.DoorNo != nil {\n\t\t\taddress.DoorNo = employeeDetails.Address.DoorNo\n\t\t}\n\n\t\tif employeeDetails.Address.State != nil {\n\t\t\taddress.State = employeeDetails.Address.State\n\t\t}\n\n\t\tupdatesToBePerformed[\"address\"] = address\n\t}\n\n\tif employeeDetails.Status != nil {\n\t\tupdatesToBePerformed[\"status\"] = employeeDetails.Status\n\t}\n\n\t// consolidatedMap(&updatesToBePerformed, employeeDetails)\n\n\tresult, err := collection.UpdateOne(\n\t\tcontext.Background(),\n\t\tbson.M{\"employeeid\": employeeID},\n\t\tbson.M{\n\t\t\t\"$set\": updatesToBePerformed,\n\t\t})\n\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\tfmt.Println(result)\n\n\treturn nil\n}", "func (m *ReportsRequestBuilder) 
Patch(ctx context.Context, body i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable, requestConfiguration *ReportsRequestBuilderPatchRequestConfiguration)(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.CreateReportsFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable), nil\n}", "func (u *usecase) Update() error {\n\t// Time execution\n\tstart := time.Now()\n\n\t// Creating context with timeout duration process\n\tctx, cancel := context.WithTimeout(context.Background(), 20*time.Minute)\n\tdefer cancel()\n\n\t// Get all archieve from scrapper repository\n\tarchieves, err := u.scrapperRepo.GetAllArchieve()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Create array (slice) of archieve object\n\tvar newArchieves []*model.Archieve\n\n\t// Check if archieve is exist in DB\n\tfor _, archieve := range archieves {\n\t\t// Get archieve from DB by archieve code\n\t\t_, err := u.mysqlRepo.GetArchieveByCode(ctx, archieve.Code)\n\n\t\t// if archieve not exist then add to newArchieve array (slice)\n\t\tif err == model.ErrDataNotFound {\n\t\t\t// Add archieve\n\t\t\tnewArchieves = append(newArchieves, archieve)\n\t\t\tlog.Printf(\"New archieve: %v\", 
archieve.Code)\n\t\t} else if err != nil && err != model.ErrDataNotFound {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// Counter new journal from archieves\n\tvar totalNewJournal int\n\n\t// Get new archieves journals\n\tfor _, newArchieve := range newArchieves {\n\t\t// Get all journal from scrapper repository based on archieve\n\t\tjournals, err := u.scrapperRepo.GetAllJournalByArchieveObject(newArchieve)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// Append Journals into archieve object\n\t\tnewArchieve.Journals = journals\n\t\ttotalNewJournal += len(newArchieve.Journals)\n\t}\n\n\t// Check if there's new archieve then saved new archieve into DB\n\tif len(newArchieves) > 0 {\n\t\t// Insert new archieves into DB\n\t\tif err := u.mysqlRepo.BatchArchieves(ctx, newArchieves); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tlog.Printf(\"Added %v archieve and %v journal (%v)m\", len(newArchieves), totalNewJournal, time.Since(start).Minutes())\n\n\t// if there's no update then do nothing or finish pull data from archieve scrapper\n\treturn nil\n}", "func (o TicketSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), ticketPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"tickets\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 
len(colNames)+1, ticketPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in ticket slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all ticket\")\n\t}\n\treturn rowsAff, nil\n}", "func (o CMFBalanceChargeAdminSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cmfBalanceChargeAdminPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE `cmf_balance_charge_admin` SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cmfBalanceChargeAdminPrimaryKeyColumns, len(o)))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in cmfBalanceChargeAdmin slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve 
rows affected all in update all cmfBalanceChargeAdmin\")\n\t}\n\treturn rowsAff, nil\n}", "func (q foreignLegalResourceQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for ForeignLegalResources\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for ForeignLegalResources\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (dn *Daemon) updateHypershift(oldConfig, newConfig *mcfgv1.MachineConfig, diff *machineConfigDiff) (retErr error) {\n\toldIgnConfig, err := ctrlcommon.ParseAndConvertConfig(oldConfig.Spec.Config.Raw)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"parsing old Ignition config failed: %w\", err)\n\t}\n\tnewIgnConfig, err := ctrlcommon.ParseAndConvertConfig(newConfig.Spec.Config.Raw)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"parsing new Ignition config failed: %w\", err)\n\t}\n\n\t// update files on disk that need updating\n\t// We should't skip the certificate write in HyperShift since it does not run the extra daemon process\n\tif err := dn.updateFiles(oldIgnConfig, newIgnConfig, false); err != nil {\n\t\treturn err\n\t}\n\n\tdefer func() {\n\t\tif retErr != nil {\n\t\t\tif err := dn.updateFiles(newIgnConfig, oldIgnConfig, false); err != nil {\n\t\t\t\terrs := kubeErrs.NewAggregate([]error{err, retErr})\n\t\t\t\tretErr = fmt.Errorf(\"error rolling back files writes: %w\", errs)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n\n\tif err := dn.updateSSHKeys(newIgnConfig.Passwd.Users, oldIgnConfig.Passwd.Users); err != nil {\n\t\treturn err\n\t}\n\n\tdefer func() {\n\t\tif retErr != nil {\n\t\t\tif err := dn.updateSSHKeys(newIgnConfig.Passwd.Users, oldIgnConfig.Passwd.Users); err != nil {\n\t\t\t\terrs := kubeErrs.NewAggregate([]error{err, retErr})\n\t\t\t\tretErr = 
fmt.Errorf(\"error rolling back SSH keys updates: %w\", errs)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n\n\tif dn.os.IsCoreOSVariant() {\n\t\tcoreOSDaemon := CoreOSDaemon{dn}\n\t\tif err := coreOSDaemon.applyOSChanges(*diff, oldConfig, newConfig); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tdefer func() {\n\t\t\tif retErr != nil {\n\t\t\t\tif err := coreOSDaemon.applyOSChanges(*diff, newConfig, oldConfig); err != nil {\n\t\t\t\t\terrs := kubeErrs.NewAggregate([]error{err, retErr})\n\t\t\t\t\tretErr = fmt.Errorf(\"error rolling back changes to OS: %w\", errs)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}()\n\t} else {\n\t\tklog.Info(\"updating the OS on non-CoreOS nodes is not supported\")\n\t}\n\n\tif err := UpdateTuningArgs(KernelTuningFile, CmdLineFile); err != nil {\n\t\treturn err\n\t}\n\n\tklog.Info(\"Successfully completed Hypershift config update\")\n\treturn nil\n}", "func triggerDaemonSetRollout(c client.Client, ds *appsv1.DaemonSet) error {\n\tannotations := map[string]string{}\n\tdscpy := ds.DeepCopy()\n\n\tif dscpy.Spec.Template.Annotations == nil {\n\t\tdscpy.Spec.Template.Annotations = annotations\n\t}\n\tdscpy.Spec.Template.Annotations[\"fileintegrity.openshift.io/restart-\"+fmt.Sprintf(\"%d\", time.Now().Unix())] = \"\"\n\treturn c.Update(context.TODO(), dscpy)\n}", "func (hc *LegacyHealthCheckImpl) updateHealth(ts *LegacyTabletStats, conn queryservice.QueryService) {\n\t// Unconditionally send the received update at the end.\n\tdefer func() {\n\t\tif hc.listener != nil {\n\t\t\thc.listener.StatsUpdate(ts)\n\t\t}\n\t}()\n\n\thc.mu.Lock()\n\tth, ok := hc.addrToHealth[ts.Key]\n\tif !ok {\n\t\t// This can happen on delete because the entry is removed first,\n\t\t// or if LegacyHealthCheckImpl has been closed.\n\t\thc.mu.Unlock()\n\t\treturn\n\t}\n\toldts := th.latestTabletStats\n\tth.latestTabletStats = *ts\n\tth.conn = conn\n\thc.mu.Unlock()\n\n\t// In the case where a tablet changes type (but not for the\n\t// initial message), we want to log it, and 
maybe advertise it too.\n\tif oldts.Target.TabletType != topodatapb.TabletType_UNKNOWN && oldts.Target.TabletType != ts.Target.TabletType {\n\t\t// Log and maybe notify\n\t\tlog.Infof(\"HealthCheckUpdate(Type Change): %v, tablet: %s, target %+v => %+v, reparent time: %v\",\n\t\t\toldts.Name, topotools.TabletIdent(oldts.Tablet), topotools.TargetIdent(oldts.Target), topotools.TargetIdent(ts.Target), ts.TabletExternallyReparentedTimestamp)\n\t\tif hc.listener != nil && hc.sendDownEvents {\n\t\t\toldts.Up = false\n\t\t\thc.listener.StatsUpdate(&oldts)\n\t\t}\n\n\t\t// Track how often a tablet gets promoted to master. It is used for\n\t\t// comparing against the variables in go/vtgate/buffer/variables.go.\n\t\tif oldts.Target.TabletType != topodatapb.TabletType_MASTER && ts.Target.TabletType == topodatapb.TabletType_MASTER {\n\t\t\thcMasterPromotedCounters.Add([]string{ts.Target.Keyspace, ts.Target.Shard}, 1)\n\t\t}\n\t}\n}", "func (gm *gmap) applyEntries(gmp *gmapProgress, apply *apply) {\n\t// Has entry?\n\tif len(apply.entries) == 0 {\n\t\treturn\n\t}\n\t// Is the node leave the cluster tool long, the latest snapshot is better than the entry.\n\tfirsti := apply.entries[0].Index\n\tif firsti > gmp.appliedi+1 {\n\t\tlogger.Panicf(\"first index of committed entry[%d] should <= appliedi[%d] + 1\", firsti, gmp.appliedi)\n\t}\n\t// Extract useful entries.\n\tvar ents []raftpb.Entry\n\tif gmp.appliedi+1-firsti < uint64(len(apply.entries)) {\n\t\tents = apply.entries[gmp.appliedi+1-firsti:]\n\t}\n\t// Iterate all entries\n\tfor _, e := range ents {\n\t\tswitch e.Type {\n\t\t// Normal entry.\n\t\tcase raftpb.EntryNormal:\n\t\t\tif len(e.Data) != 0 {\n\t\t\t\t// Unmarshal request.\n\t\t\t\tvar req InternalRaftRequest\n\t\t\t\tpbutil.MustUnmarshal(&req, e.Data)\n\n\t\t\t\tvar ar applyResult\n\t\t\t\t// Put new value\n\t\t\t\tif put := req.Put; put != nil {\n\t\t\t\t\t// Get set.\n\t\t\t\t\tset, exist := gm.sets[put.Set]\n\t\t\t\t\tif !exist 
{\n\t\t\t\t\t\tlogger.Panicf(\"set(%s) is not exist\", put.Set)\n\t\t\t\t\t}\n\t\t\t\t\t// Get key, value and revision.\n\t\t\t\t\tkey, value, revision := put.Key, set.vtype.unwrap(put.Value), e.Index\n\t\t\t\t\t// Get map and put value into map.\n\t\t\t\t\tm := set.get(put.Map)\n\t\t\t\t\tm.put(key, value, revision)\n\t\t\t\t\t// Send put event to watcher\n\t\t\t\t\tevent := MapEvent{Type: PUT, KV: &KeyValue{Key: key, Value: value}}\n\t\t\t\t\tm.watchers.Range(func(key, value interface{}) bool {\n\t\t\t\t\t\tkey.(*watcher).eventc <- event\n\t\t\t\t\t\treturn true\n\t\t\t\t\t})\n\t\t\t\t\t// Set apply result.\n\t\t\t\t\tar.rev = revision\n\t\t\t\t}\n\t\t\t\t// Delete value\n\t\t\t\tif del := req.Delete; del != nil {\n\t\t\t\t\t// Get set.\n\t\t\t\t\tset, exist := gm.sets[del.Set]\n\t\t\t\t\tif !exist {\n\t\t\t\t\t\tlogger.Panicf(\"set(%s) is not exist\", del.Set)\n\t\t\t\t\t}\n\t\t\t\t\t// Get map and delete value from map.\n\t\t\t\t\tm := set.get(del.Map)\n\t\t\t\t\tif pre := m.delete(del.Key); nil != pre {\n\t\t\t\t\t\t// Send put event to watcher\n\t\t\t\t\t\tar.pre = *pre\n\t\t\t\t\t\tevent := MapEvent{Type: DELETE, PrevKV: &KeyValue{Key: del.Key, Value: ar.pre.Value}}\n\t\t\t\t\t\tm.watchers.Range(func(key, value interface{}) bool {\n\t\t\t\t\t\t\tkey.(*watcher).eventc <- event\n\t\t\t\t\t\t\treturn true\n\t\t\t\t\t\t})\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// Update value\n\t\t\t\tif update := req.Update; update != nil {\n\t\t\t\t\t// Get set.\n\t\t\t\t\tset, exist := gm.sets[update.Set]\n\t\t\t\t\tif !exist {\n\t\t\t\t\t\tlogger.Panicf(\"set(%s) is not exist\", update.Set)\n\t\t\t\t\t}\n\t\t\t\t\t// Get map.\n\t\t\t\t\tm := set.get(update.Map)\n\t\t\t\t\t// Update value.\n\t\t\t\t\tpre, ok := m.update(update.Key, update.Value, update.Revision, e.Index)\n\t\t\t\t\tif ok {\n\t\t\t\t\t\t// The revision will be set only if update succeed\n\t\t\t\t\t\tar.rev = e.Index\n\t\t\t\t\t}\n\t\t\t\t\tif nil != pre {\n\t\t\t\t\t\tar.pre = 
*pre\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// Trigger proposal waiter.\n\t\t\t\tgm.wait.Trigger(req.ID, &ar)\n\t\t\t}\n\t\t// The configuration of gmap is fixed and wil not be synchronized through raft.\n\t\tcase raftpb.EntryConfChange:\n\t\tdefault:\n\t\t\tlogger.Panicf(\"entry type should be either EntryNormal or EntryConfChange\")\n\t\t}\n\n\t\tgmp.appliedi, gmp.appliedt = e.Index, e.Term\n\t}\n}", "func (rc *RecordCollection) applyMethod(methodName string) {\n\tfor _, rec := range rc.Records() {\n\t\tretVal := rec.Call(methodName)\n\t\tdata := retVal.(RecordData).Underlying()\n\t\t// Check if the values actually changed\n\t\tvar doUpdate bool\n\t\tfor f, v := range data.FieldMap {\n\t\t\tif f == \"write_date\" {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif rs, isRS := rec.Get(rec.model.FieldName(f)).(RecordSet); isRS {\n\t\t\t\tif !rs.Collection().Equals(v.(RecordSet).Collection()) {\n\t\t\t\t\tdoUpdate = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif rec.Get(rec.model.FieldName(f)) != v {\n\t\t\t\tdoUpdate = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif doUpdate {\n\t\t\trec.WithContext(\"hexya_force_compute_write\", true).Call(\"Write\", data)\n\t\t}\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexHealthCheckDefinitionExecute(r ApiPatchHyperflexHealthCheckDefinitionRequest) (*HyperflexHealthCheckDefinition, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexHealthCheckDefinition\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexHealthCheckDefinition\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/HealthCheckDefinitions/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), 
-1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexHealthCheckDefinition == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexHealthCheckDefinition is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexHealthCheckDefinition\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: 
localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, 
localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (q currentChartDataMinutelyQuery) UpdateAll(exec boil.Executor, cols M) (int64, error) {\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.Exec(exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for current_chart_data_minutely\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for current_chart_data_minutely\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (s *MatcherStore) UpdateEnrichments(ctx context.Context, name string, fp driver.Fingerprint, es []driver.EnrichmentRecord) (uuid.UUID, error) {\n\tconst (\n\t\tcreate = `\nINSERT\nINTO\n\tupdate_operation (updater, fingerprint, kind)\nVALUES\n\t($1, $2, 'enrichment')\nRETURNING\n\tid, ref;`\n\t\tinsert = `\nINSERT\nINTO\n\tenrichment (hash_kind, hash, updater, tags, data)\nVALUES\n\t($1, $2, $3, $4, $5)\nON CONFLICT\n\t(hash_kind, hash)\nDO\n\tNOTHING;`\n\t\tassoc = `\nINSERT\nINTO\n\tuo_enrich (enrich, updater, uo, date)\nVALUES\n\t(\n\t\t(\n\t\t\tSELECT\n\t\t\t\tid\n\t\t\tFROM\n\t\t\t\tenrichment\n\t\t\tWHERE\n\t\t\t\thash_kind = $1\n\t\t\t\tAND hash = $2\n\t\t\t\tAND updater = $3\n\t\t),\n\t\t$3,\n\t\t$4,\n\t\ttransaction_timestamp()\n\t)\nON CONFLICT\nDO\n\tNOTHING;`\n\t\trefreshView = `REFRESH MATERIALIZED VIEW CONCURRENTLY latest_update_operations;`\n\t)\n\tctx = zlog.ContextWithValues(ctx, \"component\", \"datastore/postgres/UpdateEnrichments\")\n\n\tvar id uint64\n\tvar ref uuid.UUID\n\n\tstart := time.Now()\n\n\tif err := s.pool.QueryRow(ctx, create, name, string(fp)).Scan(&id, &ref); err != nil {\n\t\treturn uuid.Nil, fmt.Errorf(\"failed to create 
update_operation: %w\", err)\n\t}\n\n\tupdateEnrichmentsCounter.WithLabelValues(\"create\").Add(1)\n\tupdateEnrichmentsDuration.WithLabelValues(\"create\").Observe(time.Since(start).Seconds())\n\n\ttx, err := s.pool.Begin(ctx)\n\tif err != nil {\n\t\treturn uuid.Nil, fmt.Errorf(\"unable to start transaction: %w\", err)\n\t}\n\tdefer tx.Rollback(ctx)\n\n\tzlog.Debug(ctx).\n\t\tStr(\"ref\", ref.String()).\n\t\tMsg(\"update_operation created\")\n\n\tbatch := microbatch.NewInsert(tx, 2000, time.Minute)\n\tstart = time.Now()\n\tfor i := range es {\n\t\thashKind, hash := hashEnrichment(&es[i])\n\t\terr := batch.Queue(ctx, insert,\n\t\t\thashKind, hash, name, es[i].Tags, es[i].Enrichment,\n\t\t)\n\t\tif err != nil {\n\t\t\treturn uuid.Nil, fmt.Errorf(\"failed to queue enrichment: %w\", err)\n\t\t}\n\t\tif err := batch.Queue(ctx, assoc, hashKind, hash, name, id); err != nil {\n\t\t\treturn uuid.Nil, fmt.Errorf(\"failed to queue association: %w\", err)\n\t\t}\n\t}\n\tif err := batch.Done(ctx); err != nil {\n\t\treturn uuid.Nil, fmt.Errorf(\"failed to finish batch enrichment insert: %w\", err)\n\t}\n\tupdateEnrichmentsCounter.WithLabelValues(\"insert_batch\").Add(1)\n\tupdateEnrichmentsDuration.WithLabelValues(\"insert_batch\").Observe(time.Since(start).Seconds())\n\n\tif err := tx.Commit(ctx); err != nil {\n\t\treturn uuid.Nil, fmt.Errorf(\"failed to commit transaction: %w\", err)\n\t}\n\tif _, err = s.pool.Exec(ctx, refreshView); err != nil {\n\t\treturn uuid.Nil, fmt.Errorf(\"could not refresh latest_update_operations: %w\", err)\n\t}\n\tzlog.Debug(ctx).\n\t\tStringer(\"ref\", ref).\n\t\tInt(\"inserted\", len(es)).\n\t\tMsg(\"update_operation committed\")\n\treturn ref, nil\n}", "func (m *ManagedTenantsManagedTenantTicketingEndpointsManagedTenantTicketingEndpointItemRequestBuilder) Patch(ctx context.Context, body i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagedTenantTicketingEndpointable, requestConfiguration 
*ManagedTenantsManagedTenantTicketingEndpointsManagedTenantTicketingEndpointItemRequestBuilderPatchRequestConfiguration)(i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagedTenantTicketingEndpointable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.CreateManagedTenantTicketingEndpointFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i72d786f54cc0bb289c971b085dd642b2fc3af6394328682e69783fd7e229b582.ManagedTenantTicketingEndpointable), nil\n}", "func (m *ItemSitesSiteItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Siteable, requestConfiguration *ItemSitesSiteItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Siteable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, 
iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateSiteFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Siteable), nil\n}", "func UpdateOrSaveManyEmployeeHours(hours []EmployeeHours) {\n\tfor _, hourSet := range hours {\n\t\tUpdateOrSaveEmployeeHours(hourSet)\n\t}\n}", "func (o IllnessSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), illnessPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"illness\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, illnessPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in illness slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all illness\")\n\t}\n\treturn rowsAff, nil\n}", "func (m *ItemCalendarViewBookingAppointmentItemRequestBuilder) Patch(ctx context.Context, body 
ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.BookingAppointmentable, requestConfiguration *ItemCalendarViewBookingAppointmentItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.BookingAppointmentable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateBookingAppointmentFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.BookingAppointmentable), nil\n}", "func (o *Employee) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\temployeeUpdateCacheMut.RLock()\n\tcache, cached := employeeUpdateCache[key]\n\temployeeUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\temployeeAllColumns,\n\t\t\temployeePrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update employee, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE \\\"employee\\\" SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, 
wl),\n\t\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", len(wl)+1, employeePrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(employeeType, employeeMapping, append(wl, employeePrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update employee row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for employee\")\n\t}\n\n\tif !cached {\n\t\temployeeUpdateCacheMut.Lock()\n\t\temployeeUpdateCache[key] = cache\n\t\temployeeUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "func (repo *Repository) Update(ctx context.Context, claims auth.Claims, req UpdateRequest, now time.Time) error {\n\tspan, ctx := tracer.StartSpanFromContext(ctx, \"internal.expenditure.Update\")\n\tdefer span.Finish()\n\n\tif claims.Audience == \"\" {\n\t\treturn errors.WithStack(ErrForbidden)\n\t}\n\t// Admin users can update branches they have access to.\n\tif !claims.HasRole(auth.RoleAdmin) {\n\t\treturn errors.WithStack(ErrForbidden)\n\t}\n\n\t// Validate the request.\n\tv := webcontext.Validator()\n\terr := v.Struct(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcols := models.M{}\n\tif req.Amount != nil {\n\t\tcols[models.RepsExpenseColumns.Amount] = *req.Amount\n\t}\n\tif req.Reason != nil {\n\t\tcols[models.RepsExpenseColumns.Reason] = *req.Reason\n\t}\n\tif len(cols) == 0 {\n\t\treturn nil\n\t}\n\n\t// If now empty set it to the current time.\n\tif now.IsZero() {\n\t\tnow = time.Now()\n\t}\n\n\t// Always 
store the time as UTC.\n\tnow = now.UTC()\n\t// Postgres truncates times to milliseconds when storing. We and do the same\n\t// here so the value we return is consistent with what we store.\n\tnow = now.Truncate(time.Millisecond)\n\n\tcols[models.BranchColumns.UpdatedAt] = now\n\n\t_, err = models.RepsExpenses(models.RepsExpenseWhere.ID.EQ(req.ID)).UpdateAll(ctx, repo.DbConn, cols)\n\n\treturn nil\n}", "func (m *ItemOnlineMeetingsItemRegistrationRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationable, requestConfiguration *ItemOnlineMeetingsItemRegistrationRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateMeetingRegistrationFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationable), nil\n}", "func (k Keeper) UpdateDailyPercent(ctx sdk.Context, addr sdk.AccAddress, coin coins.Coin) {\n\tbalance := k.BankKeeper.GetPosminableBalance(ctx, addr, coin)\n\n\tposmining := k.GetPosmining(ctx, addr, coin)\n\n\tnewDailyPercent := coin.GetDailyPercent(balance)\n\n\tif !posmining.DailyPercent.Equal(newDailyPercent) {\n\t\tposmining.DailyPercent = 
newDailyPercent\n\n\t\tk.SetPosmining(ctx, posmining, coin)\n\t}\n}", "func (o RentalRowerSlice) UpdateAll(exec boil.Executor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), rentalRowerPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"rental_rowers\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, rentalRowerPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.Exec(sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in rentalRower slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all rentalRower\")\n\t}\n\treturn rowsAff, nil\n}", "func (m *MobileThreatDefenseConnectorsMobileThreatDefenseConnectorItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.MobileThreatDefenseConnectorable, requestConfiguration *MobileThreatDefenseConnectorsMobileThreatDefenseConnectorItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.MobileThreatDefenseConnectorable, error) {\n requestInfo, err := 
m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateMobileThreatDefenseConnectorFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.MobileThreatDefenseConnectorable), nil\n}", "func (m *TermStoreRequestBuilder) Patch(ctx context.Context, body ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable, requestConfiguration *TermStoreRequestBuilderPatchRequestConfiguration)(ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.CreateStoreFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable), nil\n}", "func (m 
*PrivilegedSignupStatusItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable, requestConfiguration *PrivilegedSignupStatusItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreatePrivilegedSignupStatusFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.PrivilegedSignupStatusable), nil\n}", "func (m *HealthMenstruationDailyEntry) ToORM(ctx context.Context) (HealthMenstruationDailyEntryORM, error) {\n\tto := HealthMenstruationDailyEntryORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tvar t 
time.Time\n\t\tif t, err = ptypes1.Timestamp(m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.Day = &t\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = int32(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (m *ComanagedDevicesItemLogCollectionRequestsDeviceLogCollectionResponseItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.DeviceLogCollectionResponseable, requestConfiguration *ComanagedDevicesItemLogCollectionRequestsDeviceLogCollectionResponseItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.DeviceLogCollectionResponseable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateDeviceLogCollectionResponseFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.DeviceLogCollectionResponseable), nil\n}", "func (a *BulkApiService) PatchBulkExportExecute(r ApiPatchBulkExportRequest) (*BulkExport, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles 
[]formFile\n\t\tlocalVarReturnValue *BulkExport\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"BulkApiService.PatchBulkExport\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/bulk/Exports/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.bulkExport == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"bulkExport is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.bulkExport\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := 
ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v 
Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (o *CurrentChartDataMinutely) Upsert(exec boil.Executor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no current_chart_data_minutely provided for upsert\")\n\t}\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\tif o.CreatedAt.IsZero() {\n\t\to.CreatedAt = currTime\n\t}\n\to.UpdatedAt = currTime\n\n\tif err := o.doBeforeUpsertHooks(exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(currentChartDataMinutelyColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := 
buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tcurrentChartDataMinutelyUpsertCacheMut.RLock()\n\tcache, cached := currentChartDataMinutelyUpsertCache[key]\n\tcurrentChartDataMinutelyUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tcurrentChartDataMinutelyColumns,\n\t\t\tcurrentChartDataMinutelyColumnsWithDefault,\n\t\t\tcurrentChartDataMinutelyColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tcurrentChartDataMinutelyColumns,\n\t\t\tcurrentChartDataMinutelyPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert current_chart_data_minutely, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(currentChartDataMinutelyPrimaryKeyColumns))\n\t\t\tcopy(conflict, currentChartDataMinutelyPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"current_chart_data_minutely\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(currentChartDataMinutelyType, currentChartDataMinutelyMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(currentChartDataMinutelyType, currentChartDataMinutelyMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRow(cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows 
{\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.Exec(cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert current_chart_data_minutely\")\n\t}\n\n\tif !cached {\n\t\tcurrentChartDataMinutelyUpsertCacheMut.Lock()\n\t\tcurrentChartDataMinutelyUpsertCache[key] = cache\n\t\tcurrentChartDataMinutelyUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(exec)\n}", "func (o RowerGroupSlice) UpdateAll(exec boil.Executor, cols M) (int64, error) {\n\tln := int64(len(o))\n\tif ln == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(cols) == 0 {\n\t\treturn 0, errors.New(\"models: update all requires at least one column argument\")\n\t}\n\n\tcolNames := make([]string, len(cols))\n\targs := make([]interface{}, len(cols))\n\n\ti := 0\n\tfor name, value := range cols {\n\t\tcolNames[i] = name\n\t\targs[i] = value\n\t\ti++\n\t}\n\n\t// Append all of the primary key values for each column\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), rowerGroupPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := fmt.Sprintf(\"UPDATE \\\"rower_group\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, colNames),\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, rowerGroupPrimaryKeyColumns, len(o)))\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.Exec(sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all in rowerGroup slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected all in update all rowerGroup\")\n\t}\n\treturn rowsAff, nil\n}", "func (q rowerGroupQuery) UpdateAll(exec boil.Executor, cols M) (int64, error) 
{\n\tqueries.SetUpdate(q.Query, cols)\n\n\tresult, err := q.Query.Exec(exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update all for rower_group\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to retrieve rows affected for rower_group\")\n\t}\n\n\treturn rowsAff, nil\n}", "func (client *VirtualMachineScaleSetsClient) ForceRecoveryServiceFabricPlatformUpdateDomainWalk(ctx context.Context, resourceGroupName string, vmScaleSetName string, platformUpdateDomain int32, options *VirtualMachineScaleSetsForceRecoveryServiceFabricPlatformUpdateDomainWalkOptions) (VirtualMachineScaleSetsForceRecoveryServiceFabricPlatformUpdateDomainWalkResponse, error) {\n\treq, err := client.forceRecoveryServiceFabricPlatformUpdateDomainWalkCreateRequest(ctx, resourceGroupName, vmScaleSetName, platformUpdateDomain, options)\n\tif err != nil {\n\t\treturn VirtualMachineScaleSetsForceRecoveryServiceFabricPlatformUpdateDomainWalkResponse{}, err\n\t}\n\tresp, err := client.pl.Do(req)\n\tif err != nil {\n\t\treturn VirtualMachineScaleSetsForceRecoveryServiceFabricPlatformUpdateDomainWalkResponse{}, err\n\t}\n\tif !runtime.HasStatusCode(resp, http.StatusOK) {\n\t\treturn VirtualMachineScaleSetsForceRecoveryServiceFabricPlatformUpdateDomainWalkResponse{}, client.forceRecoveryServiceFabricPlatformUpdateDomainWalkHandleError(resp)\n\t}\n\treturn client.forceRecoveryServiceFabricPlatformUpdateDomainWalkHandleResponse(resp)\n}", "func (cc *LogController) Patch(c *gin.Context) {\n\trequest := &LogPatchRequest{}\n\tif err := c.ShouldBindJSON(request); err != nil {\n\t\tjsonAPIError(c, http.StatusUnprocessableEntity, err)\n\t\treturn\n\t}\n\n\tif request.Level == \"\" && request.SqlEnabled == nil {\n\t\tjsonAPIError(c, http.StatusBadRequest, fmt.Errorf(\"please set either logLevel or logSql as params in order to set the log level\"))\n\t\treturn\n\t}\n\n\tif request.Level != \"\" {\n\t\tvar ll 
zapcore.Level\n\t\terr := ll.UnmarshalText([]byte(request.Level))\n\t\tif err != nil {\n\t\t\tjsonAPIError(c, http.StatusBadRequest, err)\n\t\t\treturn\n\t\t}\n\t\tif err = cc.App.GetStore().Config.SetLogLevel(c.Request.Context(), ll.String()); err != nil {\n\t\t\tjsonAPIError(c, http.StatusInternalServerError, err)\n\t\t\treturn\n\t\t}\n\t}\n\n\tif request.SqlEnabled != nil {\n\t\tif err := cc.App.GetStore().Config.SetLogSQLStatements(c.Request.Context(), *request.SqlEnabled); err != nil {\n\t\t\tjsonAPIError(c, http.StatusInternalServerError, err)\n\t\t\treturn\n\t\t}\n\t\tcc.App.GetStore().SetLogging(*request.SqlEnabled)\n\t}\n\n\t// Set default logger with new configurations\n\tlogger.SetLogger(cc.App.GetStore().Config.CreateProductionLogger())\n\n\tresponse := &presenters.LogResource{\n\t\tJAID: presenters.JAID{\n\t\t\tID: \"log\",\n\t\t},\n\t\tLevel: cc.App.GetStore().Config.LogLevel().String(),\n\t\tSqlEnabled: cc.App.GetStore().Config.LogSQLStatements(),\n\t}\n\n\tjsonAPIResponse(c, response, \"log\")\n}", "func batch(lullTime time.Duration, maxTime time.Duration, exists existenceChecker, ch chan notify.EventInfo) *Mod {\n\tadded := make(map[string]bool)\n\tremoved := make(map[string]bool)\n\tchanged := make(map[string]bool)\n\trenamed := make(map[string]bool)\n\t// Have we had a modification in the last lull\n\thadLullMod := false\n\tfor {\n\t\tselect {\n\t\tcase evt := <-ch:\n\t\t\tif evt == nil {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\thadLullMod = true\n\t\t\tswitch evt.Event() {\n\t\t\tcase notify.Create:\n\t\t\t\tadded[evt.Path()] = true\n\t\t\tcase notify.Remove:\n\t\t\t\tremoved[evt.Path()] = true\n\t\t\tcase notify.Write:\n\t\t\t\tchanged[evt.Path()] = true\n\t\t\tcase notify.Rename:\n\t\t\t\trenamed[evt.Path()] = true\n\t\t\t}\n\t\tcase <-time.After(lullTime):\n\t\t\t// Have we had a lull?\n\t\t\tif hadLullMod == false {\n\t\t\t\tm := mkmod(exists, added, removed, changed, renamed)\n\t\t\t\treturn &m\n\t\t\t}\n\t\t\thadLullMod = false\n\t\tcase 
<-time.After(maxTime):\n\t\t\tm := mkmod(exists, added, removed, changed, renamed)\n\t\t\treturn &m\n\t\t}\n\t}\n}" ]
[ "0.75062126", "0.7110765", "0.7044403", "0.6352366", "0.584969", "0.58379257", "0.5815472", "0.58114034", "0.58000535", "0.5753678", "0.49336112", "0.49001288", "0.4879783", "0.487446", "0.4820178", "0.48102048", "0.4777965", "0.47738504", "0.47666943", "0.47600615", "0.47389394", "0.4734152", "0.4724264", "0.47007543", "0.46946195", "0.46920747", "0.46747327", "0.4661909", "0.46493888", "0.464884", "0.4647721", "0.46321335", "0.4610523", "0.46010378", "0.45797524", "0.45713025", "0.45692766", "0.45605028", "0.45550182", "0.45506993", "0.45463067", "0.4537918", "0.45369008", "0.45265025", "0.45221362", "0.45211223", "0.4520731", "0.45175555", "0.45084268", "0.4505236", "0.45046702", "0.4501087", "0.44992504", "0.4498412", "0.44894", "0.44848716", "0.44801262", "0.44769648", "0.4463861", "0.44590086", "0.44573662", "0.44572547", "0.44563764", "0.4454831", "0.44538903", "0.4451262", "0.4450918", "0.4448334", "0.44429293", "0.4441626", "0.44328377", "0.44304702", "0.44289806", "0.4428401", "0.44281414", "0.44278616", "0.44249135", "0.4424676", "0.44237927", "0.44231856", "0.44167557", "0.4414446", "0.44099826", "0.44086352", "0.44085276", "0.44058368", "0.4396833", "0.439243", "0.43868697", "0.43859583", "0.43841565", "0.4383061", "0.43717834", "0.4371608", "0.43714094", "0.43707985", "0.4370529", "0.4366893", "0.43649518", "0.4364724" ]
0.755557
0
DefaultApplyFieldMaskHealthMenstruationDailyEntry patches an pbObject with patcher according to a field mask.
func DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx context.Context, patchee *HealthMenstruationDailyEntry, patcher *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) { if patcher == nil { return nil, nil } else if patchee == nil { return nil, errors1.NilArgumentError } var err error for _, f := range updateMask.Paths { if f == prefix+"Id" { patchee.Id = patcher.Id continue } if f == prefix+"CreatedAt" { patchee.CreatedAt = patcher.CreatedAt continue } if f == prefix+"UpdatedAt" { patchee.UpdatedAt = patcher.UpdatedAt continue } if f == prefix+"ProfileId" { patchee.ProfileId = patcher.ProfileId continue } if f == prefix+"Day" { patchee.Day = patcher.Day continue } if f == prefix+"IntensityPercentage" { patchee.IntensityPercentage = patcher.IntensityPercentage continue } if f == prefix+"Type" { patchee.Type = patcher.Type continue } if f == prefix+"Manual" { patchee.Manual = patcher.Manual continue } if f == prefix+"BasedOnPrediction" { patchee.BasedOnPrediction = patcher.BasedOnPrediction continue } } if err != nil { return nil, err } return patchee, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultPatchSetHealthMenstruationDailyEntry(ctx context.Context, objects []*HealthMenstruationDailyEntry, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationDailyEntry, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationDailyEntry, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationDailyEntry(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func DefaultPatchHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationDailyEntry\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationDailyEntry(ctx, &HealthMenstruationDailyEntry{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := 
DefaultStrictUpdateHealthMenstruationDailyEntry(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationDailyEntryWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx context.Context, patchee *HealthMenstruationPersonalInfo, patcher *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"PeriodLengthInDays\" {\n\t\t\tpatchee.PeriodLengthInDays = patcher.PeriodLengthInDays\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CycleLengthInDays\" {\n\t\t\tpatchee.CycleLengthInDays = patcher.CycleLengthInDays\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultApplyFieldMaskUserInfo(ctx context.Context, patchee *UserInfo, patcher *UserInfo, updateMask *field_mask.FieldMask, prefix string, db *gorm.DB) (*UserInfo, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar err error\n\tvar updatedCreatedAt bool\n\tvar updatedUpdatedAt bool\n\tfor i, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = 
patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UserId\" {\n\t\t\tpatchee.UserId = patcher.UserId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"LastName\" {\n\t\t\tpatchee.LastName = patcher.LastName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"FirstName\" {\n\t\t\tpatchee.FirstName = patcher.FirstName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Period\" {\n\t\t\tpatchee.Period = patcher.Period\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"DepartmentId\" {\n\t\t\tpatchee.DepartmentId = patcher.DepartmentId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"JobId\" {\n\t\t\tpatchee.JobId = patcher.JobId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"EnrollmentFlg\" {\n\t\t\tpatchee.EnrollmentFlg = patcher.EnrollmentFlg\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"AdminFlg\" {\n\t\t\tpatchee.AdminFlg = patcher.AdminFlg\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedCreatedAt && strings.HasPrefix(f, prefix+\"CreatedAt.\") {\n\t\t\tif patcher.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"CreatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.CreatedAt, patchee.CreatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tupdatedCreatedAt = true\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUpdatedAt && strings.HasPrefix(f, prefix+\"UpdatedAt.\") {\n\t\t\tif patcher.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := 
&field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UpdatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.UpdatedAt, patchee.UpdatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tupdatedUpdatedAt = true\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultApplyFieldMaskPeriod(ctx context.Context, patchee *Period, patcher *Period, updateMask *field_mask.FieldMask, prefix string, db *gorm.DB) (*Period, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar err error\n\tvar updatedCreatedAt bool\n\tvar updatedUpdatedAt bool\n\tfor i, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Period\" {\n\t\t\tpatchee.Period = patcher.Period\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedCreatedAt && strings.HasPrefix(f, prefix+\"CreatedAt.\") {\n\t\t\tif patcher.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"CreatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.CreatedAt, patchee.CreatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tupdatedCreatedAt = 
true\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUpdatedAt && strings.HasPrefix(f, prefix+\"UpdatedAt.\") {\n\t\t\tif patcher.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UpdatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm1.MergeWithMask(patcher.UpdatedAt, patchee.UpdatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tupdatedUpdatedAt = true\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultStrictUpdateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationDailyEntry\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationDailyEntryORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil 
{\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func DefaultApplyFieldMaskComment(ctx context.Context, patchee *Comment, patcher *Comment, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*Comment, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tvar updatedCreatedAt bool\n\tvar updatedUpdatedAt bool\n\tvar updatedDeletedAt bool\n\tvar updatedBoardId bool\n\tvar updatedPostId bool\n\tvar updatedContentId bool\n\tvar updatedUserid bool\n\tvar updatedUsername bool\n\tvar updatedNickname bool\n\tvar updatedEmail bool\n\tvar updatedPassword bool\n\tvar updatedUrl bool\n\tvar updatedUseHtml bool\n\tvar updatedUseSecret bool\n\tfor i, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedCreatedAt && strings.HasPrefix(f, prefix+\"CreatedAt.\") {\n\t\t\tif patcher.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.CreatedAt == nil {\n\t\t\t\tpatchee.CreatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"CreatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.CreatedAt, patchee.CreatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tupdatedCreatedAt = true\n\t\t\tpatchee.CreatedAt = 
patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUpdatedAt && strings.HasPrefix(f, prefix+\"UpdatedAt.\") {\n\t\t\tif patcher.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UpdatedAt == nil {\n\t\t\t\tpatchee.UpdatedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UpdatedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.UpdatedAt, patchee.UpdatedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tupdatedUpdatedAt = true\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedDeletedAt && strings.HasPrefix(f, prefix+\"DeletedAt.\") {\n\t\t\tif patcher.DeletedAt == nil {\n\t\t\t\tpatchee.DeletedAt = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.DeletedAt == nil {\n\t\t\t\tpatchee.DeletedAt = &timestamp.Timestamp{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"DeletedAt.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.DeletedAt, patchee.DeletedAt, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"DeletedAt\" {\n\t\t\tupdatedDeletedAt = true\n\t\t\tpatchee.DeletedAt = patcher.DeletedAt\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedBoardId && strings.HasPrefix(f, prefix+\"BoardId.\") {\n\t\t\tif patcher.BoardId == nil {\n\t\t\t\tpatchee.BoardId = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.BoardId == nil {\n\t\t\t\tpatchee.BoardId = 
&wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"BoardId.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.BoardId, patchee.BoardId, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"BoardId\" {\n\t\t\tupdatedBoardId = true\n\t\t\tpatchee.BoardId = patcher.BoardId\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedPostId && strings.HasPrefix(f, prefix+\"PostId.\") {\n\t\t\tif patcher.PostId == nil {\n\t\t\t\tpatchee.PostId = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.PostId == nil {\n\t\t\t\tpatchee.PostId = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"PostId.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.PostId, patchee.PostId, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"PostId\" {\n\t\t\tupdatedPostId = true\n\t\t\tpatchee.PostId = patcher.PostId\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedContentId && strings.HasPrefix(f, prefix+\"ContentId.\") {\n\t\t\tif patcher.ContentId == nil {\n\t\t\t\tpatchee.ContentId = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.ContentId == nil {\n\t\t\t\tpatchee.ContentId = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"ContentId.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := 
gorm2.MergeWithMask(patcher.ContentId, patchee.ContentId, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"ContentId\" {\n\t\t\tupdatedContentId = true\n\t\t\tpatchee.ContentId = patcher.ContentId\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUserid && strings.HasPrefix(f, prefix+\"Userid.\") {\n\t\t\tif patcher.Userid == nil {\n\t\t\t\tpatchee.Userid = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.Userid == nil {\n\t\t\t\tpatchee.Userid = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"Userid.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.Userid, patchee.Userid, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"Userid\" {\n\t\t\tupdatedUserid = true\n\t\t\tpatchee.Userid = patcher.Userid\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUsername && strings.HasPrefix(f, prefix+\"Username.\") {\n\t\t\tif patcher.Username == nil {\n\t\t\t\tpatchee.Username = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.Username == nil {\n\t\t\t\tpatchee.Username = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"Username.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.Username, patchee.Username, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"Username\" {\n\t\t\tupdatedUsername = true\n\t\t\tpatchee.Username = patcher.Username\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedNickname && strings.HasPrefix(f, prefix+\"Nickname.\") {\n\t\t\tif 
patcher.Nickname == nil {\n\t\t\t\tpatchee.Nickname = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.Nickname == nil {\n\t\t\t\tpatchee.Nickname = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"Nickname.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.Nickname, patchee.Nickname, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"Nickname\" {\n\t\t\tupdatedNickname = true\n\t\t\tpatchee.Nickname = patcher.Nickname\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedEmail && strings.HasPrefix(f, prefix+\"Email.\") {\n\t\t\tif patcher.Email == nil {\n\t\t\t\tpatchee.Email = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.Email == nil {\n\t\t\t\tpatchee.Email = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"Email.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.Email, patchee.Email, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"Email\" {\n\t\t\tupdatedEmail = true\n\t\t\tpatchee.Email = patcher.Email\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedPassword && strings.HasPrefix(f, prefix+\"Password.\") {\n\t\t\tif patcher.Password == nil {\n\t\t\t\tpatchee.Password = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.Password == nil {\n\t\t\t\tpatchee.Password = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"Password.\"); 
trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.Password, patchee.Password, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"Password\" {\n\t\t\tupdatedPassword = true\n\t\t\tpatchee.Password = patcher.Password\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUrl && strings.HasPrefix(f, prefix+\"Url.\") {\n\t\t\tif patcher.Url == nil {\n\t\t\t\tpatchee.Url = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.Url == nil {\n\t\t\t\tpatchee.Url = &wrappers.StringValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"Url.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.Url, patchee.Url, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"Url\" {\n\t\t\tupdatedUrl = true\n\t\t\tpatchee.Url = patcher.Url\n\t\t\tcontinue\n\t\t}\n\t\tif !updatedUseHtml && strings.HasPrefix(f, prefix+\"UseHtml.\") {\n\t\t\tif patcher.UseHtml == nil {\n\t\t\t\tpatchee.UseHtml = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UseHtml == nil {\n\t\t\t\tpatchee.UseHtml = &wrappers.BoolValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UseHtml.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.UseHtml, patchee.UseHtml, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UseHtml\" {\n\t\t\tupdatedUseHtml = true\n\t\t\tpatchee.UseHtml = patcher.UseHtml\n\t\t\tcontinue\n\t\t}\n\t\tif 
!updatedUseSecret && strings.HasPrefix(f, prefix+\"UseSecret.\") {\n\t\t\tif patcher.UseSecret == nil {\n\t\t\t\tpatchee.UseSecret = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif patchee.UseSecret == nil {\n\t\t\t\tpatchee.UseSecret = &wrappers.BoolValue{}\n\t\t\t}\n\t\t\tchildMask := &field_mask1.FieldMask{}\n\t\t\tfor j := i; j < len(updateMask.Paths); j++ {\n\t\t\t\tif trimPath := strings.TrimPrefix(updateMask.Paths[j], prefix+\"UseSecret.\"); trimPath != updateMask.Paths[j] {\n\t\t\t\t\tchildMask.Paths = append(childMask.Paths, trimPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif err := gorm2.MergeWithMask(patcher.UseSecret, patchee.UseSecret, childMask); err != nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\t\t}\n\t\tif f == prefix+\"UseSecret\" {\n\t\t\tupdatedUseSecret = true\n\t\t\tpatchee.UseSecret = patcher.UseSecret\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpVoteCount\" {\n\t\t\tpatchee.UpVoteCount = patcher.UpVoteCount\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"DownVoteCount\" {\n\t\t\tpatchee.DownVoteCount = patcher.DownVoteCount\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultApplyFieldMaskIntPoint(ctx context.Context, patchee *IntPoint, patcher *IntPoint, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*IntPoint, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors.New(\"Patchee inputs to DefaultApplyFieldMaskIntPoint must be non-nil\")\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"X\" {\n\t\t\tpatchee.X = patcher.X\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Y\" {\n\t\t\tpatchee.Y = patcher.Y\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultPatchSetHealthMenstruationPersonalInfo(ctx context.Context, objects []*HealthMenstruationPersonalInfo, 
updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationPersonalInfo, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationPersonalInfo, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationPersonalInfo(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func DefaultListHealthMenstruationDailyEntry(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationDailyEntry, error) {\n\tin := HealthMenstruationDailyEntry{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationDailyEntryORM{}, &HealthMenstruationDailyEntry{}, f, s, p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationDailyEntryORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationDailyEntry{}\n\tfor _, responseEntry := range 
ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func DefaultApplyFieldMaskProfile(ctx context.Context, patchee *Profile, patcher *Profile, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*Profile, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Notes\" {\n\t\t\tpatchee.Notes = patcher.Notes\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"FirstName\" {\n\t\t\tpatchee.FirstName = patcher.FirstName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"LastName\" {\n\t\t\tpatchee.LastName = patcher.LastName\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"PrimaryEmail\" {\n\t\t\tpatchee.PrimaryEmail = patcher.PrimaryEmail\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Groups\" {\n\t\t\tpatchee.Groups = patcher.Groups\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfilePictureUrl\" {\n\t\t\tpatchee.ProfilePictureUrl = patcher.ProfilePictureUrl\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultPatchHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationPersonalInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchRead); ok {\n\t\tif db, err = 
hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationPersonalInfo(ctx, &HealthMenstruationPersonalInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationPersonalInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultCreateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultReadHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationDailyEntryORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationDailyEntryORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationDailyEntryORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultPatchSetUserInfo(ctx context.Context, objects []*UserInfo, updateMasks []*field_mask.FieldMask, db *gorm.DB) ([]*UserInfo, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*UserInfo, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchUserInfo(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func 
DefaultPatchSetPeriod(ctx context.Context, objects []*Period, updateMasks []*field_mask.FieldMask, db *gorm.DB) ([]*Period, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*Period, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchPeriod(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func DefaultPatchPeriod(ctx context.Context, in *Period, updateMask *field_mask.FieldMask, db *gorm.DB) (*Period, error) {\n\tif in == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar pbObj Period\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(PeriodWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadPeriod(ctx, &Period{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(PeriodWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskPeriod(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(PeriodWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdatePeriod(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(PeriodWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func WithOverrideFieldMask(d 
*presenceInterceptorOptionsDecorator) {\n\td.overrideFieldMask = true\n}", "func (c *kuberhealthyChecks) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result KuberhealthyCheck, err error) {\n\tresult = KuberhealthyCheck{}\n\terr = c.client.Patch(pt).\n\t\tNamespace(c.ns).\n\t\tResource(\"khchecks\").\n\t\tSubResource(subresources...).\n\t\tName(name).\n\t\tBody(data).\n\t\tDo(context.TODO()).\n\t\tInto(&result)\n\treturn\n}", "func (c *globalThreatFeeds) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v3.GlobalThreatFeed, err error) {\n\tresult = &v3.GlobalThreatFeed{}\n\terr = c.client.Patch(pt).\n\t\tResource(\"globalthreatfeeds\").\n\t\tName(name).\n\t\tSubResource(subresources...).\n\t\tVersionedParams(&opts, scheme.ParameterCodec).\n\t\tBody(data).\n\t\tDo(ctx).\n\t\tInto(result)\n\treturn\n}", "func DefaultPatchSetProfile(ctx context.Context, objects []*Profile, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*Profile, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*Profile, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchProfile(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func (m *HealthMenstruationDailyEntryORM) ToPB(ctx context.Context) (HealthMenstruationDailyEntry, error) {\n\tto := HealthMenstruationDailyEntry{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, 
err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tif to.Day, err = ptypes1.TimestampProto(*m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = HealthMenstruationDailyEntry_Type(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func DefaultPatchUserInfo(ctx context.Context, in *UserInfo, updateMask *field_mask.FieldMask, db *gorm.DB) (*UserInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors.NilArgumentError\n\t}\n\tvar pbObj UserInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(UserInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadUserInfo(ctx, &UserInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(UserInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskUserInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(UserInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateUserInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(UserInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn 
nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func (c *Patch) Apply(cp, cd runtime.Object, only ...PatchType) error {\n\tif c.filterPatch(only...) {\n\t\treturn nil\n\t}\n\n\tswitch c.Type {\n\tcase PatchTypeFromCompositeFieldPath:\n\t\treturn c.applyFromFieldPathPatch(cp, cd)\n\tcase PatchTypeToCompositeFieldPath:\n\t\treturn c.applyFromFieldPathPatch(cd, cp)\n\tcase PatchTypeCombineFromComposite:\n\t\treturn c.applyCombineFromVariablesPatch(cp, cd)\n\tcase PatchTypeCombineToComposite:\n\t\treturn c.applyCombineFromVariablesPatch(cd, cp)\n\tcase PatchTypePatchSet:\n\t\t// Already resolved - nothing to do.\n\t}\n\treturn errors.Errorf(errFmtInvalidPatchType, c.Type)\n}", "func (c *PropertiesGoogleAdsLinksPatchCall) UpdateMask(updateMask string) *PropertiesGoogleAdsLinksPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (c *ProjectsLocationsDataExchangesListingsPatchCall) UpdateMask(updateMask string) *ProjectsLocationsDataExchangesListingsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func NewPatchPrefilterHandler(d *Daemon) PatchPrefilterHandler {\n\treturn &patchPrefilter{d: d}\n}", "func (c *OrganizationsEnvironmentsArchiveDeploymentsPatchCall) UpdateMask(updateMask string) *OrganizationsEnvironmentsArchiveDeploymentsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationPersonalInfo\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := &HealthMenstruationPersonalInfoORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := 
interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func (m *ThreatSubmissionEmailThreatsEmailThreatSubmissionItemRequestBuilder) Patch(ctx context.Context, body i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.EmailThreatSubmissionable, requestConfiguration *ThreatSubmissionEmailThreatsEmailThreatSubmissionItemRequestBuilderPatchRequestConfiguration)(i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.EmailThreatSubmissionable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.CreateEmailThreatSubmissionFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return 
res.(i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.EmailThreatSubmissionable), nil\n}", "func (m *ItemCalendarViewBookingAppointmentItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.BookingAppointmentable, requestConfiguration *ItemCalendarViewBookingAppointmentItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.BookingAppointmentable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateBookingAppointmentFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.BookingAppointmentable), nil\n}", "func (m *DeviceManagementSettings) SetDeviceComplianceCheckinThresholdDays(value *int32)() {\n err := m.GetBackingStore().Set(\"deviceComplianceCheckinThresholdDays\", value)\n if err != nil {\n panic(err)\n }\n}", "func (m *AssignmentDefaultsRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.EducationAssignmentDefaultsable, requestConfiguration *AssignmentDefaultsRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.EducationAssignmentDefaultsable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, 
requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateEducationAssignmentDefaultsFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.EducationAssignmentDefaultsable), nil\n}", "func AdjustDay(r []entry.Entry) []entry.Entry {\n\tfor i, _ := range r {\n\t\tr[i].Day = r[i].Day[:10]\n\t}\n\n\treturn r\n}", "func NewPatchReportGroupByIDDefault(code int) *PatchReportGroupByIDDefault {\n\treturn &PatchReportGroupByIDDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (m *AccessReviewHistoryDefinitionItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.AccessReviewHistoryDefinitionable, requestConfiguration *AccessReviewHistoryDefinitionItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.AccessReviewHistoryDefinitionable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, 
requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateAccessReviewHistoryDefinitionFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.AccessReviewHistoryDefinitionable), nil\n}", "func (m *ReportsRequestBuilder) Patch(ctx context.Context, body i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable, requestConfiguration *ReportsRequestBuilderPatchRequestConfiguration)(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.CreateReportsFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(i43e723cc778f0f3f3a05d36b9df74faa56771e9360d8ed793c50bdaacec8d5d2.Reportsable), nil\n}", "func (c *OrganizationsEnvironmentsTraceConfigOverridesPatchCall) UpdateMask(updateMask string) *OrganizationsEnvironmentsTraceConfigOverridesPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (a *APIPatchingApplicator) Apply(ctx context.Context, o client.Object, ao ...ApplyOption) error {\n\tif o.GetNamespace() == \"\" {\n\t\to.SetNamespace(\"default\")\n\t}\n\n\tm, ok := o.(metav1.Object)\n\tif !ok {\n\t\treturn errors.New(\"cannot access object 
metadata\")\n\t}\n\n\tif m.GetName() == \"\" && m.GetGenerateName() != \"\" {\n\t\treturn errors.Wrap(a.client.Create(ctx, o), \"cannot create object\")\n\t}\n\n\tdesired := o.DeepCopyObject()\n\n\terr := a.client.Get(ctx, types.NamespacedName{Name: m.GetName(), Namespace: m.GetNamespace()}, o)\n\tif kerrors.IsNotFound(err) {\n\t\t// TODO: Apply ApplyOptions here too?\n\t\treturn errors.Wrap(a.client.Create(ctx, o), \"cannot create object\")\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"cannot get object\")\n\t}\n\n\tfor _, fn := range ao {\n\t\tif err := fn(ctx, o, desired); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// TODO: Allow callers to override the kind of patch used.\n\treturn errors.Wrap(a.client.Patch(ctx, o, &patch{desired.(client.Object)}), \"cannot patch object\")\n}", "func (gm *gmap) applyEntries(gmp *gmapProgress, apply *apply) {\n\t// Has entry?\n\tif len(apply.entries) == 0 {\n\t\treturn\n\t}\n\t// Is the node leave the cluster tool long, the latest snapshot is better than the entry.\n\tfirsti := apply.entries[0].Index\n\tif firsti > gmp.appliedi+1 {\n\t\tlogger.Panicf(\"first index of committed entry[%d] should <= appliedi[%d] + 1\", firsti, gmp.appliedi)\n\t}\n\t// Extract useful entries.\n\tvar ents []raftpb.Entry\n\tif gmp.appliedi+1-firsti < uint64(len(apply.entries)) {\n\t\tents = apply.entries[gmp.appliedi+1-firsti:]\n\t}\n\t// Iterate all entries\n\tfor _, e := range ents {\n\t\tswitch e.Type {\n\t\t// Normal entry.\n\t\tcase raftpb.EntryNormal:\n\t\t\tif len(e.Data) != 0 {\n\t\t\t\t// Unmarshal request.\n\t\t\t\tvar req InternalRaftRequest\n\t\t\t\tpbutil.MustUnmarshal(&req, e.Data)\n\n\t\t\t\tvar ar applyResult\n\t\t\t\t// Put new value\n\t\t\t\tif put := req.Put; put != nil {\n\t\t\t\t\t// Get set.\n\t\t\t\t\tset, exist := gm.sets[put.Set]\n\t\t\t\t\tif !exist {\n\t\t\t\t\t\tlogger.Panicf(\"set(%s) is not exist\", put.Set)\n\t\t\t\t\t}\n\t\t\t\t\t// Get key, value and revision.\n\t\t\t\t\tkey, value, revision := put.Key, 
set.vtype.unwrap(put.Value), e.Index\n\t\t\t\t\t// Get map and put value into map.\n\t\t\t\t\tm := set.get(put.Map)\n\t\t\t\t\tm.put(key, value, revision)\n\t\t\t\t\t// Send put event to watcher\n\t\t\t\t\tevent := MapEvent{Type: PUT, KV: &KeyValue{Key: key, Value: value}}\n\t\t\t\t\tm.watchers.Range(func(key, value interface{}) bool {\n\t\t\t\t\t\tkey.(*watcher).eventc <- event\n\t\t\t\t\t\treturn true\n\t\t\t\t\t})\n\t\t\t\t\t// Set apply result.\n\t\t\t\t\tar.rev = revision\n\t\t\t\t}\n\t\t\t\t// Delete value\n\t\t\t\tif del := req.Delete; del != nil {\n\t\t\t\t\t// Get set.\n\t\t\t\t\tset, exist := gm.sets[del.Set]\n\t\t\t\t\tif !exist {\n\t\t\t\t\t\tlogger.Panicf(\"set(%s) is not exist\", del.Set)\n\t\t\t\t\t}\n\t\t\t\t\t// Get map and delete value from map.\n\t\t\t\t\tm := set.get(del.Map)\n\t\t\t\t\tif pre := m.delete(del.Key); nil != pre {\n\t\t\t\t\t\t// Send put event to watcher\n\t\t\t\t\t\tar.pre = *pre\n\t\t\t\t\t\tevent := MapEvent{Type: DELETE, PrevKV: &KeyValue{Key: del.Key, Value: ar.pre.Value}}\n\t\t\t\t\t\tm.watchers.Range(func(key, value interface{}) bool {\n\t\t\t\t\t\t\tkey.(*watcher).eventc <- event\n\t\t\t\t\t\t\treturn true\n\t\t\t\t\t\t})\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// Update value\n\t\t\t\tif update := req.Update; update != nil {\n\t\t\t\t\t// Get set.\n\t\t\t\t\tset, exist := gm.sets[update.Set]\n\t\t\t\t\tif !exist {\n\t\t\t\t\t\tlogger.Panicf(\"set(%s) is not exist\", update.Set)\n\t\t\t\t\t}\n\t\t\t\t\t// Get map.\n\t\t\t\t\tm := set.get(update.Map)\n\t\t\t\t\t// Update value.\n\t\t\t\t\tpre, ok := m.update(update.Key, update.Value, update.Revision, e.Index)\n\t\t\t\t\tif ok {\n\t\t\t\t\t\t// The revision will be set only if update succeed\n\t\t\t\t\t\tar.rev = e.Index\n\t\t\t\t\t}\n\t\t\t\t\tif nil != pre {\n\t\t\t\t\t\tar.pre = *pre\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// Trigger proposal waiter.\n\t\t\t\tgm.wait.Trigger(req.ID, &ar)\n\t\t\t}\n\t\t// The configuration of gmap is fixed and wil not be synchronized through 
raft.\n\t\tcase raftpb.EntryConfChange:\n\t\tdefault:\n\t\t\tlogger.Panicf(\"entry type should be either EntryNormal or EntryConfChange\")\n\t\t}\n\n\t\tgmp.appliedi, gmp.appliedt = e.Index, e.Term\n\t}\n}", "func (m *ItemItemsItemWorkbookWorksheetsItemChartsItemDataLabelsFormatRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.WorkbookChartDataLabelFormatable, requestConfiguration *ItemItemsItemWorkbookWorksheetsItemChartsItemDataLabelsFormatRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.WorkbookChartDataLabelFormatable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateWorkbookChartDataLabelFormatFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.WorkbookChartDataLabelFormatable), nil\n}", "func (c *ProjectsPatchDeploymentsPatchCall) UpdateMask(updateMask string) *ProjectsPatchDeploymentsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (c *PropertiesIosAppDataStreamsPatchCall) UpdateMask(updateMask string) *PropertiesIosAppDataStreamsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func TestFieldMaskFromRequestBodyWithDescriptor(t *testing.T) {\n\tif testing.Short() 
{\n\t\tt.Skip()\n\t\treturn\n\t}\n\n\t_, md := descriptor.ForMessage(new(examplepb.NonStandardMessage))\n\tjsonInput := `{\"id\":\"foo\", \"thing\":{\"subThing\":{\"sub_value\":\"bar\"}}}`\n\texpected := newFieldMask(\"id\", \"thing.subThing.sub_value\")\n\n\tactual, err := runtime.FieldMaskFromRequestBody(bytes.NewReader([]byte(jsonInput)), md)\n\tif !fieldMasksEqual(actual, expected) {\n\t\tt.Errorf(\"want %v; got %v\", fieldMaskString(expected), fieldMaskString(actual))\n\t}\n\tif err != nil {\n\t\tt.Errorf(\"err %v\", err)\n\t}\n}", "func (c *AppsModulesPatchCall) Mask(mask string) *AppsModulesPatchCall {\n\tc.urlParams_.Set(\"mask\", mask)\n\treturn c\n}", "func patch(newObj runtime.Object, existingObj runtime.Object, c client.Client) error {\n\tnewObjJSON, _ := apijson.Marshal(newObj)\n\tkey, _ := client.ObjectKeyFromObject(newObj)\n\t_, isUnstructured := newObj.(runtime.Unstructured)\n\t_, isCRD := newObj.(*apiextv1beta1.CustomResourceDefinition)\n\n\tif isUnstructured || isCRD || isKudoType(newObj) {\n\t\t// strategic merge patch is not supported for these types, falling back to merge patch\n\t\terr := c.Patch(context.TODO(), newObj, client.ConstantPatch(types.MergePatchType, newObjJSON))\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to apply merge patch to object %s/%s: %w\", key.Name, key.Name, err)\n\t\t}\n\t} else {\n\t\terr := c.Patch(context.TODO(), existingObj, client.ConstantPatch(types.StrategicMergePatchType, newObjJSON))\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to apply StrategicMergePatch to object %s/%s: %w\", key.Namespace, key.Name, err)\n\t\t}\n\t}\n\treturn nil\n}", "func Apply(data []byte, x interface{}) error {\n\trx := reflect.ValueOf(x)\n\tif rx.Kind() != reflect.Ptr || rx.IsNil() {\n\t\treturn ErrNonPointer\n\t}\n\n\tvar patches []Patch\n\terr := json.Unmarshal(data, &patches)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\try := reflect.New(rx.Elem().Type())\n\t// I am making a copy of the interface so that when 
an\n\t// error arises while performing one of the patches the\n\t// original data structure does not get altered.\n\terr = deep.Copy(x, ry.Interface())\n\tif err != nil {\n\t\treturn ErrCouldNotCopy\n\t}\n\n\tfor _, p := range patches {\n\t\tpath := strings.Trim(p.Path, \"/\")\n\t\terr := rapply(path, &p, ry)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\trx.Elem().Set(ry.Elem())\n\treturn nil\n}", "func NewPatchRecipientGroupByIDDefault(code int) *PatchRecipientGroupByIDDefault {\n\treturn &PatchRecipientGroupByIDDefault{\n\t\t_statusCode: code,\n\t}\n}", "func getDaemonSetPatch(ds *appsv1.DaemonSet) ([]byte, error) {\n\tdsBytes, err := json.Marshal(ds)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar raw map[string]interface{}\n\terr = json.Unmarshal(dsBytes, &raw)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tobjCopy := make(map[string]interface{})\n\tspecCopy := make(map[string]interface{})\n\n\t// Create a patch of the DaemonSet that replaces spec.template\n\tspec := raw[\"spec\"].(map[string]interface{})\n\ttemplate := spec[\"template\"].(map[string]interface{})\n\tspecCopy[\"template\"] = template\n\ttemplate[\"$patch\"] = \"replace\"\n\tobjCopy[\"spec\"] = specCopy\n\tpatch, err := json.Marshal(objCopy)\n\treturn patch, err\n}", "func NewDcimDeviceTypesPartialUpdateDefault(code int) *DcimDeviceTypesPartialUpdateDefault {\n\treturn &DcimDeviceTypesPartialUpdateDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (fieldMask UpdateAlertingPolicyRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (m *TermsAndConditionsItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TermsAndConditionsable, requestConfiguration *TermsAndConditionsItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TermsAndConditionsable, error) {\n requestInfo, err 
:= m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateTermsAndConditionsFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TermsAndConditionsable), nil\n}", "func (c *PropertiesAndroidAppDataStreamsPatchCall) UpdateMask(updateMask string) *PropertiesAndroidAppDataStreamsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (m MarketDataSnapshotFullRefresh) SetMaturityDay(v int) {\n\tm.Set(field.NewMaturityDay(v))\n}", "func createDaemonSetPatch(daemonSet *appsv1.DaemonSet, labels map[string]string, annotations map[string]string) ([]byte, error) {\n\tvar patch []k8s.PatchOperation\n\tpatch = append(patch, k8s.GenerateSpecTemplateAnnotationPatch(daemonSet.Spec.Template.Annotations, annotations)...)\n\tpatch = append(patch, k8s.GenerateMetadataLabelsPatch(daemonSet.Labels, labels)...)\n\treturn json.Marshal(patch)\n}", "func (m *DirectoryRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable, requestConfiguration *DirectoryRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateRbacApplicationFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable), nil\n}", "func (m *ExactMatchDataStoresExactMatchDataStoreItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ExactMatchDataStoreable, requestConfiguration *ExactMatchDataStoresExactMatchDataStoreItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ExactMatchDataStoreable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateExactMatchDataStoreFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.ExactMatchDataStoreable), nil\n}", "func (a *HyperflexApiService) 
PatchHyperflexHealthCheckDefinition(ctx context.Context, moid string) ApiPatchHyperflexHealthCheckDefinitionRequest {\n\treturn ApiPatchHyperflexHealthCheckDefinitionRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (c *PropertiesIosAppDataStreamsMeasurementProtocolSecretsPatchCall) UpdateMask(updateMask string) *PropertiesIosAppDataStreamsMeasurementProtocolSecretsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (c *OrganizationsEnvironmentsModifyEnvironmentCall) UpdateMask(updateMask string) *OrganizationsEnvironmentsModifyEnvironmentCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (m *TeamsAppSettingsRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TeamsAppSettingsable, requestConfiguration *TeamsAppSettingsRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TeamsAppSettingsable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateTeamsAppSettingsFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TeamsAppSettingsable), nil\n}", "func ApplyPatch(obj Obj, patches []JSONPatchOp) (Obj, error) {\n\tfor _, patch := range patches {\n\t\tval, ok := 
patch.Value.(float64)\n\t\tif !ok {\n\t\t\treturn Obj{}, fmt.Errorf(\"unsupported value for path '\"+patch.Path+\"': %T\", patch.Value)\n\t\t}\n\n\t\tpath := strings.Split(patch.Path, \"/\")\n\t\tif len(path) != 4 {\n\t\t\treturn Obj{}, fmt.Errorf(\"unsupported patch path for Obj, expected 4 parts: '%v' got '%+v'\", patch.Path, path)\n\t\t}\n\t\tpath = path[1:] // remove initial empty /\n\t\tif path[0] == \"foo-a\" {\n\t\t\tif path[1] == \"bar-a\" {\n\t\t\t\tif path[2] == \"baz-a\" {\n\t\t\t\t\tobj.FooA.BarA.BazA = int64(val)\n\t\t\t\t} else if path[2] == \"baz-b\" {\n\t\t\t\t\tobj.FooA.BarA.BazB = int64(val)\n\t\t\t\t} else {\n\t\t\t\t\treturn Obj{}, errors.New(\"unsupported patch path for Obj, unsupported part: '\" + path[2] + \"' in '\" + patch.Path + \"'\")\n\t\t\t\t}\n\t\t\t} else if path[1] == \"bar-b\" {\n\t\t\t\tif path[2] == \"baz-a\" {\n\t\t\t\t\tobj.FooA.BarB.BazA = int64(val)\n\t\t\t\t} else if path[2] == \"baz-b\" {\n\t\t\t\t\tobj.FooA.BarB.BazB = int64(val)\n\t\t\t\t} else {\n\t\t\t\t\treturn Obj{}, errors.New(\"unsupported patch path for Obj, unsupported part: '\" + path[2] + \"' in '\" + patch.Path + \"'\")\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\treturn Obj{}, errors.New(\"unsupported patch path for Obj, unsupported part: '\" + path[1] + \"' in '\" + patch.Path + \"'\")\n\t\t\t}\n\t\t} else if path[0] == \"foo-b\" {\n\t\t\tif path[1] == \"bar-a\" {\n\t\t\t\tif path[2] == \"baz-a\" {\n\t\t\t\t\tobj.FooB.BarA.BazA = int64(val)\n\t\t\t\t} else if path[2] == \"baz-b\" {\n\t\t\t\t\tobj.FooB.BarA.BazB = int64(val)\n\t\t\t\t} else {\n\t\t\t\t\treturn Obj{}, errors.New(\"unsupported patch path for Obj, unsupported part: '\" + path[2] + \"' in '\" + patch.Path + \"'\")\n\t\t\t\t}\n\t\t\t} else if path[1] == \"bar-b\" {\n\t\t\t\tif path[2] == \"baz-a\" {\n\t\t\t\t\tobj.FooB.BarB.BazA = int64(val)\n\t\t\t\t} else if path[2] == \"baz-b\" {\n\t\t\t\t\tobj.FooB.BarB.BazB = int64(val)\n\t\t\t\t} else {\n\t\t\t\t\treturn Obj{}, errors.New(\"unsupported patch path for 
Obj, unsupported part: '\" + path[2] + \"' in '\" + patch.Path + \"'\")\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\treturn Obj{}, errors.New(\"unsupported patch path for Obj, unsupported part: '\" + path[1] + \"' in '\" + patch.Path + \"'\")\n\t\t\t}\n\t\t} else {\n\t\t\treturn Obj{}, errors.New(\"unsupported patch path for Obj, unsupported part: '\" + path[0] + \"' in '\" + patch.Path + \"'\")\n\t\t}\n\t}\n\treturn obj, nil\n}", "func (c *PropertiesCustomDimensionsPatchCall) UpdateMask(updateMask string) *PropertiesCustomDimensionsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (m *TeamworkRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserTeamworkable, requestConfiguration *TeamworkRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserTeamworkable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateUserTeamworkFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserTeamworkable), nil\n}", "func (c *OrganizationsDatacollectorsPatchCall) UpdateMask(updateMask string) *OrganizationsDatacollectorsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (fieldMask 
GetMonitoredResourceDescriptorRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func NewUpdateHookDefault(code int) *UpdateHookDefault {\n\treturn &UpdateHookDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (fieldMask CreateAlertingPolicyRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (c *cronFederatedHPAs) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.CronFederatedHPA, err error) {\n\tresult = &v1alpha1.CronFederatedHPA{}\n\terr = c.client.Patch(pt).\n\t\tNamespace(c.ns).\n\t\tResource(\"cronfederatedhpas\").\n\t\tName(name).\n\t\tSubResource(subresources...).\n\t\tVersionedParams(&opts, scheme.ParameterCodec).\n\t\tBody(data).\n\t\tDo(ctx).\n\t\tInto(result)\n\treturn\n}", "func (fieldMask GetAlertingPolicyRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (c *OrganizationsEnvgroupsPatchCall) UpdateMask(updateMask string) *OrganizationsEnvgroupsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (uuo *UserUpdateOne) SetNillableBirthday(t *time.Time) *UserUpdateOne {\n\tif t != nil {\n\t\tuuo.SetBirthday(*t)\n\t}\n\treturn uuo\n}", "func (c *DatasetsPatchCall) UpdateMask(updateMask string) *DatasetsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (fieldMask WatchAlertingPolicyResponse_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (fieldMask DeleteAlertingPolicyRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (c 
*ProjectsLocationsJobsPatchCall) UpdateMask(updateMask string) *ProjectsLocationsJobsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (c *ProjectsLocationsMigrationJobsPatchCall) UpdateMask(updateMask string) *ProjectsLocationsMigrationJobsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (m *ConditionalAccessRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable, requestConfiguration *ConditionalAccessRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateConditionalAccessRootFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable), nil\n}", "func (o *PartialUpdateAppDefault) WithPayload(payload *models.Error) *PartialUpdateAppDefault {\n\to.Payload = payload\n\treturn o\n}", "func (c *ProjectsLocationsDataExchangesPatchCall) UpdateMask(updateMask string) *ProjectsLocationsDataExchangesPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (fieldMask WatchAlertingPolicyRequest_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := 
fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (m *ItemInsightsTrendingTrendingItemRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Trendingable, requestConfiguration *ItemInsightsTrendingTrendingItemRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Trendingable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateTrendingFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Trendingable), nil\n}", "func (a *BulkApiService) PatchBulkExport(ctx context.Context, moid string) ApiPatchBulkExportRequest {\n\treturn ApiPatchBulkExportRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func RecoverableDays(days int) ChangeOption {\n\treturn changeOption{\n\t\tapplier: applierFunc(\n\t\t\tfunc(caller caller, co interface{}) {\n\t\t\t\tco.(*secret.UpdateSetRequest).Attributes.RecoverableDays = days\n\t\t\t},\n\t\t),\n\t}\n}", "func (m *IosUpdateConfiguration) SetScheduledInstallDays(value []DayOfWeek)() {\n err := m.GetBackingStore().Set(\"scheduledInstallDays\", value)\n if err != nil {\n panic(err)\n }\n}", "func (c *OrganizationsEnvironmentsUpdateDebugmaskCall) UpdateMask(updateMask string) 
*OrganizationsEnvironmentsUpdateDebugmaskCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (m *ShiftPreferencesRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ShiftPreferencesable, requestConfiguration *ShiftPreferencesRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ShiftPreferencesable, error) {\n requestInfo, err := m.CreatePatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateShiftPreferencesFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ShiftPreferencesable), nil\n}", "func (m *RecurrencePattern) SetDaysOfWeek(value []DayOfWeek)() {\n m.daysOfWeek = value\n}", "func (c *CallsetsPatchCall) UpdateMask(updateMask string) *CallsetsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (a *HyperflexApiService) PatchHyperflexSoftwareDistributionEntry(ctx context.Context, moid string) ApiPatchHyperflexSoftwareDistributionEntryRequest {\n\treturn ApiPatchHyperflexSoftwareDistributionEntryRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func FieldMask(paths ...string) *fieldmaskpb.FieldMask {\n\treturn &fieldmaskpb.FieldMask{Paths: paths}\n}", "func DefaultPatchSetComment(ctx context.Context, objects []*Comment, 
updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*Comment, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*Comment, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchComment(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func (m *TeamsAppsItemAppDefinitionsItemOutlineIconRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.TeamsAppIconable, requestConfiguration *TeamsAppsItemAppDefinitionsItemOutlineIconRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.TeamsAppIconable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateTeamsAppIconFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.TeamsAppIconable), nil\n}", "func (m *ConditionalAccessRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable, requestConfiguration 
*ConditionalAccessRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateConditionalAccessRootFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable), nil\n}", "func (m *ConditionalAccessRequestBuilder) Patch(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable, requestConfiguration *ConditionalAccessRequestBuilderPatchRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.Send(ctx, requestInfo, 
iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateConditionalAccessRootFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable), nil\n}", "func (c *ProjectsTraceSinksPatchCall) UpdateMask(updateMask string) *ProjectsTraceSinksPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (c *ProjectsLocationsProcessesRunsPatchCall) UpdateMask(updateMask string) *ProjectsLocationsProcessesRunsPatchCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}", "func (fieldMask BatchGetAlertingPoliciesResponse_FieldMask) Marshal() ([]byte, error) {\n\tprotoFieldMask := fieldMask.ToProtoFieldMask()\n\treturn proto.Marshal(protoFieldMask)\n}", "func (o *Entry) Defaults() {\n\tif o.StaticDist == 0 {\n\t\to.StaticDist = 10\n\t}\n\n\tif o.StaticIpv6Dist == 0 {\n\t\to.StaticIpv6Dist = 10\n\t}\n\n\tif o.OspfIntDist == 0 {\n\t\to.OspfIntDist = 30\n\t}\n\n\tif o.OspfExtDist == 0 {\n\t\to.OspfExtDist = 110\n\t}\n\n\tif o.Ospfv3IntDist == 0 {\n\t\to.Ospfv3IntDist = 30\n\t}\n\n\tif o.Ospfv3ExtDist == 0 {\n\t\to.Ospfv3ExtDist = 110\n\t}\n\n\tif o.IbgpDist == 0 {\n\t\to.IbgpDist = 200\n\t}\n\n\tif o.EbgpDist == 0 {\n\t\to.EbgpDist = 20\n\t}\n\n\tif o.RipDist == 0 {\n\t\to.RipDist = 120\n\t}\n}", "func (c *OrganizationsEnvironmentsUpdateTraceConfigCall) UpdateMask(updateMask string) *OrganizationsEnvironmentsUpdateTraceConfigCall {\n\tc.urlParams_.Set(\"updateMask\", updateMask)\n\treturn c\n}" ]
[ "0.7373707", "0.70680124", "0.67444634", "0.6065729", "0.5949686", "0.57163745", "0.5495746", "0.54638475", "0.5452421", "0.5412164", "0.5399574", "0.53054893", "0.52838326", "0.5168645", "0.505817", "0.5040415", "0.46125427", "0.46034554", "0.45514354", "0.4538858", "0.4502264", "0.44303185", "0.44222853", "0.43890834", "0.4327707", "0.42894992", "0.4249346", "0.42491034", "0.4232841", "0.42259347", "0.42160445", "0.4205205", "0.41704565", "0.41552547", "0.41415796", "0.41408935", "0.4131638", "0.41265815", "0.41173407", "0.4114604", "0.41086847", "0.40980873", "0.40917414", "0.40908507", "0.40793622", "0.40781406", "0.40770796", "0.40672734", "0.40592182", "0.40541607", "0.40463343", "0.4044323", "0.4042283", "0.40394586", "0.40377626", "0.40320665", "0.40314558", "0.40218028", "0.4013938", "0.40122122", "0.40104523", "0.39925653", "0.39886916", "0.39871868", "0.39835328", "0.3982232", "0.39807203", "0.39616558", "0.3960067", "0.39508188", "0.39500415", "0.39384842", "0.39365974", "0.39321277", "0.39295983", "0.3928867", "0.3926021", "0.39239496", "0.3921769", "0.39160413", "0.39126498", "0.39115882", "0.39109933", "0.39096108", "0.3908955", "0.3905882", "0.39042652", "0.39032394", "0.39002705", "0.38990647", "0.3895892", "0.38955465", "0.38954479", "0.38941365", "0.38938013", "0.38934338", "0.38933262", "0.38908273", "0.388737", "0.3885464" ]
0.8058049
0
DefaultListHealthMenstruationDailyEntry executes a gorm list call
func DefaultListHealthMenstruationDailyEntry(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationDailyEntry, error) { in := HealthMenstruationDailyEntry{} ormObj, err := in.ToORM(ctx) if err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListApplyQuery); ok { if db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil { return nil, err } } db, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationDailyEntryORM{}, &HealthMenstruationDailyEntry{}, f, s, p, fs) if err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeListFind); ok { if db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil { return nil, err } } db = db.Where(&ormObj) db = db.Order("id") ormResponse := []HealthMenstruationDailyEntryORM{} if err := db.Find(&ormResponse).Error; err != nil { return nil, err } if hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterListFind); ok { if err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil { return nil, err } } pbResponse := []*HealthMenstruationDailyEntry{} for _, responseEntry := range ormResponse { temp, err := responseEntry.ToPB(ctx) if err != nil { return nil, err } pbResponse = append(pbResponse, &temp) } return pbResponse, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func DefaultReadHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationDailyEntryORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationDailyEntryORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationDailyEntryORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultListHealthMenstruationPersonalInfo(ctx context.Context, db *gorm1.DB, f *query1.Filtering, s *query1.Sorting, p *query1.Pagination, fs *query1.FieldSelection) ([]*HealthMenstruationPersonalInfo, error) {\n\tin := HealthMenstruationPersonalInfo{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm2.ApplyCollectionOperators(ctx, db, &HealthMenstruationPersonalInfoORM{}, &HealthMenstruationPersonalInfo{}, f, s, 
p, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []HealthMenstruationPersonalInfoORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse, f, s, p, fs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*HealthMenstruationPersonalInfo{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func DefaultCreateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func DefaultPatchSetHealthMenstruationDailyEntry(ctx context.Context, objects []*HealthMenstruationDailyEntry, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationDailyEntry, error) {\n\tif len(objects) != 
len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationDailyEntry, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationDailyEntry(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func DefaultPatchHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationDailyEntry\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationDailyEntry(ctx, &HealthMenstruationDailyEntry{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationDailyEntryWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationDailyEntry(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationDailyEntryWithAfterPatchSave); ok {\n\t\tif err = 
hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func DefaultApplyFieldMaskHealthMenstruationDailyEntry(ctx context.Context, patchee *HealthMenstruationDailyEntry, patcher *HealthMenstruationDailyEntry, updateMask *field_mask1.FieldMask, prefix string, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif patcher == nil {\n\t\treturn nil, nil\n\t} else if patchee == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar err error\n\tfor _, f := range updateMask.Paths {\n\t\tif f == prefix+\"Id\" {\n\t\t\tpatchee.Id = patcher.Id\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"CreatedAt\" {\n\t\t\tpatchee.CreatedAt = patcher.CreatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"UpdatedAt\" {\n\t\t\tpatchee.UpdatedAt = patcher.UpdatedAt\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"ProfileId\" {\n\t\t\tpatchee.ProfileId = patcher.ProfileId\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Day\" {\n\t\t\tpatchee.Day = patcher.Day\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"IntensityPercentage\" {\n\t\t\tpatchee.IntensityPercentage = patcher.IntensityPercentage\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Type\" {\n\t\t\tpatchee.Type = patcher.Type\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"Manual\" {\n\t\t\tpatchee.Manual = patcher.Manual\n\t\t\tcontinue\n\t\t}\n\t\tif f == prefix+\"BasedOnPrediction\" {\n\t\t\tpatchee.BasedOnPrediction = patcher.BasedOnPrediction\n\t\t\tcontinue\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn patchee, nil\n}", "func DefaultStrictUpdateHealthMenstruationDailyEntry(ctx context.Context, in *HealthMenstruationDailyEntry, db *gorm1.DB) (*HealthMenstruationDailyEntry, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultStrictUpdateHealthMenstruationDailyEntry\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tlockedRow := 
&HealthMenstruationDailyEntryORM{}\n\tdb.Model(&ormObj).Set(\"gorm:query_option\", \"FOR UPDATE\").Where(\"id=?\", ormObj.Id).First(lockedRow)\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateCleanup); ok {\n\t\tif db, err = hook.BeforeStrictUpdateCleanup(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithBeforeStrictUpdateSave); ok {\n\t\tif db, err = hook.BeforeStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationDailyEntryORMWithAfterStrictUpdateSave); ok {\n\t\tif err = hook.AfterStrictUpdateSave(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, err\n}", "func (hh *HealthCheckHandler) List(w http.ResponseWriter, r *http.Request) {\n\tqueryParams := r.URL.Query()\n\tpage, err := strconv.Atoi(queryParams[\"page\"][0])\n\tif err != nil {\n\t\thttp.Error(w, marshalError(err.Error()), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tlist := hh.db.List()\n\tsort.Sort(models.HealthChecks(list))\n\tstart, end := paginate(page, 10, len(list))\n\tpaginated := list[start:end]\n\n\tres := &models.HealthCheckList{\n\t\tItems: paginated,\n\t\tTotal: len(list),\n\t\tPage: page,\n\t\tSize: 10,\n\t}\n\n\tb, err := json.Marshal(res)\n\tif err != nil {\n\t\thttp.Error(w, marshalError(err.Error()), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tw.Write(b)\n}", "func (gs *GreetingService) List(c endpoints.Context, r *GreetingsListReq) (*GreetingsList, error) {\n\tif r.Limit <= 0 {\n\t\tr.Limit = 10\n\t}\n\n\tq := datastore.NewQuery(\"Greeting\").Order(\"-Date\").Limit(r.Limit)\n\tgreets := make([]*Greeting, 0, r.Limit)\n\tkeys, err := q.GetAll(c, &greets)\n\tif err != nil {\n\t\treturn nil, 
err\n\t}\n\n\tfor i, k := range keys {\n\t\tgreets[i].Key = k\n\t}\n\treturn &GreetingsList{greets}, nil\n}", "func runList(props ListCmdProps, output io.Writer, repo db.Repo) error {\n\tstart, err := parseDateOrDefault(props.startDate)\n\n\tif props.startDate == \"\" {\n\t\tdefaultStart := start.Add(-1 * time.Hour * 24 * 30)\n\t\tstart = &defaultStart\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tend, err := parseDateOrDefault(props.endDate)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tworkingDays, err := repo.ListRange(start, end)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\trenderTable(workingDays, output)\n\n\treturn nil\n}", "func (c *IloClient) GetAggHealthDataDell(model string) ([]HealthList, error) {\n\n\tif strings.ToLower(model) == \"r730xd\" {\n\n\t\treturn nil, nil\n\n\t} else if strings.ToLower(model) == \"r740xd\" {\n\t\turl := c.Hostname + \"/redfish/v1/UpdateService/FirmwareInventory\"\n\n\t\tresp, _, _, err := queryData(c, \"GET\", url, nil)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tvar (\n\t\t\tx MemberCountDell\n\t\t\t_healthdata []HealthList\n\t\t)\n\n\t\tjson.Unmarshal(resp, &x)\n\n\t\tfor i := range x.Members {\n\t\t\tr, _ := regexp.Compile(\"Installed\")\n\t\t\tif r.MatchString(x.Members[i].OdataId) == true {\n\t\t\t\t_url := c.Hostname + x.Members[i].OdataId\n\t\t\t\tresp, _, _, err := queryData(c, \"GET\", _url, nil)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\n\t\t\t\tvar y FirmwareDataDell\n\n\t\t\t\tjson.Unmarshal(resp, &y)\n\n\t\t\t\thealthData := HealthList{\n\t\t\t\t\tName: y.Name,\n\t\t\t\t\tState: y.Status.State,\n\t\t\t\t\tHealth: y.Status.Health,\n\t\t\t\t}\n\n\t\t\t\t_healthdata = append(_healthdata, healthData)\n\n\t\t\t}\n\t\t}\n\n\t\treturn _healthdata, nil\n\t}\n\treturn nil, nil\n}", "func (gs *GreetingService) List(c endpoints.Context, r *GreetingsListReq) (*GreetingsList, error) {\n\tif r.Limit <= 0 {\n\t\tr.Limit = 10\n\t}\n\n\tq := 
datastore.NewQuery(\"Greeting\").Limit(r.Limit)\n\tgreets := make([]*Greeting, 0, r.Limit)\n\tkeys, err := q.GetAll(c, &greets)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor i, k := range keys {\n\t\tgreets[i].Key = k\n\t}\n\treturn &GreetingsList{greets}, nil\n}", "func (r *Resource) getAllHandler(c *gin.Context) {\n // fetch all from database\n meals, err := r.db.GetAllMenuMeals()\n if err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n // return result as JSON\n c.JSON(http.StatusOK, meals)\n}", "func List(ctx context.Context, r Recipient) (Events, error) {\n\td := dsset.Set{\n\t\tParent: r.Key,\n\t\tTombstonesDelay: TombstonesDelay,\n\t}\n\tconst effectivelyUnlimited = 1000000\n\tswitch l, err := d.List(ctx, effectivelyUnlimited); {\n\tcase err != nil:\n\t\treturn nil, err\n\tcase len(l.Items) == effectivelyUnlimited:\n\t\tpanic(fmt.Errorf(\"fetched possibly not all events (limit: %d)\", effectivelyUnlimited))\n\tdefault:\n\t\treturn toEvents(l.Items), nil\n\t}\n}", "func (h WorkloadHandler) List(ctx *gin.Context) {\n}", "func DefaultReadHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors1.EmptyIdError\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadApplyQuery); ok {\n\t\tif db, err = hook.BeforeReadApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif db, err = gorm2.ApplyFieldSelection(ctx, db, nil, &HealthMenstruationPersonalInfoORM{}); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeReadFind); ok {\n\t\tif db, err = hook.BeforeReadFind(ctx, db); err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\t}\n\tormResponse := HealthMenstruationPersonalInfoORM{}\n\tif err = db.Where(&ormObj).First(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormResponse).(HealthMenstruationPersonalInfoORMWithAfterReadFind); ok {\n\t\tif err = hook.AfterReadFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormResponse.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (c HourlyCommand) Items(arg, data string) (items []alfred.Item, err error) {\n\tdlog.Printf(\"Running HourlyCommand\")\n\n\tvar cfg hourlyConfig\n\tif data != \"\" {\n\t\tif err := json.Unmarshal([]byte(data), &cfg); err != nil {\n\t\t\tdlog.Printf(\"Invalid hourly config\")\n\t\t}\n\t}\n\n\tvar weather Weather\n\tvar loc Location\n\tif loc, weather, err = getWeather(arg); err != nil {\n\t\treturn\n\t}\n\n\tvar startTime time.Time\n\tif cfg.Start != nil {\n\t\tstartTime = *cfg.Start\n\t} else if len(weather.Hourly) > 0 {\n\t\tstartTime = weather.Hourly[0].Time\n\t}\n\n\theading := alfred.Item{\n\t\tTitle: \"Weather for \" + loc.Name,\n\t\tSubtitle: alfred.Line,\n\t\tArg: &alfred.ItemArg{\n\t\t\tKeyword: \"daily\",\n\t\t},\n\t}\n\n\tif weather.URL != \"\" {\n\t\theading.AddMod(alfred.ModCmd, alfred.ItemMod{\n\t\t\tSubtitle: \"Open this forecast in a browser\",\n\t\t\tArg: &alfred.ItemArg{\n\t\t\t\tKeyword: \"daily\",\n\t\t\t\tMode: alfred.ModeDo,\n\t\t\t\tData: alfred.Stringify(&dailyCfg{ToOpen: weather.URL}),\n\t\t\t},\n\t\t})\n\t}\n\n\titems = append(items, heading)\n\n\tdeg := \"F\"\n\tif config.Units == unitsMetric {\n\t\tdeg = \"C\"\n\t}\n\n\taddAlertItems(&weather, &items)\n\n\tfor _, entry := range weather.Hourly {\n\t\tif entry.Time.Before(startTime) {\n\t\t\tcontinue\n\t\t}\n\n\t\tconditions := entry.Summary\n\t\ticon := entry.Icon\n\n\t\titem := alfred.Item{\n\t\t\tTitle: entry.Time.Format(\"Mon \"+config.TimeFormat) + \": \" + conditions,\n\t\t\tSubtitle: fmt.Sprintf(\"%d°%s (%d°%s) ☂ %d%%\", entry.Temp.Int64(), deg, 
entry.ApparentTemp.Int64(), deg, entry.Precip),\n\t\t\tIcon: getIconFile(icon),\n\t\t}\n\n\t\titems = append(items, item)\n\t}\n\n\treturn\n}", "func (d *Dao) GetAllDayExpenseInfo(c context.Context, beginDate time.Time, ctype, from, limit int) (infos []*model.BudgetDayStatistics, err error) {\n\trows, err := d.rddb.Query(c, _getAllDayExpenseSQL, beginDate, ctype, from, limit)\n\tif err != nil {\n\t\tlog.Error(\"dao.GetAllDayExpenseInfo query error(%v)\", err)\n\t\treturn\n\t}\n\tdefer rows.Close()\n\tfor rows.Next() {\n\t\ta := &model.BudgetDayStatistics{}\n\t\tif err = rows.Scan(&a.DayExpense, &a.UpCount, &a.AvCount, &a.UpAvgExpense, &a.AvAvgExpense, &a.TotalExpense, &a.Date); err != nil {\n\t\t\tlog.Error(\"dao.GetAllDayExpenseInfo scan error(%v)\", err)\n\t\t\treturn\n\t\t}\n\t\tinfos = append(infos, a)\n\t}\n\terr = rows.Err()\n\treturn\n}", "func DefaultListPeriod(ctx context.Context, db *gorm.DB) ([]*Period, error) {\n\tin := Period{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(PeriodORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm1.ApplyCollectionOperators(ctx, db, &PeriodORM{}, &Period{}, nil, nil, nil, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(PeriodORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []PeriodORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(PeriodORMWithAfterListFind); ok {\n\t\tif err = hook.AfterListFind(ctx, db, &ormResponse); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*Period{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif 
err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func (c Client) ListEntries(ids []string) ([]Entry, error) {\n\tif len(ids) > 1000 {\n\t\treturn nil, errors.New(\"The number of entry ids you can pass as an input is limited to 1,000.\")\n\t}\n\turl := c.Config.BaseURL + \"/\" + c.Config.Version + \"/\" + entriesEndpoint + \"/.mget\"\n\tpayload, err := json.Marshal(ids)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treq, err := http.NewRequest(\"POST\", url, bytes.NewReader(payload))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treq.Header.Add(\"Authorization\", \"Bearer \"+c.Config.Token)\n\treq.Header.Add(\"Content-Type\", \"application/json\")\n\tresp, err := c.Client.Do(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar entries []Entry\n\terr = json.Unmarshal(body, &entries)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn entries, nil\n}", "func DefaultListUserInfo(ctx context.Context, db *gorm.DB) ([]*UserInfo, error) {\n\tin := UserInfo{}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeListApplyQuery); ok {\n\t\tif db, err = hook.BeforeListApplyQuery(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb, err = gorm1.ApplyCollectionOperators(ctx, db, &UserInfoORM{}, &UserInfo{}, nil, nil, nil, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithBeforeListFind); ok {\n\t\tif db, err = hook.BeforeListFind(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tdb = db.Where(&ormObj)\n\tdb = db.Order(\"id\")\n\tormResponse := []UserInfoORM{}\n\tif err := db.Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(UserInfoORMWithAfterListFind); ok {\n\t\tif err 
= hook.AfterListFind(ctx, db, &ormResponse); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse := []*UserInfo{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func GetData(w http.ResponseWriter, r *http.Request) {\n\tresult := dailyData{}\n\tc := utils.MONGOSESSION.DB(\"healthDB\").C(\"healthData\")\n\tc.Find(bson.M{\"date\": utils.GetDate(time.Now())}).One(&result)\n\tb, _ := json.Marshal(result)\n\tfmt.Fprintf(w, string(b))\n}", "func HealthHandler(w http.ResponseWriter, r *http.Request) {\n\tlog.Println(\"--- RECEIVED HEALTH DATA FROM HEALTH PULSE ---\")\n\n\tauth, httpErr := auth.CheckAuth(w, r)\n\tif !auth {\n\t\thttp.Error(w, httpErr.Status, httpErr.StatusCode)\n\t\treturn\n\t}\n\n\tvar healthSample health.Data\n\tdata, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tlog.Println(string(data))\n\n\terr = json.Unmarshal(data, &healthSample)\n\tif err != nil {\n\t\tlog.Errorf(\"Error while unmarshaling incoming health data: %s\", err)\n\t\tlog.Println(string(data))\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tc := utils.MONGOSESSION.DB(\"healthDB\").C(\"healthData\")\n\tvar results []dailyData\n\terr = c.Find(bson.M{\"date\": healthSample.Date}).All(&results)\n\n\tif err != nil {\n\t\tlog.Errorf(\"Error while finding health data entries: %s\", err)\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Calculate Min and Max heart rate\n\tminMaxHR := calcMinMaxDailyHeartRate(healthSample)\n\thealthSample.MinMaxHeartRate = minMaxHR\n\n\t// If there is no entry for the current day create one with 
the current sample\n\tif len(results) == 0 {\n\t\terr = c.Insert(&dailyData{Date: healthSample.Date, Data: healthSample})\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Error while inserting health data entries: %s\", err)\n\t\t\treturn\n\t\t}\n\t\tlog.Infof(\"LOGGED ENTRY %s\", healthSample.Date)\n\t\treturn\n\t}\n\n\t// If there is an entry for the current day, update the entry with the current sample\n\tcolQuerier := bson.M{\"date\": healthSample.Date}\n\tchange := bson.M{\"$set\": bson.M{\"date\": healthSample.Date, \"data\": healthSample}}\n\terr = c.Update(colQuerier, change)\n\tif err != nil {\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tlog.Infof(\"UPDATING ENTRY %s\", healthSample.Date)\n}", "func (s *TimeEntriesService) List(start, end *time.Time) ([]TimeEntry, error) {\n\tu := \"time_entries\"\n\tparams := url.Values{}\n\tif start != nil {\n\t\tparams.Add(\"start_date\", start.Format(time.RFC3339))\n\t}\n\tif end != nil {\n\t\tparams.Add(\"end_date\", end.Format(time.RFC3339))\n\t}\n\tu += \"?\" + params.Encode()\n\n\treq, err := s.client.NewRequest(\"GET\", u, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdata := new([]TimeEntry)\n\t_, err = s.client.Do(req, data)\n\n\treturn *data, err\n}", "func DaemonsHandler(cmd *CmdMessage) error {\n if !cmd.Conn.owner.IsUser() {\n return errors.New(\"This handler is not available for daemons\")\n }\n if !cmd.Conn.owner.IsAuthorised() {\n return errors.New(\"User has to log in before using this handler\")\n }\n\n data := make(map[string]interface{})\n data[\"list\"] = [](map[string]interface{}){}\n daemons := cmd.Conn.owner.GetOrg().Daemons\n\n for id := range daemons {\n data[\"list\"] = append(data[\"list\"].([]map[string]interface{}), getDaemonFormat(daemons[id]))\n }\n\n return DispatchMessage(\"daemons\", data, cmd.Conn)\n}", "func (mgr *EntryManager) GetAll() []*Entry {\n\tvar entries []*Entry\n\n\tdb, err := sql.Open(\"postgres\", 
mgr.ConnStr)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tdefer db.Close()\n\n\t// Generate a Rows iterator from a SQL query\n\tqueryStr := \"SELECT id, title, date_posted, tags FROM entries ORDER BY id;\"\n\trows, err := db.Query(queryStr)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn nil\n\t}\n\tdefer rows.Close()\n\n\t// Iterate over rows and populate Entry records\n\tfor rows.Next() {\n\t\tvar (\n\t\t\tentryID int\n\t\t\ttitle, tagsText string\n\t\t\tdatePosted time.Time\n\t\t)\n\n\t\terr = rows.Scan(&entryID, &title, &datePosted, &tagsText)\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t\treturn nil\n\t\t}\n\n\t\tentries = append(entries, &Entry{\n\t\t\tID: entryID,\n\t\t\tTitle: title,\n\t\t\tDatePosted: datePosted,\n\t\t\tTags: strings.Split(tagsText, \",\"),\n\t\t},\n\t\t)\n\t}\n\n\treturn entries\n}", "func (this *WebController) List() {\n\t//新闻线索模块\n\tweek, _ := this.GetInt(\"week\") //获取时间周期\n\t//以下为算出每个周期的开始时间,人工录入的新闻都是按时间戳筛选的,接口的数据可直接调用week字段\n\tvar st int64\n\tlocal := time.Now().Local().Format(\"2006-01-02\")\n\ttoday, _ := time.Parse(\"2006-01-02 15:04:05\", local + \" 00:00:00\")\n\tnow := time.Now().Unix()\n\tp := this.FormToP(\"tab2\", \"tab3\")\n\tif week == 3 {\n\t\tst = today.Unix() - 30 * 3600 * 24 - 8 * 3600\n\t} else if week == 2 {\n\t\tst = today.Unix() - 7 * 3600 * 24 - 8 * 3600\n\t} else {\n\t\tst = today.Unix() - 8 * 3600\n\t}\n\tif p[\"tab2\"] == \"新华电讯\" {\n\t\t//新华电讯下的数据是按时间戳筛选的\n\t\tif week == 1 {\n\t\t\tp[\"date\"] = P{\"$gte\": st, \"$lte\": now}\n\t\t} else {\n\t\t\tet := today.Unix() - 8 * 3600 - 1\n\t\t\tp[\"date\"] = P{\"$gte\": st, \"$lte\": et}\n\t\t}\n\t} else {\n\t\tp[\"week\"] = 1\n\t}\n\tp[\"news_class\"] = 4\n\tp[\"old\"] = 0\n\tp[\"dh\"] = list_dh\n\ttotal := D(News).Find(p).Count()\n\tsort := \"-date\"\n\tif p[\"tab2\"] == \"互联网\" {\n\t\tsort = \"-hot\"\n\t} else {\n\n\t}\n\tlist := *D(News).Find(p).Sort(sort).Limit(100).All()\n\tif len(list) == 0 {\n\t\t//如果实时数据为空则开启备份数据\n\t\tq := 
P{}\n\t\tq[\"news_class\"] = 4\n\t\tif p[\"tab3\"] != nil {\n\t\t\tq[\"tab3\"] = p[\"tab3\"]\n\t\t}\n\t\tif p[\"tab2\"] != nil {\n\t\t\tq[\"tab2\"] = p[\"tab2\"]\n\t\t}\n\t\tq[\"dh\"] = list_dh\n\t\tq[\"old\"] = 1\n\t\tlist = *D(News).Find(q).Sort(sort).Limit(100).All()\n\t}\n\tfor _, v := range list {\n\t\tv[\"name\"] = v[\"title\"] //前端用name接受\n\t\tif week == 1 {\n\t\t\t//修改当日时间\n\t\t\tv[\"date\"] = today.Unix()\n\t\t}\n\t}\n\ttotals := 0\n\tr := P{}\n\tr[\"total\"] = total + totals\n\tr[\"page\"], _ = this.GetInt(\"page\", 1) //分页机制暂时没用到\n\tr[\"list\"] = list\n\tthis.EchoJsonMsg(r)\n}", "func (HealthMenstruationDailyEntryORM) TableName() string {\n\treturn \"health_menstruation_daily_entries\"\n}", "func GetJournalEntriesAll(w http.ResponseWriter, req *http.Request, _ httprouter.Params) {\n\tif isLoggedIn(w, req) {\n\t\tq := req.URL.Query()\n\n\t\tch := make(chan map[string]bool)\n\t\tgo model.GetLabourNames(ch)\n\n\t\tdate := q[\"date\"][0]\n\t\tproductID, err := strconv.Atoi(q[\"id\"][0])\n\t\tif err != nil {\n\t\t\tlog.Println(\"Error in GETJournal Entries all\")\n\t\t\tlog.Println(err)\n\t\t}\n\t\tje, box, packet, res := model.GetAllJournalEntry(date, productID)\n\n\t\tlabours = <-ch\n\n\t\tresult := JournalResponse{\n\t\t\tJournalEntries: je,\n\t\t\tBox: box,\n\t\t\tPacket: packet,\n\t\t\tLabours: labours,\n\t\t}\n\t\tif res {\n\t\t\tp, err := json.Marshal(result)\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(\"Error in GetJournalEntries all in Marshalling\")\n\t\t\t\tlog.Println(err)\n\t\t\t}\n\t\t\tio.WriteString(w, string(p))\n\t\t}\n\t}\n}", "func (t *TimeEntriesService) List(ctx context.Context, opts *TimeEntriesListOptions) ([]*TimeEntry, *Response, error) {\n\tu := \"time_entries\"\n\tu, err := addOptions(u, opts)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\treq, err := t.client.NewRequest(\"GET\", u, nil)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\ttype Page struct {\n\t\tPagination\n\t\tTimeEntries []*TimeEntry 
`json:\"time_entries\"`\n\t}\n\tvar page Page\n\n\tresp, err := t.client.Do(ctx, req, &page)\n\tif err != nil {\n\t\treturn nil, resp, err\n\t}\n\n\tresp.populatePageValues(page.Pagination)\n\n\treturn page.TimeEntries, resp, nil\n}", "func ListBuildSummaries(settings *playfab.Settings, postData *ListBuildSummariesRequestModel, entityToken string) (*ListBuildSummariesResponseModel, error) {\n if entityToken == \"\" {\n return nil, playfab.NewCustomError(\"entityToken should not be an empty string\", playfab.ErrorGeneric)\n }\n b, errMarshal := json.Marshal(postData)\n if errMarshal != nil {\n return nil, playfab.NewCustomError(errMarshal.Error(), playfab.ErrorMarshal)\n }\n\n sourceMap, err := playfab.Request(settings, b, \"/MultiplayerServer/ListBuildSummaries\", \"X-EntityToken\", entityToken)\n if err != nil {\n return nil, err\n }\n \n result := &ListBuildSummariesResponseModel{}\n\n config := mapstructure.DecoderConfig{\n DecodeHook: playfab.StringToDateTimeHook,\n Result: result,\n }\n \n decoder, errDecoding := mapstructure.NewDecoder(&config)\n if errDecoding != nil {\n return nil, playfab.NewCustomError(errDecoding.Error(), playfab.ErrorDecoding)\n }\n \n errDecoding = decoder.Decode(sourceMap)\n if errDecoding != nil {\n return nil, playfab.NewCustomError(errDecoding.Error(), playfab.ErrorDecoding)\n }\n\n return result, nil\n}", "func (m *SeaterModel) ListMeetingCanhuis(params QueryParams) (mcs []*MeetingCanhui, err error) {\n\to := m.Orm()\n\n\tmcs = make([]*MeetingCanhui, 0, PagingDefaultLimit)\n\n\tqs := o.QueryTable(new(MeetingCanhui))\n\tif params != nil {\n\t\tqs = qs.SetCond(params.Condition())\n\t}\n\n\tqs = qs.OrderBy(\"-ID\")\n\t_, err = m.PagingAll(params, qs, &mcs)\n\tif err == orm.ErrNoRows {\n\t\treturn mcs, nil\n\t} else if err != nil {\n\t\terr = errors.Trace(err)\n\t\treturn\n\t}\n\n\treturn\n}", "func (c *cronFederatedHPAs) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.CronFederatedHPAList, err error) {\n\tvar timeout 
time.Duration\n\tif opts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*opts.TimeoutSeconds) * time.Second\n\t}\n\tresult = &v1alpha1.CronFederatedHPAList{}\n\terr = c.client.Get().\n\t\tNamespace(c.ns).\n\t\tResource(\"cronfederatedhpas\").\n\t\tVersionedParams(&opts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tDo(ctx).\n\t\tInto(result)\n\treturn\n}", "func (db database) list(w http.ResponseWriter, req *http.Request) {\n\n\tif err := itemList.Execute(w, db); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "func (s *AbsenceRegistrationsEndpoint) List(ctx context.Context, division int, all bool, o *api.ListOptions) ([]*AbsenceRegistrations, error) {\n\tvar entities []*AbsenceRegistrations\n\tu, _ := s.client.ResolvePathWithDivision(\"/api/v1/{division}/hrm/AbsenceRegistrations\", division) // #nosec\n\tapi.AddListOptionsToURL(u, o)\n\n\tif all {\n\t\terr := s.client.ListRequestAndDoAll(ctx, u.String(), &entities)\n\t\treturn entities, err\n\t}\n\t_, _, err := s.client.NewRequestAndDo(ctx, \"GET\", u.String(), nil, &entities)\n\treturn entities, err\n}", "func (m *HealthMenstruationDailyEntry) ToORM(ctx context.Context) (HealthMenstruationDailyEntryORM, error) {\n\tto := HealthMenstruationDailyEntryORM{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToORM); ok {\n\t\tif err = prehook.BeforeToORM(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.CreatedAt = &t\n\t}\n\tif m.UpdatedAt != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.UpdatedAt = &t\n\t}\n\tto.ProfileId = m.ProfileId\n\tif m.Day != nil {\n\t\tvar t time.Time\n\t\tif t, err = ptypes1.Timestamp(m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t\tto.Day = &t\n\t}\n\tto.IntensityPercentage = 
m.IntensityPercentage\n\tto.Type = int32(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToORM); ok {\n\t\terr = posthook.AfterToORM(ctx, &to)\n\t}\n\treturn to, err\n}", "func (hpSrv *HomePageServ) List() {\n\tvar (\n\t\tarticelMd []serializer.ArticleModel\n\t)\n\tconf.MYSQL_CONNECT.Order(\"created_at desc\").Find(&articelMd)\n\tif hpSrv.Limit == 0 {\n\t\thpSrv.Limit = 4\n\t}\n\thpSrv.setArticleSet(articelMd)\n\thpSrv.pageCount = setPageCount(len(articelMd), hpSrv.Limit)\n\thpSrv.setPage()\n}", "func (s *business) List(where repository.Example, order string, limit int32, page int32) (*repository.ExampleList, *response.Error) {\n\ts.logger = s.loggerClone\n\ts.logger.SugaredLogger = s.logger.With(\"method\", \"List\")\n\n\t// Pagination\n\tif limit == 0 {\n\t\tlimit = viper.GetInt32(\"PAGE_LIMIT\")\n\t}\n\tif page <= 0 {\n\t\tpage = 1\n\t}\n\toffset := limit * (page - 1)\n\n\t// Get data list\n\texamples, count, err := s.repository.ListWhere(where, order, limit, offset)\n\tif err != nil {\n\t\ts.logger.Errorw(\"list data error\", \"error\", err)\n\t\treturn nil, response.NewErrorFromCode(errorcode.GetDataError)\n\t}\n\texampleList := repository.NewExampleList(examples, count)\n\treturn exampleList, nil\n}", "func (db *DB) List(table jdh.Table, args *jdh.Values) (jdh.ListScanner, error) {\n\tif db.isClosed {\n\t\treturn nil, errors.New(\"database already closed\")\n\t}\n\tif args == nil {\n\t\treturn nil, errors.New(\"empty argument list\")\n\t}\n\tswitch table {\n\tcase jdh.Taxonomy:\n\t\treturn db.taxonList(args.KV)\n\t}\n\treturn nil, errors.New(\"list not implemented for table \" + string(table))\n}", "func (db *Database) ListKeyServerStatsDays(realmID uint) ([]*KeyServerStatsDay, error) {\n\tstop := timeutils.UTCMidnight(time.Now())\n\tstart := stop.Add(project.StatsDisplayDays * -24 * time.Hour)\n\tif start.After(stop) {\n\t\treturn nil, 
ErrBadDateRange\n\t}\n\n\tsql := `\n\t\tSELECT\n\t\t\td.day AS day,\n\t\t\t$1 AS realm_id,\n\t\t\tCOALESCE(s.publish_requests, array[0,0,0]::bigint[]) AS publish_requests,\n\t\t\tCOALESCE(s.total_teks_published, 0) AS total_teks_published,\n\t\t\tCOALESCE(s.revision_requests, 0) AS revision_requests,\n\t\t\tCOALESCE(s.tek_age_distribution, array[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]::bigint[]) AS tek_age_distribution,\n\t\t\tCOALESCE(s.onset_to_upload_distribution, array[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]::bigint[]) AS onset_to_upload_distribution,\n\t\t\tCOALESCE(s.request_missing_onset_date, 0) AS request_missing_onset_date\n\t\tFROM (\n\t\t\tSELECT day::date FROM generate_series($2, $3, '1 day'::interval) day\n\t\t) d\n\t\tLEFT JOIN key_server_stats_days s ON s.realm_id = $1 AND s.day = d.day\n\t\tORDER BY day DESC`\n\n\tvar stats []*KeyServerStatsDay\n\tif err := db.db.Raw(sql, realmID, start, stop).Scan(&stats).Error; err != nil {\n\t\tif IsNotFound(err) {\n\t\t\treturn stats, nil\n\t\t}\n\t\treturn nil, err\n\t}\n\treturn stats, nil\n}", "func (dao *OHLCVDao) GetAll() ([]types.Tick, error) {\n\tvar response []types.Tick\n\terr := db.Get(dao.dbName, dao.collectionName, bson.M{}, 0, 0, &response)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn nil, err\n\t}\n\n\treturn response, nil\n}", "func (m *HealthMenstruationDailyEntryORM) ToPB(ctx context.Context) (HealthMenstruationDailyEntry, error) {\n\tto := HealthMenstruationDailyEntry{}\n\tvar err error\n\tif prehook, ok := interface{}(m).(HealthMenstruationDailyEntryWithBeforeToPB); ok {\n\t\tif err = prehook.BeforeToPB(ctx, &to); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.Id = m.Id\n\tif m.CreatedAt != nil {\n\t\tif to.CreatedAt, err = ptypes1.TimestampProto(*m.CreatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tif m.UpdatedAt != nil {\n\t\tif to.UpdatedAt, err = ptypes1.TimestampProto(*m.UpdatedAt); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.ProfileId = 
m.ProfileId\n\tif m.Day != nil {\n\t\tif to.Day, err = ptypes1.TimestampProto(*m.Day); err != nil {\n\t\t\treturn to, err\n\t\t}\n\t}\n\tto.IntensityPercentage = m.IntensityPercentage\n\tto.Type = HealthMenstruationDailyEntry_Type(m.Type)\n\tto.Manual = m.Manual\n\tto.BasedOnPrediction = m.BasedOnPrediction\n\tif posthook, ok := interface{}(m).(HealthMenstruationDailyEntryWithAfterToPB); ok {\n\t\terr = posthook.AfterToPB(ctx, &to)\n\t}\n\treturn to, err\n}", "func (e *Event) List(c echo.Context, p *takrib.Pagination) ([]takrib.Event, error) {\n\tau := e.rbac.User(c)\n\tq, err := query.List(au)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn e.udb.List(e.db, q, p)\n}", "func (s *TimeAndBillingEntryRecentHourCostTypesEndpoint) List(ctx context.Context, division int, all bool, o *api.ListOptions) ([]*TimeAndBillingEntryRecentHourCostTypes, error) {\n\tvar entities []*TimeAndBillingEntryRecentHourCostTypes\n\tu, _ := s.client.ResolvePathWithDivision(\"/api/v1/{division}/read/project/TimeAndBillingEntryRecentHourCostTypes\", division) // #nosec\n\tapi.AddListOptionsToURL(u, o)\n\n\tif all {\n\t\terr := s.client.ListRequestAndDoAll(ctx, u.String(), &entities)\n\t\treturn entities, err\n\t}\n\t_, _, err := s.client.NewRequestAndDo(ctx, \"GET\", u.String(), nil, &entities)\n\treturn entities, err\n}", "func TestGetLogList(t *testing.T) {\n\tw := NewDateWriter(\"./logs\", \"abc\", HOUR, 0)\n\tw.cleanOldLogs()\n}", "func TodayStat(startTime time.Time, n int) ([]*model.SumStat, error) {\n\tvar debug_print_time = false\n\n\tvar conn *sql.DB\n\tvar stmt *sql.Stmt\n\tvar err error\n\tif conn, err = db.Connect(); err != nil {\n\t\treturn nil, err\n\t}\n\tdefer conn.Close()\n\n\tstartTime = startTime.UTC().Truncate(time.Hour*24).AddDate(0, 0, 1)\n\tendTime := startTime.AddDate(0, 0, -n).Truncate(time.Hour * 24)\n\tif debug_print_time {\n\t\tfmt.Println(\"((((())))) ---- start time:\", startTime)\n\t\tfmt.Println(\"((((())))) ---- end time:\", endTime)\n\t}\n\n\t// 
这个sql会自动将时间转换为utc时间进行搜索。因此传入的时间无需转换时区。\n\t_sql := `\nselect DATE_FORMAT(o.create_time, '%Y-%m-%d') as 'date', \n count(distinct o.track_number) as 'norder',\n sum(od.quantity) as 'nsold',\n sum(od.quantity * od.selling_price) as '总价' ` +\n\t\t\"from `order` o \" + `\n right join order_detail od on o.track_number = od.order_track_number\nwhere\n o.create_time<?\n and o.create_time >= ?\n and DATEDIFF(o.create_time,?) > ?\n and o.type in (?,?)\n and o.status in (?,?,?,?)\n and od.product_id<>?\ngroup by DATEDIFF(o.create_time,?)\norder by DATEDIFF(o.create_time,?) asc\n`\n\tif stmt, err = conn.Prepare(_sql); err != nil {\n\t\treturn nil, err\n\t}\n\tdefer stmt.Close()\n\n\t// now := time.Now()\n\trows, err := stmt.Query(\n\t\tstartTime,\n\t\tendTime,\n\t\tstartTime, -n,\n\t\tmodel.Wholesale, model.SubOrder, // model.ShippingInstead, // 查子订单\n\t\t\"toprint\", \"todeliver\", \"delivering\", \"done\",\n\t\tbase.STAT_EXCLUDED_PRODUCT,\n\t\tstartTime,\n\t\tstartTime,\n\t)\n\tif db.Err(err) {\n\t\treturn nil, err\n\t}\n\tdefer rows.Close() // db.CloseRows(rows) // use db.CloseRows or rows.Close()? Is rows always nun-nil?\n\n\t// the final result\n\tps := []*model.SumStat{}\n\tfor rows.Next() {\n\t\tp := new(model.SumStat)\n\t\trows.Scan(&p.Id, &p.NOrder, &p.NSold, &p.TotalPrice)\n\n\t\t// update average.\n\t\tp.AvgPrice = p.TotalPrice / float64(p.NSold)\n\n\t\tps = append(ps, p)\n\t}\n\treturn ps, nil\n}", "func GetFullList() (*List, error) { /*{{{*/\n\tresults, err := storage.Get()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t//sorted by date\n\tsort.Sort(results)\n\n\tl := &List{\n\t\tContent: make([]Lister, 0, len(results.Content)),\n\t}\n\tfor _, v := range results.Content {\n\t\tvar e interface{} = entry{v}\n\t\tif lister, ok := e.(Lister); ok {\n\t\t\tl.Content = append(l.Content, lister)\n\t\t}\n\t}\n\tif len(l.Content) == 0 {\n\t\treturn nil, errors.New(\"There is no posts. Maybe is generating... 
Refresh after a while\")\n\t}\n\treturn l, nil\n}", "func TestDbLog_GetList(t *testing.T) {\n\thandler := debug.NewLocalDb()\n\tdb := newDbLog(handler)\n\t_, err := db.Add(0, \"success\",\"\", 0, \"\", time.GetDayTime())\n\tif err == nil {\n\t\tt.Errorf(\"Add check cronId fail\")\n\t\treturn\n\t}\n\tid, err := db.Add(1, \"success\",\"123\", 1000, \"hello\", time.GetDayTime())\n\tif err != nil {\n\t\tt.Errorf(\"Add fail, error=[%v]\", err)\n\t\treturn\n\t}\n\trows, num, _, _, err := db.GetList(1, 0, 0)\n\tif err != nil || num <= 0 {\n\t\tt.Errorf(\"Get GetList, error=[%v], num=[%v]\", err, num)\n\t\treturn\n\t}\n\tvar row *LogEntity = nil\n\tfor _, r := range rows {\n\t\tif r.Id == id {\n\t\t\trow = r\n\t\t}\n\t}\n\tif row == nil {\n\t\tt.Errorf(\"GetList fail\")\n\t\treturn\n\t}\n\tif row.Id <= 0 || row.CronId != 1 || row.Output != \"123\"||\n\t\trow.UseTime != 1000 || row.Remark != \"hello\" {\n\t\tt.Errorf(\"Add check rows fail\")\n\t\treturn\n\t}\n\tdb.Delete(row.Id)\n}", "func (client ScheduleMessageClient) ListResponder(resp *http.Response) (result PushScheduleFetchAllParameter, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusBadRequest, http.StatusUnauthorized, http.StatusForbidden, http.StatusNotFound, http.StatusInternalServerError),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func ListEventsByDate(date string) error {\n\ttokFile := \"token.json\"\n\t_, err := tokenFromFile(tokFile)\n\tif err != nil {\n\t\treturn err\n\t}\n\tclient, err := GetClientToken()\n\tif err != nil {\n\t\treturn err\n\t}\n\tsrv, err := calendar.New(client)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Unable to retrieve Calendar client: %v\", err)\n\t}\n\tlayout := \"2006-01-02\"\n\tday, err := time.Parse(layout, date)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdayAfter := day.AddDate(0, 0, 
1).Format(time.RFC3339)\n\tevents, err := srv.Events.List(\"primary\").ShowDeleted(false).SingleEvents(true).TimeMin(day.Format(time.RFC3339)).TimeMax(dayAfter).MaxResults(1).OrderBy(\"startTime\").Do()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Unable to retrieve today's user's events: %v\", err)\n\t}\n\tif len(events.Items) == 0 {\n\t\tfmt.Println(\"You have no upcoming events on your calendar for this day.\")\n\t} else {\n\t\tfmt.Println(\"These are your upcoming events on your calendar for this day\")\n\t\ttable := tablewriter.NewWriter(os.Stdout)\n\t\ttable.SetHeader([]string{\"EVENTID\", \"NAME\", \"DATE\"})\n\t\tdt := [][]string{}\n\t\tfor _, item := range events.Items {\n\t\t\tdate := item.Start.DateTime\n\t\t\tif date == \"\" {\n\t\t\t\tdate = item.Start.Date\n\t\t\t}\n\t\t\tdt = append(dt, []string{item.Id, item.Summary, date})\n\t\t}\n\t\tfor _, t := range dt {\n\t\t\ttable.Append(t)\n\t\t}\n\t\ttable.Render()\n\t}\n\treturn nil\n}", "func (h *Handler) FetchDailyTimeSeries(w http.ResponseWriter, r *http.Request, params httprouter.Params) error {\n\tstockSymbol := params.ByName(\"stock\")\n\n\tstock, err := h.Stocky.Postgres.GetOrCreateStock(stockSymbol)\n\tif err != nil {\n\t\tError(w, err)\n\t\treturn err\n\t}\n\n\tredisKey := fmt.Sprintf(\"stocky_%s\", stockSymbol)\n\t_, err = h.Stocky.Redis.Get(redisKey)\n\tif err == redis.Nil {\n\t\terr = h.updateDaily(stockSymbol, redisKey, stock.ID)\n\t\tif err != nil {\n\t\t\tError(w, err)\n\t\t\treturn err\n\t\t}\n\t} else if err != nil {\n\t\tError(w, err)\n\t\treturn err\n\t}\n\n\tstockDailySeries, err := h.Stocky.Postgres.FetchDailySeriesByStock(stock.ID)\n\tif err != nil {\n\t\tError(w, err)\n\t\treturn err\n\t}\n\n\tOK(w, entity.NewStockDailyResponse(stock, stockDailySeries), \"\")\n\treturn nil\n}", "func StartListEntriesCall(m telemetry.Metrics) *telemetry.CallCounter {\n\treturn telemetry.StartCall(m, telemetry.RegistrationAPI, telemetry.Entry, telemetry.List)\n}", "func (s *RaftServer) ListEntries(_ 
context.Context, _ *raftapi.Empty) (*raftapi.EntryListResponse, error) {\n\tlist, err := s.logRepo.List()\n\tif err != nil {\n\t\treturn nil, model.NewRaftError(&s.member, err)\n\t}\n\tvar logEntries = make([]*raftapi.LogEntry, 0)\n\tfor _, entry := range list {\n\t\tlogEntries = append(logEntries, &raftapi.LogEntry{\n\t\t\tTerm: entry.Term,\n\t\t\tValue: entry.Value,\n\t\t})\n\t}\n\tresponse := &raftapi.EntryListResponse{Entries: logEntries}\n\treturn response, nil\n}", "func getLoanList(ls *[]models.Loan, db *gorm.DB) error {\n\tvar l models.Loan\n\tif len(*ls) == 1 {\n\t\tl = (*ls)[0]\n\t}\n\twhere := \"\"\n\tif l.CodCollection != 0 {\n\t\twhere = fmt.Sprintf(\"cod_collection = %v\", l.CodCollection)\n\t}\n\tif l.CodClient != 0 {\n\t\twhere = fmt.Sprintf(\"cod_client = %v\", l.CodClient)\n\t}\n\tif where != \"\" && l.CodLoanState != 0 {\n\t\twhere += fmt.Sprintf(\" and cod_loan_state = %v\", l.CodLoanState)\n\t}\n\terr := db.Where(where).Select(\"id,created_at,updated_at,initial_value,interest,quota,balance,cod_loan_state,cod_client\").Find(ls).GetErrors()\n\tif len(err) != 0 {\n\t\treturn errors.New(\"no se encuentra\")\n\t}\n\treturn nil\n}", "func handleList(cmd *cobra.Command, args []string) {\n\tquery := `\n\tSELECT\n chat.chat_identifier AS id,\n count(chat.chat_identifier) AS messages\n\tFROM\n\t\tchat\n\t\tJOIN chat_message_join ON chat.\"ROWID\" = chat_message_join.chat_id\n\t\tJOIN message ON chat_message_join.message_id = message.\"ROWID\"\n\tWHERE TRUE\n\t-- filter out message reactions\n\tAND text IS NOT NULL\n\tAND associated_message_type == 0\n\t-- filter out empty messages\n\tAND trim(text, ' ') <> ''\n\tAND text <> ''\n\tGROUP BY\n\t\tchat.chat_identifier\n\tHAVING messages > ?\n\tORDER BY\n\t\tmessages DESC, id DESC;\n\t`\n\trows, err := db.Query(query, count)\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\tdefer rows.Close()\n\tfor rows.Next() {\n\t\tvar id string\n\t\tvar messages string\n\t\terr = rows.Scan(&id, &messages)\n\t\tif err != 
nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\t\tif isatty.IsTerminal(uintptr(unix.Stdout)) {\n\t\t\tfmt.Printf(\"%s\\t%s\\n\", aurora.Yellow(id), aurora.Blue(messages))\n\t\t} else {\n\t\t\tfmt.Printf(\"%s\\t%s\\n\", id, messages)\n\t\t}\n\t}\n\terr = rows.Err()\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n}", "func AllLogManagementsGet(c *gin.Context) {\n\t// 分表注释下面两行代码\n\tmeta := model.TableMetaFromQuery(c)\n\tginutils.WriteGinJSON(c, http.StatusOK, model.AllLogManagements(meta))\n\t// 分表取消注释下面三行代码\n\t// meta := model.TableMetaFromQuery(c, \"suffix\")\n\t// suffix := c.Query(\"suffix\")\n\t// ginutils.WriteGinJSON(c, http.StatusOK, model.AllLogManagements(meta, suffix))\n}", "func populateEntries(query string) []entity.Entry {\n\tresult := []entity.Entry{}\n\n\tdb := util.OpenDb()\n\trows, _ := db.Query(query)\n\n\tdefer rows.Close()\n\n\tvar (\n\t\tid, score int\n\t\tconcern, grateful, learn, milestone string\n\t\tentered int64\n\t)\n\n\tfor rows.Next() {\n\t\trows.Scan(&id, &score, &concern, &grateful, &learn, &milestone, &entered)\n\t\tresult = append(result, entity.EntryWithAllFields(id, score, concern, grateful, learn, milestone, entered))\n\t}\n\treturn result\n}", "func (h *Handler) list() http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tentities, err := h.UserDAO.FetchAll(r.Context())\n\t\tswitch {\n\t\tcase errors.Is(err, errorx.ErrNoUser):\n\t\t\tmsg := &errorMessage{\n\t\t\t\tMessage: fmt.Sprintf(\"no users exist\"),\n\t\t\t}\n\t\t\tresponse.JSON(w, http.StatusNotFound, msg)\n\t\t\treturn\n\t\tcase err != nil:\n\t\t\tmsg := &errorMessage{\n\t\t\t\tError: err.Error(),\n\t\t\t\tMessage: \"user datastore error\",\n\t\t\t}\n\t\t\tresponse.JSON(w, http.StatusInternalServerError, msg)\n\t\t\treturn\n\t\tdefault:\n\t\t\tresponse.JSON(w, http.StatusOK, entities)\n\t\t}\n\t}\n}", "func (r *HealthResource) ListAll() (*HealthConfigList, error) {\n\tvar list HealthConfigList\n\tif err := r.c.ReadQuery(BasePath+HealthEndpoint, &list); 
err != nil {\n\t\treturn nil, err\n\t}\n\treturn &list, nil\n}", "func (f *firestoreDir) List(ctx context.Context) ([]plugin.Entry, error) {\n\tcolls, err := f.client.Collections(ctx).GetAll()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn toCollectionEntries(f.client, \"\", colls), nil\n}", "func DefaultCreateHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithBeforeCreate_); ok {\n\t\tif db, err = hook.BeforeCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&ormObj).(HealthMenstruationPersonalInfoORMWithAfterCreate_); ok {\n\t\tif err = hook.AfterCreate_(ctx, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (c *kuberhealthyChecks) List(opts metav1.ListOptions) (result KuberhealthyCheckList, err error) {\n\tvar timeout time.Duration\n\tif opts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*opts.TimeoutSeconds) * time.Second\n\t}\n\tresult = KuberhealthyCheckList{}\n\terr = c.client.Get().\n\t\tNamespace(c.ns).\n\t\tResource(\"khchecks\").\n\t\tVersionedParams(&opts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tDo(context.TODO()).\n\t\tInto(&result)\n\treturn\n}", "func (us UserService) List(dto dto.GeneralListDto) ([]model.User, int64) {\n\treturn userDao.List(dto)\n}", "func (h *GetStickTableEntriesHandlerImpl) Handle(params stick_table.GetStickTableEntriesParams, principal interface{}) middleware.Responder {\n\tfilter := make([]string, 0)\n\tif params.Filter != nil {\n\t\tfilter = strings.Split(*params.Filter, \",\")\n\t}\n\n\tkey := 
\"\"\n\tif params.Key != nil {\n\t\tkey = *params.Key\n\t}\n\truntime, err := h.Client.Runtime()\n\tif err != nil {\n\t\te := misc.HandleError(err)\n\t\treturn stick_table.NewGetStickTableEntriesDefault(int(*e.Code)).WithPayload(e)\n\t}\n\n\tstkEntries, err := runtime.GetTableEntries(params.StickTable, int(params.Process), filter, key)\n\tif err != nil {\n\t\te := misc.HandleError(err)\n\t\treturn stick_table.NewGetStickTableEntriesDefault(int(*e.Code)).WithPayload(e)\n\t}\n\n\t// if no entries return empty array\n\tif len(stkEntries) == 0 {\n\t\treturn stick_table.NewGetStickTableEntriesOK().WithPayload(stkEntries)\n\t}\n\n\t// else check for pagination\n\toffset := int64(0)\n\tif params.Offset != nil {\n\t\toffset = *params.Offset\n\t}\n\n\tif int(offset) >= len(stkEntries) {\n\t\tmsg := fmt.Sprintf(\"Offset %d is larger than the slice size %d\", offset, len(stkEntries))\n\t\tc := misc.ErrHTTPBadRequest\n\t\te := &models.Error{\n\t\t\tMessage: &msg,\n\t\t\tCode: &c,\n\t\t}\n\t\treturn stick_table.NewGetStickTableEntriesDefault(int(*e.Code)).WithPayload(e)\n\t}\n\n\tif params.Count != nil {\n\t\tif int(offset+*params.Count) >= len(stkEntries) {\n\t\t\tstkEntries = stkEntries[offset:]\n\t\t} else {\n\t\t\tstkEntries = stkEntries[offset : offset+*params.Count]\n\t\t}\n\t} else {\n\t\tstkEntries = stkEntries[offset:]\n\t}\n\treturn stick_table.NewGetStickTableEntriesOK().WithPayload(stkEntries)\n}", "func (p *pool) listEntries() ([]entry, error) {\n\tconn := p.Get()\n\tdefer conn.Close()\n\n\tids, err := redis.Ints(conn.Do(\"SMEMBERS\", key(\"entries\")))\n\tif err != nil {\n\t\treturn nil, xerrors.Errorf(\"can not receive ids: %w\", err)\n\t}\n\n\tvar entries []entry\n\tfor _, id := range ids {\n\t\tvalues, err := redis.Strings(conn.Do(\n\t\t\t\"HMGET\",\n\t\t\tkey(\"entry\", strconv.Itoa(id)),\n\t\t\t\"from\",\n\t\t\t\"subject\",\n\t\t\t\"text\",\n\t\t\t\"fileext\",\n\t\t\t\"created\",\n\t\t))\n\t\tif err != nil {\n\t\t\treturn nil, xerrors.Errorf(\"can not receice 
entry %d: %w\", id, err)\n\t\t}\n\n\t\tcreated, err := time.Parse(\"2006-01-02 15:04:05\", values[4])\n\t\tif err != nil {\n\t\t\treturn nil, xerrors.Errorf(\"can not parse created time: %w\", err)\n\t\t}\n\n\t\tentries = append(entries, entry{\n\t\t\tID: id,\n\t\t\tFrom: values[0],\n\t\t\tSubject: values[1],\n\t\t\tText: values[2],\n\t\t\tExtension: values[3],\n\t\t\tCreated: created.Format(\"2006-01-02 15:04\"),\n\t\t})\n\t}\n\treturn entries, nil\n}", "func (s *TimeLogStore) ListAllByDates(ctx context.Context, from time.Time, to time.Time) ([]domain.TimeLog, error) {\n\ttimeLogs := s.client.Database(dbName).Collection(timeLogsCollectionName)\n\n\tfindOptions := options.Find()\n\tfindOptions.SetSort(bson.D{{Key: \"createdAt\", Value: 1}})\n\n\tcursor, err := timeLogs.Find(ctx, bson.D{\n\t\t{Key: \"createdAt\", Value: bson.D{{Key: \"$gte\", Value: from}}},\n\t\t{Key: \"createdAt\", Value: bson.D{{Key: \"$lte\", Value: to}}},\n\t}, findOptions)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer cursor.Close(ctx)\n\n\tvar result []domain.TimeLog\n\tcursor.All(ctx, &result)\n\n\tif err := cursor.Err(); err != nil {\n\t\treturn nil, err\n\t}\n\n\tif len(result) == 0 {\n\t\tresult = []domain.TimeLog{}\n\t}\n\n\treturn result, nil\n}", "func (c *globalThreatFeeds) List(ctx context.Context, opts v1.ListOptions) (result *v3.GlobalThreatFeedList, err error) {\n\tvar timeout time.Duration\n\tif opts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*opts.TimeoutSeconds) * time.Second\n\t}\n\tresult = &v3.GlobalThreatFeedList{}\n\terr = c.client.Get().\n\t\tResource(\"globalthreatfeeds\").\n\t\tVersionedParams(&opts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tDo(ctx).\n\t\tInto(result)\n\treturn\n}", "func (repo *SingleStoryRepository) FindAllStoriesForLoggedUser(userId uuid.UUID) []model.SingleStory {\n\tvar stories []model.SingleStory\n\trepo.Database.Select(\"*\").Where(\"user_id = ? 
and is_deleted = ?\", userId, false).Find(&stories)\n\n\tfor i:=0; i< len(stories); i++{\n\t\tif time.Now().After(stories[i].CreationDate.Add(24 * time.Hour)){\n\t\t\t// PASSED TIME SHOULD SET STORY AS EXPIRED\n\t\t\t//stories[i].IsExpired = true\n\t\t\trepo.Database.Model(&model.SingleStory{}).Where(\"id = ?\", stories[i].ID).Update(\"is_expired\", true)\n\t\t\trepo.Database.Model(&model.Story{}).Where(\"id = ?\", stories[i].ID).Update(\"is_expired\", true)\n\t\t}\n\t}\n\n\treturn stories\n}", "func (s *EmployeesEndpoint) List(ctx context.Context, division int, all bool, o *api.ListOptions) ([]*Employees, error) {\n\tvar entities []*Employees\n\tu, _ := s.client.ResolvePathWithDivision(\"/api/v1/{division}/payroll/Employees\", division) // #nosec\n\tapi.AddListOptionsToURL(u, o)\n\n\tif all {\n\t\terr := s.client.ListRequestAndDoAll(ctx, u.String(), &entities)\n\t\treturn entities, err\n\t}\n\t_, _, err := s.client.NewRequestAndDo(ctx, \"GET\", u.String(), nil, &entities)\n\treturn entities, err\n}", "func List() ([]Incident, error) {\n\n\tmyData := make([]Incident, 0)\n\n\trows, err := database.DBCon.Query(\"SELECT * FROM incidents.employees\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdefer rows.Close()\n\n\tfor rows.Next() {\n\t\ti := new(Incident)\n\t\terr = rows.Scan(&i.ID, &i.Type, &i.StartDate, &i.EndDate, &i.Note, &i.EmployeeId)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tmyData = append(myData, *i)\n\t}\n\n\terr = rows.Err()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\trows.Close()\n\n\treturn myData, nil\n}", "func List(habitMap habits.HabitMap) {\n\torderedHabits := orderByTag(habitMap)\n\thabitStrings := make([]string, len(habitMap), len(habitMap))\n\t// Maintain a map of tag to indexes that have that tag\n\ttype HabitEntry struct {\n\t\tIndex int\n\t\tDaysSince int\n\t}\n\tbyTag := map[string][]HabitEntry{}\n\n\tfor index, h := range orderedHabits {\n\t\tnumOccurrences := len(h.Occurrences)\n\t\tdaysSince := -1\n\t\tif 
numOccurrences > 0 {\n\t\t\tlastOccurrence := h.Occurrences[numOccurrences-1]\n\t\t\thoursSince := time.Now().Sub(lastOccurrence).Hours()\n\t\t\tdaysSince = int(math.Floor(hoursSince)) / 24\n\t\t}\n\t\t// TODO: Optimization, persist this formatted string in Habit objects\n\t\t// But there's trickiness since the index is part of this string\n\t\thabitStrings[index] = fmt.Sprintf(\"%d. | %s | %s | ^%d days since last tick\\n\", index, h.Name, h.ShortName, daysSince)\n\t\tbyTag[h.Tag] = append(byTag[h.Tag], HabitEntry{index, daysSince})\n\t}\n\n\tcolumns := columnize.SimpleFormat(habitStrings)\n\tlines := strings.Split(columns, \"\\n\")\n\n\tcolorAssignments := buildColorOrder(orderedHabits)\n\n\thabitIndex := 0\n\tfor _, boundary := range colorAssignments.TagBoundaries {\n\t\ttag := orderedHabits[habitIndex].Tag\n\t\tfmt.Printf(\"%4v#\", \"\")\n\t\tcolor.OpUnderscore.Println(tag)\n\t\tfor habitIndex < boundary {\n\t\t\tline := lines[habitIndex]\n\t\t\tparts := strings.Split(line, \"^\")\n\t\t\tcolorToUse := colorAssignments.ColorOrder[habitIndex]\n\t\t\tcolor.S256(colorToUse).Printf(\"%8v %s\", \"\", parts[0])\n\t\t\tcolor.Gray.Print(parts[1])\n\t\t\tfmt.Println()\n\t\t\thabitIndex++\n\t\t}\n\t}\n}", "func OnList(c *grumble.Context) error {\n\tlen := len(config.AppConfig.Plans)\n\tif len == 0 {\n\t\tfmt.Println(\"No plans available. 
Try \\\"read\\\".\")\n\t\treturn nil\n\t}\n\n\tfor i, plan := range config.AppConfig.Plans {\n\t\tfmt.Println(i+1, plan.Name)\n\t\tfor i, task := range plan.Tasks {\n\t\t\tif task.GetDescription() != \"\" {\n\t\t\t\tfmt.Println(\" \", strconv.Itoa(i+1)+\".\", task.GetDescription())\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func (k Keeper) HistoricalEntries(ctx sdk.Context) (res uint32) {\n\tk.paramspace.Get(ctx, types.KeyHistoricalEntries, &res)\n\treturn\n}", "func (r *LocalRegistry) List(artHome string, extended bool) {\n\t// get a table writer for the stdout\n\tw := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', tabwriter.Debug)\n\t// print the header row\n\tvar err error\n\tif extended {\n\t\t_, err = fmt.Fprintln(w, i18n.String(artHome, i18n.LBL_LS_HEADER_PLUS))\n\t} else {\n\t\t_, err = fmt.Fprintln(w, i18n.String(artHome, i18n.LBL_LS_HEADER))\n\t}\n\tcore.CheckErr(err, \"failed to write table header\")\n\tvar (\n\t\ts *data.Seal\n\t\tauthor string\n\t)\n\t// repository, tag, package id, created, size\n\tfor _, repo := range r.Repositories {\n\t\tfor _, a := range repo.Packages {\n\t\t\ts, err = r.GetSeal(a)\n\t\t\tif err != nil {\n\t\t\t\tauthor = \"unknown\"\n\t\t\t} else {\n\t\t\t\tauthor = s.Manifest.Author\n\t\t\t}\n\t\t\t// if the package is dangling (no tags)\n\t\t\tif len(a.Tags) == 0 {\n\t\t\t\tif extended {\n\t\t\t\t\t_, err = fmt.Fprintln(w, fmt.Sprintf(\"%s\\t %s\\t %s\\t %s\\t %s\\t %s\\t %s\\t\",\n\t\t\t\t\t\trepo.Repository,\n\t\t\t\t\t\t\"<none>\",\n\t\t\t\t\t\ta.Id[0:12],\n\t\t\t\t\t\ta.Type,\n\t\t\t\t\t\ttoElapsedLabel(a.Created),\n\t\t\t\t\t\ta.Size,\n\t\t\t\t\t\tauthor),\n\t\t\t\t\t)\n\t\t\t\t} else {\n\t\t\t\t\t_, err = fmt.Fprintln(w, fmt.Sprintf(\"%s\\t %s\\t %s\\t %s\\t %s\\t %s\\t\",\n\t\t\t\t\t\trepo.Repository,\n\t\t\t\t\t\t\"<none>\",\n\t\t\t\t\t\ta.Id[0:12],\n\t\t\t\t\t\ta.Type,\n\t\t\t\t\t\ttoElapsedLabel(a.Created),\n\t\t\t\t\t\ta.Size),\n\t\t\t\t\t)\n\t\t\t\t}\n\t\t\t\tcore.CheckErr(err, \"failed to write 
output\")\n\t\t\t}\n\t\t\tfor _, tag := range a.Tags {\n\t\t\t\tif extended {\n\t\t\t\t\t_, err = fmt.Fprintln(w, fmt.Sprintf(\"%s\\t %s\\t %s\\t %s\\t %s\\t %s\\t %s\\t\",\n\t\t\t\t\t\trepo.Repository,\n\t\t\t\t\t\ttag,\n\t\t\t\t\t\ta.Id[0:12],\n\t\t\t\t\t\ta.Type,\n\t\t\t\t\t\ttoElapsedLabel(a.Created),\n\t\t\t\t\t\ta.Size,\n\t\t\t\t\t\tauthor),\n\t\t\t\t\t)\n\t\t\t\t} else {\n\t\t\t\t\t_, err = fmt.Fprintln(w, fmt.Sprintf(\"%s\\t %s\\t %s\\t %s\\t %s\\t %s\\t\",\n\t\t\t\t\t\trepo.Repository,\n\t\t\t\t\t\ttag,\n\t\t\t\t\t\ta.Id[0:12],\n\t\t\t\t\t\ta.Type,\n\t\t\t\t\t\ttoElapsedLabel(a.Created),\n\t\t\t\t\t\ta.Size),\n\t\t\t\t\t)\n\t\t\t\t}\n\t\t\t\tcore.CheckErr(err, \"failed to write output\")\n\t\t\t}\n\t\t}\n\t}\n\terr = w.Flush()\n\tcore.CheckErr(err, \"failed to flush output\")\n}", "func (a *HyperflexApiService) GetHyperflexHealthList(ctx context.Context) ApiGetHyperflexHealthListRequest {\n\treturn ApiGetHyperflexHealthListRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "func (m *Mux) List(ds *discordgo.Session, dm *discordgo.Message, ctx *Context) {\n\tresp := \"```\\n\"\n\n\tfor p, v := range Config.Feeds {\n\t\tresp += strconv.Itoa(p) + \": \" + v.Feed.Title + \", \" + v.Feed.Link + \"\\n\"\n\t}\n\n\tresp += \"```\\n\"\n\n\tds.ChannelMessageSend(dm.ChannelID, resp)\n\n\treturn\n}", "func (v HashtagsResource) List(c buffalo.Context) error {\n\t// Get the DB connection from the context\n\ttx := c.Value(\"tx\").(*pop.Connection)\n\thashtags := &models.Hashtags{}\n\t// Paginate results. Params \"page\" and \"per_page\" control pagination.\n\t// Default values are \"page=1\" and \"per_page=20\".\n\tq := tx.PaginateFromParams(c.Params())\n\t// You can order your list here. 
Just change\n\terr := q.All(hashtags)\n\t// to:\n\t// err := q.Order(\"created_at desc\").All(hashtags)\n\tif err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\t// Make Hashtags available inside the html template\n\tc.Set(\"hashtags\", hashtags)\n\t// Add the paginator to the context so it can be used in the template.\n\tc.Set(\"pagination\", q.Paginator)\n\treturn c.Render(200, r.HTML(\"hashtags/index.html\"))\n}", "func (h *History) List() {\n\tload := reverse(h.Load())\n\tprompt := promptui.Select{\n\t\tLabel: \"Target hisotry\",\n\t\tItems: load,\n\t\tSize: 10,\n\t}\n\n\ti, _, err := prompt.Run()\n\n\tif err != nil {\n\t\tlog.Fatalln(\"Prompt failed: \\n\", err)\n\t}\n\n\titem := load[i]\n\th.Write(item)\n\tExecuteItem(h.binary, item)\n}", "func (d *Service) AlertsList(ctx context.Context, Limit int, Offset int) ([]*thunderdome.Alert, int, error) {\n\tAlerts := make([]*thunderdome.Alert, 0)\n\tvar AlertCount int\n\n\te := d.DB.QueryRowContext(ctx,\n\t\t\"SELECT COUNT(*) FROM thunderdome.alert;\",\n\t).Scan(\n\t\t&AlertCount,\n\t)\n\tif e != nil {\n\t\td.Logger.Ctx(ctx).Error(\"query scan error\", zap.Error(e))\n\t}\n\n\trows, err := d.DB.QueryContext(ctx,\n\t\t`SELECT id, name, type, content, active, allow_dismiss, registered_only, created_date, updated_date\n\t\tFROM thunderdome.alert\n\t\tLIMIT $1\n\t\tOFFSET $2;\n\t\t`,\n\t\tLimit,\n\t\tOffset,\n\t)\n\n\tif err == nil {\n\t\tdefer rows.Close()\n\t\tfor rows.Next() {\n\t\t\tvar a thunderdome.Alert\n\n\t\t\tif err := rows.Scan(\n\t\t\t\t&a.Id,\n\t\t\t\t&a.Name,\n\t\t\t\t&a.Type,\n\t\t\t\t&a.Content,\n\t\t\t\t&a.Active,\n\t\t\t\t&a.AllowDismiss,\n\t\t\t\t&a.RegisteredOnly,\n\t\t\t\t&a.CreatedDate,\n\t\t\t\t&a.UpdatedDate,\n\t\t\t); err != nil {\n\t\t\t\td.Logger.Ctx(ctx).Error(\"query scan error\", zap.Error(err))\n\t\t\t\treturn nil, AlertCount, err\n\t\t\t} else {\n\t\t\t\tAlerts = append(Alerts, &a)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn Alerts, AlertCount, err\n}", "func (m *ReportRoot) 
GetDailyPrintUsage()([]PrintUsageable) {\n val, err := m.GetBackingStore().Get(\"dailyPrintUsage\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.([]PrintUsageable)\n }\n return nil\n}", "func (service *EntriesService) List(spaceID string) *Collection {\n\tpath := fmt.Sprintf(\"/spaces/%s%s/entries\", spaceID, getEnvPath(service.c))\n\tmethod := \"GET\"\n\n\treq, err := service.c.newRequest(method, path, nil, nil)\n\tif err != nil {\n\t\treturn &Collection{}\n\t}\n\n\tcol := NewCollection(&CollectionOptions{})\n\tcol.c = service.c\n\tcol.req = req\n\n\treturn col\n}", "func list(db *sql.DB) ([]Todo, error) {\n\treturn read(db, -1)\n}", "func (ec EntryController) GetAllEntries(w http.ResponseWriter, r *http.Request) {\n\tlastProcessedID := r.FormValue(\"last\")\n\n\tvar entries []models.Entry\n\n\toptions := options.Find()\n\n\tfilter := ec.BuildEntryFilter(lastProcessedID, options)\n\n\toptions.SetSort(bson.M{\"createdAt\": -1})\n\n\toptions.SetLimit(10)\n\n\tcurs, err := ec.coll.Find(context.Background(), filter, options)\n\n\tif err != nil {\n\t\tshared.FError(w, http.StatusBadRequest, \"Failed to fetch entries\")\n\t\treturn\n\t}\n\n\t// await parsing of all available entries\n\tdefer curs.Close(context.Background())\n\n\tfor curs.Next(context.Background()) {\n\n\t\tvar entry models.Entry\n\t\terr := curs.Decode(&entry)\n\n\t\tif err != nil {\n\t\t\tshared.FError(w, http.StatusBadRequest, \"Failed to deserialize entries\")\n\t\t\treturn\n\t\t}\n\n\t\tentries = append(entries, entry)\n\t}\n\n\tif err := curs.Err(); err != nil {\n\t\tshared.FError(w, http.StatusBadRequest, \"Failed to parse entries\")\n\t\treturn\n\t}\n\n\tshared.FResponse(w, http.StatusOK, entries)\n}", "func (ep *eventsProvider) List(pageParams ...interface{}) ([]*events.Item, error) {\n\titems := make([]*events.Item, len(ep.Data))\n\n\tep.mutex.RLock()\n\tdefer ep.mutex.RUnlock()\n\tcopy(items, ep.Data)\n\n\treturn items, nil\n}", "func HomePageList(c *server.Context) error 
{\n\tvar (\n\t\terr error\n\t\tres []ware.BriefInfo\n\t\tidReq struct {\n\t\t\tLastID uint32 `json:\"last_id\"`\n\t\t}\n\t)\n\n\terr = c.JSONBody(&idReq)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInvalidParam, nil)\n\t}\n\n\terr = c.Validate(idReq)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrInvalidParam, nil)\n\t}\n\n\tconn, err := mysql.Pool.Get()\n\tdefer mysql.Pool.Release(conn)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrMysql, nil)\n\t}\n\n\tres, err = ware.Service.HomePageList(conn, idReq.LastID)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn core.WriteStatusAndDataJSON(c, constants.ErrMysql, nil)\n\t}\n\n\treturn core.WriteStatusAndDataJSON(c, constants.ErrSucceed, res)\n}", "func (c *AuditEventClient) List(ctx context.Context, opts *AuditEventListOptions) ([]*resource.AuditEvent, *Pager, error) {\n\tif opts == nil {\n\t\topts = NewAuditEventListOptions()\n\t}\n\tvar res resource.AuditEventList\n\terr := c.client.get(ctx, path.Format(\"/v3/audit_events?%s\", opts.ToQueryString()), &res)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tpager := NewPager(res.Pagination)\n\treturn res.Resources, pager, nil\n}", "func (s priceEntriesService) List(ctx context.Context, paginationOpt common.PaginationOption, sortOpts common.SortOptions) (model.PriceEntries, error) {\n\treturn s.storage.PriceImport().GetPriceEntries(ctx, sortOpts, paginationOpt)\n}", "func (r *Replenish) List(args *Replenish) (*ReplenishList, error) {\n\taction := \"sales.replenish.list.get\"\n\tdata, err := json.Marshal(args)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tbody, err := mafengwo.NewDeals().Fetch(action, data)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresult := ReplenishList{}\n\terr = json.Unmarshal(body, &result)\n\treturn &result, err\n}", "func GetHealth(w http.ResponseWriter, r *http.Request, db 
*sqlx.DB) {\n\tparams := mux.Vars(r)\n\n\thealth := []Health{}\n\n\tvar err error\n\n\tsession, err := store.Get(r, \"auth\")\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Convert our session data into an instance of User\n\tuser := User{}\n\tuser, _ = session.Values[\"user\"].(User)\n\n\tif user.Username != \"\" && user.AccessLevel == \"admin\" {\n\t\tif _, ok := params[\"id\"]; ok {\n\t\t\terr = db.Select(&health, \"SELECT id, username, ts, variable, value \"+\n\t\t\t\t\"FROM public.health \"+\n\t\t\t\t\"WHERE id = $1 \", params[\"id\"])\n\t\t} else if _, ok = params[\"ts\"]; ok {\n\t\t\terr = db.Select(&health, \"SELECT id, username, ts, variable, value \"+\n\t\t\t\t\"FROM public.health \"+\n\t\t\t\t\"WHERE ts = $1 \", params[\"ts\"])\n\t\t} else if _, ok = params[\"variable\"]; ok {\n\t\t\terr = db.Select(&health, \"SELECT id, username, ts, variable, value \"+\n\t\t\t\t\"FROM public.health \"+\n\t\t\t\t\"WHERE variable = $1 \", params[\"variable\"])\n\t\t} else {\n\t\t\terr = db.Select(&health, \"SELECT id, username, ts, variable, value \"+\n\t\t\t\t\"FROM public.health \")\n\t\t}\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\t\tw.WriteHeader(http.StatusOK)\n\n\t\tif err := json.NewEncoder(w).Encode(health); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t} else {\n\t\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\t\tw.WriteHeader(http.StatusOK)\n\n\t\tif err := json.NewEncoder(w).Encode(\"access denied\"); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}\n\n\tlogRequest(r)\n}", "func (m *SeaterModel) ListMeetings(params QueryParams) (meetings []*Meeting, err error) {\n\to := m.Orm()\n\n\tmeetings = make([]*Meeting, 0, PagingDefaultLimit)\n\n\tqs := o.QueryTable(new(Meeting))\n\tif params != nil {\n\t\tqs = qs.SetCond(params.Condition())\n\t}\n\n\tqs = qs.OrderBy(\"-ID\")\n\t_, err = m.PagingAll(params, qs, 
&meetings)\n\tif err == orm.ErrNoRows {\n\t\treturn meetings, nil\n\t} else if err != nil {\n\t\terr = errors.Trace(err)\n\t\treturn\n\t}\n\n\treturn\n}", "func (a *HyperflexApiService) GetHyperflexHealthCheckExecutionList(ctx context.Context) ApiGetHyperflexHealthCheckExecutionListRequest {\n\treturn ApiGetHyperflexHealthCheckExecutionListRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "func (s *democrdLister) List(selector labels.Selector) (ret []*v1.Democrd, err error) {\n\terr = cache.ListAll(s.indexer, selector, func(m interface{}) {\n\t\tret = append(ret, m.(*v1.Democrd))\n\t})\n\treturn ret, err\n}", "func (us ChapterService) List(dto dto.GeneralListDto) ([]model.Chapter, int64) {\n\treturn chapterDao.List(dto)\n}", "func (db *DB) List(table jdh.Table, args *jdh.Values) (jdh.ListScanner, error) {\n\tif args == nil {\n\t\treturn nil, errors.New(\"empty argument list\")\n\t}\n\tconn, err := net.Dial(\"tcp\", db.port)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tenc := json.NewEncoder(conn)\n\treq := &Request{\n\t\tQuery: jdh.List,\n\t\tTable: table,\n\t\tKvs: args.KV,\n\t}\n\tenc.Encode(req)\n\tdec := json.NewDecoder(conn)\n\tans := &Answer{}\n\tif err := dec.Decode(ans); err != nil {\n\t\treturn nil, err\n\t}\n\tif _, err := ans.GetMessage(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn &listScanner{c: conn, d: dec}, nil\n}", "func listSuggestions(q Queryable, mpID uint64, date time.Time) (suggs []*mpdata.Suggestion, err error) {\n\trows, err := q.Query(\"SELECT meal.id, meal.name, meal.recipe, meal.favourite, MIN(ABS(DATEDIFF(serving.dateserved, ?))) FROM meal LEFT JOIN serving ON meal.id = serving.mealid GROUP BY meal.id\", date)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer rows.Close()\n\n\tfor rows.Next() {\n\t\tmeal := new(mpdata.Meal)\n\t\tsugg := new(mpdata.Suggestion)\n\t\tsugg.MT.Meal = meal\n\n\t\tvar csd sql.NullInt64\n\n\t\terr = rows.Scan(&meal.ID, &meal.Name, &meal.RecipeURL, &meal.Favourite, &csd)\n\t\tif err != nil 
{\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif csd.Valid && csd.Int64 != 0 {\n\t\t\tsugg.CSD = int(csd.Int64)\n\t\t} else {\n\t\t\tsugg.CSD = -1\n\t\t}\n\n\t\tsuggs = append(suggs, sugg)\n\t}\n\n\terr = rows.Err()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn suggs, nil\n}", "func (s *runnablesrvc) List(ctx context.Context, p *runnable.ListPayload) (res []*runnable.Runnable, err error) {\n\ts.logger.Print(\"runnable.list\")\n\tidQuery := \"\"\n\tif p.ID != nil {\n\t\tidQuery = *p.ID\n\t}\n\tkindQuery := \"\"\n\tif p.Kind != nil {\n\t\tkindQuery = *p.Kind\n\t}\n\titems, err := s.store.Find(ctx, idQuery, kindQuery, p.Labels)\n\tres = make([]*runnable.Runnable, 0, len(items))\n\tfor _, r := range items {\n\t\tres = append(res, runnableDomainToRest(r))\n\t}\n\treturn res, err\n}", "func (db *DB) List(glob string) ([]Entry, error) {\n\t// FIXME: first-pass - ignore glob\n\tmappings, err := db.readDB()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Extract codes and sort\n\tcodes := make([]string, len(mappings))\n\ti := 0\n\tfor code := range mappings {\n\t\tcodes[i] = code\n\t\ti++\n\t}\n\tsort.Strings(codes)\n\n\t// Compile entries\n\tvar entries = make([]Entry, len(mappings))\n\ti = 0\n\tfor _, code := range codes {\n\t\tentries[i] = Entry{Code: code, Url: mappings[code]}\n\t\ti++\n\t}\n\n\treturn entries, nil\n}", "func listEmployees() {\n\tif emptyEmployeeDB() {\n\t\tfmt.Println(\"EmployeeDB Empty\")\n\t} else {\n\t\tfmt.Println(\"# Employee Name Age Salary\")\n\t\tfmt.Println(\"===================================================\")\n\t\tfor i, n := 1, edb.elist.head; n != nil; i, n = i+1, n.next {\n\t\t\tfmt.Printf(\"\\n %2d. 
%-20s %-10d %10s\", i, n.emp.name, n.emp.age, int32InsertComma(n.emp.salary))\n\t\t}\n\t\tfmt.Printf(\"\\n\");\n\t}\t\n}", "func getLoan(l *models.Loan, db *gorm.DB) error {\n\terr := db.Select(\"id,created_at,updated_at,initial_value,interest,quota,balance,cod_loan_state,cod_client,cod_collection,cod_user\").First(l).GetErrors()\n\tif len(err) != 0 {\n\t\treturn errors.New(\"no se encuentra\")\n\t}\n\treturn nil\n}", "func (hm *HelmManager) ListChart(ctx context.Context,\n\treq *helmmanager.ListChartReq, resp *helmmanager.ListChartResp) error {\n\n\tdefer recorder(ctx, \"ListChart\", req, resp)()\n\taction := actionChart.NewListChartAction(hm.model, hm.platform)\n\treturn action.Handle(ctx, req, resp)\n}" ]
[ "0.6793589", "0.63662523", "0.63374996", "0.6103", "0.59037894", "0.5617607", "0.550038", "0.52984416", "0.51921606", "0.4919258", "0.49057636", "0.48713672", "0.4870926", "0.48581", "0.48579657", "0.48564038", "0.4812534", "0.47780845", "0.47510096", "0.47248134", "0.47170433", "0.46910217", "0.4679411", "0.46563303", "0.46525595", "0.4645543", "0.46262428", "0.46204937", "0.4609981", "0.46041298", "0.46003252", "0.45909652", "0.4578383", "0.45680794", "0.4556645", "0.45406154", "0.45404395", "0.4534347", "0.45275545", "0.45146295", "0.45026466", "0.4502466", "0.4499201", "0.44955045", "0.4494246", "0.44932082", "0.44827986", "0.44769278", "0.44759554", "0.44748795", "0.44660264", "0.44546634", "0.4438871", "0.44331825", "0.44328007", "0.44263557", "0.44096845", "0.4399762", "0.43982908", "0.43908945", "0.43845364", "0.43812016", "0.43664455", "0.4351241", "0.4350863", "0.43450844", "0.433351", "0.43331686", "0.4329955", "0.4329618", "0.43294686", "0.43251765", "0.43226346", "0.4320295", "0.43188134", "0.43164867", "0.4305867", "0.43017134", "0.4301154", "0.42933145", "0.42922", "0.42913038", "0.4290606", "0.4290242", "0.42869967", "0.42824686", "0.4278769", "0.42703488", "0.4263401", "0.42632785", "0.42627934", "0.42624176", "0.4261058", "0.42558676", "0.42461044", "0.42453474", "0.42377254", "0.4233173", "0.42311534", "0.42306677" ]
0.8378897
0
Rank returns how many nodes less than max value.
func (t *Tree) Rank(max int) int { return rank(t.Tree, max) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (t *Tree) Rank(val float64) int {\n\treturn t.root.Rank(val)\n}", "func (e *Election) Rank() []int {\n\tres := make([]int, e.N)\n\tfor i := 0; i < e.N; i++ {\n\t\tfor j := i + 1; j < e.N; j++ {\n\t\t\tr := e.cmp(i, j)\n\t\t\tif r < 0 {\n\t\t\t\tres[i]++\n\t\t\t} else if r > 0 {\n\t\t\t\tres[j]++\n\t\t\t}\n\t\t}\n\t}\n\treturn res\n}", "func pageRank(n int64) int {\n\tif n <= maxSlot {\n\t\tpanic(fmt.Errorf(\"internal error: pageRank(%v)\", n))\n\t}\n\n\tr := int(roundup64(n, pageSize)>>pageBits) + 6\n\tif r >= ranks {\n\t\tr = ranks - 1\n\t}\n\treturn r\n}", "func (lt sdkLinkType) rank() int {\n\treturn int(lt)\n}", "func (alg *TopologicalSorter[V]) Rank(vtx V) (int, bool) {\n\tr, ok := alg.ranks[vtx]\n\treturn r, ok\n}", "func (g *graph) find_max_value(chk *checklist) int {\n\tcurrent := 0\n\tidx := -1\n\tfor i,c := range chk.nodes_count {\n\t\tif c > current {\n\t\t\tidx = i\n\t\t\tcurrent = c\n\t\t}\n\t}\n\tif idx >= 0 { chk.nodes_count[idx] = -1 }\n\treturn idx\n}", "func RankLTE(v int) predicate.Transactionfactoritemtmp {\n\treturn predicate.Transactionfactoritemtmp(func(s *sql.Selector) {\n\t\ts.Where(sql.LTE(s.C(FieldRank), v))\n\t})\n}", "func MapRankToScore(rank, size int) float64 { return float64(size - rank) }", "func GetRank(name string) int {\n\trank := 0\n\n\tfor point := len(ranks)-1; point >= 0; point-- {\n\t\tif len(ranks[point]) > 0 {\n\t\t\trank++\n\n\t\t\tif found, _, _ := Search(ranks[point], name); found {\n\t\t\t\treturn rank\n\n\t\t\t}\n\n\t\t}\n\t}\n\n\treturn rank\n}", "func (z *Skiplist) RankOfLastInRange(spec RangeSpec) int {\n\tif !z.isInRange(spec) {\n\t\treturn -1\n\t}\n\n\tlastNodeRank := -1\n\tx := z.head\n\tfor i := z.level - 1; i >= 0; i-- {\n\t\tfor x.level[i].forward != nil && spec.lteMax(x.level[i].forward.ordered) {\n\t\t\tlastNodeRank += x.level[i].span\n\t\t\tx = x.level[i].forward\n\t\t}\n\t}\n\n\tif !spec.gteMin(x.ordered) {\n\t\treturn -1\n\t}\n\n\treturn lastNodeRank\n}", "func (wf WindowFrame) Rank() int 
{\n\treturn wf.RowIdx + 1\n}", "func (r *SlidingWindow) Max() int {return r.base + len(r.values) - 1}", "func CalcRank(n *Node, parent *Node, position int) (*Node, error) {\n\t//\n\tr := uint32(1)\n\tfor i := 0; i < n.ChildCount(); i++ {\n\t\tch, ok := n.Child(i)\n\t\tif ok {\n\t\t\tr += ch.Rank\n\t\t}\n\t}\n\tn.Rank = r\n\treturn n, nil\n}", "func (m *SecureScoreControlProfile) GetRank()(*int32) {\n return m.rank\n}", "func IntRank(x []int, r []int) []int {\n\treturn Rank(make_int_index_slice(x), r)\n}", "func (m *SecureScoreControlProfile) GetRank()(*int32) {\n val, err := m.GetBackingStore().Get(\"rank\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*int32)\n }\n return nil\n}", "func Rank(scope *Scope, input tf.Output) (output tf.Output) {\n\tif scope.Err() != nil {\n\t\treturn\n\t}\n\topspec := tf.OpSpec{\n\t\tType: \"Rank\",\n\t\tInput: []tf.Input{\n\t\t\tinput,\n\t\t},\n\t}\n\top := scope.AddOperation(opspec)\n\treturn op.Output(0)\n}", "func (m *UserMutation) Rank() (r float64, exists bool) {\n\tv := m.rank\n\tif v == nil {\n\t\treturn\n\t}\n\treturn *v, true\n}", "func (rn *RangedNumber) Max() int {\n\tif rn.min > rn.max {\n\t\trn.Set(rn.max, rn.min)\n\t}\n\n\treturn rn.max\n}", "func Rank(v int) predicate.Transactionfactoritemtmp {\n\treturn predicate.Transactionfactoritemtmp(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldRank), v))\n\t})\n}", "func (p *P1D) Rank() int {\n\treturn 1\n}", "func (n *Node) Max() int {\n\tif n.Right == nil {\n\t\treturn n.Key\n\t}\n\n\treturn n.Right.Max()\n}", "func (t *Tree) Rank(rank int) (id int32, node *Node) {\n\treturn lookup_node(t.root, rank)\n}", "func GetRank(rank int) *RankData {\n\tfor _, rankData := range ranks {\n\t\tif rank < rankData.NextRank || rankData.NextRank < 0 {\n\t\t\treturn rankData\n\t\t}\n\t}\n\treturn nil\n}", "func (z *zset) Rank(member string) int {\n\tscore, ex := z.tbl[member]\n\tif !ex {\n\t\treturn 0\n\t}\n\tvar obj C.slobj\n\tobj.ptr, obj.length = 
tocstring(member)\n\trank := C.slGetRank(z.sl, C.double(score), &obj)\n\treturn int(rank)\n}", "func IntStableRank(x []int, r []int) []int {\n\treturn StableRank(make_int_index_slice(x), r)\n}", "func (r GroupSortedSet) ZRevRank(ctx context.Context, key string, member interface{}) (int64, error) {\n\tv, err := r.redis.Do(ctx, \"ZRevRank\", key, member)\n\treturn v.Int64(), err\n}", "func (BinP1D) Rank() int { return 1 }", "func (z *Skiplist) RankOfFirstInRange(spec RangeSpec) int {\n\t_, firstNodeRanks := z.firstNodeInRange(spec)\n\tif firstNodeRanks == nil {\n\t\treturn -1\n\t}\n\treturn firstNodeRanks[0]\n}", "func (e *Election) Condorcet() int {\n\tmax := 0\n\timax := 0\n\te.R = e.Rank()\n\tfor i, v := range e.R {\n\t\tif v > max {\n\t\t\timax = i\n\t\t\tmax = v\n\t\t}\n\t}\n\n\tf := false\n\tfor _, v := range e.R {\n\t\tif v == max {\n\t\t\tif f {\n\t\t\t\treturn -1\n\t\t\t}\n\t\t\tf = true\n\t\t}\n\t}\n\n\treturn imax\n}", "func slotRank(n int) int {\n\tif n < 1 || n > 1024 {\n\t\tpanic(fmt.Errorf(\"internal error: slotRank(%v)\", n))\n\t}\n\n\treturn log(roundup(n, allocAllign)) - 4\n}", "func (treeNode *TreeNode) FindMax() int {\n\tif treeNode.right == nil {\n\t\treturn treeNode.value\n\t}\n\n\treturn treeNode.right.FindMax()\n}", "func (r *ImageRef) Rank(width int, height int, index int) error {\n\tout, err := vipsRank(r.image, width, height, index)\n\tif err != nil {\n\t\treturn err\n\t}\n\tr.setImage(out)\n\treturn nil\n}", "func (ap *AnimeParser) getRank(eachTop *goquery.Selection) int {\n\trank := eachTop.Find(\"td\").First().Find(\"span\").Text()\n\treturn utils.StrToNum(rank)\n}", "func (t *Tree) Size(lo, hi int) int {\n\tresult := rank(t.Tree, hi) - rank(t.Tree, lo)\n\tif contains(t.Tree, hi) {\n\t\tresult++\n\t}\n\treturn result\n}", "func (self *Limits) Maximum() uint32 {\n\treturn uint32(self.inner().max)\n}", "func Rank(comm Comm) (int, error) {\n\tvar r C.int\n\tperr := C.MPI_Comm_rank(C.MPI_Comm(comm), &r)\n\tif perr != 0 {\n\t\treturn -1, 
errors.New(\"Error calling MPI_Comm_rank\")\n\t}\n\treturn int(r), nil\n}", "func (r GroupSortedSet) ZRank(ctx context.Context, key string, member interface{}) (int64, error) {\n\tv, err := r.redis.Do(ctx, \"ZRank\", key, member)\n\treturn v.Int64(), err\n}", "func (h *MaxHeap) Max() int {\n\tif h.IsEmpty() {\n\t\treturn -1\n\t}\n\treturn h.data[1]\n}", "func rankedWeaponPoints(skillLevel int, rank int, cfg npcdefs.NPCCfg) int {\n\tif rank >= maxWeaponCount {\n\t\treturn 0\n\t}\n\n\t// Calculate points assuming rank 0.\n\tpoints := itemPoints(skillLevel, cfg.WeaponPPLMin, cfg.WeaponPPLMax)\n\n\t// Subtract points for higher ranked weapons.\n\tpoints -= int(float64(rank) * weaponRankDifference)\n\n\tif points < 0 {\n\t\tpoints = 0\n\t}\n\n\treturn points\n}", "func FloatRank(x []float64, r []int) []int {\n\treturn Rank(make_float64_index_slice(x), r)\n}", "func (cc Counter) Rank() []RGB {\n\tcolors := []RGB{}\n\tfor rgb := range cc {\n\t\tcolors = append(colors, rgb)\n\t}\n\tless := func(i, j int) bool {\n\t\tiColor := colors[i]\n\t\tjColor := colors[j]\n\t\tiCount := cc[iColor]\n\t\tjCount := cc[jColor]\n\t\treturn iCount < jCount\n\t}\n\tsort.Slice(colors, less)\n\treturn colors\n}", "func (r Results) Max() int {\n\tmax := 0\n\n\tfor _, result := range r {\n\t\tm := result.Max()\n\t\tif m > max {\n\t\t\tmax = m\n\t\t}\n\t}\n\n\treturn max\n}", "func (bst *BinarySearch) Max() (int, error) {\n\tbst.lock.RLock()\n\tdefer bst.lock.RUnlock()\n\n\tn := bst.root\n\tif n == nil {\n\t\treturn 0, fmt.Errorf(\"max: no nodes exist in tree\")\n\t}\n\tfor {\n\t\tif n.right == nil {\n\t\t\treturn n.value, nil\n\t\t}\n\t\tn = n.right\n\t}\n}", "func (sm *scoreMemberMap) count(min, max float64) int {\n\tn := 0\n\tfor cur := sm.head; cur != nil && cur.score <= max; cur = cur.next {\n\t\tif cur.score >= min {\n\t\t\tn += len(cur.members)\n\t\t}\n\t}\n\treturn n\n}", "func Max(Len int, Less func(i, j int) bool) int {\n\tmx := 0\n\tfor i := 1; i < Len; i++ {\n\t\tif Less(mx, i) 
{\n\t\t\tmx = i\n\t\t}\n\t}\n\treturn mx\n}", "func (tree *Tree23) minmaxDepth(t TreeNodeIndex) (int, int) {\n\tif tree.IsEmpty(t) {\n\t\treturn 0, 0\n\t}\n\tif tree.IsLeaf(t) {\n\t\treturn 1, 1\n\t}\n\tdepthMin := -1\n\tdepthMax := -1\n\n\tfor i := 0; i < tree.treeNodes[t].cCount; i++ {\n\t\tc := tree.treeNodes[t].children[i]\n\t\tmin, max := tree.minmaxDepth(c.child)\n\t\tif depthMin == -1 || min < depthMin {\n\t\t\tdepthMin = min + 1\n\t\t}\n\t\tif depthMax == -1 || max > depthMax {\n\t\t\tdepthMax = max + 1\n\t\t}\n\t}\n\treturn depthMin, depthMax\n}", "func (px *Paxos) Max() int {\n\t// Your code here.\n\n\treturn len(px.acceptor) - 1\n}", "func minScore(hand ...deck.Card) int {\n\tscore := 0\n\tfor _, c := range hand {\n\t\t// because J, Q, K has rank 11, 12, 13..\n\t\t// we'll either add 10 or less than 10\n\t\tscore += min(int(c.Rank), 10)\n\t}\n\treturn score\n}", "func (ng *NodeGroup) MaxSize() int {\n\treturn int(ng.MaxNodes)\n}", "func RankGT(v int) predicate.Transactionfactoritemtmp {\n\treturn predicate.Transactionfactoritemtmp(func(s *sql.Selector) {\n\t\ts.Where(sql.GT(s.C(FieldRank), v))\n\t})\n}", "func Test_GetRank(t *testing.T) {\n\n\t// 项目开始时间 2017-06-01\n\tprojectStartTime, _ := time.Parse(\"2006-01-02\", \"2017-06-01\")\n\tfund := projectStartTime.Unix() - 8*3600\n\tsurvivalTime := timestamp - fund\n\n\t// 投票方向与时间造成的系数差\n\tvar timeMagin int64\n\tif voteDiff > 0 {\n\t\ttimeMagin = survivalTime / 45000\n\t} else if voteDiff < 0 {\n\t\ttimeMagin = -1 * survivalTime / 45000\n\t} else {\n\t\ttimeMagin = 0\n\t}\n\n\tvateMagin := math.Log10(voteDispute)\n\n\t//详细算法\n\tsocre := vateMagin + float64(timeMagin)\n}", "func MRRank(p []int) (r int) {\n\tp = append([]int{}, p...)\n\tinv := inverse(p)\n\tfor i := len(p) - 1; i > 0; i-- {\n\t\ts := p[i]\n\t\tp[inv[i]] = s\n\t\tinv[s] = inv[i]\n\t}\n\tfor i := 1; i < len(p); i++ {\n\t\tr = r*(i+1) + p[i]\n\t}\n\treturn\n}", "func (px *Paxos) Max() int {\n\t// Your code here.\n\tmax := -1\n\thead := 
px.prepareStatus.Head\n\tfor head.Next != nil {\n\t\tstate := head.Next\n\t\tif max < state.Seq {\n\t\t\tmax = state.Seq\n\t\t}\n\t\thead = head.Next\n\t}\n\treturn max\n}", "func (o *ClusterRequest) GetMaxRunningNodes() int32 {\n\tif o == nil || o.MaxRunningNodes == nil {\n\t\tvar ret int32\n\t\treturn ret\n\t}\n\treturn *o.MaxRunningNodes\n}", "func maxDepth(n int) types.Depth {\r\n\tvar depth types.Depth\r\n\tfor i := n; i > 0; i >>= 1 {\r\n\t\tdepth++\r\n\t}\r\n\treturn depth * 2\r\n}", "func (obj *set) Rank() SetRank {\n\treturn obj.rank\n}", "func (n *Network) MaxDepth() (int, error) {\n\tmax := 0 // The max depth\n\tfor _, node := range n.Outputs {\n\t\tcurr_depth, err := node.Depth(0)\n\t\tif err != nil {\n\t\t\treturn curr_depth, err\n\t\t}\n\t\tif curr_depth > max {\n\t\t\tmax = curr_depth\n\t\t}\n\t}\n\treturn max, nil\n}", "func maxOfMinSlidingWindows(arr []int, k int) {\n\tsize := len(arr)\n\tque := new(Queue)\n\tmaxVal := math.MinInt32\n\ti := 0\n\tfor i < size {\n\t\t// Remove out of range elements\n\t\tif que.Len() > 0 && que.Front().(int) <= i-k {\n\t\t\tque.Remove()\n\t\t}\n\t\t// Remove smaller values at left.\n\t\tfor que.Len() > 0 && arr[que.Back().(int)] >= arr[i] {\n\t\t\tque.RemoveBack()\n\t\t}\n\t\tque.Add(i)\n\t\t// window of size k\n\t\tif i >= (k-1) && maxVal < arr[que.Front().(int)] {\n\t\t\tmaxVal = arr[que.Front().(int)]\n\t\t}\n\t\ti += 1\n\t}\n\tfmt.Println(\"Max of min is:\", maxVal)\n}", "func (tree *Tree23) max(t TreeNodeIndex) float64 {\n\tif tree.IsLeaf(t) {\n\t\treturn tree.treeNodes[t].elem.ExtractValue()\n\t}\n\tc := tree.treeNodes[t].cCount - 1\n\treturn tree.treeNodes[t].children[c].maxChild\n}", "func (sm *StackMax) Max() (int, error) {\n\tif sm.Empty() {\n\t\treturn -1, ErrstackEmpty\n\t}\n\treturn sm.maxer[sm.length-1], nil\n}", "func max(d dataSet) int {\n\treturn d[len(d)-1]\n}", "func (m *SecureScoreControlProfile) SetRank(value *int32)() {\n m.rank = value\n}", "func GetNumberOfNodes() int64 {\r\n\treturn 
int64(len(nodesByPosition))\r\n}", "func (fn *formulaFuncs) RANK(argsList *list.List) formulaArg {\n\treturn fn.rank(\"RANK\", argsList)\n}", "func rmax(data ArrType) float64 {\r\n\tvar i, idxCari, idxMax int\r\n\ti = 0\r\n\tidxCari = 1\r\n\tidxMax = 0\r\n\tfor i < N-1 { // N-1 karena idxCari = i + 1 sehingga data terakhir masih diperhitungkan\r\n\t\tif data[idxCari].f3 > data[idxMax].f3 {\r\n\t\t\tidxMax = idxCari\r\n\t\t}\r\n\t\tidxCari++\r\n\t\ti++\r\n\t}\r\n\treturn data[idxMax].f3\r\n}", "func (tree *BinarySearchTree) MaxNode() *int {\n\ttree.lock.RLock()\n\tdefer tree.lock.RUnlock()\n\tvar treeNode *TreeNode\n\ttreeNode = tree.rootNode\n\tif treeNode == nil {\n\t\t//nil instead of 0\n\t\treturn (*int)(nil)\n\t}\n\tfor {\n\t\tif treeNode.rightNode == nil {\n\t\t\treturn &treeNode.value\n\t\t}\n\t\ttreeNode = treeNode.rightNode\n\t}\n}", "func (px *Paxos) Max() int {\n\tkeys := px.sortedSeqs()\n\tif len(keys) == 0 {\n\t\treturn -1\n\t} else {\n\t\tsort.Ints(keys)\n\t}\n\treturn keys[len(keys)-1]\n}", "func (r Result) Max() int {\n\treturn len(r.Ints()) * r.Die().Max().N\n}", "func (px *Paxos) Max() int {\n\t// Your code here.\n\treturn px.max\n}", "func (n *hetznerNodeGroup) MaxSize() int {\n\treturn n.maxSize\n}", "func FetchMax(t *treeNode) int {\n\tif t.Right == nil {\n\t\treturn t.Value\n\t}\n\treturn FetchMax(t.Right)\n}", "func max(x int) int {\n\treturn 40 + x\n}", "func getOffsetNodeCount(nodeCount uint64, offset int64, rounder func(float64) float64) uint64 {\n\treturn uint64(int64(nodeCount) + int64(rounder(float64(nodeCount)*float64(offset)/100)))\n}", "func (v *VEBTree) Maximum() int {\n\treturn v.max\n}", "func (s *Stack) Max() (int, error) {\n\tif s.Empty() {\n\t\treturn -1, ErrstackEmpty\n\t}\n\n\thelpStack := Stack{}\n\tmax, _ := s.Top()\n\n\tfor !s.Empty() {\n\t\ttop, _ := s.Pop()\n\t\tif top > max {\n\t\t\tmax = top\n\t\t}\n\t\thelpStack.Push(top)\n\t}\n\n\tfor !helpStack.Empty() {\n\t\ttop, _ := 
helpStack.Pop()\n\t\ts.Push(top)\n\t}\n\n\treturn max, nil\n}", "func RankWatchlistLTE(v int) predicate.Watchlist {\n\treturn predicate.Watchlist(func(s *sql.Selector) {\n\t\ts.Where(sql.LTE(s.C(FieldRankWatchlist), v))\n\t})\n}", "func (lc *LineChart) maxXValue() int {\n\tmaxLen := 0\n\tfor _, sv := range lc.series {\n\t\tif l := len(sv.values); l > maxLen {\n\t\t\tmaxLen = l\n\t\t}\n\t}\n\tif maxLen == 0 {\n\t\treturn 0\n\t}\n\treturn maxLen - 1\n}", "func (np *NodePool) MaxNodes() int32 {\n\treturn np.maxNodes\n}", "func (rr rowRangeSlice) maxEntry() int {\n\tif len(rr) == 0 {\n\t\treturn math.MaxInt64\n\t}\n\n\tvar max int\n\tfor _, r := range rr {\n\t\tif max < r.e {\n\t\t\tmax = r.e\n\t\t}\n\t}\n\treturn max\n}", "func (px *Paxos) Max() int {\n\t// Your code here.\n\treturn px.curMax\n}", "func (s *Stat) GetMax() float64 {\n\tif s.n <= 0 {\n\t\treturn 0.0\n\t}\n\treturn s.max\n}", "func getMinMaxScores(scores framework.NodeScoreList) (int64, int64) {\n\tvar max int64 = math.MinInt64 // Set to min value\n\tvar min int64 = math.MaxInt64 // Set to max value\n\n\tfor _, nodeScore := range scores {\n\t\tif nodeScore.Score > max {\n\t\t\tmax = nodeScore.Score\n\t\t}\n\t\tif nodeScore.Score < min {\n\t\t\tmin = nodeScore.Score\n\t\t}\n\t}\n\t// return min and max scores\n\treturn min, max\n}", "func findMax(number []int, max int) (int, func() []int) {\n\tvar res []int\n\tfor _, p := range number {\n\t\tif p <= max {\n\t\t\tres = append(res, p)\n\t\t}\n\t}\n\n\treturn len(res), func() []int {\n\t\treturn res\n\t}\n}", "func FindKthMax(nums []int, k int) (int, error) {\n\tindex := len(nums) - k\n\treturn kthNumber(nums, index)\n}", "func MinMax(x, min, max int) int { return x }", "func maxDepth(root *TreeNode) int {\n\tif root == nil {\n\t\treturn 0\n\t}\n\n\treturn max(maxDepth(root.Left), maxDepth(root.Right)) + 1\n}", "func (clus *Cluster) maxRev() (rev int64, err error) {\n\tctx, cancel := context.WithTimeout(context.TODO(), time.Second)\n\tdefer cancel()\n\trevc, 
errc := make(chan int64, len(clus.Members)), make(chan error, len(clus.Members))\n\tfor i := range clus.Members {\n\t\tgo func(m *rpcpb.Member) {\n\t\t\tmrev, merr := m.Rev(ctx)\n\t\t\trevc <- mrev\n\t\t\terrc <- merr\n\t\t}(clus.Members[i])\n\t}\n\tfor i := 0; i < len(clus.Members); i++ {\n\t\tif merr := <-errc; merr != nil {\n\t\t\terr = merr\n\t\t}\n\t\tif mrev := <-revc; mrev > rev {\n\t\t\trev = mrev\n\t\t}\n\t}\n\treturn rev, err\n}", "func Max(x, y int) int {\n\tif x < y {\n\t\treturn y\n\t}\n\treturn x\n}", "func max(v ...int) int {\n\tout := 0\n\tfor i := range v {\n\t\tif v[i] > out {\n\t\t\tout = v[i]\n\t\t}\n\t}\n\treturn out\n}", "func GlobalRank(url string) (string, error) {\n\telement := get(url, \"POPULARITY\")\n\tif len(element.Attr) >= 2 {\n\t\treturn element.Attr[1].Value, nil\n\t} else {\n\t\treturn \"No rank\", nil\n\t}\n}", "func (r *Ranker) Rank(layerName string, props geojson.Properties) int {\n\tlayer, ok := r.matchers[layerName]\n\tif !ok {\n\t\treturn r.catchAll\n\t}\n\n\tmatchers, ok := layer[props.MustString(\"kind\", \"\")]\n\tif !ok {\n\t\treturn r.catchAll\n\t}\n\tmatchers = append(matchers, layer[\"\"]...) 
// include matchers with no kind\n\n\tfor _, m := range matchers {\n\t\tif m.Eval(props) {\n\t\t\treturn m.rank\n\t\t}\n\t}\n\n\treturn r.catchAll\n}", "func CalculatePageRank(d float64, pages *map[string]*crawler.Page) {\n\tfor i := 0; i < 1000; i++ {\n\t\t// converge := make([]bool, 0)\n\t\t// for i := 0; i < len(*pages); i++ {\n\t\t// \tconverge[i] = false\n\t\t// }\n\t\tfor _, page := range *pages {\n\t\t\tvar myRank float64 = 1 - d // value for page rank\n\t\t\tvar runningSum float64 = 0 // running sum for probablity from its parents\n\t\t\tfor _, p := range page.GetParentURL() {\n\t\t\t\tparentPage, ok := (*pages)[p]\n\t\t\t\tif ok {\n\t\t\t\t\tvar parentPR float64 = parentPage.GetPageRank()\n\t\t\t\t\tparentTotalChild := float64(len(parentPage.GetChildrenURL()))\n\t\t\t\t\trunningSum += (parentPR / parentTotalChild)\n\t\t\t\t}\n\t\t\t}\n\t\t\tmyRank = myRank + d*runningSum\n\t\t\tdifference := myRank - page.GetPageRank()\n\t\t\tif math.Abs(difference) < 0.00000000000005 { // showing signs of converging\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tpage.SetRank(myRank)\n\t\t}\n\t}\n}", "func maxMin(k int32, arr []int32) int32 {\n\n\tif k <= 1 {\n\t\treturn 0\n\t}\n\tsort.Slice(arr, func(i, j int) bool {\n\t\treturn arr[i] > arr[j]\n\t})\n\n\tmaxval := int32(^(uint32(0)) >> 1)\n\tminUnfairness := maxval\n\n\tfor i, num := range arr {\n\t\tif i+int(k) > len(arr) {\n\t\t\tbreak\n\t\t}\n\t\tcurrUnfairness := num - arr[i+int(k)-1]\n\t\tif currUnfairness < minUnfairness {\n\t\t\tminUnfairness = currUnfairness\n\t\t}\n\t}\n\n\tif minUnfairness == maxval {\n\t\treturn 0\n\t}\n\treturn minUnfairness\n\n}", "func (o KubernetesClusterDefaultNodePoolPtrOutput) MaxCount() pulumi.IntPtrOutput {\n\treturn o.ApplyT(func(v *KubernetesClusterDefaultNodePool) *int {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.MaxCount\n\t}).(pulumi.IntPtrOutput)\n}", "func nmax(a ...int) int {\n\tret := a[0]\n\tfor _, e := range a {\n\t\tret = max(ret, e)\n\t}\n\treturn ret\n}", "func nmax(a 
...int) int {\n\tret := a[0]\n\tfor _, e := range a {\n\t\tret = max(ret, e)\n\t}\n\treturn ret\n}", "func nmax(a ...int) int {\n\tret := a[0]\n\tfor _, e := range a {\n\t\tret = max(ret, e)\n\t}\n\treturn ret\n}", "func nmax(a ...int) int {\n\tret := a[0]\n\tfor _, e := range a {\n\t\tret = max(ret, e)\n\t}\n\treturn ret\n}", "func nmax(a ...int) int {\n\tret := a[0]\n\tfor _, e := range a {\n\t\tret = max(ret, e)\n\t}\n\treturn ret\n}" ]
[ "0.67093927", "0.65358585", "0.64453703", "0.62586796", "0.62451345", "0.62282676", "0.6174461", "0.6084651", "0.59903634", "0.5844654", "0.5832536", "0.580517", "0.567707", "0.561967", "0.5608135", "0.5576382", "0.55679417", "0.5543277", "0.5539918", "0.550763", "0.5503924", "0.5482265", "0.5467747", "0.5434056", "0.541868", "0.5413205", "0.5380868", "0.536446", "0.5347447", "0.53414875", "0.5317133", "0.5311805", "0.527282", "0.5269417", "0.52532184", "0.5247673", "0.52436334", "0.5237889", "0.52369064", "0.52315706", "0.52305335", "0.5228583", "0.52278197", "0.5179761", "0.51662517", "0.51563627", "0.5139808", "0.5137266", "0.5134971", "0.5132026", "0.5089774", "0.50761294", "0.50752497", "0.5066048", "0.50636744", "0.5061616", "0.50584537", "0.50509316", "0.5025953", "0.5023258", "0.5017729", "0.5015886", "0.5013042", "0.5011153", "0.5009476", "0.5003707", "0.500265", "0.4996488", "0.4995252", "0.49931136", "0.49888575", "0.4977312", "0.49613696", "0.4960196", "0.49579227", "0.49539647", "0.49488625", "0.49470156", "0.49398068", "0.49173462", "0.49152306", "0.4908905", "0.48988512", "0.4877898", "0.4870866", "0.48635316", "0.4858513", "0.48486316", "0.4843581", "0.48367274", "0.4835198", "0.48324525", "0.4822173", "0.48205993", "0.48190722", "0.48178476", "0.48178476", "0.48178476", "0.48178476", "0.48178476" ]
0.7792387
0
Size returns how many nodes between range
func (t *Tree) Size(lo, hi int) int { result := rank(t.Tree, hi) - rank(t.Tree, lo) if contains(t.Tree, hi) { result++ } return result }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (g *Graph) Size() int { return len(g.nodes) }", "func (n *TreeBuilderNode) Size() uint64 { return n.size }", "func (n Nodes) Len() int", "func (h *hashRing) Size() int {\n\treturn len(h.nodes)\n}", "func (n nodes) Len() int { return len(n) }", "func (lb *LoadBalancer) Size() int {\n\treturn len(lb.nodes)\n}", "func (node *Node) Size() int {\n\tif node == nil {\n\t\treturn 0\n\t}\n\tsize := 1\n\tif node.Left != nil {\n\t\tsize += node.Left.Size()\n\t}\n\tif node.Right != nil {\n\t\tsize += node.Right.Size()\n\t}\n\treturn size\n}", "func Size(node *Node) int {\n\tids := make(map[NodeId]bool)\n\tqueue := []*Node{node}\n\tfor len(queue) > 0 {\n\t\tnode = queue[0]\n\t\tqueue = queue[1:]\n\t\tids[node.Id] = true\n\t\tfor _, t := range node.Transitions {\n\t\t\tqueue = append(queue, t.Child)\n\t\t}\n\t}\n\treturn len(ids)\n}", "func (ons *orderedNodeSet) size() int {\n\treturn len(ons.nodes)\n}", "func (t *BinaryTree) Size() int { return t.count }", "func RangeSize(begin, end uint64) int {\n\tleft, right := Decompose(begin, end)\n\treturn bits.OnesCount64(left) + bits.OnesCount64(right)\n}", "func (t *binarySearchST) RangeSize(lo, hi interface{}) int {\n\tutils.AssertF(lo != nil && hi != nil, \"invalid lo or hi\")\n\n\tif utils.Less(hi, lo) {\n\t\treturn 0\n\t}\n\tleft := t.Rank(lo)\n\tright := t.Rank(hi)\n\tif t.Contains(hi) {\n\t\tright += 1\n\t}\n\treturn right - left\n}", "func (n *Node) Size() int {\n\tif n == nil {\n\t\treturn 0\n\t}\n\treturn n.size\n}", "func (chunk *ResultNodeListChunk) Size() int {\n\treturn len(chunk.nodes)\n}", "func (ns Nodes) Length() int {\n\treturn len(ns)\n}", "func (g *Graph) GetSize() int {\n\treturn len(g.nodes)\n}", "func nodeWidth(n uint) uint {\n\treturn 2*(n-1) + 1\n}", "func (id *NodeID) Size() int {\n\treturn len(id)\n}", "func (EmptyNode) Size() int { return 0 }", "func (n node) GetSize() int {\n\treturn n.size\n}", "func (t *RbTree[K, V]) Size() int {\n\treturn t.size\n}", "func Size(g EdgeEnumerator) int {\n\tif c, ok 
:= g.(EdgeCounter); ok {\n\t\treturn c.Size()\n\t} else {\n\t\tvar size int\n\t\tg.Edges(func(e Edge) (terminate bool) {\n\t\t\tsize++\n\t\t\treturn\n\t\t})\n\t\treturn size\n\t}\n}", "func (n *RuleTreeNode) Size() int {\n\ts := len(n.Rules)\n\tfor _, c := range n.Children {\n\t\ts += c.Size()\n\t}\n\treturn s\n}", "func subtreeSize(x uint, n uint) uint {\n\tw := nodeWidth(n)\n\tlr := uint((1 << level(x)) - 1)\n\trr := uint(lr)\n\tif x+rr >= w {\n\t\trr = w - x - 1\n\t}\n\n\treturn (lr+rr)/2 + 1\n}", "func (t *RedBlackTree) Size() int {\n\treturn t.size\n}", "func GetNumberOfNodes() int64 {\r\n\treturn int64(len(nodesByPosition))\r\n}", "func (s *nodeSet) size() int {\n\treturn len(s.itemMap)\n}", "func (i *IpldRawNode) Size() (uint64, error) {\n\treturn 0, nil\n}", "func (d *Dtrie) Size() (size int) {\n\tfor _ = range iterate(d.root, nil) {\n\t\tsize++\n\t}\n\treturn size\n}", "func (n Nodes) Len() int {\n\treturn len(n)\n}", "func (rs Ranges) Size() (size int64) {\n\tfor _, r := range rs {\n\t\tsize += r.Size\n\t}\n\treturn size\n}", "func (n *ModuleTreeNode) Size() int {\n\ts := len(n.Modules)\n\tfor _, c := range n.Children {\n\t\ts += c.Size()\n\t}\n\treturn s\n}", "func (tree *UTree) Size() int {\r\n\treturn tree.size\r\n}", "func (qt *Quadtree) Size() int {\n\treturn qt.size\n}", "func (r Range) Size() int {\n\treturn r.length\n}", "func (r *Ring) Size() int {\n\treturn (r.in - r.out + r.size) % r.size\n}", "func (n Nodes) Len() int {\n\treturn len(n.nodes)\n}", "func (*Root) Size() int64 { return 0 }", "func (r *RoutingTable) Size() int {\n\tr.mutex.RLock()\n\tdefer r.mutex.RUnlock()\n\n\tvar n int\n\tfor i := 0; i < len(r.Buckets); i++ {\n\t\tn += r.Buckets[i].Len()\n\t}\n\treturn n\n}", "func Size(root *BinaryNode) int {\n\tif root != nil {\n\t\treturn Size(root.Left) + Size(root.Right) + 1\n\t}\n\treturn 0\n\n}", "func (nl *nodeList) size() int {\n\treturn len(nl.elements)\n}", "func (s *LinkedStack) Size() int { return s.count }", "func (t 
*AreaTopology) Len() int {\n\tn := 0\n\tfor _, area := range t.areas {\n\t\tn += len(area.nodes)\n\t}\n\treturn n\n}", "func (v ResourceNodes) Len() int {\n\treturn len(v)\n}", "func (rt *RoutingTable) Size() int {\n\tvar tot int\n\trt.tabLock.RLock()\n\tfor _, buck := range rt.Buckets {\n\t\ttot += buck.Len()\n\t}\n\trt.tabLock.RUnlock()\n\treturn tot\n}", "func (t *AATree) Size() int {\n\tif t.root == nil {\n\t\treturn 0\n\t}\n\t// return t.root\n\treturn 0\n}", "func nodeLen(data interface{}) int {\n\tif data == nil {\n\t\treturn 0\n\t}\n\tswitch d := data.(type) {\n\tcase []interface{}:\n\t\treturn len(d)\n\tcase map[string]interface{}:\n\t\treturn len(d)\n\tcase string, int, float64, bool:\n\t\treturn 1\n\t}\n\treturn 0\n}", "func (tree *Tree) Size() int {\n\treturn tree.size\n}", "func (a *ALGraph) Size() int {\n\treturn len(a.adjacencyList)\n}", "func (r pciResource) size() uint64 {\n\treturn r.end - r.start + 1\n}", "func (t *BinarySearchTree) Size() int {\n\treturn 0\n}", "func (bh* BinomialHeap) Size() int {\n return bh.size\n}", "func (set *Set) Size() int {\n\treturn set.tree.Size()\n}", "func (hat *HashedArrayTree) Size() int {\n\treturn hat.size\n}", "func (this *Manager) GetNodesLen() int {\n\treturn len(this.nodes)\n}", "func (edges Edges) Len() int { return len(edges) }", "func (ne nodeEntries) Len() int { return len(ne) }", "func (n *Node) Length() int {\n\tlen := 0\n\tfor n != nil {\n\t\tlen++\n\t\tn = n.Next\n\t}\n\treturn len\n}", "func (me *TrieNode) Size() int {\n\tif me == nil {\n\t\treturn 0\n\t}\n\treturn int(me.size)\n}", "func (g *Graph) Len() int {\n\treturn len(g.nodes)\n}", "func (r *rope) Size() (s int64) {\n\tfor _, rd := range r.rd {\n\t\ts += rd.Size()\n\t}\n\treturn s\n}", "func (t Tree) Size() int {\n\tif t.Symbol.Kind == NIL {\n\t\treturn 0\n\t}\n\n\tcount := 1\n\tfor _, child := range t.Children {\n\t\tif child != nil {\n\t\t\tcount += child.Size()\n\t\t}\n\t}\n\n\treturn count\n}", "func (n *Node) Length() (length int) 
{\n\t// Step 1: Go through all the nodes\n\tfor i := n; i != nil; i = i.next {\n\t\tlength++\n\t}\n\n\t// Step 2: Return the length\n\treturn length\n}", "func (s *nodeSorter) Len() int {\n\treturn len(s.nodes)\n}", "func (p NodePools) Len() int { return len(p) }", "func (seq *Sequence) Len() int { return len(seq.Nodes) }", "func (n *MemoryNetwork) Size() int {\n\treturn len(n.transports)\n}", "func (s NodeSlice) Len() int {\n\treturn len(s)\n}", "func (h nodeList) Len() int {\n\treturn len(h)\n}", "func (b *BlocksStore) Size() int {\n\treturn len(b.rootNode.Links())\n}", "func (o KubernetesNodePoolOutput) Size() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *KubernetesNodePool) pulumi.StringOutput { return v.Size }).(pulumi.StringOutput)\n}", "func (t ListPartitionReassignmentsTopics46) Size(version int16) int32 {\n\tvar sz int32\n\tsz += sizeof.String(t.Name) // Name\n\tsz += sizeof.Int32Array(t.PartitionIndexes) // PartitionIndexes\n\treturn sz\n}", "func (p *Trie) Size() (sz int) {\n\tsz = len(p.children)\n\n\tfor _, child := range p.children {\n\t\tsz += child.Size()\n\t}\n\n\treturn\n}", "func (sm3 *SM3) Size() int { return 32 }", "func (n *Node) Length() (l int) {\n\tif n == nil {\n\t\treturn\n\t}\n\n\tcn := n\n\n\tfor {\n\t\tif cn.next == nil {\n\t\t\tbreak\n\t\t}\n\n\t\tl++\n\t\tcn = cn.next\n\t}\n\n\treturn\n}", "func SizeRange(min, max uint64) MatcherFunc {\n\treturn func(el Elem) bool {\n\t\tif el.IsDir() {\n\t\t\treturn false\n\t\t}\n\n\t\tfi, err := el.Info()\n\t\tif err != nil {\n\t\t\treturn false\n\t\t}\n\t\treturn mathutil.InUintRange(uint64(fi.Size()), min, max)\n\t}\n}", "func (t CreatePartitionsTopic37) Size(version int16) int32 {\n\tvar sz int32\n\tsz += sizeof.String(t.Name) // Name\n\tsz += sizeof.Int32 // Count\n\tsz += sizeof.ArrayLength // Assignments\n\tfor i := len(t.Assignments) - 1; i >= 0; i-- {\n\t\tsz += t.Assignments[i].Size(version)\n\t}\n\treturn sz\n}", "func (tree *RedBlack[K, V]) Len() int {\n\treturn tree.size\n}", "func 
(this *MultiMap) Size() int {\n\treturn this.tree.Size()\n}", "func (bst *Bst) Size() float64 {\n\treturn bst.size\n}", "func (t OngoingTopicReassignment46) Size(version int16) int32 {\n\tvar sz int32\n\tsz += sizeof.String(t.Name) // Name\n\tsz += sizeof.ArrayLength // Partitions\n\tfor i := len(t.Partitions) - 1; i >= 0; i-- {\n\t\tsz += t.Partitions[i].Size(version)\n\t}\n\treturn sz\n}", "func (bst *BST) Size() int {\n\treturn bst.size\n}", "func (s *NodeSorter) Len() int {\n\treturn len(s.nodes)\n}", "func (d Dense) Size() int {\n\treturn d.len\n}", "func size(v Type) int32 {}", "func (d *state) Size() int { return d.outputLen }", "func (t OngoingPartitionReassignment46) Size(version int16) int32 {\n\tvar sz int32\n\tsz += sizeof.Int32 // PartitionIndex\n\tsz += sizeof.Int32Array(t.Replicas) // Replicas\n\tsz += sizeof.Int32Array(t.AddingReplicas) // AddingReplicas\n\tsz += sizeof.Int32Array(t.RemovingReplicas) // RemovingReplicas\n\treturn sz\n}", "func (t *Slice) Size() int32 { return 4 }", "func (tree *BinaryTree) Size() int {\n\treturn Size(tree.root)\n}", "func (ng *NodeGroup) MaxSize() int {\n\treturn int(ng.MaxNodes)\n}", "func (this *Map) Size() int {\n\treturn this.tree.Size()\n}", "func (twe *TxSizeEstimator) Size() int64 {\n\treturn baseTxSize +\n\t\tint64(wire.VarIntSerializeSize(uint64(twe.inputCount))) + // prefix len([]TxIn) varint\n\t\ttwe.InputSize + // prefix []TxIn + witness []TxIn\n\t\tint64(wire.VarIntSerializeSize(uint64(twe.outputCount))) + // prefix len([]TxOut) varint\n\t\ttwe.OutputSize + // []TxOut prefix\n\t\tint64(wire.VarIntSerializeSize(uint64(twe.inputCount))) // witness len([]TxIn) varint\n}", "func (t *Tree) Size() int {\n\treturn t.root.Size()\n}", "func (w *RootWalker) Size() int {\n\treturn len(w.stack)\n}", "func (w *RootWalker) Size() int {\n\treturn len(w.stack)\n}", "func (n *NodeSorter) Len() int {\n\treturn len(n.nodes)\n}", "func (g *ItemGraph) GetLength() int {\n\tg.Lock.RLock()\n\tglength := 
len(g.Nodes)\n\tg.Lock.RUnlock()\n\t// log.Printf(\"Lenght: %i\", glength)\n\treturn glength\n}", "func (t ReassignableTopic45) Size(version int16) int32 {\n\tvar sz int32\n\tsz += sizeof.String(t.Name) // Name\n\tsz += sizeof.ArrayLength // Partitions\n\tfor i := len(t.Partitions) - 1; i >= 0; i-- {\n\t\tsz += t.Partitions[i].Size(version)\n\t}\n\treturn sz\n}", "func (f *Flow) Len() int {\n\treturn len(f.nodes)\n}", "func (t ListOffsetTopic2) Size(version int16) int32 {\n\tvar sz int32\n\tsz += sizeof.String(t.Name) // Name\n\tsz += sizeof.ArrayLength // Partitions\n\tfor i := len(t.Partitions) - 1; i >= 0; i-- {\n\t\tsz += t.Partitions[i].Size(version)\n\t}\n\treturn sz\n}" ]
[ "0.74679583", "0.71741176", "0.7047884", "0.7019155", "0.69828427", "0.6826053", "0.6789677", "0.67552245", "0.6690559", "0.66762626", "0.6667847", "0.66322196", "0.6608516", "0.6595891", "0.65939176", "0.65819585", "0.6556362", "0.65378356", "0.65339345", "0.6522922", "0.65199167", "0.64987296", "0.64928", "0.6474951", "0.6443595", "0.64425856", "0.64128995", "0.64054286", "0.6375169", "0.6373299", "0.6366501", "0.632378", "0.6320443", "0.63197523", "0.6302315", "0.6272426", "0.62719446", "0.6264531", "0.624553", "0.6239853", "0.62301105", "0.62083936", "0.62009007", "0.61972094", "0.6186789", "0.6181459", "0.61734635", "0.6170263", "0.617003", "0.6159789", "0.6156263", "0.614535", "0.61421233", "0.61202186", "0.6111151", "0.6109365", "0.61071694", "0.6091165", "0.608959", "0.6071321", "0.6068022", "0.60642046", "0.6036111", "0.6027519", "0.59856504", "0.59803927", "0.59701824", "0.5967001", "0.5966318", "0.5950302", "0.59448063", "0.5944023", "0.5939236", "0.59383595", "0.5931004", "0.5927545", "0.5921408", "0.5918707", "0.5918487", "0.59140164", "0.59104186", "0.58994156", "0.5894791", "0.5891437", "0.5880086", "0.58730507", "0.5864123", "0.5860731", "0.58565193", "0.5856478", "0.58559036", "0.5852979", "0.58527666", "0.5847899", "0.5847899", "0.5846659", "0.5843209", "0.5842643", "0.58411306", "0.5828551" ]
0.676163
7
RegisterChannelzServiceToServer registers the channelz service to the given server. Note: it is preferred to use the admin API ( instead to register Channelz and other administrative services.
func RegisterChannelzServiceToServer(s grpc.ServiceRegistrar) { channelzgrpc.RegisterChannelzServer(s, newCZServer()) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func RegisterServer(cb interface{}, s interface{}) {\n\tsvrName, _ := reflector.GetName(s)\n\tsvr := &grpcService{\n\t\tname: svrName,\n\t\tcb: cb,\n\t\tsvc: s,\n\t}\n\tgrpcServers = append(grpcServers, svr)\n}", "func (ss *storageServer) RegisterServer(args *storagerpc.RegisterArgs, reply *storagerpc.RegisterReply) error {\n\n\tss.registerLock.Lock()\n\tdefer ss.registerLock.Unlock()\n\n\tok := ss.initializer.Register(args.ServerInfo)\n\n\tif ok {\n\t\tss.nodes = ss.initializer.Flush()\n\t\tss.rangeChecker = nodes.NewNodeCollection(ss.nodes).RangeChecker(ss.selfNode.NodeID)\n\n\t\t*reply = storagerpc.RegisterReply{\n\t\t\tStatus: storagerpc.OK,\n\t\t\tServers: ss.nodes,\n\t\t}\n\t\tif !ss.ready {\n\t\t\tss.initConfChan <- nil\n\t\t}\n\t} else {\n\t\t*reply = storagerpc.RegisterReply{\n\t\t\tStatus: storagerpc.NotReady,\n\t\t\tServers: nil,\n\t\t}\n\t}\n\n\t// CAUTION! might have to return error\n\treturn nil\n\n}", "func (s *FluentdService) RegisterServer(server *grpc.Server) {\n\tpb.RegisterFluentdServer(server, s)\n}", "func RegisterSecurityCenterServer(s *grpc.Server, srv SecurityCenterServer) {\n\tsrc.RegisterSecurityCenterServer(s, srv)\n}", "func (b *Bot) RegisterServer(\n\tserver string, event string, handler interface{}) (int, error) {\n\n\tb.serversProtect.RLock()\n\tdefer b.serversProtect.RUnlock()\n\n\tif s, ok := b.servers[server]; ok {\n\t\ts.protect.RLock()\n\t\tdefer s.protect.RUnlock()\n\t\treturn s.dispatcher.Register(event, handler), nil\n\t}\n\treturn 0, errUnknownServerId\n}", "func RegisterMigrationServiceServer(s *grpc.Server, srv MigrationServiceServer) {\n\tsrc.RegisterMigrationServiceServer(s, srv)\n}", "func RegisterReachabilityServiceServer(s *grpc.Server, srv ReachabilityServiceServer) {\n\tsrc.RegisterReachabilityServiceServer(s, srv)\n}", "func RegisterServerBMServer(e *bm.Engine, server ServerBMServer) {\n\tServerSvc = server\n\te.GET(\"/server.service.v1.Server/Ping\", serverPing)\n\te.GET(\"/server.service.v1.Server/SayHello\", 
serverSayHello)\n\te.GET(\"/kratos-demo/say_hello\", serverSayHelloURL)\n}", "func RegisterClusterManagerServer(s *grpc.Server, srv ClusterManagerServer) {\n\tsrc.RegisterClusterManagerServer(s, srv)\n}", "func RegisterFooServiceServer(s *grpc.Server, srv FooServiceServer) {\n\tsrc.RegisterFooServiceServer(s, srv)\n}", "func RegisterServer(name string, c ServerCreator) {\n\tserverMap[name] = c\n}", "func (s *Service) RegisterGRPCService(g *grpc.Server) {\n}", "func (s *serviceImplpetstoreRest2GRPCPetStoreServiceserver) RunRegisterServerService(serv *grpc.Server, trigger *servInfo.Trigger) {\n\tservice := &serviceImplpetstoreRest2GRPCPetStoreServiceserver{\n\t\ttrigger: trigger,\n\t\tserviceInfo: serviceInfopetstoreRest2GRPCPetStoreServiceserver,\n\t}\n\tRegisterRest2GRPCPetStoreServiceServer(serv, service)\n}", "func RegisterSecretManagerServiceServer(s *grpc.Server, srv SecretManagerServiceServer) {\n\tsrc.RegisterSecretManagerServiceServer(s, srv)\n}", "func RegisterServer(conn *network.TcpConn, body []byte) (interface{}, error) {\n log.Println(\"RegisterServer:\", string(body))\n \n if conn.Status != network.ConnInit {\n result := \"has inited!\"\n return nil, errors.New(result)\n }\n\n\treq := protocol.IRegisterServer{}\n\terr := protocol.Decode(&req, body)\n\tres := &protocol.ORegisterServer{}\n\tif err != nil {\n\t\tresult := \"error!\"\n\t\treturn nil, errors.New(result)\n\t}\n \n\tserver := ServerInfo{\n ServerInfo:req.ServerInfo,\n\t\tConn: conn,\n\t}\n\n\tconn.AttachID = req.ServerID\n conn.Status = network.ConnRegister\n\tServerMgrInstance.Register(server)\n\treturn res, nil\n}", "func RegisterHealthServer(s grpc.ServiceRegistrar, srv HealthServer) {\n\tstr := &HealthService{\n\t\tCheck: srv.Check,\n\t\tWatch: srv.Watch,\n\t}\n\tRegisterHealthService(s, str)\n}", "func (auth *Auth) RegisterServer(s *grpc.Server) {\n\tapiv1.RegisterAuthenticatorServer(s, auth)\n}", "func (s SmesherService) RegisterService(server *Server) 
{\n\tpb.RegisterSmesherServiceServer(server.GrpcServer, s)\n}", "func RegisterModelCenterServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server ModelCenterServiceServer) error {\n\n\tmux.Handle(\"POST\", pattern_ModelCenterService_CreateRegisteredModel_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ModelCenterService_CreateRegisteredModel_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ModelCenterService_CreateRegisteredModel_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PATCH\", pattern_ModelCenterService_UpdateRegisteredModel_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := 
local_request_ModelCenterService_UpdateRegisteredModel_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ModelCenterService_UpdateRegisteredModel_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PATCH\", pattern_ModelCenterService_DeleteRegisteredModel_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ModelCenterService_DeleteRegisteredModel_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ModelCenterService_DeleteRegisteredModel_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_ModelCenterService_ListRegisteredModels_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = 
grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ModelCenterService_ListRegisteredModels_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ModelCenterService_ListRegisteredModels_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_ModelCenterService_GetRegisteredModelDetail_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ModelCenterService_GetRegisteredModelDetail_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ModelCenterService_GetRegisteredModelDetail_0(ctx, mux, outboundMarshaler, w, req, resp, 
mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_ModelCenterService_CreateModelVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ModelCenterService_CreateModelVersion_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ModelCenterService_CreateModelVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PATCH\", pattern_ModelCenterService_UpdateModelVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ModelCenterService_UpdateModelVersion_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), 
metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ModelCenterService_UpdateModelVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PATCH\", pattern_ModelCenterService_DeleteModelVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ModelCenterService_DeleteModelVersion_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ModelCenterService_DeleteModelVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_ModelCenterService_GetModelVersionDetail_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif 
err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ModelCenterService_GetModelVersionDetail_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ModelCenterService_GetModelVersionDetail_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\treturn nil\n}", "func RegisterPayLiveBMServer(e *bm.Engine, server PayLiveBMServer) {\n\tv1PayLiveSvc = server\n\te.POST(\"/live.liveadmin.v1.PayLive/add\", payLiveAdd)\n\te.POST(\"/live.liveadmin.v1.PayLive/update\", payLiveUpdate)\n\te.GET(\"/live.liveadmin.v1.PayLive/getList\", payLiveGetList)\n\te.POST(\"/live.liveadmin.v1.PayLive/close\", payLiveClose)\n\te.POST(\"/live.liveadmin.v1.PayLive/open\", payLiveOpen)\n}", "func RegisterService(server *grpc.Server, service Backend) {\n\tserver.RegisterService(&serviceDesc, service)\n}", "func (s *Serverus) RegisterServer(fn registerServer) {\n\tlog.Println(\"Resgitering Server\")\n\tfn(s.server)\n}", "func RegisterClientConnectorServicesServiceServer(s *grpc.Server, srv ClientConnectorServicesServiceServer) {\n\tsrc.RegisterClientConnectorServicesServiceServer(s, srv)\n}", "func NewServer(id uuid.UUID, csrv *conf.Service, c *conf.Server, logger log.Logger, r *etcd.Registry) (*Server, error) {\n\tlogicClient, err := logic.NewClient(context.Background(), grpc.WithDiscovery(r))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ts := &Server{\n\t\tc: c,\n\t\tuuid: id.String(),\n\t\tround: NewRound(c),\n\t\trpcClient: logicClient,\n\t}\n\t// init bucket\n\ts.buckets = make([]*Bucket, c.Bucket.Size)\n\ts.bucketIdx = uint32(c.Bucket.Size)\n\tfor i := int32(0); i < 
c.Bucket.Size; i++ {\n\t\ts.buckets[i] = NewBucket(c.Bucket)\n\t}\n\ts.serverID = ip.InternalIP()\n\tgo s.onlineproc()\n\n\tif err := InitWhitelist(c.Whitelist, logger); err != nil {\n\t\tpanic(err)\n\t}\n\tif err := InitTCP(logger, s, c.Tcp.Bind, runtime.NumCPU()); err != nil {\n\t\tpanic(err)\n\t}\n\tif err := InitWebsocket(logger, s, c.Websocket.Bind, runtime.NumCPU()); err != nil {\n\t\tpanic(err)\n\t}\n\t// if conf.Conf.Websocket.TLSOpen {\n\t// \tif err := comet.InitWebsocketWithTLS(srv, conf.Conf.Websocket.TLSBind, conf.Conf.Websocket.CertFile, conf.Conf.Websocket.PrivateFile, runtime.NumCPU()); err != nil {\n\t// \t\tpanic(err)\n\t// \t}\n\t// }\n\n\treturn s, nil\n}", "func (s *PostSvc) RegisterWithGRPCServer(g *grpc.Server) error {\n\tpb.RegisterPostServer(g, s)\n\treturn nil\n}", "func (a *App) RegisterServer(addr string, s net.Server) {\n\ta.servers.Add(addr, s)\n}", "func (dcs *DeviceClaimingServer) RegisterServices(s *grpc.Server) {\n\tttnpb.RegisterEndDeviceClaimingServerServer(s, dcs.grpc.endDeviceClaimingServer)\n\tttnpb.RegisterEndDeviceBatchClaimingServerServer(s, dcs.grpc.endDeviceBatchClaimingServer)\n\tttnpb.RegisterGatewayClaimingServerServer(s, dcs.grpc.gatewayClaimingServer)\n}", "func (s *Server) RegisterService(receiver interface{}, name string) error {\n return s.services.register(receiver, name)\n}", "func (p *Plugin) GRPCServer(broker *plugin.GRPCBroker, s *grpc.Server) error {\n\tproto.RegisterDockerLoggerServer(s, &dockerLoggerServer{\n\t\timpl: p.impl,\n\t\tbroker: broker,\n\t})\n\treturn nil\n}", "func RegisterOrganizationSvcBMServer(e *bm.Engine, server OrganizationSvcBMServer) {\n\tOrganizationSvcSvc = server\n\te.GET(\"/eagle.organization.v1.OrganizationSvc/Ping\", organizationSvcPing)\n\te.GET(\"/organization\", organizationSvcGetOrganization)\n\te.POST(\"/organization\", organizationSvcAddOrganization)\n\te.PUT(\"/organization\", organizationSvcUpdateOrganization)\n\te.DELETE(\"/organization\", 
organizationSvcDeleteOrganization)\n}", "func RegisterDepsServer(svr prpc.Registrar) {\n\tdm.RegisterDepsServer(svr, newDecoratedDeps())\n}", "func (w *Whisper) RegisterServer(server MailServer) {\n\tw.mailServer = server\n}", "func (p *ResourceGRPCPlugin) GRPCServer(_ *plugin.GRPCBroker, s *grpc.Server) error {\n\tpluginv2.RegisterResourceServer(s, &resourceGRPCServer{\n\t\tserver: p.ResourceServer,\n\t})\n\treturn nil\n}", "func RegisterServer(s *http.Server, srv *rest.Server) {\n\tregister(s, newLoginServer(srv))\n}", "func Register(server *grpc.Server) {\n\tpb.RegisterHelloServiceServer(server, &Server{})\n}", "func (s Service) Register(r *grpc.Server) {\n\tserver := &Server{}\n\tnb.RegisterC1InterfaceServiceServer(r, server)\n}", "func RegisterEventarcServer(s *grpc.Server, srv EventarcServer) { src.RegisterEventarcServer(s, srv) }", "func (r *RouteInfo) RegisterServer(clusterName, serverAddr, serverName, haServerAddr string, serverId int, conn gnet.Conn) *namesrv.RegisterResponse {\n\tr.Lock()\n\tdefer r.Unlock()\n\n\tresult := &namesrv.RegisterResponse{}\n\n\tserverNames := r.clusterAddrTable[clusterName]\n\tif serverNames == nil {\n\t\tserverNames = hashset.New()\n\t\tr.clusterAddrTable[clusterName] = serverNames\n\t}\n\tserverNames.Add(serverName)\n\tregisterFirst := false\n\tserverData := r.serverAddrTable[serverName]\n\tif serverData == nil {\n\t\tregisterFirst = true\n\t\tserverData = protocol.NewServer(clusterName, serverName, make(map[int]string))\n\t\tr.serverAddrTable[serverName] = serverData\n\t}\n\tserverAddrsMap := serverData.GetServerAddrs()\n\t//Switch slave to master: first remove <1, IP:PORT> in namesrv, then add <0, IP:PORT>\n\t//The same IP:PORT must only have one record in serverAddrTable\n\tfor k, v := range serverAddrsMap {\n\t\tif serverAddr != \"\" && serverAddr == v && serverId != k {\n\t\t\tdelete(serverAddrsMap, k)\n\t\t}\n\t}\n\n\toldAddr := serverData.GetServerAddrs()[serverId]\n\tserverData.GetServerAddrs()[serverId] = 
serverAddr\n\tregisterFirst = registerFirst || \"\" == oldAddr\n\n\tprevServerLiveInfo := r.serverLiveTable[serverAddr]\n\tls := protocol.NewLiveServer(time.Now().Unix(), haServerAddr, nil, conn)\n\tr.serverLiveTable[serverAddr] = ls\n\tif prevServerLiveInfo == nil {\n\t\tlogger.Logger.WithFields(logrus.Fields{\n\t\t\t\"serverLiveTable\": r.serverLiveTable,\n\t\t\t\"serverAddr\": serverAddr,\n\t\t\t\"clusterAddrTable\": r.clusterAddrTable,\n\t\t}).Warn(\"prevServerLiveInfo is nil\")\n\t}\n\tif MasterId != serverId {\n\t\tmasterAddr := serverData.GetServerAddrs()[MasterId]\n\t\tif masterAddr != \"\" {\n\t\t\tserverLiveInfo := r.serverLiveTable[masterAddr]\n\t\t\tif serverLiveInfo != nil {\n\t\t\t\tresult.HaServerAddr = serverLiveInfo.HaServerAddr\n\t\t\t\tresult.MasterAddr = masterAddr\n\t\t\t}\n\t\t}\n\t}\n\treturn result\n}", "func RegisterCompanyServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server CompanyServiceServer) error {\n\n\tmux.Handle(\"POST\", pattern_CompanyService_CreateCompany_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_CreateCompany_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, 
err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_CreateCompany_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_ListCompanies_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_ListCompanies_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_ListCompanies_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_GetCompany_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_GetCompany_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, 
md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_GetCompany_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PUT\", pattern_CompanyService_UpdateCompany_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_UpdateCompany_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_UpdateCompany_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_CompanyService_CreateTeam_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, 
req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_CreateTeam_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_CreateTeam_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_ListTeams_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_ListTeams_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_ListTeams_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_GetTeam_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream 
runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_GetTeam_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_GetTeam_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PUT\", pattern_CompanyService_UpdateTeam_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_UpdateTeam_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_UpdateTeam_0(ctx, mux, outboundMarshaler, w, req, resp, 
mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_GetWorkerTeamInfo_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_GetWorkerTeamInfo_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_GetWorkerTeamInfo_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_CompanyService_CreateJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_CreateJob_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, 
stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_CreateJob_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_ListJobs_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_ListJobs_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_ListJobs_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_GetJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, 
err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_GetJob_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_GetJob_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PUT\", pattern_CompanyService_UpdateJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_UpdateJob_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_UpdateJob_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_CompanyService_CreateShift_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, 
&stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_CreateShift_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_CreateShift_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_ListShifts_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_ListShifts_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_ListShifts_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_ListWorkerShifts_0, func(w 
http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_ListWorkerShifts_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_ListWorkerShifts_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PUT\", pattern_CompanyService_BulkPublishShifts_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_BulkPublishShifts_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, 
outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_BulkPublishShifts_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_GetShift_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_GetShift_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_GetShift_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"DELETE\", pattern_CompanyService_DeleteShift_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_DeleteShift_0(rctx, inboundMarshaler, server, req, 
pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_DeleteShift_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PUT\", pattern_CompanyService_UpdateShift_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_UpdateShift_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_UpdateShift_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_CompanyService_CreateDirectory_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := 
runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_CreateDirectory_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_CreateDirectory_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_Directory_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_Directory_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_Directory_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_GetAssociations_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := 
context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_GetAssociations_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_GetAssociations_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_GetDirectoryEntry_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_GetDirectoryEntry_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, 
err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_GetDirectoryEntry_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PUT\", pattern_CompanyService_UpdateDirectoryEntry_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_UpdateDirectoryEntry_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_UpdateDirectoryEntry_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_ListAdmins_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_ListAdmins_0(rctx, inboundMarshaler, server, req, 
pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_ListAdmins_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_CompanyService_CreateAdmin_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_CreateAdmin_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_CreateAdmin_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_GetAdmin_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := 
runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_GetAdmin_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_GetAdmin_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"DELETE\", pattern_CompanyService_DeleteAdmin_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_DeleteAdmin_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_DeleteAdmin_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_ListWorkers_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := 
context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_ListWorkers_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_ListWorkers_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_GetWorker_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_GetWorker_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_GetWorker_0(ctx, mux, 
outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"DELETE\", pattern_CompanyService_DeleteWorker_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_DeleteWorker_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_DeleteWorker_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_CompanyService_CreateWorker_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_CreateWorker_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), 
metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_CreateWorker_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_ListTimeZones_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_ListTimeZones_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_ListTimeZones_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_CompanyService_GrowthGraph_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, 
outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_CompanyService_GrowthGraph_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_CompanyService_GrowthGraph_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\treturn nil\n}", "func RegisterServer(srv Server, registry *prometheus.Registry, opts ...grpc.ServerOption) *grpc.Server {\n\tvar metrics *grpc_prometheus.ServerMetrics\n\n\t// TODO: Decouple registry from this.\n\tif registry != nil {\n\t\tmetrics = grpc_prometheus.NewServerMetrics()\n\t\tregistry.MustRegister(metrics)\n\n\t\topts = append(opts,\n\t\t\tgrpc.StreamInterceptor(metrics.StreamServerInterceptor()),\n\t\t\tgrpc.UnaryInterceptor(metrics.UnaryServerInterceptor()),\n\t\t)\n\n\t}\n\n\tg := grpc.NewServer(opts...)\n\n\tdiscovery.RegisterAggregatedDiscoveryServiceServer(g, srv)\n\tdiscovery.RegisterSecretDiscoveryServiceServer(g, srv)\n\tapi.RegisterClusterDiscoveryServiceServer(g, srv)\n\tapi.RegisterEndpointDiscoveryServiceServer(g, srv)\n\tapi.RegisterListenerDiscoveryServiceServer(g, srv)\n\tapi.RegisterRouteDiscoveryServiceServer(g, srv)\n\n\tif metrics != nil {\n\t\tmetrics.InitializeMetrics(g)\n\t}\n\n\treturn g\n}", "func RegisterHealthServer(g *grpc.Server, s *Service) {\n\thealthpb.RegisterHealthServer(g, s)\n}", "func registerRateLimitServiceServer(s *grpc.Server, srv ratelimit.RateLimitServiceServer) {\n\ts.RegisterService(&_rateLimitService_serviceDesc, srv)\n}", "func (conf ServerConfig) NewServer(c *component.Component, customOpts ...Option) *Server {\n\tvar registerUnknownTo *ttnpb.OrganizationOrUserIdentifiers\n\tswitch conf.RegisterUnknown.Type {\n\tcase 
\"user\":\n\t\tregisterUnknownTo = ttnpb.UserIdentifiers{UserID: conf.RegisterUnknown.ID}.OrganizationOrUserIdentifiers()\n\tcase \"organization\":\n\t\tregisterUnknownTo = ttnpb.OrganizationIdentifiers{OrganizationID: conf.RegisterUnknown.ID}.OrganizationOrUserIdentifiers()\n\t}\n\topts := []Option{\n\t\tWithExplicitEnable(conf.ExplicitEnable),\n\t\tWithRegisterUnknown(registerUnknownTo),\n\t\tWithAllowCUPSURIUpdate(conf.AllowCUPSURIUpdate),\n\t}\n\tif conf.RegisterUnknown.APIKey != \"\" {\n\t\topts = append(opts, WithAuth(func(ctx context.Context, gatewayEUI types.EUI64, auth string) grpc.CallOption {\n\t\t\treturn grpc.PerRPCCredentials(rpcmetadata.MD{\n\t\t\t\tAuthType: \"bearer\",\n\t\t\t\tAuthValue: conf.RegisterUnknown.APIKey,\n\t\t\t\tAllowInsecure: c.AllowInsecureForCredentials(),\n\t\t\t})\n\t\t}))\n\t}\n\tif tlsConfig, err := c.GetTLSConfig(c.Context()); err == nil {\n\t\topts = append(opts, WithRootCAs(tlsConfig.RootCAs))\n\t}\n\ts := NewServer(c, append(opts, customOpts...)...)\n\tc.RegisterWeb(s)\n\treturn s\n}", "func (s *server) RegisterServices(gs *grpc.Server) {\n\tttnpb.RegisterApplicationPackageRegistryServer(gs, s)\n\tfor _, subsystem := range s.handlers {\n\t\tif subsystem, ok := subsystem.(rpcserver.ServiceRegisterer); ok {\n\t\t\tsubsystem.RegisterServices(gs)\n\t\t}\n\t}\n}", "func RegisterWorkflowsServer(s *grpc.Server, srv WorkflowsServer) {\n\tsrc.RegisterWorkflowsServer(s, srv)\n}", "func (s *Server) Register(grpcServer *grpc.Server) {\n\tpb.RegisterIstioCertificateServiceServer(grpcServer, s)\n}", "func RegisterLoginServerServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server LoginServerServiceServer) error {\n\n\tmux.Handle(\"GET\", pattern_LoginServerService_LoginServerList_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, 
req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_LoginServerService_LoginServerList_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_LoginServerService_LoginServerList_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_LoginServerService_LoginServerLogin_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_LoginServerService_LoginServerLogin_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_LoginServerService_LoginServerLogin_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_LoginServerService_LoginServerLogout_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, 
err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_LoginServerService_LoginServerLogout_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_LoginServerService_LoginServerLogout_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_LoginServerService_LoginServerCreate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_LoginServerService_LoginServerCreate_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_LoginServerService_LoginServerCreate_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\treturn nil\n}", "func RegisterContactCenterInsightsServer(s *grpc.Server, srv ContactCenterInsightsServer) {\n\tsrc.RegisterContactCenterInsightsServer(s, srv)\n}", "func (s Service) Register(r *grpc.Server) {\n\tserver := Server{}\n\tproto.RegisterAdminServiceServer(r, server)\n\tproto.RegisterDeviceInventoryServiceServer(r, server)\n}", "func NewServer(binding string, nodeMgr NodeManagerInterface) GRPCServer {\n\ts := grpc.NewServer()\n\tmyServer := &server{\n\t\tbinding: binding,\n\t\ts: s,\n\t\tnodeMgr: nodeMgr,\n\t}\n\tpb.RegisterCloudProviderVsphereServer(s, myServer)\n\treflection.Register(s)\n\treturn myServer\n}", 
"func BuildServer(args []string) (*grpc.Server, error) {\n\tif len(args) < 1 {\n\t\treturn nil, errors.New(\"server requires valid services specified to run\")\n\t}\n\tserver := grpc.NewServer()\n\tfor _, service := range args {\n\t\tswitch service {\n\t\tcase \"userV1\":\n\t\t\tservices.RegisterUserAPIv1(server)\n\t\tcase \"leaderboardV1\":\n\t\tcase \"notificationV1\":\n\t\t\treturn nil, fmt.Errorf(\"service '%s' is not implemented\", service)\n\t\tdefault:\n\t\t\treturn nil, fmt.Errorf(\"invalid service name '%s'\", service)\n\t\t}\n\t}\n\treflection.Register(server)\n\treturn server, nil\n}", "func NewServer(conf config.Config, conns service.Connections) gitalypb.ServerServiceServer {\n\ts := &Server{\n\t\tconf: conf,\n\t\tconns: conns,\n\t}\n\n\treturn s\n}", "func SetServerSubscription(s []string) func(*Server) error {\n\treturn func(c *Server) error {\n\t\tif s != nil {\n\t\t\tfor _, d := range s {\n\t\t\t\tc.subscriptionURLs = append(c.subscriptionURLs, d)\n\t\t\t}\n\t\t\treturn nil\n\t\t}\n\t\tc.subscriptionURLs = append(c.subscriptionURLs, \"http://joajgazyztfssty4w2on5oaqksz6tqoxbduy553y34mf4byv6gpq.b32.i2p/export/alive-hosts.txt\")\n\t\treturn nil\n\t}\n}", "func RegisterApateletService(server *service.GRPCServer, stopChannel chan<- struct{}) {\n\tapatelet.RegisterApateletServer(server.Server, &apateletService{stopChannel: stopChannel})\n}", "func (s *Server) RegisterService(service Service) {\n\tservice.Register(s)\n}", "func (s *Server) RegisterService(sd *ServiceDesc, ss interface{}) {\n\ts.register(sd, ss)\n}", "func NewServer(\n\taddr string,\n\tcontrollerNS string,\n\tidentityTrustDomain string,\n\tenableH2Upgrade bool,\n\tenableEndpointSlices bool,\n\tk8sAPI *k8s.API,\n\tmetadataAPI *k8s.MetadataAPI,\n\tclusterStore *watcher.ClusterStore,\n\tclusterDomain string,\n\tdefaultOpaquePorts map[uint32]struct{},\n\tshutdown <-chan struct{},\n) (*grpc.Server, error) {\n\tlog := logging.WithFields(logging.Fields{\n\t\t\"addr\": addr,\n\t\t\"component\": 
\"server\",\n\t})\n\n\t// Initialize indexers that are used across watchers\n\terr := watcher.InitializeIndexers(k8sAPI)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tendpoints, err := watcher.NewEndpointsWatcher(k8sAPI, metadataAPI, log, enableEndpointSlices, \"local\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\topaquePorts, err := watcher.NewOpaquePortsWatcher(k8sAPI, log, defaultOpaquePorts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tprofiles, err := watcher.NewProfileWatcher(k8sAPI, log)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tservers, err := watcher.NewServerWatcher(k8sAPI, log)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tsrv := server{\n\t\tpb.UnimplementedDestinationServer{},\n\t\tendpoints,\n\t\topaquePorts,\n\t\tprofiles,\n\t\tservers,\n\t\tclusterStore,\n\t\tenableH2Upgrade,\n\t\tcontrollerNS,\n\t\tidentityTrustDomain,\n\t\tclusterDomain,\n\t\tdefaultOpaquePorts,\n\t\tk8sAPI,\n\t\tmetadataAPI,\n\t\tlog,\n\t\tshutdown,\n\t}\n\n\ts := prometheus.NewGrpcServer()\n\t// linkerd2-proxy-api/destination.Destination (proxy-facing)\n\tpb.RegisterDestinationServer(s, &srv)\n\treturn s, nil\n}", "func (s *ServerImpl) RegisterServerShutdownHandler(f ShutdownHandler) {\n\ts.serverShutdownHandler = f\n}", "func RegisterAssetServiceServer(s *grpc.Server, srv AssetServiceServer) {\n\tsrc.RegisterAssetServiceServer(s, srv)\n}", "func RegisterDmBMServer(e *bm.Engine, server DmBMServer) {\n\tv1DmSvc = server\n\te.POST(\"/xlive/open-interface/v1/dm/sendmsg\", dmSendmsg)\n\te.GET(\"/xlive/open-interface/v1/dm/getConf\", dmGetConf)\n}", "func RegisterService(server *grpc.Server, director StreamDirector, resiliency resiliency.Provider, serviceName string, methodNames ...string) {\n\tstreamer := &handler{\n\t\tdirector: director,\n\t\tresiliency: resiliency,\n\t}\n\tfakeDesc := &grpc.ServiceDesc{\n\t\tServiceName: serviceName,\n\t\tHandlerType: (*any)(nil),\n\t}\n\tfor _, m := range methodNames {\n\t\tstreamDesc := grpc.StreamDesc{\n\t\t\tStreamName: 
m,\n\t\t\tHandler: streamer.handler,\n\t\t\tServerStreams: true,\n\t\t\tClientStreams: true,\n\t\t}\n\t\tfakeDesc.Streams = append(fakeDesc.Streams, streamDesc)\n\t}\n\tserver.RegisterService(fakeDesc, streamer)\n}", "func RegisterService(server *grpc.Server, director StreamDirector, serviceName string, methodNames ...string) {\n\tstreamer := &handler{director}\n\tfakeDesc := &grpc.ServiceDesc{\n\t\tServiceName: serviceName,\n\t\tHandlerType: (*interface{})(nil),\n\t}\n\tfor _, m := range methodNames {\n\t\tstreamDesc := grpc.StreamDesc{\n\t\t\tStreamName: m,\n\t\t\tHandler: streamer.handler,\n\t\t\tServerStreams: true,\n\t\t\tClientStreams: true,\n\t\t}\n\t\tfakeDesc.Streams = append(fakeDesc.Streams, streamDesc)\n\t}\n\tserver.RegisterService(fakeDesc, streamer)\n}", "func (svc *Service) createGPRCServer() (*grpc.Server, error) {\n\topts, err := svc.serverOptions()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tserver := grpc.NewServer(opts...)\n\n\tif svc.cfg.Proto != \"\" && svc.rr != nil {\n\t\t// php proxy services\n\t\tservices, err := parser.File(svc.cfg.Proto, path.Dir(svc.cfg.Proto))\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tfor _, service := range services {\n\t\t\tp := NewProxy(fmt.Sprintf(\"%s.%s\", service.Package, service.Name), svc.cfg.Proto, svc.rr)\n\t\t\tfor _, m := range service.Methods {\n\t\t\t\tp.RegisterMethod(m.Name)\n\t\t\t}\n\n\t\t\tserver.RegisterService(p.ServiceDesc(), p)\n\t\t}\n\t}\n\n\t// external and native services\n\tfor _, r := range svc.services {\n\t\tr(server)\n\t}\n\n\treturn server, nil\n}", "func (srv *Server) Register(rcvr interface{}) error {\n\ts := new(service)\n\ts.typ = reflect.TypeOf(rcvr)\n\ts.rcvr = reflect.ValueOf(rcvr)\n\tsname := reflect.Indirect(s.rcvr).Type().Name()\n\tif sname == \"\" {\n\t\treturn fmt.Errorf(\"WebSocketRPC: Failed to register %v\", sname)\n\t}\n\ts.name = sname\n\ts.method = suitableMethods(s.typ)\n\tif len(s.method) == 0 {\n\t\treturn fmt.Errorf(\"WebSocketRPC: Did not 
find any methods of %v to register\", sname)\n\t}\n\tsrv.serviceMap[sname] = s\n\treturn nil\n}", "func (s *Server) RegisterService(receiver interface{}, name string) error {\n\treturn s.services.register(receiver, name)\n}", "func createServer() {\n\tlistener := createServerListener()\n\tserver := Server{}\n\tgrpcServer := grpc.NewServer()\n\tRegisterAccountServer(grpcServer, &server)\n\tstartServer(grpcServer, listener)\n}", "func (s *Server) RegisterService(sd *grpc.ServiceDesc, ss interface{}) {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\t// Does some sanity checks.\n\tif _, ok := s.m[sd.ServiceName]; ok {\n\t\tlog.Fatalf(\"grpc: Server.RegisterService found duplicate service registration for %q\", sd.ServiceName)\n\t}\n\tht := reflect.TypeOf(sd.HandlerType).Elem()\n\tst := reflect.TypeOf(ss)\n\tif !st.Implements(ht) {\n\t\tlog.Fatalf(\"grpc: Server.RegisterService found the handler of type %v that does not satisfy %v\", st, ht)\n\t}\n\tsrv := &service{\n\t\tserver: ss,\n\t\tmd: make(map[string]*grpc.MethodDesc),\n\t\t//\t\tsd: make(map[string]*StreamDesc),\n\t}\n\tfor i := range sd.Methods {\n\t\td := &sd.Methods[i]\n\t\tsrv.md[d.MethodName] = d\n\t\tname := fmt.Sprintf(\"/%s/%s\", sd.ServiceName, d.MethodName)\n\t\tlog.Printf(\"Register: %s\", name)\n\t\ts.mux.HandleFunc(name, handler(ss, d))\n\t}\n\t//\tfor i := range sd.Streams {\n\t//\t\td := &sd.Streams[i]\n\t//\t\tsrv.sd[d.StreamName] = d\n\t//\t}\n\ts.m[sd.ServiceName] = srv\n}", "func RegisterLogsServer(s *grpc.Server, srv LogsServer) {\n\totlpcollectorlog.RegisterLogsServiceServer(s, &rawLogsServer{srv: srv})\n}", "func RegisterServerType(typeName string, srv ServerType) {\n\tif _, ok := serverTypes[typeName]; ok {\n\t\tpanic(\"server type already registered\")\n\t}\n\tserverTypes[typeName] = srv\n}", "func NewServer(c *Config) (*Server, error) {\n\t// validate config\n\tif err := validation.Validate.Struct(c); err != nil {\n\t\treturn nil, fmt.Errorf(\"invalid config: %v\", err)\n\t}\n\n\t// create root 
context\n\tctx, cancel := context.WithCancel(context.Background())\n\n\t// register handlers\n\tmux := runtime.NewServeMux()\n\topts := []grpc.DialOption{grpc.WithInsecure()}\n\terr := proto.RegisterTodosHandlerFromEndpoint(ctx, mux, c.Endpoint, opts)\n\tif err != nil {\n\t\tdefer cancel()\n\t\treturn nil, fmt.Errorf(\"unable to register gateway handler: %v\", err)\n\t}\n\n\ts := Server{\n\t\tcancel: cancel,\n\t\tlog: c.Log,\n\t\tmux: mux,\n\t\tport: c.Port,\n\t}\n\treturn &s, nil\n}", "func NewServer(svc things.Service) mainflux.ThingsServiceServer {\n\treturn &grpcServer{\n\t\tcanAccess: kitgrpc.NewServer(\n\t\t\tcanAccessEndpoint(svc),\n\t\t\tdecodeCanAccessRequest,\n\t\t\tencodeIdentityResponse,\n\t\t),\n\t\tidentify: kitgrpc.NewServer(\n\t\t\tidentifyEndpoint(svc),\n\t\t\tdecodeIdentifyRequest,\n\t\t\tencodeIdentityResponse,\n\t\t),\n\t}\n}", "func (s *Server) RegisterService(receiver interface{}, name string) error {\n\treturn s.services.add(receiver, name, s.ctxType)\n}", "func (a *AuthorizationServer) Register(s *grpc.Server) {\n\tauth.RegisterAuthorizationServer(s, a)\n}", "func RegisterBinauthzManagementServiceV1Beta1Server(s *grpc.Server, srv BinauthzManagementServiceV1Beta1Server) {\n\tsrc.RegisterBinauthzManagementServiceV1Beta1Server(s, srv)\n}", "func (s *Servers) registerGRPCService() {\n\n\tidentitypb.RegisterIdentityServiceServer(s.gRPCServer, s.Backend.IdentityServer)\n\tauthpb.RegisterAuthServiceServer(s.gRPCServer, s.Backend.AuthServer)\n\tmoviepb.RegisterMovieServiceServer(s.gRPCServer, s.Backend.MovieServer)\n}", "func RegisterSimpleServiceOrionServer(srv orion.ServiceFactory, orionServer orion.Server) {\n\torionServer.RegisterService(&_SimpleService_serviceDesc, srv)\n\n}", "func serverRegister() {\n\t// set the parameters to register\n\tbytePublicKey, _ := anonServer.PublicKey.MarshalBinary()\n\tparams := map[string]interface{}{\n\t\t\"public_key\": bytePublicKey,\n\t}\n\tevent := &proto.Event{EventType:proto.SERVER_REGISTER, 
Params:params}\n\n\tutil.SendEvent(anonServer.LocalAddr, anonServer.CoordinatorAddr, event)\n}", "func (s MeshService) RegisterService(server *Server) {\n\tpb.RegisterMeshServiceServer(server.GrpcServer, s)\n}", "func InitializeServer(grpcServer *grpc.Server) *connectorpb.InitializeResponse {\n\n\taccesscontextmanager_connector.RegisterServers(grpcServer)\n\n\tapigee_connector.RegisterServers(grpcServer)\n\n\tappengine_connector.RegisterServers(grpcServer)\n\n\tassuredworkloads_connector.RegisterServers(grpcServer)\n\n\tbigqueryconnection_connector.RegisterServers(grpcServer)\n\n\tbigqueryreservation_connector.RegisterServers(grpcServer)\n\n\tbinaryauthorization_connector.RegisterServers(grpcServer)\n\n\tcloudbilling_connector.RegisterServers(grpcServer)\n\n\tcloudbuild_connector.RegisterServers(grpcServer)\n\n\tcloudfunctions_connector.RegisterServers(grpcServer)\n\n\tcloudresourcemanager_connector.RegisterServers(grpcServer)\n\n\tcloudscheduler_connector.RegisterServers(grpcServer)\n\n\tcompute_connector.RegisterServers(grpcServer)\n\n\tcontainer_connector.RegisterServers(grpcServer)\n\n\tcontaineranalysis_connector.RegisterServers(grpcServer)\n\n\tdatafusion_beta_connector.RegisterServers(grpcServer)\n\n\tdataproc_connector.RegisterServers(grpcServer)\n\n\tdatastore_connector.RegisterServers(grpcServer)\n\n\tdns_connector.RegisterServers(grpcServer)\n\n\tfile_connector.RegisterServers(grpcServer)\n\n\teventarc_beta_connector.RegisterServers(grpcServer)\n\n\tgameservices_connector.RegisterServers(grpcServer)\n\n\tgkehub_beta_connector.RegisterServers(grpcServer)\n\n\tlogging_connector.RegisterServers(grpcServer)\n\n\tiam_connector.RegisterServers(grpcServer)\n\n\tiap_connector.RegisterServers(grpcServer)\n\n\tidentitytoolkit_connector.RegisterServers(grpcServer)\n\n\tmonitoring_connector.RegisterServers(grpcServer)\n\n\tnetworksecurity_alpha_connector.RegisterServers(grpcServer)\n\n\tosconfig_beta_connector.RegisterServers(grpcServer)\n\n\tpubsub_connector.Registe
rServers(grpcServer)\n\n\tpubsublite_connector.RegisterServers(grpcServer)\n\n\tredis_connector.RegisterServers(grpcServer)\n\n\trun_connector.RegisterServers(grpcServer)\n\n\truntimeconfig_connector.RegisterServers(grpcServer)\n\n\tservicenetworking_connector.RegisterServers(grpcServer)\n\n\tsourcerepo_connector.RegisterServers(grpcServer)\n\n\tserviceusage_connector.RegisterServers(grpcServer)\n\n\tspanner_connector.RegisterServers(grpcServer)\n\n\tsql_beta_connector.RegisterServers(grpcServer)\n\n\tstorage_connector.RegisterServers(grpcServer)\n\n\ttpu_connector.RegisterServers(grpcServer)\n\n\tvpcaccess_connector.RegisterServers(grpcServer)\n\n\treturn &connectorpb.InitializeResponse{\n\t\tStatus: &statuspb.Status{\n\t\t\tCode: int32(codes.OK),\n\t\t},\n\t}\n}", "func (wk *Worker) RegisterService(args *serverless.ServiceRegisterArgs, _ *struct{}) error {\n\tplug, err := plugin.Open(\"../plugins/\" + args.ServiceName + \".so\")\n\tif err != nil {\n\t\tfmt.Printf(\"Failed to open plugin %s: %v\\n\", args.ServiceName, err)\n\t\treturn err\n\t}\n\t// TODO: implement me\n\t// Hint 1: You may want to use `plug.Lookup` to locate the service symbol,\n\t// and expose the interested service API associated with serverless.Interface.\n\t// Hint 2: Call newService to initialize a service struct, and insert the service key-value pair\n\t// to the global serviceMap.\n\t// TODO TODO TODO\n\t//\n\n\tserverless.Debug(\"Successfully registered new service %s\\n\", args.ServiceName)\n\treturn nil\n}", "func NewServer(\n\taddr string,\n\tcontrollerNS string,\n\tidentityTrustDomain string,\n\tenableH2Upgrade bool,\n\tk8sAPI *k8s.API,\n\tshutdown <-chan struct{},\n) *grpc.Server {\n\tlog := logging.WithFields(logging.Fields{\n\t\t\"addr\": addr,\n\t\t\"component\": \"server\",\n\t})\n\tendpoints := watcher.NewEndpointsWatcher(k8sAPI, log)\n\tprofiles := watcher.NewProfileWatcher(k8sAPI, log)\n\ttrafficSplits := watcher.NewTrafficSplitWatcher(k8sAPI, log)\n\n\tsrv := 
server{\n\t\tendpoints,\n\t\tprofiles,\n\t\ttrafficSplits,\n\t\tenableH2Upgrade,\n\t\tcontrollerNS,\n\t\tidentityTrustDomain,\n\t\tlog,\n\t\tshutdown,\n\t}\n\n\ts := prometheus.NewGrpcServer()\n\t// linkerd2-proxy-api/destination.Destination (proxy-facing)\n\tpb.RegisterDestinationServer(s, &srv)\n\t// controller/discovery.Discovery (controller-facing)\n\tdiscoveryPb.RegisterDiscoveryServer(s, &srv)\n\treturn s\n}", "func RegisterAgentEndpointServiceServer(s *grpc.Server, srv AgentEndpointServiceServer) {\n\tsrc.RegisterAgentEndpointServiceServer(s, srv)\n}", "func Register(s *grpc.Server) {\n\tca.RegisterCertServiceServer(s, &service{})\n}", "func StartServer(cleanUpChan chan int){\n\tGrpcServer = &Server{\n CleanUpChan:cleanUpChan ,\n\t GrpcServer: grpc.NewServer(),\n\t}\n\tregisterGrpcServices(GrpcServer.GrpcServer)\n\tif err := GrpcServer.GrpcServer.Serve(getListner(port)); err != nil {\n\t\tpanic(err)\n\t}\n}", "func RegisterSecretServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server SecretServiceServer) error {\n\n\tmux.Handle(\"POST\", pattern_SecretService_CreateSecret_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/api.SecretService/CreateSecret\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_SecretService_CreateSecret_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil 
{\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_SecretService_CreateSecret_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_SecretService_SecretExists_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/api.SecretService/SecretExists\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_SecretService_SecretExists_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_SecretService_SecretExists_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_SecretService_GetSecret_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/api.SecretService/GetSecret\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, 
md, err := local_request_SecretService_GetSecret_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_SecretService_GetSecret_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_SecretService_ListSecrets_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/api.SecretService/ListSecrets\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_SecretService_ListSecrets_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_SecretService_ListSecrets_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"DELETE\", pattern_SecretService_DeleteSecret_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, 
&stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/api.SecretService/DeleteSecret\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_SecretService_DeleteSecret_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_SecretService_DeleteSecret_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"DELETE\", pattern_SecretService_DeleteSecretKey_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/api.SecretService/DeleteSecretKey\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_SecretService_DeleteSecretKey_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_SecretService_DeleteSecretKey_0(ctx, mux, outboundMarshaler, w, req, resp, 
mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_SecretService_AddSecretKeyValue_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/api.SecretService/AddSecretKeyValue\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_SecretService_AddSecretKeyValue_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_SecretService_AddSecretKeyValue_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PATCH\", pattern_SecretService_UpdateSecretKeyValue_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/api.SecretService/UpdateSecretKeyValue\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_SecretService_UpdateSecretKeyValue_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, 
md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_SecretService_UpdateSecretKeyValue_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\treturn nil\n}", "func (service *ServerService) ServiceCreateServer(server *models.ServerModel) {\n\tservice.CreateServer(server)\n}", "func (s *Server) Register(rcvr interface{}) error {\n\n\t_service := new(service)\n\t_service.typ = reflect.TypeOf(rcvr)\n\t_service.rcvr = reflect.ValueOf(rcvr)\n\tsname := reflect.Indirect(_service.rcvr).Type().Name()\n\n\tif sname == \"\" {\n\t\terr_s := \"rpc.Register: no service name for type \" + _service.typ.String()\n\t\tlog.Print(err_s)\n\t\treturn errors.New(err_s)\n\t}\n\n\tif !isExported(sname) {\n\t\terr_s := \"rpc.Register: type \" + sname + \" is not exported\"\n\t\tlog.Print(err_s)\n\t\treturn errors.New(err_s)\n\t}\n\t_service.name = sname\n\t_service.method = suitableMethods(_service.typ, true)\n\n\tif _, dup := s.m.LoadOrStore(sname, _service); dup {\n\t\treturn errors.New(\"rpc: service already defined: \" + sname)\n\t}\n\treturn nil\n}", "func RegisterFUOTADeploymentServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server FUOTADeploymentServiceServer) error {\n\n\tmux.Handle(\"POST\", pattern_FUOTADeploymentService_CreateForDevice_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := 
local_request_FUOTADeploymentService_CreateForDevice_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_FUOTADeploymentService_CreateForDevice_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_FUOTADeploymentService_Get_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_FUOTADeploymentService_Get_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_FUOTADeploymentService_Get_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_FUOTADeploymentService_List_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_FUOTADeploymentService_List_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, 
outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_FUOTADeploymentService_List_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_FUOTADeploymentService_GetDeploymentDevice_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_FUOTADeploymentService_GetDeploymentDevice_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_FUOTADeploymentService_GetDeploymentDevice_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_FUOTADeploymentService_ListDeploymentDevices_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_FUOTADeploymentService_ListDeploymentDevices_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_FUOTADeploymentService_ListDeploymentDevices_0(ctx, mux, outboundMarshaler, w, req, 
resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\treturn nil\n}", "func RegisterHelmManagerGwServer(ctx context.Context, mux *runtime.ServeMux, server HelmManagerServer) error {\n\n\tmux.Handle(\"GET\", pattern_HelmManager_Available_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_Available_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_Available_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_HelmManager_CreateRepository_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_CreateRepository_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_CreateRepository_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PUT\", pattern_HelmManager_UpdateRepository_0, 
func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_UpdateRepository_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_UpdateRepository_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_HelmManager_GetRepository_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_GetRepository_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_GetRepository_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"DELETE\", pattern_HelmManager_DeleteRepository_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, 
req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_DeleteRepository_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_DeleteRepository_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_HelmManager_ListRepository_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_ListRepository_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_ListRepository_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"DELETE\", pattern_HelmManager_DeleteRepositories_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := 
local_request_HelmManager_DeleteRepositories_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_DeleteRepositories_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_HelmManager_ListChart_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_ListChart_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_ListChart_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_HelmManager_ListChartVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_ListChartVersion_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, 
err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_ListChartVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_HelmManager_GetChartDetail_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_GetChartDetail_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_GetChartDetail_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_HelmManager_ListRelease_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_ListRelease_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_ListRelease_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_HelmManager_GetReleaseDetail_0, func(w 
http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_GetReleaseDetail_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_GetReleaseDetail_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_HelmManager_InstallRelease_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_InstallRelease_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_InstallRelease_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_HelmManager_UninstallRelease_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, 
req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_UninstallRelease_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_UninstallRelease_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_HelmManager_UpgradeRelease_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_HelmManager_UpgradeRelease_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_UpgradeRelease_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_HelmManager_RollbackRelease_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := 
local_request_HelmManager_RollbackRelease_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_HelmManager_RollbackRelease_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\treturn nil\n}", "func (computeService Service) CreateServer(serverCreationParameters ServerCreationParameters) (CreateServerResponse, error) {\n\tsc := createServerContainer{}\n\treqURL, err := computeService.buildRequestURL(\"/servers\")\n\tif err != nil {\n\t\treturn sc.CreateServer, err\n\t}\n\n\tc := serverCreateParametersContainer{ServerCreationParameters: serverCreationParameters}\n\terr = misc.PostJSON(reqURL, computeService.authenticator, c, &sc)\n\treturn sc.CreateServer, err\n}", "func (p *AppPlugin) GRPCServer(_ *plugin.GRPCBroker, s *grpc.Server) error {\n\tpluginproto.RegisterNodeServer(s, NewServer(p.app))\n\treturn nil\n}", "func (s *Server) Register(namespace string, service Invoker) {\n\ts.services[namespace] = service\n}", "func (s *Service) Register(server *grpc.Server) {\n\tcriapi.RegisterImageServiceServer(server, s)\n\tcriapi.RegisterRuntimeServiceServer(server, s)\n}", "func (s *PoolServer) Register(grpcServer *grpc.Server) {\n\tgoblinpb.RegisterGoblinServiceServer(grpcServer, &server{\n\t\tpool: s,\n\t})\n}", "func RegisterUserServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server UserServiceServer) error {\n\n\tmux.Handle(\"POST\", pattern_UserService_Login_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := 
runtime.AnnotateIncomingContext(ctx, mux, req, \"/realworld.UserService/Login\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_UserService_Login_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_UserService_Login_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_UserService_Create_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/realworld.UserService/Create\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_UserService_Create_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_UserService_Create_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_UserService_GetCurrent_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := 
context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/realworld.UserService/GetCurrent\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_UserService_GetCurrent_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_UserService_GetCurrent_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PUT\", pattern_UserService_UpdateProfile_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/realworld.UserService/UpdateProfile\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_UserService_UpdateProfile_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, 
err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_UserService_UpdateProfile_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PUT\", pattern_UserService_UpdatePassword_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/realworld.UserService/UpdatePassword\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_UserService_UpdatePassword_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_UserService_UpdatePassword_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_UserService_GetProfile_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/realworld.UserService/GetProfile\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_UserService_GetProfile_0(rctx, 
inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_UserService_GetProfile_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_UserService_FollowUser_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/realworld.UserService/FollowUser\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_UserService_FollowUser_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_UserService_FollowUser_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"DELETE\", pattern_UserService_UnfollowUser_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := 
runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, \"/realworld.UserService/UnfollowUser\")\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_UserService_UnfollowUser_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_UserService_UnfollowUser_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\treturn nil\n}", "func NewServer(s Service) pb.BookingServiceServer {\n\treturn &grpcServer{s, pb.UnimplementedBookingServiceServer{}}\n}", "func NewGRPCServer(srv *grpc.Server, backend api.Backend) {\n\ts := &grpcServer{\n\t\tbackend: backend,\n\t}\n\tpb.RegisterEntityRegistryServer(srv, s)\n\tpb.RegisterRuntimeRegistryServer(srv, s)\n}", "func RegisterDatastreamServer(s *grpc.Server, srv DatastreamServer) {\n\tsrc.RegisterDatastreamServer(s, srv)\n}", "func NewBankServiceServer() bank_v1.BankServiceServer {\n\tdb := newFirestoreDatabase(nil) // todo: init firestore client\n\tservice := newService(db)\n\tgrpcApi := newGrpcApi(service)\n\treturn grpcApi\n}", "func RegisterConfigServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server ConfigServiceServer) error {\n\n\tmux.Handle(\"GET\", pattern_ConfigService_GetConfig_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := 
runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ConfigService_GetConfig_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ConfigService_GetConfig_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PUT\", pattern_ConfigService_SetConfig_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ConfigService_SetConfig_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ConfigService_SetConfig_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_ConfigService_GetToken_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer 
cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ConfigService_GetToken_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ConfigService_GetToken_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_ConfigService_GetDefaultToken_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ConfigService_GetDefaultToken_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ConfigService_GetDefaultToken_0(ctx, mux, outboundMarshaler, w, req, resp, 
mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_ConfigService_CreateTokenType_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ConfigService_CreateTokenType_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ConfigService_CreateTokenType_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_ConfigService_GetTokenType_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ConfigService_GetTokenType_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx 
= runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ConfigService_GetTokenType_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_ConfigService_ListTokenTypes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ConfigService_ListTokenTypes_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ConfigService_ListTokenTypes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"PUT\", pattern_ConfigService_UpdateTokenType_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, 
err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ConfigService_UpdateTokenType_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ConfigService_UpdateTokenType_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"DELETE\", pattern_ConfigService_DeleteTokenType_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ConfigService_DeleteTokenType_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ConfigService_DeleteTokenType_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", pattern_ConfigService_GetTokenGroups_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = 
grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ConfigService_GetTokenGroups_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ConfigService_GetTokenGroups_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"POST\", pattern_ConfigService_CreateTokenGroup_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ConfigService_CreateTokenGroup_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ConfigService_CreateTokenGroup_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"GET\", 
pattern_ConfigService_GetTokenGroup_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ConfigService_GetTokenGroup_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ConfigService_GetTokenGroup_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\tmux.Handle(\"DELETE\", pattern_ConfigService_DeleteTokenGroup_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\t\tctx, cancel := context.WithCancel(req.Context())\n\t\tdefer cancel()\n\t\tvar stream runtime.ServerTransportStream\n\t\tctx = grpc.NewContextWithServerTransportStream(ctx, &stream)\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\t\tif err != nil {\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\t\tresp, md, err := local_request_ConfigService_DeleteTokenGroup_0(rctx, inboundMarshaler, server, req, pathParams)\n\t\tmd.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\t\tif err != nil 
{\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\t\t\treturn\n\t\t}\n\n\t\tforward_ConfigService_DeleteTokenGroup_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\t})\n\n\treturn nil\n}" ]
[ "0.6962951", "0.6499365", "0.64940494", "0.62961686", "0.6150645", "0.61200964", "0.609665", "0.60660225", "0.593484", "0.5928594", "0.59166455", "0.59086245", "0.58979774", "0.58734566", "0.58527607", "0.5834305", "0.5828885", "0.5793394", "0.5770595", "0.57611436", "0.57187974", "0.571382", "0.5702104", "0.56861943", "0.56700337", "0.5665839", "0.56072164", "0.5592556", "0.5592398", "0.5590062", "0.5580837", "0.55647457", "0.55453646", "0.5534581", "0.5526083", "0.55256915", "0.55102664", "0.5499799", "0.5476021", "0.5459634", "0.54590094", "0.5450816", "0.543493", "0.5430513", "0.5420254", "0.54074335", "0.53980464", "0.53676736", "0.536534", "0.5365197", "0.5364181", "0.5358532", "0.5347454", "0.5343528", "0.5340167", "0.5337495", "0.53354806", "0.53242695", "0.5319054", "0.5314826", "0.5312495", "0.5301942", "0.53002125", "0.5299455", "0.5297104", "0.5286337", "0.52807873", "0.5270254", "0.52556074", "0.5248179", "0.52444863", "0.5243413", "0.5227637", "0.52249616", "0.52246207", "0.5195723", "0.5189531", "0.5176711", "0.5175476", "0.5170151", "0.51668674", "0.5163313", "0.5162564", "0.5151734", "0.51486117", "0.5142371", "0.5134495", "0.51334715", "0.5133326", "0.51260954", "0.5124962", "0.5117028", "0.511608", "0.50994325", "0.5099115", "0.5097099", "0.5094383", "0.508985", "0.5089332", "0.5087937" ]
0.876372
0
parseFunctions ... Reads a parsers.of lines and parses Function structs from it.
func (i Interface) parseFunctions(contentLines []string) []Function { var functions []Function for _, line := range contentLines { if isPureVirtualDefinition(line) { newFunction := NewFunction(line) functions = append(functions, *newFunction) } } return functions }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func ParseGo(code string) (functions map[uint64]*function.Function) {\n\n\tcodeLines := strings.Split(code, \"\\n\")\n\n\tfunctions = make(map[uint64]*function.Function)\n\n\tvar (\n\t\tstartLine uint64\n\t\tendLine uint64\n\t\tcomment string\n\t\tfunctionContent string\n\t\tstate = commentSearch\n\t)\n\n\tfor idx, line := range codeLines {\n\t\tlineIdx := uint64(idx + 1)\n\t\t// Searching for comment or \"func\"/\"type\" keywords\n\t\tstrings.ReplaceAll(line, \"\\r\", \"\")\n\n\t\t// We found a comment. Transition state to commentStart\n\t\tif strings.HasPrefix(line, \"//\") && state != commentStart {\n\t\t\tstate = commentStart\n\t\t\tstartLine = lineIdx\n\n\t\t} else if strings.Contains(line, \"func\") || strings.Contains(line, \"type\") {\n\n\t\t\t// we found the function keyword so we transition to funcStart state\n\t\t\tif state == commentSearch {\n\t\t\t\t// If we're coming from commentSearch, that means that we didn't have a comment so we set startLine to idx\n\t\t\t\tstartLine = lineIdx\n\n\t\t\t}\n\t\t\t// otherwise, we're coming from commentStart, that means that we had a comment so we leave startLine as it is\n\t\t\tstate = funcStart\n\t\t} else if strings.HasPrefix(line, \"}\") {\n\t\t\tstate = funcEnd\n\t\t\tendLine = lineIdx\n\n\t\t} else if !(strings.HasPrefix(line, \"//\")) && state != funcStart {\n\t\t\tstate = commentSearch\n\t\t\tcomment = \"\"\n\t\t\tstartLine = 0\n\t\t\tendLine = 0\n\n\t\t}\n\n\t\tswitch state {\n\t\tcase commentSearch:\n\t\t\tcontinue\n\t\tcase commentStart:\n\t\t\tcomment += fmt.Sprintf(\"%v\\n\", line)\n\t\tcase funcStart:\n\t\t\tfunctionContent += fmt.Sprintf(\"%v\\n\", line)\n\n\t\tcase funcEnd:\n\t\t\t// add the closing brace\n\t\t\tfunctionContent += fmt.Sprintf(\"%v\\n\", line)\n\t\t\tendLine = uint64(idx)\n\n\t\t\t// create a new function object with the information we got\n\t\t\tf := function.NewFunction(comment, functionContent, \"noNameYet\", 0, startLine, endLine)\n\n\t\t\t// add that to our 
map\n\t\t\tfunctions[uint64(f.FuncID)] = f\n\n\t\t\t// reset our state machine\n\t\t\tstartLine = 0\n\t\t\tcomment = \"\"\n\t\t\tfunctionContent = \"\"\n\t\t\tstate = commentSearch\n\n\t\tdefault:\n\t\t\tcontinue\n\t\t}\n\n\t}\n\n\treturn\n}", "func parseFunction(function *ast.FuncDecl, fileContext *fileContext) {\n\tfunctionName := addFunctionNode(function, fileContext)\n\n\tparseParameterTypes(function, functionName, fileContext)\n\n\tparseResultTypes(function, functionName, fileContext)\n}", "func ParseC(code string) (functions map[uint64]*function.Function) {\n\n\tcodeLines := strings.Split(code, \"\\n\")\n\tfunctions = make(map[uint64]*function.Function)\n\n\tvar (\n\t\tstartLine uint64\n\t\tendLine uint64\n\t\tcomment string\n\t\tfunctionContent string\n\t\tstate = commentSearch\n\t)\n\n\tfor idx, line := range codeLines {\n\t\tif strings.HasPrefix(line, \"//\") {\n\t\t\tstate = commentStart\n\t\t} else if cFuncMatch.MatchString(line) ||\n\t\t\t(strings.Contains(line, \"template\") && strings.Contains(line, \"typename\")) {\n\t\t\tif state == commentSearch {\n\t\t\t\t// If we're coming from commentSearch, that means that we didn't have a comment so we set startLine to idx\n\t\t\t\tstartLine = uint64(idx + 1)\n\t\t\t}\n\t\t\tstate = funcStart\n\t\t} else if strings.Contains(line, \"struct\") && strings.Contains(line, \"{\") {\n\t\t\tif state == commentSearch {\n\t\t\t\tstartLine = uint64(idx + 1)\n\t\t\t}\n\t\t\tstate = funcStart\n\t\t} else if strings.HasPrefix(line, \"}\") {\n\t\t\tstate = funcEnd\n\t\t} else if !(strings.HasPrefix(line, \"//\")) && state != funcStart {\n\t\t\tstate = commentSearch\n\t\t\tcomment = \"\"\n\t\t}\n\n\t\tswitch state {\n\t\tcase commentSearch:\n\t\t\tcontinue\n\t\tcase commentStart:\n\t\t\tstartLine = uint64(idx + 1)\n\t\t\tcomment += fmt.Sprintf(\"%s\\n\", line)\n\t\tcase funcStart:\n\t\t\tfunctionContent += fmt.Sprintf(\"%v\\n\", line)\n\t\tcase funcEnd:\n\t\t\tendLine = uint64(idx + 1)\n\t\t\t// add the closing 
brace\n\t\t\tfunctionContent += fmt.Sprintf(\"%v\\n\", line)\n\t\t\t// create a new function object with the information we got\n\t\t\tf := function.NewFunction(comment, functionContent, \"noNameYet\", 0, startLine, endLine)\n\t\t\t// add that to our map\n\t\t\tfunctions[uint64(f.FuncID)] = f\n\t\t\t// reset our state machine\n\t\t\tstartLine = 0\n\t\t\tcomment = \"\"\n\t\t\tfunctionContent = \"\"\n\t\t\tstate = commentSearch\n\n\t\tdefault:\n\t\t\tcontinue\n\t\t}\n\t}\n\treturn\n}", "func ParseFunction(s string) (prefix string, funcname string, f []string, r []string, err error) {\n\tdefer func() {\n\t\tif len(f) == 1 && f[0] == \"void\" {\n\t\t\tf = nil\n\t\t}\n\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(\"cannot parse function '%s' : %v\", s, err)\n\t\t} else {\n\t\t\tprefix = strings.TrimSpace(prefix)\n\t\t\tfuncname = strings.TrimSpace(funcname)\n\t\t\tfor i := range r {\n\t\t\t\tr[i] = strings.TrimSpace(r[i])\n\t\t\t}\n\t\t\tfor i := range f {\n\t\t\t\tf[i] = strings.TrimSpace(f[i])\n\t\t\t}\n\t\t}\n\t}()\n\n\t// remove specific attribute for function longjmp\n\ts = strings.Replace(s, \"__attribute__((noreturn))\", \"\", -1)\n\n\ts = strings.TrimSpace(s)\n\tif !IsFunction(s) {\n\t\terr = fmt.Errorf(\"is not function : %s\", s)\n\t\treturn\n\t}\n\tvar returns string\n\tvar arguments string\n\t{\n\t\t// Example of function types :\n\t\t// int (*)(int, float)\n\t\t// int (int, float)\n\t\t// int (*)(int (*)(int))\n\t\t// void (*(*)(int *, void *, const char *))(void)\n\t\tif s[len(s)-1] != ')' {\n\t\t\terr = fmt.Errorf(\"function type |%s| haven't last symbol ')'\", s)\n\t\t\treturn\n\t\t}\n\t\tcounter := 1\n\t\tvar pos int\n\t\tfor i := len(s) - 2; i >= 0; i-- {\n\t\t\tif i == 0 {\n\t\t\t\terr = fmt.Errorf(\"don't found '(' in type : %s\", s)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif s[i] == ')' {\n\t\t\t\tcounter++\n\t\t\t}\n\t\t\tif s[i] == '(' {\n\t\t\t\tcounter--\n\t\t\t}\n\t\t\tif counter == 0 {\n\t\t\t\tpos = i\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\t// s[:pos] = 
`speed_t cfgetospeed`\n\t\tif unicode.IsNumber(rune(s[pos-1])) || unicode.IsLetter(rune(s[pos-1])) {\n\t\t\tfor i := pos - 1; i >= 0; i-- {\n\t\t\t\tif s[i] == ' ' {\n\t\t\t\t\tfuncname = s[i+1 : pos]\n\t\t\t\t\treturns = strings.TrimSpace(s[:i])\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\treturns = strings.TrimSpace(s[:pos])\n\t\t}\n\t\targuments = strings.TrimSpace(s[pos:])\n\t}\n\tif arguments == \"\" {\n\t\terr = fmt.Errorf(\"cannot parse (right part is nil) : %v\", s)\n\t\treturn\n\t}\n\t// separate fields of arguments\n\t{\n\t\tpos := 1\n\t\tcounter := 0\n\t\tfor i := 1; i < len(arguments)-1; i++ {\n\t\t\tif arguments[i] == '(' {\n\t\t\t\tcounter++\n\t\t\t}\n\t\t\tif arguments[i] == ')' {\n\t\t\t\tcounter--\n\t\t\t}\n\t\t\tif counter == 0 && arguments[i] == ',' {\n\t\t\t\tf = append(f, strings.TrimSpace(arguments[pos:i]))\n\t\t\t\tpos = i + 1\n\t\t\t}\n\t\t}\n\t\tf = append(f, strings.TrimSpace(arguments[pos:len(arguments)-1]))\n\t}\n\n\t// returns\n\t// Example: __ssize_t\n\tif returns[len(returns)-1] != ')' {\n\t\tr = append(r, returns)\n\t\treturn\n\t}\n\n\t// Example: void ( *(*)(int *, void *, char *))\n\t// ------- --------------------------- return type\n\t// == prefix\n\t// ++++++++++++++++++++++++++++++ block\n\t// return type : void (*)(int *, void *, char *)\n\t// prefix : *\n\t// Find the block\n\tvar counter int\n\tvar position int\n\tfor i := len(returns) - 1; i >= 0; i-- {\n\t\tif returns[i] == ')' {\n\t\t\tcounter++\n\t\t}\n\t\tif returns[i] == '(' {\n\t\t\tcounter--\n\t\t}\n\t\tif counter == 0 {\n\t\t\tposition = i\n\t\t\tbreak\n\t\t}\n\t}\n\tblock := string([]byte(returns[position:]))\n\treturns = returns[:position]\n\n\t// Examples returns:\n\t// int (*)\n\t// char *(*)\n\t// block is : (*)\n\tif block == \"(*)\" {\n\t\tr = append(r, returns)\n\t\treturn\n\t}\n\n\tindex := strings.Index(block, \"(*)\")\n\tif index < 0 {\n\t\tif strings.Count(block, \"(\") == 1 {\n\t\t\t// Examples returns:\n\t\t\t// int ( * [2])\n\t\t\t// 
------ return type\n\t\t\t// ====== prefix\n\t\t\t// ++++++++ block\n\t\t\tbBlock := []byte(block)\n\t\t\tfor i := 0; i < len(bBlock); i++ {\n\t\t\t\tswitch bBlock[i] {\n\t\t\t\tcase '(', ')':\n\t\t\t\t\tbBlock[i] = ' '\n\t\t\t\t}\n\t\t\t}\n\t\t\tbBlock = bytes.Replace(bBlock, []byte(\"*\"), []byte(\"\"), 1)\n\t\t\tprefix = string(bBlock)\n\t\t\tr = append(r, returns)\n\t\t\treturn\n\t\t}\n\t\t// void (*(int *, void *, const char *))\n\t\t// ++++++++++++++++++++++++++++++++ block\n\t\tblock = block[1 : len(block)-1]\n\t\tindex := strings.Index(block, \"(\")\n\t\tif index < 0 {\n\t\t\terr = fmt.Errorf(\"cannot found '(' in block\")\n\t\t\treturn\n\t\t}\n\t\treturns = returns + block[index:]\n\t\tprefix = block[:index]\n\t\tif strings.Contains(prefix, \"*\") {\n\t\t\tprefix = strings.Replace(prefix, \"*\", \"\", 1)\n\t\t} else {\n\t\t\terr = fmt.Errorf(\"undefined situation\")\n\t\t\treturn\n\t\t}\n\t\tr = append(r, returns)\n\t\treturn\n\t}\n\tif len(block)-1 > index+3 && block[index+3] == '(' {\n\t\t// Examples returns:\n\t\t// void ( *(*)(int *, void *, char *))\n\t\t// ++++++++++++++++++++++++++++++ block\n\t\t// ^^ check this\n\t\tblock = strings.Replace(block, \"(*)\", \"\", 1)\n\t\tblock = block[1 : len(block)-1]\n\t\tindex := strings.Index(block, \"(\")\n\t\tif index < 0 {\n\t\t\terr = fmt.Errorf(\"cannot found '(' in block\")\n\t\t\treturn\n\t\t}\n\n\t\treturns = returns + block[index:]\n\t\t// example of block[:index]\n\t\t// `*signal`\n\t\t// `* signal`\n\t\tif pr := strings.TrimSpace(block[:index]); unicode.IsLetter(rune(pr[len(pr)-1])) ||\n\t\t\tunicode.IsNumber(rune(pr[len(pr)-1])) {\n\t\t\tpr = strings.Replace(pr, \"*\", \" * \", -1)\n\t\t\tfor i := len(pr) - 1; i >= 0; i-- {\n\t\t\t\tif unicode.IsLetter(rune(pr[i])) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tif unicode.IsNumber(rune(pr[i])) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tprefix = pr[:i]\n\t\t\t\tfuncname = pr[i:]\n\t\t\t\tbreak\n\t\t\t}\n\t\t} else {\n\t\t\tprefix = 
block[:index]\n\t\t}\n\n\t\tr = append(r, returns)\n\t\treturn\n\t}\n\n\t// Examples returns:\n\t// int ( *( *(*)))\n\t// ----- return type\n\t// ========= prefix\n\t// +++++++++++ block\n\tbBlock := []byte(block)\n\tfor i := 0; i < len(bBlock); i++ {\n\t\tswitch bBlock[i] {\n\t\tcase '(', ')':\n\t\t\tbBlock[i] = ' '\n\t\t}\n\t}\n\tbBlock = bytes.Replace(bBlock, []byte(\"*\"), []byte(\"\"), 1)\n\tprefix = string(bBlock)\n\tr = append(r, returns)\n\n\treturn\n}", "func (p *PSParser) parseFunction() (*PSProgram, error) {\n\tc, _ := p.reader.ReadByte()\n\tif c != '{' {\n\t\treturn nil, errors.New(\"invalid function\")\n\t}\n\n\tfunction := NewPSProgram()\n\n\tfor {\n\t\tp.skipSpaces()\n\t\tbb, err := p.reader.Peek(2)\n\t\tif err != nil {\n\t\t\tif err == io.EOF {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\treturn nil, err\n\t\t}\n\n\t\tcommon.Log.Trace(\"Peek string: %s\", string(bb))\n\t\t// Determine type.\n\t\tif bb[0] == '}' {\n\t\t\tcommon.Log.Trace(\"EOF function\")\n\t\t\tp.reader.ReadByte()\n\t\t\tbreak\n\t\t} else if bb[0] == '{' {\n\t\t\tcommon.Log.Trace(\"Function!\")\n\t\t\tinlineF, err := p.parseFunction()\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tfunction.Append(inlineF)\n\t\t} else if pdfcore.IsDecimalDigit(bb[0]) || (bb[0] == '-' && pdfcore.IsDecimalDigit(bb[1])) {\n\t\t\tcommon.Log.Trace(\"->Number!\")\n\t\t\tnumber, err := p.parseNumber()\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tfunction.Append(number)\n\t\t} else {\n\t\t\tcommon.Log.Trace(\"->Operand or bool?\")\n\t\t\t// Let's peek farther to find out.\n\t\t\tbb, _ = p.reader.Peek(5)\n\t\t\tpeekStr := string(bb)\n\t\t\tcommon.Log.Trace(\"Peek str: %s\", peekStr)\n\n\t\t\tif (len(peekStr) > 4) && (peekStr[:5] == \"false\") {\n\t\t\t\tb, err := p.parseBool()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tfunction.Append(b)\n\t\t\t} else if (len(peekStr) > 3) && (peekStr[:4] == \"true\") {\n\t\t\t\tb, err := p.parseBool()\n\t\t\t\tif err != nil 
{\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tfunction.Append(b)\n\t\t\t} else {\n\t\t\t\toperand, err := p.parseOperand()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tfunction.Append(operand)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn function, nil\n}", "func ParseFunctions(filePath string) *TemplateValues {\n\tfset := token.NewFileSet()\n\tf, err := parser.ParseFile(fset, filePath, nil, parser.ParseComments)\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tvar funcInfos []FunctionInfo\n\tpackageName := fmt.Sprint(f.Name)\n\tcontainsMux := false\n\n\tfor _, decl := range f.Decls {\n\t\tswitch t := decl.(type) {\n\t\tcase *ast.FuncDecl:\n\t\t\tresponseWriterParamExists := false\n\t\t\trequestParamExists := false\n\t\t\tfor _, param := range t.Type.Params.List {\n\t\t\t\tswitch t2 := param.Type.(type) {\n\t\t\t\tcase *ast.SelectorExpr:\n\t\t\t\t\tparamName := fmt.Sprint(t2.Sel.Name)\n\t\t\t\t\tif paramName == \"ResponseWriter\" {\n\t\t\t\t\t\tresponseWriterParamExists = true\n\t\t\t\t\t}\n\t\t\t\tcase *ast.StarExpr:\n\t\t\t\t\tparamName := fmt.Sprint(t2.X)\n\t\t\t\t\tif paramName == \"&{http Request}\" {\n\t\t\t\t\t\trequestParamExists = true\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif responseWriterParamExists && requestParamExists {\n\t\t\t\tmuxVars := getMuxVars(t)\n\t\t\t\tif len(muxVars) > 0 {\n\t\t\t\t\tcontainsMux = true\n\t\t\t\t}\n\t\t\t\tfuncInfo := FunctionInfo{\n\t\t\t\t\tName: fmt.Sprint(t.Name),\n\t\t\t\t\tMuxVars: muxVars,\n\t\t\t\t}\n\t\t\t\tfuncInfos = append(funcInfos, funcInfo)\n\t\t\t}\n\t\t}\n\t}\n\ttemplateValues := TemplateValues{\n\t\tFuncInfo: funcInfos,\n\t\tPackageName: packageName,\n\t\tContainsMux: containsMux,\n\t}\n\treturn &templateValues\n}", "func (p *parser) parseFunction(typ uint8) (expr *tree.FunctionExpr, names tree.FuncNameList) {\n\texpr = &tree.FunctionExpr{}\n\texpr.FuncToken = p.expectToken(token.FUNCTION)\n\tif typ > funcExpr {\n\t\tnames.Items = append(names.Items, p.expectToken(token.NAME))\n\t\tif 
typ > funcLocal {\n\t\t\tfor p.tok == token.DOT {\n\t\t\t\tnames.Seps = append(names.Seps, p.tokenNext())\n\t\t\t\tnames.Items = append(names.Items, p.expectToken(token.NAME))\n\t\t\t}\n\t\t\tif p.tok == token.COLON {\n\t\t\t\tnames.ColonToken = p.tokenNext()\n\t\t\t\tnames.MethodToken = p.expectToken(token.NAME)\n\t\t\t}\n\t\t}\n\t}\n\texpr.LParenToken = p.expectToken(token.LPAREN)\n\tif p.tok == token.NAME {\n\t\texpr.Params = &tree.NameList{Items: []tree.Token{p.expectToken(token.NAME)}}\n\t\tfor p.tok == token.COMMA {\n\t\t\tsepToken := p.tokenNext()\n\t\t\tif p.tok == token.VARARG {\n\t\t\t\texpr.VarArgSepToken = sepToken\n\t\t\t\texpr.VarArgToken = p.tokenNext()\n\t\t\t\tbreak\n\t\t\t}\n\t\t\texpr.Params.Seps = append(expr.Params.Seps, sepToken)\n\t\t\texpr.Params.Items = append(expr.Params.Items, p.expectToken(token.NAME))\n\t\t}\n\t} else if p.tok == token.VARARG {\n\t\texpr.VarArgToken = p.tokenNext()\n\t}\n\texpr.RParenToken = p.expectToken(token.RPAREN)\n\texpr.Body = p.parseBlockBody(token.END)\n\texpr.EndToken = p.expectToken(token.END)\n\treturn expr, names\n}", "func ParseFunctionDefinition(rd io.Reader) (*FunctionDefinition, error) {\n\ts := &scanner.Scanner{Mode: scanner.GoTokens}\n\ts.Init(rd)\n\tstate := StateInit\n\tvar f FunctionDefinition\n\ttokens := make([]string, 0)\n\tfor {\n\t\tr := s.Scan()\n\t\tif r == scanner.EOF {\n\t\t\tbreak\n\t\t}\n\t\tswitch state {\n\t\tcase StateInit:\n\t\t\tswitch r {\n\t\t\tcase '(':\n\t\t\t\tif len(tokens) < 2 {\n\t\t\t\t\treturn nil, fmt.Errorf(\"function definition needs at least a name and a type\")\n\t\t\t\t}\n\t\t\t\tf.Name = tokens[0]\n\t\t\t\tvar typ string\n\t\t\t\tfor _, t := range tokens[1:] {\n\t\t\t\t\tswitch t {\n\t\t\t\t\t// ignore WINAPI calling convention\n\t\t\t\t\tcase \"WINAPI\":\n\t\t\t\t\tdefault:\n\t\t\t\t\t\ttyp = t\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tf.Type = translate(typ)\n\t\t\t\tif f.Type == \"\" {\n\t\t\t\t\treturn nil, fmt.Errorf(\"did not find translation type for %s\", 
typ)\n\t\t\t\t}\n\t\t\t\ttokens = tokens[0:0]\n\t\t\t\tstate = StateParam\n\t\t\tcase scanner.Ident:\n\t\t\t\ttokens = append([]string{s.TokenText()}, tokens...)\n\t\t\tdefault:\n\t\t\t\treturn nil, fmt.Errorf(\"parse error: got %s\", scanner.TokenString(r))\n\t\t\t}\n\t\tcase StateParam:\n\t\t\tswitch r {\n\t\t\tcase scanner.Ident:\n\t\t\t\ttokens = append([]string{s.TokenText()}, tokens...)\n\t\t\tcase ',', ')':\n\t\t\t\tif len(tokens) < 2 {\n\t\t\t\t\treturn nil, fmt.Errorf(\"function parameter needs at least a name and a type\")\n\t\t\t\t}\n\n\t\t\t\tp := FunctionParameterDefinition{Name: tokens[0]}\n\n\t\t\t\tvar typ string\n\t\t\t\tfor _, t := range tokens[1:] {\n\t\t\t\t\tswitch t {\n\t\t\t\t\tcase \"_In_\":\n\t\t\t\t\t\tp.Direction = DirectionIn\n\t\t\t\t\tcase \"_In_opt_\":\n\t\t\t\t\t\tp.Direction = DirectionInOpt\n\t\t\t\t\tcase \"_Out_\":\n\t\t\t\t\t\tp.Direction = DirectionOut\n\t\t\t\t\tcase \"_Out_opt_\":\n\t\t\t\t\t\tp.Direction = DirectionOutOpt\n\t\t\t\t\tcase \"_Inout_\":\n\t\t\t\t\t\tp.Direction = DirectionInOut\n\t\t\t\t\tcase \"_Inout_opt_\":\n\t\t\t\t\t\tp.Direction = DirectionInOutOpt\n\t\t\t\t\tcase \"_Reserved_\":\n\t\t\t\t\t\tp.Direction = DirectionReserved\n\t\t\t\t\tdefault:\n\t\t\t\t\t\ttyp = t\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tp.Type = translate(typ)\n\t\t\t\tif p.Type == \"\" {\n\t\t\t\t\treturn nil, fmt.Errorf(\"did not find translation type for %s\", typ)\n\t\t\t\t}\n\t\t\t\tf.Params = append(f.Params, p)\n\t\t\t\ttokens = tokens[0:0]\n\t\t\t\tif r == ')' {\n\t\t\t\t\tstate = StateExit\n\t\t\t\t}\n\t\t\t}\n\t\tcase StateExit:\n\t\t\tswitch r {\n\t\t\tcase ';':\n\t\t\t\tstate = StateInit\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\tif state != StateInit {\n\t\treturn nil, fmt.Errorf(\"parse error: wrong state %d\", state)\n\t}\n\treturn &f, nil\n}", "func parseFunction(node *node32) (*FunctionDef, error) {\n\tvar err error\n\tfunction := &FunctionDef{}\n\n\tfunction.SetToken(&node.token32)\n\n\tfunction.returnType, err = 
parseType(nextNode(node, ruleTYPE).up)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfunction.ident = nextNode(node, ruleIDENT).match\n\n\tparamListNode := nextNode(node, rulePARAMLIST)\n\t// argument list may be missing with zero arguments\n\tif paramListNode != nil {\n\t\tfor pnode := range nodeRange(paramListNode.up) {\n\t\t\tif pnode.pegRule == rulePARAM {\n\t\t\t\tvar param *FunctionParam\n\t\t\t\tparam, err = parseParam(pnode.up)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tfunction.params = append(function.params, param)\n\t\t\t}\n\t\t}\n\t}\n\n\tfunction.body, err = parseStatement(nextNode(node, ruleSTAT).up)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn function, nil\n}", "func (function *function) parse() (err error) {\n\tsignatureFinder := regexp.MustCompile(`(?is)CREATE(?:\\s+OR\\s+REPLACE)?\\s+FUNCTION\\s+(\\S+?)\\((.*?)\\)`)\n\tsubMatches := signatureFinder.FindStringSubmatch(function.definition)\n\n\tif len(subMatches) < 3 {\n\t\treturn fmt.Errorf(\"Can't find a function in %s\", function.path)\n\t}\n\n\tfunction.name = subMatches[1]\n\n\tif function.parseSignature {\n\t\tfunction.signature = subMatches[2]\n\t} else {\n\t\tfunction.signature, function.previousExists, err = function.previousSignature()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\terr = function.removeDefaultFromSignature()\n\tif err != nil {\n\t\treturn\n\t}\n\n\treturn\n}", "func (function *Function) Parse() (err error) {\n\tsignatureFinder := regexp.MustCompile(`(?is)CREATE(?:\\s+OR\\s+REPLACE)?\\s+FUNCTION\\s+(\\S+?)\\((.*?)\\)`)\n\tsubMatches := signatureFinder.FindStringSubmatch(function.Definition)\n\n\tif len(subMatches) < 3 {\n\t\treturn fmt.Errorf(\"Can't find a function in %s\", function.Path)\n\t}\n\n\tfunction.Name = subMatches[1]\n\n\tif function.ParseSignature {\n\t\tfunction.Signature = subMatches[2]\n\t} else {\n\t\tfunction.Signature, function.PreviousExists, err = function.previousSignature()\n\t\tif err != nil 
{\n\t\t\treturn err\n\t\t}\n\t}\n\n\terr = function.removeDefaultFromSignature()\n\tif err != nil {\n\t\treturn\n\t}\n\n\treturn\n}", "func (v *Function) Decode(sr stream.Reader) error {\n\n\tnameIsSet := false\n\tthriftNameIsSet := false\n\targumentsIsSet := false\n\n\tif err := sr.ReadStructBegin(); err != nil {\n\t\treturn err\n\t}\n\n\tfh, ok, err := sr.ReadFieldBegin()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfor ok {\n\t\tswitch {\n\t\tcase fh.ID == 1 && fh.Type == wire.TBinary:\n\t\t\tv.Name, err = sr.ReadString()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tnameIsSet = true\n\t\tcase fh.ID == 2 && fh.Type == wire.TBinary:\n\t\t\tv.ThriftName, err = sr.ReadString()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tthriftNameIsSet = true\n\t\tcase fh.ID == 3 && fh.Type == wire.TList:\n\t\t\tv.Arguments, err = _List_Argument_Decode(sr)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\targumentsIsSet = true\n\t\tcase fh.ID == 4 && fh.Type == wire.TStruct:\n\t\t\tv.ReturnType, err = _Type_Decode(sr)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\tcase fh.ID == 5 && fh.Type == wire.TList:\n\t\t\tv.Exceptions, err = _List_Argument_Decode(sr)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\tcase fh.ID == 6 && fh.Type == wire.TBool:\n\t\t\tvar x bool\n\t\t\tx, err = sr.ReadBool()\n\t\t\tv.OneWay = &x\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\tcase fh.ID == 7 && fh.Type == wire.TMap:\n\t\t\tv.Annotations, err = _Map_String_String_Decode(sr)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\tdefault:\n\t\t\tif err := sr.Skip(fh.Type); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t\tif err := sr.ReadFieldEnd(); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif fh, ok, err = sr.ReadFieldBegin(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif err := sr.ReadStructEnd(); err != nil {\n\t\treturn err\n\t}\n\n\tif !nameIsSet {\n\t\treturn errors.New(\"field Name of Function is 
required\")\n\t}\n\n\tif !thriftNameIsSet {\n\t\treturn errors.New(\"field ThriftName of Function is required\")\n\t}\n\n\tif !argumentsIsSet {\n\t\treturn errors.New(\"field Arguments of Function is required\")\n\t}\n\n\treturn nil\n}", "func Parse(tokens *list.List, funcDefs map[string]int) (ParseTreeRoot, error) {\r\n\r\n\ttoken := tokens.Front()\r\n\ttree := ParseTreeRoot{make([]ParseTree, 0)}\r\n\r\n\tfor token != nil {\r\n\t\tif tokenID(token) != TokenIdentifier {\r\n\t\t\treturn tree, fmt.Errorf(\"\\\"unit\\\", \\\"assembly\\\", \\\"enum\\\", \\\"summarize\\\", or \\\"solve\\\" expected but \\\"%s\\\" given at position %d\", tokenContent(token), tokenPos(token))\r\n\t\t}\r\n\r\n\t\tswitch tokenContent(token) {\r\n\t\tcase \"unit\":\r\n\t\t\t_token, unit, err := parseUnit(token.Next(), tree, funcDefs)\r\n\t\t\ttoken = _token\r\n\t\t\tif err != nil {\r\n\t\t\t\treturn tree, err\r\n\t\t\t}\r\n\t\t\ttree.AddUnit(unit)\r\n\t\t\tbreak\r\n\t\tcase \"enum\":\r\n\t\t\t_token, enum, err := parseEnum(token.Next(), tree)\r\n\t\t\ttoken = _token\r\n\t\t\tif err != nil {\r\n\t\t\t\treturn tree, err\r\n\t\t\t}\r\n\t\t\ttree.AddEnum(enum)\r\n\t\t\tbreak\r\n\t\tcase \"assembly\":\r\n\t\t\t_token, assembly, err := parseAssembly(token.Next(), tree)\r\n\t\t\ttoken = _token\r\n\t\t\tif err != nil {\r\n\t\t\t\treturn tree, err\r\n\t\t\t}\r\n\t\t\ttree.AddAssembly(assembly)\r\n\t\t\tbreak\r\n\t\tcase \"summarize\":\r\n\t\t\t_token, summarize, err := parseSummarize(token.Next(), tree)\r\n\t\t\ttoken = _token\r\n\t\t\tif err != nil {\r\n\t\t\t\treturn tree, err\r\n\t\t\t}\r\n\t\t\ttree.AddSummarize(summarize)\r\n\t\t\tbreak\r\n\t\tcase \"solve\":\r\n\t\t\t_token, solve, err := parseSolve(token.Next(), tree)\r\n\t\t\ttoken = _token\r\n\t\t\tif err != nil {\r\n\t\t\t\treturn tree, err\r\n\t\t\t}\r\n\t\t\ttree.AddSolve(solve)\r\n\t\t\tbreak\r\n\t\t}\r\n\t\ttoken = token.Next()\r\n\t}\r\n\r\n\treturn tree, nil\r\n\r\n}", "func Parse() {\n\tok := true\n\tfor _, f := range funcs {\n\t\tok 
= f() && ok\n\t}\n\tif !ok {\n\t\tos.Exit(1)\n\t}\n}", "func loadFunctions() (err error) {\n\tsuccessfulCount := len(conf.functionFiles)\n\terrors := make([]string, 0)\n\tbypass := make(map[string]bool)\n\n\tfiles, err := resolveDependencies(conf.functionFiles, conf.sqlDirPath+\"functions\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfunctions := make([]*function, 0)\n\tfor i := len(files) - 1; i >= 0; i-- {\n\t\tfile := files[i]\n\t\tf := function{}\n\t\tf.path = file\n\t\tfunctions = append(functions, &f)\n\n\t\terr = downPass(&f, f.path)\n\t\tif err != nil {\n\t\t\tsuccessfulCount--\n\t\t\terrors = append(errors, fmt.Sprintf(\"%v\\n\", err))\n\t\t\tbypass[f.path] = true\n\t\t}\n\t}\n\n\tfor i := len(functions) - 1; i >= 0; i-- {\n\t\tf := functions[i]\n\t\tif _, ignore := bypass[f.path]; !ignore {\n\t\t\terr = upPass(f, f.path)\n\t\t\tif err != nil {\n\t\t\t\tsuccessfulCount--\n\t\t\t\terrors = append(errors, fmt.Sprintf(\"%v\\n\", err))\n\t\t\t}\n\t\t}\n\t}\n\n\treport(\"functions\", successfulCount, len(conf.functionFiles), errors)\n\n\treturn\n}", "func parse(text string, program *il.Program) error {\n\tp := &parser{\n\t\tscanner: newScanner(text),\n\t\terror: nil,\n\t\tprogram: program,\n\t}\n\n\tfor !p.scanner.end() {\n\t\tif !p.scanner.next() {\n\t\t\tbreak\n\t\t}\n\t\tif p.scanner.token == tkError {\n\t\t\tp.fail(\"Parse error.\")\n\t\t\tbreak\n\t\t}\n\t\tif !p.parseFunctionDef() {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif p.failed() {\n\t\treturn p.error\n\t}\n\n\treturn nil\n}", "func (p Program) lineToFunction(line string) Function {\n\tif len(line) < 3 {\n\t\treturn Function{}\n\t}\n\n\t// divide the function and the parameter\n\tdivided := strings.Split(line, \"(\")\n\n\t// name of the function at the position 0\n\tfunctionName := divided[0]\n\n\t// handle and split parameter\n\tparams := strings.Split(divided[1], \",\")\n\n\tparameter := []interface{}{}\n\tfor _, param := range params {\n\n\t\t// check length of the parameter\n\t\tif len(param) < 1 
{\n\t\t\tfmt.Println(\"drawlab: null parameter found at the function\", functionName)\n\t\t\tos.Exit(0)\n\t\t}\n\n\t\t// remove all whitespace and closed bracket\n\n\t\tparam = strings.ReplaceAll(param, \")\", \"\")\n\t\tparam = strings.ReplaceAll(param, \"\\n\", \"\")\n\n\t\t// check if first byte is a space\n\t\ttempParam := param\n\t\tif param[0] == ' ' {\n\t\t\ttempParam = param[1:]\n\t\t}\n\n\t\t// check if number\n\t\tn, err := strconv.Atoi(tempParam)\n\t\tif err != nil {\n\n\t\t\t// check if it's text for the text function\n\t\t\tif tempParam[0] == '\"' {\n\t\t\t\t// remove quotes\n\t\t\t\tparameter = append(parameter, tempParam[1:len(tempParam)-1])\n\n\t\t\t\t// check if it's variable\n\t\t\t} else if tempParam[0] == '#' {\n\n\t\t\t\t// remove '#'\n\t\t\t\tval := p.getVariable(tempParam[1:])\n\t\t\t\tparameter = append(parameter, val)\n\t\t\t}\n\t\t} else {\n\t\t\tparameter = append(parameter, n)\n\t\t}\n\t}\n\n\treturn Function{\n\t\tName: functionName,\n\t\tParameters: parameter,\n\t}\n}", "func ParseAndCallUpdateFunc(ctx context.Context, filePath string, updateFunc createOrUpdateFunc) error {\n\tjsonFile, err := os.Open(filePath)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"could not open file\")\n\t}\n\tdefer func() {\n\t\terr := jsonFile.Close()\n\t\tif err != nil {\n\t\t\tlog.Println(errors.Wrap(err, \"could not close the file\"))\n\t\t}\n\t}()\n\n\tdecoder := json.NewDecoder(jsonFile)\n\tt, err := decoder.Token()\n\tif delim, ok := t.(json.Delim); !ok || delim != '{' {\n\t\treturn errors.New(fmt.Sprintf(\"expected first token to be \\\"{\\\", got %s \", delim))\n\t}\n\tfor decoder.More() {\n\t\ttoken, err := decoder.Token()\n\t\ttokenStr, ok := token.(string)\n\t\tif !ok && tokenStr != strings.ToUpper(tokenStr) {\n\t\t\treturn errors.New(\"token must be an uppercase string\")\n\t\t}\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to get json token\")\n\t\t}\n\n\t\tfor decoder.More() {\n\t\t\tport := &pb.Port{}\n\t\t\terr = 
decoder.Decode(port)\n\t\t\tif err != nil {\n\t\t\t\tif err.Error() == \"not at beginning of value\" {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\treturn errors.Wrap(err, \"could not decode data\")\n\t\t\t}\n\t\t\t_, err := updateFunc(ctx, port)\n\t\t\tif err != nil {\n\t\t\t\treturn errors.Wrap(err, \"failed to call updateFunc\")\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func Parse(someTokens []lexer.Token) []Node {\n\tnodes := []Node{}\n\ttokens = someTokens\n\tindex = 0\n\n\tfor {\n\t\ttoken := tokens[index]\n\n\t\tswitch {\n\t\tcase token.Type == lexer.EOF:\n\t\t\treturn nodes\n\t\tcase token.Type == lexer.LineBreak:\n\t\t\tindex++\n\t\t\tcontinue\n\t\tcase token.Type == lexer.KeywordFn:\n\t\t\tnodes = append(\n\t\t\t\tnodes,\n\t\t\t\tparseFunction())\n\t\tcase token.Type == lexer.KeywordStruct:\n\t\t\tnodes = append(\n\t\t\t\tnodes,\n\t\t\t\tparseStruct())\n\t\tdefault:\n\t\t\tmsg := fmt.Sprintf(\"Don't know how to parse: %v\", token)\n\t\t\tpanic(msg)\n\t\t}\n\t}\n}", "func (p *Parser) parseFunctionParams() []*ast.Identifier {\n\tvar ret []*ast.Identifier\n\n\tif p.peek.Is(token.RPAREN) {\n\t\tp.next()\n\t\treturn ret\n\t}\n\n\tp.next()\n\tret = append(ret, &ast.Identifier{Token: p.cur, Value: p.cur.Lit})\n\n\tfor p.peek.Is(token.COMMA) {\n\t\tp.next()\n\t\tp.next()\n\t\tret = append(ret, &ast.Identifier{Token: p.cur, Value: p.cur.Lit})\n\t}\n\n\tif !p.expectPeek(token.RPAREN) {\n\t\treturn nil\n\t}\n\treturn ret\n}", "func findFunctions(file *ast.File, match matchFunc, pkgPath string) []*Func {\n\tvar funcs []*Func\n\n\t// required because it's possible that the struct and the method are in different files\n\tpkgDecls := getPackageDecls(pkgPath)\n\n\tfor _, decl := range file.Decls {\n\t\tfd, ok := decl.(*ast.FuncDecl)\n\t\tif !ok || !match(fd) {\n\t\t\tcontinue\n\t\t}\n\n\t\t// will handle non-method / functions without receiver\n\t\tif fd.Recv == nil {\n\t\t\tfuncs = append(funcs, NewFunc(fd, nil, file, pkgDecls, pkgPath))\n\t\t\tcontinue\n\t\t}\n\n\t\tstExp, ok 
:= fd.Recv.List[0].Type.(*ast.StarExpr)\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\n\t\tid, ok := stExp.X.(*ast.Ident)\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\n\t\tstrType := getStructType(pkgDecls, id.Name)\n\t\tif fd.Recv.List[0].Type.(*ast.StarExpr).X.(*ast.Ident).Obj == nil {\n\t\t\tfd.Recv.List[0].Type.(*ast.StarExpr).X.(*ast.Ident).Obj = generateTypeObject(id, strType)\n\t\t}\n\n\t\tfuncs = append(funcs, NewFunc(fd, strType, file, pkgDecls, pkgPath))\n\t}\n\n\treturn funcs\n}", "func (p *Parser) AddBuiltInFuncs() {\n\tp.defs.Funcs = append(p.defs.Funcs,\n\t\t&oop.Fn{\n\t\t\tName: \"print\",\n\t\t\tDefaultParamCount: 2,\n\t\t\tSrc: functions.Print,\n\t\t\tParams: []oop.Param{{\n\t\t\t\tName: \"value\",\n\t\t\t\tParams: true,\n\t\t\t\tDefaultVal: oop.Val{Data: \"\", Type: oop.String},\n\t\t\t}},\n\t\t}, &oop.Fn{\n\t\t\tName: \"println\",\n\t\t\tSrc: functions.Println,\n\t\t\tDefaultParamCount: 2,\n\t\t\tParams: []oop.Param{{\n\t\t\t\tName: \"value\",\n\t\t\t\tParams: true,\n\t\t\t\tDefaultVal: oop.Val{Data: oop.NewListModel(oop.Val{Data: \"\", Type: oop.String}), Type: oop.List},\n\t\t\t}},\n\t\t}, &oop.Fn{\n\t\t\tName: \"input\",\n\t\t\tSrc: functions.Input,\n\t\t\tDefaultParamCount: 1,\n\t\t\tParams: []oop.Param{{\n\t\t\t\tName: \"message\",\n\t\t\t\tDefaultVal: oop.Val{Data: \"\", Type: oop.String},\n\t\t\t}},\n\t\t}, &oop.Fn{\n\t\t\tName: \"exit\",\n\t\t\tDefaultParamCount: 1,\n\t\t\tSrc: functions.Exit,\n\t\t\tParams: []oop.Param{{\n\t\t\t\tName: \"code\",\n\t\t\t\tDefaultVal: oop.Val{Data: 0., Type: oop.Int},\n\t\t\t}},\n\t\t}, &oop.Fn{\n\t\t\tName: \"len\",\n\t\t\tSrc: functions.Len,\n\t\t\tDefaultParamCount: 0,\n\t\t\tParams: []oop.Param{{Name: \"object\"}},\n\t\t}, &oop.Fn{\n\t\t\tName: \"range\",\n\t\t\tDefaultParamCount: 1,\n\t\t\tSrc: functions.Range,\n\t\t\tParams: []oop.Param{\n\t\t\t\t{Name: \"start\"},\n\t\t\t\t{Name: \"to\"},\n\t\t\t\t{\n\t\t\t\t\tName: \"step\",\n\t\t\t\t\tDefaultVal: oop.Val{Data: 1., Type: oop.Int},\n\t\t\t\t},\n\t\t\t},\n\t\t}, 
&oop.Fn{\n\t\t\tName: \"calloc\",\n\t\t\tSrc: functions.Calloc,\n\t\t\tDefaultParamCount: 0,\n\t\t\tParams: []oop.Param{{Name: \"size\"}},\n\t\t}, &oop.Fn{\n\t\t\tName: \"realloc\",\n\t\t\tDefaultParamCount: 0,\n\t\t\tSrc: functions.Realloc,\n\t\t\tParams: []oop.Param{{Name: \"base\"}, {Name: \"size\"}},\n\t\t}, &oop.Fn{\n\t\t\tName: \"string\",\n\t\t\tSrc: functions.String,\n\t\t\tDefaultParamCount: 1,\n\t\t\tParams: []oop.Param{\n\t\t\t\t{Name: \"object\"},\n\t\t\t\t{\n\t\t\t\t\tName: \"type\",\n\t\t\t\t\tDefaultVal: oop.Val{Data: \"parse\", Type: oop.String},\n\t\t\t\t},\n\t\t\t},\n\t\t}, &oop.Fn{\n\t\t\tName: \"int\",\n\t\t\tSrc: functions.Int,\n\t\t\tDefaultParamCount: 1,\n\t\t\tParams: []oop.Param{\n\t\t\t\t{Name: \"object\"},\n\t\t\t\t{\n\t\t\t\t\tName: \"type\",\n\t\t\t\t\tDefaultVal: oop.Val{Data: \"parse\", Type: oop.String},\n\t\t\t\t},\n\t\t\t},\n\t\t}, &oop.Fn{\n\t\t\tName: \"float\",\n\t\t\tSrc: functions.Float,\n\t\t\tDefaultParamCount: 0,\n\t\t\tParams: []oop.Param{{Name: \"object\"}},\n\t\t}, &oop.Fn{\n\t\t\tName: \"panic\",\n\t\t\tSrc: functions.Panic,\n\t\t\tDefaultParamCount: 0,\n\t\t\tParams: []oop.Param{{Name: \"msg\"}},\n\t\t}, &oop.Fn{\n\t\t\tName: \"type\",\n\t\t\tSrc: functions.Type,\n\t\t\tDefaultParamCount: 0,\n\t\t\tParams: []oop.Param{{Name: \"obj\"}},\n\t\t},\n\t)\n}", "func (s *BaseGraffleParserListener) EnterFunctions_block(ctx *Functions_blockContext) {}", "func listFunction() ([]*Function, error) {\n\tvar err error\n\n\tc := http.Client{}\n\n\trequest, _ := http.NewRequest(http.MethodGet, gateway_url+\"function/list-flow-functions\", nil)\n\tresponse, err := c.Do(request)\n\n\tif err == nil {\n\t\tdefer response.Body.Close()\n\n\t\tif response.Body != nil {\n\t\t\tbodyBytes, bErr := ioutil.ReadAll(response.Body)\n\t\t\tif bErr != nil {\n\t\t\t\tlog.Fatal(bErr)\n\t\t\t}\n\n\t\t\tfunctions := []*Function{}\n\t\t\tmErr := json.Unmarshal(bodyBytes, &functions)\n\t\t\tif mErr != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"failed to get 
function list, %v\", mErr)\n\t\t\t}\n\n\t\t\treturn functions, nil\n\t\t}\n\t\treturn make([]*Function, 0), nil\n\t}\n\n\treturn nil, fmt.Errorf(\"failed to get function list, %v\", err)\n}", "func (p *parser) parseFuncArgs() (args tree.Args) {\n\tswitch p.tok {\n\tcase token.LPAREN:\n\t\ta := &tree.ListArgs{}\n\t\ta.LParenToken = p.tokenNext()\n\t\tfor p.tok != token.RPAREN {\n\t\t\tif a.Values == nil {\n\t\t\t\ta.Values = &tree.ExprList{}\n\t\t\t}\n\t\t\ta.Values.Items = append(a.Values.Items, p.parseExpr())\n\t\t\tif p.tok == token.COMMA {\n\t\t\t\ta.Values.Seps = append(a.Values.Seps, p.tokenNext())\n\t\t\t} else {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\ta.RParenToken = p.expectToken(token.RPAREN)\n\t\targs = a\n\tcase token.LBRACE:\n\t\ta := &tree.TableArg{}\n\t\ta.Value = *p.parseTableCtor()\n\t\targs = a\n\tcase token.STRING, token.LONGSTRING:\n\t\ta := &tree.StringArg{}\n\t\ta.Value = *p.parseString()\n\t\targs = a\n\tdefault:\n\t\tp.error(p.off, \"function arguments expected\")\n\t}\n\treturn args\n}", "func ParseLinksFunc(host string) LinkParser {\n\tvalidLink := regexp.MustCompile(`(http|ftp|https)://(` + host + `)([\\w.,@?^=%&:/~+#-]*[\\w@?^=%&/~+#-])?`)\n\treturn func(host, body string) []string {\n\t\treturn validLink.FindAllString(body, -1)\n\t\t// TODO: handle relative links and make more sophisticated\n\t}\n}", "func (s *BashScript) Functions() ([]*Function, error) {\n\tfuncs := make([]*Function, 0)\n\n\tfnames, err := s.FunctionNames()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tannotations, err := s.FunctionAnnotations()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// TODO: Make this part shared for all shell types\n\tfor _, fname := range fnames {\n\t\ts.Log.WithFields(logrus.Fields{\n\t\t\t\"func\": fname,\n\t\t}).Debugf(\"building function\")\n\n\t\tf := &Function{\n\t\t\tName: fname,\n\t\t\tOptions: cmd.NewOptionsSet(fname),\n\t\t}\n\n\t\toptions := make(map[string]*cmd.Option, 0)\n\n\t\tfor _, a := range annotations {\n\t\t\tcmdName 
:= a.NamespaceValues[\"cmd\"]\n\t\t\tif cmdName == \"\" || cmdName != f.Name {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\ts.Log.WithFields(logrus.Fields{\n\t\t\t\t\"func\": a.NamespaceValues[\"cmd\"],\n\t\t\t\t\"namespace\": a.Namespace,\n\t\t\t\t\"key\": a.Key,\n\t\t\t}).Debugf(\"handling annotation\")\n\n\t\t\tswitch a.Namespace {\n\t\t\tcase config.CommandAnnotationCmdOptionNamespace:\n\t\t\t\tname := a.NamespaceValues[\"option\"]\n\t\t\t\tif name == \"\" {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tif options[name] == nil {\n\t\t\t\t\toptions[name] = &cmd.Option{Type: cmd.StringOption, Name: name}\n\t\t\t\t}\n\t\t\t\tswitch a.Key {\n\t\t\t\tcase \"type\":\n\t\t\t\t\toptions[name].Type = cmd.StringToOptionType(a.Value)\n\t\t\t\tcase \"short\":\n\t\t\t\t\toptions[name].Short = a.Value\n\t\t\t\tcase \"envName\":\n\t\t\t\t\toptions[name].EnvName = a.Value\n\t\t\t\tcase \"default\":\n\t\t\t\t\toptions[name].Default = a.Value\n\t\t\t\tcase \"required\":\n\t\t\t\t\trequired, err := strconv.ParseBool(a.Value)\n\t\t\t\t\tif err == nil {\n\t\t\t\t\t\toptions[name].Required = required\n\t\t\t\t\t}\n\t\t\t\tcase \"description\":\n\t\t\t\t\toptions[name].Description = a.Value\n\t\t\t\tcase \"hidden\":\n\t\t\t\t\thidden, err := strconv.ParseBool(a.Value)\n\t\t\t\t\tif err == nil {\n\t\t\t\t\t\toptions[name].Hidden = hidden\n\t\t\t\t\t}\n\t\t\t\tcase \"values\":\n\t\t\t\t\tvalues := make([]cmd.OptionValue, 0)\n\t\t\t\t\tif err := json.Unmarshal([]byte(a.Value), &values); err != nil {\n\t\t\t\t\t\ts.Log.WithFields(logrus.Fields{\n\t\t\t\t\t\t\t\"option\": name,\n\t\t\t\t\t\t\t\"json\": a.Value,\n\t\t\t\t\t\t}).Warn(\"error parsing json values, \", err.Error())\n\t\t\t\t\t}\n\t\t\t\t\toptions[name].Values = values\n\t\t\t\t}\n\t\t\tcase config.CommandAnnotationCmdNamespace:\n\t\t\t\tswitch a.Key {\n\t\t\t\tcase \"description\":\n\t\t\t\t\tf.Description = a.Value\n\t\t\t\tcase \"help\":\n\t\t\t\t\tf.Help = a.Value\n\t\t\t\tcase \"hidden\":\n\t\t\t\t\thidden, err := 
strconv.ParseBool(a.Value)\n\t\t\t\t\tif err == nil {\n\t\t\t\t\t\tf.Hidden = hidden\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tfor _, v := range options {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\ts.Log.WithFields(logrus.Fields{\n\t\t\t\t\t\"option\": v.Name,\n\t\t\t\t\t\"type\": v.Type,\n\t\t\t\t}).Warn(err.Error())\n\t\t\t} else {\n\t\t\t\tif err := f.Options.Add(v); err != nil {\n\t\t\t\t\ts.Log.WithFields(logrus.Fields{\n\t\t\t\t\t\t\"option\": v.Name,\n\t\t\t\t\t\t\"type\": v.Type,\n\t\t\t\t\t}).Warn(err.Error())\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tfuncs = append(funcs, f)\n\t}\n\n\treturn funcs, nil\n}", "func (p *parser) parseFuncDef(dotted bool, name string) (ast.Statement, bool) {\n\tif succ, toks := p.accept(token.LEFT_PAREN); !succ {\n\t\treturn p.errorStmt(true, \"Invalid token in function definition: %v\", toks[len(toks)-1])\n\t}\n\tf := &ast.FunctionDef{Static: !dotted, Name: name}\n\tfor p.peek().Type != token.RIGHT_PAREN {\n\t\tsucc, toks := p.accept(token.IDENTIFIER)\n\t\tif !succ {\n\t\t\treturn p.errorStmt(true, \"Invalid token in function definition: %v\", p.peek())\n\t\t}\n\t\tname := toks[0].Val\n\t\tvar typ ast.Statement\n\t\tif p.peek().Type.IsType() {\n\t\t\ttyp, _ = p.parseType()\n\t\t}\n\t\tf.AddParam(name, typ)\n\t\tswitch p.peek().Type {\n\t\tcase token.COMMA:\n\t\t\tp.next() // eat ,\n\t\tcase token.RIGHT_PAREN:\n\t\tdefault:\n\t\t\treturn p.errorStmt(true, \"Invalid token in function definition: %v\", p.peek())\n\t\t}\n\t}\n\tif succ, toks := p.accept(token.RIGHT_PAREN); !succ {\n\t\treturn p.errorStmt(true, \"Invalid token in function definition: %v\", toks[len(toks)-1])\n\t}\n\n\t// return value(s)\n\trvs, _ := p.parseReturnValues()\n\tfor _, rv := range rvs {\n\t\tf.AddReturn(rv)\n\t}\n\n\tif succ, toks := p.accept(token.EOL, token.INDENT); !succ {\n\t\treturn p.errorStmt(true, \"Invalid token in function definition: %v\", toks[len(toks)-1])\n\t}\n\n\tfor p.peek().Type != token.DEDENT && p.peek().Type != token.EOF 
{\n\t\tst, _ := p.parseFuncStmt()\n\t\tf.AddStmt(st)\n\t}\n\n\tif succ, toks := p.accept(token.DEDENT, token.EOL); !succ {\n\t\tst, _ := p.errorStmt(true, \"Invalid token in function definition: %v\", toks[len(toks)-1])\n\t\tf.AddStmt(st)\n\t}\n\n\t// If it's an anonymous function and we're not in the middle of a block\n\t// (followed by either a ',' or ')' ) then put the EOL back.\n\tif name == \"\" && !p.peek().Type.IsInBlock() {\n\t\tp.backup(1)\n\t}\n\n\treturn f, false\n}", "func ofParse(a string) *Network {\n\tlines := strings.Split(a, \"\\n\")\n\n\tlinks := make(map[int]map[int]float64)\n\tlayers := make([][]int, 1)\n\tlayers[0] = make([]int, 0)\n\tfunction := make(map[int]string)\n\tcurrentLayer := 0\n\n\ttracker := -1\n\n\tfor _, line := range lines {\n\n\t\tif len(line) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\tif len(line) > 0 && string(line[0]) == \"#\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tif len(line) >= 2 && line[:2] == \"->\" {\n\t\t\tdat := strings.Split(line[2:], \" \")\n\t\t\tnumba, err := strconv.Atoi(dat[0])\n\t\t\tfunctionID := dat[1]\n\t\t\tif err != nil {\n\t\t\t\tpanic(\"TROUBLE PARSING NODE ID\")\n\t\t\t}\n\t\t\tfunction[numba] = functionID\n\n\t\t\tif numba < tracker {\n\t\t\t\tcurrentLayer++\n\t\t\t\tlayers = append(layers, make([]int, 0))\n\t\t\t}\n\t\t\ttracker = numba\n\n\t\t\tlayers[currentLayer] = append(layers[currentLayer], numba)\n\t\t} else { //Line is assumed to be links\n\t\t\tsource := layers[currentLayer][len(layers[currentLayer])-1]\n\t\t\tlList := strings.Split(line, \"\\t\")\n\t\t\tlinks[source] = make(map[int]float64)\n\t\t\tfor _, item := range lList {\n\t\t\t\tif len(item) == 0 {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\titem = strings.Replace(item, \":\", \"=\", -1)\n\t\t\t\tdatums := strings.Split(item, \"=\")\n\t\t\t\tfrom, err0 := strconv.Atoi(datums[0])\n\t\t\t\tto, err1 := strconv.Atoi(datums[1])\n\t\t\t\tweight, err2 := strconv.ParseFloat(datums[2], 64)\n\t\t\t\tif from != source {\n\t\t\t\t\tpanic(\"weight / link 
mismatch\")\n\t\t\t\t}\n\t\t\t\tif err0 != nil || err1 != nil || err2 != nil {\n\t\t\t\t\tpanic(\"error converting from string\")\n\t\t\t\t}\n\t\t\t\tlinks[from][to] = weight\n\t\t\t}\n\n\t\t}\n\t}\n\n\t// fmt.Println(layers, links, function)\n\n\treverseLookup := make(map[int]*node)\n\n\tnet := new(Network)\n\n\tnet.inputNodes = make([]*node, len(layers[0]))\n\tfor i := range net.inputNodes {\n\t\tnode := new(node)\n\t\tid := layers[0][i]\n\t\tnode.activation = byID(function[id])\n\t\tnode.activationD = byIDD(function[id])\n\t\t// node.delta = float64(id)\n\t\treverseLookup[layers[0][i]] = node\n\t\tnet.inputNodes[i] = node\n\t}\n\n\tnet.hiddenNodes = make([][]*node, 0)\n\tfor hid := 1; hid <= len(layers)-2; hid++ {\n\t\tlayer := make([]*node, len(layers[hid]))\n\t\tfor i := 0; i < len(layer); i++ {\n\t\t\tnode := new(node)\n\t\t\tid := layers[hid][i]\n\t\t\tnode.activation = byID(function[id])\n\t\t\tnode.activationD = byIDD(function[id])\n\t\t\t// node.delta = float64(id)\n\t\t\treverseLookup[layers[hid][i]] = node\n\t\t\tlayer[i] = node\n\t\t}\n\t\tnet.hiddenNodes = append(net.hiddenNodes, layer)\n\t}\n\n\tlastPl := len(layers) - 1\n\n\tnet.outputNodes = make([]*node, len(layers[lastPl]))\n\tfor i := range net.outputNodes {\n\t\tnode := new(node)\n\t\tid := layers[lastPl][i]\n\t\tnode.activation = byID(function[id])\n\t\tnode.activationD = byIDD(function[id])\n\t\t// node.delta = float64(id)\n\t\treverseLookup[layers[lastPl][i]] = node\n\t\tnet.outputNodes[i] = node\n\t}\n\n\tfor from, chart := range links {\n\t\tfor to, weight := range chart {\n\t\t\tfro := reverseLookup[from]\n\t\t\tot := reverseLookup[to]\n\t\t\tfro.forward = append(fro.forward, ot)\n\t\t\tfro.weights = append(fro.weights, weight)\n\t\t}\n\t}\n\n\treturn net\n\n}", "func (dp *Dumper) getFunctions() ([]functionSchema, error) {\n\tquery := \"\" +\n\t\t\"SELECT n.nspname, p.proname, l.lanname, \" +\n\t\t\" CASE WHEN l.lanname = 'internal' THEN p.prosrc ELSE pg_get_functiondef(p.oid) END as 
definition, \" +\n\t\t\" pg_get_function_arguments(p.oid) \" +\n\t\t\"FROM pg_proc p \" +\n\t\t\"LEFT JOIN pg_namespace n ON p.pronamespace = n.oid \" +\n\t\t\"LEFT JOIN pg_language l ON p.prolang = l.oid \" +\n\t\t\"LEFT JOIN pg_type t ON t.oid = p.prorettype \" +\n\t\t\"WHERE n.nspname NOT IN ('pg_catalog', 'information_schema');\"\n\n\tvar fs []functionSchema\n\trows, err := dp.conn.DB.Query(query)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer rows.Close()\n\n\tfor rows.Next() {\n\t\tvar f functionSchema\n\t\tif err := rows.Scan(&f.schemaName, &f.name, &f.language, &f.statement, &f.arguments); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tf.schemaName, f.name = quoteIdentifier(f.schemaName), quoteIdentifier(f.name)\n\t\tfs = append(fs, f)\n\t}\n\n\treturn fs, nil\n}", "func (p *Parser) nextFunction() {\n\tp.enterNext()\n\n\ttok := p.tok\n\t_, err := p.scope.Lookup(tok)\n\tif err != nil {\n\t\tuserErr(err, tok)\n\t}\n\n\tp.exitNext()\n}", "func NewFunctions() Functions {\n\treturn dynaml.NewFunctions()\n}", "func NewFunctions(configs []FunctionInfo) (*Functions, error) {\n\tclients := map[string]*baetyl.FClient{}\n\tfor _, cfg := range configs {\n\t\tif cfg.Address == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tcli, err := baetyl.NewFClient(cfg.FunctionClientConfig)\n\t\tif err != nil {\n\t\t\tfor _, item := range clients {\n\t\t\t\titem.Close()\n\t\t\t}\n\t\t\treturn nil, fmt.Errorf(\"failed to create function client: %s\", err.Error())\n\t\t}\n\t\tclients[cfg.Name] = cli\n\t}\n\treturn &Functions{\n\t\tconfigs: configs,\n\t\tclients: clients,\n\t}, nil\n}", "func ParseSymTblFunc(ld *LineDesc) {\n var pd ParamDesc;\n var func_t *libgogo.TypeDesc = nil;\n var some_t *libgogo.TypeDesc = nil;\n var fwdStr string;\n var fwdNum uint64;\n var pkgFunc string;\n var funcName string;\n var pkgName string;\n var ind uint64;\n var paramType string;\n var tmpParam *libgogo.ObjectDesc;\n\n InitParamDesc(&pd);\n\n fwdStr = GetNextSymToken(ld);\n fwdNum = 
libgogo.StringToInt(fwdStr);\n pkgFunc = GetNextSymToken(ld);\n ind = libgogo.StringCompare(pkgFunc, \"main·init\");\n if ind != 0 {\n pkgName = GetPackageName(pkgFunc);\n funcName = GetFuncName(pkgFunc);\n func_t = NewFunction(funcName, pkgName, fwdNum);\n paramType = GetNextSymToken(ld);\n ind = libgogo.StringLength(paramType);\n for ; ind != 0 ; {\n ParseSymbolParam(&pd, paramType);\n tmpParam = libgogo.NewObject(pd.Name, \"\", libgogo.CLASS_PARAMETER);\n some_t = libgogo.GetType(pd.TypeName, pd.TypePackage, GlobalTypes, 1);\n if some_t != nil {\n tmpParam.ObjType = some_t;\n tmpParam.PtrType = pd.Ptr;\n libgogo.AddParameters(tmpParam, func_t);\n } else {\n LinkError(\"unable to find type '\",pd.TypePackage,\"·\",pd.TypeName,\"'.\");\n }\n paramType = GetNextSymToken(ld);\n ind = libgogo.StringLength(paramType);\n }\n }\n\n}", "func parseFnType(p *Parser) (types.TypeNode, error) {\n\tnxt := p.Next()\n\tif nxt.Token != tokens.RPAREN {\n\t\treturn nil, fmt.Errorf(\"unexpected token %s, expected \\\"(\\\"\", nxt.Value)\n\t}\n\t// varName := p.Next()\n\tparams := make([]types.TypeNode, 0)\n\tpeek := p.Peek()\n\tif peek.Token == tokens.LPAREN {\n\t\tp.Next()\n\t} else {\n\t\tfor nxt.Token != tokens.LPAREN {\n\t\t\tvarName := p.Next()\n\t\t\tif varName.Token != tokens.VARNAME {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected token %s, expected \\\"VARNAME\\\"\", varName)\n\t\t\t}\n\t\t\tcolon := p.Next()\n\t\t\tif colon.Token != tokens.COLON {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected token %s, expected \\\":\\\"\", colon)\n\t\t\t}\n\t\t\ttp, err := typeRule(p)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tparams = append(params, tp)\n\t\t\tnxt = p.Next()\n\t\t\tif !(nxt.Token == tokens.COMMA || nxt.Token == tokens.LPAREN) {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected token %s, expected %q or %q\", nxt, \",\", \")\")\n\t\t\t}\n\t\t}\n\n\t}\n\tarrow := p.Peek()\n\tvar retType types.TypeNode = nil\n\tvar err error\n\tif arrow.Token == tokens.ARROW 
{\n\t\tp.Next()\n\t\tretType, err = typeRule(p)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn &types.FnType{\n\t\tParameters: params,\n\t\tReturnVal: retType,\n\t}, nil\n}", "func ListFunctions(db *bolt.DB) ([]Function, error) {\n\n\tvar result = make([]Function, 0)\n\n\tdb.View(func(tx *bolt.Tx) error {\n\t\t// Assume bucket exists and has keys\n\t\tbucket := tx.Bucket([]byte(ResourceName))\n\n\t\tbucket.ForEach(func(key, value []byte) error {\n\t\t\tfmt.Printf(\"key=%s, value=%s\\n\", key, value)\n\n\t\t\tpersistedFunction := Function{}\n\t\t\tjson.Unmarshal(value, &persistedFunction)\n\n\t\t\tresult = append(result, persistedFunction)\n\t\t\tfmt.Printf(\"result: %v\\n\", result)\n\n\t\t\treturn nil\n\t\t})\n\t\treturn nil\n\t})\n\n\treturn result, nil\n}", "func parse(s *scanner, line string) (receiver, fn, arg, results string) {\n\tvar remaining string\n\ttokens := strings.SplitN(line, \"#\", 2)\n\tif len(tokens) > 2 {\n\t\ts.Fatalf(\"malformed input: expecting a single '#' separator for the results\")\n\t}\n\toperation := strings.TrimSpace(tokens[0])\n\tif len(tokens) == 2 {\n\t\tresults = strings.TrimSpace(tokens[1])\n\t}\n\n\ttokens = strings.SplitN(operation, \".\", 2)\n\tif len(tokens) != 2 {\n\t\ts.Fatalf(\"malformed input: expecting a single '.' 
separator for the receiver\")\n\t}\n\treceiver, remaining = tokens[0], tokens[1]\n\n\ttokens = strings.Split(remaining, \"(\")\n\tif len(tokens) != 2 {\n\t\ts.Fatalf(\"malformed input: expecting a single '(' for the fn\")\n\t}\n\tfn, remaining = tokens[0], tokens[1]\n\tif !strings.HasSuffix(remaining, \")\") {\n\t\ts.Fatalf(\"malformed input: expecting a closing ')' for the arg\")\n\t}\n\n\ttokens = strings.Split(remaining, \")\")\n\tif len(tokens) != 2 {\n\t\ts.Fatalf(\"malformed input: expecting a single ')' for the fn\")\n\t}\n\n\targ, remaining = tokens[0], tokens[1]\n\tif remaining != \"\" {\n\t\ts.Fatalf(\"malformed input: expecting nothing after closing ')'\")\n\t}\n\n\treturn receiver, fn, arg, results\n}", "func (parser *Parser) funcsDeclars() ([]*Function, error) {\n\tparser.trace(\"FUNCS DECLARS\")\n\tfunction, err := parser.funcDeclar()\n\t// Empty, is not an error\n\tif err == ErrNoMatch {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfuncs, err := parser.funcsDeclars()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn append([]*Function{function}, funcs...), nil\n}", "func ReadFunc(r io.Reader, fn func(Line) error) error {\n\ter := NewReader(r)\n\tfor {\n\t\tline, err := er.Read()\n\t\tif err == io.EOF {\n\t\t\treturn nil\n\t\t} else if err != nil {\n\t\t\treturn err\n\t\t}\n\t\terr = fn(line)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n}", "func parseFunctionFlag() {\n\tswitch fun {\n\tcase \"exp\":\n\t\tf = plot.Exp\n\tcase \"log\":\n\t\tf = plot.Log\n\tcase \"sqrt\":\n\t\tf = plot.Sqrt\n\tcase \"lin\":\n\t\tf = plot.Lin\n\tdefault:\n\t\tlogrus.Fatalln(\"invalid color scaling function:\", fun)\n\t}\n}", "func findFuncs(name string) ([]*FuncExtent, error) {\n\tfset := token.NewFileSet()\n\tparsedFile, err := parser.ParseFile(fset, name, nil, 0)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvisitor := &FuncVisitor{\n\t\tfset: fset,\n\t\tname: name,\n\t\tastFile: parsedFile,\n\t}\n\tast.Walk(visitor, 
visitor.astFile)\n\treturn visitor.funcs, nil\n}", "func generateParseFunc(headers []string, fieldDelimiter rune, lazyQuotes bool, ignoreQuotes bool) parseFunc {\n\tif ignoreQuotes {\n\t\treturn generateSplitParseFunc(headers, fieldDelimiter)\n\t}\n\treturn generateCSVParseFunc(headers, fieldDelimiter, lazyQuotes)\n}", "func parseFuncDecl(bci *BenchClientInfo, decl *ast.FuncDecl) {\n\tif !strings.HasPrefix(decl.Name.String(), \"New\") {\n\t\treturn\n\t}\n\n\tfuncName := decl.Name.String()\n\tfor _, r := range decl.Type.Results.List {\n\t\tretName := types.ExprString(r.Type)\n\t\tif s, ok := bci.Services[retName]; ok {\n\t\t\t// s has already been added in parseGenDecl()\n\t\t\ts.Factory = funcName\n\t\t}\n\t}\n}", "func (i *Interface) parseDependencies() {\n\tvar dependencies []string\n\tfor _, function := range i.Functions {\n\n\t\t// \"expanded\" refers to creating a parsers.from a templated type, i.e \"QMap <int, QString>\" becomes [QMap int QString]\n\t\texpandedReturnType := strings.FieldsFunc(function.ReturnType, templatedTypeSeparators) \n\t\tfor _, dataType := range(expandedReturnType) {\n\t\t\tdependencies = append(dependencies, strings.TrimSpace(dataType))\n\t\t}\n\n\t\tfor _, parameter := range function.Parameters {\n\t\t\texpandedParameter := strings.FieldsFunc(parameter.Type, templatedTypeSeparators)\n\t\t\tfor _, innerParameter := range expandedParameter {\n\t\t\t\tdependencies = append(dependencies, strings.TrimSpace(innerParameter))\n\t\t\t} \n\t\t}\n\t}\n\ti.Dependencies = dependencies\n\ti.Dependencies = parsers.RemoveConstSpecifiers(i.Dependencies)\n\ti.Dependencies = parsers.RemovePointersAndReferences(i.Dependencies)\n\ti.Dependencies = parsers.RemoveStdDataTypes(i.Dependencies)\n\ti.Dependencies = parsers.MapDataTypesToLibraryDependencies(i.Dependencies)\n\ti.Dependencies = parsers.RemoveDuplicates(i.Dependencies)\n\tsort.Strings(i.Dependencies)\n}", "func parseFuncDocs(file *ast.File, fd *ast.FuncDecl) TestDoc {\n\td := TestDoc{\n\t\tname: 
fd.Name.Name,\n\t\tdescription: strings.TrimPrefix(fd.Doc.Text(), fd.Name.Name+\" \"),\n\t\tisSubTest: strings.HasPrefix(fd.Name.Name, \"valid\"),\n\t}\n\n\tfor _, c := range file.Comments {\n\t\tfor _, ci := range c.List {\n\t\t\tif ci.Pos() < fd.Pos() || ci.End() > fd.End() {\n\t\t\t\t// only generate docs for comments that are within the function scope\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\ttext := strings.TrimPrefix(ci.Text, \"// \")\n\t\t\tm := docsRegex.FindStringSubmatch(text)\n\t\t\tif len(m) < 2 {\n\t\t\t\t// comment doesn't start with `docs: ` or `docs(...): `\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tmatched := m[0]\n\t\t\tdocsType := m[1]\n\n\t\t\ttext = strings.TrimPrefix(text, matched)\n\t\t\tswitch docsType {\n\t\t\tcase \"special\":\n\t\t\t\td.specialCases = append(d.specialCases, text)\n\t\t\tcase \"skip\":\n\t\t\t\td.skips = append(d.skips, text)\n\t\t\tcase \"\":\n\t\t\t\td.steps = append(d.steps, text)\n\t\t\tdefault:\n\t\t\t\tlog.Printf(\"docs type %s is not recognized\", docsType)\n\t\t\t}\n\t\t}\n\t}\n\treturn d\n}", "func (c *Config) ParseFunction(path string) string {\n\tpath = strings.TrimPrefix(path, c.Path)\n\tpos := strings.Index(path, \"?\")\n\tif pos > -1 {\n\t\tpath = path[:pos]\n\t}\n\tpos = strings.Index(path, \"#\")\n\tif pos > -1 {\n\t\tpath = path[:pos]\n\t}\n\n\treturn strings.Split(path, \"/\")[0]\n}", "func TestFunctionSection(t *testing.T) {\n testCases := []struct{\n name string\n encoded []byte\n decoded FunctionSection\n status error\n }{\n\t\t// 1 function\n { \"function1\",\n []byte{ 1, 0 },\n FunctionSection{ []uint32{ 0 } },\n nil },\n\n\t\t// 2 functions\n { \"function2\",\n []byte{ 2, 0xA, 0xB },\n FunctionSection{ []uint32{ 0xA, 0xB } },\n nil },\n }\n\n for _, test := range testCases {\n t.Run(test.name, func(t *testing.T) {\n section, err := readFunctionSection(test.encoded)\n if (err != test.status) {\n t.Error(\"Unexpected decoding status: \", err)\n }\n if (err == nil) {\n if (len(section.function) != 
len(test.decoded.function)) {\n t.Error(\"Unexpected decoded length: \", section)\n }\n // Assume each successful decode has at least 1 function\n if (section.function[0] != test.decoded.function[0]) {\n t.Error(\"Unexpected decoded function[0]: \", section)\n }\n\t\t\t}\n })\n }\n}", "func (j *LuaFunction) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error {\n\tvar err error\n\tcurrentKey := ffjtLuaFunctionbase\n\t_ = currentKey\n\ttok := fflib.FFTok_init\n\twantedTok := fflib.FFTok_init\n\nmainparse:\n\tfor {\n\t\ttok = fs.Scan()\n\t\t//\tprintln(fmt.Sprintf(\"debug: tok: %v state: %v\", tok, state))\n\t\tif tok == fflib.FFTok_error {\n\t\t\tgoto tokerror\n\t\t}\n\n\t\tswitch state {\n\n\t\tcase fflib.FFParse_map_start:\n\t\t\tif tok != fflib.FFTok_left_bracket {\n\t\t\t\twantedTok = fflib.FFTok_left_bracket\n\t\t\t\tgoto wrongtokenerror\n\t\t\t}\n\t\t\tstate = fflib.FFParse_want_key\n\t\t\tcontinue\n\n\t\tcase fflib.FFParse_after_value:\n\t\t\tif tok == fflib.FFTok_comma {\n\t\t\t\tstate = fflib.FFParse_want_key\n\t\t\t} else if tok == fflib.FFTok_right_bracket {\n\t\t\t\tgoto done\n\t\t\t} else {\n\t\t\t\twantedTok = fflib.FFTok_comma\n\t\t\t\tgoto wrongtokenerror\n\t\t\t}\n\n\t\tcase fflib.FFParse_want_key:\n\t\t\t// json {} ended. goto exit. woo.\n\t\t\tif tok == fflib.FFTok_right_bracket {\n\t\t\t\tgoto done\n\t\t\t}\n\t\t\tif tok != fflib.FFTok_string {\n\t\t\t\twantedTok = fflib.FFTok_string\n\t\t\t\tgoto wrongtokenerror\n\t\t\t}\n\n\t\t\tkn := fs.Output.Bytes()\n\t\t\tif len(kn) <= 0 {\n\t\t\t\t// \"\" case. 
hrm.\n\t\t\t\tcurrentKey = ffjtLuaFunctionnosuchkey\n\t\t\t\tstate = fflib.FFParse_want_colon\n\t\t\t\tgoto mainparse\n\t\t\t} else {\n\t\t\t\tswitch kn[0] {\n\n\t\t\t\tcase 'a':\n\n\t\t\t\t\tif bytes.Equal(ffjKeyLuaFunctionArgList, kn) {\n\t\t\t\t\t\tcurrentKey = ffjtLuaFunctionArgList\n\t\t\t\t\t\tstate = fflib.FFParse_want_colon\n\t\t\t\t\t\tgoto mainparse\n\t\t\t\t\t}\n\n\t\t\t\tcase 'i':\n\n\t\t\t\t\tif bytes.Equal(ffjKeyLuaFunctionIsVarArg, kn) {\n\t\t\t\t\t\tcurrentKey = ffjtLuaFunctionIsVarArg\n\t\t\t\t\t\tstate = fflib.FFParse_want_colon\n\t\t\t\t\t\tgoto mainparse\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\tif fflib.EqualFoldRight(ffjKeyLuaFunctionArgList, kn) {\n\t\t\t\t\tcurrentKey = ffjtLuaFunctionArgList\n\t\t\t\t\tstate = fflib.FFParse_want_colon\n\t\t\t\t\tgoto mainparse\n\t\t\t\t}\n\n\t\t\t\tif fflib.EqualFoldRight(ffjKeyLuaFunctionIsVarArg, kn) {\n\t\t\t\t\tcurrentKey = ffjtLuaFunctionIsVarArg\n\t\t\t\t\tstate = fflib.FFParse_want_colon\n\t\t\t\t\tgoto mainparse\n\t\t\t\t}\n\n\t\t\t\tcurrentKey = ffjtLuaFunctionnosuchkey\n\t\t\t\tstate = fflib.FFParse_want_colon\n\t\t\t\tgoto mainparse\n\t\t\t}\n\n\t\tcase fflib.FFParse_want_colon:\n\t\t\tif tok != fflib.FFTok_colon {\n\t\t\t\twantedTok = fflib.FFTok_colon\n\t\t\t\tgoto wrongtokenerror\n\t\t\t}\n\t\t\tstate = fflib.FFParse_want_value\n\t\t\tcontinue\n\t\tcase fflib.FFParse_want_value:\n\n\t\t\tif tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null {\n\t\t\t\tswitch currentKey {\n\n\t\t\t\tcase ffjtLuaFunctionIsVarArg:\n\t\t\t\t\tgoto handle_IsVarArg\n\n\t\t\t\tcase ffjtLuaFunctionArgList:\n\t\t\t\t\tgoto handle_ArgList\n\n\t\t\t\tcase ffjtLuaFunctionnosuchkey:\n\t\t\t\t\terr = fs.SkipField(tok)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn fs.WrapErr(err)\n\t\t\t\t\t}\n\t\t\t\t\tstate = fflib.FFParse_after_value\n\t\t\t\t\tgoto 
mainparse\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tgoto wantedvalue\n\t\t\t}\n\t\t}\n\t}\n\nhandle_IsVarArg:\n\n\t/* handler: j.IsVarArg type=bool kind=bool quoted=false*/\n\n\t{\n\t\tif tok != fflib.FFTok_bool && tok != fflib.FFTok_null {\n\t\t\treturn fs.WrapErr(fmt.Errorf(\"cannot unmarshal %s into Go value for bool\", tok))\n\t\t}\n\t}\n\n\t{\n\t\tif tok == fflib.FFTok_null {\n\n\t\t} else {\n\t\t\ttmpb := fs.Output.Bytes()\n\n\t\t\tif bytes.Compare([]byte{'t', 'r', 'u', 'e'}, tmpb) == 0 {\n\n\t\t\t\tj.IsVarArg = true\n\n\t\t\t} else if bytes.Compare([]byte{'f', 'a', 'l', 's', 'e'}, tmpb) == 0 {\n\n\t\t\t\tj.IsVarArg = false\n\n\t\t\t} else {\n\t\t\t\terr = errors.New(\"unexpected bytes for true/false value\")\n\t\t\t\treturn fs.WrapErr(err)\n\t\t\t}\n\n\t\t}\n\t}\n\n\tstate = fflib.FFParse_after_value\n\tgoto mainparse\n\nhandle_ArgList:\n\n\t/* handler: j.ArgList type=[]string kind=slice quoted=false*/\n\n\t{\n\n\t\t{\n\t\t\tif tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null {\n\t\t\t\treturn fs.WrapErr(fmt.Errorf(\"cannot unmarshal %s into Go value for \", tok))\n\t\t\t}\n\t\t}\n\n\t\tif tok == fflib.FFTok_null {\n\t\t\tj.ArgList = nil\n\t\t} else {\n\n\t\t\tj.ArgList = []string{}\n\n\t\t\twantVal := true\n\n\t\t\tfor {\n\n\t\t\t\tvar tmpJArgList string\n\n\t\t\t\ttok = fs.Scan()\n\t\t\t\tif tok == fflib.FFTok_error {\n\t\t\t\t\tgoto tokerror\n\t\t\t\t}\n\t\t\t\tif tok == fflib.FFTok_right_brace {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\n\t\t\t\tif tok == fflib.FFTok_comma {\n\t\t\t\t\tif wantVal == true {\n\t\t\t\t\t\t// TODO(pquerna): this isn't an ideal error message, this handles\n\t\t\t\t\t\t// things like [,,,] as an array value.\n\t\t\t\t\t\treturn fs.WrapErr(fmt.Errorf(\"wanted value token, but got token: %v\", tok))\n\t\t\t\t\t}\n\t\t\t\t\tcontinue\n\t\t\t\t} else {\n\t\t\t\t\twantVal = true\n\t\t\t\t}\n\n\t\t\t\t/* handler: tmpJArgList type=string kind=string quoted=false*/\n\n\t\t\t\t{\n\n\t\t\t\t\t{\n\t\t\t\t\t\tif tok != fflib.FFTok_string && tok != 
fflib.FFTok_null {\n\t\t\t\t\t\t\treturn fs.WrapErr(fmt.Errorf(\"cannot unmarshal %s into Go value for string\", tok))\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif tok == fflib.FFTok_null {\n\n\t\t\t\t\t} else {\n\n\t\t\t\t\t\toutBuf := fs.Output.Bytes()\n\n\t\t\t\t\t\ttmpJArgList = string(string(outBuf))\n\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tj.ArgList = append(j.ArgList, tmpJArgList)\n\n\t\t\t\twantVal = false\n\t\t\t}\n\t\t}\n\t}\n\n\tstate = fflib.FFParse_after_value\n\tgoto mainparse\n\nwantedvalue:\n\treturn fs.WrapErr(fmt.Errorf(\"wanted value token, but got token: %v\", tok))\nwrongtokenerror:\n\treturn fs.WrapErr(fmt.Errorf(\"ffjson: wanted token: %v, but got token: %v output=%s\", wantedTok, tok, fs.Output.String()))\ntokerror:\n\tif fs.BigError != nil {\n\t\treturn fs.WrapErr(fs.BigError)\n\t}\n\terr = fs.Error.ToError()\n\tif err != nil {\n\t\treturn fs.WrapErr(err)\n\t}\n\tpanic(\"ffjson-generated: unreachable, please report bug.\")\ndone:\n\n\treturn nil\n}", "func MakeFunctionReader() http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\n\t\tlog.Info(\"read request\")\n\t\tfunctions, err := readServices()\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error getting service list: %s\\n\", err.Error())\n\n\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\tw.Write([]byte(err.Error()))\n\t\t\treturn\n\t\t}\n\n\t\tfunctionBytes, _ := json.Marshal(functions)\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\tw.WriteHeader(http.StatusOK)\n\t\tw.Write(functionBytes)\n\t}\n}", "func parsePlaceholder(s string, funcs map[string]interface{}) (p tePlaceholder, err error) {\n\tconst (\n\t\tdot = \".\"\n\t\tinvoke = \"()\"\n\t\tindexLeft = \"[\"\n\t\tindexRight = \"]\"\n\t)\n\n\tif s==\"\"{\n\t\treturn\n\t}\n\n\tstrs := strings.Split(s, pipe)\n\tskipFirst := false\n\tif len(strs[0])>0 && strs[0][0] == 'a' {\n\t\tif i, err := strconvh.ParseInt(strs[0][1:]); err == nil {\n\t\t\tp.argNum = i\n\t\t\tskipFirst = 
true\n\t\t}\n\t}\n\n\tif skipFirst {\n\t\tstrs = strs[1:]\n\t}\n\n\tfor _, str := range strs {\n\t\tif len(str) == 0 {\n\t\t\terr = errors.New(\"unable to parse empty placeholder in '\" + s + \"'\")\n\t\t\treturn\n\t\t}\n\t\tswitch {\n\t\tcase str == \"*\":\n\t\t\tp.funcs = append(p.funcs, dereferencer{})\n\t\tcase str == \"&\":\n\t\t\tp.funcs = append(p.funcs, addrGetter{})\n\t\tcase strings.HasPrefix(str, dot) && strings.HasSuffix(str, invoke): // Method\n\t\t\tname := str[len(dot) : len(str)-len(invoke)]\n\t\t\tif !IsValidExportedIdent(name) {\n\t\t\t\terr = errors.New(\"invalid method name: '\" + name + \"'\")\n\t\t\t\treturn\n\t\t\t}\n\t\t\tp.funcs = append(p.funcs, FuncMethod(name))\n\t\tcase strings.HasPrefix(str, dot): // Field\n\t\t\tname := str[len(dot):]\n\t\t\tif !IsValidExportedIdent(name) {\n\t\t\t\terr = errors.New(\"invalid field name: '\" + name + \"'\")\n\t\t\t\treturn\n\t\t\t}\n\t\t\tp.funcs = append(p.funcs, FuncGetter(name))\n\t\tcase strings.HasSuffix(str, invoke): // Function\n\t\t\tname := str[:len(str)-len(invoke)]\n\t\t\tf, ok := funcs[name]\n\t\t\tif !ok {\n\t\t\t\terr = errors.New(\"unknown function '\" + name + \"'\")\n\t\t\t\treturn\n\t\t\t}\n\t\t\tp.funcs = append(p.funcs, FuncSimple{f})\n\t\tcase strings.HasPrefix(str, indexLeft) && strings.HasSuffix(str, indexRight): // Access by index\n\t\t\tiStr := str[len(indexLeft) : len(str)-len(indexRight)]\n\t\t\tvar i int\n\t\t\ti, err = strconvh.ParseInt(iStr)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tp.funcs = append(p.funcs, Index(i))\n\t\tdefault:\n\t\t\terr = errors.New(\"unknown element in placeholder: '\" + str + \"'\")\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}", "func (ac *Config) LuaFunctionMap(w http.ResponseWriter, req *http.Request, luadata []byte, filename string) (template.FuncMap, error) {\n\tac.pongomutex.Lock()\n\tdefer ac.pongomutex.Unlock()\n\n\t// Retrieve a Lua state\n\tL := ac.luapool.Get()\n\tdefer ac.luapool.Put(L)\n\n\t// Prepare an empty map of functions 
(and variables)\n\tfuncs := make(template.FuncMap)\n\n\t// Give no filename (an empty string will be handled correctly by the function).\n\tac.LoadCommonFunctions(w, req, filename, L, nil, nil)\n\n\t// Run the script\n\tif err := L.DoString(string(luadata)); err != nil {\n\t\t// Close the Lua state\n\t\tL.Close()\n\n\t\t// Logging and/or HTTP response is handled elsewhere\n\t\treturn funcs, err\n\t}\n\n\t// Extract the available functions from the Lua state\n\tglobalTable := L.G.Global\n\tglobalTable.ForEach(func(key, value lua.LValue) {\n\t\t// Check if the current value is a string variable\n\t\tif luaString, ok := value.(lua.LString); ok {\n\t\t\t// Store the variable in the same map as the functions (string -> interface)\n\t\t\t// for ease of use together with templates.\n\t\t\tfuncs[key.String()] = luaString.String()\n\t\t} else if luaTable, ok := value.(*lua.LTable); ok {\n\n\t\t\t// Convert the table to a map and save it.\n\t\t\t// Ignore values of a different type.\n\t\t\tmapinterface, _ := convert.Table2map(luaTable, false)\n\t\t\tswitch m := mapinterface.(type) {\n\t\t\tcase map[string]string:\n\t\t\t\tfuncs[key.String()] = map[string]string(m)\n\t\t\tcase map[string]int:\n\t\t\t\tfuncs[key.String()] = map[string]int(m)\n\t\t\tcase map[int]string:\n\t\t\t\tfuncs[key.String()] = map[int]string(m)\n\t\t\tcase map[int]int:\n\t\t\t\tfuncs[key.String()] = map[int]int(m)\n\t\t\t}\n\n\t\t\t// Check if the current value is a function\n\t\t} else if luaFunc, ok := value.(*lua.LFunction); ok {\n\t\t\t// Only export the functions defined in the given Lua code,\n\t\t\t// not all the global functions. 
IsG is true if the function is global.\n\t\t\tif !luaFunc.IsG {\n\n\t\t\t\tfunctionName := key.String()\n\n\t\t\t\t// Register the function, with a variable number of string arguments\n\t\t\t\t// Functions returning (string, error) are supported by html.template\n\t\t\t\tfuncs[functionName] = func(args ...string) (any, error) {\n\t\t\t\t\t// Create a brand new Lua state\n\t\t\t\t\tL2 := ac.luapool.New()\n\t\t\t\t\tdefer L2.Close()\n\n\t\t\t\t\t// Set up a new Lua state with the current http.ResponseWriter and *http.Request\n\t\t\t\t\tac.LoadCommonFunctions(w, req, filename, L2, nil, nil)\n\n\t\t\t\t\t// Push the Lua function to run\n\t\t\t\t\tL2.Push(luaFunc)\n\n\t\t\t\t\t// Push the given arguments\n\t\t\t\t\tfor _, arg := range args {\n\t\t\t\t\t\tL2.Push(lua.LString(arg))\n\t\t\t\t\t}\n\n\t\t\t\t\t// Run the Lua function\n\t\t\t\t\terr := L2.PCall(len(args), lua.MultRet, nil)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t// If calling the function did not work out, return the infostring and error\n\t\t\t\t\t\treturn utils.Infostring(functionName, args), err\n\t\t\t\t\t}\n\n\t\t\t\t\t// Empty return value if no values were returned\n\t\t\t\t\tvar retval any\n\n\t\t\t\t\t// Return the first of the returned arguments, as a string\n\t\t\t\t\tif L2.GetTop() >= 1 {\n\t\t\t\t\t\tlv := L2.Get(-1)\n\t\t\t\t\t\ttbl, isTable := lv.(*lua.LTable)\n\t\t\t\t\t\tswitch {\n\t\t\t\t\t\tcase isTable:\n\t\t\t\t\t\t\t// lv was a Lua Table\n\t\t\t\t\t\t\tretval = gluamapper.ToGoValue(tbl, gluamapper.Option{\n\t\t\t\t\t\t\t\tNameFunc: func(s string) string {\n\t\t\t\t\t\t\t\t\treturn s\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t})\n\t\t\t\t\t\t\tif ac.debugMode && ac.verboseMode {\n\t\t\t\t\t\t\t\tlog.Info(utils.Infostring(functionName, args) + \" -> (map)\")\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\tcase lv.Type() == lua.LTString:\n\t\t\t\t\t\t\t// lv is a Lua String\n\t\t\t\t\t\t\tretstr := L2.ToString(1)\n\t\t\t\t\t\t\tretval = retstr\n\t\t\t\t\t\t\tif ac.debugMode && ac.verboseMode 
{\n\t\t\t\t\t\t\t\tlog.Info(utils.Infostring(functionName, args) + \" -> \\\"\" + retstr + \"\\\"\")\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\tdefault:\n\t\t\t\t\t\t\tretval = \"\"\n\t\t\t\t\t\t\tlog.Warn(\"The return type of \" + utils.Infostring(functionName, args) + \" can't be converted\")\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\t// No return value, return an empty string and nil\n\t\t\t\t\treturn retval, nil\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t})\n\n\t// Return the map of functions\n\treturn funcs, nil\n}", "func FilterFuncs() (filters []func(string) string) {\n\tfilters = append(filters, FilterParenthesis())\n\tfilters = append(filters, FilterHyphens())\n\treturn filters\n}", "func (g *Generator) AddFuncs(fm map[string]interface{}) {\n\tfor name, f := range fm {\n\t\tg.funcs[name] = f\n\t}\n}", "func (p *Parser) ParseProgram() *ast.Program {\n\tprogram := &ast.Program{}\n\tprogram.Functions = []*ast.Function{}\n\n\tfor p.curTokenIs(token.FN) {\n\t\tfn := p.parseFunction()\n\t\tprogram.Functions = append(program.Functions, fn)\n\t\tp.nextToken()\n\t}\n\n\treturn program\n}", "func loadFunction(path string) (function, error) {\n\tpl, err := plugin.Open(path)\n\tif err != nil {\n\t\treturn function{}, fmt.Errorf(\"Cannot open %s plugin: %v\", path, err)\n\t}\n\trouteRaw, err := pl.Lookup(\"Route\")\n\tif err != nil {\n\t\treturn function{}, fmt.Errorf(\"Cannot lookup Route: %v\", err)\n\t}\n\troute := *routeRaw.(*string)\n\n\thandlerRaw, err := pl.Lookup(\"Handle\")\n\tif err != nil {\n\t\treturn function{}, fmt.Errorf(\"Cannot lookup handler: %v\", err)\n\t}\n\thandler := handlerRaw.(functionHandler)\n\n\treturn function{\n\t\troute: route,\n\t\thandler: handler,\n\t}, nil\n}", "func (info *fileInfo) addFuncDecls() {\n\t// TODO: replace all uses of importCPos with the real locations from\n\t// libclang.\n\tnames := make([]string, 0, len(info.functions))\n\tfor name := range info.functions {\n\t\tnames = append(names, name)\n\t}\n\tsort.Strings(names)\n\tfor _, name := range 
names {\n\t\tfn := info.functions[name]\n\t\tobj := &ast.Object{\n\t\t\tKind: ast.Fun,\n\t\t\tName: \"C.\" + name,\n\t\t}\n\t\targs := make([]*ast.Field, len(fn.args))\n\t\tdecl := &ast.FuncDecl{\n\t\t\tName: &ast.Ident{\n\t\t\t\tNamePos: info.importCPos,\n\t\t\t\tName: \"C.\" + name,\n\t\t\t\tObj: obj,\n\t\t\t},\n\t\t\tType: &ast.FuncType{\n\t\t\t\tFunc: info.importCPos,\n\t\t\t\tParams: &ast.FieldList{\n\t\t\t\t\tOpening: info.importCPos,\n\t\t\t\t\tList: args,\n\t\t\t\t\tClosing: info.importCPos,\n\t\t\t\t},\n\t\t\t\tResults: fn.results,\n\t\t\t},\n\t\t}\n\t\tobj.Decl = decl\n\t\tfor i, arg := range fn.args {\n\t\t\targs[i] = &ast.Field{\n\t\t\t\tNames: []*ast.Ident{\n\t\t\t\t\t&ast.Ident{\n\t\t\t\t\t\tNamePos: info.importCPos,\n\t\t\t\t\t\tName: arg.name,\n\t\t\t\t\t\tObj: &ast.Object{\n\t\t\t\t\t\t\tKind: ast.Var,\n\t\t\t\t\t\t\tName: arg.name,\n\t\t\t\t\t\t\tDecl: decl,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tType: arg.typeExpr,\n\t\t\t}\n\t\t}\n\t\tinfo.Decls = append(info.Decls, decl)\n\t}\n}", "func EvaluateFuncs(exp string) string {\n exp = EvaluateFunc(exp, \"abs\")\n exp = EvaluateFunc(exp, \"sin\")\n exp = EvaluateFunc(exp, \"cos\")\n exp = EvaluateFunc(exp, \"tan\")\n return exp\n}", "func (sc *SmartContract) handleFunctions(stub shim.ChaincodeStubInterface) pb.Response {\n\t_SC_LOGGER.Info(\"InsidehandleFunctions\")\n\tfunction, _ := stub.GetFunctionAndParameters()\n\tif function == \"probe\" {\n\t\treturn sc.probe(stub)\n\t}\n\treturn shim.Error(\"Invalid function provided\")\n}", "func makeStateFn(expr []string) (stateFn, error) {\n\t// End of the recursive call, we return nil.\n\tif expr == nil || len(expr) == 0 {\n\t\treturn nil, nil\n\t}\n\n\tformatStr := expr[0]\n\n\t// Expressions can be quoted, so we keep a track of it and trim the quotes.\n\tvar quoted bool\n\tif strings.HasPrefix(formatStr, \"\\\"\") {\n\t\tquoted = true\n\t\tformatStr = strings.Trim(formatStr, \"\\\"\")\n\t}\n\n\t// Recursive call to determine the next state 
function.\n\t// XXX(gilliek): errors are reported right to left\n\tnext, err := makeStateFn(expr[1:])\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tswitch f := LookupFormat(formatStr); f {\n\tcase REMOTE_HOST:\n\t\treturn parseRemoteHost(quoted, next), nil\n\tcase REMOTE_LOGNAME:\n\t\treturn parseRemoteLogname(quoted, next), nil\n\tcase REMOTE_USER:\n\t\treturn parseRemoteUser(quoted, next), nil\n\tcase TIME:\n\t\treturn parseTime(quoted, next), nil\n\tcase REQUEST_FIRST_LINE:\n\t\treturn parseRequestFirstLine(quoted, next), nil\n\tcase STATUS:\n\t\treturn parseStatus(quoted, next), nil\n\tcase RESPONSE_SIZE:\n\t\treturn parseResponseSize(quoted, next), nil\n\tcase RESPONSE_SIZE_CLF:\n\t\treturn parseResponseSizeCLF(quoted, next), nil\n\tcase ELAPSED_TIME_IN_SEC:\n\t\treturn parseElapsedTimeInSec(quoted, next), nil\n\tcase HEADER:\n\t\thdr := strings.TrimSuffix(strings.TrimPrefix(formatStr, \"%{\"), \"}i\")\n\t\treturn parseHeader(quoted, hdr, next), nil\n\tcase UNKNOWN:\n\t\tfallthrough\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"%q format is not supported\", formatStr)\n\t}\n}", "func (p Parser[T]) Parse(s string) ([]check.ValCk[T], error) {\n\texprs, err := getElts(s, p.checkerName)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tckFuncs := make([]check.ValCk[T], 0, len(exprs))\n\tfor _, e := range exprs {\n\t\tf, err := p.ParseExpr(e)\n\t\tif err != nil {\n\t\t\treturn nil,\n\t\t\t\tfmt.Errorf(\"Can't make %s function: %s\",\n\t\t\t\t\tp.checkerName, err)\n\t\t}\n\t\tckFuncs = append(ckFuncs, f)\n\t}\n\n\treturn ckFuncs, nil\n}", "func (r *ListFunctionsRequest) FromJsonString(s string) error {\n\tf := make(map[string]interface{})\n\tif err := json.Unmarshal([]byte(s), &f); err != nil {\n\t\treturn err\n\t}\n\tdelete(f, \"Order\")\n\tdelete(f, \"Orderby\")\n\tdelete(f, \"Offset\")\n\tdelete(f, \"Limit\")\n\tdelete(f, \"SearchKey\")\n\tdelete(f, \"Namespace\")\n\tdelete(f, \"Description\")\n\tdelete(f, \"Filters\")\n\tif len(f) > 0 {\n\t\treturn 
tcerr.NewTencentCloudSDKError(\"ClientError.BuildRequestError\", \"ListFunctionsRequest has unknown keys!\", \"\")\n\t}\n\treturn json.Unmarshal([]byte(s), &r)\n}", "func (t *LineTable) go12Funcs() []Func {\n\t// Assume it is malformed and return nil on error.\n\tif !disableRecover {\n\t\tdefer func() {\n\t\t\trecover()\n\t\t}()\n\t}\n\n\tft := t.funcTab()\n\tfuncs := make([]Func, ft.Count())\n\tsyms := make([]Sym, len(funcs))\n\tfor i := range funcs {\n\t\tf := &funcs[i]\n\t\tf.Entry = ft.pc(i)\n\t\tf.End = ft.pc(i + 1)\n\t\tinfo := t.funcData(uint32(i))\n\t\tf.LineTable = t\n\t\tf.FrameSize = int(info.deferreturn())\n\t\tsyms[i] = Sym{\n\t\t\tValue: f.Entry,\n\t\t\tType: 'T',\n\t\t\tName: t.funcName(info.nameoff()),\n\t\t\tGoType: 0,\n\t\t\tFunc: f,\n\t\t}\n\t\tf.Sym = &syms[i]\n\t}\n\treturn funcs\n}", "func SymbolResolve(symbols string) (fmap []gtutils.SymbolFuncInfo) {\n\tfmap = make([]gtutils.SymbolFuncInfo, 0)\n\tlines := bufio.NewScanner(strings.NewReader(symbols))\n\tfor lines.Scan() {\n\t\tfields := strings.Split(lines.Text(), \"|\")\n\t\tfor i := range fields {\n\t\t\tfields[i] = strings.TrimSpace(fields[i])\n\t\t}\n\t\tif len(fields) != 7 {\n\t\t\tcontinue\n\t\t}\n\t\tif strings.ToLower(fields[3]) != \"func\" {\n\t\t\tcontinue\n\t\t}\n\t\toff64, _ := strconv.ParseUint(fields[1], 16, 64)\n\t\toff := int(off64)\n\t\tsize64, _ := strconv.ParseUint(fields[4], 16, 64)\n\t\tsize := int(size64)\n\t\tfName := fields[0]\n\t\tsecTab := strings.Index(fields[6], \"\\t\")\n\t\tif secTab < 0 {\n\t\t\tfmap = append(fmap, gtutils.SymbolFuncInfo{\n\t\t\t\tFunction: fName,\n\t\t\t\tHaveSource: false,\n\t\t\t\tSource: \"\",\n\t\t\t\tOffset: int(off),\n\t\t\t\tSize: size,\n\t\t\t\tLine: 0,\n\t\t\t\tSection: fields[6]})\n\t\t} else {\n\t\t\tsecTabSeqLast := secTab\n\t\t\tfor ; fields[6][secTabSeqLast] == '\\t'; secTabSeqLast++ {\n\t\t\t}\n\t\t\tsecName := fields[6][:secTab]\n\t\t\tsrcFile := fields[6][secTabSeqLast:]\n\t\t\tfSrc, line := findSrcFile(srcFile)\n\t\t\tfmap = 
append(fmap, gtutils.SymbolFuncInfo{\n\t\t\t\tFunction: fName,\n\t\t\t\tHaveSource: true,\n\t\t\t\tSource: fSrc,\n\t\t\t\tOffset: int(off),\n\t\t\t\tSize: size,\n\t\t\t\tLine: line,\n\t\t\t\tSection: secName})\n\t\t}\n\t}\n\treturn\n}", "func validateFunction(fn *types.Function) (errs []error) {\n\tif !template.IsContextFirst(fn.Args) {\n\t\terrs = append(errs, fmt.Errorf(\"%s: first argument should be of type context.Context\", fn.Name))\n\t}\n\tif !template.IsErrorLast(fn.Results) {\n\t\terrs = append(errs, fmt.Errorf(\"%s: last result should be of type error\", fn.Name))\n\t}\n\tfor _, param := range fn.Args {\n\t\tif param.Name == \"\" {\n\t\t\terrs = append(errs, fmt.Errorf(\"%s: unnamed argument of type %s\", fn.Name, param.Type.String()))\n\t\t}\n\t}\n\tfor _, param := range fn.Results {\n\t\tif param.Name == \"\" {\n\t\t\terrs = append(errs, fmt.Errorf(\"%s: unnamed result of type %s\", fn.Name, param.Type.String()))\n\t\t}\n\t}\n\treturn\n}", "func NewFuncs(ctx context.Context, enums []xo.Enum) *Funcs {\n\tdriver, _, _ := xo.DriverSchemaNthParam(ctx)\n\tenumMap := make(map[string]xo.Enum)\n\tif driver == \"mysql\" {\n\t\tfor _, e := range enums {\n\t\t\tenumMap[e.Name] = e\n\t\t}\n\t}\n\treturn &Funcs{\n\t\tdriver: driver,\n\t\tenumMap: enumMap,\n\t\tconstraint: Constraint(ctx),\n\t\tescCols: Esc(ctx, \"columns\"),\n\t\tescTypes: Esc(ctx, \"types\"),\n\t\tengine: Engine(ctx),\n\t}\n}", "func ParseTables(d *drawing.Drawing, line int, data [][2]string) error {\n\tparsers := []func(*drawing.Drawing, [][2]string) (table.SymbolTable, error){\n\t\tParseViewport,\n\t\tParseLtype,\n\t\tParseLayer,\n\t\tParseStyle,\n\t\tParseView,\n\t\tParseUCS,\n\t\tParseAppID,\n\t\tParseDimStyle,\n\t\tParseBlockRecord,\n\t}\n\ttmpdata := make([][2]string, 0)\n\tsetparser := false\n\tvar parser func(*drawing.Drawing, [][2]string) (table.SymbolTable, error)\n\tvar ind int\n\tfor i, dt := range data {\n\t\tif setparser {\n\t\t\tif dt[0] != \"2\" {\n\t\t\t\treturn fmt.Errorf(\"line 
%d: invalid group code: %s\", line+2*i, dt[0])\n\t\t\t}\n\t\t\tind = int(table.TableTypeValue(strings.ToUpper(dt[1])))\n\t\t\tif ind < 0 {\n\t\t\t\treturn fmt.Errorf(\"line %d: unknown table type: %s\", line+2*i, dt[1])\n\t\t\t}\n\t\t\tparser = parsers[ind]\n\t\t\tsetparser = false\n\t\t} else {\n\t\t\tif dt[0] == \"0\" {\n\t\t\t\tswitch strings.ToUpper(dt[1]) {\n\t\t\t\tcase \"TABLE\":\n\t\t\t\t\tsetparser = true\n\t\t\t\tcase \"ENDTAB\":\n\t\t\t\t\tif len(tmpdata) > 0 {\n\t\t\t\t\t\terr := ParseTable(d, tmpdata, ind, parser)\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\treturn err\n\t\t\t\t\t\t}\n\t\t\t\t\t\ttmpdata = make([][2]string, 0)\n\t\t\t\t\t}\n\t\t\t\tdefault:\n\t\t\t\t\ttmpdata = append(tmpdata, dt)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\ttmpdata = append(tmpdata, dt)\n\t\t\t}\n\t\t}\n\t}\n\tif len(tmpdata) > 0 {\n\t\terr := ParseTable(d, tmpdata, ind, parser)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"line %d: %s\", line+2*len(data), err.Error())\n\t\t}\n\t\ttmpdata = make([][2]string, 0)\n\t}\n\treturn nil\n}", "func (r *RuleSet) matchingFuncs(c context.Context, configSet, path string) ([]Func, error) {\n\tr.l.RLock()\n\tdefer r.l.RUnlock()\n\n\tvar out []Func\n\tvar errs errors.MultiError\n\n\tfor _, rule := range r.r {\n\t\tswitch pat, err := r.renderedConfigPattern(c, rule); {\n\t\tcase err != nil:\n\t\t\terrs = append(errs, err...)\n\t\tcase pat.ConfigSet.Match(configSet) && pat.Path.Match(path):\n\t\t\tout = append(out, rule.cb)\n\t\t}\n\t}\n\n\tif len(errs) != 0 {\n\t\treturn nil, errs\n\t}\n\treturn out, nil\n}", "func GetPublicFunctions(pkg, filePath string) ([]*types.Type, error) {\n\tbuilder := go2idlparser.New()\n\tdata, err := ioutil.ReadFile(filePath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif err := builder.AddFile(pkg, filePath, data); err != nil {\n\t\treturn nil, err\n\t}\n\tuniverse, err := builder.FindTypes()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar functions []*types.Type\n\n\t// Create the AST by parsing 
src.\n\tfset := token.NewFileSet() // positions are relative to fset\n\tf, err := parser.ParseFile(fset, filePath, nil, 0)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed parse file to list functions: %v\", err)\n\t}\n\n\t// Inspect the AST and print all identifiers and literals.\n\tast.Inspect(f, func(n ast.Node) bool {\n\t\tvar s string\n\t\tswitch x := n.(type) {\n\t\tcase *ast.FuncDecl:\n\t\t\ts = x.Name.Name\n\t\t\t// It's a function (not method), and is public, record it.\n\t\t\tif x.Recv == nil && isPublic(s) {\n\t\t\t\tfunctions = append(functions, universe[pkg].Function(x.Name.Name))\n\t\t\t}\n\t\t}\n\t\treturn true\n\t})\n\n\treturn functions, nil\n}", "func Parse(terms []string, l Lexicon) (parsed []Definition, err error) {\n\treturn nil, nil\n}", "func (e *Evaluator) processFunction(tkn token) error {\n\tfunction, ok := e.functions[tkn.Value]\n\n\tif !ok {\n\t\treturn fmt.Errorf(\"Evaluator: unsupported function %v\", tkn.Value)\n\t}\n\treturn function.Invoke(&e.stack)\n}", "func (p *Parser) parseFuncType(method bool) *expr.FuncLiteral {\n\tf := &expr.FuncLiteral{\n\t\tType: &tipe.Func{},\n\t}\n\n\tif method {\n\t\t// func (a) f()\n\t\tp.expect(token.LeftParen)\n\t\tp.next()\n\t\tif p.s.Token == token.Mul {\n\t\t\tf.PointerReceiver = true\n\t\t\tp.next()\n\t\t}\n\t\tf.ReceiverName = p.parseIdent().Name\n\t\tp.expect(token.RightParen)\n\t\tp.next()\n\t}\n\n\tif p.s.Token == token.Ident {\n\t\tf.Name = p.parseIdent().Name\n\t} else if method {\n\t\tp.errorf(\"class method missing name\")\n\t}\n\n\tp.expect(token.LeftParen)\n\tp.next()\n\tif p.s.Token != token.RightParen {\n\t\tf.ParamNames, f.Type.Params = p.parseParamTuple()\n\t\tif params := f.Type.Params; len(params.Elems) > 0 {\n\t\t\tlast := params.Elems[len(params.Elems)-1]\n\t\t\tif _, variadic := last.(*tipe.Ellipsis); variadic {\n\t\t\t\tf.Type.Variadic = true\n\t\t\t}\n\t\t}\n\t} else {\n\t\tf.Type.Params = new(tipe.Tuple)\n\t}\n\tp.expect(token.RightParen)\n\tp.next()\n\n\tif p.s.Token 
== token.LeftParen {\n\t\tp.expect(token.LeftParen)\n\t\tp.next()\n\t\tif p.s.Token != token.RightParen {\n\t\t\tf.ResultNames, f.Type.Results = p.parseParamTuple()\n\t\t}\n\t\tp.expect(token.RightParen)\n\t\tp.next()\n\t} else {\n\t\ttyp := p.maybeParseType()\n\t\tif typ != nil {\n\t\t\tf.ResultNames = []string{\"\"}\n\t\t\tf.Type.Results = &tipe.Tuple{Elems: []tipe.Type{typ}}\n\t\t}\n\t}\n\treturn f\n}", "func expFunctions(baseDir string) map[string]function.Function {\n\treturn map[string]function.Function{\n\t\t\"abs\": stdlib.AbsoluteFunc,\n\t\t\"abspath\": funcs.AbsPathFunc,\n\t\t\"basename\": funcs.BasenameFunc,\n\t\t\"base64decode\": funcs.Base64DecodeFunc,\n\t\t\"base64encode\": funcs.Base64EncodeFunc,\n\t\t\"base64gzip\": funcs.Base64GzipFunc,\n\t\t\"base64sha256\": funcs.Base64Sha256Func,\n\t\t\"base64sha512\": funcs.Base64Sha512Func,\n\t\t\"bcrypt\": funcs.BcryptFunc,\n\t\t\"can\": tryfunc.CanFunc,\n\t\t\"ceil\": stdlib.CeilFunc,\n\t\t\"chomp\": stdlib.ChompFunc,\n\t\t\"cidrhost\": funcs.CidrHostFunc,\n\t\t\"cidrnetmask\": funcs.CidrNetmaskFunc,\n\t\t\"cidrsubnet\": funcs.CidrSubnetFunc,\n\t\t\"cidrsubnets\": funcs.CidrSubnetsFunc,\n\t\t\"coalesce\": funcs.CoalesceFunc,\n\t\t\"coalescelist\": stdlib.CoalesceListFunc,\n\t\t\"compact\": stdlib.CompactFunc,\n\t\t\"concat\": stdlib.ConcatFunc,\n\t\t\"contains\": stdlib.ContainsFunc,\n\t\t\"csvdecode\": stdlib.CSVDecodeFunc,\n\t\t\"dirname\": funcs.DirnameFunc,\n\t\t\"distinct\": stdlib.DistinctFunc,\n\t\t\"element\": stdlib.ElementFunc,\n\t\t\"chunklist\": stdlib.ChunklistFunc,\n\t\t\"file\": funcs.MakeFileFunc(baseDir, false),\n\t\t\"fileexists\": funcs.MakeFileExistsFunc(baseDir),\n\t\t\"fileset\": funcs.MakeFileSetFunc(baseDir),\n\t\t\"filebase64\": funcs.MakeFileFunc(baseDir, true),\n\t\t\"filebase64sha256\": funcs.MakeFileBase64Sha256Func(baseDir),\n\t\t\"filebase64sha512\": funcs.MakeFileBase64Sha512Func(baseDir),\n\t\t\"filemd5\": funcs.MakeFileMd5Func(baseDir),\n\t\t\"filesha1\": 
funcs.MakeFileSha1Func(baseDir),\n\t\t\"filesha256\": funcs.MakeFileSha256Func(baseDir),\n\t\t\"filesha512\": funcs.MakeFileSha512Func(baseDir),\n\t\t\"flatten\": stdlib.FlattenFunc,\n\t\t\"floor\": stdlib.FloorFunc,\n\t\t\"format\": stdlib.FormatFunc,\n\t\t\"formatdate\": stdlib.FormatDateFunc,\n\t\t\"formatlist\": stdlib.FormatListFunc,\n\t\t\"indent\": stdlib.IndentFunc,\n\t\t\"index\": funcs.IndexFunc, // stdlib.IndexFunc is not compatible\n\t\t\"join\": stdlib.JoinFunc,\n\t\t\"jsondecode\": stdlib.JSONDecodeFunc,\n\t\t\"jsonencode\": stdlib.JSONEncodeFunc,\n\t\t\"keys\": stdlib.KeysFunc,\n\t\t\"length\": funcs.LengthFunc,\n\t\t\"list\": funcs.ListFunc,\n\t\t\"log\": stdlib.LogFunc,\n\t\t\"lookup\": funcs.LookupFunc,\n\t\t\"lower\": stdlib.LowerFunc,\n\t\t\"map\": funcs.MapFunc,\n\t\t\"matchkeys\": funcs.MatchkeysFunc,\n\t\t\"max\": stdlib.MaxFunc,\n\t\t\"md5\": funcs.Md5Func,\n\t\t\"merge\": stdlib.MergeFunc,\n\t\t\"min\": stdlib.MinFunc,\n\t\t\"parseint\": stdlib.ParseIntFunc,\n\t\t\"pathexpand\": funcs.PathExpandFunc,\n\t\t\"pow\": stdlib.PowFunc,\n\t\t\"range\": stdlib.RangeFunc,\n\t\t\"regex\": stdlib.RegexFunc,\n\t\t\"regexall\": stdlib.RegexAllFunc,\n\t\t\"replace\": funcs.ReplaceFunc,\n\t\t\"reverse\": stdlib.ReverseListFunc,\n\t\t\"rsadecrypt\": funcs.RsaDecryptFunc,\n\t\t\"setintersection\": stdlib.SetIntersectionFunc,\n\t\t\"setproduct\": stdlib.SetProductFunc,\n\t\t\"setsubtract\": stdlib.SetSubtractFunc,\n\t\t\"setunion\": stdlib.SetUnionFunc,\n\t\t\"sha1\": funcs.Sha1Func,\n\t\t\"sha256\": funcs.Sha256Func,\n\t\t\"sha512\": funcs.Sha512Func,\n\t\t\"signum\": stdlib.SignumFunc,\n\t\t\"slice\": stdlib.SliceFunc,\n\t\t\"sort\": stdlib.SortFunc,\n\t\t\"split\": stdlib.SplitFunc,\n\t\t\"strrev\": stdlib.ReverseFunc,\n\t\t\"substr\": stdlib.SubstrFunc,\n\t\t\"timestamp\": funcs.TimestampFunc,\n\t\t\"timeadd\": stdlib.TimeAddFunc,\n\t\t\"title\": stdlib.TitleFunc,\n\t\t\"tostring\": funcs.MakeToFunc(cty.String),\n\t\t\"tonumber\": 
funcs.MakeToFunc(cty.Number),\n\t\t\"tobool\": funcs.MakeToFunc(cty.Bool),\n\t\t\"toset\": funcs.MakeToFunc(cty.Set(cty.DynamicPseudoType)),\n\t\t\"tolist\": funcs.MakeToFunc(cty.List(cty.DynamicPseudoType)),\n\t\t\"tomap\": funcs.MakeToFunc(cty.Map(cty.DynamicPseudoType)),\n\t\t\"transpose\": funcs.TransposeFunc,\n\t\t\"trim\": stdlib.TrimFunc,\n\t\t\"trimprefix\": stdlib.TrimPrefixFunc,\n\t\t\"trimspace\": stdlib.TrimSpaceFunc,\n\t\t\"trimsuffix\": stdlib.TrimSuffixFunc,\n\t\t\"try\": tryfunc.TryFunc,\n\t\t\"upper\": stdlib.UpperFunc,\n\t\t\"urlencode\": funcs.URLEncodeFunc,\n\t\t\"uuid\": funcs.UUIDFunc,\n\t\t\"uuidv5\": funcs.UUIDV5Func,\n\t\t\"values\": stdlib.ValuesFunc,\n\t\t\"yamldecode\": yaml.YAMLDecodeFunc,\n\t\t\"yamlencode\": yaml.YAMLEncodeFunc,\n\t\t\"zipmap\": stdlib.ZipmapFunc,\n\t}\n\n}", "func parseFuncPath(path string) (pkgPath, fnName string) {\n\tif len(path) < 1 {\n\t\treturn \"\", \"\"\n\t}\n\tswitch path[0] {\n\tcase '(':\n\t\tregex := regexp.MustCompile(`\\((?P<pkg>[^)]+)\\).(?P<fn>.+)`)\n\t\tsubmatches := regex.FindStringSubmatch(path)\n\t\tif len(submatches) >= 3 {\n\t\t\treturn submatches[1], submatches[2]\n\t\t}\n\tcase '\"':\n\t\tregex := regexp.MustCompile(`\"(?P<pkg>[^)]+)\".(?P<fn>.+)`)\n\t\tsubmatches := regex.FindStringSubmatch(path)\n\t\tif len(submatches) >= 3 {\n\t\t\treturn submatches[1], submatches[2]\n\t\t}\n\tdefault:\n\t\tparts := strings.Split(path, \".\")\n\t\tif len(parts) >= 2 {\n\t\t\treturn parts[0], parts[1]\n\t\t}\n\t}\n\treturn \"\", path\n}", "func Parse(f *os.File) (Source, error) {\n\tvar s scanner.Scanner\n\tvar src Source\n\ts.Init(f)\n\tfor tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {\n\t\tswitch s.TokenText() {\n\t\tcase \"!\": // macros have completely unpredictable structure, so we need\n\t\t\t// to zip past them for sanity.\n\t\t\tcollapseMacro(&s)\n\t\tcase \"#\": // attribute\n\t\t\tattName := \"#\"\n\t\t\tfor {\n\t\t\t\tc := s.Next()\n\t\t\t\tattName += string(c)\n\t\t\t\tif c == ']' 
{\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t\tswitch attName {\n\t\t\tcase \"#[cfg(test)]\":\n\t\t\t\tsrc.TestBlock = s.Pos().Line\n\t\t\tcase \"#[test]\":\n\t\t\t\tt := capTest(&s)\n\t\t\t\tsrc.Tests = append(src.Tests, t)\n\t\t\tdefault:\n\t\t\t\tcontinue\n\t\t\t}\n\t\t// Detect trait and impl first because they can encapsulate other blocks\n\t\tcase \"trait\":\n\t\t\tsrc.Traits = append(src.Traits, capTrait(&s))\n\t\tcase \"impl\":\n\t\t\tcapImpl(&src, &s)\n\t\tcase \"enum\":\n\t\t\tsrc.Enums = append(src.Enums, capEnum(&s))\n\t\tcase \"struct\":\n\t\t\tsrc.RsStructs = append(src.RsStructs, capStruct(&s))\n\t\tcase \"fn\":\n\t\t\tfn, ubs := capFn(&s)\n\t\t\tsrc.Funcs = append(src.Funcs, fn)\n\t\t\tif len(ubs) > 0 {\n\t\t\t\tsrc.UB = append(src.UB, ubs...)\n\t\t\t}\n\t\tcase \"unsafe\":\n\t\t\tsrc.UB = append(src.UB, capUB(&s))\n\t\tdefault:\n\t\t\tcontinue\n\t\t}\n\t}\n\treturn src, nil\n}", "func parse(src string) (interface{}, error) {\n\ttokens := tokenize(src)\n\tast, remainder, err := readFromTokens(tokens)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif len(remainder) > 0 {\n\t\treturn nil, errors.New(\"unexpected trailing tokens\")\n\t}\n\treturn ast, nil\n}", "func funcFromFunc() {\n\taddExp := mathExpression()\n\tprintln(addExp(10.0, 20.0))\n}", "func (p *parser) lfunction() Node {\n\tident := p.expect(TokenIdent)\n\tp.expect(TokenLParen)\n\targs := p.lparameters()\n\tp.expect(TokenRParen)\n\n\tn := newFunc(ident.pos, ident.val, args)\n\treturn n\n}", "func parse2VarFun(vf *VarFunElem) (err error) {\n\t// Parsujemy sciezke do zmiennej/funkcji\n\tfor vf != nil {\n\t\tswitch pe := vf.name.(type) {\n\t\tcase nil:\n\t\t\t// Samowywolanie - nic nie robimy.\n\t\tcase reflect.Value:\n\t\t\tvk := pe.Kind()\n\t\t\tif vk == reflect.String || vk == reflect.Int ||\n\t\t\t\tvk == reflect.Int8 || vk == reflect.Int16 ||\n\t\t\t\tvk == reflect.Int32 || vk == reflect.Int64 ||\n\t\t\t\tvk == reflect.Float32 || vk == reflect.Float64 {\n\t\t\t\t// Nazwa, indeks liczbowy 
- nic nie robimy.\n\t\t\t} else {\n\t\t\t\tpanic(fmt.Sprintf(\"tmpl:parse2, line %d: Unknown type (%s) \"+\n\t\t\t\t\t\"of index in var/fun path! \", vf.ln, vk))\n\t\t\t}\n\n\t\tcase []Element:\n\t\t\t// Indeks tekstowy po sparsowaniu.\n\t\t\tvf.name, err = parse2(&pe, MAIN_BLK)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\n\t\tcase *VarFunElem:\n\t\t\t// Indeks bedacy wynikiem funkcji\n\t\t\terr = parse2VarFun(pe)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tvf.name = pe\n\n\t\tdefault:\n\t\t\tpanic(fmt.Sprintf(\n\t\t\t\t\"tmpl:parse2, line %d: Unknown type (%T) in var/fun path!\",\n\t\t\t\tvf.ln, pe))\n\t\t}\n\n\t\t// Parsujemy argumenty funkcji\n\t\tfor ii := range vf.args {\n\t\t\terr = parse2Param(&vf.args[ii], vf.ln)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tvf = vf.next\n\t}\n\treturn\n}", "func (p *Parser) parseFunctionDefinition() asti.ExpressionI {\n\tp.nextToken()\n\tlit := &ast.FunctionDefineLiteral{Token: p.curToken}\n\tif !p.expectPeek(tokentype.LPAREN) {\n\t\treturn nil\n\t}\n\tlit.Defaults, lit.Parameters = p.parseFunctionParameters()\n\tif !p.expectPeek(tokentype.LBRACE) {\n\t\treturn nil\n\t}\n\tlit.Body = p.parseBlockStatement()\n\treturn lit\n}", "func Parse(r io.Reader) (*List, error) {\n\teventList := List{}\n\traw, err := ioutil.ReadAll(r)\n\tif err != nil {\n\t\treturn &eventList, err\n\t}\n\terr = json.Unmarshal(raw, &eventList)\n\tif err != nil {\n\t\treturn &eventList, err\n\t}\n\treturn &eventList, nil\n}", "func (resource *ResourceType) Functions() []Function {\n\tfunctions := maps.Values(resource.functions)\n\n\tsort.Slice(functions, func(i int, j int) bool {\n\t\treturn functions[i].Name() < functions[j].Name()\n\t})\n\n\treturn functions\n}", "func IsFunction(s string) bool {\n\ts = strings.Replace(s, \"(*)\", \"\", -1)\n\treturn strings.Contains(s, \"(\")\n}", "func parseFields(s string) (fs []Widget) {\n\tfor _, p := range pairs(s) {\n\t\tfs = append(fs, Widget{Field: Field{Label: p[0], Content: 
p[1]}})\n\t}\n\treturn\n}", "func NewCmdFunctions(out io.Writer, config *serverConfig) *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"functions\",\n\t\tShort: i18n.T(\"Run Dispatch Functions Manager\"),\n\t\tArgs: cobra.NoArgs,\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\trunFunctions(config)\n\t\t},\n\t}\n\tcmd.SetOutput(out)\n\treturn cmd\n}", "func parse(input []byte, vars []Variable) (node Node, err error) {\n\t// Tokenize\n\ttokens, err := scanInput(input)\n\tif err != nil {\n\t\treturn\n\t}\n\n\t// Convert to AST\n\tnodes, endPos, err := parseExpression(tokens, vars, 0)\n\n\tif err != nil {\n\t\treturn\n\t}\n\n\tif len(nodes) > 1 {\n\t\terr = fmt.Errorf(\"couldn't flatten down to one node:\\n%+v\", nodes)\n\t\treturn\n\t}\n\n\tif endPos < len(tokens)-1 {\n\t\terr = fmt.Errorf(\"Parsing tokens ended at %d, but expected %d\", endPos, len(tokens)-1)\n\t\treturn\n\t}\n\n\tnode = nodes[0]\n\n\treturn\n}", "func Parse(r io.Reader) (*ClassFile, error) {\n\tc := &ClassFile{}\n\n\tvar err error\n\n\tfor _, f := range initFuncs {\n\t\terr = f(c, r)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn c, nil\n}", "func NewFunction(config *truce.HTTP, function truce.Function) (*Function, error) {\n\tif function.Transports.HTTP == nil {\n\t\treturn nil, nil\n\t}\n\n\ttransport := *function.Transports.HTTP\n\n\tb := &Function{\n\t\tDefinition: function,\n\t\tQuery: map[string]QueryParam{},\n\t}\n\n\ttype argument struct {\n\t\tvariable string\n\t\tposVariable string\n\t\ttyp string\n\t}\n\n\tvar (\n\t\tpathMappings = map[string]string{}\n\t\targs = map[string]argument{}\n\t)\n\n\tfor i, field := range function.Arguments {\n\t\targs[field.Name] = argument{\n\t\t\ttyp: string(field.Type),\n\t\t\tposVariable: fmt.Sprintf(\"v%d\", i),\n\t\t\tvariable: field.Name,\n\t\t}\n\t}\n\n\tif function.Return.Present && function.Return.Name != \"\" {\n\t\tb.HasReturn = true\n\t\tb.ReturnType = string(function.Return.Type)\n\n\t\tif len(b.ReturnType) < 
1 {\n\t\t\treturn nil, errors.New(\"return type cannot be empty\")\n\t\t}\n\n\t\tif b.ReturnType[0] == '*' {\n\t\t\tb.ReturnType = b.ReturnType[1:]\n\t\t\tb.ReturnIsPtr = true\n\t\t}\n\t}\n\n\tb.Method = transport.Method\n\n\t// Sort the arguments by name for consistent positional ordering.\n\tvar argVals []truce.ArgumentValue\n\tfor _, arg := range transport.Arguments {\n\t\targVals = append(argVals, arg)\n\t}\n\tsort.Slice(argVals, func(i, j int) bool {\n\t\treturn argVals[i].Name < argVals[j].Name\n\t})\n\n\tvar qpos int\n\tfor _, arg := range argVals {\n\t\ta, ok := args[arg.Name]\n\n\t\tswitch arg.From {\n\t\tcase \"body\":\n\t\t\tif !ok {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tb.BodyVar = a.posVariable\n\t\t\tb.BodyType = a.typ\n\t\tcase \"path\":\n\t\t\tif !ok {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tpathMappings[arg.Var] = args[arg.Name].variable\n\t\tcase \"query\":\n\t\t\tif !ok {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tb.Query[arg.Var] = QueryParam{\n\t\t\t\tPos: qpos,\n\t\t\t\tQueryVar: arg.Var,\n\t\t\t\tGoVar: a.posVariable,\n\t\t\t\tType: a.typ,\n\t\t\t}\n\n\t\t\tqpos++\n\t\tcase \"static\":\n\t\t\t// TODO(georgemac)\n\t\t}\n\t}\n\n\tfor _, part := range strings.Split(config.Prefix, \"/\") {\n\t\tif part == \"\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tb.Path = append(b.Path, Element{Type: \"static\", Value: part})\n\t}\n\n\tb.Path = append(b.Path, parsePath(pathMappings, transport.Path)...)\n\n\treturn b, nil\n}", "func parse(body io.ReadCloser) ([]Result, error) {\n\tdefer body.Close()\n\n\tdoc, err := goquery.NewDocumentFromReader(body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresults := []Result{}\n\tfor _, parserMethod := range defaultParserFunctions {\n\t\tresults = parserMethod(doc)\n\t\tif len(results) != 0 {\n\t\t\treturn results, nil\n\t\t}\n\t}\n\n\treturn results, nil\n}", "func parser(\n\tin <-chan string,\n\terrors chan<- error,\n\tquit chan struct{},\n\tdefinitionFile string,\n) chan Parsed {\n\n\tdef, err := 
definition.NewDefinition(definitionFile)\n\tif err != nil {\n\t\tlog.Fatal(\"failed to read definition: %s\", err)\n\t}\n\n\tout := make(chan Parsed)\n\n\tfor i := 0; i < NumParserWorkers; i++ {\n\t\tgo func() {\n\t\t\tfor {\n\t\t\t\tselect {\n\t\t\t\tcase <-quit:\n\t\t\t\t\treturn\n\t\t\t\tcase body := <-in:\n\t\t\t\t\tp := Parsed{\n\t\t\t\t\t\tFields: def.Parse(body),\n\t\t\t\t\t\tSize: binary.Size([]byte(body)),\n\t\t\t\t\t}\n\t\t\t\t\tout <- p\n\t\t\t\t}\n\t\t\t}\n\t\t}()\n\t}\n\treturn out\n}", "func generateSplitParseFunc(headers []string, fieldDelimiter rune) parseFunc {\n\treturn func(value interface{}) (interface{}, error) {\n\t\tcsvLine, err := valueAsString(value)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// This parse function does not do any special quote handling; Splitting on the delimiter is sufficient.\n\t\tfields := strings.Split(csvLine, string(fieldDelimiter))\n\t\treturn headersMap(headers, fields)\n\t}\n}", "func (app *APP) Parse() error {\n\tdataHandlers := map[string]dataHandler{\n\t\tOrganisationsKey: app.LoadOrganisationsFromJSON,\n\t\tUsersKey: app.LoadUsersFromJSON,\n\t\tTicketsKey: app.LoadTicketsFromJSON,\n\t}\n\n\tfor dataType, dataHandler := range dataHandlers {\n\t\terr := dataHandler(app.jsonContents[dataType])\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to read %s data file with error: %s\", dataType, err.Error())\n\t\t}\n\t}\n\treturn nil\n}", "func main() {\n\tparse(read_file(\"test.txt\"))\n\n}", "func (p *Parser) parseFunctionParameters() (map[string]asti.ExpressionI, []*ast.Identifier) {\n\n\t// Any default parameters.\n\tm := make(map[string]asti.ExpressionI)\n\n\t// The argument-definitions.\n\tidentifiers := make([]*ast.Identifier, 0)\n\n\t// Is the next parameter \")\" ? If so we're done. 
No args.\n\tif p.peekTokenIs(tokentype.RPAREN) {\n\t\tp.nextToken()\n\t\treturn m, identifiers\n\t}\n\tp.nextToken()\n\n\t// Keep going until we find a \")\"\n\tfor !p.curTokenIs(tokentype.RPAREN) {\n\n\t\tif p.curTokenIs(tokentype.EOF) {\n\t\t\tp.AddError(\"unterminated function parameters\")\n\t\t\treturn nil, nil\n\t\t}\n\n\t\t// Get the identifier.\n\t\tident := &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}\n\t\tidentifiers = append(identifiers, ident)\n\t\tp.nextToken()\n\n\t\t// If there is \"=xx\" after the name then that's\n\t\t// the default parameter.\n\t\tif p.curTokenIs(tokentype.ASSIGN) {\n\t\t\tp.nextToken()\n\t\t\t// Save the default value.\n\t\t\tm[ident.Value] = p.parseExpressionStatement().Expression\n\t\t\tp.nextToken()\n\t\t}\n\n\t\t// Skip any comma.\n\t\tif p.curTokenIs(tokentype.COMMA) {\n\t\t\tp.nextToken()\n\t\t}\n\t}\n\n\treturn m, identifiers\n}", "func NewFuncParser(p ParserFunc, name string) *FuncParser {\n\treturn &FuncParser{\n\t\tparser: p,\n\t\tname: name,\n\t}\n}", "func flowFunctionRequestHandle(w http.ResponseWriter, function string) {\n\tw.Header().Set(\"Content-Type\", jsonType)\n\tfunctions, err := listFunction()\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"failed to handle request, error: %v\", err), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tfor _, functionObj := range functions {\n\t\tif functionObj.Name == function {\n\t\t\tdog, derr := getDag(function)\n\t\t\tif derr != nil {\n\t\t\t\thttp.Error(w, fmt.Sprintf(\"failed to handle request, %v\", derr), http.StatusInternalServerError)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tfunctionObj.Dag = dog\n\t\t\tdata, _ := json.Marshal(functionObj)\n\t\t\tw.Write(data)\n\t\t\treturn\n\t\t}\n\t}\n\thttp.Error(w, fmt.Sprintf(\"failed to handle request, function not found\"), http.StatusInternalServerError)\n}", "func SolveFunction(v Token, vars map[string]interface{}, stack Stack) Stack {\n\tvar value interface{}\n\tfunTokens := v.Value\n\tif funTokens.Length() > 
1 && v.Lexeme != \"PV\" {\n\t\ttoks := ToPostfix(funTokens)\n\t\tif toks.Length() > 0 {\n\t\t\tvalue = SolvePostfix(toks, vars)\n\t\t}\n\t}\n\tif v.Lexeme == \"LENGTH\" {\n\t\tstack.Push(Token{Number, strconv.Itoa(len(value.(string))), Stack{}})\n\t} else if v.Lexeme == \"ISBLANK\" || v.Lexeme == \"ISNULL\" {\n\t\tval := false\n\t\tif len(strings.TrimSpace(value.(string))) == 0 {\n\t\t\tval = true\n\t\t}\n\t\tstack.Push(Token{Boolean, strconv.FormatBool(val), Stack{}})\n\t} else if v.Lexeme == \"MONTH\" {\n\t\tparsefloat, ok := strconv.ParseFloat(value.(string), 64)\n\t\tif ok != nil {\n\t\t\tfmt.Println(\"Error:\", ok)\n\t\t}\n\t\tdays := int(parsefloat)\n\t\tmonths := days * 12 / 365\n\t\tstack.Push(Token{Number, strconv.Itoa(months), Stack{}})\n\t} else if v.Lexeme == \"DAY\" {\n\t\tparsefloat, ok := strconv.ParseFloat(value.(string), 64)\n\t\tif ok != nil {\n\t\t\tfmt.Println(\"Error:\", ok)\n\t\t}\n\t\tdays := int(parsefloat)\n\t\tbasedate, _ := time.Parse(\"01/02/2006\", \"01/01/1900\")\n\t\tdate := basedate.AddDate(0, 0, days)\n\t\tstack.Push(Token{Number, strconv.Itoa(date.Day()), Stack{}})\n\t} else if v.Lexeme == \"NOT\" {\n\t\tif value == \"true\" {\n\t\t\tstack.Push(Token{Boolean, strconv.FormatBool(false), Stack{}})\n\t\t} else {\n\t\t\tstack.Push(Token{Boolean, strconv.FormatBool(true), Stack{}})\n\t\t}\n\t} else if v.Lexeme == \"ROUND\" {\n\t\tvar number, precision float64\n\t\tvar ok error\n\t\tcount := 0\n\t\tstk := Stack{}\n\t\ttoks := ToPostfix(v.Value)\n\t\tfor _, item := range toks.Values {\n\t\t\tif item.Lexeme != \",\" {\n\t\t\t\tstk.Push(item)\n\t\t\t} else {\n\t\t\t\tif count == 0 {\n\t\t\t\t\tval := SolvePostfix(stk, vars)\n\t\t\t\t\tnumber, ok = strconv.ParseFloat(val.(string), 64)\n\t\t\t\t\tif ok != nil {\n\t\t\t\t\t\tfmt.Println(\"Error:\", ok)\n\t\t\t\t\t}\n\t\t\t\t} else if count == 1 {\n\t\t\t\t\tval := SolvePostfix(stk, vars)\n\t\t\t\t\tprecision, ok = strconv.ParseFloat(val.(string), 64)\n\t\t\t\t\tif ok != nil 
{\n\t\t\t\t\t\tfmt.Println(\"Error:\", ok)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tcount++\n\t\t\t\tstk = Stack{}\n\t\t\t}\n\t\t}\n\t\tif count == 0 {\n\t\t\tval := SolvePostfix(stk, vars)\n\t\t\tnumber, ok = strconv.ParseFloat(val.(string), 64)\n\t\t\tif ok != nil {\n\t\t\t\tfmt.Println(\"Error:\", ok)\n\t\t\t}\n\t\t} else if count == 1 {\n\t\t\tval := SolvePostfix(stk, vars)\n\t\t\tprecision, ok = strconv.ParseFloat(val.(string), 64)\n\t\t\tif ok != nil {\n\t\t\t\tfmt.Println(\"Error:\", ok)\n\t\t\t}\n\t\t}\n\t\toutput := math.Pow(10, float64(precision))\n\t\tresult := number*output + math.Copysign(0.5, number*output)\n\t\tresult = float64(int(result)) / output\n\t\tstr := strconv.FormatFloat(result, 'f', int(precision), 64)\n\t\tstack.Push(Token{Number, str, Stack{}})\n\t}\n\treturn stack\n}", "func (i *Input) parseFuncAnnotation(s string, f *ast.FuncDecl) Annotation {\n\ts = strings.TrimLeft(s, commentPrefix)\n\tvar annotation Annotation\n\tif strings.HasPrefix(s, delegatePrefix) {\n\t\tprefix := fmt.Sprintf(\"//%s:\", delegatePrefix)\n\t\ts = strings.TrimLeft(s, prefix)\n\t\ts = strings.TrimSpace(s)\n\t\tsubmatches := delegateExpr.FindAllStringSubmatch(s, -1)\n\t\tmatches := submatches[0]\n\t\tannotation = &DelegateAnnotation{\n\t\t\tAssemblyName: matches[1],\n\t\t\tTypeName: matches[2],\n\t\t\tMethodName: matches[3],\n\t\t\tFuncDecl: f,\n\t\t\tInput: i,\n\t\t}\n\t}\n\treturn annotation\n}", "func loadConfig(funcs []func() error) error {\n\tvar err error\n\n\tfor _, f := range funcs {\n\t\terr = f()\n\t\tif err != nil {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn err\n}", "func (s *BashScript) FunctionNames() ([]string, error) {\n\tcallArgs := []string{\"-c\", fmt.Sprintf(\"set -e; source %s; declare -F\", s.FullPath())}\n\n\tio, buf := io.BufferedCombined()\n\n\terr := NewBash().Run(io, callArgs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfunctions := []string{}\n\n\tout := buf.String()\n\tfor _, fun := range strings.Split(string(out), \"\\n\") {\n\t\tif fun != \"\" 
{\n\t\t\tname := strings.Replace(fun, \"declare -f \", \"\", -1)\n\t\t\tfunctions = append(functions, name)\n\t\t}\n\t}\n\n\treturn functions, nil\n}", "func (info *fileInfo) addFuncPtrDecls() {\n\tgen := &ast.GenDecl{\n\t\tTokPos: info.importCPos,\n\t\tTok: token.VAR,\n\t\tLparen: info.importCPos,\n\t\tRparen: info.importCPos,\n\t}\n\tnames := make([]string, 0, len(info.functions))\n\tfor name := range info.functions {\n\t\tnames = append(names, name)\n\t}\n\tsort.Strings(names)\n\tfor _, name := range names {\n\t\tobj := &ast.Object{\n\t\t\tKind: ast.Typ,\n\t\t\tName: \"C.\" + name + \"$funcaddr\",\n\t\t}\n\t\tvalueSpec := &ast.ValueSpec{\n\t\t\tNames: []*ast.Ident{&ast.Ident{\n\t\t\t\tNamePos: info.importCPos,\n\t\t\t\tName: \"C.\" + name + \"$funcaddr\",\n\t\t\t\tObj: obj,\n\t\t\t}},\n\t\t\tType: &ast.SelectorExpr{\n\t\t\t\tX: &ast.Ident{\n\t\t\t\t\tNamePos: info.importCPos,\n\t\t\t\t\tName: \"unsafe\",\n\t\t\t\t},\n\t\t\t\tSel: &ast.Ident{\n\t\t\t\t\tNamePos: info.importCPos,\n\t\t\t\t\tName: \"Pointer\",\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\t\tobj.Decl = valueSpec\n\t\tgen.Specs = append(gen.Specs, valueSpec)\n\t}\n\tinfo.Decls = append(info.Decls, gen)\n}" ]
[ "0.647542", "0.6471392", "0.64463437", "0.64396536", "0.641464", "0.6380902", "0.62952745", "0.6153932", "0.60824454", "0.60764724", "0.59445286", "0.5792225", "0.56793004", "0.5501519", "0.54934597", "0.54603106", "0.5328648", "0.5324013", "0.5319592", "0.5314563", "0.5269679", "0.5218724", "0.5190703", "0.51878303", "0.5171203", "0.51663536", "0.5151349", "0.51302946", "0.51126707", "0.50958186", "0.5075898", "0.5056616", "0.5039655", "0.50194895", "0.5007888", "0.4998416", "0.49726093", "0.49681327", "0.49609277", "0.4905363", "0.49029723", "0.4894804", "0.48929042", "0.4891225", "0.48800123", "0.4855853", "0.4837431", "0.4817131", "0.48000243", "0.4776535", "0.4774336", "0.47717834", "0.477153", "0.47666368", "0.47610116", "0.4730966", "0.47278446", "0.4726941", "0.47178942", "0.47099587", "0.47075924", "0.46870387", "0.46788323", "0.46530136", "0.46492243", "0.4647842", "0.46277544", "0.46269107", "0.46201837", "0.4614352", "0.46078408", "0.4575132", "0.45633292", "0.45395803", "0.45312205", "0.45193192", "0.4516688", "0.45011547", "0.4498642", "0.4497219", "0.44900364", "0.44888803", "0.44881198", "0.44603142", "0.44519588", "0.4442267", "0.44400075", "0.44389814", "0.44305283", "0.44263437", "0.44183847", "0.44159794", "0.4404007", "0.44022045", "0.4399676", "0.43858916", "0.43767378", "0.43762612", "0.43730217", "0.4371922" ]
0.72543186
0
parseDependencies ... The term "dependency" is used here to refer to any data type that may require an include or forward declare.
func (i *Interface) parseDependencies() { var dependencies []string for _, function := range i.Functions { // "expanded" refers to creating a parsers.from a templated type, i.e "QMap <int, QString>" becomes [QMap int QString] expandedReturnType := strings.FieldsFunc(function.ReturnType, templatedTypeSeparators) for _, dataType := range(expandedReturnType) { dependencies = append(dependencies, strings.TrimSpace(dataType)) } for _, parameter := range function.Parameters { expandedParameter := strings.FieldsFunc(parameter.Type, templatedTypeSeparators) for _, innerParameter := range expandedParameter { dependencies = append(dependencies, strings.TrimSpace(innerParameter)) } } } i.Dependencies = dependencies i.Dependencies = parsers.RemoveConstSpecifiers(i.Dependencies) i.Dependencies = parsers.RemovePointersAndReferences(i.Dependencies) i.Dependencies = parsers.RemoveStdDataTypes(i.Dependencies) i.Dependencies = parsers.MapDataTypesToLibraryDependencies(i.Dependencies) i.Dependencies = parsers.RemoveDuplicates(i.Dependencies) sort.Strings(i.Dependencies) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func calculateDependencies(definition string) (definitions dependencies, err error) {\n half := make(dependencies, 0)\n marked := make(dependencies, 0)\n\n err = visitDefinition(definition, &half, &marked)\n\n if nil == err {\n definitions = marked\n }\n\n return\n}", "func convertDependencies(deps []string) []*license_metadata_proto.AnnotatedDependency {\n\tvar ret []*license_metadata_proto.AnnotatedDependency\n\n\tfor _, d := range deps {\n\t\tcomponents := strings.Split(d, \":\")\n\t\tdep := components[0]\n\t\tcomponents = components[1:]\n\t\tad := &license_metadata_proto.AnnotatedDependency{\n\t\t\tFile: proto.String(dep),\n\t\t\tAnnotations: make([]string, 0, len(components)),\n\t\t}\n\t\tfor _, ann := range components {\n\t\t\tif len(ann) == 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tad.Annotations = append(ad.Annotations, ann)\n\t\t}\n\t\tret = append(ret, ad)\n\t}\n\n\treturn ret\n}", "func (s DhcpServer) Dependencies() (deps []depgraph.Dependency) {\n\treturn []depgraph.Dependency{\n\t\t{\n\t\t\tRequiredItem: depgraph.ItemRef{\n\t\t\t\tItemType: NetNamespaceTypename,\n\t\t\t\tItemName: normNetNsName(s.NetNamespace),\n\t\t\t},\n\t\t\tDescription: \"Network namespace must exist\",\n\t\t},\n\t\t{\n\t\t\tRequiredItem: depgraph.ItemRef{\n\t\t\t\tItemType: VethTypename,\n\t\t\t\tItemName: s.VethName,\n\t\t\t},\n\t\t\tDescription: \"veth interface must exist\",\n\t\t},\n\t}\n}", "func (p InfraConfigValidateJMX) Dependencies() []string {\n\treturn []string{\n\t\t\"Infra/Config/IntegrationsMatch\",\n\t\t\"Java/Env/Version\",\n\t}\n}", "func (i *Interface) parseForwardDeclares() {\n\tfor _, dependency := range i.Dependencies {\n\t\tif !parsers.ShouldBeIncludedInHeader(dependency) {\n\t\t\ti.ForwardDeclaresString += \"class \" + dependency + \";\\n\"\n\t\t} \n\t}\n}", "func Parse(description string) (deps []Dep) {\n\tfor _, footerValue := range footer.ParseMessage(description)[cqDependKey] {\n\t\tfor _, v := range strings.Split(footerValue, \",\") {\n\t\t\tif dep, err 
:= parseSingleDep(v); err == nil {\n\t\t\t\tdeps = append(deps, dep)\n\t\t\t}\n\t\t}\n\t}\n\tif len(deps) <= 1 {\n\t\treturn deps\n\t}\n\tsort.Slice(deps, func(i, j int) bool { return deps[i].cmp(deps[j]) == 1 })\n\t// Remove duplicates. We don't use the map in the first place, because\n\t// duplicates are highly unlikely in practice and sorting is nice for\n\t// determinism.\n\tl := 0\n\tfor i := 1; i < len(deps); i++ {\n\t\tif d := deps[i]; d.cmp(deps[l]) != 0 {\n\t\t\tl += 1\n\t\t\tdeps[l] = d\n\t\t}\n\t}\n\treturn deps[:l+1]\n}", "func Dependencies(g *Graph) (pacman.Packages, aur.Packages, []string) {\n\trps := make(pacman.Packages, 0)\n\taps := make(aur.Packages, 0)\n\tups := make([]string, 0)\n\n\tnames := make(map[string]bool)\n\tnodes := AllNodesBottomUp(g)\n\tfor _, vn := range nodes {\n\t\tn := vn.(*Node)\n\t\tif names[n.PkgName()] {\n\t\t\tcontinue\n\t\t}\n\n\t\tnames[n.PkgName()] = true\n\t\tswitch p := n.AnyPackage.(type) {\n\t\tcase *aur.Package:\n\t\t\taps = append(aps, p)\n\t\tcase *pacman.Package:\n\t\t\tif p.Origin == pacman.UnknownOrigin {\n\t\t\t\tups = append(ups, p.Name)\n\t\t\t} else {\n\t\t\t\trps = append(rps, p)\n\t\t\t}\n\t\tdefault:\n\t\t\tpanic(\"unexpected type of package in graph\")\n\t\t}\n\t}\n\treturn rps, aps, ups\n}", "func (d *ABFToInterfaceDescriptor) Dependencies(key string, emptyVal proto.Message) []api.Dependency {\n\t_, ifName, _ := vpp_abf.ParseToInterfaceKey(key)\n\treturn []api.Dependency{\n\t\t{\n\t\t\tLabel: interfaceDep,\n\t\t\tKey: vpp_interfaces.InterfaceKey(ifName),\n\t\t},\n\t}\n}", "func (p BaseConfigProxyDetect) Dependencies() []string {\n\t// no dependencies!\n\treturn []string{\n\t\t\"Base/Config/Validate\",\n\t\t\"Base/Env/CollectEnvVars\",\n\t\t\"Base/Env/CollectSysProps\",\n\t}\n}", "func buildDependencies(fdSet *dpb.FileDescriptorSet) {\n\t// Dependency to google/api/annotations.proto for gRPC-HTTP transcoding. Here a couple of problems arise:\n\t// 1. 
Problem: \tWe cannot call descriptor.ForMessage(&annotations.E_Http), which would be our\n\t//\t\t\t\trequired dependency. However, we can call descriptor.ForMessage(&http) and\n\t//\t\t\t\tthen construct the extension manually.\n\t// 2. Problem: \tThe name is set wrong.\n\t// 3. Problem: \tgoogle/api/annotations.proto has a dependency to google/protobuf/descriptor.proto.\n\thttp := annotations.Http{}\n\tfd, _ := descriptor.MessageDescriptorProto(&http)\n\n\textensionName := \"http\"\n\tn := \"google/api/annotations.proto\"\n\tl := dpb.FieldDescriptorProto_LABEL_OPTIONAL\n\tt := dpb.FieldDescriptorProto_TYPE_MESSAGE\n\ttName := \"google.api.HttpRule\"\n\textendee := \".google.protobuf.MethodOptions\"\n\n\thttpExtension := &dpb.FieldDescriptorProto{\n\t\tName: &extensionName,\n\t\tNumber: &annotations.E_Http.Field,\n\t\tLabel: &l,\n\t\tType: &t,\n\t\tTypeName: &tName,\n\t\tExtendee: &extendee,\n\t}\n\n\tfd.Extension = append(fd.Extension, httpExtension) // 1. Problem\n\tfd.Name = &n // 2. Problem\n\tfd.Dependency = append(fd.Dependency, \"google/protobuf/descriptor.proto\") //3.rd Problem\n\n\t// Build other required dependencies\n\te := empty.Empty{}\n\tfdp := dpb.DescriptorProto{}\n\tfd2, _ := descriptor.MessageDescriptorProto(&e)\n\tfd3, _ := descriptor.MessageDescriptorProto(&fdp)\n\tdependencies := []*dpb.FileDescriptorProto{fd, fd2, fd3}\n\n\t// According to the documentation of protoReflect.CreateFileDescriptorFromSet the file I want to print\n\t// needs to be at the end of the array. 
All other FileDescriptorProto are dependencies.\n\tfdSet.File = append(dependencies, fdSet.File...)\n}", "func (l *LoadInventory) Dependencies(\n\tc base.Container,\n) (err error) {\n\tl.client, err = c.S3API()\n\tif err != nil {\n\t\treturn err\n\t}\n\tl.db, err = c.DB()\n\tl.inventory = c.InventoryManager()\n\n\treturn err\n}", "func (*serverModule) Dependencies() []module.Dependency {\n\treturn nil\n}", "func ParseDepFile(content []byte) ([]string, []string) {\n\tcontent = bytes.Replace(content, []byte(\"\\\\\\n\"), nil, -1)\n\tcomponents := bytes.Split(content, []byte(\":\"))\n\tif len(components) != 2 {\n\t\treturn nil, nil\n\t}\n\n\ttargetStrs := bytes.Split(components[0], []byte(\" \"))\n\tdepStrs := bytes.Split(components[1], []byte(\" \"))\n\n\tvar targets, deps []string\n\tfor _, t := range targetStrs {\n\t\tif len(t) > 0 {\n\t\t\ttargets = append(targets, string(t))\n\t\t}\n\t}\n\tfor _, d := range depStrs {\n\t\tif len(d) > 0 {\n\t\t\tdeps = append(deps, string(d))\n\t\t}\n\t}\n\n\treturn targets, deps\n}", "func addDependencies(fdSet *dpb.FileDescriptorSet) {\n\t// At last, we need to add the dependencies to the FileDescriptorProto in order to get them rendered.\n\tlastFdProto := getLast(fdSet.File)\n\tfor _, fd := range fdSet.File {\n\t\tif fd != lastFdProto {\n\t\t\tif *fd.Name == \"google/protobuf/empty.proto\" { // Reference: https://github.com/googleapis/gnostic-grpc/issues/8\n\t\t\t\tif shouldRenderEmptyImport {\n\t\t\t\t\tlastFdProto.Dependency = append(lastFdProto.Dependency, *fd.Name)\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tlastFdProto.Dependency = append(lastFdProto.Dependency, *fd.Name)\n\t\t}\n\t}\n\t// Sort imports so they will be rendered in a consistent order.\n\tsort.Strings(lastFdProto.Dependency)\n}", "func visitDefinition(definition string, half, marked *dependencies) (err error) {\n if half.includes(definition) {\n return errCyclicDependency\n } else if !marked.includes(definition) && !half.includes(definition) {\n 
half.add(definition)\n task := gofer.index(definition)\n\n if nil == task {\n return errUnresolvableDependencies\n }\n\n for _, dependency := range task.Dependencies {\n err = visitDefinition(dependency, half, marked)\n if nil != err {\n return\n }\n }\n\n half.remove(definition)\n marked.add(definition)\n }\n\n return\n}", "func (d *InterfaceVrfDescriptor) Dependencies(key string, emptyVal proto.Message) (deps []kvs.Dependency) {\n\tif _, vrf, ipv4, ipv6, isIfaceVrfKey := interfaces.ParseInterfaceVrfKey(key); isIfaceVrfKey {\n\t\tif vrf > 0 && ipv4 {\n\t\t\tdeps = append(deps, kvs.Dependency{\n\t\t\t\tLabel: vrfV4Dep,\n\t\t\t\tKey: l3.VrfTableKey(uint32(vrf), l3.VrfTable_IPV4),\n\t\t\t})\n\t\t}\n\t\tif vrf > 0 && ipv6 {\n\t\t\tdeps = append(deps, kvs.Dependency{\n\t\t\t\tLabel: vrfV6Dep,\n\t\t\t\tKey: l3.VrfTableKey(uint32(vrf), l3.VrfTable_IPV6),\n\t\t\t})\n\t\t}\n\t\treturn deps\n\t}\n\n\t_, fromIface, _ := interfaces.ParseInterfaceInheritedVrfKey(key)\n\treturn []kvs.Dependency{\n\t\t{\n\t\t\tLabel: inheritedVrfDep,\n\t\t\tAnyOf: kvs.AnyOfDependency{\n\t\t\t\tKeyPrefixes: []string{interfaces.InterfaceVrfKeyPrefix(fromIface)},\n\t\t\t},\n\t\t},\n\t}\n}", "func (h *descriptorHandler) dependencies(key string, value proto.Message) (deps []kvs.Dependency) {\n\tif h.descriptor == nil || h.descriptor.Dependencies == nil {\n\t\treturn\n\t}\n\t// TODO: check that label is unique for each KV pair, throw panic if not (?)\n\tdefer trackDescMethod(h.descriptor.Name, \"Dependencies\")()\n\treturn h.descriptor.Dependencies(key, value)\n}", "func Dependency(ids ...Identifier) Constraint {\n\treturn dependency(ids)\n}", "func (m *multiNode) SetDependency(dep []int32) {\n\tm.dependency = dep\n}", "func (shl *SharedLibrary) Dependencies() []string {\n\treturn shl.dependencies\n}", "func (t BaseContainersDetectDocker) Dependencies() []string {\n\treturn []string{}\n}", "func (t DotNetAgentVersion) Dependencies() []string {\n\treturn 
[]string{\n\t\t\"DotNet/Agent/Installed\",\n\t}\n}", "func DependencySort(ks []HelmRelease) ([]HelmRelease, error) {\n\tn := make(graph)\n\tlookup := map[string]*HelmRelease{}\n\tfor i := 0; i < len(ks); i++ {\n\t\tn[ks[i].Name] = after(ks[i].Spec.DependsOn)\n\t\tlookup[ks[i].Name] = &ks[i]\n\t}\n\tsccs := tarjanSCC(n)\n\tvar sorted []HelmRelease\n\tvar unsortable CircularDependencyError\n\tfor i := 0; i < len(sccs); i++ {\n\t\ts := sccs[i]\n\t\tif len(s) != 1 {\n\t\t\tunsortable = append(unsortable, s)\n\t\t\tcontinue\n\t\t}\n\t\tif k, ok := lookup[s[0]]; ok {\n\t\t\tsorted = append(sorted, *k.DeepCopy())\n\t\t}\n\t}\n\tif unsortable != nil {\n\t\tfor i, j := 0, len(unsortable)-1; i < j; i, j = i+1, j-1 {\n\t\t\tunsortable[i], unsortable[j] = unsortable[j], unsortable[i]\n\t\t}\n\t\treturn nil, unsortable\n\t}\n\treturn sorted, nil\n}", "func importOrder(deps map[string][]string) ([]string, error) {\n\t// add all nodes and edges\n\tvar remainingNodes = map[string]struct{}{}\n\tvar graph = map[edge]struct{}{}\n\tfor to, froms := range deps {\n\t\tremainingNodes[to] = struct{}{}\n\t\tfor _, from := range froms {\n\t\t\tremainingNodes[from] = struct{}{}\n\t\t\tgraph[edge{from: from, to: to}] = struct{}{}\n\t\t}\n\t}\n\n\t// find initial nodes without any dependencies\n\tsorted := findAndRemoveNodesWithoutDependencies(remainingNodes, graph)\n\tfor i := 0; i < len(sorted); i++ {\n\t\tnode := sorted[i]\n\t\tremoveEdgesFrom(node, graph)\n\t\tsorted = append(sorted, findAndRemoveNodesWithoutDependencies(remainingNodes, graph)...)\n\t}\n\tif len(remainingNodes) > 0 {\n\t\treturn nil, fmt.Errorf(\"cycle: remaining nodes: %#v, remaining edges: %#v\", remainingNodes, graph)\n\t}\n\t//for _, n := range sorted {\n\t//\tfmt.Println(\"topological order\", n)\n\t//}\n\treturn sorted, nil\n}", "func NewDependencyInfo(line string) (*DependencyInfo, error) {\n\tsep1 := strings.IndexRune(line, ' ')\n\tif sep1 != 1 {\n\t\treturn nil, errors.New(\"Invalid separator\")\n\t}\n\tsep2 := 
strings.IndexRune(line[sep1+1:], ' ')\n\tif sep2 < 0 {\n\t\treturn nil, errors.New(\"Invalid separator\")\n\t}\n\tsep2 += sep1 + 1\n\n\tdepi := new(DependencyInfo)\n\tvar err error\n\tdepi.To, err = strconv.Atoi(line[sep1+1 : sep2-1])\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdepi.DepType = rune(line[sep2-1])\n\n\tdepi.Features = getFeatures(line[sep2+1:], '>', 1)\n\tif pasresult, ok := depi.Features[\"格解析結果\"]; ok {\n\t\tdepi.Pas, err = NewPas(pasresult, true)\n\t}\n\n\treturn depi, err\n}", "func (t BrowserAgentDetect) Dependencies() []string {\n\treturn []string{\n\t\t\"Browser/Agent/GetSource\",\n\t}\n}", "func (j *Job) Dependency(name LinkName) (Dependency, error) {\n\tfor _, d := range j.Dependencies {\n\t\tif d.Name == name {\n\t\t\treturn d, nil\n\t\t}\n\t}\n\treturn Dependency{}, maskAny(errgo.WithCausef(nil, DependencyNotFoundError, name.String()))\n}", "func (td typeDefiner) getLocalDeps(ptype parse.Type) (deps []*typeDefBuilder) {\n\tswitch pt := ptype.(type) {\n\tcase *parse.TypeNamed:\n\t\t// Named references to other types in this package are all we care about.\n\t\tif b := td.builders[pt.Name]; b != nil {\n\t\t\tdeps = append(deps, b)\n\t\t}\n\tcase *parse.TypeEnum:\n\t\t// No deps.\n\tcase *parse.TypeArray:\n\t\tdeps = append(deps, td.getLocalDeps(pt.Elem)...)\n\tcase *parse.TypeList:\n\t\tdeps = append(deps, td.getLocalDeps(pt.Elem)...)\n\tcase *parse.TypeSet:\n\t\tdeps = append(deps, td.getLocalDeps(pt.Key)...)\n\tcase *parse.TypeMap:\n\t\tdeps = append(deps, td.getLocalDeps(pt.Key)...)\n\t\tdeps = append(deps, td.getLocalDeps(pt.Elem)...)\n\tcase *parse.TypeStruct:\n\t\tfor _, field := range pt.Fields {\n\t\t\tdeps = append(deps, td.getLocalDeps(field.Type)...)\n\t\t}\n\tcase *parse.TypeUnion:\n\t\tfor _, field := range pt.Fields {\n\t\t\tdeps = append(deps, td.getLocalDeps(field.Type)...)\n\t\t}\n\tcase *parse.TypeOptional:\n\t\tdeps = append(deps, td.getLocalDeps(pt.Base)...)\n\tdefault:\n\t\tpanic(fmt.Errorf(\"vdl: unhandled parse.Type 
%T %#v\", ptype, ptype))\n\t}\n\treturn\n}", "func showDepsInfo(gomod []byte) {\n\tdeps := depsy.Extract(gomod, false)\n\n\tif len(deps) == 0 {\n\t\treturn\n\t}\n\n\tfmtutil.Separator(false, \"DEPENDENCIES\")\n\n\tfor _, dep := range deps {\n\t\tif dep.Extra == \"\" {\n\t\t\tfmtc.Printf(\" {s}%8s{!} %s\\n\", dep.Version, dep.Path)\n\t\t} else {\n\t\t\tfmtc.Printf(\" {s}%8s{!} %s {s-}(%s){!}\\n\", dep.Version, dep.Path, dep.Extra)\n\t\t}\n\t}\n}", "func (p BaseLogCopy) Dependencies() []string {\n\treturn []string{\n\t\t\"Base/Env/CollectEnvVars\",\n\t\t\"Base/Env/CollectSysProps\",\n\t\t\"Base/Config/Validate\",\n\t}\n}", "func ResolveDependencies(m meta.RESTMapper, objects []unstructuredv1.Unstructured, uids []types.UID) (NodeMap, error) {\n\treturn resolveDeps(m, objects, uids, true)\n}", "func validateDependencies(eventDependencies []v1alpha1.EventDependency) error {\n\tif len(eventDependencies) < 1 {\n\t\treturn errors.New(\"no event dependencies found\")\n\t}\n\tfor _, dep := range eventDependencies {\n\t\tif dep.Name == \"\" {\n\t\t\treturn errors.New(\"event dependency must define a name\")\n\t\t}\n\t\t// TODO: GatewayName will be deprecated\n\t\tif dep.EventSourceName == \"\" && dep.GatewayName == \"\" {\n\t\t\treturn errors.New(\"event dependency must define the EventSource name\")\n\t\t}\n\n\t\tif dep.EventName == \"\" {\n\t\t\treturn errors.New(\"event dependency must define the event name\")\n\t\t}\n\n\t\tif err := validateEventFilter(dep.Filters); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (a *baseDomain) Dependencies() []asset.Asset {\n\treturn []asset.Asset{\n\t\t&platform{},\n\t}\n}", "func Dependencies(source v1alpha1.SourceSpec) []string {\n\tcandidateMap := make(map[string]bool)\n\tregexps := getRegexpsForLanguage(source.Language)\n\tsubMatches := findAllStringSubmatch(source.Content, regexps...)\n\tfor _, uriPrefix := range subMatches {\n\t\tcandidateComp := decodeComponent(uriPrefix)\n\t\tif candidateComp != \"\" 
{\n\t\t\tcandidateMap[candidateComp] = true\n\t\t}\n\t}\n\t// Remove duplicates and sort\n\tcandidateComponents := make([]string, 0, len(candidateMap))\n\tfor cmp := range candidateMap {\n\t\tcandidateComponents = append(candidateComponents, cmp)\n\t}\n\tsort.Strings(candidateComponents)\n\treturn candidateComponents\n}", "func (o VirtualDatabaseSpecBuildSourcePtrOutput) Dependencies() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v *VirtualDatabaseSpecBuildSource) []string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Dependencies\n\t}).(pulumi.StringArrayOutput)\n}", "func (n *Node) CollectDependencies(m *Magnet) []*Node {\n\tvar ret []*Node\n\tret = append(ret, n)\n\tkeys := make(map[*Node]bool)\n\tfor _, v := range n.requires {\n\t\tn = m.findNode(v)\n\t\tif n == nil {\n\t\t\tpanic(fmt.Sprintf(\"type %s cannot be built!\", v))\n\t\t}\n\t\tfor _, v := range n.CollectDependencies(m) {\n\t\t\tif _, has := keys[v]; !has {\n\t\t\t\tkeys[v] = true\n\t\t\t\tret = append(ret, v)\n\t\t\t}\n\t\t}\n\t}\n\treturn ret\n}", "func (o *V0037JobProperties) SetDependency(v string) {\n\to.Dependency = &v\n}", "func (d *galleryDocument) Dependencies() map[string]struct{} {\n\treturn map[string]struct{}{tmplPathToName(galTmplPath): {}}\n}", "func deps(rule *bazel.Rule) map[bazel.Label]bool {\n\tret := make(map[bazel.Label]bool)\n\tfor _, d := range rule.StringListAttr(\"deps\") {\n\t\tif l, err := bazel.ParseRelativeLabel(rule.PkgName, d); err == nil {\n\t\t\tret[l] = true\n\t\t}\n\t}\n\treturn ret\n}", "func buildDependencyGraph(req *backend.QueryDataRequest) (*simple.DirectedGraph, error) {\n\tgraph, err := buildGraph(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tregistry := buildNodeRegistry(graph)\n\n\tif err := buildGraphEdges(graph, registry); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn graph, nil\n}", "func addDependencies(s *scope, name string, obj pyObject, target *core.BuildTarget, exported, internal bool) {\n\taddStrings(s, name, obj, 
func(str string) {\n\t\tif s.state.Config.Bazel.Compatibility && !core.LooksLikeABuildLabel(str) && !strings.HasPrefix(str, \"@\") {\n\t\t\t// *sigh*... Bazel seems to allow an implicit : on the start of dependencies\n\t\t\tstr = \":\" + str\n\t\t}\n\t\ttarget.AddMaybeExportedDependency(checkLabel(s, s.parseLabelInPackage(str, s.pkg)), exported, false, internal)\n\t})\n}", "func resolveMessageDependency(msg *desc.MessageDescriptor, dep messageDependency, encountered map[string]bool) {\n\tif encountered[msg.GetFullyQualifiedName()] {\n\t\treturn\n\t}\n\n\tdep[msg.GetFullyQualifiedName()] = msg\n\tfor _, f := range msg.GetFields() {\n\t\tif entity.IsMessageType(f.GetType()) {\n\t\t\tresolveMessageDependency(f.GetMessageType(), dep, encountered)\n\t\t}\n\t}\n}", "func (o VirtualDatabaseSpecBuildSourceOutput) Dependencies() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v VirtualDatabaseSpecBuildSource) []string { return v.Dependencies }).(pulumi.StringArrayOutput)\n}", "func parse(in io.Reader) (_ *ast, depend_order []string, err error) {\n\n\tl := &yyLexState{\n\t\tline_no: 1,\n\t\tin: bufio.NewReader(in),\n\t\tcommand: make(map[string]*command),\n\t\tpredicate: make(map[string]*predicate),\n\t\texeced: make(map[string]bool),\n\t}\n\n\tyyParse(l)\n\tif l.err != nil {\n\t\treturn nil, nil, l.err\n\t}\n\n\tif len(l.execed) == 0 && len(l.predicate) == 0 {\n\t\treturn nil, nil, errors.New(\"no exec or predicate statement\")\n\t}\n\tif len(l.execed)+len(l.predicate) > 255 {\n\t\treturn nil, nil, errors.New(\"exec + predicate count > 255\")\n\t}\n\n\t// Note: all argv lengths must be <= 255 elements!\n\t// verify_argv_length()\n\n\t// add unqualified exec ... 
() statements to the dependency list.\n\n\tvar find_unreferenced_EXEC_PRED func(a *ast)\n\tfind_unreferenced_EXEC_PRED = func(a *ast) {\n\n\t\tif a == nil {\n\t\t\treturn\n\t\t}\n\t\tswitch {\n\t\tcase a.yy_tok == EXEC && a.command.depend_ref_count == 0:\n\t\t\tn := a.command.name\n\t\t\tl.depends = append(l.depends, fmt.Sprintf(\"%s %s\", n, n))\n\t\tcase a.yy_tok == PREDICATE && a.predicate.depend_ref_count == 0:\n\t\t\tn := a.predicate.name\n\t\t\tl.depends = append(l.depends, fmt.Sprintf(\"%s %s\", n, n))\n\t\t}\n\t\tfind_unreferenced_EXEC_PRED(a.left)\n\t\tfind_unreferenced_EXEC_PRED(a.right)\n\t\tfind_unreferenced_EXEC_PRED(a.next)\n\t}\n\tfind_unreferenced_EXEC_PRED(l.ast_head)\n\n\tdepend_order = tsort(l.depends)\n\tif depend_order == nil {\n\t\tl.err = errors.New(\"statement invocation order has cycles\")\n\t}\n\tfor i, j := 0, len(depend_order)-1; i < j; i, j = i+1, j-1 {\n\t\tdepend_order[i], depend_order[j] =\n\t\t\tdepend_order[j], depend_order[i]\n\t}\n\treturn l.ast_head, depend_order, l.err\n}", "func (p DotnetRequirementsDatastores) Dependencies() []string {\n\treturn []string{\n\t\t\"DotNet/Agent/Installed\",\n\t}\n}", "func (p BaseConfigLogLevel) Dependencies() []string {\n\treturn []string{\n\t\t\"Base/Config/Validate\", //This identifies this task as dependent on \"Base/Config/Validate\" and so the results from that task will be passed to this task. 
See the execute method to see how to interact with the results.\n\t}\n}", "func ResolveDependency(dependencies []v1alpha1.EventDependency, events *v1alpha1.Event) *v1alpha1.EventDependency {\n\tfor _, dependency := range dependencies {\n\t\tgatewayNameGlob, err := glob.Compile(dependency.GatewayName)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\t\teventNameGlob, err := glob.Compile(dependency.EventName)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\t\tif gatewayNameGlob.Match(events.Context.Source) && eventNameGlob.Match(events.Context.Subject) {\n\t\t\treturn &dependency\n\t\t}\n\t}\n\treturn nil\n}", "func (p *Wheel) Dependencies() []Dependency {\n\tvar dependencies []Dependency\n\n\tfor _, row := range p.RequiresDist {\n\t\tdep, err := version.ParseDependency(row)\n\t\tif err != nil {\n\t\t\tfmt.Fprintf(os.Stderr, \"❗️ %s: %s(%v)\\n\", p.name, row, err)\n\t\t\tcontinue\n\t\t}\n\t\tinstall, err := dep.Evaluate(env)\n\t\tif err != nil {\n\t\t\tfmt.Fprintf(os.Stderr, \"❗️ %s: %s(%v)\\n\", p.name, row, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !install {\n\t\t\tcontinue\n\t\t}\n\n\t\t// fmt.Fprintf(os.Stderr, \"🍀 %s: %s(minimal = %s)\\n\", name, row, version.Minimal(dep.Versions))\n\t\tdependencies = append(dependencies, Dependency{\n\t\t\tName: NormalizePackageName(dep.Name),\n\t\t\tVersion: version.Minimal(dep.Versions),\n\t\t})\n\t}\n\n\treturn dependencies\n}", "func hasDependency(bld *build.File, r *build.Rule, dep string) bool {\n\tpkg := filepath.Dir(bld.Path)\n\toldDeps := r.Attr(\"deps\")\n\tif edit.ListFind(oldDeps, dep, pkg) != nil {\n\t\treturn true\n\t}\n\truntimeDeps := r.Attr(\"runtime_deps\")\n\treturn edit.ListFind(runtimeDeps, dep, pkg) != nil\n}", "func Dependencies(jaeger *v1alpha1.Jaeger) []batchv1.Job {\n\tif strings.ToLower(jaeger.Spec.Storage.Type) == \"cassandra\" {\n\t\treturn cassandraDeps(jaeger)\n\t}\n\n\treturn []batchv1.Job{}\n}", "func (cd *circularDependency) checkDependency(d *dependency, m *method) error {\n\t// If this Dependency is 
already checked,\n\t// we don't need to check it again\n\tif cd.isChecked(d) {\n\t\treturn nil\n\t}\n\n\t//log.Println(\"CD for Dependency\", d.Value.Type())\n\n\t// Add this dependency type to the dependency list\n\t// and check if this type desn't already exist\n\terr := cd.addAndCheck(d.value.Type())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Check if this Dependency has New Method\n\tif d.constructor != nil {\n\t\tfor i := 0; i < d.constructor.Type.NumIn(); i++ {\n\n\t\t\tt := d.constructor.Type.In(i)\n\t\t\t//log.Println(\"CD for Dependency New Dependency\", i, t, dependency.isType(t))\n\n\t\t\t// The first element will always be the dependency itself\n\t\t\tif d.isType(t) {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// All context types doesn't need to be checked\n\t\t\t// it will always be present in the context\n\t\t\tif isContextType(t) {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\td, exist := m.dependencies.vaueOf(t)\n\t\t\tif !exist { // It should never occurs!\n\t\t\t\treturn fmt.Errorf(\"Danger! No dependency %s found! 
Something very wrong happened!\", t)\n\t\t\t}\n\n\t\t\t// Go ahead recursively on each Dependency\n\t\t\terr := cd.checkDependency(d, m)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\t// Remove itself from the list\n\tcd.pop()\n\n\t// Add this dependency to the checked list\n\tcd.checked = append(cd.checked, d)\n\n\treturn nil\n}", "func Dependencies(logger logr.Logger, manager feature.ResourceManagers, dda *v2alpha1.DatadogAgent) (errs []error) {\n\toverrides := dda.Spec.Override\n\tnamespace := dda.Namespace\n\n\tfor component, override := range overrides {\n\t\terr := overrideRBAC(logger, manager, override, component, namespace)\n\t\tif err != nil {\n\t\t\terrs = append(errs, err)\n\t\t}\n\n\t\t// Handle custom agent configurations (datadog.yaml, cluster-agent.yaml, etc.)\n\t\terrs = append(errs, overrideCustomConfigs(logger, manager, override.CustomConfigurations, dda.Name, namespace)...)\n\n\t\t// Handle custom check configurations\n\t\tconfdCMName := fmt.Sprintf(v2alpha1.ExtraConfdConfigMapName, strings.ToLower((string(component))))\n\t\terrs = append(errs, overrideExtraConfigs(logger, manager, override.ExtraConfd, namespace, confdCMName, true)...)\n\n\t\t// Handle custom check files\n\t\tchecksdCMName := fmt.Sprintf(v2alpha1.ExtraChecksdConfigMapName, strings.ToLower((string(component))))\n\t\terrs = append(errs, overrideExtraConfigs(logger, manager, override.ExtraChecksd, namespace, checksdCMName, false)...)\n\n\t\t// Handle scc\n\t\terrs = append(errs, overrideSCC(manager, dda)...)\n\t}\n\n\treturn errs\n}", "func convertToDependency(f amboy.Format, d *DependencyInterchange) (dependency.Manager, error) {\n\tfactory, err := GetDependencyFactory(d.Type)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdep := factory()\n\n\tif dep.Type().Version != d.Version {\n\t\treturn nil, errors.Errorf(\"dependency '%s' (version=%d) does not match the current version (%d) for the dependency type '%s'\",\n\t\t\td.Type, d.Version, dep.Type().Version, 
dep.Type().Name)\n\t}\n\n\t// this works, because we want to use all the data from the\n\t// interchange object, but want to use the type information\n\t// associated with the object that we produced with the\n\t// factory.\n\terr = convertFrom(f, d.Dependency, dep)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"converting dependency\")\n\t}\n\n\treturn dep, nil\n}", "func makeDependencyInterchange(f amboy.Format, d dependency.Manager) (*DependencyInterchange, error) {\n\ttypeInfo := d.Type()\n\n\tdata, err := convertTo(f, d)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\toutput := &DependencyInterchange{\n\t\tType: typeInfo.Name,\n\t\tVersion: typeInfo.Version,\n\t\tEdges: d.Edges(),\n\t\tDependency: data,\n\t}\n\n\treturn output, nil\n}", "func (t DotNetCoreRequirementsNetCoreVersion) Dependencies() []string {\n\treturn []string{\n\t\t\"DotNetCore/Agent/Installed\",\n\t\t\"DotNetCore/Env/Versions\",\n\t}\n}", "func (u *comboUtility) Dependency(util utility) {\n\tu.children.Insert(util)\n}", "func Parse(ingress *networking.Ingress) *Dependencies {\n\tsecrets := extractSecrets(ingress)\n\tsecrets = append(secrets, secretsFromAnnotations(ingress)...)\n\n\treturn &Dependencies{\n\t\tServices: extractServices(ingress),\n\t\tEndpoints: extractServices(ingress),\n\t\tSecrets: secrets,\n\t\tConfigmaps: configmapsFromAnnotations(ingress),\n\t\tAnnotations: extractAnnotations(ingress),\n\t}\n}", "func (*gaeModule) Dependencies() []module.Dependency {\n\treturn []module.Dependency{\n\t\tmodule.OptionalDependency(redisconn.ModuleName), // for dscache, if enabled\n\t\tmodule.OptionalDependency(secrets.ModuleName), // to install DS random secrets backend\n\t}\n}", "func (i *Interface) parseIncludes() {\n\tfor _, dependency := range i.Dependencies {\n\t\tinclude := NewInclude(dependency)\n\t\tif parsers.ShouldBeIncludedInHeader(dependency) {\n\t\t\ti.HeaderIncludesString += include.ToString() + \"\\n\"\n\t\t} else {\n\t\t\ti.ImplementationIncludesString += 
include.ToString() + \"\\n\"\n\t\t}\n\t}\n}", "func (p *Parser) Parse(r dio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) {\n\tinfo, err := buildinfo.Read(r)\n\tif err != nil {\n\t\treturn nil, nil, convertError(err)\n\t}\n\n\tlibs := make([]types.Library, 0, len(info.Deps))\n\n\tfor _, dep := range info.Deps {\n\t\t// binaries with old go version may incorrectly add module in Deps\n\t\t// In this case Path == \"\", Version == \"Devel\"\n\t\t// we need to skip this\n\t\tif dep.Path == \"\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tmod := dep\n\t\tif dep.Replace != nil {\n\t\t\tmod = dep.Replace\n\t\t}\n\n\t\tlibs = append(libs, types.Library{\n\t\t\tName: mod.Path,\n\t\t\tVersion: mod.Version,\n\t\t})\n\t}\n\n\treturn libs, nil, nil\n}", "func (fastenJSON *JSON) AddDependency(target *JSON) {\n\tif target.Product == \"\" {\n\t\treturn\n\t}\n\n\tfor _, inner := range fastenJSON.Depset {\n\t\tfor _, dependency := range inner {\n\t\t\tif dependency.Product == target.Product {\n\t\t\t\tfound := false\n\t\t\t\tif target.Version == \"\" {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tfor _, constraint := range dependency.Constraints {\n\t\t\t\t\tif constraint == target.Version {\n\t\t\t\t\t\tfound = true\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif found {\n\t\t\t\t\treturn\n\t\t\t\t} else if target.Version != \"\" {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tif len(fastenJSON.Depset) == 0 {\n\t\tfastenJSON.Depset = append(fastenJSON.Depset, []Dependency{})\n\t}\n\tfastenJSON.Depset[0] = append(fastenJSON.Depset[0], Dependency{\n\t\tProduct: target.Product,\n\t\tForge: \"cratesio\",\n\t\tConstraints: []string{\"[\" + target.Version + \"]\"},\n\t})\n}", "func (tag scopeDependencyTag) extractDepInfo(ctx android.ModuleContext, dep android.Module, paths *scopePaths) {\n\terr := tag.depInfoExtractor(paths, ctx, dep)\n\tif err != nil {\n\t\tctx.ModuleErrorf(\"has an invalid {scopeDependencyTag: %s} dependency on module %s: %s\", tag.name, 
ctx.OtherModuleName(dep), err.Error())\n\t}\n}", "func (c *Controller) getHelmRequestDependencies(hr *v1alpha1.HelmRequest) ([]*v1alpha1.HelmRequest, error) {\n\tvar data []*v1alpha1.HelmRequest\n\tdeps := hr.Spec.Dependencies\n\tif len(deps) == 0 {\n\t\tklog.V(4).Infof(\"HelmRequest %s has no dependencies\", hr.GetName())\n\t\treturn nil, nil\n\t}\n\n\tfor _, name := range deps {\n\t\td, err := c.getHelmRequest(hr.GetNamespace(), name)\n\t\tif err != nil {\n\t\t\tklog.Errorf(\"Retrieve dependency %s for %s error: %s\", name, hr.GetName(), err.Error())\n\t\t\treturn nil, err\n\t\t}\n\t\tdata = append(data, d)\n\t}\n\n\treturn data, nil\n\n}", "func (c *ClusterK8sIO) Dependencies() []asset.Asset {\n\treturn []asset.Asset{\n\t\t&installconfig.InstallConfig{},\n\t\t&Networking{},\n\t}\n}", "func (d *RouteDescriptor) Dependencies(key string, route *l3.Route) []kvs.Dependency {\n\tvar dependencies []kvs.Dependency\n\t// the outgoing interface must exist and be UP\n\tif route.OutgoingInterface != \"\" {\n\t\tdependencies = append(dependencies, kvs.Dependency{\n\t\t\tLabel: routeOutInterfaceDep,\n\t\t\tKey: interfaces.InterfaceKey(route.OutgoingInterface),\n\t\t})\n\t}\n\n\t// non-zero VRFs\n\tvar protocol l3.VrfTable_Protocol\n\t_, isIPv6, _ := addrs.ParseIPWithPrefix(route.DstNetwork)\n\tif isIPv6 {\n\t\tprotocol = l3.VrfTable_IPV6\n\t}\n\tif route.VrfId != 0 {\n\t\tdependencies = append(dependencies, kvs.Dependency{\n\t\t\tLabel: vrfTableDep,\n\t\t\tKey: l3.VrfTableKey(route.VrfId, protocol),\n\t\t})\n\t}\n\tif route.Type == l3.Route_INTER_VRF && route.ViaVrfId != 0 {\n\t\tdependencies = append(dependencies, kvs.Dependency{\n\t\t\tLabel: viaVrfTableDep,\n\t\t\tKey: l3.VrfTableKey(route.ViaVrfId, protocol),\n\t\t})\n\t}\n\n\t// if destination network is netalloc reference, then the address must be allocated first\n\tallocDep, hasAllocDep := d.addrAlloc.GetAddressAllocDep(route.DstNetwork,\n\t\t\"\", \"dst_network-\")\n\tif hasAllocDep {\n\t\tdependencies = 
append(dependencies, allocDep)\n\t}\n\t// if GW is netalloc reference, then the address must be allocated first\n\tallocDep, hasAllocDep = d.addrAlloc.GetAddressAllocDep(route.NextHopAddr,\n\t\troute.OutgoingInterface, \"gw_addr-\")\n\tif hasAllocDep {\n\t\tdependencies = append(dependencies, allocDep)\n\t}\n\n\t// TODO: perhaps check GW routability\n\treturn dependencies\n}", "func(t *TargImp) isDependent(depend string) bool {\n\tfor _, y := range t.dependencies {\n\t\tif y == depend { return true }\n\t}\n\treturn false\n}", "func (s *memoryStorage) GetDependencies(srvFilter ...string) ([]tracer.Dependencies, error) {\n\ts.Lock()\n\tdefer s.Unlock()\n\n\tif len(srvFilter) == 0 {\n\t\tsrvFilter = make([]string, 0)\n\t\tfor _, srvName := range s.services {\n\t\t\tsrvFilter = append(srvFilter, srvName)\n\t\t}\n\t}\n\n\t// Sort service names alphabetically\n\tsort.Strings(srvFilter)\n\n\treplyCount := len(srvFilter)\n\tserviceDeps := make([]tracer.Dependencies, replyCount)\n\tfor index, srvName := range srvFilter {\n\t\tdep, exists := s.serviceDeps[srvName]\n\t\tif !exists {\n\t\t\tdep = &tracer.Dependencies{\n\t\t\t\tService: srvName,\n\t\t\t\tDependencies: make([]string, 0),\n\t\t\t}\n\t\t}\n\t\tserviceDeps[index] = *dep\n\t}\n\n\treturn serviceDeps, nil\n\n}", "func (s *SimplePublishNode) SetDependency(d *int64) {\n\ts.dependency = d\n}", "func (g DotGraph) GetDependencies(pkg string) []string {\n\tdependencies := []string{}\n\n\tfor from, deps := range g.edges {\n\t\tif from == getIDSafeNodeName(pkg) {\n\t\t\tfor _, edge := range deps {\n\t\t\t\tdependencies = append(dependencies, edge.nodeID)\n\t\t\t}\n\t\t}\n\t}\n\treturn dependencies\n}", "func ExpectDependency(logger *logrusx.Logger, dependencies ...interface{}) {\n\tif logger == nil {\n\t\tpanic(\"missing logger for dependency check\")\n\t}\n\tfor _, d := range dependencies {\n\t\tif d == nil {\n\t\t\tlogger.WithError(errors.WithStack(ErrNilDependency)).Fatalf(\"A fatal issue occurred.\")\n\t\t}\n\t}\n}", 
"func guessDeps(base string, skipImport bool) *cfg.Config {\n\tbuildContext, err := util.GetBuildContext()\n\tif err != nil {\n\t\tmsg.Die(\"Failed to build an import context: %s\", err)\n\t}\n\tname := buildContext.PackageName(base)\n\n\n\tmsg.Info(\"Generating a YAML configuration file and guessing the dependencies\")\n\n\tconfig := new(cfg.Config)\n\n\t// Get the name of the top level package\n\tconfig.Name = name\n\n\t// Import by looking at other package managers and looking over the\n\t// entire directory structure.\n\n\t// Attempt to import from other package managers.\n\tif !skipImport {\n\t\tguessImportDeps(base, config)\n\t}\n\n\timportLen := len(config.Imports)\n\tif importLen == 0 {\n\t\tmsg.Info(\"Scanning code to look for dependencies\")\n\t} else {\n\t\tmsg.Info(\"Scanning code to look for dependencies not found in import\")\n\t}\n\n\t// 返回依赖解析器\n\tr, err := dependency.NewResolver(base)\n\n\n\tif err != nil {\n\t\tmsg.Die(\"Error creating a dependency resolver: %s\", err)\n\t}\n\n\t// 初始化以测试模式\n\tr.ResolveTest = true\n\n\th := &dependency.DefaultMissingPackageHandler{Missing: []string{}, Gopath: []string{}}\n\tr.Handler = h\n\n\tsortable, testSortable, err := r.ResolveLocal(false)\n\tif err != nil {\n\t\tmsg.Die(\"分析本地依赖失败: %s\", err)\n\t}\n\n\tsort.Strings(sortable)\n\tsort.Strings(testSortable)\n\n\tvpath := r.VendorDir\n\tif !strings.HasSuffix(vpath, \"/\") {\n\t\tvpath = vpath + string(os.PathSeparator)\n\t}\n\n\tfor _, pa := range sortable {\n\t\tn := strings.TrimPrefix(pa, vpath)\n\t\troot, subpkg := util.NormalizeName(n)\n\n\t\tif !config.Imports.Has(root) && root != config.Name {\n\t\t\tmsg.Info(\"--> Found reference to %s\\n\", n)\n\t\t\td := &cfg.Dependency{\n\t\t\t\tName: root,\n\t\t\t}\n\t\t\tif len(subpkg) > 0 {\n\t\t\t\td.Subpackages = []string{subpkg}\n\t\t\t}\n\t\t\tconfig.Imports = append(config.Imports, d)\n\t\t} else if config.Imports.Has(root) {\n\t\t\tif len(subpkg) > 0 {\n\t\t\t\tsubpkg = strings.TrimPrefix(subpkg, 
\"/\")\n\t\t\t\td := config.Imports.Get(root)\n\t\t\t\tif !d.HasSubpackage(subpkg) {\n\t\t\t\t\tmsg.Info(\"--> Adding sub-package %s to %s\\n\", subpkg, root)\n\t\t\t\t\td.Subpackages = append(d.Subpackages, subpkg)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tfor _, pa := range testSortable {\n\t\tn := strings.TrimPrefix(pa, vpath)\n\t\troot, subpkg := util.NormalizeName(n)\n\n\t\tif config.Imports.Has(root) && root != config.Name {\n\t\t\tmsg.Debug(\"--> Found test reference to %s already listed as an import\", n)\n\t\t} else if !config.DevImports.Has(root) && root != config.Name {\n\t\t\tmsg.Info(\"--> Found test reference to %s\", n)\n\t\t\td := &cfg.Dependency{\n\t\t\t\tName: root,\n\t\t\t}\n\t\t\tif len(subpkg) > 0 {\n\t\t\t\td.Subpackages = []string{subpkg}\n\t\t\t}\n\t\t\tconfig.DevImports = append(config.DevImports, d)\n\t\t} else if config.DevImports.Has(root) {\n\t\t\tif len(subpkg) > 0 {\n\t\t\t\tsubpkg = strings.TrimPrefix(subpkg, \"/\")\n\t\t\t\td := config.DevImports.Get(root)\n\t\t\t\tif !d.HasSubpackage(subpkg) {\n\t\t\t\t\tmsg.Info(\"--> Adding test sub-package %s to %s\\n\", subpkg, root)\n\t\t\t\t\td.Subpackages = append(d.Subpackages, subpkg)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(config.Imports) == importLen && importLen != 0 {\n\t\tmsg.Info(\"--> Code scanning found no additional imports\")\n\t}\n\n\treturn config\n}", "func (t *tectonic) Dependencies() []asset.Asset {\n\treturn []asset.Asset{\n\t\tt.installConfig,\n\t\tt.ingressCertKey,\n\t\tt.kubeCA,\n\t}\n}", "func (p PHPEnvPHPinfoCLI) Dependencies() []string {\n\treturn []string{\"PHP/Config/Agent\"}\n}", "func (m *Master) Dependencies() []asset.Asset {\n\treturn []asset.Asset{\n\t\t&installconfig.InstallConfig{},\n\t\t&machine.Master{},\n\t}\n}", "func CheckDependencies() map[string]string {\n\tresult := make(map[string]string)\n\tfor _, dep := range getDependencies() {\n\t\tresult[dep.GetName()] = \"OK\"\n\t\tif isOK, err := dep.GetPinger()(); !isOK {\n\t\t\tresult[dep.GetName()] = 
err.Error()\n\t\t}\n\t}\n\tif len(result) == 0 {\n\t\t//no dependencies registered\n\t\tresult[\"NODEP\"] = \"No Dependencies Registered\"\n\t}\n\treturn result\n}", "func RegisterDependency(dep Dependency) {\n\tif dependencies == nil {\n\t\tdependencies = make(map[string]Dependency)\n\t}\n\tdependencies[dep.GetName()] = dep\n}", "func isVersionedDependency(content string) bool {\n\treturn !strings.HasPrefix(content, \"gopkg.in\")\n}", "func (task *Task) DependencyNames() []string {\n\tif len(task.dependencies) == 0 {\n\t\treturn nil\n\t}\n\tdeps := []string{}\n\tfor _, dep := range task.dependencies {\n\t\tswitch d := dep.(type) {\n\t\tdefault:\n\t\t\tpanic(\"dependencies can only be Serial or Parallel\")\n\t\tcase Series:\n\t\t\tdeps = append(deps, d.names()...)\n\t\tcase Parallel:\n\t\t\tdeps = append(deps, d.names()...)\n\t\tcase S:\n\t\t\tdeps = append(deps, Series(d).names()...)\n\t\tcase P:\n\t\t\tdeps = append(deps, Parallel(d).names()...)\n\t\t}\n\t}\n\treturn deps\n}", "func TestDeps(t *testing.T) {\n\tcmd := exec.Command(\"go\", \"list\", \"-deps\")\n\tout, err := cmd.Output()\n\tif err != nil {\n\t\tt.Skipf(\"'go list' failed: %s\", err)\n\t}\n\tfor _, pkg := range strings.Split(string(out), \"\\n\") {\n\t\t// Does pkg have form \"domain.name/dir\"?\n\t\tslash := strings.IndexByte(pkg, '/')\n\t\tdot := strings.IndexByte(pkg, '.')\n\t\tif 0 < dot && dot < slash {\n\t\t\tif strings.HasPrefix(pkg, \"go.starlark.net/\") ||\n\t\t\t\tstrings.HasPrefix(pkg, \"golang.org/x/sys/\") {\n\t\t\t\tcontinue // permitted dependencies\n\t\t\t}\n\t\t\tt.Errorf(\"new interpreter dependency: %s\", pkg)\n\t\t}\n\t}\n}", "func hasAngularDependency(r *build.Rule) bool {\n\te := r.Attr(\"deps\")\n\tfor _, li := range edit.AllLists(e) {\n\t\tfor _, elem := range li.List {\n\t\t\tstr, ok := elem.(*build.StringExpr)\n\t\t\tif ok && strings.HasPrefix(str.Value, \"//third_party/javascript/angular2\") {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t}\n\treturn false\n}", "func (k 
*LoopbackClient) Dependencies() []asset.Asset {\n\treturn []asset.Asset{\n\t\t&tls.AdminKubeConfigClientCertKey{},\n\t\t&tls.KubeAPIServerLocalhostCABundle{},\n\t\t&installconfig.InstallConfig{},\n\t}\n}", "func getDependencies() error {\n\t// go get -u dependencies from the dependencies array\n\tfor _, dependency := range dependencies {\n\t\t// Exec go get command inside Name folder using exec.Command\n\t\tcmd := exec.Command(\"go\", \"get\", \"-u\", dependency)\n\t\tcmd.Dir = Name\n\n\t\tif err := cmd.Run(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func (s *Service) DependenciesFromService() (d []*DependencyFromService) {\n\tfor name, dependency := range s.GetDependencies() {\n\t\td = append(d, &DependencyFromService{\n\t\t\tDependency: dependency,\n\t\t\tService: s,\n\t\t\tName: name,\n\t\t})\n\t}\n\treturn\n}", "func Parse(dir string) ([]*cfg.Dependency, error) {\n\tpath := filepath.Join(dir, \"Godeps/Godeps.json\")\n\tif _, err := os.Stat(path); err != nil {\n\t\treturn []*cfg.Dependency{}, nil\n\t}\n\tmsg.Info(\"Found Godeps.json file in %s\", gpath.StripBasepath(dir))\n\tmsg.Info(\"--> Parsing Godeps metadata...\")\n\n\tbuf := []*cfg.Dependency{}\n\n\tgodeps := &Godeps{}\n\n\t// Get a handle to the file.\n\tfile, err := os.Open(path)\n\tif err != nil {\n\t\treturn buf, err\n\t}\n\tdefer file.Close()\n\n\tdec := json.NewDecoder(file)\n\tif err := dec.Decode(godeps); err != nil {\n\t\treturn buf, err\n\t}\n\n\tseen := map[string]bool{}\n\tfor _, d := range godeps.Deps {\n\t\tpkg, _ := util.NormalizeName(d.ImportPath)\n\t\tif !seen[pkg] {\n\t\t\tseen[pkg] = true\n\t\t\tdep := &cfg.Dependency{Name: pkg, Version: d.Rev}\n\t\t\tbuf = append(buf, dep)\n\t\t}\n\t}\n\n\treturn buf, nil\n}", "func (repo *GitHubProject) Dependencies() []Project {\n\treturn []Project{}\n}", "func (t *TerraformVariables) Dependencies() []asset.Asset {\n\treturn 
[]asset.Asset{\n\t\t&installconfig.InstallConfig{},\n\t\t&bootstrap.Bootstrap{},\n\t\t&machine.Master{},\n\t}\n}", "func require(required []string, parser lineParser) lineParser {\n\tf := func(ctx *parseContext, cmd string, args []string) (node, error) {\n\t\tn, err := parser(ctx, cmd, args)\n\t\tif err == nil {\n\t\t\trequiredMap := make(map[string]bool)\n\t\t\tfor _, r := range required {\n\t\t\t\trequiredMap[r] = false //hasn't been found yet\n\t\t\t}\n\n\t\t\tfor _, previousNode := range ctx.nodes {\n\t\t\t\tif _, found := requiredMap[previousNode.cmd]; found {\n\t\t\t\t\trequiredMap[previousNode.cmd] = true\n\t\t\t\t}\n\t\t\t}\n\t\t\tfor key, found := range requiredMap {\n\t\t\t\tif !found {\n\t\t\t\t\tctx.addErrorf(\"line %d: %s depends on %s\", ctx.lineNum, cmd, key)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn n, err\n\t}\n\treturn f\n}", "func FromDomainDependencies(dLinks []model.DependencyLink) []DependencyLink {\n\tif dLinks == nil {\n\t\treturn nil\n\t}\n\tret := make([]DependencyLink, len(dLinks))\n\tfor i, d := range dLinks {\n\t\tret[i] = DependencyLink{\n\t\t\tCallCount: d.CallCount,\n\t\t\tParent: d.Parent,\n\t\t\tChild: d.Child,\n\t\t}\n\t}\n\treturn ret\n}", "func MatchDependencies(ch chan SourceDependenciesItem) chan SourceDependenciesItem {\n\tout := make(chan SourceDependenciesItem, cli.ChannelBufferSize)\n\tgo func() {\n\t\tdefer close(out)\n\n\t\tfor item := range ch {\n\t\t\tvar matchedDeps []string\n\t\t\tfor _, dep := range item.Dependencies {\n\t\t\t\tmatched, err := regexp.MatchString(cli.FindExp, dep)\n\t\t\t\tif err != nil {\n\t\t\t\t\tfatalError(err)\n\t\t\t\t}\n\t\t\t\tif matched {\n\t\t\t\t\tmatchedDeps = append(matchedDeps, dep)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif matchedDeps != nil {\n\t\t\t\tout <- SourceDependenciesItem{item.FilePath, matchedDeps}\n\t\t\t}\n\t\t}\n\t}()\n\treturn out\n}", "func dependenciesCanBeResolved(target *api.Container, by []*api.Container) bool {\n\tnameMap := make(map[string]*api.Container)\n\tfor _, cont := 
range by {\n\t\tnameMap[cont.Name] = cont\n\t}\n\tneededVolumeContainers := make([]string, len(target.VolumesFrom))\n\tfor i, volume := range target.VolumesFrom {\n\t\tneededVolumeContainers[i] = volume.SourceContainer\n\t}\n\n\treturn verifyStatusResolveable(target, nameMap, neededVolumeContainers, volumeCanResolve) &&\n\t\tverifyStatusResolveable(target, nameMap, linksToContainerNames(target.Links), linkCanResolve)\n}", "func (g *Generator) AddDependency(n, t, f string) *Generator {\n\tg.deps = append(g.deps, dep{n, t, f})\n\treturn g\n}", "func (l *PackageList) VerifyDependencies(options int, architectures []string, sources *PackageList, progress aptly.Progress) ([]Dependency, error) {\n\tl.PrepareIndex()\n\tmissing := make([]Dependency, 0, 128)\n\n\tif progress != nil {\n\t\tprogress.InitBar(int64(l.Len())*int64(len(architectures)), false, aptly.BarGeneralVerifyDependencies)\n\t}\n\n\tfor _, arch := range architectures {\n\t\tcache := make(map[string]bool, 2048)\n\n\t\tfor _, p := range l.packagesIndex {\n\t\t\tif progress != nil {\n\t\t\t\tprogress.AddBar(1)\n\t\t\t}\n\n\t\t\tif !p.MatchesArchitecture(arch) {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tfor _, dep := range p.GetDependencies(options) {\n\t\t\t\tvariants, err := ParseDependencyVariants(dep)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, fmt.Errorf(\"unable to process package %s: %s\", p, err)\n\t\t\t\t}\n\n\t\t\t\tvariants = depSliceDeduplicate(variants)\n\n\t\t\t\tvariantsMissing := make([]Dependency, 0, len(variants))\n\n\t\t\t\tfor _, dep := range variants {\n\t\t\t\t\tif dep.Architecture == \"\" {\n\t\t\t\t\t\tdep.Architecture = arch\n\t\t\t\t\t}\n\n\t\t\t\t\thash := dep.Hash()\n\t\t\t\t\tsatisfied, ok := cache[hash]\n\t\t\t\t\tif !ok {\n\t\t\t\t\t\tsatisfied = sources.Search(dep, false) != nil\n\t\t\t\t\t\tcache[hash] = satisfied\n\t\t\t\t\t}\n\n\t\t\t\t\tif !satisfied && !ok {\n\t\t\t\t\t\tvariantsMissing = append(variantsMissing, dep)\n\t\t\t\t\t}\n\n\t\t\t\t\tif satisfied && 
options&DepFollowAllVariants == 0 {\n\t\t\t\t\t\tvariantsMissing = nil\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tmissing = append(missing, variantsMissing...)\n\t\t\t}\n\t\t}\n\t}\n\n\tif progress != nil {\n\t\tprogress.ShutdownBar()\n\t}\n\n\tif options&DepVerboseResolve == DepVerboseResolve && progress != nil {\n\t\tmissingStr := make([]string, len(missing))\n\t\tfor i := range missing {\n\t\t\tmissingStr[i] = missing[i].String()\n\t\t}\n\t\tprogress.ColoredPrintf(\"@{y}Missing dependencies:@| %s\", strings.Join(missingStr, \", \"))\n\t}\n\n\treturn missing, nil\n}", "func (s *Scheduler) checkCircularDep(j *gaia.Job, resolved []*gaia.Job, unresolved []*gaia.Job) ([]*gaia.Job, error) {\n\tunresolved = append(unresolved, j)\n\nDependsonLoop:\n\tfor _, job := range j.DependsOn {\n\t\t// Check if job is already in resolved list\n\t\tfor _, resolvedJob := range resolved {\n\t\t\tif resolvedJob.ID == job.ID {\n\t\t\t\tcontinue DependsonLoop\n\t\t\t}\n\t\t}\n\n\t\t// Check if job is already in unresolved list\n\t\tfor _, unresolvedJob := range unresolved {\n\t\t\tif unresolvedJob.ID == job.ID {\n\t\t\t\t// Circular dependency detected\n\t\t\t\t// Return the conflicting dependencies\n\t\t\t\treturn nil, fmt.Errorf(errCircularDep, unresolvedJob.Title, j.Title)\n\t\t\t}\n\t\t}\n\n\t\t// Resolve job\n\t\tvar err error\n\t\tresolved, err = s.checkCircularDep(job, resolved, unresolved)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn append(resolved, j), nil\n}", "func GuessDeps(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {\n\tbuildContext, err := util.GetBuildContext()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tbase := p.Get(\"dirname\", \".\").(string)\n\tskipImport := p.Get(\"skipImport\", false).(bool)\n\tname := guessPackageName(buildContext, base)\n\n\tInfo(\"Generating a YAML configuration file and guessing the dependencies\")\n\n\tconfig := new(cfg.Config)\n\n\t// Get the name of the top level 
package\n\tconfig.Name = name\n\n\t// Import by looking at other package managers and looking over the\n\t// entire directory structure.\n\n\t// Attempt to import from other package managers.\n\tif !skipImport {\n\t\tInfo(\"Attempting to import from other package managers (use --skip-import to skip)\")\n\t\tdeps := []*cfg.Dependency{}\n\t\tabsBase, err := filepath.Abs(base)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif d, ok := guessImportGodep(absBase); ok {\n\t\t\tInfo(\"Importing Godep configuration\")\n\t\t\tWarn(\"Godep uses commit id versions. Consider using Semantic Versions with Glide\")\n\t\t\tdeps = d\n\t\t} else if d, ok := guessImportGPM(absBase); ok {\n\t\t\tInfo(\"Importing GPM configuration\")\n\t\t\tdeps = d\n\t\t} else if d, ok := guessImportGB(absBase); ok {\n\t\t\tInfo(\"Importing GB configuration\")\n\t\t\tdeps = d\n\t\t}\n\n\t\tfor _, i := range deps {\n\t\t\tInfo(\"Found imported reference to %s\\n\", i.Name)\n\t\t\tconfig.Imports = append(config.Imports, i)\n\t\t}\n\t}\n\n\t// Resolve dependencies by looking at the tree.\n\tr, err := dependency.NewResolver(base)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\th := &dependency.DefaultMissingPackageHandler{Missing: []string{}, Gopath: []string{}}\n\tr.Handler = h\n\n\tsortable, err := r.ResolveLocal(false)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tsort.Strings(sortable)\n\n\tvpath := r.VendorDir\n\tif !strings.HasSuffix(vpath, \"/\") {\n\t\tvpath = vpath + string(os.PathSeparator)\n\t}\n\n\tfor _, pa := range sortable {\n\t\tn := strings.TrimPrefix(pa, vpath)\n\t\troot := util.GetRootFromPackage(n)\n\n\t\tif !config.HasDependency(root) {\n\t\t\tInfo(\"Found reference to %s\\n\", n)\n\t\t\td := &cfg.Dependency{\n\t\t\t\tName: root,\n\t\t\t}\n\t\t\tsubpkg := strings.TrimPrefix(n, root)\n\t\t\tif len(subpkg) > 0 && subpkg != \"/\" {\n\t\t\t\td.Subpackages = []string{subpkg}\n\t\t\t}\n\t\t\tconfig.Imports = append(config.Imports, d)\n\t\t} else {\n\t\t\tsubpkg := 
strings.TrimPrefix(n, root)\n\t\t\tif len(subpkg) > 0 && subpkg != \"/\" {\n\t\t\t\tsubpkg = strings.TrimPrefix(subpkg, \"/\")\n\t\t\t\td := config.Imports.Get(root)\n\t\t\t\tf := false\n\t\t\t\tfor _, v := range d.Subpackages {\n\t\t\t\t\tif v == subpkg {\n\t\t\t\t\t\tf = true\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif !f {\n\t\t\t\t\tInfo(\"Adding sub-package %s to %s\\n\", subpkg, root)\n\t\t\t\t\td.Subpackages = append(d.Subpackages, subpkg)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn config, nil\n}", "func sortDependencies(ctx context.Context, ranker DepsRanker, missingRuleDeps map[*bazel.Rule]map[ClassName][]bazel.Label) {\n\tstopwatch := time.Now()\n\tfor _, classToLabels := range missingRuleDeps {\n\t\tfor _, labels := range classToLabels {\n\t\t\tsort.Slice(labels, func(i, j int) bool { return ranker.Less(ctx, labels[i], labels[j]) })\n\t\t}\n\t}\n\tlog.Printf(\"Ranking dependencies (%dms)\", int64(time.Now().Sub(stopwatch)/time.Millisecond))\n}", "func findDependencies(st *vmdat.State, files []*jclass.File) []string {\n\tvar nonLoaded []string\n\tfor _, f := range files {\n\t\tdeps := jdeps.ClassDependencies(f)\n\t\tfor _, d := range deps {\n\t\t\tif st.FindPackage(d) == nil {\n\t\t\t\tnonLoaded = append(nonLoaded, d)\n\t\t\t}\n\t\t}\n\t}\n\treturn nonLoaded\n}", "func validateCircularDepends(cfg *Config) error {\n\tfor _, cmdA := range cfg.Commands {\n\t\tfor _, cmdB := range cfg.Commands {\n\t\t\tif cmdA.Name == cmdB.Name {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif yes := depsIntersect(cmdA, cmdB); yes {\n\t\t\t\treturn fmt.Errorf(\n\t\t\t\t\t\"command '%s' have circular depends on command '%s'\",\n\t\t\t\t\tfmt.Sprintf(NoticeColor, cmdA.Name),\n\t\t\t\t\tfmt.Sprintf(NoticeColor, cmdB.Name),\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func importDependencies(project common.AppProject) error {\n\n\tai, err := util.GetAppImports(filepath.Join(project.Dir(), fileFlogoJson), project.DepManager(), true)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\timports := 
ai.GetAllImports()\n\n\tif len(imports) == 0 {\n\t\treturn nil\n\t}\n\n\terr = project.AddImports(true, false, imports...)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlegacySupportRequired := false\n\n\tfor _, details := range ai.GetAllImportDetails() {\n\n\t\tpath, err := project.GetPath(details.Imp)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tdesc, err := util.GetContribDescriptor(path)\n\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif desc != nil {\n\n\t\t\tcType := desc.GetContribType()\n\t\t\tif desc.IsLegacy {\n\t\t\t\tlegacySupportRequired = true\n\t\t\t\tcType = \"legacy \" + desc.GetContribType()\n\t\t\t\terr := CreateLegacyMetadata(path, desc.GetContribType(), details.Imp.GoImportPath())\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfmt.Printf(\"Installed %s: %s\\n\", cType, details.Imp)\n\t\t\t//instStr := fmt.Sprintf(\"Installed %s:\", cType)\n\t\t\t//fmt.Printf(\"%-20s %s\\n\", instStr, imp)\n\t\t}\n\t}\n\t\n\tif Verbose() {\n\t\tfmt.Printf(\"Tidying go mod...\")\n\t}\n\t\n\terr = util.ExecCmd(exec.Command(\"go\", \"mod\", \"tidy\"), project.SrcDir())\n\tif err != nil {\n\t\tfmt.Printf(\"Failed to clean deps: %s\\n\", err)\n\t}\n\n\tif legacySupportRequired {\n\t\terr := InstallLegacySupport(project)\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (*LogzioSpanReader) GetDependencies(ctx context.Context, endTs time.Time, lookback time.Duration) ([]model.DependencyLink, error) {\n\treturn nil, nil\n}", "func validateCircularDepends(cfg *Config) error {\n\tfor _, cmdA := range cfg.Commands {\n\t\tfor _, cmdB := range cfg.Commands {\n\t\t\tif cmdA.Name == cmdB.Name {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif yes := depsIntersect(cmdA, cmdB); yes {\n\t\t\t\treturn fmt.Errorf(\n\t\t\t\t\t\"command '%s' have circular depends on command '%s'\",\n\t\t\t\t\twithColor(cmdA.Name),\n\t\t\t\t\twithColor(cmdB.Name),\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}" ]
[ "0.65340716", "0.61912113", "0.61908144", "0.60914624", "0.60624367", "0.60466945", "0.6029449", "0.60245776", "0.5904961", "0.58684546", "0.58485883", "0.5831142", "0.5789253", "0.5765972", "0.5744005", "0.5741016", "0.56804025", "0.5667477", "0.5642951", "0.5637858", "0.5626136", "0.5625061", "0.56241703", "0.56202644", "0.56059206", "0.5571854", "0.5559167", "0.555226", "0.55477995", "0.5544807", "0.55356526", "0.55340344", "0.5530089", "0.55159885", "0.5465682", "0.54565597", "0.545447", "0.54446787", "0.54403365", "0.54344386", "0.54285955", "0.54186845", "0.5402228", "0.53955287", "0.5385074", "0.5378247", "0.5376015", "0.53737694", "0.53611827", "0.5360911", "0.5350481", "0.5345349", "0.53378", "0.53316724", "0.5310237", "0.53084964", "0.5296192", "0.52884686", "0.52852", "0.5267407", "0.5252849", "0.5187915", "0.51861227", "0.51817226", "0.51783514", "0.5177082", "0.51651573", "0.5148837", "0.5133664", "0.51167923", "0.51036984", "0.50958997", "0.50920105", "0.5077754", "0.5077672", "0.50647575", "0.50504667", "0.50468063", "0.50412387", "0.5035309", "0.5017924", "0.49824458", "0.49794394", "0.49748102", "0.497364", "0.49593657", "0.49505726", "0.49407923", "0.49349007", "0.49280962", "0.49027222", "0.48963404", "0.48916262", "0.48907965", "0.48847172", "0.48752975", "0.48640066", "0.48564082", "0.48555034", "0.4853564" ]
0.70350784
0
parseIncludes .. Parses dependencies to create an include string for each.
func (i *Interface) parseIncludes() { for _, dependency := range i.Dependencies { include := NewInclude(dependency) if parsers.ShouldBeIncludedInHeader(dependency) { i.HeaderIncludesString += include.ToString() + "\n" } else { i.ImplementationIncludesString += include.ToString() + "\n" } } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func processIncludes(source string) string {\n\tlines := strings.Split(source, \"\\n\")\n\tvar result []string\n\tfor _, line := range lines {\n\t\ttrimmed := strings.TrimSpace(line)\n\t\tif url := parseIncludeURL(trimmed); url != \"\" {\n\t\t\tif buf, err := curl(url); err == nil {\n\t\t\t\tresult = append(result, string(buf))\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\n\t\tresult = append(result, line)\n\t}\n\n\treturn strings.Join(result, \"\\n\")\n}", "func parseInclude(node *node32) string {\n\tstrNode := nextNode(node, ruleSTRLITER)\n\tfile := nextNode(strNode.up, ruleSTR).match\n\n\treturn file\n}", "func (ctx *context) hoistIncludes(result []byte) []byte {\n\tincludesStart := bytes.Index(result, []byte(\"#include\"))\n\tif includesStart == -1 {\n\t\treturn result\n\t}\n\tincludes := make(map[string]bool)\n\tincludeRe := regexp.MustCompile(\"#include <.*>\\n\")\n\tfor _, match := range includeRe.FindAll(result, -1) {\n\t\tincludes[string(match)] = true\n\t}\n\tresult = includeRe.ReplaceAll(result, nil)\n\t// Certain linux and bsd headers are broken and go to the bottom.\n\tvar sorted, sortedBottom, sortedTop []string\n\tfor include := range includes {\n\t\tif strings.Contains(include, \"<linux/\") {\n\t\t\tsortedBottom = append(sortedBottom, include)\n\t\t} else if strings.Contains(include, \"<netinet/if_ether.h>\") {\n\t\t\tsortedBottom = append(sortedBottom, include)\n\t\t} else if strings.Contains(include, \"<keyutils.h>\") {\n\t\t\tsortedBottom = append(sortedBottom, include)\n\t\t} else if ctx.target.OS == freebsd && strings.Contains(include, \"<sys/types.h>\") {\n\t\t\tsortedTop = append(sortedTop, include)\n\t\t} else {\n\t\t\tsorted = append(sorted, include)\n\t\t}\n\t}\n\tsort.Strings(sortedTop)\n\tsort.Strings(sorted)\n\tsort.Strings(sortedBottom)\n\tnewResult := append([]byte{}, result[:includesStart]...)\n\tnewResult = append(newResult, strings.Join(sortedTop, \"\")...)\n\tnewResult = append(newResult, '\\n')\n\tnewResult = append(newResult, 
strings.Join(sorted, \"\")...)\n\tnewResult = append(newResult, '\\n')\n\tnewResult = append(newResult, strings.Join(sortedBottom, \"\")...)\n\tnewResult = append(newResult, result[includesStart:]...)\n\treturn newResult\n}", "func lexInclude(lx *lexer) stateFn {\r\n\tr := lx.next()\r\n\tswitch {\r\n\tcase r == sqStringStart:\r\n\t\tlx.ignore() // ignore the \" or '\r\n\t\treturn lexIncludeQuotedString\r\n\tcase r == dqStringStart:\r\n\t\tlx.ignore() // ignore the \" or '\r\n\t\treturn lexIncludeDubQuotedString\r\n\tcase r == arrayStart:\r\n\t\treturn lx.errorf(\"Expected include value but found start of an array\")\r\n\tcase r == mapStart:\r\n\t\treturn lx.errorf(\"Expected include value but found start of a map\")\r\n\tcase r == blockStart:\r\n\t\treturn lx.errorf(\"Expected include value but found start of a block\")\r\n\tcase unicode.IsDigit(r), r == '-':\r\n\t\treturn lx.errorf(\"Expected include value but found start of a number\")\r\n\tcase r == '\\\\':\r\n\t\treturn lx.errorf(\"Expected include value but found escape sequence\")\r\n\tcase isNL(r):\r\n\t\treturn lx.errorf(\"Expected include value but found new line\")\r\n\t}\r\n\tlx.backup()\r\n\treturn lexIncludeString\r\n}", "func (i *Interface) parseForwardDeclares() {\n\tfor _, dependency := range i.Dependencies {\n\t\tif !parsers.ShouldBeIncludedInHeader(dependency) {\n\t\t\ti.ForwardDeclaresString += \"class \" + dependency + \";\\n\"\n\t\t} \n\t}\n}", "func (c *Converter) convertInclude(include *nast.IncludeDirective) error {\n\n\tc.includecount++\n\tif c.includecount > 20 {\n\t\treturn &parser.Error{\n\t\t\tMessage: \"Error when processing includes: Include-loop detected\",\n\t\t\tStartPosition: ast.NewPosition(\"\", 1, 1),\n\t\t\tEndPosition: ast.NewPosition(\"\", 20, 70),\n\t\t}\n\t}\n\n\tfilesnames := make([]string, 1)\n\tfilesnames[0] = include.File\n\n\tfile, err := c.getIncludedFile(include)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tp := 
NewParser().(*Parser)\n\tp.SetFilename(include.File)\n\tparsed, err := p.Parse(file)\n\tif err != nil {\n\t\t// override the position of the error with the position of the include\n\t\t// this way the error gets displayed at the correct location\n\t\t// the message does contain the original location\n\t\treturn &parser.Error{\n\t\t\tMessage: err.Error(),\n\t\t\tStartPosition: include.Start(),\n\t\t\tEndPosition: include.End(),\n\t\t}\n\t}\n\n\tif usesTimeTracking(parsed) {\n\t\tc.usesTimeTracking = true\n\t}\n\n\treplacements := make([]ast.Node, len(parsed.Elements))\n\tfor i := range parsed.Elements {\n\t\treplacements[i] = parsed.Elements[i]\n\t}\n\treturn ast.NewNodeReplacement(replacements...)\n}", "func (s *Service) ReferencedIncludes() ([]*Include, error) {\n\tvar err error\n\tincludes := []*Include{}\n\tincludesSet := make(map[string]*Include)\n\n\t// Check extended service.\n\tif s.Extends != \"\" && strings.Contains(s.Extends, \".\") {\n\t\tincludeName := s.Extends[0:strings.Index(s.Extends, \".\")]\n\t\tinclude := s.Frugal.Include(includeName)\n\t\tif include == nil {\n\t\t\treturn nil, fmt.Errorf(\"Service %s extends references invalid include %s\",\n\t\t\t\ts.Name, s.Extends)\n\t\t}\n\t\tif _, ok := includesSet[includeName]; !ok {\n\t\t\tincludesSet[includeName] = include\n\t\t\tincludes = append(includes, include)\n\t\t}\n\t}\n\n\t// Check methods.\n\tfor _, method := range s.Methods {\n\t\t// Check arguments.\n\t\tfor _, arg := range method.Arguments {\n\t\t\tincludesSet, includes, err = addInclude(includesSet, includes, arg.Type, s.Frugal)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\t// Check return type.\n\t\tif method.ReturnType != nil {\n\t\t\tincludesSet, includes, err = addInclude(includesSet, includes, method.ReturnType, s.Frugal)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\t// Check exceptions.\n\t\tfor _, exception := range method.Exceptions {\n\t\t\tincludesSet, includes, err = 
addInclude(includesSet, includes, exception.Type, s.Frugal)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn includes, nil\n}", "func (f *Frugal) OrderedIncludes() []*Frugal {\n\tkeys := make([]string, 0, len(f.ParsedIncludes))\n\tfor key := range f.ParsedIncludes {\n\t\tkeys = append(keys, key)\n\t}\n\tsort.Strings(keys)\n\n\tincludes := make([]*Frugal, 0, len(f.ParsedIncludes))\n\tfor _, key := range keys {\n\t\tincludes = append(includes, f.ParsedIncludes[key])\n\t}\n\treturn includes\n}", "func (i *Interface) parseDependencies() {\n\tvar dependencies []string\n\tfor _, function := range i.Functions {\n\n\t\t// \"expanded\" refers to creating a parsers.from a templated type, i.e \"QMap <int, QString>\" becomes [QMap int QString]\n\t\texpandedReturnType := strings.FieldsFunc(function.ReturnType, templatedTypeSeparators) \n\t\tfor _, dataType := range(expandedReturnType) {\n\t\t\tdependencies = append(dependencies, strings.TrimSpace(dataType))\n\t\t}\n\n\t\tfor _, parameter := range function.Parameters {\n\t\t\texpandedParameter := strings.FieldsFunc(parameter.Type, templatedTypeSeparators)\n\t\t\tfor _, innerParameter := range expandedParameter {\n\t\t\t\tdependencies = append(dependencies, strings.TrimSpace(innerParameter))\n\t\t\t} \n\t\t}\n\t}\n\ti.Dependencies = dependencies\n\ti.Dependencies = parsers.RemoveConstSpecifiers(i.Dependencies)\n\ti.Dependencies = parsers.RemovePointersAndReferences(i.Dependencies)\n\ti.Dependencies = parsers.RemoveStdDataTypes(i.Dependencies)\n\ti.Dependencies = parsers.MapDataTypesToLibraryDependencies(i.Dependencies)\n\ti.Dependencies = parsers.RemoveDuplicates(i.Dependencies)\n\tsort.Strings(i.Dependencies)\n}", "func lexIncludeString(lx *lexer) stateFn {\r\n\tr := lx.next()\r\n\tswitch {\r\n\tcase isNL(r) || r == eof || r == optValTerm || r == mapEnd || isWhitespace(r):\r\n\t\tlx.backup()\r\n\t\tlx.emit(itemInclude)\r\n\t\treturn lx.pop()\r\n\tcase r == 
sqStringEnd:\r\n\t\tlx.backup()\r\n\t\tlx.emit(itemInclude)\r\n\t\tlx.next()\r\n\t\tlx.ignore()\r\n\t\treturn lx.pop()\r\n\t}\r\n\treturn lexIncludeString\r\n}", "func addInclude(includesSet map[string]*Include, includes []*Include, t *Type, frugal *Frugal) (map[string]*Include, []*Include, error) {\n\tvar err error\n\tif strings.Contains(t.Name, \".\") {\n\t\tincludeName := t.Name[0:strings.Index(t.Name, \".\")]\n\t\tinclude := frugal.Include(includeName)\n\t\tif include == nil {\n\t\t\treturn nil, nil, fmt.Errorf(\"Type %s references invalid include %s\", t.Name, include.Name)\n\t\t}\n\t\tif _, ok := includesSet[includeName]; !ok {\n\t\t\tincludesSet[includeName] = include\n\t\t\tincludes = append(includes, include)\n\t\t}\n\t}\n\t// Check container types.\n\tif t.KeyType != nil {\n\t\tincludesSet, includes, err = addInclude(includesSet, includes, t.KeyType, frugal)\n\t}\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tif t.ValueType != nil {\n\t\tincludesSet, includes, err = addInclude(includesSet, includes, t.ValueType, frugal)\n\t}\n\treturn includesSet, includes, err\n}", "func appendIncludedFiles(ast *AST, ifm *IncludeFiles) {\n\tfor _, include := range ast.includes {\n\t\tabsoluteFile := fmt.Sprintf(\"%v/%v\", ifm.dir,\n\t\t\tinclude)\n\n\t\t_, included := ifm.files[absoluteFile]\n\t\tif included {\n\t\t\tcontinue\n\t\t}\n\n\t\tifm.Include(absoluteFile)\n\n\t\twaccIncl := parseInput(absoluteFile)\n\t\tastIncl := generateASTFromWACC(waccIncl, ifm)\n\n\t\tast.enums = append(ast.enums,\n\t\t\tastIncl.enums...)\n\n\t\tast.classes = append(ast.classes,\n\t\t\tastIncl.classes...)\n\n\t\tast.functions = append(ast.functions,\n\t\t\tastIncl.functions...)\n\t}\n}", "func (m *Filters) Includes() []string {\n\tinc := make([]string, 0)\n\tfor _, f := range m.Filters {\n\t\tfinc := f.Includes()\n\t\tif len(finc) > 0 {\n\t\t\tinc = append(inc, finc...)\n\t\t}\n\t}\n\treturn inc\n}", "func (o 
WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementNotStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementNotStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func getIncludeList(inputFiles, clangFlags []string, flag []string, cppCode bool) (\n\t_ string, err error) {\n\tdefer func() {\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(\"cannot get Include List : %v\", err)\n\t\t}\n\t}()\n\tvar out bytes.Buffer\n\tvar stderr bytes.Buffer\n\tvar args []string\n\tfor i := range inputFiles {\n\t\tinputFiles[i], err = filepath.Abs(inputFiles[i])\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\targs = append(args, flag...)\n\targs = append(args, \"-c\")\n\targs = append(args, inputFiles...)\n\targs = append(args, clangFlags...)\n\n\tdefer func() {\n\t\tif err != nil {\n\t\t\tfmt.Errorf(\"used next arguments: `%v`. %v\", args, err)\n\t\t}\n\t}()\n\n\tvar cmd *exec.Cmd\n\tcompiler, compilerFlag := Compiler(cppCode)\n\targs = append(compilerFlag, args...)\n\tcmd = exec.Command(compiler, args...)\n\n\tcmd.Stdout = &out\n\tcmd.Stderr = &stderr\n\terr = cmd.Run()\n\tif err != nil {\n\t\terr = fmt.Errorf(\"preprocess failed: %v\\nStdErr = %v\", err, stderr.String())\n\t\treturn\n\t}\n\n\t// add stderr to out, for flags \"-MM -H\"\n\tout.WriteString(stderr.String())\n\n\t// remove warnings\n\t// ... 
/usr/lib/llvm-4.0/bin/../lib/clang/4.0.1/include/stddef.h\n\t// .. /usr/include/x86_64-linux-gnu/bits/stdlib-float.h\n\t// /home/konstantin/go/src/github.com/Konstantin8105/c4go/testdata/kilo/debug.kilo.c:81:9: warning: '_BSD_SOURCE' macro redefined [-Wmacro-redefined]\n\t// #define _BSD_SOURCE\n\t// ^\n\t// /usr/include/features.h:188:10: note: previous definition is here\n\t// # define _BSD_SOURCE 1\n\t// ^\n\tlines := strings.Split(out.String(), \"\\n\")\n\tfor i := range lines {\n\t\tif strings.Contains(lines[i], \"warning:\") {\n\t\t\tlines = lines[:i]\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn strings.Join(lines, \"\\n\"), err\n}", "func GetIeraphyIncludeList(inputFiles, clangFlags []string, cppCode bool) (\n\tlines []string, err error) {\n\tvar out string\n\tout, err = getIncludeList(inputFiles, clangFlags, []string{\"-MM\", \"-H\"}, cppCode)\n\tif err != nil {\n\t\treturn\n\t}\n\treturn strings.Split(out, \"\\n\"), nil\n}", "func (o WebAclRuleStatementOrStatementStatementAndStatementStatementAndStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementOrStatementStatementAndStatementStatementAndStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementNotStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementNotStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementNotStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput 
{\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementNotStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o RuleMfaOutput) NetworkIncludes() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v *RuleMfa) pulumi.StringArrayOutput { return v.NetworkIncludes }).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementOrStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementOrStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (c *condition) includes(v string) bool {\n\tfor _, pattern := range c.Include {\n\t\tif ok, _ := filepath.Match(pattern, v); ok {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func (o WebAclRuleStatementOrStatementStatementAndStatementStatementAndStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementOrStatementStatementAndStatementStatementAndStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o 
WebAclRuleStatementAndStatementStatementNotStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementNotStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementOrStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementOrStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (self dependencies) includes(definition string) bool {\n for _, dependency := range self {\n if dependency == definition {\n return true\n }\n }\n\n return false\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementNotStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementNotStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementOrStatementStatementAndStatementStatementAndStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementOrStatementStatementAndStatementStatementAndStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o 
WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementNotStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementNotStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementOrStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementOrStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementNotStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementNotStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func lexIncludeStart(lx *lexer) stateFn {\r\n\tr := lx.next()\r\n\tif isWhitespace(r) {\r\n\t\treturn lexSkip(lx, lexIncludeStart)\r\n\t}\r\n\tlx.backup()\r\n\treturn lexInclude\r\n}", "func (o 
WebAclRuleStatementNotStatementStatementOrStatementStatementNotStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementNotStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementOrStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementOrStatementStatementSqliMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func lexIncludeQuotedString(lx *lexer) stateFn {\r\n\tr := lx.next()\r\n\tswitch {\r\n\tcase r == sqStringEnd:\r\n\t\tlx.backup()\r\n\t\tlx.emit(itemInclude)\r\n\t\tlx.next()\r\n\t\tlx.ignore()\r\n\t\treturn lx.pop()\r\n\t}\r\n\treturn lexIncludeQuotedString\r\n}", "func extendIncludes(m *[]Includes, ext []Includes) {\n\tif ext != nil {\n\t\tif *m == nil {\n\t\t\t*m = 
[]Includes{}\n\t\t}\n\t\t*m = append(*m, ext...)\n\t}\n}", "func (l *LazyMultiLoaderWithInclude) Include(path string) *LazyMultiLoaderWithInclude {\n\tl.includes = append(l.includes, path)\n\treturn l\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementOrStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementOrStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (l *Line) ContainsIncluded(content string) (output string, err error) {\n\tfor _, i := range l.Includes {\n\t\tfound := false\n\t\tfor _, line := range strings.Split(content, \"\\n\") {\n\n\t\t\tif strings.Contains(line, i) {\n\t\t\t\toutput = output + line + \"\\n\"\n\t\t\t\tfound = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !found {\n\t\t\tlogrus.Errorf(\"Line '%v', not found in content:\\n%v\\n\", i, content)\n\t\t\treturn \"\", fmt.Errorf(ErrLineNotFound)\n\t\t}\n\t}\n\treturn output, nil\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementNotStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementNotStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn 
v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementNotStatementStatementXssMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementNotStatementStatementXssMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementXssMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementXssMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (meta *Meta) Including(filename string, gziped bool) error {\n\tif filename == \"\" {\n\t\treturn nil\n\t}\n\n\tfa, err := wildcard.Compile([]rune(\n\t\tstrings.ReplaceAll(\n\t\t\tfilepath.Join(meta.Dir, filename), \"\\\\\", \"\\\\\\\\\")))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tmeta.Includes = append(meta.Includes,\n\t\tInclude{Filename: filename, Wc: fa, Gziped: gziped})\n\treturn nil\n}", "func (o WebAclRuleStatementAndStatementStatementNotStatementStatementXssMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementNotStatementStatementXssMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementOrStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v 
WebAclRuleStatementNotStatementStatementOrStatementStatementOrStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementNotStatementStatementOrStatementStatementXssMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementNotStatementStatementOrStatementStatementXssMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementNotStatementStatementByteMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementNotStatementStatementByteMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementRegexPatternSetReferenceStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementOrStatementStatementAndStatementStatementAndStatementStatementByteMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() 
pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementOrStatementStatementAndStatementStatementAndStatementStatementByteMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementByteMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementByteMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementOrStatementStatementByteMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementOrStatementStatementByteMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementByteMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementByteMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (this *Asset) parse() error {\n\tassetPath, err := this.findAssetPath()\n\tif err != nil {\n\t\treturn err\n\t}\n\tfile, err := os.OpenFile(assetPath, os.O_RDONLY, 0666)\n\tif err != nil {\n\t\treturn err\n\t}\n\tasset_reader := bufio.NewReader(file)\n\tfor {\n\t\t_line, _, err := asset_reader.ReadLine()\n\t\tif err == io.EOF {\n\t\t\tbreak\n\t\t}\n\t\tline := string(_line)\n\t\tvar prefix string\n\t\tif this.assetType == ASSET_JAVASCRIPT {\n\t\t\tprefix = \"//= require \"\n\t\t}else 
{\n\t\t\tprefix = \"/*= require \"\n\t\t}\n\t\tif strings.HasPrefix(line, prefix) {\n\t\t\tinclude_file := line[len(prefix):]\n\t\t\tfile, err := this.findIncludeFilePath(include_file)\n\t\t\tif err != nil {\n\t\t\t\tWarning(\"%v \\\"%v\\\" can't find required file \\\"%v\\\"\", this.assetType.String(), this.assetName, include_file)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tthis.Include_files = append(this.Include_files, file)\n\t\t}\n\t}\n\treturn nil\n}", "func (o WebAclRuleStatementNotStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementRegexMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementNotStatementStatementXssMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementNotStatementStatementXssMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementOrStatementStatementXssMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementOrStatementStatementXssMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementOrStatementStatementByteMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementOrStatementStatementByteMatchStatementFieldToMatchHeaderMatchPattern) 
[]string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementOrStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementOrStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementXssMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementXssMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func includeFile(filename string, buf *bytes.Buffer) error {\n\tfilename = filepath.Clean(filename)\n\tlineNum := 0\n\n\t// check for include cyles\n\tif includedFiles[filename] {\n\t\treturn fmt.Errorf(\"include cycle: '%s'\", filename)\n\t} else {\n\t\tincludedFiles[filename] = true\n\t}\n\tdefer delete(includedFiles, filename)\n\n\tf, err := os.Open(filename)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Close()\n\ts := bufio.NewScanner(f)\n\n\t// mark the start of this file\n\tfmt.Fprintln(buf, marker(filename, lineNum))\n\n\tfor s.Scan() {\n\t\tline := s.Text()\n\n\t\tif strings.HasPrefix(line, \"include\") {\n\t\t\tpieces := strings.Split(line, \" \")\n\t\t\tif len(pieces) != 2 {\n\t\t\t\treturn fmt.Errorf(\"%s:%d: invalid include directive\", filename, lineNum)\n\t\t\t}\n\n\t\t\terr := includeFile(filepath.Join(filename, \"..\", pieces[1]), buf)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"%s:%d: %s\", filename, lineNum, err.Error())\n\t\t\t}\n\t\t\tlineNum++\n\n\t\t\t// mark the resumption point for this file\n\t\t\tfmt.Fprintln(buf, marker(filename, 
lineNum))\n\t\t} else {\n\t\t\tfmt.Fprintln(buf, s.Text())\n\t\t\tlineNum++\n\t\t}\n\t}\n\n\treturn nil\n}", "func (o WebAclRuleStatementOrStatementStatementAndStatementStatementAndStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementOrStatementStatementAndStatementStatementAndStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementNotStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementNotStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementXssMatchStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementXssMatchStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementAndStatementStatementNotStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v 
WebAclRuleStatementAndStatementStatementNotStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (s *Service) ExtendsInclude() string {\n\tincludeAndService := strings.Split(s.Extends, \".\")\n\tif len(includeAndService) == 2 {\n\t\treturn includeAndService[0]\n\t}\n\treturn \"\"\n}", "func (f *Frugal) ReferencedIncludes() ([]*Include, error) {\n\tincludes := []*Include{}\n\tincludesSet := make(map[string]*Include)\n\tfor _, serv := range f.Services {\n\t\tservIncludes, err := serv.ReferencedIncludes()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfor _, include := range servIncludes {\n\t\t\tif _, ok := includesSet[include.Name]; !ok {\n\t\t\t\tincludesSet[include.Name] = include\n\t\t\t\tincludes = append(includes, include)\n\t\t\t}\n\t\t}\n\t}\n\treturn includes, nil\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementOrStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementOrStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementNotStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementNotStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (d *BackupDescriptor) Include(classes []string) {\n\tif len(classes) == 0 {\n\t\treturn\n\t}\n\tset := make(map[string]struct{}, len(classes))\n\tfor _, cls := range classes {\n\t\tset[cls] = struct{}{}\n\t}\n\tpred := 
func(s string) bool {\n\t\t_, ok := set[s]\n\t\treturn ok\n\t}\n\td.Filter(pred)\n}", "func (o WebAclRuleStatementNotStatementStatementOrStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementOrStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (f *Frugal) ReferencedServiceIncludes() ([]*Include, error) {\n\tincludeNames := []string{}\n\tincludesSet := make(map[string]*Include)\n\tfor _, service := range f.Services {\n\t\tservIncludes, err := service.ReferencedIncludes()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfor _, include := range servIncludes {\n\t\t\tif _, ok := includesSet[include.Name]; !ok {\n\t\t\t\tincludesSet[include.Name] = include\n\t\t\t\tincludeNames = append(includeNames, include.Name)\n\t\t\t}\n\t\t}\n\t}\n\tsort.Strings(includeNames)\n\tincludes := make([]*Include, len(includeNames))\n\tfor i, include := range includeNames {\n\t\tincludes[i] = includesSet[include]\n\t}\n\treturn includes, nil\n}", "func main() {\n\tif len(os.Args[1:]) < 2 {\n\t\tpanic(\"please use 'go run include_headers.go <c_file> <output_file> [include_dir]...'\")\n\t}\n\n\t// cwd is guaranteed to be the directory where the go:generate comment is found\n\tcwd, err := os.Getwd()\n\tif err != nil {\n\t\tlog.Fatalf(\"unable to get current working directory: %s\", err)\n\t}\n\troot := rootDir(cwd)\n\targs := os.Args[1:]\n\tinputFile, err := resolvePath(root, args[0])\n\tif err != nil {\n\t\tlog.Fatalf(\"unable to resolve path to %s: %s\", args[0], err)\n\t}\n\toutputFile, err := resolvePath(root, args[1])\n\tif err != nil {\n\t\tlog.Fatalf(\"unable to resolve path to %s: %s\", args[1], err)\n\t}\n\n\terr = runProcessing(root, inputFile, outputFile, args[2:])\n\tif err != nil {\n\t\tlog.Fatalf(\"error including headers: %s\", 
err)\n\t}\n\tfmt.Printf(\"successfully included headers from %s => %s\\n\", inputFile, outputFile)\n}", "func (f *Filter) AddInclude(s Matcher) {\n\tf.include = append(f.include, s)\n}", "func TestIncludeDiamond(t *testing.T) {\n\tt.Parallel()\n\tif _, ifnames, _, err := Compile(path.Join(\"testdata\", \"include_diamond_1.mro\"),\n\t\t[]string{\"testdata\"}, false); err != nil {\n\t\tt.Error(err)\n\t} else {\n\t\tif len(ifnames) != 4 {\n\t\t\tt.Errorf(\"Expected 3 includes, found %d\\n%v\", len(ifnames), ifnames)\n\t\t}\n\t\tfound := false\n\t\tfor _, f := range ifnames {\n\t\t\tif f == \"include_diamond_2.mro\" {\n\t\t\t\tfound = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !found {\n\t\t\tt.Error(\"Expected to find pipeline.mro.\")\n\t\t}\n\t\tfound = false\n\t\tfor _, f := range ifnames {\n\t\t\tif f == \"include_diamond_3.mro\" {\n\t\t\t\tfound = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !found {\n\t\t\tt.Error(\"Expected to find stages.mro.\")\n\t\t}\n\t\tfound = false\n\t\tfor _, f := range ifnames {\n\t\t\tif f == \"include_diamond_4.mro\" {\n\t\t\t\tfound = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !found {\n\t\t\tt.Error(\"Expected to find stages.mro.\")\n\t\t}\n\t}\n}", "func (fe *FilterExpr) Includes() []string {\n\tif len(fe.Include) > 0 {\n\t\treturn []string{fe.Include}\n\t}\n\tif fe.Filter == nil {\n\t\treturn nil\n\t}\n\treturn fe.Filter.Includes()\n}", "func Include() fs.FS {\n\tf, _ := fs.Sub(include, \"include\")\n\treturn f\n}", "func (o WebAclRuleStatementNotStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPatternOutput) IncludedHeaders() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v WebAclRuleStatementNotStatementStatementSizeConstraintStatementFieldToMatchHeaderMatchPattern) []string {\n\t\treturn v.IncludedHeaders\n\t}).(pulumi.StringArrayOutput)\n}", "func (f *Frugal) Include(name string) *Include {\n\tname = filepath.Base(name)\n\tfor _, include := range f.Includes {\n\t\tif filepath.Base(include.Name) == name 
{\n\t\t\treturn include\n\t\t}\n\t}\n\treturn nil\n}", "func (fc *FilterCollection) HasInclude(path string) bool {\n\tcleanedPath := filepath.Clean(path)\n\n\tfor _, pattern := range fc.Includes {\n\t\tif fc.match(pattern, cleanedPath) {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}", "func (s *DescribeServicesInput) SetInclude(v []*string) *DescribeServicesInput {\n\ts.Include = v\n\treturn s\n}", "func (s *Scope) ReferencedIncludes() ([]*Include, error) {\n\tvar err error\n\tincludes := []*Include{}\n\tincludesSet := make(map[string]*Include)\n\tfor _, op := range s.Operations {\n\t\tincludesSet, includes, err = addInclude(includesSet, includes, op.Type, s.Frugal)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn includes, nil\n}", "func GetIncludeListWithUserSource(inputFiles, clangFlags []string, cppCode bool) (\n\tlines []string, err error) {\n\tvar out string\n\tout, err = getIncludeList(inputFiles, clangFlags, []string{\"-MM\"}, cppCode)\n\tif err != nil {\n\t\treturn\n\t}\n\treturn parseIncludeList(out)\n}", "func (in *ActionUserRequestChangeResolveMetaGlobalInput) SetIncludes(value string) *ActionUserRequestChangeResolveMetaGlobalInput {\n\tin.Includes = value\n\n\tif in._selectedParameters == nil {\n\t\tin._selectedParameters = make(map[string]interface{})\n\t}\n\n\tin._selectedParameters[\"Includes\"] = nil\n\treturn in\n}", "func GetIncludeFullList(inputFiles, clangFlags []string, cppCode bool) (\n\tlines []string, err error) {\n\tvar out string\n\tout, err = getIncludeList(inputFiles, clangFlags, []string{\"-M\"}, cppCode)\n\tif err != nil {\n\t\treturn\n\t}\n\treturn parseIncludeList(out)\n}", "func Include(attributes ...string) Options {\n\treturn include{attributes: attributes}\n}", "func TestFixIncludesPipeline(t *testing.T) {\n\tt.Parallel()\n\tif src, err := FormatFile(path.Join(\"testdata\", \"pipeline.mro\"),\n\t\ttrue,\n\t\t[]string{\"testdata\"}); err != nil {\n\t\tt.Error(err)\n\t} else {\n\t\tif src != `@include 
\"stages.mro\"\n\npipeline MY_PIPELINE(\n in int info,\n out bam result \"description of output\" \"output.bam\",\n)\n{\n call MY_STAGE(\n info = self.info,\n )\n\n return (\n result = MY_STAGE.result,\n )\n}\n# trailing comment\n` {\n\t\t\tt.Errorf(\"Incorrect combined source. Got \\n%s\", src)\n\t\t}\n\t}\n}", "func analyzeFiles(inputFiles, clangFlags []string, cppCode bool) (\n\titems []entity, err error) {\n\t// See : https://clang.llvm.org/docs/CommandGuide/clang.html\n\t// clang -E <file> Run the preprocessor stage.\n\tvar out bytes.Buffer\n\tout, err = getPreprocessSources(inputFiles, clangFlags, cppCode)\n\tif err != nil {\n\t\treturn\n\t}\n\n\t// Parsing preprocessor file\n\tr := bytes.NewReader(out.Bytes())\n\tscanner := bufio.NewScanner(r)\n\tscanner.Split(bufio.ScanLines)\n\t// counter - get position of line\n\tvar counter int\n\t// item, items - entity of preprocess file\n\tvar item *entity\n\n\treg := util.GetRegex(\"# (\\\\d+) \\\".*\\\".*\")\n\n\tfor scanner.Scan() {\n\t\tline := scanner.Text()\n\t\tif reg.MatchString(line) {\n\t\t\tif item != (*entity)(nil) {\n\t\t\t\titems = append(items, *item)\n\t\t\t}\n\t\t\titem, err = parseIncludePreprocessorLine(line)\n\t\t\tif err != nil {\n\t\t\t\terr = fmt.Errorf(\"cannot parse line : %s with error: %s\", line, err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif item.positionInSource == 0 {\n\t\t\t\t// cannot by less 1 for avoid problem with\n\t\t\t\t// identification of \"0\" AST base element\n\t\t\t\titem.positionInSource = 1\n\t\t\t}\n\t\t\titem.lines = make([]*string, 0)\n\t\t}\n\t\tcounter++\n\t\titem.lines = append(item.lines, &line)\n\t}\n\tif item != (*entity)(nil) {\n\t\titems = append(items, *item)\n\t}\n\treturn\n}", "func (r Rust) IncludeFiles() []string { return []string{\"Cargo.toml\"} }", "func (o TaskOutput) Includes() TaskIncludesPtrOutput {\n\treturn o.ApplyT(func(v *Task) TaskIncludesPtrOutput { return v.Includes }).(TaskIncludesPtrOutput)\n}", "func MaybeIncludeFile(s string, orbDirectory string) 
(string, error) {\n\t// View: https://regexr.com/599mq\n\tincludeRegex := regexp.MustCompile(`<<[\\s]*include\\(([-\\w\\/\\.]+)\\)?[\\s]*>>`)\n\n\t// only find up to 2 matches, because we throw an error if we find >1\n\tincludeMatches := includeRegex.FindAllStringSubmatch(s, 2)\n\tif len(includeMatches) > 1 {\n\t\treturn \"\", fmt.Errorf(\"multiple include statements: '%s'\", s)\n\t}\n\n\tif len(includeMatches) == 1 {\n\t\tmatch := includeMatches[0]\n\t\tfullMatch, subMatch := match[0], match[1]\n\n\t\t// throw an error if the entire string wasn't matched\n\t\tif fullMatch != s {\n\t\t\treturn \"\", fmt.Errorf(\"entire string must be include statement: '%s'\", s)\n\t\t}\n\n\t\tfilepath := filepath.Join(orbDirectory, subMatch)\n\t\tfile, err := os.ReadFile(filepath)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"could not open %s for inclusion\", filepath)\n\t\t}\n\t\tescaped := strings.ReplaceAll(string(file), \"<<\", \"\\\\<<\")\n\n\t\treturn escaped, nil\n\t}\n\n\treturn s, nil\n}", "func Parse(Includes, Excludes [][]string) []byte {\n\tvar host, scheme, port, path string\n\tvar cludes [][][]string\n\n\tcludes = append(cludes, Includes)\n\tcludes = append(cludes, Excludes)\n\n\t// file containing servicenames and ports\n\tfr := File.ReadFromRoot(\"configs/known-ports.txt\", \"pkg\")\n\n\tfor i, clude := range cludes {\n\t\tfor _, item := range clude {\n\t\t\tip := regexp.MustCompile(`\\d+\\.\\d+\\.\\d+\\.\\d+`)\n\n\t\t\tif ip.MatchString(item[0]) {\n\t\t\t\tfor _, ip := range item {\n\t\t\t\t\thost := parseHost(ip)\n\t\t\t\t\tscheme = \"Any\"\n\t\t\t\t\tif i == 0 {\n\t\t\t\t\t\tadd(scheme, host, \"^(80|443)$\", path, false)\n\t\t\t\t\t} else {\n\t\t\t\t\t\tadd(scheme, host, \"^(80|443)$\", path, true)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tscheme = strings.TrimRight(item[1], \"://\")\n\t\t\t\thost = item[2] + item[3] + item[4]\n\t\t\t\tport = strings.TrimLeft(item[6], \":\")\n\t\t\t\tpath = item[7]\n\n\t\t\t\t//fmt.Println(\"S:\" + scheme + 
\"H:\" + host + \"PO:\" + port + \"PA:\" + path)\n\t\t\t\tscheme, port = parseSchemeAndPort(fr, scheme, port)\n\n\t\t\t\thost = parseHost(host)\n\t\t\t\tpath = parseFile(path)\n\n\t\t\t\tif i == 0 {\n\t\t\t\t\tadd(scheme, host, port, path, false)\n\t\t\t\t} else {\n\t\t\t\t\tadd(scheme, host, port, path, true)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// scope object\n\tscope := Scope{}\n\tscope.Target.Scope.AdvancedMode = true\n\t// add include/exclude slices\n\tscope.Target.Scope.Include = incslice.Include\n\tscope.Target.Scope.Exclude = exslice.Exclude\n\n\t// parse pretty json\n\tjson, err := json.MarshalIndent(scope, \"\", \" \")\n\tif err != nil {\n\t\tfmt.Println(\"json err:\", err)\n\t}\n\treturn json\n}", "func IncludeFields(paths []string, includePaths ...string) []string {\n\tif len(paths) == 0 {\n\t\treturn paths\n\t}\n\tincluded := make([]string, 0, len(paths))\n\tfor _, path := range paths {\n\t\tif HasAnyField(includePaths, path) {\n\t\t\tincluded = append(included, path)\n\t\t}\n\t}\n\treturn included\n}" ]
[ "0.678124", "0.6446041", "0.61750436", "0.610039", "0.6088763", "0.59693265", "0.5771785", "0.5586935", "0.5545841", "0.5491811", "0.5489514", "0.5452785", "0.5350742", "0.5308133", "0.53067124", "0.53055584", "0.53052", "0.5304817", "0.53026134", "0.5296373", "0.5293165", "0.5292278", "0.5290414", "0.5287466", "0.52868646", "0.5277426", "0.5274859", "0.5262468", "0.52623284", "0.52611667", "0.52590424", "0.52553886", "0.5247337", "0.52432734", "0.5239582", "0.5239193", "0.5234012", "0.5232591", "0.5231891", "0.522568", "0.5219155", "0.5213819", "0.52119744", "0.5199697", "0.51964134", "0.5196172", "0.5187277", "0.5185783", "0.5180413", "0.51799625", "0.5178918", "0.51772845", "0.51724213", "0.5167939", "0.5165197", "0.5162568", "0.5160099", "0.5157959", "0.5154649", "0.5143694", "0.5136201", "0.51305926", "0.511316", "0.5106565", "0.5104407", "0.5101744", "0.51005214", "0.50979054", "0.5097351", "0.5097217", "0.5085658", "0.50797224", "0.50516105", "0.5050618", "0.5040303", "0.5034468", "0.5019519", "0.5018545", "0.4988509", "0.49733216", "0.49675032", "0.49197978", "0.48956737", "0.48797345", "0.48696667", "0.48695284", "0.48563263", "0.48500258", "0.48480073", "0.48430988", "0.48308057", "0.4824238", "0.48224685", "0.482179", "0.48077554", "0.48057678", "0.48052418", "0.48049724", "0.4802129", "0.47979853" ]
0.8502233
0
parseForwardDeclares .. Parses dependencies to create an foward declare string for each.
func (i *Interface) parseForwardDeclares() { for _, dependency := range i.Dependencies { if !parsers.ShouldBeIncludedInHeader(dependency) { i.ForwardDeclaresString += "class " + dependency + ";\n" } } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (i *Interface) parseDependencies() {\n\tvar dependencies []string\n\tfor _, function := range i.Functions {\n\n\t\t// \"expanded\" refers to creating a parsers.from a templated type, i.e \"QMap <int, QString>\" becomes [QMap int QString]\n\t\texpandedReturnType := strings.FieldsFunc(function.ReturnType, templatedTypeSeparators) \n\t\tfor _, dataType := range(expandedReturnType) {\n\t\t\tdependencies = append(dependencies, strings.TrimSpace(dataType))\n\t\t}\n\n\t\tfor _, parameter := range function.Parameters {\n\t\t\texpandedParameter := strings.FieldsFunc(parameter.Type, templatedTypeSeparators)\n\t\t\tfor _, innerParameter := range expandedParameter {\n\t\t\t\tdependencies = append(dependencies, strings.TrimSpace(innerParameter))\n\t\t\t} \n\t\t}\n\t}\n\ti.Dependencies = dependencies\n\ti.Dependencies = parsers.RemoveConstSpecifiers(i.Dependencies)\n\ti.Dependencies = parsers.RemovePointersAndReferences(i.Dependencies)\n\ti.Dependencies = parsers.RemoveStdDataTypes(i.Dependencies)\n\ti.Dependencies = parsers.MapDataTypesToLibraryDependencies(i.Dependencies)\n\ti.Dependencies = parsers.RemoveDuplicates(i.Dependencies)\n\tsort.Strings(i.Dependencies)\n}", "func (p* Parser) declarations() {\n for(true) {\n\t switch (p.currentToken.getTokenType()) {\n\t case \"TK_VAR\":\n\t\t p.varDeclarations();\n\t\t break;\n\t case \"TK_PROCEDURE\":\n\t\t p.procDeclaration();\n\t\t break;\n\t case \"TK_LABEL\":\n\t\t p.labelDeclarations();\n\t\t break;\n\t case \"TK_BEGIN\":\n\t\t return;\n\t }\n }\n}", "func (id ifaceDefiner) Declare() {\n\tfor ix := range id.pkg.Files {\n\t\tfile, pfile := id.pkg.Files[ix], id.pfiles[ix]\n\t\tfor _, pdef := range pfile.Interfaces {\n\t\t\texport, err := validIdent(pdef.Name, reservedNormal)\n\t\t\tif err != nil {\n\t\t\t\tid.env.prefixErrorf(file, pdef.Pos, err, \"interface %s invalid name\", pdef.Name)\n\t\t\t\tcontinue // keep going to catch more errors\n\t\t\t}\n\t\t\tdetail := identDetail(\"interface\", file, pdef.Pos)\n\t\t\tif err 
:= file.DeclareIdent(pdef.Name, detail); err != nil {\n\t\t\t\tid.env.prefixErrorf(file, pdef.Pos, err, \"interface %s name conflict\", pdef.Name)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tdef := &Interface{NamePos: NamePos(pdef.NamePos), Exported: export, File: file}\n\t\t\tid.builders[pdef.Name] = &ifaceBuilder{def, pdef}\n\t\t}\n\t}\n}", "func (td typeDefiner) Declare() {\n\tfor ix := range td.pkg.Files {\n\t\tfile, pfile := td.pkg.Files[ix], td.pfiles[ix]\n\t\tfor _, pdef := range pfile.TypeDefs {\n\t\t\tdetail := identDetail(\"type\", file, pdef.Pos)\n\t\t\tif err := file.DeclareIdent(pdef.Name, detail); err != nil {\n\t\t\t\ttd.env.prefixErrorf(file, pdef.Pos, err, \"type %s name conflict\", pdef.Name)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\ttd.builders[pdef.Name] = td.makeTypeDefBuilder(file, pdef)\n\t\t}\n\t}\n}", "func resolveDeclarations(ctx *sql.Context, a *Analyzer, node sql.Node, scope *Scope) (sql.Node, error) {\n\treturn resolveDeclarationsInner(ctx, a, node, newDeclarationScope(nil))\n}", "func (i *Interface) parseIncludes() {\n\tfor _, dependency := range i.Dependencies {\n\t\tinclude := NewInclude(dependency)\n\t\tif parsers.ShouldBeIncludedInHeader(dependency) {\n\t\t\ti.HeaderIncludesString += include.ToString() + \"\\n\"\n\t\t} else {\n\t\t\ti.ImplementationIncludesString += include.ToString() + \"\\n\"\n\t\t}\n\t}\n}", "func (p *Parser) declare(decl, data interface{}, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) {\n\tfor _, ident := range idents {\n\t\tassert(ident.Obj == nil, \"identifier already declared or resolved\")\n\t\tobj := ast.NewObj(kind, ident.Name)\n\t\t// remember the corresponding declaration for redeclaration\n\t\t// errors and global variable resolution/typechecking phase\n\t\tobj.Decl = decl\n\t\tobj.Data = data\n\t\tident.Obj = obj\n\t\tif ident.Name != \"_\" {\n\t\t\tif alt := scope.Insert(obj); alt != nil /*&& p.mode&DeclarationErrors != 0*/ {\n\t\t\t\tprevDecl := \"\"\n\t\t\t\tif pos := alt.Pos(); pos.IsValid() 
{\n\t\t\t\t\tprevDecl = fmt.Sprintf(\"\\n\\tprevious declaration at %s\", p.file.Position(pos))\n\t\t\t\t}\n\t\t\t\tp.error(ident.Pos(), fmt.Sprintf(\"%s redeclared in this block%s\", ident.Name, prevDecl))\n\t\t\t}\n\t\t}\n\t}\n}", "func (w *bodyBase) declared(name string) bool {\n\tfor _, s := range w.list {\n\t\tif decl, ok := s.(*Declare); ok && decl.name == name {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func ParseForwardPorts(h IptablesHandler, nat string, chain string) ([]int, error) {\n\trules, err := h.IptablesListRules(nat, chain)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treason := \"\"\n\tports := make([]int, 0)\n\tfor _, rule := range rules {\n\t\tflags := pflag.NewFlagSet(\"iptables-flag\", pflag.ContinueOnError)\n\t\tflags.ParseErrorsWhitelist.UnknownFlags = true\n\t\tforwardPort := flags.Int(\"dport\", 0, \"\")\n\t\terr := flags.Parse(strings.Split(rule, \" \"))\n\t\tif err != nil {\n\t\t\treason = fmt.Sprintf(\"%s; %s\", reason, err.Error())\n\t\t} else if *forwardPort != 0 {\n\t\t\tports = append(ports, *forwardPort)\n\t\t}\n\t}\n\n\treturn ports, nil\n}", "func convertDependencies(deps []string) []*license_metadata_proto.AnnotatedDependency {\n\tvar ret []*license_metadata_proto.AnnotatedDependency\n\n\tfor _, d := range deps {\n\t\tcomponents := strings.Split(d, \":\")\n\t\tdep := components[0]\n\t\tcomponents = components[1:]\n\t\tad := &license_metadata_proto.AnnotatedDependency{\n\t\t\tFile: proto.String(dep),\n\t\t\tAnnotations: make([]string, 0, len(components)),\n\t\t}\n\t\tfor _, ann := range components {\n\t\t\tif len(ann) == 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tad.Annotations = append(ad.Annotations, ann)\n\t\t}\n\t\tret = append(ret, ad)\n\t}\n\n\treturn ret\n}", "func (g *Generator) declareIndexAndNameVar(run []Value, typeName string) {\n\tindex, name := g.createIndexAndNameDecl(run, typeName, \"\")\n\tg.Printf(\"const %s\\n\", name)\n\tg.Printf(\"var %s\\n\", index)\n}", "func (parser *Parser) funcsDeclars() 
([]*Function, error) {\n\tparser.trace(\"FUNCS DECLARS\")\n\tfunction, err := parser.funcDeclar()\n\t// Empty, is not an error\n\tif err == ErrNoMatch {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfuncs, err := parser.funcsDeclars()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn append([]*Function{function}, funcs...), nil\n}", "func (g *Generator) declareIndexAndNameVars(runs [][]Value, typeName string) {\n\tvar indexes, names []string\n\tfor i, run := range runs {\n\t\tindex, name := g.createIndexAndNameDecl(run, typeName, fmt.Sprintf(\"_%d\", i))\n\t\tif len(run) != 1 {\n\t\t\tindexes = append(indexes, index)\n\t\t}\n\t\tnames = append(names, name)\n\t}\n\tg.Printf(\"const (\\n\")\n\tfor _, name := range names {\n\t\tg.Printf(\"\\t%s\\n\", name)\n\t}\n\tg.Printf(\")\\n\\n\")\n\n\tif len(indexes) > 0 {\n\t\tg.Printf(\"var (\")\n\t\tfor _, index := range indexes {\n\t\t\tg.Printf(\"\\t%s\\n\", index)\n\t\t}\n\t\tg.Printf(\")\\n\\n\")\n\t}\n}", "func (s *BasePlSqlParserListener) EnterSeq_of_declare_specs(ctx *Seq_of_declare_specsContext) {}", "func calculateDependencies(definition string) (definitions dependencies, err error) {\n half := make(dependencies, 0)\n marked := make(dependencies, 0)\n\n err = visitDefinition(definition, &half, &marked)\n\n if nil == err {\n definitions = marked\n }\n\n return\n}", "func (p *Parser) varDeclarations() {\n\tfor(true) {\n\t\tif (\"TK_VAR\" == (p.currentToken.getTokenType())) {\n\t\t\tp.match(\"TK_VAR\");\n\t\t} else {\n\t\t\t// currentToken is not \"TK_VAR\"\n\t\t\tbreak;\n\t\t}\n\n\t\t// Store variables in a list\n\n\t\tvar variablesArrayList []Token\n\t\tfor (\"TK_IDENTIFIER\" == (p.currentToken.getTokenType())) {\n\t\t\tp.currentToken.setTokenType(\"TK_A_VAR\");\n\t\t\tvariablesArrayList = append(variablesArrayList, p.currentToken);\n\n\t\t\tp.match(\"TK_A_VAR\");\n\n\t\t\tif (\"TK_COMMA\" == (p.currentToken.getTokenType())) 
{\n\t\t\t\tp.match(\"TK_COMMA\");\n\t\t\t}\n\t\t}\n\n\t\tp.match(\"TK_COLON\");\n\t\tdataType := p.currentToken.getTokenType();\n\t\tp.match(dataType);\n\n\t\t// Add the correct datatype for each identifier and insert into symbol table\n\n\t\tfor i := 0; i < len(variablesArrayList); i++ {\n\n\t\t\tvar variable Token = variablesArrayList[i]\n\t\t\tdt := strings.ToLower(dataType)\n\n\t\t\tsymbol := NewSymbol(variable.getTokenVal(), \"TK_A_VAR\",\n\t\t\t\tp.STRING_TYPE_MAP[dt[3 : len(dt)]], p.dp);\n\n\t\t\tp.dp += 4;\n\n\n\t\t\tif (p.symbolTable.lookup(variable.getTokenVal()) == nil) {\n\t\t\t\tp.symbolTable.insert(symbol);\n\t\t\t}\n\t\t}\n\n\t\tif (dataType == (\"TK_ARRAY\")){\n\t\t\tp.arrayDeclaration(variablesArrayList);\n\t\t}\n\n\t\tp.match(\"TK_SEMI_COLON\");\n\n\t}\n}", "func buildDependencies(fdSet *dpb.FileDescriptorSet) {\n\t// Dependency to google/api/annotations.proto for gRPC-HTTP transcoding. Here a couple of problems arise:\n\t// 1. Problem: \tWe cannot call descriptor.ForMessage(&annotations.E_Http), which would be our\n\t//\t\t\t\trequired dependency. However, we can call descriptor.ForMessage(&http) and\n\t//\t\t\t\tthen construct the extension manually.\n\t// 2. Problem: \tThe name is set wrong.\n\t// 3. Problem: \tgoogle/api/annotations.proto has a dependency to google/protobuf/descriptor.proto.\n\thttp := annotations.Http{}\n\tfd, _ := descriptor.MessageDescriptorProto(&http)\n\n\textensionName := \"http\"\n\tn := \"google/api/annotations.proto\"\n\tl := dpb.FieldDescriptorProto_LABEL_OPTIONAL\n\tt := dpb.FieldDescriptorProto_TYPE_MESSAGE\n\ttName := \"google.api.HttpRule\"\n\textendee := \".google.protobuf.MethodOptions\"\n\n\thttpExtension := &dpb.FieldDescriptorProto{\n\t\tName: &extensionName,\n\t\tNumber: &annotations.E_Http.Field,\n\t\tLabel: &l,\n\t\tType: &t,\n\t\tTypeName: &tName,\n\t\tExtendee: &extendee,\n\t}\n\n\tfd.Extension = append(fd.Extension, httpExtension) // 1. Problem\n\tfd.Name = &n // 2. 
Problem\n\tfd.Dependency = append(fd.Dependency, \"google/protobuf/descriptor.proto\") //3.rd Problem\n\n\t// Build other required dependencies\n\te := empty.Empty{}\n\tfdp := dpb.DescriptorProto{}\n\tfd2, _ := descriptor.MessageDescriptorProto(&e)\n\tfd3, _ := descriptor.MessageDescriptorProto(&fdp)\n\tdependencies := []*dpb.FileDescriptorProto{fd, fd2, fd3}\n\n\t// According to the documentation of protoReflect.CreateFileDescriptorFromSet the file I want to print\n\t// needs to be at the end of the array. All other FileDescriptorProto are dependencies.\n\tfdSet.File = append(dependencies, fdSet.File...)\n}", "func addDependencies(fdSet *dpb.FileDescriptorSet) {\n\t// At last, we need to add the dependencies to the FileDescriptorProto in order to get them rendered.\n\tlastFdProto := getLast(fdSet.File)\n\tfor _, fd := range fdSet.File {\n\t\tif fd != lastFdProto {\n\t\t\tif *fd.Name == \"google/protobuf/empty.proto\" { // Reference: https://github.com/googleapis/gnostic-grpc/issues/8\n\t\t\t\tif shouldRenderEmptyImport {\n\t\t\t\t\tlastFdProto.Dependency = append(lastFdProto.Dependency, *fd.Name)\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tlastFdProto.Dependency = append(lastFdProto.Dependency, *fd.Name)\n\t\t}\n\t}\n\t// Sort imports so they will be rendered in a consistent order.\n\tsort.Strings(lastFdProto.Dependency)\n}", "func (s *scope) declare(object sem.Named, source tok.Source) error {\n\tname := object.GetName()\n\tif existing := s.lookup(name); existing != nil {\n\t\treturn fmt.Errorf(\"%v '%v' already declared\\nFirst declared here: %v\", source, name, existing.source)\n\t}\n\ts.objects[name] = objectAndSource{object, source}\n\treturn nil\n}", "func declareFlags(fs *pflag.FlagSet, v interface{}) error {\n\tpt := reflect.TypeOf(v)\n\n\tif pt.Kind() != reflect.Ptr {\n\t\treturn errors.New(\"v must be a pointer-to-struct\")\n\t}\n\n\tst := pt.Elem()\n\tif st.Kind() != reflect.Struct {\n\t\treturn errors.New(\"v must be a 
pointer-to-struct\")\n\t}\n\n\trv := reflect.ValueOf(v).Elem()\n\n\tfor i := 0; i < rv.NumField(); i++ {\n\t\tf := st.Field(i)\n\n\t\t// exclude special protocol buffers fields\n\t\tif strings.HasPrefix(f.Name, \"XXX_\") {\n\t\t\tcontinue\n\t\t}\n\n\t\tp := rv.Field(i).Addr().Interface()\n\t\tn := FlagName(f)\n\t\tu := FlagUsage(f)\n\n\t\tif ok := declareFlagForField(fs, p, n, u); !ok {\n\t\t\treturn fmt.Errorf(\n\t\t\t\t\"could not generate flag for %s.%s (%s)\",\n\t\t\t\treflect.TypeOf(v),\n\t\t\t\tf.Name,\n\t\t\t\tf.Type,\n\t\t\t)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (s *BaseSyslParserListener) EnterImports_decl(ctx *Imports_declContext) {}", "func addDependencies(s *scope, name string, obj pyObject, target *core.BuildTarget, exported, internal bool) {\n\taddStrings(s, name, obj, func(str string) {\n\t\tif s.state.Config.Bazel.Compatibility && !core.LooksLikeABuildLabel(str) && !strings.HasPrefix(str, \"@\") {\n\t\t\t// *sigh*... Bazel seems to allow an implicit : on the start of dependencies\n\t\t\tstr = \":\" + str\n\t\t}\n\t\ttarget.AddMaybeExportedDependency(checkLabel(s, s.parseLabelInPackage(str, s.pkg)), exported, false, internal)\n\t})\n}", "func getObjDefs(data string) (*obj, error) {\n objDefs := newObj()\n reAttr := regexp.MustCompile(`\\s*(?P<attr>.*?)\\s+(?P<value>.*)\\n`)\n reObjDef := regexp.MustCompile(`(?sm)(^\\s*define\\s+[a-z]+?\\s*{)(.*?\\n)(\\s*})`)\n rawObjDefs := reObjDef.FindAllStringSubmatch(data, -1)\n c1,c2 := 0, 0 // hostdependency and servicedependency does not have a unique identifier, will use index instead\n if rawObjDefs != nil {\n for _,oDef:= range rawObjDefs {\n defStart := strings.Join(strings.Fields(oDef[1]),\"\")\n objType := strings.TrimSpace(oDef[1])\n objAttrs := parseObjAttr(oDef, reAttr, objType)\n switch defStart {\n case \"definehost{\":\n if objAttrs.attrExist(\"name\"){\n objDefs.SetHostTempDefs(objAttrs)\n } else {\n objDefs.SetHostDefs(objAttrs)\n }\n case \"defineservice{\":\n if 
objAttrs.attrExist(\"name\"){\n objDefs.SetServiceTempDefs(objAttrs)\n } else {\n objDefs.SetServiceDefs(objAttrs)\n }\n case \"definehostgroup{\":\n objDefs.SetHostGroupDefs(objAttrs)\n case \"definehostdependency{\":\n c1 += 1\n objDefs.SetHostDependencyDefs(objAttrs, c1)\n case \"defineservicedependency{\":\n c2 += 1\n objDefs.SetServiceDependencyDefs(objAttrs, c2)\n case \"definecontact{\":\n if objAttrs.attrExist(\"name\"){\n objDefs.SetContactTempDefs(objAttrs)\n } else {\n objDefs.SetContactDefs(objAttrs)\n }\n case \"definecontactgroup{\":\n objDefs.SetContactGroupDefs(objAttrs)\n case \"definecommand{\":\n if objAttrs.attrExist(\"command_name\") && objAttrs.attrExist(\"command_line\"){\n objDefs.SetcommandDefs(objAttrs)\n }else {\n fmt.Println(\"here\",objAttrs)\n }\n default:\n err := errors.New(\"unknown naigos object type\")\n fmt.Println(&unknownObjectError{objAttrs,objType,err})\n }\n }\n } else {\n err := errors.New(\"no nagios object definition found\")\n return nil,&NotFoundError{err, \"Fatal\", \"\"} \n }\n return objDefs, nil\n}", "func (p *Processor) Forward(xs ...ag.Node) []ag.Node {\n\tif p.RequiresFullSeq() {\n\t\treturn p.fullSeqForward(xs)\n\t}\n\treturn p.incrementalForward(xs)\n}", "func (s *BasePlSqlParserListener) EnterPragma_declaration(ctx *Pragma_declarationContext) {}", "func (g *Generator) declareNameVars(runs [][]Value, typeName string, suffix string) {\n\tg.Printf(\"const _%s_name%s = \\\"\", typeName, suffix)\n\tfor _, run := range runs {\n\t\tfor i := range run {\n\t\t\tg.Printf(\"%s\", run[i].typeInfo.originalName)\n\t\t}\n\t}\n\tg.Printf(\"\\\"\\n\")\n}", "func dropEmptyImportDeclarations(decl ast.Decl) bool {\n\tswitch t := decl.(type) {\n\tcase *ast.GenDecl:\n\t\tif t.Tok != token.IMPORT {\n\t\t\treturn false\n\t\t}\n\t\tspecs := []ast.Spec{}\n\t\tfor _, s := range t.Specs {\n\t\t\tswitch spec := s.(type) {\n\t\t\tcase *ast.ImportSpec:\n\t\t\t\tif spec.Name != nil && spec.Name.Name == \"_\" 
{\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tspecs = append(specs, spec)\n\t\t\t}\n\t\t}\n\t\tif len(specs) == 0 {\n\t\t\treturn true\n\t\t}\n\t\tt.Specs = specs\n\t}\n\treturn false\n}", "func (m *Method) ArgDeclarations() string {\n\tresults := make([]string, len(m.Request))\n\tfor i, argument := range m.Request {\n\t\tresults[i] = argument.Name + \" \" + argument.Type\n\t}\n\treturn strings.Join(results, \", \")\n}", "func (p *Parser) vardec(tokens []obj.Token) { p.fvardec(&p.defs, tokens) }", "func prepareSegmentFwdArgs() []greenplum.SegmentFwdArg {\n\treturn []greenplum.SegmentFwdArg{\n\t\t{Name: permanentFlag, Value: strconv.FormatBool(permanent)},\n\t}\n}", "func (td typeDefiner) getLocalDeps(ptype parse.Type) (deps []*typeDefBuilder) {\n\tswitch pt := ptype.(type) {\n\tcase *parse.TypeNamed:\n\t\t// Named references to other types in this package are all we care about.\n\t\tif b := td.builders[pt.Name]; b != nil {\n\t\t\tdeps = append(deps, b)\n\t\t}\n\tcase *parse.TypeEnum:\n\t\t// No deps.\n\tcase *parse.TypeArray:\n\t\tdeps = append(deps, td.getLocalDeps(pt.Elem)...)\n\tcase *parse.TypeList:\n\t\tdeps = append(deps, td.getLocalDeps(pt.Elem)...)\n\tcase *parse.TypeSet:\n\t\tdeps = append(deps, td.getLocalDeps(pt.Key)...)\n\tcase *parse.TypeMap:\n\t\tdeps = append(deps, td.getLocalDeps(pt.Key)...)\n\t\tdeps = append(deps, td.getLocalDeps(pt.Elem)...)\n\tcase *parse.TypeStruct:\n\t\tfor _, field := range pt.Fields {\n\t\t\tdeps = append(deps, td.getLocalDeps(field.Type)...)\n\t\t}\n\tcase *parse.TypeUnion:\n\t\tfor _, field := range pt.Fields {\n\t\t\tdeps = append(deps, td.getLocalDeps(field.Type)...)\n\t\t}\n\tcase *parse.TypeOptional:\n\t\tdeps = append(deps, td.getLocalDeps(pt.Base)...)\n\tdefault:\n\t\tpanic(fmt.Errorf(\"vdl: unhandled parse.Type %T %#v\", ptype, ptype))\n\t}\n\treturn\n}", "func declf(name, format string, a ...interface{}) *Declare {\n\treturn &Declare{name, exprf(format, a...)}\n}", "func (info *globalInfo) parsePragmas(doc 
*ast.CommentGroup) {\n\tfor _, comment := range doc.List {\n\t\tif !strings.HasPrefix(comment.Text, \"//go:\") {\n\t\t\tcontinue\n\t\t}\n\t\tparts := strings.Fields(comment.Text)\n\t\tswitch parts[0] {\n\t\tcase \"//go:extern\":\n\t\t\tinfo.extern = true\n\t\t\tif len(parts) == 2 {\n\t\t\t\tinfo.linkName = parts[1]\n\t\t\t}\n\t\t}\n\t}\n}", "func (s *BasePlSqlParserListener) EnterDeclare_spec(ctx *Declare_specContext) {}", "func (dn *DefinitionName) RequiredImports() []PackageReference {\n\treturn []PackageReference{dn.PackageReference}\n}", "func (p *Parser) parseDecl() {\n\tdefer un(trace(p, \"parseDecl\"))\n\n\ttok := p.tok\n\n\tswitch tok.Token {\n\tcase ItemLowerIdent:\n\t\tp.parseBuiltinCombinatorDecl()\n\tcase ItemUpperIdent:\n\t\tp.parsePartialAppDecl()\n\tcase ItemNew, ItemFinal, ItemEmpty:\n\t\tp.parseFinalDecl()\n\tdefault:\n\t\tp.setErr(fmt.Errorf(\"unexpected token\"))\n\t\treturn\n\t}\n}", "func (c *SimpleConsumer) declare() error {\n\t// declare exchange\n\tfmt.Printf(\"Binding exchange %v\\n\", c.exchange)\n\tif err := c.channel.ExchangeDeclare(\n\t\tc.exchange, // name of the exchange\n\t\tc.exchType, // type\n\t\ttrue, // durable\n\t\tfalse, // delete when complete\n\t\tfalse, // internal\n\t\tfalse, // noWait\n\t\tnil, // arguments\n\t); err != nil {\n\t\treturn fmt.Errorf(\"exchange declare error: %s\", err)\n\t}\n\n\t// declare queue\n\tfmt.Printf(\"Declare queue %v\\n\", c.queue)\n\tqueue, err := c.channel.QueueDeclare(\n\t\tc.queue, // name of the queue\n\t\ttrue, // durable\n\t\tfalse, // delete when unused\n\t\tfalse, // exclusive\n\t\tfalse, // noWait\n\t\tnil, // arguments\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"queue declare error: %s\", err)\n\t}\n\n\t// binding queue\n\tfmt.Printf(\"Binding queue %v to exchange %v\\n\", c.queue, c.exchange)\n\tif err = c.channel.QueueBind(\n\t\tqueue.Name, // name of the queue\n\t\tc.bindingKey, // bindingKey\n\t\tc.exchange, // sourceExchange\n\t\tfalse, // noWait\n\t\tnil, // arguments\n\t); 
err != nil {\n\t\treturn fmt.Errorf(\"queue bind error: %s\", err)\n\t}\n\treturn nil\n}", "func (n ClassNode) Declare(scope *Scope, c *Compiler) value.Value {\n\tstructDefn := types.NewStruct()\n\n\tname := fmt.Sprintf(\"class.%s.%s\", c.Package.NamespaceName, n.Name)\n\tstructDefn.SetName(name)\n\tc.Module.NewType(n.Name, structDefn)\n\tNewTypeDef(n.Name, structDefn, -1).InjectInto(scope)\n\t// structDefn.Opaque = true\n\n\treturn nil\n}", "func (ref *ReferenceDefinition) formatFast(buf *TrackedBuffer) {\n\tbuf.WriteString(\"references \")\n\tref.ReferencedTable.formatFast(buf)\n\tbuf.WriteByte(' ')\n\tref.ReferencedColumns.formatFast(buf)\n\tif ref.Match != DefaultMatch {\n\t\tbuf.WriteString(\" match \")\n\t\tref.Match.formatFast(buf)\n\t}\n\tif ref.OnDelete != DefaultAction {\n\t\tbuf.WriteString(\" on delete \")\n\t\tref.OnDelete.formatFast(buf)\n\t}\n\tif ref.OnUpdate != DefaultAction {\n\t\tbuf.WriteString(\" on update \")\n\t\tref.OnUpdate.formatFast(buf)\n\t}\n}", "func (r *Resolver) Declare(name *Token) {\n\t// don't check global scope.\n\tif r.scopes.Empty() {\n\t\treturn\n\t}\n\n\tscope := r.scopes.Peek()\n\n\tif exists := scope[name.Lexeme]; exists == varDeclared || exists == varDefined {\n\t\tpanic(NewLoxError(name, \"variable redeclared.\"))\n\t}\n\n\tscope[name.Lexeme] = varDeclared\n}", "func visitDefinition(definition string, half, marked *dependencies) (err error) {\n if half.includes(definition) {\n return errCyclicDependency\n } else if !marked.includes(definition) && !half.includes(definition) {\n half.add(definition)\n task := gofer.index(definition)\n\n if nil == task {\n return errUnresolvableDependencies\n }\n\n for _, dependency := range task.Dependencies {\n err = visitDefinition(dependency, half, marked)\n if nil != err {\n return\n }\n }\n\n half.remove(definition)\n marked.add(definition)\n }\n\n return\n}", "func variablesToBashDecls(vars map[string]tftypes.Value) string {\n\tif len(vars) == 0 {\n\t\treturn \"\"\n\t}\n\n\tvar buf 
strings.Builder\n\tnames := make([]string, 0, len(vars))\n\tfor name := range vars {\n\t\tnames = append(names, name)\n\t}\n\tsort.Strings(names)\n\n\tfor _, name := range names {\n\t\tval := vars[name]\n\t\tswitch {\n\t\tcase val.Is(tftypes.String):\n\t\t\tvar s string\n\t\t\tval.As(&s)\n\t\t\tbuf.WriteString(\"declare -r \")\n\t\t\tbuf.WriteString(name)\n\t\t\tbuf.WriteString(\"=\")\n\t\t\tbuf.WriteString(bashQuoteString(s))\n\t\t\tbuf.WriteString(\"\\n\")\n\t\tcase val.Is(tftypes.Number):\n\t\t\tvar f big.Float\n\t\t\tval.As(&f)\n\t\t\t// NOTE: Bash only actually supports integers, so here we're\n\t\t\t// assuming that the configuration decoder already rejected\n\t\t\t// fractional values.\n\t\t\tbuf.WriteString(\"declare -ri \")\n\t\t\tbuf.WriteString(name)\n\t\t\tbuf.WriteString(\"=\")\n\t\t\tbuf.WriteString(f.Text('f', -1))\n\t\t\tbuf.WriteString(\"\\n\")\n\t\tcase val.Is(listOfString):\n\t\t\tvar l []tftypes.Value\n\t\t\tval.As(&l)\n\t\t\tbuf.WriteString(\"declare -ra \")\n\t\t\tbuf.WriteString(name)\n\t\t\tbuf.WriteString(\"=(\")\n\t\t\tfor i, ev := range l {\n\t\t\t\tvar es string\n\t\t\t\tev.As(&es)\n\t\t\t\tif i != 0 {\n\t\t\t\t\tbuf.WriteString(\" \")\n\t\t\t\t}\n\t\t\t\tbuf.WriteString(bashQuoteString(es))\n\t\t\t}\n\t\t\tbuf.WriteString(\")\\n\")\n\t\tcase val.Is(mapOfString):\n\t\t\tvar m map[string]tftypes.Value\n\t\t\tval.As(&m)\n\t\t\tbuf.WriteString(\"declare -rA \")\n\t\t\tbuf.WriteString(name)\n\t\t\tbuf.WriteString(\"=(\")\n\t\t\ti := 0\n\t\t\tfor ek, ev := range m {\n\t\t\t\tvar es string\n\t\t\t\tev.As(&es)\n\t\t\t\tif i != 0 {\n\t\t\t\t\tbuf.WriteString(\" \")\n\t\t\t\t}\n\t\t\t\tbuf.WriteString(\"[\")\n\t\t\t\tbuf.WriteString(bashQuoteString(ek))\n\t\t\t\tbuf.WriteString(\"]=\")\n\t\t\t\tbuf.WriteString(bashQuoteString(es))\n\t\t\t\ti++\n\t\t\t}\n\t\t\tbuf.WriteString(\")\\n\")\n\t\tdefault:\n\t\t\t// Shouldn't get here if config decoding validation is working\n\t\t\tfmt.Fprintf(&buf, \"# ERROR: Don't know how to serialize %q for 
bash\\n\", name)\n\t\t}\n\t}\n\treturn buf.String()\n}", "func Parse(ingress *networking.Ingress) *Dependencies {\n\tsecrets := extractSecrets(ingress)\n\tsecrets = append(secrets, secretsFromAnnotations(ingress)...)\n\n\treturn &Dependencies{\n\t\tServices: extractServices(ingress),\n\t\tEndpoints: extractServices(ingress),\n\t\tSecrets: secrets,\n\t\tConfigmaps: configmapsFromAnnotations(ingress),\n\t\tAnnotations: extractAnnotations(ingress),\n\t}\n}", "func (c *client) GetDeclareCaptures(frontend string, transactionID string) (int64, models.Captures, error) {\n\tp, err := c.GetParser(transactionID)\n\tif err != nil {\n\t\treturn 0, nil, err\n\t}\n\tv, err := c.GetVersion(transactionID)\n\tif err != nil {\n\t\treturn 0, nil, err\n\t}\n\tcaptures, err := ParseDeclareCaptures(frontend, p)\n\tif err != nil {\n\t\treturn v, nil, c.HandleError(\"\", \"frontend\", frontend, \"\", false, err)\n\t}\n\treturn v, captures, nil\n}", "func (h *HRef) Resolve(r *Schema) {\n\th.Order = make([]string, 0)\n\th.Schemas = make(map[string]*Schema)\n\tfor _, v := range href.FindAllString(string(h.href), -1) {\n\t\tu, err := url.QueryUnescape(v[2 : len(v)-2])\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tparts := strings.Split(u, \"/\")\n\t\tname := stringext.DepunctWithInitialLower(fmt.Sprintf(\"%s-%s\", parts[len(parts)-3], parts[len(parts)-1]))\n\t\th.Order = append(h.Order, name)\n\t\th.Schemas[name] = Reference(u).Resolve(r)\n\t}\n}", "func deps(rule *bazel.Rule) map[bazel.Label]bool {\n\tret := make(map[bazel.Label]bool)\n\tfor _, d := range rule.StringListAttr(\"deps\") {\n\t\tif l, err := bazel.ParseRelativeLabel(rule.PkgName, d); err == nil {\n\t\t\tret[l] = true\n\t\t}\n\t}\n\treturn ret\n}", "func additionalPredeclared() []types.Type {\n\treturn []types.Type{\n\t\t// comparable\n\t\ttypes.Universe.Lookup(\"comparable\").Type(),\n\n\t\t// any\n\t\ttypes.Universe.Lookup(\"any\").Type(),\n\t}\n}", "func (s *BaseGraffleParserListener) EnterProcedure_declaration_head(ctx 
*Procedure_declaration_headContext) {\n}", "func Forward(in, out Link) rules.Rule {\n\treturn rules.Rule(fmt.Sprintf(\n\t\t\"-t filter -A fw-interfaces -j ACCEPT -i %v -o %v\",\n\t\tin.Name(), out.Name()))\n}", "func (h *Helper) GetTypeDefs() ([]*TypeDef, map[string]string, error) {\n\tif h.typeDefs != nil {\n\t\treturn h.typeDefs, h.typeImports, nil\n\t}\n\n\ttdefs := []*TypeDef{}\n\t// Map, keyed by package import path, with the values being an alias to use\n\t// for the package\n\ttimports := map[string]string{}\n\t// Map, keyed by original Shape GoTypeElem(), with the values being a\n\t// renamed type name (due to conflicting names)\n\ttrenames := map[string]string{}\n\n\tpayloads := h.getPayloads()\n\n\tfor shapeName, shape := range h.sdkAPI.Shapes {\n\t\tif inStrings(shapeName, payloads) {\n\t\t\t// Payloads are not type defs\n\t\t\tcontinue\n\t\t}\n\t\tif shape.Type != \"structure\" {\n\t\t\tcontinue\n\t\t}\n\t\tif shape.Exception {\n\t\t\t// Neither are exceptions\n\t\t\tcontinue\n\t\t}\n\t\tif h.IsIgnoredShape(shapeName) {\n\t\t\tcontinue\n\t\t}\n\t\ttdefNames := names.New(shapeName)\n\t\tif h.HasConflictingTypeName(shapeName) {\n\t\t\ttdefNames.Camel += ConflictingNameSuffix\n\t\t\ttrenames[shapeName] = tdefNames.Camel\n\t\t}\n\n\t\tattrs := map[string]*Attr{}\n\t\tfor memberName, memberRef := range shape.MemberRefs {\n\t\t\tmemberNames := names.New(memberName)\n\t\t\tmemberShape := memberRef.Shape\n\t\t\tif h.IsIgnoredShape(memberShape.ShapeName) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif !h.IsShapeUsedInCRDs(memberShape.ShapeName) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tgoPkgType := memberRef.Shape.GoTypeWithPkgNameElem()\n\t\t\tif strings.Contains(goPkgType, \".\") {\n\t\t\t\tif strings.HasPrefix(goPkgType, \"[]\") {\n\t\t\t\t\t// For slice types, we just want the element type...\n\t\t\t\t\tgoPkgType = goPkgType[2:]\n\t\t\t\t}\n\t\t\t\tif strings.HasPrefix(goPkgType, \"map[\") {\n\t\t\t\t\tgoPkgType = strings.Split(goPkgType, \"]\")[1]\n\t\t\t\t}\n\t\t\t\tif 
strings.HasPrefix(goPkgType, \"*\") {\n\t\t\t\t\t// For slice and map types, the element type might be a\n\t\t\t\t\t// pointer to a struct...\n\t\t\t\t\tgoPkgType = goPkgType[1:]\n\t\t\t\t}\n\t\t\t\tpkg := strings.Split(goPkgType, \".\")[0]\n\t\t\t\tif pkg != h.sdkAPI.PackageName() {\n\t\t\t\t\t// time.Time needs to be converted to apimachinery/metav1.Time otherwise there is no DeepCopy support\n\t\t\t\t\tif pkg == \"time\" {\n\t\t\t\t\t\ttimports[\"k8s.io/apimachinery/pkg/apis/meta/v1\"] = \"metav1\"\n\t\t\t\t\t} else if pkg == \"aws\" {\n\t\t\t\t\t\t// The \"aws.JSONValue\" type needs to be handled\n\t\t\t\t\t\t// specially.\n\t\t\t\t\t\ttimports[\"github.com/aws/aws-sdk-go/aws\"] = \"\"\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// Shape.GoPTypeWithPkgNameElem() always returns the type\n\t\t\t\t\t\t// as a full package dot-notation name. We only want to add\n\t\t\t\t\t\t// imports for \"normal\" packages\n\t\t\t\t\t\ttimports[pkg] = \"\"\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\t// There are shapes that are called things like DBProxyStatus that are\n\t\t\t// fields in a DBProxy CRD... we need to ensure the type names don't\n\t\t\t// conflict. 
Also, the name of the Go type in the generated code is\n\t\t\t// Camel-cased and normalized, so we use that as the Go type\n\t\t\tgt := memberShape.GoType()\n\t\t\tif memberShape.Type == \"structure\" {\n\t\t\t\ttypeNames := names.New(memberShape.ShapeName)\n\t\t\t\tif h.HasConflictingTypeName(memberShape.ShapeName) {\n\t\t\t\t\ttypeNames.Camel += ConflictingNameSuffix\n\t\t\t\t}\n\t\t\t\tgt = \"*\" + typeNames.Camel\n\t\t\t} else if memberShape.Type == \"list\" {\n\t\t\t\t// If it's a list type, where the element is a structure, we need to\n\t\t\t\t// set the GoType to the cleaned-up Camel-cased name\n\t\t\t\tif memberShape.MemberRef.Shape.Type == \"structure\" {\n\t\t\t\t\telemType := memberShape.MemberRef.Shape.GoTypeElem()\n\t\t\t\t\ttypeNames := names.New(elemType)\n\t\t\t\t\tif h.HasConflictingTypeName(elemType) {\n\t\t\t\t\t\ttypeNames.Camel += ConflictingNameSuffix\n\t\t\t\t\t}\n\t\t\t\t\tgt = \"[]*\" + typeNames.Camel\n\t\t\t\t}\n\t\t\t} else if memberShape.Type == \"map\" {\n\t\t\t\t// If it's a map type, where the value element is a structure,\n\t\t\t\t// we need to set the GoType to the cleaned-up Camel-cased name\n\t\t\t\tif memberShape.ValueRef.Shape.Type == \"structure\" {\n\t\t\t\t\tvalType := memberShape.ValueRef.Shape.GoTypeElem()\n\t\t\t\t\ttypeNames := names.New(valType)\n\t\t\t\t\tif h.HasConflictingTypeName(valType) {\n\t\t\t\t\t\ttypeNames.Camel += ConflictingNameSuffix\n\t\t\t\t\t}\n\t\t\t\t\tgt = \"[]map[string]*\" + typeNames.Camel\n\t\t\t\t}\n\t\t\t} else if memberShape.Type == \"timestamp\" {\n\t\t\t\t// time.Time needs to be converted to apimachinery/metav1.Time\n\t\t\t\t// otherwise there is no DeepCopy support\n\t\t\t\tgt = \"*metav1.Time\"\n\t\t\t}\n\t\t\tattrs[memberName] = NewAttr(memberNames, gt, memberShape)\n\t\t}\n\t\tif len(attrs) == 0 {\n\t\t\t// Just ignore these...\n\t\t\tcontinue\n\t\t}\n\t\ttdefs = append(tdefs, &TypeDef{\n\t\t\tNames: tdefNames,\n\t\t\tAttrs: attrs,\n\t\t})\n\t}\n\tsort.Slice(tdefs, func(i, j int) bool 
{\n\t\treturn tdefs[i].Names.Camel < tdefs[j].Names.Camel\n\t})\n\th.typeDefs = tdefs\n\th.typeImports = timports\n\th.typeRenames = trenames\n\treturn tdefs, timports, nil\n}", "func parsePodPrefixes(clauses []string) (map[string][]string, error) {\n\tpodPrefixes := map[string][]string{}\n\tfor _, p := range clauses {\n\t\tif strings.Contains(p, \":\") {\n\t\t\tss := strings.Split(p, \":\")\n\t\t\tdesc := ss[0]\n\t\t\tps := strings.Split(ss[1], \"|\")\n\t\t\tpodPrefixes[desc] = append(podPrefixes[desc], ps...)\n\t\t} else if strings.Contains(p, \"|\") {\n\t\t\treturn nil, errors.New(\"required-pods must be either <namespace>/<pod-name> or <desc>:<namespace>/<pod-name>|<namespace>/<pod-name>|...\")\n\t\t} else {\n\t\t\tpodPrefixes[p] = []string{p}\n\t\t}\n\t}\n\treturn podPrefixes, nil\n}", "func (p *Parser) fvardec(defs *oop.DefMap, tokens []obj.Token) {\n\t// Name is not defined?\n\tif len(tokens) < 2 {\n\t\tfirst := tokens[0]\n\t\tfract.IPanicC(first.File, first.Line, first.Column+len(first.Val), obj.SyntaxPanic, \"Name is not given!\")\n\t}\n\tinf := varInfo{\n\t\tconstant: tokens[0].Val == \"const\",\n\t\tmut: tokens[0].Val == \"mut\",\n\t}\n\tpre := tokens[1]\n\tif pre.Type == fract.Name {\n\t\tp.varadd(defs, inf, tokens[1:])\n\t} else if pre.Type == fract.Brace && pre.Val == \"(\" {\n\t\ttokens = tokens[2 : len(tokens)-1]\n\t\tlast := 0\n\t\tline := tokens[0].Line\n\t\tbraceCount := 0\n\t\tfor j, tk := range tokens {\n\t\t\tif tk.Type == fract.Brace {\n\t\t\t\tswitch tk.Val {\n\t\t\t\tcase \"{\", \"[\", \"(\":\n\t\t\t\t\tbraceCount++\n\t\t\t\tdefault:\n\t\t\t\t\tbraceCount--\n\t\t\t\t\tline = tk.Line\n\t\t\t\t}\n\t\t\t}\n\t\t\tif braceCount > 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif line < tk.Line {\n\t\t\t\tp.varadd(defs, inf, tokens[last:j])\n\t\t\t\tlast = j\n\t\t\t\tline = tk.Line\n\t\t\t}\n\t\t}\n\t\tif len(tokens) != last {\n\t\t\tp.varadd(defs, inf, tokens[last:])\n\t\t}\n\t} else {\n\t\tfract.IPanic(pre, obj.SyntaxPanic, \"Invalid 
syntax!\")\n\t}\n}", "func (l *Lexer) consumeColonOrDeclare() Token {\n\tt := Token{\n\t\tType: Colon,\n\t\tValue: string(Colon),\n\t\tColumn: l.Column,\n\t\tLine: l.Line,\n\t}\n\n\tl.move()\n\n\t// check if it is a `:=`\n\tif next, _ := l.peek(); next == '=' {\n\t\tt.Type = Declare\n\t\tt.Value = `:=`\n\t\tl.move()\n\t}\n\n\treturn t\n}", "func processMetricDeclarations(metricDeclarations []*MetricDeclaration, metric *pdata.Metric,\n\tlabels map[string]string, rolledUpDimensions [][]string) (dimensions [][]string) {\n\tseen := make(map[string]bool)\n\taddDimSet := func(dimSet []string) {\n\t\tkey := strings.Join(dimSet, \",\")\n\t\t// Only add dimension set if not a duplicate\n\t\tif _, ok := seen[key]; !ok {\n\t\t\tdimensions = append(dimensions, dimSet)\n\t\t\tseen[key] = true\n\t\t}\n\t}\n\t// Extract and append dimensions from metric declarations\n\tfor _, m := range metricDeclarations {\n\t\tif m.Matches(metric) {\n\t\t\textractedDims := m.ExtractDimensions(labels)\n\t\t\tfor _, dimSet := range extractedDims {\n\t\t\t\taddDimSet(dimSet)\n\t\t\t}\n\t\t}\n\t}\n\t// Add on rolled-up dimensions\n\tfor _, dimSet := range rolledUpDimensions {\n\t\tsort.Strings(dimSet)\n\t\taddDimSet(dimSet)\n\t}\n\treturn\n}", "func ResolveDependencies(m meta.RESTMapper, objects []unstructuredv1.Unstructured, uids []types.UID) (NodeMap, error) {\n\treturn resolveDeps(m, objects, uids, true)\n}", "func populatePortForwards(m model.Manifest, pod v1alpha1.Pod) []model.PortForward {\n\tcPorts := store.AllPodContainerPorts(pod)\n\tfwds := m.K8sTarget().PortForwards\n\tforwards := make([]model.PortForward, 0, len(fwds))\n\tfor _, forward := range fwds {\n\t\tif forward.ContainerPort == 0 && len(cPorts) > 0 {\n\t\t\tforward.ContainerPort = int(cPorts[0])\n\t\t\tfor _, cPort := range cPorts {\n\t\t\t\tif int(forward.LocalPort) == int(cPort) {\n\t\t\t\t\tforward.ContainerPort = int(cPort)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif forward.ContainerPort == 0 
{\n\t\t\tforward.ContainerPort = forward.LocalPort\n\t\t}\n\t\tforwards = append(forwards, forward)\n\t}\n\treturn forwards\n}", "func (t *Type) WireArgumentDeclaration(n string) string {\n\tswitch t.WireFamily {\n\tcase FamilyKinds.TrivialCopy:\n\t\treturn t.String() + \" \" + n\n\tcase FamilyKinds.Reference, FamilyKinds.Vector:\n\t\treturn t.String() + \"& \" + n\n\tcase FamilyKinds.String:\n\t\treturn \"const \" + t.String() + \"& \" + n\n\tdefault:\n\t\tpanic(fmt.Sprintf(\"Unknown wire family kind %v\", t.WireFamily))\n\t}\n}", "func CreateForwardAnnotationsMap(splittedAnnotations []string) map[string]string {\n\tforwardAnnotationsMap := make(map[string]string)\n\n\tfor _, annotation := range splittedAnnotations {\n\t\tparsedAnnotation := strings.SplitN(annotation, \":\", 2)\n\t\tif len(parsedAnnotation) != 2 {\n\t\t\tlogrus.Warningf(\"Wrong annotation provided to forward to ingress : %v\", annotation)\n\t\t} else {\n\t\t\tforwardAnnotationsMap[parsedAnnotation[0]] = strings.Trim(parsedAnnotation[1], \" \")\n\t\t}\n\t}\n\n\treturn forwardAnnotationsMap\n}", "func parseGenDecl(bci *BenchClientInfo, decl *ast.GenDecl) {\n\tif decl.Tok != token.TYPE {\n\t\treturn\n\t}\n\n\tfor _, s := range decl.Specs {\n\t\tts := s.(*ast.TypeSpec)\n\t\tiface, ok := ts.Type.(*ast.InterfaceType)\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\n\t\tifaceName := ts.Name.String()\n\t\tif !strings.HasSuffix(ifaceName, \"Client\") {\n\t\t\tcontinue\n\t\t}\n\t\tif strings.Index(ifaceName, \"Service_\") > 0 {\n\t\t\t// TODO: support streaming RPC\n\t\t\tcontinue\n\t\t}\n\t\tbci.Services[ifaceName] = &service{RPCs: make(map[string]rpc)}\n\n\t\tfor _, m := range iface.Methods.List {\n\t\t\tmethodName := m.Names[0].String()\n\t\t\tmt := m.Type.(*ast.FuncType)\n\n\t\t\tvar streaming bool\n\t\t\tfor _, r := range mt.Results.List {\n\t\t\t\tif strings.Index(types.ExprString(r.Type), \"Service_\") > 0 {\n\t\t\t\t\t// TODO: support streaming RPC\n\t\t\t\t\tstreaming = 
true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tswitch {\n\t\t\tcase mt.Params.NumFields() == unaryArgs && !streaming:\n\t\t\t\targName := types.ExprString(mt.Params.List[1].Type)[1:]\n\t\t\t\tbci.Services[ifaceName].RPCs[methodName] = rpc{Unary: true, In: argName}\n\t\t\tdefault:\n\t\t\t\t// TODO: support streaming RPC\n\t\t\t\tbci.Services[ifaceName].RPCs[methodName] = rpc{Stream: true}\n\t\t\t}\n\t\t}\n\t}\n}", "func lexDirective(fin *bufio.Reader) (e DocumentElement, err error) {\n\tr := '\\000'\n\tr, _, err = fin.ReadRune()\n\tif r != '@' {\n\t\terr = errors.New(\"Missing '@' in directive\")\n\t}\n\tif err != nil {\n\t\treturn\n\t}\n\n\tname := \"\"\n\tname, err = readWord(fin)\n\tif err != nil {\n\t\treturn\n\t}\n\n\targDirectives := map[string]bool{\n\t\t\"chapter\": true,\n\t\t\"part\": true,\n\t\t\"prologue\": true,\n\t\t\"note\": true,\n\t}\n\n\tif name == \"scene\" {\n\t\te = SceneBreak(true)\n\t\treturn\n\t} else if _, ok := argDirectives[name]; !ok {\n\t\terr = errors.New(\"Invalid directive\")\n\t\treturn\n\t}\n\n\trawArg := []rune{}\n\tfor {\n\t\tr, _, err = fin.ReadRune()\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t\tif r == '\\n' {\n\t\t\tbreak\n\t\t}\n\t\trawArg = append(rawArg, r)\n\t}\n\targ := strings.TrimSpace(string(rawArg))\n\n\tif name == \"chapter\" {\n\t\te = ChapterBreak(arg)\n\t} else if name == \"part\" {\n\t\te = PartBreak(arg)\n\t} else if name == \"prologue\" {\n\t\te = PrologueBreak(arg)\n\t}\n\n\treturn\n}", "func (def *Definition) Fieldnames() []string {\n\ttypeList := make([]string, 0)\n\tt := TraverserMethods{EnterFunction: func(adaType IAdaType, parentType IAdaType, level int, x interface{}) error {\n\t\ttypeList = append(typeList, adaType.Name())\n\t\treturn nil\n\t}}\n\n\t_ = def.TraverseTypes(t, true, typeList)\n\treturn typeList\n}", "func importOrder(deps map[string][]string) ([]string, error) {\n\t// add all nodes and edges\n\tvar remainingNodes = map[string]struct{}{}\n\tvar graph = map[edge]struct{}{}\n\tfor to, 
froms := range deps {\n\t\tremainingNodes[to] = struct{}{}\n\t\tfor _, from := range froms {\n\t\t\tremainingNodes[from] = struct{}{}\n\t\t\tgraph[edge{from: from, to: to}] = struct{}{}\n\t\t}\n\t}\n\n\t// find initial nodes without any dependencies\n\tsorted := findAndRemoveNodesWithoutDependencies(remainingNodes, graph)\n\tfor i := 0; i < len(sorted); i++ {\n\t\tnode := sorted[i]\n\t\tremoveEdgesFrom(node, graph)\n\t\tsorted = append(sorted, findAndRemoveNodesWithoutDependencies(remainingNodes, graph)...)\n\t}\n\tif len(remainingNodes) > 0 {\n\t\treturn nil, fmt.Errorf(\"cycle: remaining nodes: %#v, remaining edges: %#v\", remainingNodes, graph)\n\t}\n\t//for _, n := range sorted {\n\t//\tfmt.Println(\"topological order\", n)\n\t//}\n\treturn sorted, nil\n}", "func buildSynonyms(a *Parser) map[string]string {\n\tsynonyms := make(map[string]string)\n\tfor _, n := range a.seq {\n\t\tp := a.params[n]\n\t\tif n == p.name {\n\t\t\tif len(n) == 0 {\n\t\t\t\tsynonyms[n] = \"(nameless)\"\n\t\t\t} else {\n\t\t\t\tsynonyms[n] = n\n\t\t\t}\n\t\t} else {\n\t\t\tsynonyms[p.name] += \", \" + n\n\t\t}\n\t}\n\treturn synonyms\n}", "func (s *BasePlSqlParserListener) EnterPragma_elements(ctx *Pragma_elementsContext) {}", "func (parser *Parser) var_declar() (*Declaration, error) {\n\tparser.trace(\"VAR_DECLAR\")\n\tdefer parser.untrace()\n\tdataType, err, found := parser.match(fxsymbols.DataType)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif !found {\n\t\treturn nil, ErrNoMatch\n\t}\n\tid, err, found := parser.match(fxsymbols.Id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif !found {\n\t\treturn nil, parser.Errorf(ErrNoId)\n\t}\n\t_, err, found = parser.match(fxsymbols.Scol)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif !found {\n\t\treturn nil, parser.Errorf(ErrNoScol)\n\t}\n\tif parser.symbEnvs.GetSymb(id.Lex) != nil {\n\t\treturn nil, fxsymbols.ErrVarExists\n\t}\n\tparser.symbEnvs.PutVar(id)\n\treturn NewDeclaration(id.Lex, fxlex.DataTypeConst(dataType.Val)), 
nil\n}", "func (parser *Parser) moreParams() ([]*Declaration, error) {\n\tparser.trace(\"MORE_PARAMS\")\n\tdefer parser.untrace()\n\t_, _, found := parser.match(fxsymbols.Coma)\n\n\tif !found {\n\t\treturn nil, nil\n\t}\n\tparam, err := parser.parameter()\n\tif err != nil {\n\t\tif err == ErrNoMatch {\n\t\t\terr = parser.Errorf(ErrNoParam)\n\t\t}\n\t\treturn nil, err\n\t}\n\tparams, err := parser.moreParams()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn append([]*Declaration{param}, params...), nil\n}", "func ParseIncompleteVariableRef(text string) (explode bool, ns string, name string) {\n\treturn parseVariableRef(text, false)\n}", "func (ra *RenameAnalysis) Requires() []string {\n\treturn []string{DependencyBlobCache, DependencyTreeChanges}\n}", "func (info *fileInfo) addVarDecls() {\n\tgen := &ast.GenDecl{\n\t\tTokPos: info.importCPos,\n\t\tTok: token.VAR,\n\t\tLparen: info.importCPos,\n\t\tRparen: info.importCPos,\n\t}\n\tnames := make([]string, 0, len(info.globals))\n\tfor name := range info.globals {\n\t\tnames = append(names, name)\n\t}\n\tsort.Strings(names)\n\tfor _, name := range names {\n\t\tglobal := info.globals[name]\n\t\tobj := &ast.Object{\n\t\t\tKind: ast.Typ,\n\t\t\tName: \"C.\" + name,\n\t\t}\n\t\tvalueSpec := &ast.ValueSpec{\n\t\t\tNames: []*ast.Ident{&ast.Ident{\n\t\t\t\tNamePos: info.importCPos,\n\t\t\t\tName: \"C.\" + name,\n\t\t\t\tObj: obj,\n\t\t\t}},\n\t\t\tType: global.typeExpr,\n\t\t}\n\t\tobj.Decl = valueSpec\n\t\tgen.Specs = append(gen.Specs, valueSpec)\n\t}\n\tinfo.Decls = append(info.Decls, gen)\n}", "func (self *EasyHandler) Namespaces() []string {\n\treturn []string{\"000001\",\"000002\"};\n}", "func Parse(description string) (deps []Dep) {\n\tfor _, footerValue := range footer.ParseMessage(description)[cqDependKey] {\n\t\tfor _, v := range strings.Split(footerValue, \",\") {\n\t\t\tif dep, err := parseSingleDep(v); err == nil {\n\t\t\t\tdeps = append(deps, dep)\n\t\t\t}\n\t\t}\n\t}\n\tif len(deps) <= 1 {\n\t\treturn 
deps\n\t}\n\tsort.Slice(deps, func(i, j int) bool { return deps[i].cmp(deps[j]) == 1 })\n\t// Remove duplicates. We don't use the map in the first place, because\n\t// duplicates are highly unlikely in practice and sorting is nice for\n\t// determinism.\n\tl := 0\n\tfor i := 1; i < len(deps); i++ {\n\t\tif d := deps[i]; d.cmp(deps[l]) != 0 {\n\t\t\tl += 1\n\t\t\tdeps[l] = d\n\t\t}\n\t}\n\treturn deps[:l+1]\n}", "func ParseFQName(fqNameString string) []string {\n\tif fqNameString == \"\" {\n\t\treturn nil\n\t}\n\treturn strings.Split(fqNameString, \":\")\n}", "func getFieldList(p *program.Program, f *ast.FunctionDecl, fieldTypes []string) (\n\t_ *goast.FieldList, err error) {\n\tdefer func() {\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(\"error in function field list. err = %v\", err)\n\t\t}\n\t}()\n\tr := []*goast.Field{}\n\tfor i := range fieldTypes {\n\t\tif len(f.Children()) <= i {\n\t\t\terr = fmt.Errorf(\"not correct type/children: %d, %d\",\n\t\t\t\tlen(f.Children()), len(fieldTypes))\n\t\t\treturn\n\t\t}\n\t\tn := f.Children()[i]\n\t\tif v, ok := n.(*ast.ParmVarDecl); ok {\n\t\t\tt, err := types.ResolveType(p, fieldTypes[i])\n\t\t\tif err != nil {\n\t\t\t\terr = fmt.Errorf(\"FieldList type: %s. 
%v\", fieldTypes[i], err)\n\t\t\t\tp.AddMessage(p.GenerateWarningMessage(err, f))\n\t\t\t\terr = nil // ignore error\n\t\t\t\tt = \"C4GO_UNDEFINE_TYPE\"\n\t\t\t}\n\n\t\t\tif t == \"\" {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tr = append(r, &goast.Field{\n\t\t\t\tNames: []*goast.Ident{util.NewIdent(v.Name)},\n\t\t\t\tType: goast.NewIdent(t),\n\t\t\t})\n\t\t}\n\t}\n\n\t// for function argument: ...\n\tif strings.Contains(f.Type, \"...\") {\n\t\tr = append(r, &goast.Field{\n\t\t\tNames: []*goast.Ident{util.NewIdent(\"c4goArgs\")},\n\t\t\tType: &goast.Ellipsis{\n\t\t\t\tEllipsis: 1,\n\t\t\t\tElt: &goast.InterfaceType{\n\t\t\t\t\tInterface: 1,\n\t\t\t\t\tMethods: &goast.FieldList{\n\t\t\t\t\t\tOpening: 1,\n\t\t\t\t\t},\n\t\t\t\t\tIncomplete: false,\n\t\t\t\t},\n\t\t\t},\n\t\t})\n\t}\n\n\treturn &goast.FieldList{\n\t\tList: r,\n\t}, nil\n}", "func (resource *ResourceType) AsDeclarations(codeGenerationContext *CodeGenerationContext, declContext DeclarationContext) []dst.Decl {\n\t/*\n\t\tstart off with:\n\t\t\tmetav1.TypeMeta `json:\",inline\"`\n\t\t\tmetav1.ObjectMeta `json:\"metadata,omitempty\"`\n\n\t\tthen the Spec/Status properties\n\t*/\n\tvar fields []*dst.Field\n\tfor _, property := range resource.EmbeddedProperties() {\n\t\tf := property.AsField(codeGenerationContext)\n\t\tif f != nil {\n\t\t\tfields = append(fields, f)\n\t\t}\n\t}\n\n\tfor _, property := range resource.Properties().AsSlice() {\n\t\tf := property.AsField(codeGenerationContext)\n\t\tif f != nil {\n\t\t\tfields = append(fields, f)\n\t\t}\n\t}\n\n\tif len(fields) > 0 {\n\t\t// A Before:EmptyLine decoration on the first field looks odd, so we force it to Before:NewLine\n\t\t// This makes the output look nicer 🙂\n\t\tfields[0].Decs.Before = dst.NewLine\n\t}\n\n\tresourceTypeSpec := &dst.TypeSpec{\n\t\tName: dst.NewIdent(declContext.Name.Name()),\n\t\tType: &dst.StructType{\n\t\t\tFields: &dst.FieldList{List: fields},\n\t\t},\n\t}\n\n\tvar comments dst.Decorations\n\n\t// Add required RBAC annotations, only 
on storage version\n\tif resource.isStorageVersion {\n\t\tgroup := declContext.Name.PackageReference().Group()\n\t\tgroup = strings.ToLower(group + GroupSuffix)\n\t\tresourceName := strings.ToLower(declContext.Name.Plural().Name())\n\n\t\tastbuilder.AddComment(&comments, fmt.Sprintf(\"// +kubebuilder:rbac:groups=%s,resources=%s,verbs=get;list;watch;create;update;patch;delete\", group, resourceName))\n\t\tastbuilder.AddComment(&comments, fmt.Sprintf(\"// +kubebuilder:rbac:groups=%s,resources={%s/status,%s/finalizers},verbs=get;update;patch\", group, resourceName, resourceName))\n\n\t\t// This newline is REQUIRED for controller-gen to realize these comments are here. Without it, they are silently ignored, see:\n\t\t// https://github.com/kubernetes-sigs/controller-tools/issues/436\n\t\tcomments = append(comments, \"\\n\")\n\t}\n\n\tastbuilder.AddComment(&comments, \"// +kubebuilder:object:root=true\")\n\tif resource.status != nil {\n\t\tastbuilder.AddComment(&comments, \"// +kubebuilder:subresource:status\")\n\t}\n\n\tif resource.isStorageVersion {\n\t\tastbuilder.AddComment(&comments, \"// +kubebuilder:storageversion\")\n\t}\n\n\t// Add any custom kubebuilder annotations\n\tif len(resource.annotations) > 0 {\n\t\tastbuilder.AddComments(&comments, resource.annotations)\n\t}\n\n\tastbuilder.AddUnwrappedComments(&comments, declContext.Description)\n\tAddValidationComments(&comments, declContext.Validations)\n\n\tresourceDeclaration := &dst.GenDecl{\n\t\tTok: token.TYPE,\n\t\tSpecs: []dst.Spec{resourceTypeSpec},\n\t\tDecs: dst.GenDeclDecorations{\n\t\t\tNodeDecs: dst.NodeDecs{\n\t\t\t\tBefore: dst.EmptyLine,\n\t\t\t\tAfter: dst.EmptyLine,\n\t\t\t\tStart: comments,\n\t\t\t},\n\t\t},\n\t}\n\n\tvar declarations []dst.Decl\n\tdeclarations = append(declarations, resourceDeclaration)\n\tdeclarations = append(declarations, resource.InterfaceImplementer.AsDeclarations(codeGenerationContext, declContext.Name, nil)...)\n\tdeclarations = append(declarations, 
resource.generateMethodDecls(codeGenerationContext, declContext.Name)...)\n\tdeclarations = append(declarations, resource.resourceListTypeDecls(codeGenerationContext, declContext.Name, declContext.Description)...)\n\n\treturn declarations\n}", "func (p FloatFormalParam) Declaration() string {\n\t// This is a declaration for formal params (we need the * for pointer)\n\treturn p.Type + \" \" + p.Reference()\n}", "func (p *parser) declaration() Node {\n\tv := p.vr()\n\top := p.expect(TokenAsgn)\n\tb := p.expression()\n\treturn newBinary(op, v, b)\n}", "func Dependencies(g *Graph) (pacman.Packages, aur.Packages, []string) {\n\trps := make(pacman.Packages, 0)\n\taps := make(aur.Packages, 0)\n\tups := make([]string, 0)\n\n\tnames := make(map[string]bool)\n\tnodes := AllNodesBottomUp(g)\n\tfor _, vn := range nodes {\n\t\tn := vn.(*Node)\n\t\tif names[n.PkgName()] {\n\t\t\tcontinue\n\t\t}\n\n\t\tnames[n.PkgName()] = true\n\t\tswitch p := n.AnyPackage.(type) {\n\t\tcase *aur.Package:\n\t\t\taps = append(aps, p)\n\t\tcase *pacman.Package:\n\t\t\tif p.Origin == pacman.UnknownOrigin {\n\t\t\t\tups = append(ups, p.Name)\n\t\t\t} else {\n\t\t\t\trps = append(rps, p)\n\t\t\t}\n\t\tdefault:\n\t\t\tpanic(\"unexpected type of package in graph\")\n\t\t}\n\t}\n\treturn rps, aps, ups\n}", "func HandleForwardProto(bot *Bot, data []byte) (ack []byte, err error) {\n\treturn\n}", "func (d *RouteDescriptor) Dependencies(key string, route *l3.Route) []kvs.Dependency {\n\tvar dependencies []kvs.Dependency\n\t// the outgoing interface must exist and be UP\n\tif route.OutgoingInterface != \"\" {\n\t\tdependencies = append(dependencies, kvs.Dependency{\n\t\t\tLabel: routeOutInterfaceDep,\n\t\t\tKey: interfaces.InterfaceKey(route.OutgoingInterface),\n\t\t})\n\t}\n\n\t// non-zero VRFs\n\tvar protocol l3.VrfTable_Protocol\n\t_, isIPv6, _ := addrs.ParseIPWithPrefix(route.DstNetwork)\n\tif isIPv6 {\n\t\tprotocol = l3.VrfTable_IPV6\n\t}\n\tif route.VrfId != 0 {\n\t\tdependencies = append(dependencies, 
kvs.Dependency{\n\t\t\tLabel: vrfTableDep,\n\t\t\tKey: l3.VrfTableKey(route.VrfId, protocol),\n\t\t})\n\t}\n\tif route.Type == l3.Route_INTER_VRF && route.ViaVrfId != 0 {\n\t\tdependencies = append(dependencies, kvs.Dependency{\n\t\t\tLabel: viaVrfTableDep,\n\t\t\tKey: l3.VrfTableKey(route.ViaVrfId, protocol),\n\t\t})\n\t}\n\n\t// if destination network is netalloc reference, then the address must be allocated first\n\tallocDep, hasAllocDep := d.addrAlloc.GetAddressAllocDep(route.DstNetwork,\n\t\t\"\", \"dst_network-\")\n\tif hasAllocDep {\n\t\tdependencies = append(dependencies, allocDep)\n\t}\n\t// if GW is netalloc reference, then the address must be allocated first\n\tallocDep, hasAllocDep = d.addrAlloc.GetAddressAllocDep(route.NextHopAddr,\n\t\troute.OutgoingInterface, \"gw_addr-\")\n\tif hasAllocDep {\n\t\tdependencies = append(dependencies, allocDep)\n\t}\n\n\t// TODO: perhaps check GW routability\n\treturn dependencies\n}", "func resolve(file *ast.File, scopes map[ast.Node]*Scope) error {\n\t// TODO: Verify that type keywords cannot be redeclared.\n\n\t// Pre-pass, add keyword types and universe scope.\n\tuniverse := NewScope(nil)\n\tcharIdent := &ast.Ident{NamePos: universePos, Name: \"char\"}\n\tcharDecl := &ast.TypeDef{DeclType: charIdent, TypeName: charIdent, Val: &types.Basic{Kind: types.Char}}\n\tcharIdent.Decl = charDecl\n\tintIdent := &ast.Ident{NamePos: universePos, Name: \"int\"}\n\tintDecl := &ast.TypeDef{DeclType: intIdent, TypeName: intIdent, Val: &types.Basic{Kind: types.Int}}\n\tintIdent.Decl = intDecl\n\tvoidIdent := &ast.Ident{NamePos: universePos, Name: \"void\"}\n\tvoidDecl := &ast.TypeDef{DeclType: voidIdent, TypeName: voidIdent, Val: &types.Basic{Kind: types.Void}}\n\tvoidIdent.Decl = voidDecl\n\tuniverseDecls := []*ast.TypeDef{\n\t\tcharDecl,\n\t\tintDecl,\n\t\tvoidDecl,\n\t}\n\tfor _, decl := range universeDecls {\n\t\tif err := universe.Insert(decl); err != nil {\n\t\t\treturn errutil.Err(err)\n\t\t}\n\t}\n\n\t// First pass, add 
global declarations to file scope.\n\tfileScope := NewScope(universe)\n\tscopes[file] = fileScope\n\tfileScope.IsDef = func(decl ast.Decl) bool {\n\t\t// Consider variable declarations as tentative definitions; i.e. return\n\t\t// false, unless variable definition.\n\t\treturn decl.Value() != nil\n\t}\n\tfor _, decl := range file.Decls {\n\t\tif err := fileScope.Insert(decl); err != nil {\n\t\t\treturn errutil.Err(err)\n\t\t}\n\t}\n\n\t// skip specifies that the block statement body of a function declaration\n\t// should skip creating a nested scope, as it has already been created by its\n\t// function declaration, so that function parameters are placed within the\n\t// correct scope.\n\tskip := false\n\n\t// scope specifies the current lexical scope.\n\tscope := fileScope\n\n\t// resolve performs identifier resolution, mapping identifiers to the\n\t// corresponding declarations of the closest lexical scope.\n\tresolve := func(n ast.Node) error {\n\t\tswitch n := n.(type) {\n\t\tcase ast.Decl:\n\t\t\t// Insert declaration into the scope if not already added by the\n\t\t\t// file scope pre-pass.\n\t\t\tif scope != fileScope {\n\t\t\t\tif err := scope.Insert(n); err != nil {\n\t\t\t\t\treturn errutil.Err(err)\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Create nested scope for function definitions.\n\t\t\tif fn, ok := n.(*ast.FuncDecl); ok {\n\t\t\t\tif astutil.IsDef(fn) {\n\t\t\t\t\tskip = true\n\t\t\t\t}\n\t\t\t\tscope = NewScope(scope)\n\t\t\t\tscopes[fn] = scope\n\t\t\t}\n\t\tcase *ast.BlockStmt:\n\t\t\tif !skip {\n\t\t\t\tscope = NewScope(scope)\n\t\t\t\tscopes[n] = scope\n\t\t\t}\n\t\t\tskip = false\n\t\tcase *ast.Ident:\n\t\t\tdecl, ok := scope.Lookup(n.Name)\n\t\t\tif !ok {\n\t\t\t\treturn errors.Newf(n.Start(), \"undeclared identifier %q\", n)\n\t\t\t}\n\t\t\tn.Decl = decl\n\t\t}\n\t\treturn nil\n\t}\n\n\t// after reverts to the outer scope after traversing block statements.\n\tafter := func(n ast.Node) error {\n\t\tif _, ok := n.(*ast.BlockStmt); ok {\n\t\t\tscope = 
scope.Outer\n\t\t} else if fn, ok := n.(*ast.FuncDecl); ok && !astutil.IsDef(fn) {\n\t\t\tscope = scope.Outer\n\t\t}\n\t\treturn nil\n\t}\n\n\t// Walk the AST of the given file to resolve identifiers.\n\tif err := astutil.WalkBeforeAfter(file, resolve, after); err != nil {\n\t\treturn errutil.Err(err)\n\t}\n\n\treturn nil\n}", "func (this *RequireParser) Parse() (sh header.Header, ParseException error) {\n\trequireList := header.NewRequireList()\n\n\tvar ch byte\n\tlexer := this.GetLexer()\n\tthis.HeaderName(TokenTypes_REQUIRE)\n\n\tfor ch, _ = lexer.LookAheadK(0); ch != '\\n'; ch, _ = lexer.LookAheadK(0) {\n\t\tr := header.NewRequire()\n\t\tr.SetHeaderName(core.SIPHeaderNames_REQUIRE)\n\n\t\t// Parsing the option tag\n\t\tlexer.Match(TokenTypes_ID)\n\t\ttoken := lexer.GetNextToken()\n\t\tr.SetOptionTag(token.GetTokenValue())\n\t\tlexer.SPorHT()\n\n\t\trequireList.PushBack(r)\n\n\t\tfor ch, _ = lexer.LookAheadK(0); ch == ','; ch, _ = lexer.LookAheadK(0) {\n\t\t\tlexer.Match(',')\n\t\t\tlexer.SPorHT()\n\n\t\t\tr = header.NewRequire()\n\n\t\t\t// Parsing the option tag\n\t\t\tlexer.Match(TokenTypes_ID)\n\t\t\ttoken = lexer.GetNextToken()\n\t\t\tr.SetOptionTag(token.GetTokenValue())\n\t\t\tlexer.SPorHT()\n\n\t\t\trequireList.PushBack(r)\n\t\t}\n\n\t}\n\n\treturn requireList, nil\n}", "func (p *Parser) varsdec(tokens []obj.Token) {\n\t// Name is not defined?\n\tif len(tokens) < 2 {\n\t\tfirst := tokens[0]\n\t\tfract.IPanicC(first.File, first.Line, first.Column+len(first.Val), obj.SyntaxPanic, \"Name is not given!\")\n\t}\n\tif tokens[0].Type != fract.Name {\n\t\tfract.IPanic(tokens[0], obj.SyntaxPanic, \"Invalid syntax!\")\n\t}\n\tvar inf varInfo\n\tinf.shortDeclaration = true\n\tp.varadd(&p.defs, inf, tokens)\n}", "func (d *ABFToInterfaceDescriptor) Dependencies(key string, emptyVal proto.Message) []api.Dependency {\n\t_, ifName, _ := vpp_abf.ParseToInterfaceKey(key)\n\treturn []api.Dependency{\n\t\t{\n\t\t\tLabel: interfaceDep,\n\t\t\tKey: 
vpp_interfaces.InterfaceKey(ifName),\n\t\t},\n\t}\n}", "func (s *schg) findReferences(schema map[string]interface{}) []string {\n\tvar refs []string\n\tfor name, cont := range schema {\n\t\tswitch reflect.ValueOf(cont).Kind() {\n\t\tcase reflect.Map:\n\t\t\trefs = append(refs, s.findReferences(cont.(map[string]interface{}))...)\n\t\tcase reflect.String:\n\t\t\tif name == `$ref` {\n\t\t\t\ttoks := strings.Split(cont.(string), `/`)\n\t\t\t\tif len(toks) == 3 && toks[0] == `#` && toks[1] == `definitions` {\n\t\t\t\t\trefs = append(refs, toks[2])\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn refs\n}", "func printTypedNames(w io.Writer, prefix string, ns []TypedEntry) {\n\tif len(ns) == 0 {\n\t\treturn\n\t}\n\ttprev := typeString(ns[0].Types)\n\tsep := prefix\n\tfor _, n := range ns {\n\t\ttcur := typeString(n.Types)\n\t\tif tcur != tprev {\n\t\t\tif tprev == \"\" {\n\t\t\t\t// Should be impossible.\n\t\t\t\tpanic(n.Location.String() + \": untyped declarations in the middle of a typed list\")\n\t\t\t}\n\t\t\tfmt.Fprintf(w, \" - %s\", tprev)\n\t\t\ttprev = tcur\n\t\t\tsep = prefix\n\t\t\tif sep == \"\" {\n\t\t\t\tsep = \" \"\n\t\t\t}\n\t\t}\n\t\tfmt.Fprintf(w, \"%s%s\", sep, n.Str)\n\t\tsep = \" \"\n\t}\n\tif tprev != \"\" {\n\t\tfmt.Fprintf(w, \" - %s\", tprev)\n\t}\n}", "func DeclEnd() VERTEXELEMENT {\n\treturn VERTEXELEMENT{0xFF, 0, DECLTYPE_UNUSED, 0, 0, 0}\n}", "func (g *irgen) decls(res *ir.Nodes, decls []syntax.Decl) {\n\tfor _, decl := range decls {\n\t\tswitch decl := decl.(type) {\n\t\tcase *syntax.ConstDecl:\n\t\t\tg.constDecl(res, decl)\n\t\tcase *syntax.FuncDecl:\n\t\t\tg.funcDecl(res, decl)\n\t\tcase *syntax.TypeDecl:\n\t\t\tif ir.CurFunc == nil {\n\t\t\t\tcontinue // already handled in irgen.generate\n\t\t\t}\n\t\t\tg.typeDecl(res, decl)\n\t\tcase *syntax.VarDecl:\n\t\t\tg.varDecl(res, decl)\n\t\tdefault:\n\t\t\tg.unhandled(\"declaration\", decl)\n\t\t}\n\t}\n}", "func (s *BasecluListener) EnterDecl_list(ctx *Decl_listContext) {}", "func (s 
*BasePlSqlParserListener) ExitSeq_of_declare_specs(ctx *Seq_of_declare_specsContext) {}", "func (f *File) genDecl(node ast.Node) bool {\n\tdecl, ok := node.(*ast.GenDecl)\n\tif !ok || decl.Tok != token.TYPE { // We only care about Type declarations.\n\t\treturn true\n\t}\n\t// The name of the type of the constants we are declaring.\n\t// Can change if this is a multi-element declaration.\n\ttyp := \"\"\n\t// Loop over the elements of the declaration. Each element is a ValueSpec:\n\t// a list of names possibly followed by a type, possibly followed by values.\n\t// If the type and value are both missing, we carry down the type (and value,\n\t// but the \"go/types\" package takes care of that).\n\tfor _, spec := range decl.Specs {\n\t\ttspec := spec.(*ast.TypeSpec) // Guaranteed to succeed as this is TYPE.\n\t\tif tspec.Type != nil {\n\t\t\t// \"X T\". We have a type. Remember it.\n\t\t\ttyp = tspec.Name.Name\n\t\t}\n\t\tif typ != f.typeName {\n\t\t\t// This is not the type we're looking for.\n\t\t\tcontinue\n\t\t}\n\t\t// We now have a list of names (from one line of source code) all being\n\t\t// declared with the desired type.\n\n\t\tstructType, ok := tspec.Type.(*ast.StructType)\n\t\tif !ok {\n\t\t\t//not a struct type\n\t\t\tcontinue\n\t\t}\n\n\t\ttypesObj, typeObjOk := f.pkg.defs[tspec.Name]\n\t\tif !typeObjOk {\n\t\t\tlog.Fatalf(\"no type info found for struct %s\", typ)\n\t\t}\n\n\t\tfor _, fieldLine := range structType.Fields.List {\n\t\t\tfor _, field := range fieldLine.Names {\n\t\t\t\t//skip struct padding\n\t\t\t\tif field.Name == \"_\" {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tfieldObj, _, _ := types.LookupFieldOrMethod(typesObj.Type(), false, f.pkg.typesPkg, field.Name)\n\n\t\t\t\ttypeStr := fieldObj.Type().String()\n\t\t\t\ttags := parseFieldTags(fieldLine.Tag)\n\n\t\t\t\t//Skip here so we don't include rubbish import lines\n\t\t\t\tif tags[\"exclude_dao\"].Value == \"true\" {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tprocessedTypeStr, importPath := 
processTypeStr(typeStr)\n\t\t\t\t//log.Printf(\"processedTypeStr: %s, importPath: %s\", processedTypeStr, importPath)\n\n\t\t\t\tif importPath != \"\" && !importExists(importPath, f.imports) {\n\n\t\t\t\t\tf.imports = append(f.imports, Import{importPath})\n\n\t\t\t\t}\n\n\t\t\t\tv := Field{\n\t\t\t\t\tName: field.Name,\n\t\t\t\t\tTags: tags,\n\t\t\t\t\tTypeName: processedTypeStr,\n\t\t\t\t}\n\t\t\t\tf.fields = append(f.fields, v)\n\t\t\t}\n\t\t}\n\t}\n\treturn false\n}", "func (p *Parser) parseFinalDecl() {\n\tdefer un(trace(p, \"parseFinalDecl\"))\n\n\tp.next()\n}", "func (parser *Parser) funcDeclar() (*Function, error) {\n\tparser.trace(\"FUNC_DECLAR\")\n\tdefer parser.untrace()\n\t_, err, found := parser.match(fxsymbols.Func)\n\tif err != nil {\n\t\treturn nil, err\n\t\t//return ErrSyntax\n\t}\n\tif !found {\n\t\treturn nil, ErrNoMatch\n\t}\n\ttoken, _, found := parser.match(fxsymbols.Id)\n\tif !found {\n\t\treturn nil, parser.Errorf(ErrNoId)\n\t}\n\tif parser.symbEnvs.GetSymb(token.Lex) != nil {\n\t\treturn nil, fxsymbols.ErrFuncExists\n\t}\n\tparser.symbEnvs.PutFunction(token)\n\tparser.symbEnvs.PushEnv()\n\tdefer parser.symbEnvs.PopEnv()\n\t_, _, found = parser.match(fxsymbols.LeftPar)\n\tif !found {\n\t\treturn nil, parser.Errorf(ErrNoLeftPar)\n\t}\n\tparams, err := parser.parameters()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t_, _, found = parser.match(fxsymbols.RightPar)\n\tif !found {\n\t\treturn nil, parser.Errorf(ErrNoRightPar)\n\t}\n\tbody, err := parser.body()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn NewFunc(token.Lex, params, body), nil\n}", "func (s *BasePlSqlParserListener) EnterMap_order_func_declaration(ctx *Map_order_func_declarationContext) {\n}", "func lexMetadataDirective(\n\tfin *bufio.Reader,\n) (name string, args []string, err error) {\n\terr = eatWhitespace(fin)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tr, _, err := fin.ReadRune()\n\tif r != '@' {\n\t\terr = errors.New(\"Expected directive\")\n\t\treturn\n\t}\n\n\tname, 
err = readWord(fin)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tfor name != \"begin\" && name != \"scene\" {\n\t\terr = eatWhitespace(fin)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\n\t\tr, _, err = fin.ReadRune()\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\n\t\tfin.UnreadRune()\n\t\tif r == '@' {\n\t\t\tbreak\n\t\t}\n\n\t\targ := \"\"\n\t\targ, err = readPlainText(fin)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t\targs = append(args, arg)\n\t}\n\n\treturn\n}", "func _imports(nameAndPaths ...string) *ast.GenDecl {\n\tdecl := &ast.GenDecl{\n\t\tTok: token.IMPORT,\n\t\tSpecs: []ast.Spec{},\n\t}\n\tfor i := 0; i < len(nameAndPaths); i += 2 {\n\t\tspec := &ast.ImportSpec{\n\t\t\tName: _i(nameAndPaths[i]),\n\t\t\tPath: _s(nameAndPaths[i+1]),\n\t\t}\n\t\tdecl.Specs = append(decl.Specs, spec)\n\t}\n\treturn decl\n}", "func (g *Generator) createIndexAndNameDecl(run []Value, typeName string, suffix string) (string, string) {\n\tb := new(bytes.Buffer)\n\tindexes := make([]int, len(run))\n\tfor i := range run {\n\t\tb.WriteString(run[i].nameInfo.trimmedName)\n\t\tindexes[i] = b.Len()\n\t}\n\tnameConst := fmt.Sprintf(\"_%s_name%s = %q\", typeName, suffix, b.String())\n\tnameLen := b.Len()\n\tb.Reset()\n\t_, _ = fmt.Fprintf(b, \"_%s_index%s = [...]uint%d{0, \", typeName, suffix, usize(nameLen))\n\tfor i, v := range indexes {\n\t\tif i > 0 {\n\t\t\t_, _ = fmt.Fprintf(b, \", \")\n\t\t}\n\t\t_, _ = fmt.Fprintf(b, \"%d\", v)\n\t}\n\t_, _ = fmt.Fprintf(b, \"}\")\n\treturn b.String(), nameConst\n}", "func (s *Service) ReferencedIncludes() ([]*Include, error) {\n\tvar err error\n\tincludes := []*Include{}\n\tincludesSet := make(map[string]*Include)\n\n\t// Check extended service.\n\tif s.Extends != \"\" && strings.Contains(s.Extends, \".\") {\n\t\tincludeName := s.Extends[0:strings.Index(s.Extends, \".\")]\n\t\tinclude := s.Frugal.Include(includeName)\n\t\tif include == nil {\n\t\t\treturn nil, fmt.Errorf(\"Service %s extends references invalid include %s\",\n\t\t\t\ts.Name, 
s.Extends)\n\t\t}\n\t\tif _, ok := includesSet[includeName]; !ok {\n\t\t\tincludesSet[includeName] = include\n\t\t\tincludes = append(includes, include)\n\t\t}\n\t}\n\n\t// Check methods.\n\tfor _, method := range s.Methods {\n\t\t// Check arguments.\n\t\tfor _, arg := range method.Arguments {\n\t\t\tincludesSet, includes, err = addInclude(includesSet, includes, arg.Type, s.Frugal)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\t// Check return type.\n\t\tif method.ReturnType != nil {\n\t\t\tincludesSet, includes, err = addInclude(includesSet, includes, method.ReturnType, s.Frugal)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\t// Check exceptions.\n\t\tfor _, exception := range method.Exceptions {\n\t\t\tincludesSet, includes, err = addInclude(includesSet, includes, exception.Type, s.Frugal)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn includes, nil\n}", "func pragmaFields(s string) []string {\n\tvar a []string\n\tinQuote := false\n\tfieldStart := -1 // Set to -1 when looking for start of field.\n\tfor i, c := range s {\n\t\tswitch {\n\t\tcase c == '\"':\n\t\t\tif inQuote {\n\t\t\t\tinQuote = false\n\t\t\t\ta = append(a, s[fieldStart:i+1])\n\t\t\t\tfieldStart = -1\n\t\t\t} else {\n\t\t\t\tinQuote = true\n\t\t\t\tif fieldStart >= 0 {\n\t\t\t\t\ta = append(a, s[fieldStart:i])\n\t\t\t\t}\n\t\t\t\tfieldStart = i\n\t\t\t}\n\t\tcase !inQuote && isSpace(c):\n\t\t\tif fieldStart >= 0 {\n\t\t\t\ta = append(a, s[fieldStart:i])\n\t\t\t\tfieldStart = -1\n\t\t\t}\n\t\tdefault:\n\t\t\tif fieldStart == -1 {\n\t\t\t\tfieldStart = i\n\t\t\t}\n\t\t}\n\t}\n\tif !inQuote && fieldStart >= 0 { // Last field might end at the end of the string.\n\t\ta = append(a, s[fieldStart:])\n\t}\n\treturn a\n}", "func parseAttributes(doc *ast.CommentGroup) []Attribute {\n\tvar attributes []Attribute\n\tif doc == nil {\n\t\treturn attributes\n\t}\n\tfor _, comment := range doc.List {\n\t\tif strings.HasPrefix(comment.Text, \"//extern 
\") {\n\t\t\tnameattr := nameAttribute(strings.TrimSpace(comment.Text[9:]))\n\t\t\tattributes = append(attributes, nameattr)\n\t\t\tcontinue\n\t\t}\n\t\ttext := comment.Text[2:]\n\t\tif strings.HasPrefix(comment.Text, \"/*\") {\n\t\t\ttext = text[:len(text)-2]\n\t\t}\n\t\tattr := parseAttribute(strings.TrimSpace(text))\n\t\tif attr != nil {\n\t\t\tattributes = append(attributes, attr)\n\t\t}\n\t}\n\treturn attributes\n}", "func resolveMessageDependency(msg *desc.MessageDescriptor, dep messageDependency, encountered map[string]bool) {\n\tif encountered[msg.GetFullyQualifiedName()] {\n\t\treturn\n\t}\n\n\tdep[msg.GetFullyQualifiedName()] = msg\n\tfor _, f := range msg.GetFields() {\n\t\tif entity.IsMessageType(f.GetType()) {\n\t\t\tresolveMessageDependency(f.GetMessageType(), dep, encountered)\n\t\t}\n\t}\n}" ]
[ "0.5435614", "0.52448463", "0.5116722", "0.5092922", "0.50283545", "0.49758297", "0.49256507", "0.48094022", "0.4736206", "0.47182396", "0.47119275", "0.47046313", "0.46832284", "0.46707126", "0.4668004", "0.46470323", "0.46359763", "0.46307617", "0.46247268", "0.4615384", "0.4593736", "0.4561329", "0.45344633", "0.45303148", "0.45195314", "0.44555053", "0.444119", "0.44198632", "0.4419734", "0.44129083", "0.44071463", "0.43955427", "0.43452245", "0.43432906", "0.43400645", "0.4318232", "0.43118033", "0.43117827", "0.42997023", "0.42975622", "0.4295926", "0.4284861", "0.42753837", "0.42667258", "0.42511272", "0.4250577", "0.42448318", "0.42415938", "0.42222944", "0.4200206", "0.41980043", "0.4196595", "0.4187164", "0.4186754", "0.41783932", "0.4166035", "0.4150288", "0.4129815", "0.41262355", "0.41246855", "0.4107151", "0.41071498", "0.41032398", "0.4103174", "0.40977645", "0.40943626", "0.40927646", "0.40882948", "0.40873596", "0.40793374", "0.4077704", "0.40776318", "0.40776268", "0.40719214", "0.40635544", "0.40621418", "0.40607086", "0.40590873", "0.40554884", "0.4049623", "0.40474188", "0.40405053", "0.40403214", "0.4031731", "0.4028792", "0.40267012", "0.4026418", "0.402525", "0.40182492", "0.40169162", "0.40147978", "0.40065134", "0.40053883", "0.40033102", "0.39988846", "0.3997909", "0.39919928", "0.39906037", "0.39864898", "0.39796677" ]
0.808147
0
isPureVirtualDefinition ... Returns whether a function is pure virtual.
func isPureVirtualDefinition(line string) bool { line = strings.Replace(line, " ", "", -1) return (strings.Contains(line, "virtual") && strings.Contains(line, "=0;")) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (tp Type) IsVirtualTable() bool {\n\treturn tp == VirtualTable\n}", "func (b *IBFCell) IsPure() bool {\n\treturn (b.Count == 1 || b.Count == -1) && b.HashSum.Uint64() == checkSumHash(b.IDSum.Uint64())\n}", "func (o *os) HasVirtualKeyboard() gdnative.Bool {\n\to.ensureSingleton()\n\t//log.Println(\"Calling _OS.HasVirtualKeyboard()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"_OS\", \"has_virtual_keyboard\")\n\n\t// Call the parent method.\n\t// bool\n\tretPtr := gdnative.NewEmptyBool()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := gdnative.NewBoolFromPointer(retPtr)\n\treturn ret\n}", "func (d *portworx) IsPureFileVolume(volume *torpedovolume.Volume) (bool, error) {\n\tvar proxySpec *api.ProxySpec\n\tvar err error\n\tif proxySpec, err = d.getProxySpecForAVolume(volume); err != nil {\n\t\treturn false, err\n\t}\n\tif proxySpec == nil {\n\t\treturn false, nil\n\t}\n\n\tif proxySpec.ProxyProtocol == api.ProxyProtocol_PROXY_PROTOCOL_PURE_FILE {\n\t\tlog.Debugf(\"Volume [%s] is Pure File volume\", volume.ID)\n\t\treturn true, nil\n\t}\n\n\tlog.Debugf(\"Volume [%s] is not Pure File volume\", volume.ID)\n\treturn false, nil\n}", "func (s *Session) isPureExpr(expr ast.Expr) bool {\n\tif expr == nil {\n\t\treturn true\n\t}\n\n\tswitch expr := expr.(type) {\n\tcase *ast.Ident:\n\t\treturn true\n\tcase *ast.BasicLit:\n\t\treturn true\n\tcase *ast.BinaryExpr:\n\t\treturn s.isPureExpr(expr.X) && s.isPureExpr(expr.Y)\n\tcase *ast.CallExpr:\n\t\ttv := s.TypeInfo.Types[expr.Fun]\n\t\tfor _, arg := range expr.Args {\n\t\t\tif s.isPureExpr(arg) == false {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\n\t\tif tv.IsType() {\n\t\t\treturn true\n\t\t}\n\n\t\tif tv.IsBuiltin() {\n\t\t\tif ident, ok := expr.Fun.(*ast.Ident); ok {\n\t\t\t\tif 
pureBuiltinFuncNames[ident.Name] {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\treturn false\n\tcase *ast.CompositeLit:\n\t\treturn true\n\tcase *ast.FuncLit:\n\t\treturn true\n\tcase *ast.IndexExpr:\n\t\treturn s.isPureExpr(expr.X) && s.isPureExpr(expr.Index)\n\tcase *ast.SelectorExpr:\n\t\treturn s.isPureExpr(expr.X)\n\tcase *ast.SliceExpr:\n\t\treturn s.isPureExpr(expr.Low) && s.isPureExpr(expr.High) && s.isPureExpr(expr.Max)\n\tcase *ast.StarExpr:\n\t\treturn s.isPureExpr(expr.X)\n\tcase *ast.TypeAssertExpr:\n\t\treturn true\n\tcase *ast.UnaryExpr:\n\t\treturn s.isPureExpr(expr.X)\n\tcase *ast.ParenExpr:\n\t\treturn s.isPureExpr(expr.X)\n\n\tcase *ast.InterfaceType:\n\t\treturn true\n\tcase *ast.ArrayType:\n\t\treturn true\n\tcase *ast.ChanType:\n\t\treturn true\n\tcase *ast.KeyValueExpr:\n\t\treturn true\n\tcase *ast.MapType:\n\t\treturn true\n\tcase *ast.StructType:\n\t\treturn true\n\tcase *ast.FuncType:\n\t\treturn true\n\n\tcase *ast.Ellipsis:\n\t\treturn true\n\n\tcase *ast.BadExpr:\n\t\treturn false\n\t}\n\n\treturn false\n}", "func (d *portworx) IsPureVolume(volume *torpedovolume.Volume) (bool, error) {\n\tvar proxySpec *api.ProxySpec\n\tvar err error\n\tif proxySpec, err = d.getProxySpecForAVolume(volume); err != nil {\n\t\treturn false, err\n\t}\n\n\tif proxySpec == nil {\n\t\treturn false, nil\n\t}\n\n\tif proxySpec.ProxyProtocol == api.ProxyProtocol_PROXY_PROTOCOL_PURE_BLOCK || proxySpec.ProxyProtocol == api.ProxyProtocol_PROXY_PROTOCOL_PURE_FILE {\n\t\tlog.Debugf(\"Volume [%s] is Pure volume\", volume.ID)\n\t\treturn true, nil\n\t}\n\n\tlog.Debugf(\"Volume [%s] is not Pure Block volume\", volume.ID)\n\treturn false, nil\n}", "func (r DynamicRole) IsVirtualRole() bool {\n\tswitch r {\n\tcase DynamicRoleVirtualExecutor:\n\t\treturn true\n\tcase DynamicRoleVirtualValidator:\n\t\treturn true\n\t}\n\treturn false\n}", "func (f FooBarProps) IsProps() {}", "func (ExprType) HasMethod(fn string) bool { return boolResult }", "func isDefinition(s 
spec.Schema) bool {\n\treturn len(s.SchemaProps.Ref.GetPointer().String()) > 0\n}", "func (c *Client) isPureRevert(ctx context.Context, change *gerritpb.ChangeInfo) (bool, error) {\n\treq := &gerritpb.GetPureRevertRequest{\n\t\tProject: change.Project,\n\t\tNumber: change.Number,\n\t}\n\n\tres, err := c.gerritClient.GetPureRevert(ctx, req)\n\tif err != nil {\n\t\treturn false, errors.Annotate(err,\n\t\t\t\"error querying Gerrit host %s on whether the change %s~%d is a pure revert\",\n\t\t\tc.host, req.Project, req.Number).Err()\n\t}\n\n\treturn res.IsPureRevert, nil\n}", "func (s *Script) IsContractSig() bool {\n\treturn len(*s) == 1 && (*s)[0] == byte(OPCONTRACT)\n}", "func IsScriptedModeSet() PredicateFunc {\n\treturn func(v *VolumeGetProperty) bool {\n\t\treturn v.IsScriptedMode\n\t}\n}", "func (v *Variant) IsFloating() bool {\n\treturn gobool(C.g_variant_is_floating(v.native()))\n}", "func (p RProc) IsCFunc() bool { return int(C._MRB_PROC_CFUNC_P(p.p)) != 0 }", "func (c cell) IsFormula() bool {\n\t_, ok := c.contentHandler.(*FormulaCol)\n\treturn ok\n}", "func isBound(pfn unsafe.Pointer, fn string) string {\n\tinc := \" \"\n\tif pfn != nil {\n\t\tinc = \"+\"\n\t}\n\treturn fmt.Sprintf(\" [%s] %s\", inc, fn)\n}", "func (fn *Func) IsPublic() bool {\n\treturn isPublicName(fn.Name)\n}", "func (o *VirtualizationBaseHostPciDeviceAllOf) HasFunction() bool {\n\tif o != nil && o.Function != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (d UserData) HasFunction() bool {\n\treturn d.ModelData.Has(models.NewFieldName(\"Function\", \"function\"))\n}", "func (o *StorageVdMemberEpAllOf) HasStorageVirtualDrive() bool {\n\tif o != nil && o.StorageVirtualDrive != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (cv *ConVar) IsFunc() bool {\n\treturn cv.isFunc\n}", "func (state *State) IsFunc(index int) bool { return state.TypeAt(index) == FuncType }", "func CallNonvirtualBooleanMethodA(env *C.JNIEnv, obj C.jobject, clazz C.jclass, methodID C.jmethodID, args 
*C.jvalue) C.jboolean {\n\treturn C._GoJniCallNonvirtualBooleanMethodA(env, obj, clazz, methodID, args)\n}", "func (pmf PMF) IsNormalized() bool {\n\tdelta := math.Abs(1.0 - float64(pmf.Sum()))\n\treturn delta <= maxDelta\n}", "func (o *BlockDeviceMappingVmUpdate) HasVirtualDeviceName() bool {\n\tif o != nil && o.VirtualDeviceName != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (f *Frugal) ContainsFrugalDefinitions() bool {\n\treturn len(f.Scopes)+len(f.Services) > 0\n}", "func (f *Func) IsBuiltin() bool { return f.isBuiltin }", "func (xs *Sheet) IsFormula(row int, col int) int {\n\ttmp, _, _ := xs.xb.lib.NewProc(\"xlSheetIsFormulaW\").\n\t\tCall(xs.self, I(row), I(col))\n\treturn int(tmp)\n}", "func (*OnfTest1Choice_Vehicle) IsYANGGoStruct() {}", "func (o *StorageFlexUtilVirtualDrive) HasVirtualDrive() bool {\n\tif o != nil && o.VirtualDrive != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func FakeBool(v interface{}) bool {\n\tswitch r := v.(type) {\n\tcase float64:\n\t\treturn r != 0\n\tcase string:\n\t\treturn r != \"\"\n\tcase bool:\n\t\treturn r\n\tcase nil:\n\t\treturn false\n\tdefault:\n\t\treturn true\n\t}\n}", "func (o *Vm) HasFlavour() bool {\n\tif o != nil && o.Flavour != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (o *StorageFlexFlashVirtualDrive) HasVirtualDrive() bool {\n\tif o != nil && o.VirtualDrive != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (v *Service) IsSetFunctions() bool {\n\treturn v != nil && v.Functions != nil\n}", "func (ft *FieldType) Hybrid() bool {\n\treturn ft.GetType() == mysql.TypeEnum || ft.GetType() == mysql.TypeBit || ft.GetType() == mysql.TypeSet\n}", "func isFunc(obj reflect.Value) bool {\n\t// Zero value reflected: not a valid function\n\tif obj == (reflect.Value{}) {\n\t\treturn false\n\t}\n\n\tif obj.Type().Kind() != reflect.Func {\n\t\treturn false\n\t}\n\n\treturn true\n}", "func (d Definition) IsFloat() bool {\n\tif k, ok := d.Output.(reflect.Kind); ok {\n\t\tif k == 
reflect.Float32 || k == reflect.Float64 {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func (n *Node) Synthetic() bool {\n\tif n.Sha256sum == nil {\n\t\treturn true\n\t}\n\treturn false\n}", "func IsFunction(data interface{}) bool {\n\treturn typeIs(data, reflect.Func)\n}", "func (*OnfTest1Choice_Vehicle_UnderCarriage) IsYANGGoStruct() {}", "func HasMethod(st interface{}, methodName string) bool {\n\tvalueIface := reflect.ValueOf(st)\n\n\t// Check if the passed interface is a pointer\n\tif valueIface.Type().Kind() != reflect.Ptr {\n\t\t// Create a new type of Iface, so we have a pointer to work with\n\t\tvalueIface = reflect.New(reflect.TypeOf(st))\n\t}\n\n\t// Get the method by name\n\tmethod := valueIface.MethodByName(methodName)\n\treturn method.IsValid()\n}", "func (pr *prepareResult) isProcedureCall() bool { return pr.fc.IsProcedureCall() }", "func (pr *prepareResult) isProcedureCall() bool { return pr.fc.IsProcedureCall() }", "func isFunction(ident string) bool {\n\tif _, ok := functions[ident]; !ok {\n\t\treturn false\n\t}\n\n\treturn true\n}", "func (obj *ObjectBase) IsTransient() bool {\n\treturn obj.clientPtr == nil\n}", "func (*OnfTest1Choice_Vehicle_ElectricMotor) IsYANGGoStruct() {}", "func (o *os) IsUserfsPersistent() gdnative.Bool {\n\to.ensureSingleton()\n\t//log.Println(\"Calling _OS.IsUserfsPersistent()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"_OS\", \"is_userfs_persistent\")\n\n\t// Call the parent method.\n\t// bool\n\tretPtr := gdnative.NewEmptyBool()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := gdnative.NewBoolFromPointer(retPtr)\n\treturn ret\n}", "func (sig *Signature) HasV() bool {\n\treturn sig.hasV\n}", "func HasProcedureToMedicalProcedure() predicate.ProcedureType {\n\treturn 
predicate.ProcedureType(func(s *sql.Selector) {\n\t\tstep := sqlgraph.NewStep(\n\t\t\tsqlgraph.From(Table, FieldID),\n\t\t\tsqlgraph.To(ProcedureToMedicalProcedureTable, FieldID),\n\t\t\tsqlgraph.Edge(sqlgraph.O2M, false, ProcedureToMedicalProcedureTable, ProcedureToMedicalProcedureColumn),\n\t\t)\n\t\tsqlgraph.HasNeighbors(s, step)\n\t})\n}", "func (eClass *eClassImpl) IsAbstract() bool {\n\treturn eClass.isAbstract\n}", "func isRealProc(mountPoint string) (bool, error) {\n\tstat := syscall.Statfs_t{}\n\terr := syscall.Statfs(mountPoint, &stat)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\t// 0x9fa0 is PROC_SUPER_MAGIC: https://elixir.bootlin.com/linux/v6.1/source/include/uapi/linux/magic.h#L87\n\treturn stat.Type == 0x9fa0, nil\n}", "func isVirtualTip(bs *HashSet, futureSet *HashSet, anticone *HashSet, children *HashSet) bool {\n\tfor k := range children.GetMap() {\n\t\tif bs.Has(&k) {\n\t\t\treturn false\n\t\t}\n\t\tif !futureSet.Has(&k) && !anticone.Has(&k) {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func (c *Concurrent) isExecutable(e interface{}) bool {\n\treturn reflect.TypeOf(e).Kind() == reflect.Func\n}", "func (v *Function) IsSetReturnType() bool {\n\treturn v != nil && v.ReturnType != nil\n}", "func IsFunc(v interface{}) bool {\n\tr := elconv.AsValueRef(reflect.ValueOf(v))\n\treturn r.Kind() == reflect.Func\n}", "func IsStandardFunction(name string) bool {\n\treturn fset[name]\n}", "func (i *FuncIterator) IsBound() bool {\n\treturn i.endIndex > 0\n}", "func (n *Node) IsMethod() bool {\n\treturn n.Type.Recv() != nil\n}", "func (a *scriptAddress) Internal() bool {\n\treturn false\n}", "func (f FooBarState) IsState() {}", "func Fexistx(mname string) bool {\n\tif _, err := fmethods[mname]; err {\n\t\treturn true\n\t} else {\n\t\treturn false\n\t}\n}", "func (ob *PyObject) IsCallable() bool {\n\treturn C.PyCallable_Check(ob.rawptr) > 0\n}", "func MrbProcStrictP(p RProc) bool { return int(C._MRB_PROC_STRICT_P(p.p)) != 0 }", "func 
(_FinalizableCrowdsaleImpl *FinalizableCrowdsaleImplCaller) IsFinalized(opts *bind.CallOpts) (bool, error) {\n\tvar (\n\t\tret0 = new(bool)\n\t)\n\tout := ret0\n\terr := _FinalizableCrowdsaleImpl.contract.Call(opts, out, \"isFinalized\")\n\treturn *ret0, err\n}", "func isServiceStubType(t reflect.Type) bool {\n\tif isStructPtr(t) == false {\n\t\treturn false\n\t} else if t.Implements(stubType) == false {\n\t\treturn false\n\t}\n\t// Return success\n\treturn true\n}", "func IsHasMethod(st interface{}, methodName string) bool {\n\treturn HasMethod(st, methodName)\n}", "func (a *accessHelper) isFunction() bool {\n\tfor _, val := range a.functionIndexes {\n\t\tif val == a.index {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}", "func (_Harberger *HarbergerCaller) IsPetrified(opts *bind.CallOpts) (bool, error) {\n\tvar (\n\t\tret0 = new(bool)\n\t)\n\tout := ret0\n\terr := _Harberger.contract.Call(opts, out, \"isPetrified\")\n\treturn *ret0, err\n}", "func (t *Type) IsPrimitive() bool {\n\t_, ok := frugalBaseTypes[t.Name]\n\treturn ok\n}", "func (p *Photon) HasFunction(name string) (bool, int) {\n\tfor i, f := range p.Functions {\n\t\tif f.Name == name {\n\t\t\treturn true, i\n\t\t}\n\t}\n\treturn false, 0\n}", "func (o *V1WorkloadSpec) HasVirtualMachines() bool {\n\tif o != nil && o.VirtualMachines != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (o *ARVRInterface) IsPrimary() gdnative.Bool {\n\t//log.Println(\"Calling ARVRInterface.IsPrimary()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"ARVRInterface\", \"is_primary\")\n\n\t// Call the parent method.\n\t// bool\n\tretPtr := gdnative.NewEmptyBool()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := gdnative.NewBoolFromPointer(retPtr)\n\treturn ret\n}", "func (c 
MethodsCollection) IsPublic() pIsPublic {\n\treturn pIsPublic{\n\t\tMethod: c.MustGet(\"IsPublic\"),\n\t}\n}", "func (me TAttlistAbstractTextNlmCategory) IsMethods() bool { return me.String() == \"METHODS\" }", "func (o *ScalarFormulaRequestAttributes) HasFormulas() bool {\n\treturn o != nil && o.Formulas != nil\n}", "func IsLazy(m protoreflect.Message, fd protoreflect.FieldDescriptor) bool {\n\tvar mi *MessageInfo\n\tvar p pointer\n\tswitch m := m.(type) {\n\tcase *messageState:\n\t\tmi = m.messageInfo()\n\t\tp = m.pointer()\n\tcase *messageReflectWrapper:\n\t\tmi = m.messageInfo()\n\t\tp = m.pointer()\n\tdefault:\n\t\treturn false\n\t}\n\txd, ok := fd.(protoreflect.ExtensionTypeDescriptor)\n\tif !ok {\n\t\treturn false\n\t}\n\txt := xd.Type()\n\text := mi.extensionMap(p)\n\tif ext == nil {\n\t\treturn false\n\t}\n\tf, ok := (*ext)[int32(fd.Number())]\n\tif !ok {\n\t\treturn false\n\t}\n\treturn f.typ == xt && f.lazy != nil && atomic.LoadUint32(&f.lazy.atomicOnce) == 0\n}", "func (state *State) IsGoFunc(index int) bool {\n\tcls, ok := state.get(index).(*Closure)\n\treturn ok && !cls.isLua()\n}", "func (o *RuleMatch) HasMethods() bool {\n\tif o != nil && o.Methods != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (obj *content) IsNormal() bool {\n\treturn obj.normal != nil\n}", "func (o *FileversionFileversion) HasDescription() bool {\n\tif o != nil && o.Description != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (mock *Mockdb) IsContractAddr(addr types.AddressHash) bool {\n\tif mock.GetCodeSize(addr) > 0 {\n\t\treturn true\n\t}\n\treturn false\n}", "func (o *EditorPlugin) HasMainScreen() gdnative.Bool {\n\t//log.Println(\"Calling EditorPlugin.HasMainScreen()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"EditorPlugin\", \"has_main_screen\")\n\n\t// Call the parent method.\n\t// bool\n\tretPtr := 
gdnative.NewEmptyBool()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := gdnative.NewBoolFromPointer(retPtr)\n\treturn ret\n}", "func implementedOutsideGo(obj *types.Func) bool {\n\treturn obj.Type().(*types.Signature).Recv() == nil &&\n\t\t(obj.Scope() != nil && obj.Scope().Pos() == token.NoPos)\n}", "func isPureIPv4Address(ipString string) bool {\n\tfor i := 0; i < len(ipString); i++ {\n\t\tswitch ipString[i] {\n\t\tcase '.':\n\t\t\treturn true\n\t\tcase ':':\n\t\t\treturn false\n\t\t}\n\t}\n\treturn false\n}", "func (obj *Global) IsPersonalMode(ctx context.Context) (bool, error) {\n\tresult := &struct {\n\t\tReturn bool `json:\"qReturn\"`\n\t}{}\n\terr := obj.RPC(ctx, \"IsPersonalMode\", result)\n\treturn result.Return, err\n}", "func IsHasMethod(v interface{}, methodName string) bool {\n\treturn String(methodName).IsInArrayIgnoreCase(GetMethods(v))\n}", "func (o *MicrosoftGraphWorkbookSortField) HasSortOn() bool {\n\tif o != nil && o.SortOn != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (v ValidatePayment) IsFinal() bool {\n\treturn false\n}", "func (o *VirtualizationVmwareVirtualMachineAllOf) HasProtectedVm() bool {\n\tif o != nil && o.ProtectedVm != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (v *Function) IsSetOneWay() bool {\n\treturn v != nil && v.OneWay != nil\n}", "func Fexecm(fname, sname string) bool {\n\texecuted := false\n\tif Fexists(fname, sname) {\n\t\tif Fmember(fframes[fname][sname+\",facets\"], \"ref\") {\n\t\t\tfname2 := fframes[fname][sname+\",ref\"][0]\n\t\t\tif Fmember(fframes[fname][sname+\",facets\"], \"ifref\") {\n\t\t\t\tfmethods[Getval(fframes[fname][sname+\",ifref\"])](fname)\n\t\t\t}\n\t\t\texecuted = Fexecm(fname2, sname)\n\t\t} else {\n\t\t\tif Fmember(fframes[fname][sname+\",facets\"], \"method\") {\n\t\t\t\tif Fmember(fframes[fname][sname+\",facets\"], \"ifexecm\") 
{\n\t\t\t\t\tfmethods[Getval(fframes[fname][sname+\",ifexecm\"])](fname)\n\t\t\t\t}\n\t\t\t\tfmethods[Getval(fframes[fname][sname+\",method\"])](fname)\n\t\t\t\texecuted = true\n\t\t\t}\n\t\t}\n\t}\n\treturn executed\n}", "func isExportedType(d DataType) bool {\n\tif d == FUNCPOINTER {\n\t\treturn false\n\t}\n\treturn true\n}", "func (p Property) IsInherited() bool {\n\treturn p == \"inherited\"\n}", "func (f Unstructured) Primitive() bool {\n\treturn false\n}", "func genAbstractFunc(fn *obj.LSym) {\n\tifn := Ctxt.DwFixups.GetPrecursorFunc(fn)\n\tif ifn == nil {\n\t\tCtxt.Diag(\"failed to locate precursor fn for %v\", fn)\n\t\treturn\n\t}\n\tif Debug_gendwarfinl != 0 {\n\t\tCtxt.Logf(\"DwarfAbstractFunc(%v)\\n\", fn.Name)\n\t}\n\tCtxt.DwarfAbstractFunc(ifn, fn, myimportpath)\n}", "func FnCall() bool {\n\treturn fnCall\n}", "func (tp Type) IsNormalTable() bool {\n\treturn tp == NormalTable\n}", "func (gdt *Vector3) IsNormalized() Bool {\n\targ0 := gdt.getBase()\n\n\tret := C.go_godot_vector3_is_normalized(GDNative.api, arg0)\n\n\treturn Bool(ret)\n}", "func (f Unstructured) IsUndefined() bool {\n\treturn f.fields == nil\n}" ]
[ "0.5725554", "0.56338274", "0.52214384", "0.52026784", "0.5111255", "0.4992483", "0.49429774", "0.48418856", "0.47752437", "0.46805793", "0.46731955", "0.4661175", "0.46523353", "0.46268716", "0.45680475", "0.45335254", "0.45149964", "0.44207093", "0.44183782", "0.44175613", "0.4407443", "0.43974498", "0.4390009", "0.43832728", "0.4382168", "0.43456352", "0.43402055", "0.43392083", "0.43192253", "0.43085852", "0.43065727", "0.4291891", "0.42833865", "0.42738488", "0.42667085", "0.42607573", "0.42600766", "0.42591885", "0.4252782", "0.42430806", "0.42424256", "0.42373472", "0.42342144", "0.42342144", "0.42285097", "0.42255697", "0.4221669", "0.4211416", "0.420607", "0.4192034", "0.41876787", "0.41872486", "0.41713458", "0.41707134", "0.4163064", "0.41315112", "0.41168666", "0.4113844", "0.4105645", "0.41054428", "0.41022852", "0.40975818", "0.40853685", "0.40838265", "0.4075637", "0.4071948", "0.40649983", "0.40646538", "0.40424544", "0.40414414", "0.40396303", "0.40304747", "0.403013", "0.40288645", "0.40274513", "0.40265265", "0.40241277", "0.40187976", "0.40135995", "0.39997464", "0.39988777", "0.39957678", "0.3980908", "0.3977887", "0.39751616", "0.39746946", "0.3972616", "0.39705303", "0.39659476", "0.396556", "0.39594787", "0.39583796", "0.39564136", "0.39545396", "0.39530727", "0.39516088", "0.39505363", "0.39493686", "0.3944017", "0.39436954" ]
0.7474248
0
Fields ... The fields within templates to be replaced.
func (i Interface) Fields() map[string]string { fields := make(map[string]string) fields["{{Interface.Name}}"] = i.Name fields["{{FileName}}"] = i.FileName fields["{{Interface.DefineName}}"] = i.DefineName return fields }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (c *TargetingTemplatesUpdateCall) Fields(s ...googleapi.Field) *TargetingTemplatesUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (Template) Fields() []ent.Field {\n\treturn []ent.Field{\n\t\tfield.String(\"name\").MaxLen(255).MinLen(1),\n\t\tfield.Enum(\"selectionType\").Values(\"INTERNAL_DB\", \"MTURK_QUALIFICATIONS\"),\n\t\tfield.Int(\"participantCount\").NonNegative().Default(0),\n\t\tfield.JSON(\"internalCriteria\", &model.InternalCriteria{}),\n\t\tfield.JSON(\"mturkCriteria\", &model.MTurkCriteria{}),\n\t\tfield.Bool(\"adult\").Default(false),\n\t\tfield.Bool(\"sandbox\").Default(false),\n\t}\n}", "func (o TagTemplateOutput) Fields() pulumi.StringMapOutput {\n\treturn o.ApplyT(func(v *TagTemplate) pulumi.StringMapOutput { return v.Fields }).(pulumi.StringMapOutput)\n}", "func ParseTemplateFields(fileId string) []string {\n\t// create call to export file from drive\n\tfileCall := driveSrv.Files.Export(fileId, \"text/html\")\n\t// execute download of file call\n\tres, err := fileCall.Download()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// defer closing response body\n\tdefer res.Body.Close()\n\t// read the res.Body\n\tbody, err := ioutil.ReadAll(res.Body)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// convert body []bytes to a string to be used in regex test\n\tstrBody := string(body)\n\t// init parsed fields slice to push fields into\n\tparsedFields := []string{}\n\t// create regex test\n\trgx := regexp.MustCompile(`{{([A-Za-z_]*)}}`)\n\t// search document for regex matches\n\trs := rgx.FindAllStringSubmatch(strBody, -1)\n\t// push parsed fields to a parsed fields []string\n\tfor _, v := range rs {\n\t\tparsedFields = utils.AppendIfMissing(parsedFields, v[1])\n\t}\n\t// return parsed fields\n\treturn parsedFields\n}", "func (c *TargetingTemplatesPatchCall) Fields(s ...googleapi.Field) *TargetingTemplatesPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func 
(c *TargetingTemplatesInsertCall) Fields(s ...googleapi.Field) *TargetingTemplatesInsertCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *TargetingTemplatesGetCall) Fields(s ...googleapi.Field) *TargetingTemplatesGetCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *TargetingTemplatesListCall) Fields(s ...googleapi.Field) *TargetingTemplatesListCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (f Fields) Fields() map[string]interface{} {\n\treturn f\n}", "func (t *Type) SetFields(fields []*Field)", "func (c *AdvertiserLandingPagesUpdateCall) Fields(s ...googleapi.Field) *AdvertiserLandingPagesUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *PagesUpdateCall) Fields(s ...googleapi.Field) *PagesUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *AdvertiserLandingPagesPatchCall) Fields(s ...googleapi.Field) *AdvertiserLandingPagesPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func genFields(fs []*ast.FieldDefinition) *jen.Statement {\n\t//\n\t// Generate config for fields\n\t//\n\t// == Example input SDL\n\t//\n\t// type Dog {\n\t// name(style: NameComponentsStyle = SHORT): String!\n\t// givenName: String @deprecated(reason: \"No longer supported; please use name field.\")\n\t// }\n\t//\n\t// == Example output\n\t//\n\t// graphql.Fields{\n\t// \"name\": graphql.Field{ ... },\n\t// \"givenName\": graphql.Field{ ... 
},\n\t// }\n\t//\n\treturn jen.Qual(defsPkg, \"Fields\").Values(jen.DictFunc(func(d jen.Dict) {\n\t\tfor _, f := range fs {\n\t\t\td[jen.Lit(f.Name.Value)] = genField(f)\n\t\t}\n\t}))\n}", "func (c *AccountsContainersMacrosUpdateCall) Fields(s ...googleapi.Field) *AccountsContainersMacrosUpdateCall {\n\tc.params_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (r *Search) Fields(fields ...types.FieldAndFormat) *Search {\n\tr.req.Fields = fields\n\n\treturn r\n}", "func (u UpdateUser) Fields() map[string]interface{} {\n\treturn map[string]interface{}{\n\t\t\"email\": u.Email,\n\t\t\"public_id\": u.PublicID,\n\t}\n}", "func (f Unstructured) IterateFields(iteratee func(fieldName string, fieldFragment Fragment)) {\n\tif f.fields != nil {\n\t\tfor fieldName, fieldFragment := range f.fields {\n\t\t\titeratee(fieldName, fieldFragment)\n\t\t}\n\t}\n}", "func (e MaintemplateComponentValidationError) Field() string { return e.field }", "func Fields(fields map[string]interface{}) Option {\n\treturn func(sh *Hook) {\n\t\t// TODO(glib): yuck!\n\t\tf := make(map[string]interface{})\n\t\tfor k, v := range fields {\n\t\t\tf[k] = v\n\t\t}\n\t\tsh.fields = f\n\t}\n}", "func (s *IndicesClearCacheService) Fields(fields string) *IndicesClearCacheService {\n\ts.fields = fields\n\treturn s\n}", "func (s ReplaceDefaultPolicyVersionParams) MarshalFields(e protocol.FieldEncoder) error {\n\tif len(s.TemplateName) > 0 {\n\t\tv := s.TemplateName\n\n\t\tmetadata := protocol.Metadata{}\n\t\te.SetValue(protocol.BodyTarget, \"templateName\", protocol.QuotedValue{ValueMarshaler: v}, metadata)\n\t}\n\treturn nil\n}", "func (BaseMixin) Fields() []ent.Field {\n\treturn []ent.Field{\n\t\tfield.UUID(\"id\", uuid.UUID{}).Default(uuid.New),\n\t\tfield.String(\"some_field\"),\n\t}\n}", "func (s *GetTaskTemplateOutput) SetFields(v []*TaskTemplateField) *GetTaskTemplateOutput {\n\ts.Fields = v\n\treturn s\n}", "func (c *CreativeFieldsUpdateCall) Fields(s ...googleapi.Field) 
*CreativeFieldsUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *AccountsContainersVariablesUpdateCall) Fields(s ...googleapi.Field) *AccountsContainersVariablesUpdateCall {\n\tc.params_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *AppsModulesPatchCall) Fields(s ...googleapi.Field) *AppsModulesPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *ProjectsLocationsInstancesRenameCall) Fields(s ...googleapi.Field) *ProjectsLocationsInstancesRenameCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *FeaturetilesGetCall) Fields(s ...googleapi.Field) *FeaturetilesGetCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (b *MessagesGetChatPreviewBuilder) Fields(v []string) *MessagesGetChatPreviewBuilder {\n\tb.Params[\"fields\"] = v\n\treturn b\n}", "func insertFields(tableName string, fields string) string {\n\treturn strings.Replace(fields, \"\\\"\"+tableName+\"\\\".\", \"\", -1)\n}", "func (s UpdateTemplateInput) MarshalFields(e protocol.FieldEncoder) error {\n\te.SetValue(protocol.HeaderTarget, \"Content-Type\", protocol.StringValue(\"application/json\"), protocol.Metadata{})\n\n\tif s.Name != nil {\n\t\tv := *s.Name\n\n\t\tmetadata := protocol.Metadata{}\n\t\te.SetValue(protocol.BodyTarget, \"Name\", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)\n\t}\n\tif s.SourceEntity != nil {\n\t\tv := s.SourceEntity\n\n\t\tmetadata := protocol.Metadata{}\n\t\te.SetFields(protocol.BodyTarget, \"SourceEntity\", v, metadata)\n\t}\n\tif s.VersionDescription != nil {\n\t\tv := *s.VersionDescription\n\n\t\tmetadata := protocol.Metadata{}\n\t\te.SetValue(protocol.BodyTarget, \"VersionDescription\", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)\n\t}\n\tif s.AwsAccountId != nil {\n\t\tv := *s.AwsAccountId\n\n\t\tmetadata := 
protocol.Metadata{}\n\t\te.SetValue(protocol.PathTarget, \"AwsAccountId\", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)\n\t}\n\tif s.TemplateId != nil {\n\t\tv := *s.TemplateId\n\n\t\tmetadata := protocol.Metadata{}\n\t\te.SetValue(protocol.PathTarget, \"TemplateId\", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)\n\t}\n\treturn nil\n}", "func SetFields(c context.Context, fields Fields) context.Context {\n\treturn context.WithValue(c, fieldsKey, fields)\n}", "func (o LookupTaskTemplateResultOutput) Fields() TaskTemplateFieldArrayOutput {\n\treturn o.ApplyT(func(v LookupTaskTemplateResult) []TaskTemplateField { return v.Fields }).(TaskTemplateFieldArrayOutput)\n}", "func (c *PagesCreateCall) Fields(s ...googleapi.Field) *PagesCreateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func Fields(fields ...string) Parameter {\n\treturn func(pa Parameterizable) {\n\t\tpa.SetParameter(\"fields\", fields)\n\t}\n}", "func (c *PlacementStrategiesUpdateCall) Fields(s ...googleapi.Field) *PlacementStrategiesUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (s GetModelTemplateOutput) MarshalFields(e protocol.FieldEncoder) error {\n\tif s.Value != nil {\n\t\tv := *s.Value\n\n\t\tmetadata := protocol.Metadata{}\n\t\te.SetValue(protocol.BodyTarget, \"value\", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)\n\t}\n\treturn nil\n}", "func (s GetModelTemplateOutput) MarshalFields(e protocol.FieldEncoder) error {\n\tif s.Value != nil {\n\t\tv := *s.Value\n\n\t\tmetadata := protocol.Metadata{}\n\t\te.SetValue(protocol.BodyTarget, \"value\", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)\n\t}\n\treturn nil\n}", "func (pf *PathFilter) Fields(contentPath string) map[string]string {\n\tout := make(map[string]string)\n\n\tmatch := pf.re.FindStringSubmatch(contentPath)\n\tnames := 
pf.re.SubexpNames()\n\n\tdefer func() {\n\t\tif r := recover(); r != nil {\n\t\t\tfmt.Println(\"path filter fields is crashing the app\")\n\t\t\tspew.Dump(contentPath, pf.filterPath, pf.re.String(), match, names)\n\t\t\tpanic(\"i'm done\")\n\t\t}\n\t}()\n\n\tfor i, name := range names {\n\t\tif i != 0 && name != \"\" {\n\t\t\tout[name] = match[i]\n\t\t}\n\t}\n\n\treturn out\n}", "func (tm *TableModel) FormFields() A.MSX {\n\tcache := FORM_CACHE.Get(tm.CacheName)\n\tjson_arr, ok := cache.(A.MSX)\n\tif !ok {\n\t\tjson_arr = A.MSX{}\n\t\tfor _, field := range tm.Fields {\n\t\t\tswitch field.Key {\n\t\t\tcase `id`, `is_deleted`, `modified_at`, `unique_id`, `created_at`, `updated_at`, `deleted_at`, `restored_at`:\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif field.Hide || field.HtmlHide || field.FormHide {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tjson_obj := M.SX{\n\t\t\t\t`key`: field.Key,\n\t\t\t\t`label`: S.Coalesce(field.FormLabel, field.HtmlLabel, field.Label),\n\t\t\t\t`type`: S.Coalesce(field.FormType, field.HtmlLabel, field.Type),\n\t\t\t\t`tooltip`: S.Coalesce(field.FormTooltip, field.FormLabel, field.HtmlLabel, field.Label),\n\t\t\t}\n\t\t\tif field.Required {\n\t\t\t\tjson_obj[`required`] = true\n\t\t\t}\n\t\t\tif field.HtmlSubType != `` {\n\t\t\t\tjson_obj[`sub_type`] = field.HtmlSubType\n\t\t\t}\n\t\t\tjson_arr = append(json_arr, json_obj)\n\t\t\tif DEBUG {\n\t\t\t\tL.Print(`Creating FORM_CACHE.Select`, tm.CacheName)\n\t\t\t}\n\t\t}\n\t\tFORM_CACHE.Set(tm.CacheName, json_arr)\n\t}\n\treturn json_arr\n}", "func (c *AdvertiserLandingPagesInsertCall) Fields(s ...googleapi.Field) *AdvertiserLandingPagesInsertCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *ProjectsNotesPatchCall) Fields(s ...googleapi.Field) *ProjectsNotesPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (d *Database) UpdateFields(tx *sqlx.Tx, params interface{}, fields ...string) (err error) {\n\tvar upd string\n\tquery 
:= \"UPDATE \\\"database\\\" SET \" + upd + \" WHERE id = :id\"\n\t_, err = tx.NamedExec(query, params)\n\treturn\n}", "func (c *PlacementsGeneratetagsCall) Fields(s ...googleapi.Field) *PlacementsGeneratetagsCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (b *GroupsGetBuilder) Fields(v []string) *GroupsGetBuilder {\n\tb.Params[\"fields\"] = v\n\treturn b\n}", "func Fields() error {\n\treturn devtools.GenerateFieldsYAML()\n}", "func (c *SitesPatchCall) Fields(s ...googleapi.Field) *SitesPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *CreativeFieldsPatchCall) Fields(s ...googleapi.Field) *CreativeFieldsPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *ContentInspectCall) Fields(s ...googleapi.Field) *ContentInspectCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func display_input_fields() {\r\n\tfmt.Println(\"<FIELDS>\")\r\n\r\n\tfor key, value := range fields_map {\r\n\t\tfmt.Printf(\"<NAME>%s</NAME><VALUE>%s</VALUE>\\n\",key,value)\r\n\t}\r\n\r\n\tfmt.Println(\"</FIELDS>\")\r\n\r\n}", "func (c *ProjectsNotesCreateCall) Fields(s ...googleapi.Field) *ProjectsNotesCreateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (pt *ProcessTemplate) GetEditableFields(fields []string) []string {\n\teditableFields := pt.ExtractEditableFields()\n\tresult := make([]string, 0)\n\tfor _, field := range fields {\n\t\tif util.InStrArr(editableFields, field) {\n\t\t\tresult = append(result, field)\n\t\t}\n\t}\n\treturn result\n}", "func (b *GroupsGetByIDBuilder) Fields(v []string) *GroupsGetByIDBuilder {\n\tb.Params[\"fields\"] = v\n\treturn b\n}", "func (RepairInvoice) Fields() []ent.Field {\n\treturn []ent.Field{\n\t\tfield.String(\"Rename\").Unique(),\n\t}\n}", "func (c *SubscriptionsInsertCall) Fields(s ...googleapi.Field) *SubscriptionsInsertCall 
{\n\tc.params_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *PlacementsUpdateCall) Fields(s ...googleapi.Field) *PlacementsUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *PlacementsPatchCall) Fields(s ...googleapi.Field) *PlacementsPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *FloodlightActivitiesGeneratetagCall) Fields(s ...googleapi.Field) *FloodlightActivitiesGeneratetagCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *SitesUpdateCall) Fields(s ...googleapi.Field) *SitesUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *ProjectsInstancesUpdateCall) Fields(s ...googleapi.Field) *ProjectsInstancesUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (s *UpdateTaskTemplateInput) SetFields(v []*TaskTemplateField) *UpdateTaskTemplateInput {\n\ts.Fields = v\n\treturn s\n}", "func WithFields(ctx context.Context, fields Fields) context.Context {\n\tnewFields := mergeFields(ContextFields(ctx), fields)\n\treturn context.WithValue(ctx, fieldsKey, newFields)\n}", "func (m *RawMapper) Fields() []string { return append(m.selectFields, m.selectTags...) 
}", "func (c *CreativeFieldValuesUpdateCall) Fields(s ...googleapi.Field) *CreativeFieldValuesUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *AdsPatchCall) Fields(s ...googleapi.Field) *AdsPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (s *UpdateTaskTemplateOutput) SetFields(v []*TaskTemplateField) *UpdateTaskTemplateOutput {\n\ts.Fields = v\n\treturn s\n}", "func (c *CreativeFieldValuesPatchCall) Fields(s ...googleapi.Field) *CreativeFieldValuesPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (o *CreateDashboardRenderTaskParams) SetFields(fields *string) {\n\to.Fields = fields\n}", "func fieldliterals(fields ...string) []Field {\n\tfs := make([]Field, len(fields))\n\tfor i := range fields {\n\t\tfs[i] = FieldLiteral(fields[i])\n\t}\n\treturn fs\n}", "func (c *SubaccountsPatchCall) Fields(s ...googleapi.Field) *SubaccountsPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *ProjectsServiceAccountsPatchCall) Fields(s ...googleapi.Field) *ProjectsServiceAccountsPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (l *Logger) DebugfFields(f Fields, format string, v ...interface{}) {\n\tif l.debugEnabled {\n\t\tl.NewEntry().WithFields(f).Printf(DEBUG, format, v...)\n\t}\n}", "func (c *DynamicTargetingKeysInsertCall) Fields(s ...googleapi.Field) *DynamicTargetingKeysInsertCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *ContentRedactCall) Fields(s ...googleapi.Field) *ContentRedactCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *SettingsDatasourcesPatchCall) Fields(s ...googleapi.Field) *SettingsDatasourcesPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *PlacementsInsertCall) Fields(s 
...googleapi.Field) *PlacementsInsertCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func DebugWithFields(l interface{}, f Fields) {\n\tentry := logrus.WithFields(logrus.Fields(f))\n\tentry.Data[\"file\"] = fileInfo(2)\n\tentry.Debug(l)\n}", "func (*Base) ObjectFields(p ASTPass, fields *ast.ObjectFields, ctx Context) {\n\tfor i := range *fields {\n\t\tp.ObjectField(p, &(*fields)[i], ctx)\n\t}\n}", "func (c *IndexingDatasourcesItemsPollCall) Fields(s ...googleapi.Field) *IndexingDatasourcesItemsPollCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *IndexingDatasourcesItemsPollCall) Fields(s ...googleapi.Field) *IndexingDatasourcesItemsPollCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (Medicalfile) Fields() []ent.Field {\n return []ent.Field{\n\t\tfield.String(\"detail\").NotEmpty(),\n\t\tfield.Time(\"added_time\"),\n }\n}", "func (c *SubscriptionsChangeRenewalSettingsCall) Fields(s ...googleapi.Field) *SubscriptionsChangeRenewalSettingsCall {\n\tc.params_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *SettingsDatasourcesUpdateCall) Fields(s ...googleapi.Field) *SettingsDatasourcesUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *SettingsDatasourcesUpdateCall) Fields(s ...googleapi.Field) *SettingsDatasourcesUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (snowflake) InsertValuesModifier(fields []*Field) string {\n\tvar fmtStr, colSelector string\n\tfor i, fld := range fields {\n\t\tif colSelector != \"\" { \n\t\t\tcolSelector += \",\"\n\t\t}\n\n\t\tif t := fmt.Sprintf(\"%s\\n\",fld.StructField.Struct.Type); strings.Contains(t, \"snowflake\") {\n\t\t\tfmtStr = \"%sparse_json(column%d)\"\n\t\t} else {\n\t\t\tfmtStr = \"%scolumn%d\"\n\t\t}\n\t\tcolSelector = fmt.Sprintf(fmtStr, colSelector, i+1)\n\t}\n\n\treturn 
fmt.Sprintf(\"select %s from\", colSelector)\n}", "func FieldsetTmpl(ctx context.Context, wrapper *FieldsetWrapperType, ctrl page.ControlI, h string, buf *bytes.Buffer) {\n\tvar hasInstructions bool = (ctrl.Instructions() != \"\")\n\tvar hasRow bool\n\n\tctrl.WrapperAttributes().AddClass(\"form-group\")\n\tif wrapper.useTooltips {\n\t\t// bootstrap requires that parent of a tooltipped object has position relative\n\t\tctrl.WrapperAttributes().SetStyle(\"position\", \"relative\")\n\t}\n\tif ctrl.WrapperAttributes().HasClass(\"row\") {\n\t\tctrl.WrapperAttributes().RemoveClass(\"row\")\n\t\thasRow = true\n\t}\n\twrapper.LabelAttributes().AddClass(\"col-form-label\").\n\t\tAddClass(\"pt-0\") // helps with alignment\n\n\tbuf.WriteString(`<fieldset id=\"`)\n\n\tbuf.WriteString(ctrl.ID())\n\n\tbuf.WriteString(`_ctl\" `)\n\n\tbuf.WriteString(ctrl.WrapperAttributes().String())\n\n\tbuf.WriteString(` >\n`)\n\tif hasRow {\n\t\tbuf.WriteString(`<div class=\"row\">`)\n\t}\n\tif ctrl.Label() != \"\" {\n\t\tbuf.WriteString(` <legend id=\"`)\n\n\t\tbuf.WriteString(ctrl.ID())\n\n\t\tbuf.WriteString(`_lbl\" `)\n\n\t\tbuf.WriteString(wrapper.LabelAttributes().String())\n\n\t\tbuf.WriteString(`>`)\n\n\t\tbuf.WriteString(html.EscapeString(ctrl.Label()))\n\n\t\tbuf.WriteString(`</legend>\n`)\n\t}\n\n\tbuf.WriteString(`\n`)\n\n\tbuf.WriteString(grhtml.Indent(h))\n\n\tbuf.WriteString(`\n\n`)\n\tmsg := ctrl.ValidationMessage()\n\tvar class string\n\n\tswitch ctrl.ValidationState() {\n\tcase page.ValidationWaiting:\n\t\tfallthrough // we need to correctly style\n\tcase page.ValidationValid:\n\t\tif msg == \"\" {\n\t\t\tmsg = \"&nbsp;\"\n\t\t} else {\n\t\t\tmsg = html.EscapeString(msg)\n\t\t}\n\t\tif wrapper.useTooltips {\n\t\t\tclass = \"valid-tooltip\"\n\t\t} else {\n\t\t\tclass = \"valid-feedback\"\n\t\t}\n\n\tcase page.ValidationInvalid:\n\t\tif msg == \"\" {\n\t\t\tmsg = \"&nbsp;\"\n\t\t} else {\n\t\t\tmsg = html.EscapeString(msg)\n\t\t}\n\t\tif wrapper.useTooltips {\n\t\t\tclass = 
\"invalid-tooltip\"\n\t\t} else {\n\t\t\tclass = \"invalid-feedback\"\n\t\t}\n\t}\n\tif hasInstructions {\n\n\t\tbuf.WriteString(`<small id=\"`)\n\n\t\tbuf.WriteString(ctrl.ID())\n\n\t\tbuf.WriteString(`_inst\" class=\"form-text\" >`)\n\n\t\tbuf.WriteString(html.EscapeString(ctrl.Instructions()))\n\n\t\tbuf.WriteString(`</small>`)\n\n\t}\n\tif ctrl.ValidationState() != page.ValidationNever {\n\n\t\tbuf.WriteString(`<div id=\"`)\n\n\t\tbuf.WriteString(ctrl.ID())\n\n\t\tbuf.WriteString(`_err\" class=\"`)\n\n\t\tbuf.WriteString(class)\n\n\t\tbuf.WriteString(`\">`)\n\n\t\tbuf.WriteString(msg)\n\n\t\tbuf.WriteString(`</div>`)\n\n\t}\n\n\tbuf.WriteString(`\n`)\n\tif hasRow {\n\t\tbuf.WriteString(`</div>`)\n\t}\n\n\tbuf.WriteString(`\n</fieldset>\n`)\n\n\treturn\n\n}", "func (tm *TableModel) GridFields() A.MSX {\n\tcache := GRID_CACHE.Get(tm.CacheName)\n\tjson_arr, ok := cache.(A.MSX)\n\tif !ok {\n\t\tjson_arr = A.MSX{}\n\t\tfor _, field := range tm.Fields {\n\t\t\tswitch field.Key {\n\t\t\tcase `id`, `is_deleted`, `modified_at`:\n\t\t\t\tcontinue\n\t\t\tcase `created_at`, `updated_at`, `deleted_at`, `restored_at`:\n\t\t\t\tfield.GridType = `datetime`\n\t\t\t}\n\t\t\tif field.Hide || field.HtmlHide || field.GridHide {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tjson_obj := M.SX{\n\t\t\t\t`key`: field.Key,\n\t\t\t\t`label`: S.Coalesce(field.GridLabel, field.HtmlLabel, field.Label),\n\t\t\t\t`type`: S.IfEmpty(field.GridType, field.Type),\n\t\t\t}\n\t\t\tif field.GridFooter == `` {\n\t\t\t\tjson_obj[`footer`] = field.GridFooter\n\t\t\t}\n\t\t\tif field.HtmlSubType != `` {\n\t\t\t\tjson_obj[`sub_type`] = field.HtmlSubType\n\t\t\t}\n\t\t\tjson_arr = append(json_arr, json_obj)\n\t\t\tif DEBUG {\n\t\t\t\tL.Print(`Creating GRID_CACHE.Select`, tm.CacheName)\n\t\t\t}\n\t\t}\n\t\tGRID_CACHE.Set(tm.CacheName, json_arr)\n\t}\n\treturn json_arr\n}", "func (c *ProjectsUpdateCall) Fields(s ...googleapi.Field) *ProjectsUpdateCall {\n\tc.urlParams_.Set(\"fields\", 
googleapi.CombineFields(s))\n\treturn c\n}", "func (c *CreativesUpdateCall) Fields(s ...googleapi.Field) *CreativesUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (Entry) Fields() []ent.Field {\n\treturn []ent.Field{\n\t\tfield.String(\"title\").MinLen(1).Unique(),\n\t\tfield.String(\"body\").MinLen(1),\n\t\tfield.String(\"tag\"),\n\t}\n}", "func (c *ProjectsServiceAccountsUpdateCall) Fields(s ...googleapi.Field) *ProjectsServiceAccountsUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *ProjectsServiceAccountsUpdateCall) Fields(s ...googleapi.Field) *ProjectsServiceAccountsUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (Menu) Fields() []ent.Field {\n\treturn nil\n}", "func (fs *FileStat) GenerateFields() (string, error) {\n\ttb, e := fs.modTime.MarshalBinary()\n\tif e != nil {\n\t\treturn \"\", e\n\t}\n\tcb, e := fs.compressedBytes()\n\tif e != nil {\n\t\treturn \"\", e\n\t}\n\n\tformat := `\"%s\", \"%s\", %d, 0%o, binfs.MustHexDecode(\"%x\"), %t, binfs.MustHexDecode(\"%x\")`\n\treturn fmt.Sprintf(format,\n\t\tfs.path,\n\t\tfs.name,\n\t\tfs.size,\n\t\tfs.mode,\n\t\ttb,\n\t\tfs.isDir,\n\t\tcb,\n\t), nil\n}", "func (c *PlacementStrategiesPatchCall) Fields(s ...googleapi.Field) *PlacementStrategiesPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *AccountsContainersTagsUpdateCall) Fields(s ...googleapi.Field) *AccountsContainersTagsUpdateCall {\n\tc.params_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func RemoveFields(body map[string]interface{}, removeFields []string) map[string]interface{} {\n\tif body != nil {\n\t\tfor _, v := range removeFields {\n\t\t\tdelete(body, v)\n\t\t}\n\t}\n\treturn body\n}", "func (c *CreativeGroupsUpdateCall) Fields(s ...googleapi.Field) *CreativeGroupsUpdateCall {\n\tc.urlParams_.Set(\"fields\", 
googleapi.CombineFields(s))\n\treturn c\n}", "func (c *ProjectsGroupsUpdateCall) Fields(s ...googleapi.Field) *ProjectsGroupsUpdateCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "func (c *AccountsPatchCall) Fields(s ...googleapi.Field) *AccountsPatchCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}" ]
[ "0.6189393", "0.61858684", "0.6121268", "0.60077", "0.5991178", "0.5689581", "0.5688557", "0.5664367", "0.5615151", "0.55025023", "0.5417566", "0.53887886", "0.5387306", "0.53794956", "0.53793156", "0.53708047", "0.53572786", "0.5301624", "0.52675027", "0.52467805", "0.52415836", "0.5238408", "0.5195866", "0.51901734", "0.5173902", "0.51737475", "0.51694727", "0.51592124", "0.5158268", "0.5158151", "0.5156797", "0.5155552", "0.51354736", "0.5126607", "0.5124072", "0.51174587", "0.5113633", "0.5107862", "0.5107862", "0.51076394", "0.51041514", "0.5103231", "0.5100401", "0.5097799", "0.50942683", "0.5093795", "0.50885946", "0.508742", "0.507661", "0.5072315", "0.5068673", "0.5068369", "0.5053381", "0.5052991", "0.5043887", "0.50410604", "0.50351995", "0.50326234", "0.5030638", "0.50290847", "0.50238436", "0.5021912", "0.50188303", "0.5013596", "0.50129926", "0.5012939", "0.5003519", "0.50033855", "0.4999149", "0.4998964", "0.49975857", "0.4996103", "0.4994507", "0.49934447", "0.4989369", "0.4987156", "0.49857804", "0.49815163", "0.49733382", "0.49685416", "0.49685416", "0.49646667", "0.4962467", "0.4962171", "0.4962171", "0.49595785", "0.4952159", "0.4949606", "0.4945515", "0.49440697", "0.49439892", "0.49428734", "0.49428734", "0.49403036", "0.49402332", "0.4939312", "0.4938959", "0.49383086", "0.49359018", "0.49349064", "0.49313727" ]
0.0
-1
templatedTypeSeparators ... Used to expand templated types such as QMap>
func templatedTypeSeparators (r rune) bool { return r == '<' || r == '>' || r == ',' }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func mapKeyType(v interface{}) string {\n\tstr := toString(v)\n\tkey, value, found := stringsCut(str, \",\")\n\tif !found || !strings.HasPrefix(key, \"map<\") || !strings.HasSuffix(value, \">\") {\n\t\tpanic(fmt.Errorf(\"mapKeyValue: expected map<Type1,Type2>, got %v\", str))\n\t}\n\treturn strings.TrimPrefix(key, \"map<\")\n}", "func (tp *Template) Delims(left, right string) *Template {\n\ttp.leftDelim = left\n\ttp.rightDelim = right\n\treturn tp\n}", "func typeString(t []TypeName) (str string) {\n\tswitch len(t) {\n\tcase 0:\n\t\tbreak\n\tcase 1:\n\t\tif t[0].Location.Line == 0 {\n\t\t\t// Use the empty string for undeclared\n\t\t\t// implicit types (such as object).\n\t\t\tbreak\n\t\t}\n\t\tstr = t[0].Str\n\tdefault:\n\t\tstr = \"(either\"\n\t\tfor _, n := range t {\n\t\t\tstr += \" \" + n.Str\n\t\t}\n\t\tstr += \")\"\n\t}\n\treturn\n}", "func printTypedNames(w io.Writer, prefix string, ns []TypedEntry) {\n\tif len(ns) == 0 {\n\t\treturn\n\t}\n\ttprev := typeString(ns[0].Types)\n\tsep := prefix\n\tfor _, n := range ns {\n\t\ttcur := typeString(n.Types)\n\t\tif tcur != tprev {\n\t\t\tif tprev == \"\" {\n\t\t\t\t// Should be impossible.\n\t\t\t\tpanic(n.Location.String() + \": untyped declarations in the middle of a typed list\")\n\t\t\t}\n\t\t\tfmt.Fprintf(w, \" - %s\", tprev)\n\t\t\ttprev = tcur\n\t\t\tsep = prefix\n\t\t\tif sep == \"\" {\n\t\t\t\tsep = \" \"\n\t\t\t}\n\t\t}\n\t\tfmt.Fprintf(w, \"%s%s\", sep, n.Str)\n\t\tsep = \" \"\n\t}\n\tif tprev != \"\" {\n\t\tfmt.Fprintf(w, \" - %s\", tprev)\n\t}\n}", "func mapValueType(v interface{}) string {\n\tstr := toString(v)\n\tkey, value, found := stringsCut(str, \",\")\n\tif !found || !strings.HasPrefix(key, \"map<\") || !strings.HasSuffix(value, \">\") {\n\t\tpanic(fmt.Errorf(\"mapKeyValue: expected map<Type1,Type2>, got %v\", str))\n\t}\n\treturn strings.TrimSuffix(value, \">\")\n}", "func (t PathType) Separator() string {\n\tswitch t {\n\tcase Relative:\n\t\treturn \".\"\n\tcase Absolute:\n\t\treturn 
\"/\"\n\tdefault:\n\t\treturn \"\"\n\t}\n}", "func (gen *jsGenerator) formatType(t *idl.Type) string {\n\tvar s string\n\tms, ok := jsTypes[t.Name]\n\tif !ok {\n\t\tms = t.Name\n\t}\n\tif t.Name == \"list\" {\n\t\ts = fmt.Sprintf(ms, gen.formatType(t.ValueType))\n\t} else if t.Name == \"map\" {\n\t\ts = fmt.Sprintf(ms, jsTypes[t.KeyType.Name], gen.formatType(t.ValueType))\n\t} else if t.IsPrimitive() && t.Name != \"string\" {\n\t\ts = ms + \"?\"\n\t} else if t.IsEnum(gen.tplRootIdl) {\n\t\ts = ms + \"?\"\n\t} else {\n\t\ts = ms\n\t}\n\treturn s\n}", "func QualifiedTypeName(t DataType) string {\n\tswitch t.Kind() {\n\tcase ArrayKind:\n\t\ta := t.(*Array)\n\t\treturn fmt.Sprintf(\"%s<%s>\",\n\t\t\tt.Name(),\n\t\t\tQualifiedTypeName(a.ElemType.Type),\n\t\t)\n\tcase MapKind:\n\t\th := t.(*Map)\n\t\treturn fmt.Sprintf(\"%s<%s, %s>\",\n\t\t\tt.Name(),\n\t\t\tQualifiedTypeName(h.KeyType.Type),\n\t\t\tQualifiedTypeName(h.ElemType.Type),\n\t\t)\n\t}\n\treturn t.Name()\n}", "func typeNames(vars []*types.Var) string {\n\tif len(vars) == 0 {\n\t\treturn \"\"\n\t}\n\tvar buf strings.Builder\n\tfor i, v := range vars {\n\t\tif i != 0 {\n\t\t\tbuf.WriteString(\", \")\n\t\t}\n\t\tbuf.WriteString(nameOf(v.Type()))\n\t}\n\treturn buf.String()\n}", "func typeinfo(list ...interface{}) string {\n\tvar buf strings.Builder\n\tfor i, item := range list {\n\t\tif i > 0 {\n\t\t\tbuf.WriteString(\",\")\n\t\t}\n\t\tbuf.WriteString(fmt.Sprintf(\"%d:%T:%v\", i, item, item))\n\t}\n\treturn buf.String()\n}", "func (o DomainNameEndpointConfigurationOutput) Types() pulumi.StringOutput {\n\treturn o.ApplyT(func(v DomainNameEndpointConfiguration) string { return v.Types }).(pulumi.StringOutput)\n}", "func (e *expression) writeTypeElts(elts []ast.Expr, Lbrace token.Pos) {\n\tfirstPos := e.tr.getLine(Lbrace)\n\tposOldElt := firstPos\n\tposNewElt := 0\n\tuseBracket := false\n\n\tfor i, el := range elts {\n\t\tposNewElt = e.tr.getLine(el.Pos())\n\t\tkv := el.(*ast.KeyValueExpr)\n\t\tkey := 
e.tr.getExpression(kv.Key).String()\n\n\t\tif i == 0 {\n\t\t\tif strings.HasPrefix(key, `\"`) {\n\t\t\t\tuseBracket = true\n\t\t\t} else {\n\t\t\t\tuseBracket = false\n\t\t\t}\n\t\t}\n\t\tif useBracket {\n\t\t\tkey = \"[\" + key + \"]\"\n\t\t} else {\n\t\t\tkey = \".\" + key\n\t\t}\n\n\t\tif i != 0 {\n\t\t\te.WriteString(\",\")\n\t\t}\n\t\tif posNewElt != posOldElt {\n\t\t\te.WriteString(strings.Repeat(NL, posNewElt - posOldElt))\n\t\t\te.WriteString(strings.Repeat(TAB, e.tr.tabLevel))\n\t\t} else { // in the same line\n\t\t\te.WriteString(SP)\n\t\t}\n\n\t\te.WriteString(fmt.Sprintf(\"%s%s=%s\",\n\t\t\te.tr.lastVarName,\n\t\t\tkey + SP,\n\t\t\tSP + e.tr.getExpression(kv.Value).String(),\n\t\t))\n\n\t\tposOldElt = posNewElt\n\t}\n\te.tr.line += posNewElt - firstPos // update the global position\n}", "func (l AuditSubjectTypeList) Join(separator string) string {\n\toutput := \"\"\n\tfor i, t := range l {\n\t\toutput += string(t)\n\t\tif i < len(l)-1 {\n\t\t\toutput += separator\n\t\t}\n\t}\n\treturn output\n}", "func (m *moduleGenerator) inlineTypeName(ctx *openapi.ReferenceContext, propertyName string) string {\n\tresult := strings.Title(propertyName)\n\tif ex, ok := m.inlineTypes[ctx]; ok {\n\t\tfor {\n\t\t\tif !ex.Has(result) {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tresult += strings.Title(propertyName)\n\t\t}\n\t} else {\n\t\tm.inlineTypes[ctx] = codegen.NewStringSet()\n\t}\n\tm.inlineTypes[ctx].Add(result)\n\treturn result\n}", "func lexMapQuotedKey(lx *lexer) stateFn {\r\n\tr := lx.peek()\r\n\tif r == sqStringEnd {\r\n\t\tlx.emit(itemKey)\r\n\t\tlx.next()\r\n\t\treturn lexSkip(lx, lexMapKeyEnd)\r\n\t}\r\n\tlx.next()\r\n\treturn lexMapQuotedKey\r\n}", "func UnorderedMapTypeName(listYANGPath, listFieldName, parentName string, goStructElements map[string]*ygen.ParsedDirectory) (string, string, bool, error) {\n\t// The list itself, since it is a container, has a struct associated with it. 
Retrieve\n\t// this from the set of Directory structs for which code (a Go struct) will be\n\t// generated such that additional details can be used in the code generation.\n\tlistElem, ok := goStructElements[listYANGPath]\n\tif !ok {\n\t\treturn \"\", \"\", false, fmt.Errorf(\"struct for %s did not exist\", listYANGPath)\n\t}\n\n\tvar listType, keyType string\n\tvar isDefinedType bool\n\tswitch len(listElem.ListKeys) {\n\tcase 0:\n\t\treturn \"\", \"\", false, fmt.Errorf(\"list does not contain any keys: %s:\", listElem.Name)\n\tcase 1:\n\t\t// This is a single keyed list, so we can represent it as a map with\n\t\t// a simple Go type as the key. Note that a leaf-list can never be\n\t\t// a key, so we do not need to handle the case whereby we would have to\n\t\t// have a slice which keys the list.\n\t\tfor _, listKey := range listElem.ListKeys {\n\t\t\tlistType = fmt.Sprintf(\"map[%s]*%s\", listKey.LangType.NativeType, listElem.Name)\n\t\t\tkeyType = listKey.LangType.NativeType\n\t\t\tisDefinedType = ygen.IsYgenDefinedGoType(listKey.LangType)\n\t\t}\n\tdefault:\n\t\t// This is a list with multiple keys, so we need to generate a new structure\n\t\t// that represents the list key itself - this struct is described in a\n\t\t// generatedGoMultiKeyListStruct struct, which is then expanded by a template to the struct\n\t\t// definition.\n\t\tlistKeyStructName := fmt.Sprintf(\"%s_Key\", listElem.Name)\n\t\tnames := make(map[string]bool, len(goStructElements))\n\t\tfor _, d := range goStructElements {\n\t\t\tnames[d.Name] = true\n\t\t}\n\t\tif names[listKeyStructName] {\n\t\t\tlistKeyStructName = fmt.Sprintf(\"%s_%s_YANGListKey\", parentName, listFieldName)\n\t\t\tif names[listKeyStructName] {\n\t\t\t\treturn \"\", \"\", false, fmt.Errorf(\"unexpected generated list key name conflict for %s\", listYANGPath)\n\t\t\t}\n\t\t\tnames[listKeyStructName] = true\n\t\t}\n\t\tlistType = fmt.Sprintf(\"map[%s]*%s\", listKeyStructName, listElem.Name)\n\t\tkeyType = 
listKeyStructName\n\t\tisDefinedType = true\n\t}\n\treturn listType, keyType, isDefinedType, nil\n}", "func TemplateType_Values() []string {\n\treturn []string{\n\t\tTemplateTypeEnvironment,\n\t\tTemplateTypeService,\n\t}\n}", "func printTypes(vals []interface{}) string {\n\ts := \"[\"\n\tfor ix, val := range vals {\n\t\tif ix > 0 {\n\t\t\ts += \", \"\n\t\t}\n\t\ts += reflect.TypeOf(val).String()\n\t}\n\treturn s + \"]\"\n}", "func marshalMapInOrder(m map[string]interface{}, t interface{}) (string, error) {\n\ts := \"{\"\n\tv := reflect.ValueOf(t)\n\tfor i := 0; i < v.Type().NumField(); i++ {\n\t\tfield := jsonFieldFromTag(v.Type().Field(i).Tag)\n\t\tif field == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tvalue, err := json.Marshal(m[field])\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\ts += fmt.Sprintf(\"%q:%s,\", field, value)\n\t}\n\ts = s[:len(s)-1]\n\ts += \"}\"\n\tvar buf bytes.Buffer\n\tif err := json.Indent(&buf, []byte(s), \"\", \" \"); err != nil {\n\t\treturn \"\", err\n\t}\n\treturn buf.String(), nil\n}", "func typeName(typ string) string {\n\tif typ[0] == '(' {\n\t\tts := strings.Split(typ[1:len(typ)-1], \",\")\n\t\tif len(ts) != 2 {\n\t\t\tlog.Fatalf(\"Tuple expect 2 arguments\")\n\t\t}\n\t\treturn \"types.NewTuple(\" + typeName(ts[0]) + \", \" + typeName(ts[1]) + \")\"\n\t}\n\tswitch typ {\n\tcase \"Flags\", \"Mem\", \"Void\", \"Int128\":\n\t\treturn \"types.Type\" + typ\n\tdefault:\n\t\treturn \"typ.\" + typ\n\t}\n}", "func (t *Link) PrependType(v interface{}) {\n\tt.typeName = append([]interface{}{v}, t.typeName...)\n\n}", "func (f *FieldsWithValue) Pairs(separator string) []string {\n\tpairs := make([]string, len(f.fields))\n\tfor i, v := range f.fields {\n\t\tpairs[i] = fmt.Sprintf(\"%s%s%s\", v.Name, separator, v.Value)\n\t}\n\treturn pairs\n}", "func isMapType(v interface{}) bool {\n\tkey, value, found := stringsCut(toString(v), \",\")\n\treturn found && strings.HasPrefix(key, \"map<\") && strings.HasSuffix(value, \">\")\n}", "func (t 
*VMIntTPStrStr) JavaType() string {\n\treturn \"Map<Integer,Pair<String,String>>\"\n}", "func makeExtensionTypeName(base string) string {\n\treturn fmt.Sprintf(\"%s%s%s\", base, valSeparator, typeAppendix)\n}", "func (t *VMStrTPStrStr) JavaType() string {\n\treturn \"Map<String,Pair<String,String>>\"\n}", "func (m Map) TypeString() string {\n\t// TODO: this should return map[somepackage.SomeType]somepackage1.SomeType1\n\t// i.e. package name + UnqualifiedName()\n\t// for key and value types.\n\treturn fmt.Sprintf(\"map[%s]%s\", m.Key.TypeString(), m.Value.TypeString())\n}", "func ExampleT() {\n\tfmt.Println(\"name: \", q.T(\"user\"))\n\tfmt.Println(\"name + alias:\", q.T(\"user\", \"usr\"))\n\t// Output:\n\t// name: \"user\" []\n\t// name + alias: \"user\" AS \"usr\" []\n}", "func (h *Helper) GetTypeRenames() map[string]string {\n\t_, _, _ = h.GetTypeDefs()\n\treturn h.typeRenames\n}", "func expectedTypes(types []string) string {\n\t// Are there any types?\n\tif types == nil || len(types) <= 0 {\n\t\treturn \"\"\n\t}\n\n\t// Handle the case of a single token type\n\tif len(types) == 1 {\n\t\treturn fmt.Sprintf(\"; expected token of type %q\", types[0])\n\t} else if len(types) == 2 {\n\t\treturn fmt.Sprintf(\"; expected tokens of type %q or %q\", types[0], types[1])\n\t}\n\n\t// Handle the general case\n\tbuf := &bytes.Buffer{}\n\tfmt.Fprintf(buf, \"; expected tokens of type \")\n\tfor i, tokType := range types {\n\t\tswitch i {\n\t\tcase 0:\n\t\t\tfmt.Fprintf(buf, \"%q\", tokType)\n\t\tcase len(types) - 1:\n\t\t\tfmt.Fprintf(buf, \", or %q\", tokType)\n\t\tdefault:\n\t\t\tfmt.Fprintf(buf, \", %q\", tokType)\n\t\t}\n\t}\n\treturn buf.String()\n}", "func expandStringMap(v map[string]interface{}) map[string]string {\n\tm := make(map[string]string)\n\tfor key, val := range v {\n\t\tm[key] = val.(string)\n\t}\n\n\treturn m\n}", "func (t *VMStrTPIntStr) JavaType() string {\n\treturn \"Map<String,Pair<Integer,String>>\"\n}", "func GetTableDelimiter(schema []SchemaField) 
string {\n\trow := \"+\"\n\tfor _, field := range schema {\n\t\tfor i := 0; i < field.FieldSize+1; i++ {\n\t\t\trow += \"-\"\n\t\t}\n\t\trow += \"+\"\n\t}\n\treturn row\n}", "func (fp *FancyPrinter) SetSeparators(separators ...string) {\n\tfp.separators = separators\n}", "func (t *VMIntTPDblStr) JavaType() string {\n\treturn \"Map<Integer,Pair<Double,String>>\"\n}", "func generateGoTypes(idx *jsonschema.Index) ([]byte, error) {\n\tw := bytes.NewBufferString(\"\\n\")\n\tfor _, k := range sortedMapKeysbyName(idx) {\n\t\tt, err := generateGoType((*idx)[k], idx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif string(t) != \"\" {\n\t\t\tfmt.Fprintf(w, \"%s\\n\", t)\n\t\t}\n\t}\n\n\treturn format.Source(w.Bytes())\n}", "func (d *Descriptor) TypeName() []string {\n\tif d.typename != nil {\n\t\treturn d.typename\n\t}\n\tn := 0\n\tfor parent := d; parent != nil; parent = parent.parent {\n\t\tn++\n\t}\n\ts := make([]string, n, n)\n\tfor parent := d; parent != nil; parent = parent.parent {\n\t\tn--\n\t\ts[n] = parent.GetName()\n\t}\n\td.typename = s\n\treturn s\n}", "func TypeStringWithAliasMap(typ dgo.Type, am dgo.AliasMap) string {\n\ts := strings.Builder{}\n\tnewTypeBuilder(&s, am).buildTypeString(typ, 0)\n\treturn s.String()\n}", "func TypeNameHasPrefix(v string) predicate.Watchlisthistory {\n\treturn predicate.Watchlisthistory(func(s *sql.Selector) {\n\t\ts.Where(sql.HasPrefix(s.C(FieldTypeName), v))\n\t})\n}", "func (o RestApiEndpointConfigurationOutput) Types() pulumi.StringOutput {\n\treturn o.ApplyT(func(v RestApiEndpointConfiguration) string { return v.Types }).(pulumi.StringOutput)\n}", "func (f *tmplFuncs) cleanType(path string) string {\n\tsplit := strings.Split(path, \".\")\n\treturn split[len(split)-1]\n}", "func Benchmark_ReflectTypeMapLookupSplit(b *testing.B) {\n\tvar dump interface{}\n\tvar switcher interface{}\n\tleMap := map[reflect.Type]interface{}{\n\t\treflect.TypeOf(\"\"): \"\",\n\t\treflect.TypeOf(1): \"\",\n\t\treflect.TypeOf(struct{}{}): 
\"\",\n\t}\n\trt := reflect.TypeOf(switcher)\n\tfor i := 0; i < b.N; i++ {\n\t\tdump = leMap[rt]\n\t}\n\t_ = dump\n}", "func tparamName(exportName string) string {\n\t// Remove the \"path\" from the type param name that makes it unique.\n\tix := strings.LastIndex(exportName, \".\")\n\tif ix < 0 {\n\t\terrorf(\"malformed type parameter export name %s: missing prefix\", exportName)\n\t}\n\tname := exportName[ix+1:]\n\tif strings.HasPrefix(name, blankMarker) {\n\t\treturn \"_\"\n\t}\n\treturn name\n}", "func TparamName(exportName string) string {\n\t// Remove the \"path\" from the type param name that makes it unique.\n\tix := strings.LastIndex(exportName, \".\")\n\tif ix < 0 {\n\t\treturn \"\"\n\t}\n\tname := exportName[ix+1:]\n\tif strings.HasPrefix(name, blankMarker) {\n\t\treturn \"_\"\n\t}\n\treturn name\n}", "func TparamName(exportName string) string {\n\t// Remove the \"path\" from the type param name that makes it unique.\n\tix := strings.LastIndex(exportName, \".\")\n\tif ix < 0 {\n\t\treturn \"\"\n\t}\n\tname := exportName[ix+1:]\n\tif strings.HasPrefix(name, blankMarker) {\n\t\treturn \"_\"\n\t}\n\treturn name\n}", "func SplitType_Values() []string {\n\treturn []string{\n\t\tSplitTypeNone,\n\t\tSplitTypeLine,\n\t\tSplitTypeRecordIo,\n\t\tSplitTypeTfrecord,\n\t}\n}", "func (t *VMIntTPIntStr) JavaType() string {\n\treturn \"Map<Integer,Pair<Integer,String>>\"\n}", "func Types(typs map[string]string) ConfigFunc {\n\treturn func(c *Config) {\n\t\tc.Types = make(map[string]types.DataType, len(typs))\n\t\tfor k, v := range typs {\n\t\t\tc.Types[k] = types.DataType(v)\n\t\t}\n\t}\n}", "func makeProvisionerMap(items []plugin) string {\n\toutput := \"\"\n\tfor _, item := range items {\n\t\toutput += fmt.Sprintf(\"\\t\\\"%s\\\": %s.%s,\\n\", item.PluginName, item.ImportName, item.TypeName)\n\t}\n\treturn output\n}", "func TaskTemplateFieldType_Values() []string {\n\treturn 
[]string{\n\t\tTaskTemplateFieldTypeName,\n\t\tTaskTemplateFieldTypeDescription,\n\t\tTaskTemplateFieldTypeScheduledTime,\n\t\tTaskTemplateFieldTypeQuickConnect,\n\t\tTaskTemplateFieldTypeUrl,\n\t\tTaskTemplateFieldTypeNumber,\n\t\tTaskTemplateFieldTypeText,\n\t\tTaskTemplateFieldTypeTextArea,\n\t\tTaskTemplateFieldTypeDateTime,\n\t\tTaskTemplateFieldTypeBoolean,\n\t\tTaskTemplateFieldTypeSingleSelect,\n\t\tTaskTemplateFieldTypeEmail,\n\t}\n}", "func (e *Encoder) SetSeparators(beg, end string) {\n\te.sepBeg = beg\n\te.sepEnd = end\n}", "func (pf field) WTFType() string {\n\tswitch {\n\tcase pf.isList && pf.isFormField:\n\t\treturn fmt.Sprintf(\"FieldList(FormField(%v))\", pf.Type)\n\tcase pf.isList:\n\t\treturn fmt.Sprintf(\"FieldList(%v('%v', [required()]), %v)\", pf.Type, pf.Name, pf.Validators)\n\tcase pf.isFormField:\n\t\treturn fmt.Sprintf(\"FormField(%v)\", pf.Type)\n\tdefault:\n\t\treturn fmt.Sprintf(\"%v(validators=[%v])\", pf.Type, pf.Validators)\n\t}\n}", "func JSONSchemaType(t string) string {\n\tif m, ok := kindMap[t]; ok {\n\t\treturn m\n\t}\n\treturn t\n}", "func (t *typewriter) Typewrite() string {\n\t// Re-use the cached result if already processed.\n\tif t.cur != -1 {\n\t\treturn t.result\n\t}\n\n\tvar buf bytes.Buffer\n\n\tfor {\n\t\tsep, str := t.scanMorpheme()\n\t\tif str == \"\" {\n\t\t\tbreak\n\t\t}\n\n\t\tbuf.WriteString(sep)\n\t\tbuf.WriteString(str)\n\t}\n\n\tt.result = buf.String()\n\treturn t.result\n}", "func (m *RecurrencePattern) SetType(value *RecurrencePatternType)() {\n m.type_escaped = value\n}", "func TypeStrings() []string {\n\tstrs := make([]string, len(_TypeNames))\n\tcopy(strs, _TypeNames)\n\treturn strs\n}", "func (self *Template) Expand(value interface{}) (string, error) {\n\tvalues, ismap := value.(Values)\n\tif !ismap {\n\t\tif m, ismap := struct2map(value); !ismap {\n\t\t\treturn \"\", errors.New(\"expected Values, struct, or pointer to struct\")\n\t\t} else {\n\t\t\treturn self.Expand(m)\n\t\t}\n\t}\n\tvar buf 
bytes.Buffer\n\tfor _, p := range self.parts {\n\t\terr := p.expand(&buf, values)\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t}\n\treturn buf.String(), nil\n}", "func (t *VMStrTPDblStr) JavaType() string {\n\treturn \"Map<String,Pair<Double,String>>\"\n}", "func (t *VLStrStr) JavaType() string {\n\treturn \"List<Pair<String,String>>\"\n}", "func (t DataType) TypeName() string { return typeNames[t] }", "func appendTypeToName(n string, pType gaia.PipelineType) string {\n\treturn fmt.Sprintf(\"%s%s%s\", n, typeDelimiter, pType.String())\n}", "func (t *VMIntTPStrDbl) JavaType() string {\n\treturn \"Map<Integer,Pair<String,Double>>\"\n}", "func AddressedTypes(providerAddrs []addrs.ProviderConfig) []string {\n\tif len(providerAddrs) == 0 {\n\t\treturn nil\n\t}\n\tm := map[string]struct{}{}\n\tfor _, addr := range providerAddrs {\n\t\tm[addr.Type] = struct{}{}\n\t}\n\n\tnames := make([]string, 0, len(m))\n\tfor typeName := range m {\n\t\tnames = append(names, typeName)\n\t}\n\n\tsort.Strings(names) // Stable result for tests\n\treturn names\n}", "func (m *RecurrenceRange) SetTypeEscaped(value *RecurrenceRangeType)() {\n err := m.GetBackingStore().Set(\"typeEscaped\", value)\n if err != nil {\n panic(err)\n }\n}", "func templateHelpers(fs *token.FileSet) template.FuncMap {\n\treturn template.FuncMap{\n\t\t\"ast\": func(n ast.Node) string {\n\t\t\treturn nodeToString(fs, n)\n\t\t},\n\t\t\"join\": strings.Join,\n\t\t\"params\": func(f *Func) []string {\n\t\t\treturn f.Params(fs)\n\t\t},\n\t\t\"fields\": func(f *Func) []string {\n\t\t\treturn f.Fields(fs)\n\t\t},\n\t\t\"results\": func(f *Func) []string {\n\t\t\treturn f.Results(fs)\n\t\t},\n\t\t\"receiver\": func(f *Func) string {\n\t\t\tif f.ReceiverType() == nil {\n\t\t\t\treturn \"\"\n\t\t\t}\n\n\t\t\treturn strings.Replace(nodeToString(fs, f.ReceiverType()), \"*\", \"\", -1) + \".\"\n\t\t},\n\t\t\"want\": func(s string) string { return strings.Replace(s, \"got\", \"want\", 1) },\n\t}\n}", "func (gen 
*jsGenerator) fullTypeName(t *idl.Type) string {\n\tvar s string\n\tms, ok := jsTypes[t.Name]\n\tif !ok {\n\t\tms = t.Name\n\t}\n\tif t.Name == \"list\" {\n\t\ts = fmt.Sprintf(ms, gen.fullTypeName(t.ValueType))\n\t} else if t.Name == \"map\" {\n\t\ts = fmt.Sprintf(ms, jsTypes[t.KeyType.Name], gen.fullTypeName(t.ValueType))\n\t} else {\n\t\tns := gen.tplRootIdl.NamespaceOf(ms, \"js\")\n\t\tif ns != \"\" {\n\t\t\ts = fmt.Sprintf(\"%s.%s\", ns, ms)\n\t\t} else {\n\t\t\ts = ms\n\t\t}\n\t}\n\treturn s\n}", "func (lx *Lexer) separator() Token {\n\tr, _ := lx.nextChar()\n\tlx.token.writeRune(r)\n\tlx.token.Type = separatorMap[r]\n\treturn lx.returnAndReset()\n}", "func (o DomainNameEndpointConfigurationPtrOutput) Types() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *DomainNameEndpointConfiguration) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn &v.Types\n\t}).(pulumi.StringPtrOutput)\n}", "func quotedItems(items ...interface{}) []string {\n\tstrItems := stringSlice(items...)\n\tquoted := []string{}\n\tfor _, str := range strItems {\n\t\tif str[0:1] != \"\\\"\" {\n\t\t\tstr = \"\\\"\" + str + \"\\\"\"\n\t\t}\n\t\tquoted = append(quoted, str)\n\t}\n\treturn quoted\n}", "func generateTomlKeysMap(structPtr reflect.Value, config map[string]interface{}) (map[string]interface{}, error) {\n\tstrct := structPtr.Elem()\n\ttomlMap := map[string]interface{}{}\n\tpType := strct.Type()\n\n\tfor configKey, configValue := range config {\n\t\tfield, found := pType.FieldByName(configKey)\n\n\t\tif !found {\n\t\t\treturn map[string]interface{}{}, fmt.Errorf(\"field %s did not exist on plugin\", configKey)\n\t\t}\n\n\t\ttomlTag := field.Tag.Get(\"toml\")\n\t\tif tomlTag == \"\" {\n\t\t\ttomlTag = configKey\n\t\t}\n\n\t\ttomlMap[tomlTag] = configValue\n\t}\n\n\treturn tomlMap, nil\n\n}", "func (sl *Slice) TypeAndNames() kit.TypeAndNameList {\n\tif len(*sl) == 0 {\n\t\treturn nil\n\t}\n\ttn := make(kit.TypeAndNameList, len(*sl))\n\tfor _, kid := range *sl 
{\n\t\ttn.Add(kid.Type(), kid.Name())\n\t}\n\treturn tn\n}", "func (t *VLIntStr) JavaType() string {\n\treturn \"List<Pair<Integer,String>>\"\n}", "func (t *VMStrTPStrInt) JavaType() string {\n\treturn \"Map<String,Pair<String,Integer>>\"\n}", "func (t *VMStrStr) JavaType() string {\n\treturn \"Map<String,String>\"\n}", "func (t *VMIntTPStrInt) JavaType() string {\n\treturn \"Map<Integer,Pair<String,Integer>>\"\n}", "func templateStringList(c map[string]string, l ...string) ([]string, error) {\n\tif len(l) == 0 {\n\t\treturn l, nil\n\t}\n\n\tvar ret = make([]string, 0, len(l))\n\n\tfor _, item := range l {\n\t\tbuf := &bytes.Buffer{}\n\t\ttmpl, err := template.New(\"\").Parse(item)\n\t\tif err != nil {\n\t\t\treturn ret, err\n\t\t}\n\t\terr = tmpl.Execute(buf, c)\n\t\tif err != nil {\n\t\t\treturn ret, err\n\t\t}\n\t\tret = append(ret, buf.String())\n\t}\n\treturn ret, nil\n}", "func transpileSmartFields(elts []*etree.Element) {\n\n}", "func (o GetDomainNameEndpointConfigurationOutput) Types() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v GetDomainNameEndpointConfiguration) []string { return v.Types }).(pulumi.StringArrayOutput)\n}", "func TypedHandlers(handlers ...TypedHandler) TypedHandler {\n\tres := TypedHandler{}\n\tfor _, typed := range handlers {\n\t\tfor commandType, handler := range typed {\n\t\t\tres[commandType] = handler\n\t\t}\n\t}\n\treturn res\n}", "func typeListHandler(w http.ResponseWriter, r *http.Request) {\n\n\tresponder(w, r, data.Types)\n\n}", "func (t *VTStrStr) JavaType() string {\n\treturn \"Pair<String,String>\"\n}", "func (k *Key) Times(delim string) []time.Time {\n\treturn k.TimesFormat(time.RFC3339, delim)\n}", "func ConsoleFieldKVSep(s string) OptionFormatter {\n\treturn func(f *ConsoleFormatter) {\n\t\tif s != \"\" {\n\t\t\tf.ConsoleFieldKVSep = s\n\t\t}\n\t}\n}", "func (t *VMStrTPStrDbl) JavaType() string {\n\treturn \"Map<String,Pair<String,Double>>\"\n}", "func (sl *Slice) TypeAndUniqueNames() kit.TypeAndNameList {\n\tif 
len(*sl) == 0 {\n\t\treturn nil\n\t}\n\ttn := make(kit.TypeAndNameList, len(*sl))\n\tfor _, kid := range *sl {\n\t\ttn.Add(kid.Type(), kid.UniqueName())\n\t}\n\treturn tn\n}", "func tparamsMap(tparams *ast.FieldList) map[string]bool {\n\tif tparams == nil || len(tparams.List) == 0 {\n\t\treturn nil\n\t}\n\tm := make(map[string]bool)\n\tfor _, f := range tparams.List {\n\t\tfor _, name := range f.Names {\n\t\t\tif name.Name != \"_\" {\n\t\t\t\tm[name.Name] = true\n\t\t\t}\n\t\t}\n\t}\n\treturn m\n}", "func (v *TypePair) String() string {\n\tif v == nil {\n\t\treturn \"<nil>\"\n\t}\n\n\tvar fields [2]string\n\ti := 0\n\tfields[i] = fmt.Sprintf(\"Left: %v\", v.Left)\n\ti++\n\tfields[i] = fmt.Sprintf(\"Right: %v\", v.Right)\n\ti++\n\n\treturn fmt.Sprintf(\"TypePair{%v}\", strings.Join(fields[:i], \", \"))\n}", "func Type_Values() []string {\n\treturn []string{\n\t\tTypeDash,\n\t\tTypeHls,\n\t}\n}", "func buildSelectFields(selects []interface{}) string {\n\tif len(selects) == 0 {\n\t\treturn \"*\"\n\t}\n\tfields := \"\"\n\tfor _, f := range selects {\n\t\tif v, ok := f.(Raw); ok {\n\t\t\tfields += v.String() + \", \"\n\t\t} else if v, ok := f.(alias); ok {\n\t\t\tfields += v.String() + \", \"\n\t\t} else if v, ok := f.(string); ok {\n\t\t\tfields += quote(v) + \", \"\n\t\t}\n\t}\n\treturn strings.TrimRight(fields, \", \")\n}", "func (c *Car) GetSeparatorTemplate() string {\n return os.Getenv(\"BULLETTRAIN_CAR_VIRTUALENV_SEPARATOR_TEMPLATE\")\n}", "func (sym *symtab) typeIdName(t types.Type) string {\n\tidn := strings.Replace(sym.typeGoName(t), \".\", \"_\", -1)\n\tif _, isary := t.(*types.Array); isary {\n\t\tidn = strings.Replace(idn, \"[\", \"Array_\", 1)\n\t\tidn = strings.Replace(idn, \"]\", \"_\", 1)\n\t}\n\tidn = strings.Replace(idn, \"[]\", \"Slice_\", -1)\n\tidn = strings.Replace(idn, \"map[\", \"Map_\", -1)\n\tidn = strings.Replace(idn, \"[\", \"_\", -1)\n\tidn = strings.Replace(idn, \"]\", \"_\", -1)\n\tidn = strings.Replace(idn, \"{}\", \"_\", -1)\n\tidn = 
strings.Replace(idn, \"*\", \"Ptr_\", -1)\n\treturn idn\n}", "func (StringMapOutput) ElementType() reflect.Type {\n\treturn stringMapType\n}", "func ConfigTypeHasPrefix(v string) predicate.Order {\n\treturn predicate.Order(func(s *sql.Selector) {\n\t\ts.Where(sql.HasPrefix(s.C(FieldConfigType), v))\n\t})\n}", "func (m *WorkbookNamedItem) SetTypeEscaped(value *string)() {\n err := m.GetBackingStore().Set(\"typeEscaped\", value)\n if err != nil {\n panic(err)\n }\n}", "func ComposeCustomMappingKey(method string, path string) string {\n\treturn method + KeySeperator + path\n}", "func (t *VTIntStr) JavaType() string {\n\treturn \"Pair<Integer,String>\"\n}", "func TypeHasPrefix(v string) predicate.Blob {\n\treturn predicate.Blob(\n\t\tfunc(s *sql.Selector) {\n\t\t\ts.Where(sql.HasPrefix(s.C(FieldType), v))\n\t\t},\n\t)\n}", "func ToOptionsKey(entryName, rpcType string) string {\n\treturn strings.Join([]string{entryName, rpcType}, \"-\")\n}", "func CustomSeparator(separator string) TableOption {\n\treturn func(opts *options) {\n\t\topts.separator = separator\n\t}\n}", "func Separator() iup.Ihandle {\n\t//Ihandle* IupGLSeparator(void);\n\treturn mkih(C.IupGLSeparator())\n}" ]
[ "0.49833834", "0.49633592", "0.46772054", "0.4589316", "0.4566254", "0.45600334", "0.45434427", "0.45412755", "0.4482598", "0.44440588", "0.44426832", "0.44411588", "0.44343516", "0.4406544", "0.4406173", "0.44015318", "0.43420374", "0.4330905", "0.43104362", "0.43059155", "0.42691162", "0.42563102", "0.42479447", "0.4230986", "0.422685", "0.42260692", "0.42244515", "0.42165223", "0.42063677", "0.41976923", "0.41867304", "0.4175167", "0.41738808", "0.41656026", "0.41654506", "0.41556552", "0.4151691", "0.41486496", "0.41461957", "0.413446", "0.41338718", "0.4130602", "0.41271934", "0.4125654", "0.4125654", "0.41188583", "0.41161114", "0.4115267", "0.41064525", "0.41030788", "0.40961894", "0.40934768", "0.40924776", "0.4089931", "0.4089552", "0.40870088", "0.40711176", "0.40645617", "0.40593955", "0.40568012", "0.40491495", "0.4039574", "0.40240443", "0.40150657", "0.40150526", "0.4004847", "0.40027982", "0.39947778", "0.39944926", "0.3991509", "0.39909872", "0.39854917", "0.3979356", "0.39750314", "0.39740086", "0.39696", "0.39652917", "0.39574212", "0.39486322", "0.39481986", "0.39473602", "0.39448112", "0.39444953", "0.39393276", "0.39349893", "0.39327386", "0.39321813", "0.39308414", "0.3930695", "0.3926657", "0.3920019", "0.39142412", "0.3911369", "0.3908854", "0.3902072", "0.3899329", "0.38969377", "0.38906047", "0.3884125", "0.3878104" ]
0.7430346
0
isEtcdConfigFile returns whether the given path looks like a configuration file, and in that case it returns the corresponding hash to detect modifications.
func isEtcdConfigFile(path string) (bool, fhash) { if info, err := os.Stat(path); err != nil || info.IsDir() { return false, fhash{} } b, err := os.ReadFile(path) if err != nil { return false, fhash{} } // search for the "endpoints:" string if strings.Contains(string(b), "endpoints:") { return true, sha256.Sum256(b) } return false, fhash{} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func ParseConfigFile(path string) (*Config, error) {\n\t// slurp\n\tvar buf bytes.Buffer\n\tpath, err := filepath.Abs(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tf, err := os.Open(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer f.Close()\n\tif _, err := io.Copy(&buf, f); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// parse\n\tc := &Config{\n\t\tClient: &ClientConfig{\n\t\t\tServerJoin: &ServerJoin{},\n\t\t\tTemplateConfig: &client.ClientTemplateConfig{\n\t\t\t\tWait: &client.WaitConfig{},\n\t\t\t\tWaitBounds: &client.WaitConfig{},\n\t\t\t\tConsulRetry: &client.RetryConfig{},\n\t\t\t\tVaultRetry: &client.RetryConfig{},\n\t\t\t\tNomadRetry: &client.RetryConfig{},\n\t\t\t},\n\t\t},\n\t\tServer: &ServerConfig{\n\t\t\tPlanRejectionTracker: &PlanRejectionTracker{},\n\t\t\tServerJoin: &ServerJoin{},\n\t\t},\n\t\tACL: &ACLConfig{},\n\t\tAudit: &config.AuditConfig{},\n\t\tConsul: &config.ConsulConfig{},\n\t\tAutopilot: &config.AutopilotConfig{},\n\t\tTelemetry: &Telemetry{},\n\t\tVault: &config.VaultConfig{},\n\t}\n\n\terr = hcl.Decode(c, buf.String())\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to decode HCL file %s: %w\", path, err)\n\t}\n\n\t// convert strings to time.Durations\n\ttds := []durationConversionMap{\n\t\t{\"gc_interval\", &c.Client.GCInterval, &c.Client.GCIntervalHCL, nil},\n\t\t{\"acl.token_ttl\", &c.ACL.TokenTTL, &c.ACL.TokenTTLHCL, nil},\n\t\t{\"acl.policy_ttl\", &c.ACL.PolicyTTL, &c.ACL.PolicyTTLHCL, nil},\n\t\t{\"acl.token_min_expiration_ttl\", &c.ACL.TokenMinExpirationTTL, &c.ACL.TokenMinExpirationTTLHCL, nil},\n\t\t{\"acl.token_max_expiration_ttl\", &c.ACL.TokenMaxExpirationTTL, &c.ACL.TokenMaxExpirationTTLHCL, nil},\n\t\t{\"client.server_join.retry_interval\", &c.Client.ServerJoin.RetryInterval, &c.Client.ServerJoin.RetryIntervalHCL, nil},\n\t\t{\"server.heartbeat_grace\", &c.Server.HeartbeatGrace, &c.Server.HeartbeatGraceHCL, nil},\n\t\t{\"server.min_heartbeat_ttl\", &c.Server.MinHeartbeatTTL, 
&c.Server.MinHeartbeatTTLHCL, nil},\n\t\t{\"server.failover_heartbeat_ttl\", &c.Server.FailoverHeartbeatTTL, &c.Server.FailoverHeartbeatTTLHCL, nil},\n\t\t{\"server.plan_rejection_tracker.node_window\", &c.Server.PlanRejectionTracker.NodeWindow, &c.Server.PlanRejectionTracker.NodeWindowHCL, nil},\n\t\t{\"server.retry_interval\", &c.Server.RetryInterval, &c.Server.RetryIntervalHCL, nil},\n\t\t{\"server.server_join.retry_interval\", &c.Server.ServerJoin.RetryInterval, &c.Server.ServerJoin.RetryIntervalHCL, nil},\n\t\t{\"consul.timeout\", &c.Consul.Timeout, &c.Consul.TimeoutHCL, nil},\n\t\t{\"autopilot.server_stabilization_time\", &c.Autopilot.ServerStabilizationTime, &c.Autopilot.ServerStabilizationTimeHCL, nil},\n\t\t{\"autopilot.last_contact_threshold\", &c.Autopilot.LastContactThreshold, &c.Autopilot.LastContactThresholdHCL, nil},\n\t\t{\"telemetry.collection_interval\", &c.Telemetry.collectionInterval, &c.Telemetry.CollectionInterval, nil},\n\t\t{\"client.template.block_query_wait\", nil, &c.Client.TemplateConfig.BlockQueryWaitTimeHCL,\n\t\t\tfunc(d *time.Duration) {\n\t\t\t\tc.Client.TemplateConfig.BlockQueryWaitTime = d\n\t\t\t},\n\t\t},\n\t\t{\"client.template.max_stale\", nil, &c.Client.TemplateConfig.MaxStaleHCL,\n\t\t\tfunc(d *time.Duration) {\n\t\t\t\tc.Client.TemplateConfig.MaxStale = d\n\t\t\t}},\n\t\t{\"client.template.wait.min\", nil, &c.Client.TemplateConfig.Wait.MinHCL,\n\t\t\tfunc(d *time.Duration) {\n\t\t\t\tc.Client.TemplateConfig.Wait.Min = d\n\t\t\t},\n\t\t},\n\t\t{\"client.template.wait.max\", nil, &c.Client.TemplateConfig.Wait.MaxHCL,\n\t\t\tfunc(d *time.Duration) {\n\t\t\t\tc.Client.TemplateConfig.Wait.Max = d\n\t\t\t},\n\t\t},\n\t\t{\"client.template.wait_bounds.min\", nil, &c.Client.TemplateConfig.WaitBounds.MinHCL,\n\t\t\tfunc(d *time.Duration) {\n\t\t\t\tc.Client.TemplateConfig.WaitBounds.Min = d\n\t\t\t},\n\t\t},\n\t\t{\"client.template.wait_bounds.max\", nil, &c.Client.TemplateConfig.WaitBounds.MaxHCL,\n\t\t\tfunc(d *time.Duration) 
{\n\t\t\t\tc.Client.TemplateConfig.WaitBounds.Max = d\n\t\t\t},\n\t\t},\n\t\t{\"client.template.consul_retry.backoff\", nil, &c.Client.TemplateConfig.ConsulRetry.BackoffHCL,\n\t\t\tfunc(d *time.Duration) {\n\t\t\t\tc.Client.TemplateConfig.ConsulRetry.Backoff = d\n\t\t\t},\n\t\t},\n\t\t{\"client.template.consul_retry.max_backoff\", nil, &c.Client.TemplateConfig.ConsulRetry.MaxBackoffHCL,\n\t\t\tfunc(d *time.Duration) {\n\t\t\t\tc.Client.TemplateConfig.ConsulRetry.MaxBackoff = d\n\t\t\t},\n\t\t},\n\t\t{\"client.template.vault_retry.backoff\", nil, &c.Client.TemplateConfig.VaultRetry.BackoffHCL,\n\t\t\tfunc(d *time.Duration) {\n\t\t\t\tc.Client.TemplateConfig.VaultRetry.Backoff = d\n\t\t\t},\n\t\t},\n\t\t{\"client.template.vault_retry.max_backoff\", nil, &c.Client.TemplateConfig.VaultRetry.MaxBackoffHCL,\n\t\t\tfunc(d *time.Duration) {\n\t\t\t\tc.Client.TemplateConfig.VaultRetry.MaxBackoff = d\n\t\t\t},\n\t\t},\n\t\t{\"client.template.nomad_retry.backoff\", nil, &c.Client.TemplateConfig.NomadRetry.BackoffHCL,\n\t\t\tfunc(d *time.Duration) {\n\t\t\t\tc.Client.TemplateConfig.NomadRetry.Backoff = d\n\t\t\t},\n\t\t},\n\t\t{\"client.template.nomad_retry.max_backoff\", nil, &c.Client.TemplateConfig.NomadRetry.MaxBackoffHCL,\n\t\t\tfunc(d *time.Duration) {\n\t\t\t\tc.Client.TemplateConfig.NomadRetry.MaxBackoff = d\n\t\t\t},\n\t\t},\n\t}\n\n\t// Add enterprise audit sinks for time.Duration parsing\n\tfor i, sink := range c.Audit.Sinks {\n\t\ttds = append(tds, durationConversionMap{\n\t\t\tfmt.Sprintf(\"audit.sink.%d\", i), &sink.RotateDuration, &sink.RotateDurationHCL, nil})\n\t}\n\n\t// convert strings to time.Durations\n\terr = convertDurations(tds)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// report unexpected keys\n\terr = extraKeys(c)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Set client template config or its members to nil if not set.\n\tfinalizeClientTemplateConfig(c)\n\n\treturn c, nil\n}", "func (p *Patch) ConfigChanged(remotePath string) bool 
{\n\tfor _, patchPart := range p.Patches {\n\t\tif patchPart.ModuleName == \"\" {\n\t\t\tfor _, summary := range patchPart.PatchSet.Summary {\n\t\t\t\tif summary.Name == remotePath {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn false\n\t\t}\n\t}\n\treturn false\n}", "func (p *Patch) ConfigChanged(remotePath string) bool {\n\tfor _, patchPart := range p.Patches {\n\t\tif patchPart.ModuleName == \"\" {\n\t\t\tfor _, summary := range patchPart.PatchSet.Summary {\n\t\t\t\tif summary.Name == remotePath {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn false\n\t\t}\n\t}\n\treturn false\n}", "func (config Config) HasLoadedConfigurationFileBeenModified() bool {\n\tif fileInfo, err := os.Stat(config.filePath); err == nil {\n\t\tif !fileInfo.ModTime().IsZero() {\n\t\t\treturn config.lastFileModTime.Unix() != fileInfo.ModTime().Unix()\n\t\t}\n\t}\n\treturn false\n}", "func isConfigState(e *yang.Entry) bool {\n\treturn e.IsDir() && (e.Name == \"config\" || e.Name == \"state\")\n}", "func kubeconfigExistsAndIsFile(filePath string) (bool, error) {\n\tinfo, err := os.Stat(filePath)\n\t// if we couldn't open the file, error out\n\tif err != nil {\n\t\treturn false, err\n\t}\n\t// if we have a directory instead of a file, error out\n\tif info.IsDir() {\n\t\treturn false, errors.New(\"The provided path was a directory. 
Expected a file.\")\n\t}\n\treturn true, err\n}", "func getConfigFile() ([]byte, error) {\n\tif len(configFile) != 0 {\n\t\treturn configFile, nil\n\t}\n\tenvConf := os.Getenv(\"IGOR_CONFIG\")\n\tif envConf != \"\" {\n\t\treturn []byte(envConf), nil\n\t}\n\tfilename, _ := filepath.Abs(\"./config.json\")\n\tif _, err := os.Stat(filename); err != nil {\n\t\tjsonConfig = false\n\t\tfilename, _ = filepath.Abs(\"./config.yml\")\n\t}\n\tconfigFile, err := ioutil.ReadFile(filename)\n\n\tif err != nil {\n\t\treturn configFile, err\n\t}\n\treturn configFile, nil\n}", "func getConfigFile() string {\n\tif name := os.Getenv(configFileEnvVar); name != \"\" {\n\t\treturn name\n\t}\n\thome, err := homedir.Dir()\n\tif err != nil {\n\t\treturn \"\"\n\t}\n\tname := filepath.Join(home, \".jot\", \"config.toml\")\n\tif checkFile(name) {\n\t\treturn name\n\t}\n\n\treturn \"\"\n}", "func (flogs *fileLogs) Equal(config dvid.StoreConfig) bool {\n\tpath, _, err := parseConfig(config)\n\tif err != nil {\n\t\treturn false\n\t}\n\treturn path == flogs.path\n}", "func isMcConfigExists() bool {\n\tconfigFile, err := getMcConfigPath()\n\tif err != nil {\n\t\treturn false\n\t}\n\t_, err = os.Stat(configFile)\n\tif err != nil {\n\t\treturn false\n\t}\n\treturn true\n}", "func path() string {\n\tif len(configPath) != 0 {\n\t\treturn configPath\n\t}\n\treturn \"config/database.yml\"\n}", "func configExistsInPath(path string) bool {\n\t// Needed for testing\n\tif config != nil {\n\t\treturn true\n\t}\n\n\t// Check devspace.yaml\n\t_, err := os.Stat(filepath.Join(path, constants.DefaultConfigPath))\n\tif err == nil {\n\t\treturn true\n\t}\n\n\t// Check devspace-configs.yaml\n\t_, err = os.Stat(filepath.Join(path, constants.DefaultConfigsPath))\n\tif err == nil {\n\t\treturn true\n\t}\n\n\treturn false // Normal config file found\n}", "func readConfigFile(path string) (config *Config, err error) {\n\tvar file *os.File\n\tfile, err = os.Open(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer 
file.Close()\n\n\tconfig = new(Config)\n\terr = json.NewDecoder(file).Decode(config)\n\treturn config, err\n}", "func (a *App) SaveEtcdConfig(cfg *config.Etcd) error {\n\treturn saveEtcdConfig(cfg)\n}", "func TestConfigIsJSON(t *testing.T) {\n\thomedir := os.Getenv(\"HOME\")\n\tconfigpath := (homedir + \"/.stalker.json\")\n\tfile, _ := os.Open(configpath)\n\n\tdecoder := json.NewDecoder(file)\n\tconfiguration := Configuration{}\n\terr := decoder.Decode(&configuration)\n\n\tif err != nil {\n\t\tt.Error(err)\n\t} else {\n\t\tt.Log(\"config is valid JSON\")\n\t}\n}", "func updateConfigFile(context *cli.Context) {\n\tconfig, configFilename, err := lib.GetConfig(context)\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\tif configFilename == \"\" {\n\t\tfmt.Println(\"Could not find a config file to update\")\n\t\treturn\n\t}\n\n\t// Same config in []byte format.\n\tconfigRaw, err := ioutil.ReadFile(configFilename)\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\n\t// Same config in map format so that we can detect missing keys.\n\tvar configMap map[string]interface{}\n\tif err = json.Unmarshal(configRaw, &configMap); err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\n\tdirty := updateConfig(config, configMap)\n\n\tif dirty {\n\t\tconfig.ToFile(context)\n\t\tfmt.Printf(\"Wrote %s\\n\", configFilename)\n\t} else {\n\t\tfmt.Println(\"Nothing to update\")\n\t}\n}", "func (ec *EtcdConfig) Validate() error {\n\tif ec.Filepath != \"\" {\n\t\tec.clientNewType = \"file\"\n\t\treturn nil\n\t}\n\n\t// All tls related properties should be empty or all should be defined\n\ttlsPresent := ec.Cert != \"\" || ec.Key != \"\" || ec.CaCert != \"\"\n\ttlsMissing := ec.Cert == \"\" || ec.Key == \"\" || ec.CaCert == \"\"\n\tif tlsPresent {\n\t\tif tlsMissing {\n\t\t\tlog.WithFields(etcdLogFields).WithFields(log.Fields{\n\t\t\t\t\"error\": ErrIncompleteTLSConfig,\n\t\t\t\t\"cert\": ec.Cert,\n\t\t\t\t\"key\": ec.Key,\n\t\t\t\t\"caCert\": ec.CaCert,\n\t\t\t}).Error(ErrIncompleteTLSConfig)\n\t\t\treturn 
ErrIncompleteTLSConfig\n\t\t}\n\t\tec.clientNewType = \"tls\"\n\t}\n\treturn nil\n}", "func (a *analyzer) tocContainsPath(path string) bool {\n\tb, err := os.ReadFile(filepath.Join(a.checkoutDir, tocPath))\n\tif err != nil {\n\t\treturn false\n\t}\n\n\tvar toc toc\n\tif err := yaml.Unmarshal(b, &toc); err != nil {\n\t\treturn false\n\t}\n\n\tfor _, entry := range flattenTocEntries(toc.Entries) {\n\t\tif entry.Path == filepath.Join(\"/\", path) {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func (w *Writer) configPath(configDigest digest.Digest) string {\n\treturn configDigest.Hex() + \".json\"\n}", "func VerifyMasterFileString(path string) error {\n\tmatched, err := regexp.MatchString(\"^[0-9a-z.]+:[0-9]+$\", path)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif matched {\n\t\treturn fmt.Errorf(\"redis_server config option must point to a file: %s\", path)\n\t}\n\treturn nil\n}", "func setConfigFile(etcdConf *embetcd.Config, max int, min int, check int) (configFilePath string, tempDir string) {\n\t// get a temporary directory for the etcd data directory\n\ttempDir, err := ioutil.TempDir(\"\", \"TestProxyCluster\")\n\tSo(err, ShouldBeNil)\n\tSo(os.RemoveAll(tempDir), ShouldBeNil)\n\n\t// get a temporary filename for the config file\n\tfileObj, err := ioutil.TempFile(\"\", \"TestProxyClusterConfig\")\n\tSo(err, ShouldBeNil)\n\tconfigFilePath = fileObj.Name()\n\t// remove the temp file so we can overwrite it\n\tSo(os.Remove(configFilePath), ShouldBeNil)\n\n\t//// remove the temp dir so we can recreate it\n\n\tproxyConf := configEtcd\n\tproxyConf = strings.Replace(proxyConf, \"<<MAX>>\", strconv.FormatInt(int64(max), 10), -1)\n\tproxyConf = strings.Replace(proxyConf, \"<<MIN>>\", strconv.FormatInt(int64(min), 10), -1)\n\tproxyConf = strings.Replace(proxyConf, \"<<CHECK>>\", strconv.FormatInt(int64(check), 10), -1)\n\tproxyConf = strings.Replace(proxyConf, \"<<LPADDRESS>>\", etcdConf.LPUrls[0].String(), -1)\n\tproxyConf = strings.Replace(proxyConf, 
\"<<APADDRESS>>\", etcdConf.APUrls[0].String(), -1)\n\tproxyConf = strings.Replace(proxyConf, \"<<LCADDRESS>>\", etcdConf.LCUrls[0].String(), -1)\n\tproxyConf = strings.Replace(proxyConf, \"<<ACADDRESS>>\", etcdConf.ACUrls[0].String(), -1)\n\tproxyConf = strings.Replace(proxyConf, \"<<MADDRESS>>\", etcdConf.ListenMetricsUrls[0].String(), -1)\n\tproxyConf = strings.Replace(proxyConf, \"<<UNHEALTHYTTL>>\", etcdConf.UnhealthyTTL.String(), -1)\n\tproxyConf = strings.Replace(proxyConf, \"<<REMOVEMEMBERTIMEOUT>>\", etcdConf.RemoveMemberTimeout.String(), -1)\n\tproxyConf = strings.Replace(proxyConf, \"<<DATADIR>>\", filepath.Join(tempDir, etcdConf.Dir), -1)\n\tproxyConf = strings.Replace(proxyConf, \"<<CLUSTEROP>>\", etcdConf.ClusterState, -1)\n\tproxyConf = strings.Replace(proxyConf, \"<<TARGETADDRESSES>>\", formatTargetAddresses(etcdConf.InitialCluster), -1)\n\tproxyConf = strings.Replace(proxyConf, \"<<SERVERNAME>>\", etcdConf.Name, -1)\n\tproxyConf = strings.Replace(proxyConf, \"<<CLUSTERNAME>>\", etcdConf.ClusterName, -1)\n\n\tSo(ioutil.WriteFile(path.Join(configFilePath), []byte(proxyConf), os.FileMode(0666)), ShouldBeNil)\n\treturn configFilePath, tempDir\n}", "func ConfigReadFile(path string) (string, []byte, error) {\n\tif strings.HasPrefix(path, \"file://\") {\n\t\tpath = path[7:]\n\t}\n\n\tpos := strings.LastIndexByte(path, '.')\n\tif pos == -1 {\n\t\tpos += len(path)\n\t}\n\n\tdata, err := ioutil.ReadFile(path)\n\tlast := strings.LastIndex(path, \".\") + 1\n\tif last == 0 {\n\t\treturn \"\", nil, fmt.Errorf(\"read file config, type is null\")\n\t}\n\treturn path[pos+1:], data, err\n}", "func TestConfigExists(t *testing.T) {\n\thomedir := os.Getenv(\"HOME\")\n\tconfigpath := (homedir + \"/.stalker.json\")\n\n\tif _, err := os.Stat(configpath); os.IsNotExist(err) {\n\t\tt.Error(configpath + \" not found!\")\n\t} else {\n\t\tt.Log(\"found config \" + configpath)\n\t}\n}", "func ProbeEtcd(endpoint string) (string, bool, error) {\n\tu, err := url.Parse(endpoint + 
\"/version\")\n\tif err != nil {\n\t\treturn \"\", false, fmt.Errorf(\"Can't parse endpoint %s: %s\", endpoint, err)\n\t}\n\tif u.Scheme == \"https\" { // secure etcd\n\t\tclientcert, clientkey, err := util.ClientCertAndKeyFromEnv()\n\t\tif err != nil {\n\t\t\treturn \"\", false, err\n\t\t}\n\t\tversion, verr := getVersionSecure(u.String(), clientcert, clientkey)\n\t\tif verr != nil {\n\t\t\treturn \"\", false, verr\n\t\t}\n\t\treturn version, true, nil\n\t}\n\tversion, verr := getVersion(u.String())\n\tif verr != nil {\n\t\treturn \"\", false, verr\n\t}\n\treturn version, false, nil\n}", "func TestConfigFromFile(t *testing.T) {\n\tcluster := \"TestCluster\"\n\tdockerAuthType := \"dockercfg\"\n\tdockerAuth := `{\n \"https://index.docker.io/v1/\":{\n \"auth\":\"admin\",\n \"email\":\"email\"\n }\n}`\n\ttestPauseImageName := \"pause-image-name\"\n\ttestPauseTag := \"pause-image-tag\"\n\tcontent := fmt.Sprintf(`{\n \"AWSRegion\": \"not-real-1\",\n \"Cluster\": \"%s\",\n \"EngineAuthType\": \"%s\",\n \"EngineAuthData\": %s,\n \"DataDir\": \"/var/run/ecs_agent\",\n \"TaskIAMRoleEnabled\": true,\n \"TaskCPUMemLimit\": true,\n \"InstanceAttributes\": {\n \"attribute1\": \"value1\"\n },\n \"ContainerInstanceTags\": {\n \"tag1\": \"value1\"\n },\n \"PauseContainerImageName\":\"%s\",\n \"PauseContainerTag\":\"%s\",\n \"AWSVPCAdditionalLocalRoutes\":[\"169.254.172.1/32\"]\n}`, cluster, dockerAuthType, dockerAuth, testPauseImageName, testPauseTag)\n\n\tfilePath := setupFileConfiguration(t, content)\n\tdefer os.Remove(filePath)\n\n\tdefer setTestEnv(\"ECS_AGENT_CONFIG_FILE_PATH\", filePath)()\n\tdefer setTestEnv(\"AWS_DEFAULT_REGION\", \"us-west-2\")()\n\n\tcfg, err := fileConfig()\n\tassert.NoError(t, err, \"reading configuration from file failed\")\n\n\tassert.Equal(t, cluster, cfg.Cluster, \"cluster name not as expected from file\")\n\tassert.Equal(t, dockerAuthType, cfg.EngineAuthType, \"docker auth type not as expected from file\")\n\tassert.Equal(t, dockerAuth, 
string(cfg.EngineAuthData.Contents()), \"docker auth data not as expected from file\")\n\tassert.Equal(t, map[string]string{\"attribute1\": \"value1\"}, cfg.InstanceAttributes)\n\tassert.Equal(t, map[string]string{\"tag1\": \"value1\"}, cfg.ContainerInstanceTags)\n\tassert.Equal(t, testPauseImageName, cfg.PauseContainerImageName, \"should read PauseContainerImageName\")\n\tassert.Equal(t, testPauseTag, cfg.PauseContainerTag, \"should read PauseContainerTag\")\n\tassert.Equal(t, 1, len(cfg.AWSVPCAdditionalLocalRoutes), \"should have one additional local route\")\n\texpectedLocalRoute, err := cniTypes.ParseCIDR(\"169.254.172.1/32\")\n\tassert.NoError(t, err)\n\tassert.Equal(t, expectedLocalRoute.IP, cfg.AWSVPCAdditionalLocalRoutes[0].IP, \"should match expected route IP\")\n\tassert.Equal(t, expectedLocalRoute.Mask, cfg.AWSVPCAdditionalLocalRoutes[0].Mask, \"should match expected route Mask\")\n\tassert.Equal(t, ExplicitlyEnabled, cfg.TaskCPUMemLimit.Value, \"TaskCPUMemLimit should be explicitly enabled\")\n}", "func GetEtcdCrdPath() string {\n\treturn filepath.Join(\"..\", \"..\", \"..\", \"..\", \"config\", \"crd\", \"bases\", \"10-crd-druid.gardener.cloud_etcds.yaml\")\n}", "func getEtcdOption(conf map[string]string, confKey, envVar string) (string, bool) {\n\tconfVal, inConf := conf[confKey]\n\tenvVal, inEnv := os.LookupEnv(envVar)\n\tif inEnv {\n\t\treturn envVal, true\n\t}\n\treturn confVal, inConf\n}", "func (c Calendars) configPath() (string, error) {\n\tconfDir, err := configDirectory()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn filepath.Join(confDir, \"calendars.txt\"), nil\n}", "func readConfigFile(path string) ([]byte, error) {\n\tif path == \"\" {\n\t\treturn nil, errors.New(\"Configfile path not provided\")\n\t}\n\n\tvar jsonDoc json.RawMessage\n\t// read the file\n\tdata, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdataStr := strings.TrimSpace(string(data))\n\n\t// if string starts with { - assume json 
else yaml\n\tif strings.HasPrefix(dataStr, \"{\") {\n\t\tjsonDoc = json.RawMessage(data)\n\t} else {\n\t\tvar yamlDoc map[interface{}]interface{}\n\n\t\tif err := yaml.Unmarshal(data, &yamlDoc); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tjsonDoc, err = fmts.YAMLToJSON(yamlDoc)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn jsonDoc, nil\n}", "func setupEtcdCluster(mgr *manager.Manager, node *kubekeyapiv1alpha1.HostCfg) error {\n\tvar localPeerAddresses []string\n\toutput, _ := mgr.Runner.ExecuteCmd(\"sudo -E /bin/sh -c \\\"[ -f /etc/etcd.env ] && echo 'Configuration file already exists' || echo 'Configuration file will be created'\\\"\", 0, true)\n\tif strings.TrimSpace(output) == \"Configuration file already exists\" {\n\t\toutTmp, _ := mgr.Runner.ExecuteCmd(\"sudo cat /etc/etcd.env | awk 'NR==1{print $6}'\", 0, true)\n\t\tif outTmp != kubekeyapiv1alpha1.DefaultEtcdVersion {\n\t\t\tif err := refreshConfig(mgr, node, mgr.Runner.Index, localPeerAddresses, \"existing\"); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\tif err := helthCheck(mgr, node); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tetcdStatus = \"existing\"\n\t\tfor i := 0; i <= mgr.Runner.Index; i++ {\n\t\t\tlocalPeerAddresses = append(localPeerAddresses, fmt.Sprintf(\"etcd%d=https://%s:2380\", i+1, mgr.EtcdNodes[i].InternalAddress))\n\t\t}\n\t\tif mgr.Runner.Index == len(mgr.EtcdNodes)-1 {\n\t\t\tpeerAddresses = localPeerAddresses\n\t\t}\n\t} else {\n\t\tfor i := 0; i <= mgr.Runner.Index; i++ {\n\t\t\tlocalPeerAddresses = append(localPeerAddresses, fmt.Sprintf(\"etcd%d=https://%s:2380\", i+1, mgr.EtcdNodes[i].InternalAddress))\n\t\t}\n\t\tif mgr.Runner.Index == len(mgr.EtcdNodes)-1 {\n\t\t\tpeerAddresses = localPeerAddresses\n\t\t}\n\t\tif mgr.Runner.Index == 0 {\n\t\t\tif err := refreshConfig(mgr, node, mgr.Runner.Index, localPeerAddresses, \"new\"); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tetcdStatus = \"new\"\n\t\t} else {\n\t\t\tswitch etcdStatus {\n\t\t\tcase 
\"new\":\n\t\t\t\tif err := refreshConfig(mgr, node, mgr.Runner.Index, localPeerAddresses, \"new\"); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\tcase \"existing\":\n\t\t\t\tif err := refreshConfig(mgr, node, mgr.Runner.Index, localPeerAddresses, \"existing\"); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tjoinMemberCmd := fmt.Sprintf(\"sudo -E /bin/sh -c \\\"export ETCDCTL_API=2;export ETCDCTL_CERT_FILE='/etc/ssl/etcd/ssl/admin-%s.pem';export ETCDCTL_KEY_FILE='/etc/ssl/etcd/ssl/admin-%s-key.pem';export ETCDCTL_CA_FILE='/etc/ssl/etcd/ssl/ca.pem';%s/etcdctl --endpoints=%s member add %s %s\\\"\", node.Name, node.Name, etcdBinDir, accessAddresses, fmt.Sprintf(\"etcd%d\", mgr.Runner.Index+1), fmt.Sprintf(\"https://%s:2380\", node.InternalAddress))\n\t\t\t\t_, err := mgr.Runner.ExecuteCmd(joinMemberCmd, 2, true)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn errors.Wrap(errors.WithStack(err), \"Failed to add etcd member\")\n\t\t\t\t}\n\t\t\t\tif err := restartEtcd(mgr); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tif err := helthCheck(mgr, node); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tcheckMemberCmd := fmt.Sprintf(\"sudo -E /bin/sh -c \\\"export ETCDCTL_API=2;export ETCDCTL_CERT_FILE='/etc/ssl/etcd/ssl/admin-%s.pem';export ETCDCTL_KEY_FILE='/etc/ssl/etcd/ssl/admin-%s-key.pem';export ETCDCTL_CA_FILE='/etc/ssl/etcd/ssl/ca.pem';%s/etcdctl --no-sync --endpoints=%s member list\\\"\", node.Name, node.Name, etcdBinDir, accessAddresses)\n\t\t\t\tmemberList, err := mgr.Runner.ExecuteCmd(checkMemberCmd, 2, true)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn errors.Wrap(errors.WithStack(err), \"Failed to list etcd member\")\n\t\t\t\t}\n\t\t\t\tif !strings.Contains(memberList, fmt.Sprintf(\"https://%s:2379\", node.InternalAddress)) {\n\t\t\t\t\treturn errors.Wrap(errors.WithStack(err), \"Failed to add etcd member\")\n\t\t\t\t}\n\t\t\tdefault:\n\t\t\t\treturn errors.New(\"Failed to get etcd cluster status\")\n\t\t\t}\n\t\t}\n\n\t}\n\treturn 
nil\n}", "func (n *Node) IsExternalEtcd() bool {\n\treturn n.Role == ExternalEtcdRole\n}", "func ReadConfigFile(configPath string, monitor bool, logger log15.Logger) (*Config, error) {\n\tif !utils.PathExists(configPath) {\n\t\treturn nil, fmt.Errorf(\"The configuration file doesn't exist: %s\", configPath)\n\t}\n\n\tlogger.Info(\"Parsing configuration\", log15.Ctx{\"path\": configPath})\n\n\tconf := Config{logger: logger}\n\terr := parseConfig(configPath, &conf.Config)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Watch for configuration changes\n\tif monitor {\n\t\tlogger.Info(\"Setting up configuration watch\", log15.Ctx{\"path\": configPath})\n\n\t\twatcher, err := fsnotify.NewWatcher()\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"Unable to setup fsnotify: %v\", err)\n\t\t}\n\n\t\terr = watcher.Add(filepath.Dir(configPath))\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"Unable to setup fsnotify watch: %v\", err)\n\t\t}\n\n\t\tpathDir := filepath.Dir(configPath)\n\t\tif pathDir == \"\" {\n\t\t\tpathDir = \"./\"\n\t\t}\n\t\tpathBase := filepath.Base(configPath)\n\n\t\tgo func() {\n\t\t\tfor {\n\t\t\t\tselect {\n\t\t\t\tcase ev := <-watcher.Events:\n\t\t\t\t\tif ev.Name != fmt.Sprintf(\"%s/%s\", pathDir, pathBase) {\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\n\t\t\t\t\t// Store the old config for comparison\n\t\t\t\t\toldData, _ := yaml.Marshal(conf.Config)\n\n\t\t\t\t\t// Wait for 1s for ownership changes\n\t\t\t\t\ttime.Sleep(time.Second)\n\n\t\t\t\t\t// Parse the new ocnfig\n\t\t\t\t\terr := parseConfig(configPath, conf.Config)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tlogger.Error(\"Failed to read the new configuration\", log15.Ctx{\"path\": configPath, \"error\": err})\n\t\t\t\t\t}\n\n\t\t\t\t\t// Check if something changed\n\t\t\t\t\tnewData, _ := yaml.Marshal(conf.Config)\n\t\t\t\t\tif string(oldData) == string(newData) {\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\n\t\t\t\t\tlogger.Info(\"Configuration file changed, reloading\", log15.Ctx{\"path\": 
configPath})\n\t\t\t\t\tfor _, handler := range conf.handlers {\n\t\t\t\t\t\thandler(&conf)\n\t\t\t\t\t}\n\t\t\t\tcase err := <-watcher.Errors:\n\t\t\t\t\tlogger.Error(\"Got bad file notification\", log15.Ctx{\"error\": err})\n\t\t\t\t}\n\t\t\t}\n\t\t}()\n\t}\n\n\treturn &conf, nil\n}", "func TestIsDir(T *testing.T) {\n\n\tvar db etcdDB\n\n\tts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tfmt.Fprintln(w, `{\"action\":\"get\",\"node\":{\"key\":\"/testDir\",\"value\":\"Hello\",\"modifiedIndex\":4,\"createdIndex\":4}}`)\n\t}))\n\tdefer ts.Close()\n\n\tconfig := ts.URL\n\n\tdb.Cfg = cli.Config{\n\t\tEndpoints: []string{config},\n\t\tTransport: cli.DefaultTransport,\n\t\t// set timeout per request to fail fast when the target endpoint is unavailable\n\t\tHeaderTimeoutPerRequest: time.Second,\n\t}\n\n\tdb.C, _ = cli.New(db.Cfg)\n\n\tdb.Kapi = cli.NewKeysAPI(db.C)\n\tdb.Ctx = context.Background()\n\n\tdb.IsDir(\"/testDir\")\n\n}", "func (tc *TestConfig) Path() string {\n\treturn tc.configPath\n}", "func GetEtcdChartPath() string {\n\treturn filepath.Join(\"..\", \"..\", \"..\", \"..\", \"charts\", \"etcd\")\n}", "func (c *Config) WasLoadedFromFile() bool {\n\treturn c.loadedFromFilepath != \"\"\n}", "func (m *Meta) dirIsConfigPath(dir string) bool {\n\tloader, err := m.initConfigLoader()\n\tif err != nil {\n\t\treturn true\n\t}\n\n\treturn loader.IsConfigDir(dir)\n}", "func getConfigPath() (string, error) {\n\treturn \"./veille.yaml\", nil\n}", "func (s *site) getEtcdConfig(ctx context.Context, opCtx *operationContext, server *ProvisionedServer) (*etcdConfig, error) {\n\tetcdClient, err := clients.DefaultEtcdMembers()\n\tif err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\tmembers, err := etcdClient.List(ctx)\n\tif err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\tinitialCluster := []string{opCtx.provisionedServers.InitialCluster(s.domainName)}\n\t// add existing members\n\tfor _, member := range members 
{\n\t\taddress, err := utils.URLHostname(member.PeerURLs[0])\n\t\tif err != nil {\n\t\t\treturn nil, trace.Wrap(err)\n\t\t}\n\t\tinitialCluster = append(initialCluster, fmt.Sprintf(\"%s:%s\",\n\t\t\tmember.Name, address))\n\t}\n\tproxyMode := etcdProxyOff\n\tif !server.IsMaster() {\n\t\tproxyMode = etcdProxyOn\n\t}\n\treturn &etcdConfig{\n\t\tinitialCluster: strings.Join(initialCluster, \",\"),\n\t\tinitialClusterState: etcdExistingCluster,\n\t\tproxyMode: proxyMode,\n\t}, nil\n}", "func (c *Config) Has(path string) bool {\n\tv := c.Get(path)\n\treturn v.raw != nil\n}", "func (c *Config) GetEtcdDB() store.Config {\n\treturn store.Config{\n\t\tPrefix: \"lastbackend\",\n\t\tEndpoints: *c.Etcd.Endpoints,\n\t\tKeyFile: *c.Etcd.TLS.Key,\n\t\tCertFile: *c.Etcd.TLS.Cert,\n\t\tCAFile: *c.Etcd.TLS.CA,\n\t\tCodec: serializer.NewSerializer(json.Encoder{}, json.Decoder{}),\n\t}\n}", "func TestConfigReloadNoConfigFile(t *testing.T) {\n\tserver := New(&Options{NoSigs: true})\n\tloaded := server.ConfigTime()\n\tif server.Reload() == nil {\n\t\tt.Fatal(\"Expected Reload to return an error\")\n\t}\n\tif reloaded := server.ConfigTime(); reloaded != loaded {\n\t\tt.Fatalf(\"ConfigTime is incorrect.\\nexpected: %s\\ngot: %s\", loaded, reloaded)\n\t}\n}", "func ConfigFile(inp string) string {\n\tif inp != \"\" {\n\t\tpath := ExpandUser(inp)\n\t\tif FileExists(path) {\n\t\t\treturn path\n\t\t}\n\t}\n\n\tif env := os.Getenv(\"DOLA_CONFIG\"); env != \"\" {\n\t\tpath := ExpandUser(env)\n\t\tif FileExists(path) {\n\t\t\treturn path\n\t\t}\n\t}\n\n\tif path := ExpandUser(\"~/.dola/config.json\"); FileExists(path) {\n\t\treturn path\n\t}\n\n\treturn \"\"\n}", "func EtcdConfig(urls []string) client.Config {\n\tcustomTransport := GenerateTransport()\n\tc := client.Config{\n\t\tEndpoints: urls,\n\t\tTransport: customTransport,\n\t\tHeaderTimeoutPerRequest: time.Second * 5,\n\t}\n\tc.Username = os.Getenv(\"ETCD_USERNAME\")\n\tc.Password = os.Getenv(\"ETCD_PASSWORD\")\n\treturn c\n}", "func 
GetEtcdVersion(ec EtcdConfig) (string, string, error) {\n\t// The next etcd release (1.4) will have client.GetVersion()\n\t// We'll use this to test our etcd connection for now\n\tetcdURL := fmt.Sprintf(\"http://%s:%v/version\", ec.EtcdHost, ec.EtcdPort)\n\tresp, err := http.Get(etcdURL)\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\tdefer resp.Body.Close()\n\tbody, _ := ioutil.ReadAll(resp.Body)\n\n\tswitch resp.StatusCode {\n\tcase http.StatusOK:\n\t\tvar vresp version.Versions\n\t\tif err := json.Unmarshal(body, &vresp); err != nil {\n\t\t\treturn \"\", \"\", err\n\t\t}\n\t\treturn vresp.Server, vresp.Cluster, nil\n\tdefault:\n\t\tvar connectErr error\n\t\tif err := json.Unmarshal(body, &connectErr); err != nil {\n\t\t\treturn \"\", \"\", err\n\t\t}\n\t\treturn \"\", \"\", connectErr\n\t}\n}", "func GetConfigFile() string {\n\treturn *configFile\n}", "func readConfigFile(filename, fileFormat string) (*vaultAuthOptions, error) {\n\t// step: check the file exists\n\tif exists, err := fileExists(filename); !exists {\n\t\treturn nil, fmt.Errorf(\"the file: %s does not exist\", filename)\n\t} else if err != nil {\n\t\treturn nil, err\n\t}\n\t// step: we only read in json or yaml formats\n\tsuffix := path.Ext(filename)\n\tswitch suffix {\n\tcase \".yaml\":\n\t\tfallthrough\n\tcase \".yml\":\n\t\treturn readYAMLFile(filename)\n\tdefault:\n\t\treturn readJSONFile(filename, fileFormat)\n\t}\n}", "func getConfigFilePath() string {\n\tvar configFile string\n\tflag.StringVar(&configFile, \"config\", \"./config.json\", \"JSON config file path\")\n\tflag.Parse()\n\n\tlog.Printf(\"Using config file %s\", configFile)\n\n\treturn configFile\n}", "func GetMD5(configFile string) ([]byte, error) {\n\tfile, err := os.Open(configFile)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdefer file.Close()\n\n\thash := md5.New()\n\tif _, err := io.Copy(hash, file); err != nil {\n\t\treturn nil, err\n\t}\n\n\tconfigMD5 := hash.Sum(nil)\n\treturn configMD5, nil\n}", "func (m 
*Manager) GetEtcPath() string {\n\treturn m.etcPath\n}", "func GetConfigFile(toData *t3cutil.ConfigData, fileInfo atscfg.CfgMeta, hdrCommentTxt string, thiscfg config.Cfg) (string, string, bool, string, []string, error) {\n\tstart := time.Now()\n\tdefer func() {\n\t\tlog.Infof(\"GetConfigFile %v took %v\\n\", fileInfo.Name, time.Since(start).Round(time.Millisecond))\n\t}()\n\tlog.Infoln(\"GetConfigFile '\" + fileInfo.Name + \"'\")\n\n\tgetConfigFile := getConfigFileFunc(fileInfo.Name)\n\tcfg, err := getConfigFile(toData, fileInfo.Name, hdrCommentTxt, thiscfg)\n\tlogWarnings(\"getting config file '\"+fileInfo.Name+\"': \", cfg.Warnings)\n\n\tif err != nil {\n\t\treturn \"\", \"\", false, \"\", []string{}, err\n\t}\n\treturn cfg.Text, cfg.ContentType, cfg.Secure, cfg.LineComment, cfg.Warnings, nil\n}", "func (a *App) GetConfigFile(name string) ([]byte, error) {\n\tdata, err := a.Srv().configStore.GetFile(name)\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"failed to get config file %s\", name)\n\t}\n\n\treturn data, nil\n}", "func (a *App) GetConfigFile(name string) ([]byte, error) {\n\tdata, err := a.Srv().configStore.GetFile(name)\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"failed to get config file %s\", name)\n\t}\n\n\treturn data, nil\n}", "func configPath() (string, error) {\n\thome, err := sys.GetHomeDir()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn filepath.Join(home, \".keeper\", \"config.json\"), nil\n}", "func TestGetValidConfig(T *testing.T) {\n\n\tvar db etcdDB\n\n\tts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tfmt.Fprintln(w, `{\"action\":\"get\",\"node\":{\"key\":\"/testDir\",\"value\":\"Hello\",\"modifiedIndex\":4,\"createdIndex\":4}}`)\n\t}))\n\tdefer ts.Close()\n\n\tconfig := ts.URL\n\n\tdb.Cfg = cli.Config{\n\t\tEndpoints: []string{config},\n\t\tTransport: cli.DefaultTransport,\n\t\t// set timeout per request to fail fast when the target endpoint is 
unavailable\n\t\tHeaderTimeoutPerRequest: time.Second,\n\t}\n\n\tdb.C, _ = cli.New(db.Cfg)\n\n\tdb.Kapi = cli.NewKeysAPI(db.C)\n\tdb.Ctx = context.Background()\n\n\tdb.Get(\"/testDir\")\n\n}", "func TestCloudConfig(t *testing.T) {\n\tcontents := []byte(`\ncoreos: \n etcd:\n discovery_url: \"https://discovery.etcd.io/827c73219eeb2fa5530027c37bf18877\"\n fleet:\n autostart: Yes\n units:\n - name: 50-eth0.network\n runtime: yes\n content: '[Match]\n \n Name=eth47\n \n \n [Network]\n \n Address=10.209.171.177/19\n \n'\nssh_authorized_keys:\n - foobar\n - foobaz\nwrite_files:\n - content: |\n penny\n elroy\n path: /etc/dogepack.conf\n permissions: '0644'\n owner: root:dogepack\n`)\n\tcfg, err := NewCloudConfig(contents)\n\tif err != nil {\n\t\tt.Fatalf(\"Encountered unexpected error :%v\", err)\n\t}\n\n\tkeys := cfg.SSH_Authorized_Keys\n\tif len(keys) != 2 {\n\t\tt.Error(\"Parsed incorrect number of SSH keys\")\n\t} else if keys[0] != \"foobar\" {\n\t\tt.Error(\"Expected first SSH key to be 'foobar'\")\n\t} else if keys[1] != \"foobaz\" {\n\t\tt.Error(\"Expected first SSH key to be 'foobaz'\")\n\t}\n\n\tif cfg.Coreos.Etcd.Discovery_URL != \"https://discovery.etcd.io/827c73219eeb2fa5530027c37bf18877\" {\n\t\tt.Error(\"Failed to parse etcd discovery url\")\n\t}\n\n\tif !cfg.Coreos.Fleet.Autostart {\n\t\tt.Error(\"Expected AutostartFleet to be true\")\n\t}\n\n\tif len(cfg.Write_Files) != 1 {\n\t\tt.Error(\"Failed to parse correct number of write_files\")\n\t} else {\n\t\twf := cfg.Write_Files[0]\n\t\tif wf.Content != \"penny\\nelroy\\n\" {\n\t\t\tt.Errorf(\"WriteFile has incorrect contents '%s'\", wf.Content)\n\t\t}\n\t\tif wf.Encoding != \"\" {\n\t\t\tt.Errorf(\"WriteFile has incorrect encoding %s\", wf.Encoding)\n\t\t}\n\t\tif wf.Permissions != \"0644\" {\n\t\t\tt.Errorf(\"WriteFile has incorrect permissions %s\", wf.Permissions)\n\t\t}\n\t\tif wf.Path != \"/etc/dogepack.conf\" {\n\t\t\tt.Errorf(\"WriteFile has incorrect path %s\", wf.Path)\n\t\t}\n\t\tif wf.Owner != 
\"root:dogepack\" {\n\t\t\tt.Errorf(\"WriteFile has incorrect owner %s\", wf.Owner)\n\t\t}\n\t}\n\n\tif len(cfg.Coreos.Units) != 1 {\n\t\tt.Error(\"Failed to parse correct number of units\")\n\t} else {\n\t\tu := cfg.Coreos.Units[0]\n\t\texpect := `[Match]\nName=eth47\n\n[Network]\nAddress=10.209.171.177/19\n`\n\t\tif u.Content != expect {\n\t\t\tt.Errorf(\"Unit has incorrect contents '%s'.\\nExpected '%s'.\", u.Content, expect)\n\t\t}\n\t\tif u.Runtime != true {\n\t\t\tt.Errorf(\"Unit has incorrect runtime value\")\n\t\t}\n\t\tif u.Name != \"50-eth0.network\" {\n\t\t\tt.Errorf(\"Unit has incorrect name %s\", u.Name)\n\t\t}\n\t\tif u.Type() != \"network\" {\n\t\t\tt.Errorf(\"Unit has incorrect type '%s'\", u.Type())\n\t\t}\n\t}\n\n}", "func TestConfig_ParsePanic(t *testing.T) {\n\tci.Parallel(t)\n\n\tc, err := ParseConfigFile(\"./testdata/obj-len-one.hcl\")\n\tif err != nil {\n\t\tt.Fatalf(\"parse error: %s\\n\", err)\n\t}\n\n\td, err := ParseConfigFile(\"./testdata/obj-len-one.json\")\n\tif err != nil {\n\t\tt.Fatalf(\"parse error: %s\\n\", err)\n\t}\n\n\trequire.EqualValues(t, c, d)\n}", "func (a *appState) loadConfigFile(ctx context.Context) error {\n\tcfgPath := a.configPath()\n\n\tif _, err := os.Stat(cfgPath); err != nil {\n\t\t// don't return error if file doesn't exist\n\t\treturn nil\n\t}\n\n\t// read the config file bytes\n\tfile, err := os.ReadFile(cfgPath)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error reading file: %w\", err)\n\t}\n\n\t// unmarshall them into the wrapper struct\n\tcfgWrapper := &ConfigInputWrapper{}\n\terr = yaml.Unmarshal(file, cfgWrapper)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error unmarshalling config: %w\", err)\n\t}\n\n\t// retrieve the runtime configuration from the disk configuration.\n\tnewCfg, err := cfgWrapper.RuntimeConfig(ctx, a)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// validate runtime configuration\n\tif err := newCfg.validateConfig(); err != nil {\n\t\treturn fmt.Errorf(\"error parsing chain config: %w\", 
err)\n\t}\n\n\t// save runtime configuration in app state\n\ta.config = newCfg\n\n\treturn nil\n}", "func fileChanged(repo *git.Repository, path string) (bool, error) {\n\tstatus, err := repo.StatusFile(path)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tif status == git.StatusWtNew || status == git.StatusWtModified ||\n\t\tstatus == git.StatusWtDeleted {\n\t\treturn true, nil\n\t}\n\n\treturn false, nil\n}", "func (c baseClient) getEngineConfigFilePath(ctx context.Context, engine containerd.Container) (string, error) {\n\tspec, err := engine.Spec(ctx)\n\tconfigFile := \"\"\n\tif err != nil {\n\t\treturn configFile, err\n\t}\n\tfor i := 0; i < len(spec.Process.Args); i++ {\n\t\targ := spec.Process.Args[i]\n\t\tif strings.HasPrefix(arg, \"--config-file\") {\n\t\t\tif strings.Contains(arg, \"=\") {\n\t\t\t\tsplit := strings.SplitN(arg, \"=\", 2)\n\t\t\t\tconfigFile = split[1]\n\t\t\t} else {\n\t\t\t\tif i+1 >= len(spec.Process.Args) {\n\t\t\t\t\treturn configFile, ErrMalformedConfigFileParam\n\t\t\t\t}\n\t\t\t\tconfigFile = spec.Process.Args[i+1]\n\t\t\t}\n\t\t}\n\t}\n\n\tif configFile == \"\" {\n\t\t// TODO - any more diagnostics to offer?\n\t\treturn configFile, ErrEngineConfigLookupFailure\n\t}\n\treturn configFile, nil\n}", "func (c baseClient) getEngineConfigFilePath(ctx context.Context, engine containerd.Container) (string, error) {\n\tspec, err := engine.Spec(ctx)\n\tconfigFile := \"\"\n\tif err != nil {\n\t\treturn configFile, err\n\t}\n\tfor i := 0; i < len(spec.Process.Args); i++ {\n\t\targ := spec.Process.Args[i]\n\t\tif strings.HasPrefix(arg, \"--config-file\") {\n\t\t\tif strings.Contains(arg, \"=\") {\n\t\t\t\tsplit := strings.SplitN(arg, \"=\", 2)\n\t\t\t\tconfigFile = split[1]\n\t\t\t} else {\n\t\t\t\tif i+1 >= len(spec.Process.Args) {\n\t\t\t\t\treturn configFile, ErrMalformedConfigFileParam\n\t\t\t\t}\n\t\t\t\tconfigFile = spec.Process.Args[i+1]\n\t\t\t}\n\t\t}\n\t}\n\n\tif configFile == \"\" {\n\t\t// TODO - any more diagnostics to 
offer?\n\t\treturn configFile, ErrEngineConfigLookupFailure\n\t}\n\treturn configFile, nil\n}", "func (c *RunCommand) readConfigFile() (string, error) {\n\tif _, err := os.Stat(c.Config); os.IsNotExist(err) {\n\t\treturn \"\", fmt.Errorf(\"config file not found, looking in: %s\", c.Config)\n\t}\n\n\tdata, err := ioutil.ReadFile(c.Config)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn string(data), nil\n}", "func GetConfigFilePath() string {\n\tpath, _ := osext.ExecutableFolder()\n\tpath = fmt.Sprintf(\"%s/eremetic.yml\", path)\n\tif _, err := os.Open(path); err == nil {\n\t\treturn path\n\t}\n\tglobalPath := \"/etc/eremetic/eremetic.yml\"\n\tif _, err := os.Open(globalPath); err == nil {\n\t\treturn globalPath\n\t}\n\n\treturn \"\"\n}", "func getConfigPath(path string) (file string) {\r\n\treturn fmt.Sprintf(\"%s/%s\", path, \"app.ini\")\r\n}", "func (c Config) GetConfigFile() string {\n\treturn c.viper.GetString(configFile)\n}", "func (tail *Tail) isFileDeleted() bool {\n\treturn false\n}", "func (n *Node) IsExternalEtcd() bool {\n\treturn n.Role() == constants.ExternalEtcdNodeRoleValue\n}", "func getConfigFile() (*os.File, error) {\n\treturn os.Open(env.WorkDir() + \"/configs/config.json\")\n}", "func isMdFile(path string) bool {\n\treturn strings.HasSuffix(path, \".md\")\n}", "func (v *VersionFile) equals(vp *EtcdVersionPair) (bool, error) {\n\texists, err := v.Exists()\n\tif err != nil {\n\t\treturn false, err\n\t}\n\tif !exists {\n\t\treturn false, nil\n\t}\n\tcvp, err := v.Read()\n\tif err != nil {\n\t\treturn false, err\n\t}\n\treturn vp.Equals(cvp), nil\n}", "func hasConfigFileOption(unknownArgs []string) bool {\n\tconfigFileOptions := configFileOptions()\n\tisConfigFileOption := func(option string) bool {\n\t\tfor _, configFileOption := range configFileOptions {\n\t\t\tif configFileOption == option {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t\treturn false\n\t}\n\n\tfor _, arg := range unknownArgs {\n\t\tif 
isConfigFileOption(strings.TrimSpace(arg)) {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}", "func getConfigFilePath() string {\n\tpathList := [5]string{\n\t\t\"config.json\",\n\t\t\"../config.json\",\n\t\t\"../../config.json\",\n\t\t\"../../../config.json\",\n\t\t\"../../../../config.json\",\n\t}\n\n\t_, b, _, _ := runtime.Caller(0)\n\tfilePath := filepath.Dir(b)\n\tfilePath = filepath.Join(filePath, \"../config.json\")\n\n\tpath, err := os.Getwd()\n\tif err == nil {\n\t\tfor _, configPath := range pathList {\n\t\t\tprocessFilePath := filepath.Join(path, configPath)\n\t\t\texist, _ := exists(processFilePath)\n\t\t\tif exist == true {\n\t\t\t\tfilePath = processFilePath\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\n\treturn filePath\n}", "func getConfig(fpath string) {\n\traw, err := ioutil.ReadFile(fpath)\n\tif err != nil {\n\t\tglog.Errorf(\"Failed to read config %q, err: %v\", fpath, err)\n\t\tos.Exit(1)\n\t}\n\terr = json.Unmarshal(raw, &ctx.config)\n\tif err != nil {\n\t\tglog.Errorf(\"Failed to json-unmarshal config %q, err: %v\", fpath, err)\n\t\tos.Exit(1)\n\t}\n}", "func filePath() []byte {\n\tconfigFileName := \"config.dev.json\"\n\tif isProd() {\n\t\tconfigFileName = \"config.prod.json\"\n\t}\n\treturn []byte(fmt.Sprintf(\"%s/%s\", directoryPath, configFileName))\n}", "func (conf *Config) ParseConfigFile(path string) error {\n\tdata, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := json.Unmarshal(data, &conf); err != nil {\n\t\treturn err\n\t}\n\n\tlog.WithFields(log.Fields{\n\t\t\"config\": conf,\n\t}).Info(\"Config file parsed\")\n\n\treturn nil\n}", "func (c *MetaConfig) ValidFilePath(path string) bool {\n\t// for us, we'll consider empty string to be the default path\n\tif path == \"\" {\n\t\treturn true\n\t}\n\tif strings.HasPrefix(path, \"~/\") {\n\t\treturn c.ValidFilePath(ReplaceHomeDir(path))\n\t}\n\n\t// Check if file already exists\n\tif _, err := os.Stat(path); err == nil {\n\t\treturn true\n\t}\n\t// Write 
file and remove it\n\tif err := ioutil.WriteFile(path, []byte{}, 0644); err == nil {\n\t\tos.Remove(path)\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (c Config) ShouldExcludePath(path string) bool {\n\tif strings.HasSuffix(path, \"t8.hcl\") ||\n\t\tstrings.HasSuffix(path, \"t8.yml\") ||\n\t\tstrings.HasSuffix(path, \"before.t8\") ||\n\t\tstrings.HasSuffix(path, \"after.t8\") {\n\t\treturn true\n\t}\n\n\tfor _, excludePath := range c.ExcludePaths {\n\t\tif isUnconditionalExcludePath(excludePath) {\n\t\t\tfor _, excludePath := range excludePath.Paths {\n\t\t\t\tmatched, _ := regexp.MatchString(excludePath, path)\n\t\t\t\tif matched {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tfor _, param := range c.Parameters {\n\n\t\t\tif isConditionalExcludePath(excludePath) {\n\t\t\t\tif excludePath.ParameterName == param.Name {\n\n\t\t\t\t\tif excludePath.Operator == NotEqual && excludePath.ParameterValue != param.Actual {\n\t\t\t\t\t\tfor _, excludePath := range excludePath.Paths {\n\t\t\t\t\t\t\tmatched, _ := regexp.MatchString(excludePath, path)\n\t\t\t\t\t\t\tif matched {\n\t\t\t\t\t\t\t\treturn true\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif excludePath.Operator == Equal && excludePath.ParameterValue == param.Actual {\n\t\t\t\t\t\tfor _, excludePath := range excludePath.Paths {\n\t\t\t\t\t\t\tmatched, _ := regexp.MatchString(excludePath, path)\n\t\t\t\t\t\t\tif matched {\n\t\t\t\t\t\t\t\treturn true\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn false\n}", "func isPathASystemdDropin(path string) (bool, string, string) {\n\tif !strings.HasPrefix(path, \"/etc/systemd/system\") {\n\t\treturn false, \"\", \"\"\n\t}\n\tif !strings.HasSuffix(path, \".conf\") {\n\t\treturn false, \"\", \"\"\n\t}\n\tpathSegments := strings.Split(path, \"/\")\n\tdropinName := pathSegments[len(pathSegments)-1]\n\tservicePart := pathSegments[len(pathSegments)-2]\n\tallServiceSegments := strings.Split(servicePart, 
\".\")\n\tif allServiceSegments[len(allServiceSegments)-1] != \"d\" {\n\t\treturn false, \"\", \"\"\n\t}\n\tserviceName := strings.Join(allServiceSegments[:len(allServiceSegments)-1], \".\")\n\treturn true, serviceName, dropinName\n}", "func getEtcdBucket(path string) string {\n\tidx := strings.LastIndex(path, \"/\")\n\tif idx == -1 {\n\t\tpanic(\"path with no slashes \" + path)\n\t}\n\tbucket := path[:idx]\n\tif len(bucket) == 0 {\n\t\tpanic(\"invalid bucket for path \" + path)\n\t}\n\treturn bucket\n}", "func parseConfig(path string) (Config, error) {\n\tconfig := Config{}\n\tfile, err := os.Open(path)\n\tdefer file.Close()\n\tif err != nil {\n\t\treturn config, err\n\t}\n\tdecoder := json.NewDecoder(file)\n\terr = decoder.Decode(&config)\n\tif err != nil {\n\t\treturn config, err\n\t}\n\treturn config, nil\n}", "func NewEtcdStorage(t *testing.T) (*storagebackend.Config, *etcdtesting.EtcdTestServer) {\n\tserver, config := etcdtesting.NewUnsecuredEtcd3TestClientServer(t)\n\tmediaType, _, err := mime.ParseMediaType(runtime.ContentTypeJSON)\n\tif err != nil {\n\t\tt.Errorf(\"failed to parse media type: %v\", err)\n\t}\n\tstorageSerializer, ok := runtime.SerializerInfoForMediaType(api.Codecs.SupportedMediaTypes(), mediaType)\n\tif !ok {\n\t\tt.Errorf(\"no serializer for %s\", mediaType)\n\t}\n\ts := storageSerializer.Serializer\n\tds := recognizer.NewDecoder(s, api.Codecs.UniversalDeserializer())\n\tconfig.Codec = api.Codecs.CodecForVersions(s, ds, schema.GroupVersions{coapi.SchemeGroupVersion}, nil)\n\treturn config, server\n}", "func (*Data_Etcd) Descriptor() ([]byte, []int) {\n\treturn file_internal_conf_conf_proto_rawDescGZIP(), []int{1, 0}\n}", "func isELF(path string) (bool, error) {\n\tfd, err := elf.Open(path)\n\tif err != nil {\n\t\tif strings.Contains(err.Error(), \"bad magic number\") {\n\t\t\treturn false, nil\n\t\t}\n\t\treturn false, err\n\t}\n\tfd.Close()\n\treturn true, nil\n}", "func (k *kubelet) configFile() (string, error) {\n\tconfig := 
&kubeletconfig.KubeletConfiguration{\n\t\tTypeMeta: v1.TypeMeta{\n\t\t\tKind: \"KubeletConfiguration\",\n\t\t\tAPIVersion: kubeletconfig.SchemeGroupVersion.String(),\n\t\t},\n\t\t// Enables TLS certificate rotation, which is good from security point of view.\n\t\tRotateCertificates: true,\n\t\t// Request HTTPS server certs from API as well, so kubelet does not generate self-signed certificates.\n\t\tServerTLSBootstrap: true,\n\t\t// If Docker is configured to use systemd as a cgroup driver and Docker is used as container\n\t\t// runtime, this needs to be set to match Docker.\n\t\t// TODO pull that information dynamically based on what container runtime is configured.\n\t\tCgroupDriver: k.config.CgroupDriver,\n\t\t// Address where kubelet should listen on.\n\t\tAddress: k.config.Address,\n\t\t// Disable healht port for now, since we don't use it.\n\t\t// TODO check how to use it and re-enable it.\n\t\tHealthzPort: &[]int32{0}[0],\n\t\t// Set up cluster domain. Without this, there is no 'search' field in /etc/resolv.conf in containers, so\n\t\t// short-names resolution like mysvc.myns.svc does not work.\n\t\tClusterDomain: \"cluster.local\",\n\t\t// Authenticate clients using CA file.\n\t\tAuthentication: kubeletconfig.KubeletAuthentication{\n\t\t\tX509: kubeletconfig.KubeletX509Authentication{\n\t\t\t\tClientCAFile: \"/etc/kubernetes/pki/ca.crt\",\n\t\t\t},\n\t\t},\n\n\t\t// This defines where should pods cgroups be created, like /kubepods and /kubepods/burstable.\n\t\t// Also when specified, it suppresses a lot message about it.\n\t\tCgroupRoot: \"/\",\n\n\t\t// Used for calculating node allocatable resources.\n\t\t// If EnforceNodeAllocatable has 'system-reserved' set, those limits will be enforced on cgroup specified\n\t\t// with SystemReservedCgroup.\n\t\tSystemReserved: k.config.SystemReserved,\n\n\t\t// Used for calculating node allocatable resources.\n\t\t// If EnforceNodeAllocatable has 'kube-reserved' set, those limits will be enforced on cgroup 
specified\n\t\t// with KubeReservedCgroup.\n\t\tKubeReserved: k.config.KubeReserved,\n\n\t\tClusterDNS: k.config.ClusterDNSIPs,\n\n\t\tHairpinMode: k.config.HairpinMode,\n\t}\n\n\tkubelet, err := yaml.Marshal(config)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"serializing to YAML: %w\", err)\n\t}\n\n\treturn string(kubelet), nil\n}", "func getConfig(path string) (*config, error) {\n\tf, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcfg := &config{}\n\tif err := yaml.Unmarshal(f, &cfg); err != nil {\n\t\treturn nil, err\n\t}\n\tfmt.Println(cfg)\n\n\treturn cfg, nil\n\n}", "func (n *NginxConfigFileTemplete) GetConfigFileDirPath() string {\n\treturn n.configFileDirPath\n}", "func getHash(path string) string {\n myfile, err := os.Open(path)\n if err != nil {\n log.Fatal(\"ERROR: problem opening path:\", err)\n }\n defer myfile.Close()\n \n hasher := sha256.New()\n\n if _, err := io.Copy(hasher, myfile); err != nil {\n log.Fatal(\"ERROR: problem copying file into hasher:\", err)\n }\n \n return hex.EncodeToString(hasher.Sum(nil))\n}", "func (g *GenericVaultBackend) EncryptionConfigPath() string {\n\treturn filepath.Join(g.Path(), \"encryption-config\")\n}", "func getConfigFilename(context *cli.Context) (string, bool) {\n\tcf := context.GlobalString(\"config-filename\")\n\n\tif filepath.IsAbs(cf) {\n\t\t// Absolute path specified; user knows what they want.\n\t\t_, err := os.Stat(cf)\n\t\treturn cf, err == nil\n\t}\n\n\tabsCF, err := filepath.Abs(cf)\n\tif err != nil {\n\t\t// syscall failure; treat as if file doesn't exist.\n\t\treturn cf, false\n\t}\n\tif _, err := os.Stat(absCF); err == nil {\n\t\t// File exists on relative path.\n\t\treturn absCF, true\n\t}\n\n\tif xdgCF, err := xdg.Config.Find(cf); err == nil {\n\t\t// File exists in an XDG directory.\n\t\treturn xdgCF, true\n\t}\n\n\t// Default to relative path. 
This is probably what the user expects if\n\t// it wasn't found anywhere else.\n\treturn absCF, false\n}", "func (p EksProvisioner) writeConfigFile() (string, error) {\n\tif len(p.eksConfig.Params.ConfigFile) > 0 {\n\n\t\t// marshal the struct to YAML\n\t\tyamlBytes, err := yaml.Marshal(&p.eksConfig.Params.ConfigFile)\n\t\tif err != nil {\n\t\t\treturn \"\", errors.WithStack(err)\n\t\t}\n\n\t\tyamlString := string(yamlBytes[:])\n\n\t\t// write the config to a temporary file\n\t\ttmpfile, err := ioutil.TempFile(\"\", \"eks.*.yaml\")\n\t\tif err != nil {\n\t\t\treturn \"\", errors.WithStack(err)\n\t\t}\n\n\t\tdefer tmpfile.Close()\n\n\t\tif _, err := tmpfile.Write([]byte(yamlString)); err != nil {\n\t\t\treturn \"\", errors.WithStack(err)\n\t\t}\n\t\tif err := tmpfile.Close(); err != nil {\n\t\t\treturn \"\", errors.WithStack(err)\n\t\t}\n\n\t\tlog.Logger.Debugf(\"EKS config file written to: %s\", tmpfile.Name())\n\n\t\treturn tmpfile.Name(), nil\n\n\t} else {\n\t\tlog.Logger.Infof(\"No EKS config file data configured. 
No config file path will be passed \" +\n\t\t\t\"to eksctl commands\")\n\n\t\treturn \"\", nil\n\t}\n}", "func (c *EtcdConfig) Validate() error {\n\t_, err := govalidator.ValidateStruct(c)\n\treturn err\n}", "func GetEdgercPath(c *cli.Context) string {\n\tedgercPath := c.String(\"edgerc\")\n\tif edgercPath == \"\" {\n\t\treturn edgegrid.DefaultConfigFile\n\t}\n\treturn edgercPath\n}", "func (m *Meta) HasChanged(ctx context.Context, fs afs.Service) (bool, error) {\n\tif m.baseURL == \"\" {\n\t\treturn false, nil\n\t}\n\tif !m.isCheckDue(time.Now()) {\n\t\treturn false, nil\n\t}\n\n\troutes, err := fs.List(ctx, m.baseURL, option.NewRecursive(true))\n\tif err != nil {\n\t\treturn false, errors.Wrapf(err, \"failed to load rules %v\", m.baseURL)\n\t}\n\tif !m.hasChanges(routes) {\n\t\treturn false, nil\n\t}\n\tm.mutex.Lock()\n\tdefer m.mutex.Unlock()\n\tm.routes = make(map[string]time.Time)\n\tfor _, route := range routes {\n\t\tif route.IsDir() || !(path.Ext(route.Name()) == \".json\" || path.Ext(route.Name()) == \".yaml\") {\n\t\t\tcontinue\n\t\t}\n\t\tm.routes[route.URL()] = route.ModTime()\n\t}\n\treturn true, nil\n}", "func mustConfFile(configPath string) (string, error) {\n\t_, err := os.Stat(configPath)\n\tif os.IsNotExist(err) {\n\t\treturn \"\", err\n\t}\n\treturn configPath, nil\n}", "func (c *Config) Filepath() string {\n\treturn c.loadedFromFilepath\n}", "func GetCfgPath(c domain.CLIContext) string {\n\treturn c.String(cfgPathKey)\n}", "func ParseConfig(path string) Config {\n file, err := os.Open(path)\n if err != nil {\n log.Panicf(\"Can't load config: %s\", err.Error())\n }\n decoder := json.NewDecoder(file)\n var config Config\n err = decoder.Decode(&config)\n if err != nil {\n log.Panicf(\"Can't parse config file: %s. 
Error: %s\", path, err.Error())\n }\n return config\n}", "func ReadConfigFile(conf *Config, path string) {\n\tfile, err := os.Open(path)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tconfigFile, _ := ioutil.ReadAll(file)\n\tyaml.Unmarshal(configFile, conf)\n\n\tif conf.DatabaseDriver == \"boltdb\" && conf.DatabasePath == \"\" {\n\t\tconf.DatabasePath = \"db/eremetic.db\"\n\t}\n}", "func ValidateAPIServerETCDConfiguration(config imports.APIServerEtcdConfiguration, fldPath *field.Path) field.ErrorList {\n\tallErrs := field.ErrorList{}\n\n\tif len(config.Url) == 0 {\n\t\tallErrs = append(allErrs, field.Invalid(fldPath.Child(\"url\"), config.Url, \"url of etcd must be set\"))\n\t}\n\n\tif config.SecretRef != nil && (config.CABundle != nil || config.ClientCert != nil || config.ClientKey != nil) {\n\t\tallErrs = append(allErrs, field.Invalid(fldPath.Child(\"secretRef\"), config.Url, \"cannot configure both the secret reference as well as supply the certificate values directly\"))\n\t}\n\n\t// Do not verify the client certs against the given CA, as the client certs do not necessarily have to be signed by the\n\t// same CA that signed etcd's TLS serving certificates.\n\tif config.CABundle != nil {\n\t\tallErrs = append(allErrs, ValidateCACertificate(*config.CABundle, fldPath.Child(\"caBundle\"))...)\n\t}\n\n\tif config.ClientCert != nil {\n\t\tallErrs = append(allErrs, ValidateClientCertificate(*config.ClientCert, fldPath.Child(\"clientCert\"))...)\n\t}\n\n\tif config.ClientKey != nil {\n\t\tallErrs = append(allErrs, ValidatePrivateKey(*config.ClientKey, fldPath.Child(\"clientKey\"))...)\n\t}\n\n\treturn allErrs\n}", "func getFileConfiguration(t *testing.T, content string) *configuration.Registry {\n\ttmpFile, err := ioutil.TempFile(os.TempDir(), \"configFile-\")\n\trequire.NoError(t, err)\n\tdefer os.Remove(tmpFile.Name())\n\t_, err = tmpFile.Write([]byte(content))\n\trequire.NoError(t, err)\n\trequire.NoError(t, tmpFile.Close())\n\tconfig, err := 
configuration.New(tmpFile.Name())\n\trequire.NoError(t, err)\n\treturn config\n}" ]
[ "0.5153754", "0.50409085", "0.50409085", "0.49216852", "0.48767686", "0.476166", "0.46999902", "0.4645964", "0.46016797", "0.45887595", "0.4587537", "0.45873547", "0.45760208", "0.4569562", "0.4514388", "0.45088005", "0.45053396", "0.4501646", "0.4494398", "0.4474307", "0.4468049", "0.44650996", "0.44410303", "0.44106704", "0.44078323", "0.43953094", "0.43951473", "0.4382739", "0.43783066", "0.43777984", "0.43760347", "0.43733293", "0.43714944", "0.43696865", "0.4367361", "0.43614295", "0.43459165", "0.4344193", "0.4337937", "0.43358865", "0.43353915", "0.43333313", "0.43254045", "0.43207526", "0.43179062", "0.43097582", "0.42880654", "0.4286875", "0.42855063", "0.42822206", "0.4271137", "0.42662948", "0.42662948", "0.42480677", "0.4246072", "0.4242264", "0.4238519", "0.42383355", "0.42357013", "0.4215946", "0.4215946", "0.42054853", "0.42021528", "0.41992104", "0.41902044", "0.4189614", "0.41886458", "0.41851598", "0.41814497", "0.41794372", "0.4178984", "0.41723168", "0.41631165", "0.4161923", "0.41606206", "0.41605002", "0.41570842", "0.41478288", "0.41468322", "0.41330722", "0.41325113", "0.41303995", "0.41291913", "0.41242686", "0.41217995", "0.41179487", "0.41159028", "0.4112529", "0.4112205", "0.41108915", "0.41096392", "0.4106012", "0.4105053", "0.41023386", "0.41016144", "0.4098779", "0.40948474", "0.40909255", "0.40876815", "0.40841606" ]
0.8127358
0
getDirection from byte. Panics on unknown direction.
func getDirection(d byte) direction { switch d { case 'R': return right case 'L': return left case 'U': return up case 'D': return down default: panic(fmt.Sprintf("unknown direction %v", d)) } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (behaviour *Dumb) GetDirection() basic.Direction {\n\treturn behaviour.direction\n}", "func (self Source) GetDirection(result *Vector) {\n\tresult[x], result[y], result[z] = self.Get3f(AlDirection)\n}", "func convertDirectionCode(gpxDirCode string) uint8 {\n\n\tbrytonDirCode := DirectionCodeGoAhead\n\n\tswitch gpxDirCode {\n\tcase \"tshl\":\n\t\tbrytonDirCode = DirectionCodeCloseLeft\n\tcase \"left\":\n\t\tbrytonDirCode = DirectionCodeLeft\n\tcase \"tsll\":\n\t\tbrytonDirCode = DirectionCodeSlightLeft\n\tcase \"straight\":\n\t\tbrytonDirCode = DirectionCodeGoAhead\n\tcase \"tslr\":\n\t\tbrytonDirCode = DirectionCodeSlightRight\n\tcase \"right\":\n\t\tbrytonDirCode = DirectionCodeRight\n\tcase \"tshr\":\n\t\tbrytonDirCode = DirectionCodeCloseRight\n\tdefault:\n\t\tfmt.Println(\"Unsupported direction code: \" + gpxDirCode + \"! Using GoAhead!\")\n\t}\n\n\treturn brytonDirCode\n}", "func GetDirection(pinNo int) uint32 {\n\tindex := (pinNo) / 32\n\n\tregVal := readRegistry(index)\n\n\tgpio := uint32(pinNo % 32)\n\n\tval := ((regVal >> gpio) & 0x1)\n\n\treturn val\n\n}", "func deltaToDirection(dX, dY int) Direction {\n\n\tif dX == -1 && dY == -1 {\n\t\treturn DirectionNW\n\t} else if dX == 0 && dY == -1 {\n\t\treturn DirectionN\n\t} else if dX == 1 && dY == -1 {\n\t\treturn DirectionNE\n\t} else if dX == -1 && dY == 0 {\n\t\treturn DirectionW\n\t} else if dX == 1 && dY == 0 {\n\t\treturn DirectionE\n\t} else if dX == -1 && dY == 1 {\n\t\treturn DirectionSW\n\t} else if dX == 0 && dY == 1 {\n\t\treturn DirectionS\n\t} else if dX == 1 && dY == 1 {\n\t\treturn DirectionSE\n\t} else {\n\t\treturn DirectionUnknown\n\t}\n\n}", "func (p *Port) Direction() int {\n\treturn p.direction\n}", "func (v *MenuButton) GetDirection() ArrowType {\n\tc := C.gtk_menu_button_get_direction(v.native())\n\treturn ArrowType(c)\n}", "func (o PacketMirroringFilterResponsePtrOutput) Direction() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *PacketMirroringFilterResponse) *string 
{\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn &v.Direction\n\t}).(pulumi.StringPtrOutput)\n}", "func (o PacketMirroringFilterOutput) Direction() PacketMirroringFilterDirectionPtrOutput {\n\treturn o.ApplyT(func(v PacketMirroringFilter) *PacketMirroringFilterDirection { return v.Direction }).(PacketMirroringFilterDirectionPtrOutput)\n}", "func (s *ClampDirectionOffset) Direction() dprec.Vec3 {\n\treturn s.direction\n}", "func getDirection(current_direction string, action string) string{\n action = strings.ToUpper(action)\n current_direction = strings.ToUpper(current_direction)\n\n var cur_direction_index int\n clockwise := []string{0:\"N\",1:\"E\",2:\"S\",3:\"W\"}\n\n for i := range clockwise{\n if clockwise[i] == current_direction {\n cur_direction_index = i\n break\n }\n }\n\n if action == \"L\" {\n cur_direction_index -= 1\n if cur_direction_index < 0 {\n cur_direction_index += 4\n }\n }else if action == \"R\"{\n cur_direction_index += 1\n if cur_direction_index > 3 {\n cur_direction_index -= 4\n }\n }\n\n return clockwise[cur_direction_index]\n}", "func (o PacketMirroringFilterPtrOutput) Direction() PacketMirroringFilterDirectionPtrOutput {\n\treturn o.ApplyT(func(v *PacketMirroringFilter) *PacketMirroringFilterDirection {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Direction\n\t}).(PacketMirroringFilterDirectionPtrOutput)\n}", "func (o PacketMirroringFilterResponseOutput) Direction() pulumi.StringOutput {\n\treturn o.ApplyT(func(v PacketMirroringFilterResponse) string { return v.Direction }).(pulumi.StringOutput)\n}", "func (node *hostNode) GetPeerDirection(id peer.ID) network.Direction {\n\tconns := node.host.Network().ConnsToPeer(id)\n\n\tif len(conns) != 1 {\n\t\treturn network.DirUnknown\n\t}\n\treturn conns[0].Stat().Direction\n}", "func getord(input byte) int {\n\tLOWER_OFFSET := 87\n\tDIGIT_OFFSET := 48\n\tUPPER_OFFSET := 29\n\tvar result int\n\tif input <= 57 && input >= 48 {\n\t\tresult = int(input) - DIGIT_OFFSET\n\t} else if 
input >= 97 && input <= 122 {\n\t\tresult = int(input) - LOWER_OFFSET\n\t} else if input >= 65 && input <= 90 {\n\t\tresult = int(input) - UPPER_OFFSET\n\t} else {\n\t\tfmt.Printf(\"Dafux is this\\n\")\n\t\tresult = 0\n\t}\n\t//fmt.Printf(\"%c as base10 is %d\\n\", input, result)\n\treturn result\n}", "func DirectionFromString(d string) Direction {\n\tswitch d {\n\tcase \"L\":\n\t\treturn DirectionLeft\n\tcase \"R\":\n\t\treturn DirectionRight\n\tdefault:\n\t\treturn DirectionUnknown\n\t}\n}", "func ProtoToComputeFirewallDirectionEnum(e computepb.ComputeFirewallDirectionEnum) *compute.FirewallDirectionEnum {\n\tif e == 0 {\n\t\treturn nil\n\t}\n\tif n, ok := computepb.ComputeFirewallDirectionEnum_name[int32(e)]; ok {\n\t\te := compute.FirewallDirectionEnum(n[len(\"ComputeFirewallDirectionEnum\"):])\n\t\treturn &e\n\t}\n\treturn nil\n}", "func FindDirection(alias string) (dir Direction, found bool) {\n\tdir, found = dirMap[strings.ToLower(alias)]\n\treturn dir, found\n}", "func (el *gameStruct) Direction() dir.Direction {\n\treturn el.direction\n}", "func (r *Rule) direction(key item, l *lexer) error {\n\tif key.typ != itemDirection {\n\t\tpanic(\"item is not a direction\")\n\t}\n\tswitch key.value {\n\tcase \"->\":\n\t\tr.Bidirectional = false\n\tcase \"<>\":\n\t\tr.Bidirectional = true\n\tdefault:\n\t\treturn fmt.Errorf(\"invalid direction operator %q\", key.value)\n\t}\n\treturn nil\n}", "func (gpio *RpiGpio) Direction(p Pin, d PinDirection) error {\n\tpin, err := gpio.getBCMGpio(p)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn gpio.bcm.Direction(pin, d)\n}", "func (d decoderStripe) GetDirectionByIcon(searchedIcon icon) decoderDirection {\n\tfor i, iconIter := range d.icons {\n\t\tif iconIter == searchedIcon {\n\t\t\treturn d.directions[i]\n\t\t}\n\t}\n\n\tpanic(\"Should not happen!\")\n}", "func DirectionFromString(c string) Direction {\n\tswitch c {\n\tcase \"north\":\n\t\treturn Direction_north\n\tcase \"south\":\n\t\treturn Direction_south\n\tcase 
\"east\":\n\t\treturn Direction_east\n\tcase \"west\":\n\t\treturn Direction_west\n\n\tdefault:\n\t\treturn 0\n\t}\n}", "func (r *Reader) ReadByte() (byte, error) {\n\tr.prevRune = -1\n\tif r.i >= int64(len(r.s)) {\n\t\treturn 0, io.EOF\n\t}\n\tb := r.s[r.i]\n\tr.i++\n\treturn b, nil\n}", "func StringToDirection(s string) Direction {\n\tswitch {\n\tcase s == \"up\":\n\t\treturn Up\n\tcase s == \"down\":\n\t\treturn Down\n\tcase s == \"left\":\n\t\treturn Left\n\tcase s == \"right\":\n\t\treturn Right\n\t}\n\treturn Direction{}\n}", "func (p *Packet) ReadByte() byte {\n\tif p.readIndex+1 > len(p.Payload) {\n\t\tlog.Warning.Println(\"Error parsing packet arguments: { opcode=\" + strconv.Itoa(int(p.Opcode)) + \"; offset=\" + strconv.Itoa(p.readIndex) + \" };\")\n\t\treturn byte(0)\n\t}\n\tdefer func() {\n\t\tp.readIndex++\n\t}()\n\treturn p.Payload[p.readIndex] & 0xFF\n}", "func (m NoMsgTypes) GetMsgDirection() (v enum.MsgDirection, err quickfix.MessageRejectError) {\n\tvar f field.MsgDirectionField\n\tif err = m.Get(&f); err == nil {\n\t\tv = f.Value()\n\t}\n\treturn\n}", "func (p *Packet) getByte() byte {\n\tb := p.buf[p.pos]\n\tp.pos++\n\treturn b\n}", "func (m NoMDEntries) GetTickDirection() (v enum.TickDirection, err quickfix.MessageRejectError) {\n\tvar f field.TickDirectionField\n\tif err = m.Get(&f); err == nil {\n\t\tv = f.Value()\n\t}\n\treturn\n}", "func GetByte(r io.Reader) (byte, error) {\n\tt := make([]byte, 1)\n\t_, err := r.Read(t)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn t[0], nil\n}", "func (m *Manager) ReadByte() byte {\n\treturn byte(m.readUint(8))\n}", "func (m *MockHostNode) GetPeerDirection(id peer.ID) network.Direction {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetPeerDirection\", id)\n\tret0, _ := ret[0].(network.Direction)\n\treturn ret0\n}", "func (obj *Edge) GetDirectionType() types.TGDirectionType {\n\tif obj.EntityType != nil {\n\t\treturn obj.EntityType.(*EdgeType).GetDirectionType()\n\t} else {\n\t\treturn 
obj.directionType\n\t}\n}", "func DirectionValidator(d Direction) error {\n\tswitch d {\n\tcase DirectionDEBIT, DirectionCREDIT:\n\t\treturn nil\n\tdefault:\n\t\treturn fmt.Errorf(\"transaction: invalid enum value for direction field: %q\", d)\n\t}\n}", "func GetIterDirection(iterDirection ...IterDirection) IterDirection {\n\tdirection := IterDirectionForward\n\tif len(iterDirection) > 0 {\n\t\tswitch iterDirection[0] {\n\t\tcase IterDirectionForward:\n\t\t\tbreak\n\t\tcase IterDirectionBackward:\n\t\t\tdirection = iterDirection[0]\n\t\tdefault:\n\t\t\tpanic(fmt.Sprintf(\"unknown iteration direction: %d\", iterDirection[0]))\n\t\t}\n\t}\n\n\treturn direction\n}", "func (c *Cursor) GetByte() byte {\n\treturn (*c).bytes[(*c).Index]\n}", "func (o FirewallPolicyRuleResponseOutput) Direction() pulumi.StringOutput {\n\treturn o.ApplyT(func(v FirewallPolicyRuleResponse) string { return v.Direction }).(pulumi.StringOutput)\n}", "func lc(b byte) byte {\n\treturn b | 0x20\n}", "func (c *Contract) GetByte(n uint64) byte {\n\tif n < uint64(len(c.Code)) {\n\t\treturn c.Code[n]\n\t}\n\treturn 0\n}", "func (n *Node) dir(key []byte) byte {\n\tbyteoff := n.bitoff >> 3\n\tbitmask := byte(1) << (n.bitoff & 7) // 5 -> 0010 0000\n\tif byteoff < uint(len(key)) && key[byteoff] & bitmask != 0 {\n\t\treturn 1\n\t}\n\treturn 0\n}", "func (s *swimmer) direction() int {\n\treturn s.moveDirection\n}", "func (o FirewallPolicyRuleOutput) Direction() FirewallPolicyRuleDirectionPtrOutput {\n\treturn o.ApplyT(func(v FirewallPolicyRule) *FirewallPolicyRuleDirection { return v.Direction }).(FirewallPolicyRuleDirectionPtrOutput)\n}", "func shift_direction(b Bitboard, direction int) Bitboard {\n\tif direction == NORTH_EAST || direction == EAST || direction == SOUTH_EAST {\n\t\treturn signed_shift(b&^FILE_HBB, direction)\n\t}\n\tif direction == NORTH_WEST || direction == WEST || direction == SOUTH_WEST {\n\t\treturn signed_shift(b&^FILE_ABB, direction)\n\t}\n\treturn signed_shift(b, direction)\n}", 
"func ReadDirectionNorthSouth(data []byte) DirectionNorthSouth {\n\tbits := (data[3] & 0x80) >> 7\n\treturn DirectionNorthSouth(bits)\n}", "func (o GoogleDatastoreAdminV1IndexedPropertyResponseOutput) Direction() pulumi.StringOutput {\n\treturn o.ApplyT(func(v GoogleDatastoreAdminV1IndexedPropertyResponse) string { return v.Direction }).(pulumi.StringOutput)\n}", "func (o GoogleDatastoreAdminV1IndexedPropertyOutput) Direction() GoogleDatastoreAdminV1IndexedPropertyDirectionOutput {\n\treturn o.ApplyT(func(v GoogleDatastoreAdminV1IndexedProperty) GoogleDatastoreAdminV1IndexedPropertyDirection {\n\t\treturn v.Direction\n\t}).(GoogleDatastoreAdminV1IndexedPropertyDirectionOutput)\n}", "func (f MsgDirectionField) Tag() quickfix.Tag { return tag.MsgDirection }", "func direction(k1 collection.Comparer, k2 interface{}) int {\n\treturn math.Signum(math.Signum(k1.Compare(k2) + 1))\n}", "func (m NoSides) GetRoundingDirection() (v enum.RoundingDirection, err quickfix.MessageRejectError) {\n\tvar f field.RoundingDirectionField\n\tif err = m.Get(&f); err == nil {\n\t\tv = f.Value()\n\t}\n\treturn\n}", "func (z *Tokenizer) nextByte() byte {\n\tif z.err == io.EOF {\n\t\treturn 0\n\t}\n\tby, err := z.r.ReadByte()\n\tif err == io.EOF {\n\t\tz.err = io.EOF\n\t\treturn 0\n\t} else if err != nil {\n\t\tpanic(err)\n\t}\n\treturn by\n}", "func (MoveRequest_Direction) EnumDescriptor() ([]byte, []int) {\n\treturn file_game_game_proto_rawDescGZIP(), []int{2, 0}\n}", "func ReadByte(r io.Reader) (byte, error) {\n\td := make([]byte, 1, 1)\n\t_, err := io.ReadFull(r, d)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn d[0], nil\n}", "func ord(c byte) int {\n\treturn int(c - '0')\n}", "func ord(c byte) int {\n\treturn int(c - '0')\n}", "func Direction(lon0, lat0, lon1, lat1 float64) float64 {\n\tc_dir := C.fap_direction(\n\t\tC.double(lon0), C.double(lat0),\n\t\tC.double(lon1), C.double(lat1),\n\t)\n\n\treturn float64(c_dir)\n}", "func readByte(r io.Reader) (uint8, error) {\n\ttmp := 
[]uint8{0}\n\t_, e := r.Read(tmp)\n\treturn tmp[0], e\n}", "func chooseDirection(elevState cost.AssignedOrderInformation, ID string, floor int) elevio.MotorDirection {\n\tswitch elevState.States[ID].Direction {\n\tcase \"stop\":\n\t\tfallthrough\n\tcase \"down\":\n\t\tif ordersBelow(elevState, ID, floor) {\n\t\t\treturn elevio.MD_Down\n\t\t} else if ordersAbove(elevState, ID, floor) {\n\t\t\treturn elevio.MD_Up\n\t\t} else {\n\t\t\treturn elevio.MD_Stop\n\t\t}\n\tcase \"up\":\n\t\tif ordersAbove(elevState, ID, floor) {\n\t\t\treturn elevio.MD_Up\n\t\t} else if ordersBelow(elevState, ID, floor) {\n\t\t\treturn elevio.MD_Down\n\t\t} else {\n\t\t\treturn elevio.MD_Stop\n\t\t}\n\tdefault:\n\t\treturn elevio.MD_Stop\n\t}\n\treturn elevio.MD_Stop\n}", "func (d *Decoder) Byte() byte {\n\tb, err := d.buf.ReadByte()\n\tif err != nil {\n\t\tpanic(\"unmarshalByte\")\n\t}\n\treturn b\n}", "func (o SecurityPolicyRuleOutput) Direction() SecurityPolicyRuleDirectionPtrOutput {\n\treturn o.ApplyT(func(v SecurityPolicyRule) *SecurityPolicyRuleDirection { return v.Direction }).(SecurityPolicyRuleDirectionPtrOutput)\n}", "func (r *DecReader) ReadByte() (byte, error) {\n\tif r.err != nil {\n\t\treturn 0, r.err\n\t}\n\tif r.firstRead {\n\t\tr.firstRead = false\n\t\tif _, err := r.readFragment(nil, 0); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t\tb := r.plaintextBuffer[0]\n\t\tr.offset = 1\n\t\treturn b, nil\n\t}\n\tif r.offset > 0 && r.offset < len(r.plaintextBuffer) {\n\t\tb := r.plaintextBuffer[r.offset]\n\t\tr.offset++\n\t\treturn b, nil\n\t}\n\tif r.closed {\n\t\treturn 0, io.EOF\n\t}\n\n\tr.offset = 0\n\tif _, err := r.readFragment(nil, 1); err != nil {\n\t\treturn 0, err\n\t}\n\tb := r.plaintextBuffer[0]\n\tr.offset = 1\n\treturn b, nil\n}", "func (Direction) EnumDescriptor() ([]byte, []int) {\n\treturn file_mud_proto_rawDescGZIP(), []int{0}\n}", "func (l *LinksToIterator) Direction() string { return l.direction }", "func (d Direction) Flip() Direction {\n\treturn (d + 2) % 
4\n}", "func Direction(op int32) int32 {\n\tif op == et.OpBuy {\n\t\treturn et.ListDESC\n\t}\n\treturn et.ListASC\n}", "func (o SecurityPolicyRuleResponseOutput) Direction() pulumi.StringOutput {\n\treturn o.ApplyT(func(v SecurityPolicyRuleResponse) string { return v.Direction }).(pulumi.StringOutput)\n}", "func CalculateDirection(from, to Dot) Direction {\n\tif !from.Equals(to) {\n\t\tvar diffX, diffY uint8\n\n\t\tif from.X > to.X {\n\t\t\tdiffX = from.X - to.X\n\t\t} else {\n\t\t\tdiffX = to.X - from.X\n\t\t}\n\t\tif from.Y > to.Y {\n\t\t\tdiffY = from.Y - to.Y\n\t\t} else {\n\t\t\tdiffY = to.Y - from.Y\n\t\t}\n\n\t\tif diffX > diffY {\n\t\t\tif to.X > from.X {\n\t\t\t\treturn DirectionEast\n\t\t\t}\n\t\t\treturn DirectionWest\n\t\t}\n\n\t\tif diffY > diffX {\n\t\t\tif to.Y > from.Y {\n\t\t\t\treturn DirectionSouth\n\t\t\t}\n\t\t\treturn DirectionNorth\n\t\t}\n\t}\n\n\treturn RandomDirection()\n}", "func (ch *Chunk) ReadByte() (byte, error) {\n\tif ch.IsFullyRead() {\n\t\treturn 0, io.EOF\n\t}\n\tvar r byte\n\terr := ch.ReadLE(&r)\n\treturn r, err\n}", "func ctlz8(uint8) uint8", "func ReadByte(r io.Reader) (byte, error) {\n\tb := make([]byte, 1)\n\tn, err := r.Read(b)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tif n != 1 {\n\t\treturn 0, fmt.Errorf(\"read invalid amount, exp: 1, n: %d\", n)\n\t}\n\treturn b[0], nil\n}", "func (c *digisparkI2cConnection) ReadByte() (val byte, err error) {\n\tc.mtx.Lock()\n\tdefer c.mtx.Unlock()\n\n\tbuf := []byte{0}\n\tif err = c.readAndCheckCount(buf); err != nil {\n\t\treturn\n\t}\n\tval = buf[0]\n\treturn\n}", "func (p *Lexer) PeekByte() byte {\n\tc, err := p.Byte()\n\n\tif err != nil {\n\t\treturn 0\n\t}\n\n\tp.UnreadByte()\n\treturn c\n}", "func travelDirection(from int, to int) (rowDir int, colDir int) {\n fromRow := rowFromPosition(from)\n fromCol := colFromPosition(from)\n toRow := rowFromPosition(to)\n toCol := colFromPosition(to)\n\n if fromRow < toRow {\n rowDir = 1\n } else if fromRow > toRow {\n rowDir = -1\n } else 
{\n rowDir = 0\n }\n\n if fromCol < toCol {\n colDir = 1\n } else if fromCol > toCol {\n colDir = -1\n } else {\n colDir = 0\n }\n\n return\n}", "func dirNameToInt(direction string) int {\n dirHash := map[string]int{\"north\": 0, \"east\": 1, \"south\": 2, \"west\": 3}\n\n return dirHash[direction]\n}", "func (o Opcode) LL() uint8 {\n\treturn uint8(o & 0xFF00 >> 8)\n}", "func (Direction) EnumDescriptor() ([]byte, []int) {\n\treturn file_common_proto_rawDescGZIP(), []int{2}\n}", "func readByte(r io.Reader) (byte, error) {\n\tif r, ok := r.(io.ByteReader); ok {\n\t\treturn r.ReadByte()\n\t}\n\tvar v [1]byte\n\t_, err := io.ReadFull(r, v[:])\n\treturn v[0], err\n}", "func (l *StringLexer) PrevByte() byte {\n\treturn l.input[l.pos-1]\n}", "func (p *atomReader) ReadSignedByte() int8 {\n\tc, _ := p.r.ReadByte()\n\treturn int8(c)\n}", "func strToDirection(s string) uint32 {\n\tswitch strings.ToLower(s) {\n\tcase \"top\":\n\t\treturn ewmh.SizeTop\n\tcase \"bottom\":\n\t\treturn ewmh.SizeBottom\n\tcase \"left\":\n\t\treturn ewmh.SizeLeft\n\tcase \"right\":\n\t\treturn ewmh.SizeRight\n\tcase \"topleft\":\n\t\treturn ewmh.SizeTopLeft\n\tcase \"topright\":\n\t\treturn ewmh.SizeTopRight\n\tcase \"bottomleft\":\n\t\treturn ewmh.SizeBottomLeft\n\tcase \"bottomright\":\n\t\treturn ewmh.SizeBottomRight\n\t}\n\treturn ewmh.Infer\n}", "func (r *EncReader) ReadByte() (byte, error) {\n\tif r.err != nil {\n\t\treturn 0, r.err\n\t}\n\tif r.firstRead {\n\t\tr.firstRead = false\n\t\tif _, err := r.readFragment(nil, 0); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t\tb := r.ciphertextBuffer[0]\n\t\tr.offset = 1\n\t\treturn b, nil\n\t}\n\n\tif r.offset > 0 && r.offset < len(r.ciphertextBuffer) {\n\t\tb := r.ciphertextBuffer[r.offset]\n\t\tr.offset++\n\t\treturn b, nil\n\t}\n\tif r.closed {\n\t\treturn 0, io.EOF\n\t}\n\n\tr.offset = 0\n\tif _, err := r.readFragment(nil, 1); err != nil {\n\t\treturn 0, err\n\t}\n\tb := r.ciphertextBuffer[0]\n\tr.offset = 1\n\treturn b, nil\n}", "func (td 
TupleDesc) GetEnum(i int, tup Tuple) (v uint16, ok bool) {\n\ttd.expectEncoding(i, EnumEnc)\n\tb := td.GetField(i, tup)\n\tif b != nil {\n\t\tv, ok = readEnum(b), true\n\t}\n\treturn\n}", "func RtpDirectionStrGet(direction StreamDirection) string {\n\treturn \"\"\n}", "func readByte(r io.Reader) (ret byte, err error) {\n\tvar be [1]byte\n\tvalBytes := be[0:1]\n\n\tif _, err = io.ReadFull(r, valBytes); err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn valBytes[0], nil\n}", "func ReadByte() {\n\tfmt.Println(\"----------------> ReadByte\")\n\tbuf := bytes.NewBufferString(\"hello\")\n\tfmt.Println(buf.String())\n\n\t// read one byte assign to b\n\tb, _ := buf.ReadByte()\n\n\t// buf=ello\n\tfmt.Println(buf.String())\n\n\t// b=h\n\tfmt.Println(string(b))\n}", "func DenormalizeByte(v int8) float32 {\n\treturn float32(math.Max(float64(v)/127, -1))\n}", "func (node *Configuration) GetByte(parameter uint8) (uint8, error) {\n\tvar value []uint8\n\tvar err error\n\n\tif value, err = node.getValue(parameter, 1); err != nil {\n\t\treturn 0, err\n\t}\n\treturn value[0], nil\n\n}", "func (s *Stream) readByte() (byte, error) {\n\t// since this is readByte functions, therefore, only willRead a byte each time\n\tif err := s.willRead(1); err != nil {\n\t\treturn 0, err\n\t}\n\n\t// pops out a byte from r and return it\n\tb, err := s.r.ReadByte()\n\tif err == io.EOF {\n\t\terr = io.ErrUnexpectedEOF\n\t}\n\treturn b, err\n}", "func (e DirectionMode) C() C.cudnnDirectionMode_t { return C.cudnnDirectionMode_t(e) }", "func (f *Font) getGlyphOriginForDirection(glyph fonts.GID, direction Direction) (x, y Position) {\n\tif direction.isHorizontal() {\n\t\treturn f.getGlyphHOriginWithFallback(glyph)\n\t}\n\treturn f.getGlyphVOriginWithFallback(glyph)\n}", "func (o *FabricFlowControlPolicyAllOf) GetReceiveDirection() string {\n\tif o == nil || o.ReceiveDirection == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.ReceiveDirection\n}", "func (r *VarintReader) ReadByte() (c byte, err 
error) {\n\tn, err := r.Read(r.buf[:])\n\tif n > 0 {\n\t\tc = r.buf[0]\n\t\tr.bytesRead++\n\t}\n\treturn\n}", "func (p *atomReader) ReadUnsignedByte() uint8 {\n\tc, _ := p.r.ReadByte()\n\treturn c\n}", "func (m *MyMigrate) MigrationDirection(desiredVersion semver.Version, desiredStep int) (Direction, error) {\n\tif desiredStep != 0 {\n\t\tcurrentStep, dirty, err := m.Version()\n\t\tif err != migrate.ErrNilVersion && err != nil {\n\t\t\treturn 0, fmt.Errorf(\"Failed to get Migration Step to determine migration direction: %v\", err)\n\t\t}\n\t\tif dirty {\n\t\t\tlog.Fatal(\"DB in Dirty state, Please fix before migrating\")\n\t\t}\n\t\treturn Direction(desiredStep - int(currentStep)), nil\n\t}\n\tcurrentVersion, err := getCurrentDBVersion(m.postgresURI, m.database)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"Failed to get current DB version to determine migration direction: %v\", err)\n\t}\n\n\treturn Direction(desiredVersion.Compare(*currentVersion)), nil\n}", "func (b *Buffer) ReadByte() (byte, error) {\n\tif b.count == 0 { // no elements exist.\n\t\treturn ' ', errors.New(\"Buffer is empty\")\n\t}\n\tval := b.buf[b.head]\n\tb.count--\n\tb.head++\n\tb.head = b.head % b.size\n\treturn val, nil\n}", "func (r *flagsRegister) convFlagToUInt8() uint8 {\n\tvar f uint8\n\tif r.zero {\n\t\tf = f | 1<<ZERO_FLAG_BYTE_POSITION\n\t}\n\n\tif r.subtract {\n\t\tf = f | 1<<SUBTRACT_FLAG_BYTE_POSITION\n\t}\n\n\tif r.halfCarry {\n\t\tf = f | 1<<HALF_CARRY_BYTE_POSITION\n\t}\n\n\tif r.carry {\n\t\tf = f | 1<<CARRY_FLAG_BYTE_POSITION\n\t}\n\treturn f\n\n}", "func (p *Lexer) UnreadByte() {\n\tif p.r <= 0 {\n\t\treturn\n\t}\n\n\tp.r--\n}", "func (r *Reader) ReadByte() byte {\n\tif len(r.buffer) <= r.index {\n\t\tlog.Panic(\"Error reading byte: buffer is too small!\")\n\t}\n\n\tvar data = r.buffer[r.index]\n\tr.index++\n\n\treturn data\n}", "func (s Stream) ReadByte() (byte, error) {\n\tdata := make([]byte, 1)\n\terr := s.ReadFull(data)\n\tif err != nil {\n\t\treturn 0, 
err\n\t}\n\n\treturn data[0], nil\n}", "func (t *hashReader) ReadByte() (byte, error) {\n\tb := make([]byte, 1)\n\t_, err := t.Read(b)\n\treturn b[0], err\n}", "func GetLocaleDirection() TextDirection {\n\tc := C.gtk_get_locale_direction()\n\treturn TextDirection(c)\n}" ]
[ "0.61097026", "0.6080042", "0.5982142", "0.5905596", "0.5870495", "0.58615726", "0.5804797", "0.5659074", "0.56584334", "0.5656549", "0.5628965", "0.5622957", "0.5585129", "0.54656595", "0.5448121", "0.54473263", "0.5446236", "0.5445572", "0.5425969", "0.539548", "0.5385884", "0.53042036", "0.52968204", "0.5262794", "0.524183", "0.5229698", "0.5211185", "0.52094555", "0.5202126", "0.51890683", "0.51886654", "0.5187027", "0.51853764", "0.5153367", "0.5141897", "0.51139957", "0.51073986", "0.5098951", "0.50906783", "0.5082817", "0.5067517", "0.5064777", "0.50575644", "0.50560784", "0.5037426", "0.50259614", "0.502424", "0.50213325", "0.5016572", "0.50036144", "0.4975384", "0.49678075", "0.49598098", "0.49598098", "0.49498346", "0.49462005", "0.4939709", "0.49352255", "0.4925703", "0.49218515", "0.49218437", "0.4920542", "0.49111745", "0.49057925", "0.49050325", "0.4880187", "0.48775637", "0.4869183", "0.48516607", "0.48499435", "0.4841185", "0.48373556", "0.48313895", "0.4809457", "0.4806266", "0.47939494", "0.47932568", "0.47920167", "0.47867766", "0.47833064", "0.47827357", "0.4780062", "0.47740573", "0.4763846", "0.47603214", "0.47566196", "0.4755875", "0.47468293", "0.4743493", "0.47420415", "0.47398457", "0.47276333", "0.47213352", "0.47207105", "0.46994203", "0.46992862", "0.46936697", "0.46934435", "0.4686186", "0.46855316" ]
0.79665715
0
addSegment to the wire.
func (w *wire) addSegment(dir direction, dist int) { var lastPoint point if len(w.points) != 0 { lastPoint = w.points[len(w.points)-1] } w.points = append(w.points, lastPoint.move(dir, dist)) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (a *AST) AddSegment(seg *Segment) {\n\t_ = seg.SetParent(a)\n\ta.segs = append(a.segs, seg)\n}", "func (wire *Wire) AddWireSegment(dir byte, magnitude int) error {\n\tvar newSegment segment\n\tif wire.wireSegments == nil {\n\t\tnewSegment.start = Point{0, 0}\n\t\tnewSegment.end = Point{0, 0}\n\t} else {\n\t\tnewSegment = segment{wire.wireSegments[len(wire.wireSegments)-1].end, wire.wireSegments[len(wire.wireSegments)-1].end}\n\t}\n\n\tswitch dir {\n\tcase 'U':\n\t\tnewSegment.end.Y += magnitude\n\tcase 'D':\n\t\tnewSegment.end.Y -= magnitude\n\tcase 'R':\n\t\tnewSegment.end.X += magnitude\n\tcase 'L':\n\t\tnewSegment.end.X -= magnitude\n\tdefault:\n\t\treturn fmt.Errorf(\"Invalid direction given: %v\", dir)\n\t}\n\n\twire.wireSegments = append(wire.wireSegments, newSegment)\n\treturn nil\n}", "func (m *Manifest) AddSegment(s *Segment) {\n\tm.addSegment(s, true)\n\tm.addedSegments[s.ID] = struct{}{}\n}", "func (s *SegmentService) Add(memberID int, item *Segment) (*Response, error) {\n\n\tdata := struct {\n\t\tSegment `json:\"segment\"`\n\t}{*item}\n\n\treq, err := s.client.newRequest(\"POST\", fmt.Sprintf(\"segment/%d\", memberID), data)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresult := &Response{}\n\tresp, err := s.client.do(req, result)\n\tif err != nil {\n\t\treturn resp, err\n\t}\n\n\titem.ID, _ = result.Obj.ID.Int64()\n\treturn result, nil\n}", "func (s *SegmentChangesWrapper) AddToSegment(segmentName string, keys []string) error {\n\treturn errSegmentStorageNotImplementedMethod\n}", "func (cfw *CoverageDataWriter) AppendSegment(args map[string]string, visitor CounterVisitor) error {\n\tcfw.stab = &stringtab.Writer{}\n\tcfw.stab.InitWriter()\n\tcfw.stab.Lookup(\"\")\n\n\tvar err error\n\tfor k, v := range args {\n\t\tcfw.stab.Lookup(k)\n\t\tcfw.stab.Lookup(v)\n\t}\n\n\tws := &slicewriter.WriteSeeker{}\n\tif err = cfw.writeSegmentPreamble(args, ws); err != nil {\n\t\treturn err\n\t}\n\tif err = cfw.writeCounters(visitor, ws); err != nil 
{\n\t\treturn err\n\t}\n\tif err = cfw.patchSegmentHeader(ws); err != nil {\n\t\treturn err\n\t}\n\tif err := cfw.writeBytes(ws.BytesWritten()); err != nil {\n\t\treturn err\n\t}\n\tif err = cfw.writeFooter(); err != nil {\n\t\treturn err\n\t}\n\tif err := cfw.w.Flush(); err != nil {\n\t\treturn fmt.Errorf(\"write error: %v\", err)\n\t}\n\tcfw.stab = nil\n\treturn nil\n}", "func (s *segment) Append(b []byte) error {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\n\t// TODO: write in encoded form\n\t// https://github.com/komuw/shifta/issues/1\n\tn, err := s.f.Write(b)\n\tif err != nil {\n\t\treturn errSegmentWrite(err)\n\t}\n\n\tif n != len(b) {\n\t\t// partial write.\n\t\terrA := s.f.Truncate(int64(s.currentSegBytes))\n\t\tif errA != nil {\n\t\t\treturn errPartialWriteTruncate(errA)\n\t\t}\n\t} else {\n\t\ts.currentSegBytes = s.currentSegBytes + uint64(n)\n\t\ts.age = tNow() - s.baseOffset\n\t}\n\n\terrB := s.f.Sync()\n\tif errB != nil {\n\t\treturn errSegmentSync(errB)\n\t}\n\n\treturn nil\n}", "func (p *MediaPlaylist) AppendSegment(uri string, duration float64, title string, isVod bool) error {\n\tseg := new(MediaSegment)\n\tseg.URI = uri\n\tseg.Duration = duration\n\tseg.Title = title\n\n\teles := strings.Split(uri, \"/\")\n\toffset := 3\n\tif isVod != true {\n\t\toffset = 5\n\t}\n\tstartTime, err := strconv.ParseInt(eles[offset], 10, 64)\n\tif err != nil {\n\t\treturn err\n\t}\n\tendTime, err := strconv.ParseInt(eles[offset+1], 10, 64)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif p.lastEndTime != -1 {\n\t\tif startTime-p.lastEndTime > 500 {\n\t\t\tseg.IsDiscontinuity = true\n\t\t}\n\t}\n\tp.lastEndTime = endTime\n\n\treturn p.Append(seg)\n}", "func WithSegment(name string, c echo.Context, f func() error) error {\n\ttx := GetTX(c)\n\tif tx == nil {\n\t\treturn f()\n\t}\n\tsegment := newrelic.StartSegment(tx, name)\n\tdefer segment.End()\n\treturn f()\n}", "func WithSegment(name string, c echo.Context, f func() error) error {\n\ttx := GetTX(c)\n\tif tx == nil 
{\n\t\treturn f()\n\t}\n\tsegment := newrelic.StartSegment(tx, name)\n\tdefer segment.End()\n\treturn f()\n}", "func WithSegment(name string, c echo.Context, f func() error) error {\n\ttx := GetTX(c)\n\tif tx == nil {\n\t\treturn f()\n\t}\n\tsegment := newrelic.StartSegment(tx, name)\n\tdefer segment.End()\n\treturn f()\n}", "func (s *segment) Append(b []byte) error {\n\tif s.gReader != nil {\n\t\treturn ErrImmutableSegment\n\t}\n\t_, err := s.appender.Write(b)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn s.appender.Sync()\n}", "func AddSegmentHook(hookPoint boil.HookPoint, segmentHook SegmentHook) {\n\tswitch hookPoint {\n\tcase boil.BeforeInsertHook:\n\t\tsegmentBeforeInsertHooks = append(segmentBeforeInsertHooks, segmentHook)\n\tcase boil.BeforeUpdateHook:\n\t\tsegmentBeforeUpdateHooks = append(segmentBeforeUpdateHooks, segmentHook)\n\tcase boil.BeforeDeleteHook:\n\t\tsegmentBeforeDeleteHooks = append(segmentBeforeDeleteHooks, segmentHook)\n\tcase boil.BeforeUpsertHook:\n\t\tsegmentBeforeUpsertHooks = append(segmentBeforeUpsertHooks, segmentHook)\n\tcase boil.AfterInsertHook:\n\t\tsegmentAfterInsertHooks = append(segmentAfterInsertHooks, segmentHook)\n\tcase boil.AfterSelectHook:\n\t\tsegmentAfterSelectHooks = append(segmentAfterSelectHooks, segmentHook)\n\tcase boil.AfterUpdateHook:\n\t\tsegmentAfterUpdateHooks = append(segmentAfterUpdateHooks, segmentHook)\n\tcase boil.AfterDeleteHook:\n\t\tsegmentAfterDeleteHooks = append(segmentAfterDeleteHooks, segmentHook)\n\tcase boil.AfterUpsertHook:\n\t\tsegmentAfterUpsertHooks = append(segmentAfterUpsertHooks, segmentHook)\n\t}\n}", "func (l *Log) newSegment(off uint64) error {\n\ts, err := newSegment(l.Dir, off, l.Config)\n\tif err != nil {\n\t\treturn err\n\t}\n\tl.segments = append(l.segments, s)\n\tl.activeSegment = s\n\treturn nil\n}", "func EncodeSegment(w io.Writer, seg Segment) error {\n\t// Everything else needs the 0xff, marker and potential payload\n\t_, err := w.Write([]byte{0xff, 
byte(seg.Marker)})\n\tif err != nil || seg.Data == nil {\n\t\treturn err\n\t}\n\t// Payload size includes it's own 2-bytes\n\terr = binary.Write(w, binary.BigEndian, uint16(len(seg.Data))+2)\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = w.Write(seg.Data)\n\treturn err\n}", "func (t *Type1) writeSegment(w io.Writer, segment int) error {\n\tl := len(t.Segments[segment])\n\tvar asciiBinary byte\n\tif segment == 1 {\n\t\tasciiBinary = 2\n\t} else {\n\t\tasciiBinary = 1\n\t}\n\tprefix := []byte{128, asciiBinary, byte(l & 0xFF), byte(l >> 8 & 0xFF), byte(l >> 16 & 0xFF), byte(l >> 24 & 0xFF)}\n\t_, err := w.Write(prefix)\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = w.Write(t.Segments[segment])\n\treturn err\n}", "func addSegment(curr *segment, key string) (seg *segment) {\n\tif curr.parameter.segment != nil {\n\t\tseg = curr.parameter.segment\n\n\t} else if child, ok := curr.children[key]; !ok { // child does not match...\n\t\tvar isParam bool\n\n\t\tseg, isParam = newSegment(key)\n\n\t\tif isParam {\n\t\t\tcurr.parameter.segment = seg\n\t\t\tcurr.parameter.name = key[2:]\n\n\t\t} else {\n\t\t\tcurr.children[key] = seg\n\t\t}\n\n\t\treturn\n\n\t} else { // child matches...\n\t\tseg = child\n\t}\n\n\treturn\n}", "func (p *SeriesPartition) createSegment() (*SeriesSegment, error) {\n\t// Close writer for active segment, if one exists.\n\tif segment := p.activeSegment(); segment != nil {\n\t\tif err := segment.CloseForWrite(); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// Generate a new sequential segment identifier.\n\tvar id uint16\n\tif len(p.segments) > 0 {\n\t\tid = p.segments[len(p.segments)-1].ID() + 1\n\t}\n\tfilename := fmt.Sprintf(\"%04x\", id)\n\n\t// Generate new empty segment.\n\tsegment, err := CreateSeriesSegment(id, filepath.Join(p.path, filename))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tp.segments = append(p.segments, segment)\n\n\t// Allow segment to write.\n\tif err := segment.InitForWrite(); err != nil {\n\t\treturn nil, 
err\n\t}\n\n\treturn segment, nil\n}", "func (s *SmartContract) SaveSegment(stub shim.ChaincodeStubInterface, args []string) sc.Response {\n\t// Parse segment\n\tbyteArgs := stub.GetArgs()\n\tsegment := &cs.Segment{}\n\tif err := json.Unmarshal(byteArgs[1], segment); err != nil {\n\t\treturn shim.Error(\"Could not parse segment\")\n\t}\n\n\t// Validate segment\n\tif err := segment.Validate(); err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\t// Set pending evidence\n\tsegment.SetEvidence(\n\t\tmap[string]interface{}{\n\t\t\t\"state\": cs.PendingEvidence,\n\t\t\t\"transactions\": map[string]string{\"transactionID\": stub.GetTxID()},\n\t\t})\n\n\t// Check has prevLinkHash if not create map else check prevLinkHash exists\n\tprevLinkHash := segment.Link.GetPrevLinkHashString()\n\tif prevLinkHash == \"\" {\n\t\t// Create map\n\t\tif err := s.SaveMap(stub, segment); err != nil {\n\t\t\treturn shim.Error(err.Error())\n\t\t}\n\t} else {\n\t\t// Check previous segment exists\n\t\tresponse := s.GetSegment(stub, []string{prevLinkHash})\n\t\tif response.Status == shim.ERROR {\n\t\t\treturn shim.Error(\"Parent segment doesn't exist\")\n\t\t}\n\t}\n\n\t// Save segment\n\tsegmentDoc := SegmentDoc{\n\t\tObjectTypeSegment,\n\t\tsegment.GetLinkHashString(),\n\t\t*segment,\n\t}\n\tsegmentDocBytes, err := json.Marshal(segmentDoc)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\tif err := stub.PutState(segment.GetLinkHashString(), segmentDocBytes); err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\treturn shim.Success(nil)\n}", "func newSegment(segType uint8, flags uint16, streamID uint32, transID uint16, orderID uint16, message []byte) (*segment, error) {\n\tlength := len(message)\n\tif length > segmentBodyMaxSize {\n\t\treturn nil, errSegmentBodyTooLarge\n\t}\n\thdr := header(make([]byte, headerSize))\n\tif message == nil {\n\t\tmessage = []byte{} // FIXME!\n\t}\n\thdr.encode(segType, flags, streamID, transID, orderID, uint16(length))\n\treturn 
&segment{h: hdr, b: message}, nil\n}", "func (c ClientFake) CreateSegment(name, campaignID string) (Segment, error) {\n\treturn Segment{}, nil\n}", "func (h *HLSHandler) SegmentWritten(localFilePath string) {\n\th.Storage.SegmentWritten(localFilePath)\n}", "func (c *Computer) WriteSegment() *mach.Segment {\n\tif c.MemMode&MemWriteAux > 0 {\n\t\treturn c.Aux\n\t}\n\n\treturn c.Main\n}", "func (p *Projection) drawSegment(s *ik.Segment, col color.RGBA) {\n a := s.Start()\n b := s.End()\n p.line(a.X, a.Y, b.X, b.Y, col)\n\n if s.Child != nil {\n p.drawSegment(s.Child, col)\n }\n}", "func (h *InterfaceVppHandler) AddSpan(ifIdxFrom, ifIdxTo uint32, direction uint8, isL2 bool) error {\n\treturn h.setSpan(ifIdxFrom, ifIdxTo, direction, isL2)\n}", "func WriteSegment(stm *data.StateMap) *data.Segment {\n\treturn stm.Segment(memWriteSegment)\n}", "func (c ClientFake) UpdateSegment(name, campaignID, segmentID string) (Segment, error) {\n\treturn Segment{}, nil\n}", "func (b *vppbridge) AddInterface(vppinterface *vppinterface, portType l2.L2PortType) (err error) {\n\trequest := &l2.SwInterfaceSetL2Bridge{\n\t\tRxSwIfIndex: uint32(vppinterface.swifidx),\n\t\tBdID: b.ID,\n\t\tPortType: portType,\n\t\tEnable: 1,\n\t\tShg: 0,\n\t}\n\n\t// Dispatch request\n\tctx := b.Channel.SendRequest(request)\n\tresponse := &l2.SwInterfaceSetL2BridgeReply{}\n\tif err = ctx.ReceiveReply(response); err != nil {\n\t\terr = errors.Wrap(err, \"ctx.ReceiveReply()\")\n\t\treturn\n\t}\n\tif response.Retval != 0 {\n\t\terr = errors.Errorf(\"AddLoopBackReply: %d error\", response.Retval)\n\t\treturn\n\t}\n\n\t// Cache vppbridge segment\n\tb.segments = append(b.segments, vppinterface)\n\treturn\n}", "func CreateSegment(c *gin.Context) {\n\tsegmentValidator := SegmentValidator{}\n\tif err := segmentValidator.Bind(c); err != nil {\n\t\tc.JSON(http.StatusUnprocessableEntity, common.NewValidatorError(err))\n\t\treturn\n\t}\n\n\tsegment := segmentValidator.Segment\n\n\tif err := SaveOne(&segment); err != nil 
{\n\t\tc.JSON(http.StatusUnprocessableEntity, common.NewError(\"database\", err))\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusCreated, segment)\n\tgenerateThumbnailForSegment(segment)\n}", "func (*Token) EncodeSegment(seg []byte) string {\n\treturn base64.RawURLEncoding.EncodeToString(seg)\n}", "func EncodeSegment(seg []byte) string {\n\treturn strings.TrimRight(base64.URLEncoding.EncodeToString(seg), \"=\")\n}", "func (c *Client) Add(ctx context.Context, p *AddPayload) (res *StationFull, err error) {\n\tvar ires interface{}\n\tires, err = c.AddEndpoint(ctx, p)\n\tif err != nil {\n\t\treturn\n\t}\n\treturn ires.(*StationFull), nil\n}", "func SegmentSum(scope *Scope, data tf.Output, segment_ids tf.Output) (output tf.Output) {\n\tif scope.Err() != nil {\n\t\treturn\n\t}\n\topspec := tf.OpSpec{\n\t\tType: \"SegmentSum\",\n\t\tInput: []tf.Input{\n\t\t\tdata, segment_ids,\n\t\t},\n\t}\n\top := scope.AddOperation(opspec)\n\treturn op.Output(0)\n}", "func (c *ReplicaClient) WriteWALSegment(ctx context.Context, pos litestream.Pos, rd io.Reader) (info litestream.WALSegmentInfo, err error) {\n\tdefer func() { c.resetOnConnError(err) }()\n\n\tsftpClient, err := c.Init(ctx)\n\tif err != nil {\n\t\treturn info, err\n\t}\n\n\tfilename, err := litestream.WALSegmentPath(c.Path, pos.Generation, pos.Index, pos.Offset)\n\tif err != nil {\n\t\treturn info, fmt.Errorf(\"cannot determine wal segment path: %w\", err)\n\t}\n\tstartTime := time.Now()\n\n\tif err := sftpClient.MkdirAll(path.Dir(filename)); err != nil {\n\t\treturn info, fmt.Errorf(\"cannot make parent snapshot directory %q: %w\", path.Dir(filename), err)\n\t}\n\n\tf, err := sftpClient.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC)\n\tif err != nil {\n\t\treturn info, fmt.Errorf(\"cannot open snapshot file for writing: %w\", err)\n\t}\n\tdefer f.Close()\n\n\tn, err := io.Copy(f, rd)\n\tif err != nil {\n\t\treturn info, err\n\t} else if err := f.Close(); err != nil {\n\t\treturn info, 
err\n\t}\n\n\tinternal.OperationTotalCounterVec.WithLabelValues(ReplicaClientType, \"PUT\").Inc()\n\tinternal.OperationBytesCounterVec.WithLabelValues(ReplicaClientType, \"PUT\").Add(float64(n))\n\n\treturn litestream.WALSegmentInfo{\n\t\tGeneration: pos.Generation,\n\t\tIndex: pos.Index,\n\t\tOffset: pos.Offset,\n\t\tSize: n,\n\t\tCreatedAt: startTime.UTC(),\n\t}, nil\n}", "func (r *relation) Segment(si engine.SegmentInfo, proc *process.Process) engine.Segment {\n\tt0 := time.Now()\n\tdefer func() {\n\t\tlogutil.Debugf(\"time cost %d ms\", time.Since(t0))\n\t}()\n\treturn r.mp[si.TabletId].Segment(binary.BigEndian.Uint64([]byte(si.Id)), proc)\n}", "func (client WorkloadNetworksClient) CreateSegmentsResponder(resp *http.Response) (result WorkloadNetworkSegment, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func NewSegment(concurrency int, descriptors []Descriptor) *Segment {\n\treturn &Segment{\n\t\tconcurrency: concurrency,\n\t\tdescriptors: descriptors,\n\t\tdescriptorErrorBehavior: ErrorBehaviorTerminate,\n\t\tprocessErrorBehavior: ErrorBehaviorCollect,\n\t}\n}", "func newSegment(key string) (seg *segment, isParam bool) {\n\tseg = &segment{}\n\n\tseg.children = map[string]*segment{}\n\tseg.endpoints = map[string]*endpoint{}\n\n\tif isParameter(key) {\n\t\tisParam = true\n\t}\n\n\treturn\n}", "func (r *Router) AddRoute(method string, path string, callback http.HandlerFunc) (err error) {\n\tkeys := setupKeys(strings.Split(path, \"/\"))\n\tpathParams := []string{}\n\n\tif r.root == nil {\n\t\tr.root = &segment{}\n\t\tr.root.children = map[string]*segment{}\n\t\tr.root.endpoints = map[string]*endpoint{}\n\t}\n\n\tcurr := r.root\n\n\tfor i, key := range keys {\n\t\tif i == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\tif isParameter(key) {\n\t\t\tpathParams = 
append(pathParams, key[2:])\n\n\t\t}\n\n\t\tif child, _ := getChild(key, curr); child == nil {\n\t\t\tseg := addSegment(curr, key)\n\t\t\tcurr = seg\n\t\t} else {\n\t\t\tcurr = child\n\t\t}\n\t}\n\n\tif _, ok := curr.endpoints[method]; ok {\n\t\terr = errors.New(\"path already exists\")\n\n\t\treturn\n\t}\n\n\tcurr.endpoints[method] = &endpoint{callback, path, pathParams}\n\tr.routes = append(r.routes, route{callback, method, path})\n\n\treturn\n}", "func NewSegment(p1, p2 Vector) Segment {\n\treturn Segment{p1, p2}\n}", "func (vl *VlanBridge) AddSvcSpec(svcName string, spec *ServiceSpec) error {\n return nil\n}", "func segment(icon, fg, bg string) SegmentDefinition {\n\treturn SegmentDefinition{\n\t\ticon: icon,\n\t\tfg: colors.MakeColor(fg),\n\t\tbg: colors.MakeColor(bg),\n\t}\n}", "func (r *rdsRoute) Add(ns, topic, address string, version uint64) error {\n\tkey := r.getRoutePrefix(ns, topic)\n\trds := r.rdc.Get(util.W, key)\n\tdefer rds.Close()\n\t_, err := rds.Do(\"HSET\", key, address, version)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func newSegment(segmentName string, intervalType interval.Type, path string) (Segment, error) {\n\tkvStore, err := kv.NewStore(segmentName, kv.StoreOption{Path: path})\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"create kv store for segment error:%s\", err)\n\t}\n\t// parse base time from segment name\n\tbaseTime, err := interval.GetCalculator(intervalType).ParseSegmentTime(segmentName)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"parse segment[%s] base time error\", path)\n\t}\n\n\treturn &segment{\n\t\tbaseTime: baseTime,\n\t\tkvStore: kvStore,\n\t\tintervalType: intervalType,\n\t\tlogger: logger.GetLogger(),\n\t}, nil\n}", "func (s *Segment) Write(p []byte) (int, error) {\n\t// If p is nil, or has a length of zero, return early.\n\tif len(p) == 0 {\n\t\treturn 0, nil\n\t}\n\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\tif int64(len(p)) > s.remaining() {\n\t\treturn 0, ErrNotEnoughSpace\n\t}\n\treturn 
s.write(p)\n}", "func (r *Recovery) Add(loc Location) error {\n\tr.mtx.Lock()\n\tdefer r.mtx.Unlock()\n\n\tsh := r.shards[loc.Shard]\n\tl := len(sh.data)\n\tif diff := int(loc.Slot/8) - l; diff >= 0 {\n\t\tsh.extend(diff + 1)\n\t\tfor i := 0; i <= diff; i++ {\n\t\t\tsh.data[l+i] = 0x0\n\t\t}\n\t}\n\tsh.push(loc.Slot)\n\treturn nil\n}", "func AddServant(v dispatch, f interface{}, obj string) {\n\taddServantCommon(v, f, obj, false)\n}", "func SegmentSumV2(scope *Scope, data tf.Output, segment_ids tf.Output, num_segments tf.Output) (output tf.Output) {\n\tif scope.Err() != nil {\n\t\treturn\n\t}\n\topspec := tf.OpSpec{\n\t\tType: \"SegmentSumV2\",\n\t\tInput: []tf.Input{\n\t\t\tdata, segment_ids, num_segments,\n\t\t},\n\t}\n\top := scope.AddOperation(opspec)\n\treturn op.Output(0)\n}", "func (kcp *KCP) newSegment(size int) (seg segment) {\n\tseg.data = kcp.pool.Get()\n\treturn\n}", "func (o *WlRegion) Add(x wire.Int, y wire.Int, width wire.Int, height wire.Int) error {\n\tmsg, err := wire.NewMessage(o.ID(), 1)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err = msg.Write(x); err != nil {\n\t\treturn err\n\t}\n\n\tif err = msg.Write(y); err != nil {\n\t\treturn err\n\t}\n\n\tif err = msg.Write(width); err != nil {\n\t\treturn err\n\t}\n\n\tif err = msg.Write(height); err != nil {\n\t\treturn err\n\t}\n\n\tif err = o.Base.Conn.Write(msg); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (s *SharedMemorySegment) Write(data []byte) {\n\tsrcLen := len(data)\n\tdstLen := len(s.data)\n\n\tif srcLen > dstLen {\n\t\tpanic(\"can't write more than source len\")\n\t}\n\n\ts.writeBuffer(data, s.data)\n}", "func (tag *CustomSegmentTag) SegmentTag() bool {\n\treturn true\n}", "func CreateSegment(db *sql.DB, schema string, count, segmentID, percent int) {\n\tvar add func(string, []int)\n\n\tadd = func(schema string, people []int) {\n\t\tif len(people) == 0 {\n\t\t\treturn\n\t\t}\n\n\t\ttx, err := db.Begin()\n\t\texitIf(\"start transaction\", err)\n\n\t\tquery := `\n\t\t 
UPDATE ` + schema + `.people\n\t\t\tSET memberships = memberships || myvalues.hash::hstore\n\t\t\tFROM (\n\t\t\t\tVALUES `\n\n\t\tend := `) AS myvalues (id, hash)\n\t\t WHERE ` + schema + `.people.id = myvalues.id::integer`\n\n\t\targs := make([]interface{}, 0, len(people)*2)\n\n\t\tfor i, id := range people {\n\t\t\tquery += fmt.Sprint(\"($\", i*2+1, \", $\", i*2+2, \")\")\n\t\t\tif i != len(people)-1 {\n\t\t\t\tquery += \", \"\n\t\t\t}\n\n\t\t\tstatus := \"left|\"\n\n\t\t\tif rand.Intn(100) <= percent {\n\t\t\t\tstatus = \"entered|\"\n\t\t\t}\n\n\t\t\tkey := status + strconv.Itoa(segmentID)\n\t\t\tvalue := sql.NullString{strconv.Itoa(int(time.Now().Unix())), true}\n\n\t\t\targs = append(args, id, hstore.Hstore{map[string]sql.NullString{key: value}})\n\t\t}\n\n\t\tquery += end\n\n\t\tr, err := db.Exec(query, args...)\n\t\texitIf(\"updating people\", err)\n\n\t\tif num, _ := r.RowsAffected(); num != int64(len(people)) {\n\t\t\tlog.Fatal(\"update didn't update?\", r)\n\t\t}\n\n\t\texitIf(\"commit transaction\", tx.Commit())\n\t}\n\n\tstart := time.Now()\n\tbatch := make([]int, 0, 100)\n\n\tfor i := 0; i < count; i++ {\n\t\tid := i + 1\n\n\t\tif i%10000 == 0 {\n\t\t\tlog.Println(\"adding to person\", id)\n\t\t}\n\n\t\tbatch = append(batch, id)\n\n\t\tif len(batch) >= 100 {\n\t\t\tadd(schema, batch)\n\t\t\tbatch = make([]int, 0, 100)\n\t\t}\n\t}\n\n\tadd(schema, batch)\n\n\tlog.Println(\"updated\", count, \"persons in\", time.Since(start))\n}", "func (x *Indexer) Add(doc *Doc) error {\n\tdid, err := x.doc2Id(doc)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdoc.Path = \"\"\n\tx.shatter <- &shatterReq{docid: did, offset: doc.Start, d: doc.Dat}\n\treturn nil\n}", "func (sdk *Sdk) UpdateSegment(segmentID string, body *Segment) (string, error) {\n\tsdkC := sdk.cms\n\tupdateSegment := fmt.Sprintf(\"/triggers/%s\", segmentID)\n\n\treturn sdkC.rq.PutJSON(updateSegment, body)\n}", "func (c *Connector) AddSvc(ia addr.IA, svc addr.HostSVC, ip net.IP) error {\n\tlog.Debug(\"Adding 
service\", \"isd_as\", ia, \"svc\", svc, \"ip\", ip)\n\tif !c.ia.Equal(ia) {\n\t\treturn serrors.WithCtx(errMultiIA, \"current\", c.ia, \"new\", ia)\n\t}\n\treturn c.DataPlane.AddSvc(svc, &net.UDPAddr{IP: ip, Port: topology.EndhostPort})\n}", "func (res *Resource) addRoute(method string, route string) {\n\tres.Routes = append(res.Routes, fmt.Sprintf(\"%s - /%s%s\", method, res.Type, route))\n}", "func (g *grid) addCoordinate(c *coordinate, magnitude, totalMagnitude int) {\n\tsegment := newSegment(g.curX, g.curY, c.x, c.y)\n\tsegment.magnitude = magnitude\n\tsegment.totalMagnitude = totalMagnitude\n\n\tg.coordinates = append(g.coordinates, c)\n\tg.segments = append(g.segments, segment)\n\tg.curX = c.x\n\tg.curY = c.y\n}", "func uploadStreamingSegment(client *gophercloud.ServiceClient, opts *uploadSegmentOpts) (*uploadSegmentResult, error) {\n\tvar result uploadSegmentResult\n\n\t// Checksum is always done when streaming.\n\thash := md5.New()\n\tbuf := bytes.NewBuffer([]byte{})\n\tn, err := io.CopyN(io.MultiWriter(hash, buf), opts.Content, opts.SegmentSize)\n\tif err != nil && err != io.EOF {\n\t\treturn nil, err\n\t}\n\n\tlocalChecksum := fmt.Sprintf(\"%x\", hash.Sum(nil))\n\n\tif n == 0 {\n\t\tresult.Complete = true\n\t\tresult.Success = true\n\t\tresult.Size = 0\n\n\t\treturn &result, nil\n\t}\n\n\tcreateOpts := objects.CreateOpts{\n\t\tContent: bytes.NewReader(buf.Bytes()),\n\t\tContentLength: n,\n\t\tETag: localChecksum,\n\t\t// TODO\n\t\t//Metadata: opts.Metadata,\n\t}\n\n\tif opts.SegmentIndex == 0 && n < opts.SegmentSize {\n\t\tres := objects.Create(client, opts.ContainerName, opts.ObjectName, createOpts)\n\t\tif res.Err != nil {\n\t\t\treturn nil, res.Err\n\t\t}\n\n\t\tresult.Location = fmt.Sprintf(\"/%s/%s\", opts.ContainerName, opts.ObjectName)\n\t} else {\n\t\tres := objects.Create(client, opts.SegmentContainer, opts.SegmentName, createOpts)\n\t\tif res.Err != nil {\n\t\t\treturn nil, res.Err\n\t\t}\n\n\t\tresult.Location = fmt.Sprintf(\"/%s/%s\", 
opts.SegmentContainer, opts.SegmentName)\n\t}\n\n\tresult.Success = true\n\tresult.Complete = n < opts.SegmentSize\n\tresult.Size = n\n\tresult.Index = opts.SegmentIndex\n\tresult.ETag = localChecksum\n\n\treturn &result, nil\n}", "func (sq *SegmentQueue) Push(seg *Segment) {\n\tsq.mtx.Lock()\n\tdefer sq.mtx.Unlock()\n\n\tfor _, s := range sq.pq {\n\t\tif s == seg {\n\t\t\treturn\n\t\t}\n\t}\n\n\theap.Push(&sq.pq, seg)\n\tselect {\n\tcase sq.notifyCh <- seg:\n\tdefault:\n\t}\n}", "func (s *Scene) AddRoutine(r *Routine) error {\n\tif s.HasRoutine(r) {\n\t\treturn ErrRoutineExists\n\t}\n\ts.Routines = append(s.Routines, r)\n\treturn nil\n}", "func (s *Section) add(k, v []byte) {\n\ts.Keys = append(s.Keys, k)\n\ts.Values = append(s.Values, v)\n}", "func AddDrain(tag string, drain DrainFunc) {\n\tVac.addDrain(tag, drain)\n}", "func (s *Server) Add(ctx context.Context, message *goa_starterpb.AddRequest) (*goa_starterpb.AddResponse, error) {\n\tctx = context.WithValue(ctx, goa.MethodKey, \"add\")\n\tctx = context.WithValue(ctx, goa.ServiceKey, \"goa_starter\")\n\tresp, err := s.AddH.Handle(ctx, message)\n\tif err != nil {\n\t\treturn nil, goagrpc.EncodeError(err)\n\t}\n\treturn resp.(*goa_starterpb.AddResponse), nil\n}", "func (w *Watcher) Add(path string) {\n\t// Initial value to be different from any ETag\n\tw.paths[path] = \"Ooh, watch me, watch me!\"\n}", "func AttachToShmSegment(shmID int, size uint, permission int) (*SharedMemorySegment, error) {\n\t// OR (bitwise) flags\n\tvar flgs Flag\n\tflgs = flgs | IPC_CREAT | IPC_EXCL\n\n\tif permission != 0 {\n\t\tflgs |= Flag(permission)\n\t} else {\n\t\tflgs |= 0600 // default permission\n\t}\n\n\tshmAddr, _, errno := syscall.RawSyscall(syscall.SYS_SHMAT, uintptr(shmID), uintptr(0), uintptr(flgs))\n\tif errno != 0 {\n\t\treturn nil, errors.New(errno.Error())\n\t}\n\n\tsegment := &SharedMemorySegment{\n\t\tsize: size,\n\t\tflags: flgs,\n\t\taddress: uintptr(shmID),\n\t\tdata: make([]byte, 0),\n\t}\n\n\t// construct slice 
from memory segment\n\tsh := (*reflect.SliceHeader)(unsafe.Pointer(&segment.data))\n\tsh.Len = int(size)\n\tsh.Cap = int(size)\n\tsh.Data = shmAddr\n\n\tsegment.data = *(*[]byte)(unsafe.Pointer(sh))\n\n\treturn segment, nil\n}", "func (_obj *Apilangpack) AddServant(imp _impApilangpack, obj string) {\n\ttars.AddServant(_obj, imp, obj)\n}", "func (client WorkloadNetworksClient) CreateSegmentsSender(req *http.Request) (future WorkloadNetworksCreateSegmentsFuture, err error) {\n\tvar resp *http.Response\n\tresp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))\n\tif err != nil {\n\t\treturn\n\t}\n\tvar azf azure.Future\n\tazf, err = azure.NewFutureFromResponse(resp)\n\tfuture.FutureAPI = &azf\n\tfuture.Result = func(client WorkloadNetworksClient) (wns WorkloadNetworkSegment, err error) {\n\t\tvar done bool\n\t\tdone, err = future.DoneWithContext(context.Background(), client)\n\t\tif err != nil {\n\t\t\terr = autorest.NewErrorWithError(err, \"avs.WorkloadNetworksCreateSegmentsFuture\", \"Result\", future.Response(), \"Polling failure\")\n\t\t\treturn\n\t\t}\n\t\tif !done {\n\t\t\terr = azure.NewAsyncOpIncompleteError(\"avs.WorkloadNetworksCreateSegmentsFuture\")\n\t\t\treturn\n\t\t}\n\t\tsender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))\n\t\twns.Response.Response, err = future.GetResult(sender)\n\t\tif wns.Response.Response == nil && err == nil {\n\t\t\terr = autorest.NewErrorWithError(err, \"avs.WorkloadNetworksCreateSegmentsFuture\", \"Result\", nil, \"received nil response and error\")\n\t\t}\n\t\tif err == nil && wns.Response.Response.StatusCode != http.StatusNoContent {\n\t\t\twns, err = client.CreateSegmentsResponder(wns.Response.Response)\n\t\t\tif err != nil {\n\t\t\t\terr = autorest.NewErrorWithError(err, \"avs.WorkloadNetworksCreateSegmentsFuture\", \"Result\", wns.Response.Response, \"Failure responding to 
request\")\n\t\t\t}\n\t\t}\n\t\treturn\n\t}\n\treturn\n}", "func (geom Geometry) Segmentize(distance float64) {\n\tC.OGR_G_Segmentize(geom.cval, C.double(distance))\n}", "func (rb *RingBuffer) Add(value stats.Record) {\n\trb.lock.Lock()\n\tdefer rb.lock.Unlock()\n\trb.data[rb.seq%uint64(len(rb.data))] = value\n\trb.seq++\n}", "func (_m *MockSegmentManager) Put(segmentType commonpb.SegmentState, segments ...Segment) {\n\t_va := make([]interface{}, len(segments))\n\tfor _i := range segments {\n\t\t_va[_i] = segments[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, segmentType)\n\t_ca = append(_ca, _va...)\n\t_m.Called(_ca...)\n}", "func (r *ring) Add(b []byte) {\n\n\tif len(b) > r.maxSize {\n\t\tb = b[len(b)-r.maxSize:]\n\t}\n\n\t// if head beyond tail, need to wrap over\n\tnewTail := (len(b) + r.tail)\n\n\tif newTail > len(r.buffer) {\n\t\tnewTail %= len(r.buffer)\n\n\t\toverflow := len(r.buffer) - r.tail\n\t\tcopy(r.buffer[r.tail:len(r.buffer)], b[:overflow])\n\t\tcopy(r.buffer[0:newTail], b[overflow:])\n\n\t\t// advance head to one byte past next r.sep in buffer\n\t\tindex := bytes.IndexByte(r.buffer[newTail+1:r.tail], r.sep)\n\t\tif index == -1 {\n\t\t\tr.head = r.tail\n\t\t} else {\n\t\t\trealIdx := newTail + 1 + index\n\t\t\tr.head = (realIdx + 1) % len(r.buffer)\n\t\t}\n\t} else {\n\t\tcopy(r.buffer[r.tail:newTail], b)\n\t}\n\tr.tail = newTail\n}", "func (s *Segment) Put(st, et time.Time, samples uint64, cb func(depth int, t time.Time, r *big.Rat, addons []Addon)) error {\n\ts.m.Lock()\n\tdefer s.m.Unlock()\n\n\tst, et = normalize(st, et)\n\tif st.After(et) {\n\t\treturn errStartTimeBeforeEndTime\n\t}\n\n\tif !s.growTree(st, et) {\n\t\treturn errTreeMaxSize\n\t}\n\tv := newVis()\n\ts.root.put(st, et, samples, func(sn *streeNode, depth int, tm time.Time, r *big.Rat, addons []Addon) {\n\t\tv.add(sn, r, true)\n\t\tcb(depth, tm, r, addons)\n\t})\n\tv.print(filepath.Join(os.TempDir(), fmt.Sprintf(\"0-put-%s-%s.html\", st.String(), et.String())))\n\treturn 
nil\n}", "func extractSegment(odpSegments *[]string, node *entities.TreeNode) {\n\tcondition, ok := node.Item.(entities.Condition)\n\tif !ok {\n\t\treturn\n\t}\n\t// Add segment to list only if match type is qualified and value is a non-empty string\n\tif condition.Match == matchers.QualifiedMatchType {\n\t\tif segment, ok := condition.Value.(string); ok && segment != \"\" {\n\t\t\t*odpSegments = append(*odpSegments, segment)\n\t\t}\n\t}\n}", "func (r *RdbReport) Sadd(key, member []byte) error {\n\tr.vl += uint64(len(member) + 38)\n\tr.ll++\n\tr.bidSizeMap[getBid(key)] += uint64(len(member))\n\treturn nil\n}", "func (s *Server) Add(ctx context.Context, message *steppb.AddRequest) (*steppb.AddResponse, error) {\n\tctx = context.WithValue(ctx, goa.MethodKey, \"add\")\n\tctx = context.WithValue(ctx, goa.ServiceKey, \"step\")\n\tresp, err := s.AddH.Handle(ctx, message)\n\tif err != nil {\n\t\treturn nil, goagrpc.EncodeError(err)\n\t}\n\treturn resp.(*steppb.AddResponse), nil\n}", "func (rndr *Renderer) AddService(service *renderer.ContivService) error {\n\tif rndr.snatOnly {\n\t\treturn nil\n\t}\n\n\tdnat := rndr.contivServiceToDNat(service)\n\ttxn := rndr.UpdateTxnFactory(fmt.Sprintf(\"add service '%v'\", service.ID))\n\ttxn.Put(vpp_nat.DNAT44Key(dnat.Label), dnat)\n\treturn nil\n}", "func (p *MediaPlaylist) Append(seg *MediaSegment) error {\n\tif p.head == p.tail && p.count > 0 {\n\t\treturn ErrPlaylistFull\n\t}\n\tp.Segments[p.tail] = seg\n\tp.tail = (p.tail + 1) % p.capacity\n\tp.count++\n\tif p.TargetDuration < seg.Duration {\n\t\tp.TargetDuration = math.Ceil(seg.Duration)\n\t}\n\tp.buf.Reset()\n\treturn nil\n}", "func SegmentExists(ctx context.Context, exec boil.ContextExecutor, iD int) (bool, error) {\n\tvar exists bool\n\tsql := \"select exists(select 1 from \\\"segment\\\" where \\\"id\\\"=$1 limit 1)\"\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, iD)\n\t}\n\trow := 
exec.QueryRowContext(ctx, sql, iD)\n\n\terr := row.Scan(&exists)\n\tif err != nil {\n\t\treturn false, errors.Wrap(err, \"boiler: unable to check if segment exists\")\n\t}\n\n\treturn exists, nil\n}", "func (s *slot) add(c interface{}) {\n\ts.mux.Lock()\n\tdefer s.mux.Unlock()\n\ts.elements[c] = c\n}", "func (context *context) NextSegment() (Segment, error) {\n\tif context.model.ctx == nil {\n\t\treturn Segment{}, ErrInternalAppError\n\t}\n\tif context.n >= context.model.ctx.Whisper_full_n_segments() {\n\t\treturn Segment{}, io.EOF\n\t}\n\n\t// Populate result\n\tresult := toSegment(context.model.ctx, context.n)\n\n\t// Increment the cursor\n\tcontext.n++\n\n\t// Return success\n\treturn result, nil\n}", "func (s *Split) Add(data Record) {\n\t// append record if it is below the InSplitLimit or the recordInSplitLimit is not set (-1)\n\tif len(s.Records) <= recordInSplitLimit || recordInSplitLimit == -1 {\n\t\ts.Records = append(s.Records, data)\n\t}\n\n\ts.RecordCount++\n}", "func (s *SAM) Add(str string) {\n\tx := s.start\n\tfor _, c := range str {\n\t\tif x.next[c] != nil && x.next[c].l == x.l+1 {\n\t\t\tx = x.next[c]\n\t\t} else {\n\t\t\tx = s.extend(x, c)\n\t\t}\n\t}\n\tfor ; x != s.start; x = x.fail {\n\t\tx.t = true\n\t}\n}", "func NewSegment(data SegmentData, opts Options) (Segment, error) {\n\tif err := data.Validate(); err != nil {\n\t\treturn nil, err\n\t}\n\n\tmetadata := fswriter.Metadata{}\n\tif err := metadata.Unmarshal(data.Metadata); err != nil {\n\t\treturn nil, err\n\t}\n\n\tif metadata.PostingsFormat != fswriter.PostingsFormat_PILOSAV1_POSTINGS_FORMAT {\n\t\treturn nil, fmt.Errorf(\"unsupported postings format: %v\", metadata.PostingsFormat.String())\n\t}\n\n\tfieldsFST, err := vellum.Load(data.FSTFieldsData.Bytes)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to load fields fst: %v\", err)\n\t}\n\n\tvar (\n\t\tdocsThirdPartyReader = data.DocsReader\n\t\tdocsDataReader *docs.DataReader\n\t\tdocsEncodedDataReader 
*docs.EncodedDataReader\n\t\tdocsIndexReader *docs.IndexReader\n\t)\n\tif docsThirdPartyReader == nil {\n\t\tdocsDataReader = docs.NewDataReader(data.DocsData.Bytes)\n\t\tdocsIndexReader, err = docs.NewIndexReader(data.DocsIdxData.Bytes)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"unable to load documents index: %v\", err)\n\t\t}\n\t}\n\tdocsEncodedDataReader = docs.NewEncodedDataReader(data.DocsData.Bytes)\n\n\ts := &fsSegment{\n\t\tfieldsFST: fieldsFST,\n\t\tdocsDataReader: docsDataReader,\n\t\tdocsEncodedDataReader: docsEncodedDataReader,\n\t\tdocsIndexReader: docsIndexReader,\n\t\tdocsThirdPartyReader: docsThirdPartyReader,\n\n\t\tdata: data,\n\t\topts: opts,\n\t\tnumDocs: metadata.NumDocs,\n\t}\n\n\t// NB(r): The segment uses the context finalization to finalize\n\t// resources. Finalize is called after Close is called and all\n\t// the segment readers have also been closed.\n\ts.ctx = opts.ContextPool().Get()\n\ts.ctx.RegisterFinalizer(s)\n\n\treturn s, nil\n}", "func (a *_Atom) addRing(r *_Ring) {\n\ta.rings.Set(uint(r.id))\n}", "func (me *I16HEXFile) Add(r Record) error {\n\n\tif !r.validate(me.GetType()) {\n\t\treturn &InvalidRecordTypeError{\n\t\t\tInvaildFileType: me.GetType(),\n\t\t\tInvalidRecordType: r.Type,\n\t\t}\n\t}\n\n\tme.records = append(me.records, r)\n\treturn nil\n}", "func (a axes) drawSegment(p *vg.Painter, xy xyer, cs vg.CoordinateSystem, l Line, segment int) {\n\t// we modify l.X, and l.Y and restore it later.\n\tsaveX := l.X\n\tsaveY := l.Y\n\tsaveC := l.C\n\tdefer func() {\n\t\tl.X = saveX\n\t\tl.Y = saveY\n\t\tl.C = saveC\n\t}()\n\n\t// Get slice range for the given segment.\n\tx, _, _ := xy.XY(l)\n\tstart, stop := 0, len(x)\n\tn := 0\n\tfor i, f := range x {\n\t\tif math.IsNaN(f) {\n\t\t\tn++\n\t\t\tif n == segment {\n\t\t\t\tstart = i + 1\n\t\t\t} else if n == segment+1 {\n\t\t\t\tstop = i\n\t\t\t}\n\t\t}\n\t}\n\n\t// What we acutally need to cut depends on the xyer.\n\tif start < len(l.X) && stop <= len(l.X) {\n\t\tl.X = 
l.X[start:stop]\n\t}\n\tif start < len(l.Y) && stop <= len(l.Y) {\n\t\tl.Y = l.Y[start:stop]\n\t}\n\tif start < len(l.C) && stop <= len(l.C) {\n\t\tl.C = l.C[start:stop]\n\t}\n\n\ta.drawLine(p, xy, cs, l, false)\n}", "func (s *Server) addRoute(method string, pattern string, handler RouteHandler) {\n\ts.routes = append(s.routes, Route{handler : handler, pattern : pattern, method : method})\n}", "func (h *Handler) Add(cmd int32, hf HandlerFunc) {\n\th.router[cmd] = hf\n}", "func (d *distance) add(v int) {\n\td.mu.Lock()\n\tdefer d.mu.Unlock()\n\td.v += v\n}", "func (s *SegmentService) Update(memberID int, item Segment) (*Response, error) {\n\n\tdata := struct {\n\t\tSegment `json:\"segment\"`\n\t}{item}\n\n\tif item.ID < 1 {\n\t\treturn nil, errors.New(\"Update Segment requires a segment to have an ID already\")\n\t}\n\n\treq, err := s.client.newRequest(\"PUT\", fmt.Sprintf(\"segment/%d?id=%d\", memberID, item.ID), data)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresult := &Response{}\n\tresp, err := s.client.do(req, result)\n\tif err != nil {\n\t\treturn resp, err\n\t}\n\n\treturn result, nil\n}", "func (g *graph) addEdge(source, destination flightpath.ScheduleDetail, duration int64) {\n\tg.Schedules[source.City] = append(g.Schedules[source.City], edge{Schedule: destination, Duration: duration, OriginFlightTimestamp: source.Timestamp, Reverse: false})\n\tg.Schedules[destination.City] = append(g.Schedules[destination.City], edge{Schedule: source, Duration: duration, OriginFlightTimestamp: destination.Timestamp, Reverse: true})\n}", "func (self *StraightLineTrack) Add(child Object) {\n\tself.childs = append(self.childs, child)\n}", "func (h *Handler) AddRoute(service config.Service) {\n\th.Routes = append(h.Routes, service)\n}", "func styledSegment(options RawOptions, input interface{}) (*styled.Segment, error) {\n\tvar text string\n\tvar style styled.Style\n\n\tswitch input := input.(type) {\n\tcase string:\n\t\ttext = input\n\tcase 
*styled.Segment:\n\t\ttext = input.Text\n\t\tstyle = input.Style\n\tdefault:\n\t\treturn nil, errStyledSegmentArgType\n\t}\n\n\tif err := style.ImportFromOptions(options); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &styled.Segment{\n\t\tText: text,\n\t\tStyle: style,\n\t}, nil\n}", "func (c *Client) Add() goa.Endpoint {\n\tvar (\n\t\tencodeRequest = EncodeAddRequest(c.encoder)\n\t\tdecodeResponse = DecodeAddResponse(c.decoder, c.RestoreResponseBody)\n\t)\n\treturn func(ctx context.Context, v interface{}) (interface{}, error) {\n\t\treq, err := c.BuildAddRequest(ctx, v)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\terr = encodeRequest(req, v)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresp, err := c.AddDoer.Do(req)\n\t\tif err != nil {\n\t\t\treturn nil, goahttp.ErrRequestError(\"spin-broker\", \"add\", err)\n\t\t}\n\t\treturn decodeResponse(resp)\n\t}\n}", "func (n *NIC) AddAddress(protocol tcpip.NetworkProtocolNumber, addr tcpip.Address) error {\n\t// Add the endpoint.\n\tn.mu.Lock()\n\t_, err := n.addAddressLocked(protocol, addr, false)\n\tn.mu.Unlock()\n\n\treturn err\n}", "func NewSegmentWriter(data []*batch.Batch, meta *metadata.Segment, dir string) *SegmentWriter {\n\tw := &SegmentWriter{\n\t\tdata: data,\n\t\tmeta: meta,\n\t\tdir: dir,\n\t}\n\t// w.preprocessor = w.defaultPreprocessor\n\tw.fileGetter, w.fileCommiter = w.createFile, w.commitFile\n\tw.dataFlusher = flushBlocks\n\tw.indexFlusher = w.flushIndices\n\treturn w\n}", "func (t MatchTask) AddSentence(contextMarker string, text string) {\n\tvar words = strings.Fields(text)\n\tvar sentence = Sentence{0, words}\n\n\tt.sentenceByContextMarker[contextMarker] = sentence\n}", "func (d *Device) AddService(svc *ble.Service) error {\n\treturn d.Server.AddService(svc)\n}" ]
[ "0.6925719", "0.68303204", "0.6809465", "0.6660276", "0.6556915", "0.6511233", "0.6299954", "0.6058295", "0.5953473", "0.5953473", "0.5953473", "0.5941569", "0.5933238", "0.58960634", "0.57868844", "0.5649867", "0.55553", "0.5448272", "0.5372132", "0.5351253", "0.52755", "0.52620035", "0.51700115", "0.5159465", "0.51055413", "0.50456166", "0.50280225", "0.49895605", "0.4944828", "0.4935449", "0.49334785", "0.49296677", "0.49112156", "0.48970625", "0.4877696", "0.48775506", "0.48654494", "0.48536053", "0.4825567", "0.47783354", "0.4775148", "0.47646788", "0.4734367", "0.47266805", "0.47180167", "0.47126576", "0.47082502", "0.46915874", "0.46852136", "0.46746293", "0.4654648", "0.4648018", "0.46478352", "0.46431628", "0.4637566", "0.4630917", "0.4620816", "0.46074057", "0.4574441", "0.4565549", "0.45566884", "0.4552453", "0.45491704", "0.45429948", "0.45357746", "0.4535106", "0.45340848", "0.45301136", "0.4529616", "0.45291314", "0.45275956", "0.4527442", "0.45272106", "0.45195094", "0.4504887", "0.45012525", "0.45002934", "0.44962275", "0.44914743", "0.44880798", "0.44869998", "0.44859782", "0.44827884", "0.44779474", "0.44717598", "0.44687012", "0.44659805", "0.4462539", "0.44575715", "0.44538814", "0.445141", "0.4448354", "0.44454634", "0.4445435", "0.44438034", "0.44437584", "0.44357654", "0.44356924", "0.44305497", "0.4418254" ]
0.76127625
0
interceptPoints returns every point where the wire collides with wire o. The points' wireLen is the total wire length to get to that point (both wire combined).
func (w *wire) interceptPoints(o wire) []point { var interceptPoints []point for i := 1; i < len(w.points); i++ { v1 := segment{ from: w.points[i-1], to: w.points[i], } for u := 1; u < len(o.points); u++ { v2 := segment{ from: o.points[u-1], to: o.points[u], } intercept := v1.intercepts(v2) if intercept.x != 0 && intercept.y != 0 { // Calculate total wire length (both wires combined) intercept.wireLen = v1.from.wireLen + intercept.distanceToPoint(v1.from) + v2.from.wireLen + intercept.distanceToPoint(v2.from) interceptPoints = append(interceptPoints, intercept) } } } return interceptPoints }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (l line2) SlopeIntercept() (float64, float64) {\n\tslope := (l.end.y - l.start.y) / (l.end.x - l.start.x)\n\tintercept := l.start.y - slope*l.start.x\n\treturn slope, intercept\n}", "func (l *Line) GetIntersectionPoints(other Shape) []IntersectionPoint {\n\n\tintersections := []IntersectionPoint{}\n\n\tswitch b := other.(type) {\n\n\tcase *Line:\n\n\t\tdet := (l.X2-l.X)*(b.Y2-b.Y) - (b.X2-b.X)*(l.Y2-l.Y)\n\n\t\tif det != 0 {\n\n\t\t\t// MAGIC MATH; the extra + 1 here makes it so that corner cases (literally aiming the line through the corners of the\n\t\t\t// hollow square in world5) works!\n\n\t\t\tlambda := (((l.Y - b.Y) * (b.X2 - b.X)) - ((l.X - b.X) * (b.Y2 - b.Y)) + 1) / det\n\n\t\t\tgamma := (((l.Y - b.Y) * (l.X2 - l.X)) - ((l.X - b.X) * (l.Y2 - l.Y)) + 1) / det\n\n\t\t\tif (0 < lambda && lambda < 1) && (0 < gamma && gamma < 1) {\n\t\t\t\tdx, dy := l.GetDelta()\n\t\t\t\tintersections = append(intersections, IntersectionPoint{l.X + lambda*dx, l.Y + lambda*dy, other})\n\t\t\t}\n\n\t\t}\n\tcase *Rectangle:\n\t\tside := NewLine(b.X, b.Y, b.X, b.Y+b.H)\n\t\tintersections = append(intersections, l.GetIntersectionPoints(side)...)\n\n\t\tside.Y = b.Y + b.H\n\t\tside.X2 = b.X + b.W\n\t\tside.Y2 = b.Y + b.H\n\t\tintersections = append(intersections, l.GetIntersectionPoints(side)...)\n\n\t\tside.X = b.X + b.W\n\t\tside.Y2 = b.Y\n\t\tintersections = append(intersections, l.GetIntersectionPoints(side)...)\n\n\t\tside.Y = b.Y\n\t\tside.X2 = b.X\n\t\tside.Y2 = b.Y\n\t\tintersections = append(intersections, l.GetIntersectionPoints(side)...)\n\tcase *Space:\n\t\tfor _, shape := range *b {\n\t\t\tintersections = append(intersections, l.GetIntersectionPoints(shape)...)\n\t\t}\n\tcase *Circle:\n\t\t// \tTO-DO: Add this later, because this is kinda hard and would necessitate some complex vector math that, for whatever\n\t\t// reason, is not even readily available in a Golang library as far as I can tell???\n\t\tbreak\n\t}\n\n\t// fmt.Println(\"WARNING! 
Object \", other, \" isn't a valid shape for collision testing against Line \", l, \"!\")\n\n\tsort.Slice(intersections, func(i, j int) bool {\n\t\treturn Distance(l.X, l.Y, intersections[i].X, intersections[i].Y) < Distance(l.X, l.Y, intersections[j].X, intersections[j].Y)\n\t})\n\n\treturn intersections\n\n}", "func getSlopeIntercept(p1 Point, p2 Point) (slope float64, intercept float64) {\n\tslope = (float64(p2.Y) - float64(p1.Y)) / (float64(p2.X) - float64(p1.X))\n\tintercept = float64(p1.Y) - slope*float64(p1.X)\n\n\treturn slope, intercept\n}", "func (v segment) intercepts(o segment) point {\n\t// With the assumption that no interceptions occur when segments are\n\t// parallel, and that segments always move either horizontally or\n\t// vertically (not both), we can pretty easily check for interceptions.\n\t//\n\t// First find the values where interception could occur, and what axis for\n\t// both segments are changing. I.e. if the segments are horizontal\n\t// or vertical.\n\ta, axAxis := v.unchangingAxis()\n\tb, bxAxis := o.unchangingAxis()\n\tif axAxis == bxAxis {\n\t\t// We're assuming that they can't overlap\n\t\t// when they are parallel\n\t\treturn point{}\n\t}\n\n\t// Check if the first value (x or y) is on the interval of the\n\t// same axis of the other segment. 
Do this for the other value (axis) too.\n\tvar aCanCollide bool\n\tif axAxis {\n\t\taCanCollide = inRange(a, o.from.x, o.to.x)\n\t} else {\n\t\taCanCollide = inRange(a, o.from.y, o.to.y)\n\t}\n\tvar bCanCollide bool\n\tif bxAxis {\n\t\tbCanCollide = inRange(b, v.from.x, v.to.x)\n\t} else {\n\t\tbCanCollide = inRange(b, v.from.y, v.to.y)\n\t}\n\n\t// If both axes are in range then they collide\n\tif aCanCollide && bCanCollide {\n\t\t// Check if a is an x- or y-value\n\t\tif axAxis {\n\t\t\treturn point{x: a, y: b}\n\t\t}\n\t\treturn point{x: b, y: a}\n\t}\n\treturn point{x: 0, y: 0}\n}", "func (r Rect) IntersectionPoints(l Line) []Vec {\n\t// Use map keys to ensure unique points\n\tpointMap := make(map[Vec]struct{})\n\n\tfor _, edge := range r.Edges() {\n\t\tif intersect, ok := l.Intersect(edge); ok {\n\t\t\tpointMap[intersect] = struct{}{}\n\t\t}\n\t}\n\n\tpoints := make([]Vec, 0, len(pointMap))\n\tfor point := range pointMap {\n\t\tpoints = append(points, point)\n\t}\n\n\t// Order the points\n\tif len(points) == 2 {\n\t\tif points[1].To(l.A).Len() < points[0].To(l.A).Len() {\n\t\t\treturn []Vec{points[1], points[0]}\n\t\t}\n\t}\n\n\treturn points\n}", "func (r Ruler) LineSliceAlong(start float64, stop float64, l Line) Line {\n\tvar sum float64\n\tvar slice []Point\n\n\tfor i := 0; i < len(l)-1; i++ {\n\t\tp0 := l[i]\n\t\tp1 := l[i+1]\n\t\td := r.Distance(p0, p1)\n\n\t\tsum += d\n\n\t\tif sum > start && len(slice) == 0 {\n\t\t\tslice = append(slice, interpolate(p0, p1, (start-(sum-d))/d))\n\t\t}\n\n\t\tif sum >= stop {\n\t\t\tslice = append(slice, interpolate(p0, p1, (stop-(sum-d))/d))\n\t\t\treturn slice\n\t\t}\n\n\t\tif sum > start {\n\t\t\tslice = append(slice, p1)\n\t\t}\n\t}\n\n\treturn slice\n}", "func (c Circle) IntersectionPoints(l Line) []Vec {\n\tcContainsA := c.Contains(l.A)\n\tcContainsB := c.Contains(l.B)\n\n\t// Special case for both endpoint being contained within the circle\n\tif cContainsA && cContainsB {\n\t\treturn []Vec{}\n\t}\n\n\t// Get 
closest point on the line to this circles' center\n\tclosestToCenter := l.Closest(c.Center)\n\n\t// If the distance to the closest point is greater than the radius, there are no points of intersection\n\tif closestToCenter.To(c.Center).Len() > c.Radius {\n\t\treturn []Vec{}\n\t}\n\n\t// If the distance to the closest point is equal to the radius, the line is tangent and the closest point is the\n\t// point at which it touches the circle.\n\tif closestToCenter.To(c.Center).Len() == c.Radius {\n\t\treturn []Vec{closestToCenter}\n\t}\n\n\t// Special case for endpoint being on the circles' center\n\tif c.Center == l.A || c.Center == l.B {\n\t\totherEnd := l.B\n\t\tif c.Center == l.B {\n\t\t\totherEnd = l.A\n\t\t}\n\t\tintersect := c.Center.Add(c.Center.To(otherEnd).Unit().Scaled(c.Radius))\n\t\treturn []Vec{intersect}\n\t}\n\n\t// This means the distance to the closest point is less than the radius, so there is at least one intersection,\n\t// possibly two.\n\n\t// If one of the end points exists within the circle, there is only one intersection\n\tif cContainsA || cContainsB {\n\t\tcontainedPoint := l.A\n\t\totherEnd := l.B\n\t\tif cContainsB {\n\t\t\tcontainedPoint = l.B\n\t\t\totherEnd = l.A\n\t\t}\n\n\t\t// Use trigonometry to get the length of the line between the contained point and the intersection point.\n\t\t// The following is used to describe the triangle formed:\n\t\t// - a is the side between contained point and circle center\n\t\t// - b is the side between the center and the intersection point (radius)\n\t\t// - c is the side between the contained point and the intersection point\n\t\t// The captials of these letters are used as the angles opposite the respective sides.\n\t\t// a and b are known\n\t\ta := containedPoint.To(c.Center).Len()\n\t\tb := c.Radius\n\t\t// B can be calculated by subtracting the angle of b (to the x-axis) from the angle of c (to the x-axis)\n\t\tB := containedPoint.To(c.Center).Angle() - containedPoint.To(otherEnd).Angle()\n\t\t// 
Using the Sin rule we can get A\n\t\tA := math.Asin((a * math.Sin(B)) / b)\n\t\t// Using the rule that there are 180 degrees (or Pi radians) in a triangle, we can now get C\n\t\tC := math.Pi - A + B\n\t\t// If C is zero, the line segment is in-line with the center-intersect line.\n\t\tvar c float64\n\t\tif C == 0 {\n\t\t\tc = b - a\n\t\t} else {\n\t\t\t// Using the Sine rule again, we can now get c\n\t\t\tc = (a * math.Sin(C)) / math.Sin(A)\n\t\t}\n\t\t// Travelling from the contained point to the other end by length of a will provide the intersection point.\n\t\treturn []Vec{\n\t\t\tcontainedPoint.Add(containedPoint.To(otherEnd).Unit().Scaled(c)),\n\t\t}\n\t}\n\n\t// Otherwise the endpoints exist outside of the circle, and the line segment intersects in two locations.\n\t// The vector formed by going from the closest point to the center of the circle will be perpendicular to the line;\n\t// this forms a right-angled triangle with the intersection points, with the radius as the hypotenuse.\n\t// Calculate the other triangles' sides' length.\n\ta := math.Sqrt(math.Pow(c.Radius, 2) - math.Pow(closestToCenter.To(c.Center).Len(), 2))\n\n\t// Travelling in both directions from the closest point by length of a will provide the two intersection points.\n\tfirst := closestToCenter.Add(closestToCenter.To(l.A).Unit().Scaled(a))\n\tsecond := closestToCenter.Add(closestToCenter.To(l.B).Unit().Scaled(a))\n\n\tif first.To(l.A).Len() < second.To(l.A).Len() {\n\t\treturn []Vec{first, second}\n\t}\n\treturn []Vec{second, first}\n}", "func (el *Fill) Polyline() {}", "func (o ElemU) Ipoints() (coords [][]float64) {\n\tcoords = la.MatAlloc(len(o.IpsElem), Global.Ndim)\n\tfor idx, ip := range o.IpsElem {\n\t\tcoords[idx] = o.Shp.IpRealCoords(o.X, ip)\n\t}\n\treturn\n}", "func pointslope(pp *privPath, i, j int) (ctr, dir Point) {\n\t// assume i<j\n\n\tn := len(pp.Pt)\n\tsums := pp.Sums\n\tr := 0 // rotations from i to j\n\n\tfor j >= n {\n\t\tj -= n\n\t\tr++\n\t}\n\tfor i >= n {\n\t\ti 
-= n\n\t\tr--\n\t}\n\tfor j < 0 {\n\t\tj += n\n\t\tr--\n\t}\n\tfor i < 0 {\n\t\ti += n\n\t\tr++\n\t}\n\n\tx := float64(sums[j+1].x - sums[i].x + r*sums[n].x)\n\ty := float64(sums[j+1].y - sums[i].y + r*sums[n].y)\n\tx2 := float64(sums[j+1].x2 - sums[i].x2 + r*sums[n].x2)\n\txy := float64(sums[j+1].xy - sums[i].xy + r*sums[n].xy)\n\ty2 := float64(sums[j+1].y2 - sums[i].y2 + r*sums[n].y2)\n\tk := float64(j + 1 - i + r*n)\n\n\tctr.X = x / k\n\tctr.Y = y / k\n\n\ta := (x2 - x*x/k) / k\n\tb := (xy - x*y/k) / k\n\tc := (y2 - y*y/k) / k\n\n\tlambda2 := (a + c + math.Sqrt((a-c)*(a-c)+4*b*b)) / 2 // larger e.value\n\n\t// now find e.vector for lambda2\n\ta -= lambda2\n\tc -= lambda2\n\n\tvar l float64\n\tif fabs(a) >= fabs(c) {\n\t\tl = math.Sqrt(a*a + b*b)\n\t\tif l != 0 {\n\t\t\tdir.X = -b / l\n\t\t\tdir.Y = a / l\n\t\t}\n\t} else {\n\t\tl = math.Sqrt(c*c + b*b)\n\t\tif l != 0 {\n\t\t\tdir.X = -c / l\n\t\t\tdir.Y = b / l\n\t\t}\n\t}\n\tif l == 0 {\n\t\tdir.X, dir.Y = 0, 0 // sometimes this can happen when k=4: the two eigenvalues coincide\n\t}\n\treturn\n}", "func (s *Server) GetPoints() []Message {\n\ts.cond.L.Lock()\n\tdefer s.cond.L.Unlock()\n\tcpy := make([]Message, len(s.points))\n\tcopy(cpy, s.points)\n\treturn cpy\n}", "func (a line2) IntersectPoint(b line2) (vector2, bool) {\n\tswaped := false\n\tif math.Abs(a.end.y-a.start.y) > math.Abs(a.end.x-a.start.x) {\n\t\tswaped = true\n\t\ta.start.x, a.start.y = a.start.y, a.start.x\n\t\ta.end.x, a.end.y = a.end.y, a.end.x\n\t\tb.start.x, b.start.y = b.start.y, b.start.x\n\t\tb.end.x, b.end.y = b.end.y, b.end.x\n\t}\n\tif a.start.x > a.end.x {\n\t\ta.start, a.end = a.end, a.start\n\t}\n\tif b.start.x > b.end.x {\n\t\tb.start, b.end = b.end, b.start\n\t}\n\t// we are interested in the 'common' parts.\n\tif a.start.x > b.end.x || b.start.x > a.end.x {\n\t\treturn vector2{}, false\n\t}\n\tsa, ia := a.SlopeIntercept()\n\t// shear b to y direction.\n\tb.start.y = b.start.y - (sa * b.start.x) - ia\n\tb.end.y = b.end.y - (sa * 
b.end.x) - ia\n\tif math.Signbit(b.start.y) == math.Signbit(b.end.y) {\n\t\treturn vector2{}, false\n\t}\n\t// find x if y == 0\n\ttb := math.Abs(b.start.y) / math.Abs(b.end.y-b.start.y)\n\tx := tb*(b.end.x-b.start.x) + b.start.x\n\tif x < a.start.x || a.end.x < x {\n\t\treturn vector2{}, false\n\t}\n\ty := sa*x + ia\n\tif swaped {\n\t\tx, y = y, x\n\t}\n\treturn vector2{x, y}, true\n}", "func pointsToLines(points []Point) (lines []Line) {\n\tfor i := 0; i < len(points); i++ {\n\t\tfor j := i + 1; j < len(points); j++ {\n\t\t\tif points[i].nextTo(points[j]) {\n\t\t\t\tlines = append(lines, Line{P1: points[i], P2: points[j]})\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (win *window) Lines(pt []image.Point) {\n\tif len(pt) < 2 {\n\t\treturn\n\t}\n\tpts := make([]xgb.Point, len(pt))\n\tfor i, p := range pt {\n\t\tpts[i].X, pts[i].Y = int16(p.X), int16(p.Y)\n\t}\n\txwin.PolyLine(xgb.CoordModeOrigin, win.id, win.gc, pts)\n}", "func getLineParams(p1, p2 Point) (sT slopeType, slope, intercept float64) {\n\tif p1.X == p2.X {\n\t\t// Check for infinite slope.\n\t\tif p2.Y > p1.Y {\n\t\t\tsT = INFUP\n\t\t} else {\n\t\t\tsT = INFDOWN\n\t\t}\n\n\t\tslope, intercept = 0, 0\n\t} else if p1.Y == p2.Y {\n\t\t// check for zero slope\n\t\tif p2.X > p1.X {\n\t\t\tsT = ZERORIGHT\n\t\t} else {\n\t\t\tsT = ZEROLEFT\n\t\t}\n\n\t\tslope, intercept = 0, p1.Y\n\t} else {\n\t\t// 4 classifications of non infinite slope based\n\t\t// on the relative positions of p1 and p2\n\t\tslope, intercept = getSlopeIntercept(p1, p2)\n\t\tif p1.X < p2.X {\n\t\t\tif slope > 0 {\n\t\t\t\tsT = POSRIGHT\n\t\t\t} else {\n\t\t\t\tsT = NEGRIGHT\n\t\t\t}\n\t\t} else {\n\t\t\tif slope > 0 {\n\t\t\t\tsT = POSLEFT\n\t\t\t} else {\n\t\t\t\tsT = NEGLEFT\n\t\t\t}\n\t\t}\n\t}\n\n\treturn sT, slope, intercept\n}", "func GetPoints(l *Line, quantity int, stepRate float64, out []*point.Point) []*point.Point {\n\tif quantity <= 0 && stepRate > 0 {\n\t\tquantity = int(Length(l) / stepRate)\n\t}\n\n\tif out == nil {\n\t\tout = 
make([]*point.Point, 0)\n\t}\n\n\tfor idx := 0; idx < quantity; idx++ {\n\t\tposition := float64(idx) / float64(quantity)\n\n\t\tx := l.X1 + (l.X2-l.X1)*position\n\t\ty := l.Y1 + (l.Y2-l.Y1)*position\n\n\t\tout = append(out, point.New(x, y))\n\t}\n\n\treturn out\n}", "func (r Ray) LineIntersect(s Ray) (point Vec) {\n\t/*\n\t\tequation is derived from system of equations with\n\t\ttwo unknowns where equations are r.Formula and s.Formula\n\t\tfrom which we can derive x of intersection point\n\n\t\tstarting with:\n\t\t\tr.V.Y*X - r.V.X*Y - r.V.Y*r.O.X + r.V.X*r.O.Y = 0\n\t\tand:\n\t\t\ts.V.Y*X - s.V.X*Y - s.V.Y*s.O.X + s.V.X*s.O.Y = 0\n\n\t\tget y from first one:\n\t\t\tr.V.Y*X - r.V.Y*r.O.X + r.V.X*r.O.Y = r.V.X*Y\n\t\t\t(r.V.Y*X - r.V.Y*r.O.X + r.V.X*r.O.Y)/r.V.X = Y\n\n\t\tthen we substitute and get x:\n\t\t\ts.V.Y*X - s.V.X * (r.V.Y*X - r.V.Y*r.O.X + r.V.X*r.O.Y) / r.V.X - s.V.Y*s.O.X + s.V.X*s.O.Y = 0 // * r.V.X\n\t\t\ts.V.Y*X*r.V.X - s.V.X*r.V.Y*X + s.V.X*r.V.Y*r.O.X - s.V.X*r.V.X*r.O.Y - s.V.Y*s.O.X*r.V.X + s.V.X*s.O.Y*r.V.X = 0 // - s.V.Y*X*r.V.X + s.V.X*r.V.Y*X\n\t\t\ts.V.X*r.V.Y*r.O.X - s.V.X*r.V.X*r.O.Y - s.V.Y*s.O.X*r.V.X + s.V.X*s.O.Y*r.V.X = s.V.X*r.V.Y*X - s.V.Y*X*r.V.X // simplify\n\t\t\ts.V.X * (r.V.Y*r.O.X + r.V.X * (s.O.Y - r.O.Y)) - s.V.Y*s.O.X*r.V.X = X * (s.V.X*r.V.Y - s.V.Y*r.V.X) // / (s.V.X*r.V.Y - s.V.Y*r.V.X)\n\t\t\t(s.V.X * (r.V.Y*r.O.X + r.V.X * (s.O.Y - r.O.Y)) - s.V.Y*s.O.X*r.V.X) / (s.V.X*r.V.Y - s.V.Y*r.V.X) = X\n\t*/\n\n\tpoint.X = (s.V.X*(r.V.Y*r.O.X+r.V.X*(s.O.Y-r.O.Y)) - s.V.Y*s.O.X*r.V.X) / (s.V.X*r.V.Y - s.V.Y*r.V.X)\n\n\tif r.V.X == 0 {\n\t\tpoint.Y = s.ProjectX(point.X)\n\t} else {\n\t\tpoint.Y = r.ProjectX(point.X)\n\t}\n\n\treturn\n}", "func linePointsGen(p1, p2 Point, speed float64) (gen func() (x, y float64, e error)) {\n\t// Set up math\n\tslopeT, slope, _ := getLineParams(p1, p2)\n\n\tx := p1.X\n\txPrev := x\n\ty := p1.Y\n\tyPrev := y\n\te := fmt.Errorf(\"End of path reached\")\n\ttheta := math.Atan(slope)\n\n\t// Every 
slope type has a different iterator, since they change the\n\t// x and y values in different combinations, as well as do different\n\t// comparisons on the values.\n\tswitch slopeT {\n\tcase ZERORIGHT:\n\t\treturn func() (float64, float64, error) {\n\t\t\tif x > p2.X {\n\t\t\t\treturn 0, 0, e\n\t\t\t}\n\n\t\t\txPrev = x\n\t\t\tx += speed\n\n\t\t\treturn xPrev, y, nil\n\t\t}\n\tcase ZEROLEFT:\n\t\treturn func() (float64, float64, error) {\n\t\t\tif x < p2.X {\n\t\t\t\treturn 0, 0, e\n\t\t\t}\n\n\t\t\txPrev = x\n\t\t\tx -= speed\n\n\t\t\treturn xPrev, y, nil\n\t\t}\n\tcase POSRIGHT:\n\t\treturn func() (float64, float64, error) {\n\t\t\tif y > p2.Y || x > p2.X {\n\t\t\t\treturn 0, 0, e\n\t\t\t}\n\n\t\t\tyPrev = y\n\t\t\txPrev = x\n\n\t\t\ty += speed * math.Sin(theta)\n\t\t\tx += speed * math.Cos(theta)\n\n\t\t\treturn xPrev, yPrev, nil\n\t\t}\n\tcase NEGRIGHT:\n\t\treturn func() (float64, float64, error) {\n\t\t\tif y < p2.Y || x > p2.X {\n\t\t\t\treturn 0, 0, e\n\t\t\t}\n\n\t\t\tyPrev = y\n\t\t\txPrev = x\n\n\t\t\ty += speed * math.Sin(theta)\n\t\t\tx += speed * math.Cos(theta)\n\n\t\t\treturn xPrev, yPrev, nil\n\t\t}\n\tcase POSLEFT:\n\t\treturn func() (float64, float64, error) {\n\t\t\tif y < p2.Y || x < p2.X {\n\t\t\t\treturn 0, 0, e\n\t\t\t}\n\n\t\t\tyPrev = y\n\t\t\txPrev = x\n\n\t\t\ty -= speed * math.Sin(theta)\n\t\t\tx -= speed * math.Cos(theta)\n\n\t\t\treturn xPrev, yPrev, nil\n\t\t}\n\tcase NEGLEFT:\n\t\treturn func() (float64, float64, error) {\n\t\t\tif y > p2.Y || x < p2.X {\n\t\t\t\treturn 0, 0, e\n\t\t\t}\n\n\t\t\tyPrev = y\n\t\t\txPrev = x\n\n\t\t\ty -= speed * math.Sin(theta)\n\t\t\tx -= speed * math.Cos(theta)\n\n\t\t\treturn xPrev, yPrev, nil\n\t\t}\n\tcase INFUP:\n\t\treturn func() (float64, float64, error) {\n\t\t\tif y > p2.Y {\n\t\t\t\treturn 0, 0, e\n\t\t\t}\n\n\t\t\tyPrev := y\n\t\t\ty += speed\n\n\t\t\treturn x, yPrev, nil\n\t\t}\n\tcase INFDOWN:\n\t\treturn func() (float64, float64, error) {\n\t\t\tif y < p2.Y {\n\t\t\t\treturn 0, 0, 
e\n\t\t\t}\n\n\t\t\tyPrev := y\n\t\t\ty -= speed\n\n\t\t\treturn x, yPrev, nil\n\t\t}\n\t}\n\n\treturn nil\n}", "func SlopeInd(m, xc, yc, xlen float64, lbl string, flip, xlog, ylog bool, args, argsLbl *A) {\n\tif args == nil {\n\t\targs = &A{C: \"k\"}\n\t}\n\targs.NoClip = true\n\tl := 0.5 * xlen\n\tx := []float64{xc - l, xc + l, xc + l, xc - l}\n\ty := []float64{yc - m*l, yc - m*l, yc + m*l, yc - m*l}\n\tif flip {\n\t\tx[1] = xc - l\n\t\ty[1] = yc + m*l\n\t}\n\tdx, dy := x[2]-x[0], y[2]-y[0]\n\td := 0.03 * math.Sqrt(dx*dx+dy*dy)\n\txm := xc - l - d\n\txp := xc + l + d\n\tym := yc + m*l - d\n\typ := yc + m*l + d\n\tyr := yc - m*l + d\n\tys := yc - m*l - d\n\tif xlog {\n\t\tfor i := 0; i < 4; i++ {\n\t\t\tx[i] = math.Pow(10.0, x[i])\n\t\t}\n\t\txc = math.Pow(10.0, xc)\n\t\txm = math.Pow(10.0, xm)\n\t\txp = math.Pow(10.0, xp)\n\t}\n\tif ylog {\n\t\tfor i := 0; i < 4; i++ {\n\t\t\ty[i] = math.Pow(10.0, y[i])\n\t\t}\n\t\tyc = math.Pow(10.0, yc)\n\t\tym = math.Pow(10.0, ym)\n\t\typ = math.Pow(10.0, yp)\n\t\tyr = math.Pow(10.0, yr)\n\t\tys = math.Pow(10.0, ys)\n\t}\n\tPlot(x, y, args)\n\tif lbl != \"\" {\n\t\tif argsLbl == nil {\n\t\t\targsLbl = &A{C: \"k\", Fsz: 6}\n\t\t}\n\t\targsLbl.NoClip = true\n\t\tif flip {\n\t\t\targsLbl.Ha = \"center\"\n\t\t\tif m < 0 {\n\t\t\t\targsLbl.Va = \"top\"\n\t\t\t\tText(xc, ym, \"1\", argsLbl)\n\t\t\t} else {\n\t\t\t\targsLbl.Va = \"bottom\"\n\t\t\t\tText(xc, yp, \"1\", argsLbl)\n\t\t\t}\n\t\t\targsLbl.Ha = \"right\"\n\t\t\targsLbl.Va = \"center\"\n\t\t\tText(xm, yc, lbl, argsLbl)\n\t\t} else {\n\t\t\targsLbl.Ha = \"center\"\n\t\t\tif m < 0 {\n\t\t\t\targsLbl.Va = \"bottom\"\n\t\t\t\tText(xc, yr, \"1\", argsLbl)\n\t\t\t} else {\n\t\t\t\targsLbl.Va = \"top\"\n\t\t\t\tText(xc, ys, \"1\", argsLbl)\n\t\t\t}\n\t\t\targsLbl.Ha = \"left\"\n\t\t\targsLbl.Va = \"center\"\n\t\t\tText(xp, yc, lbl, argsLbl)\n\t\t}\n\t}\n}", "func GetXIntersects(p *Point, a, l float64) []*Point {\n\tad := float64(1)\n\top := math.Tan(a)\n\thy := 
math.Sqrt(math.Pow(ad, 2) + math.Pow(op, 2))\n\tq := GetQuadrant(a)\n\tif q == 2 || q == 3 {\n\t\top = -op\n\t\tad = -ad\n\t}\n\ts := int(l / hy)\n\tf := GetFirstXIntersect(p, a)\n\tis := []*Point{f}\n\tfor i := 0; i < s; i++ {\n\t\tx := is[len(is)-1].X + ad\n\t\ty := is[len(is)-1].Y + op\n\t\tis = append(is, NewPoint(x, y))\n\t}\n\treturn is\n}", "func toomInterpolate(points [][]int32, param []int32) []int32 {\n\tt := make(thinPoly, 256)\n\tu := make(thinPoly, 256)\n\n\tfor i := range points {\n\t\tt.Inc(u.Mul(param[i], points[i]))\n\t}\n\n\treturn t.Freeze()\n}", "func (pppc *PseudoPolygonPointCollector) Edges(upper bool) ([]geom.Line, error) {\n\tvar pts []geom.Point\n\n\tif upper {\n\t\tpts = make([]geom.Point, len(pppc.upperPoints))\n\t\tcopy(pts, pppc.upperPoints)\n\t} else {\n\t\tpts = make([]geom.Point, len(pppc.lowerPoints))\n\t\tcopy(pts, pppc.lowerPoints)\n\t}\n\tif debug {\n\t\tlbl := \"lower\"\n\t\tif upper {\n\t\t\tlbl = \"upper\"\n\n\t\t}\n\t\tlog.Printf(\"Working on %v points: %v\", lbl, wkt.MustEncode(pts))\n\t}\n\n\tif !pppc.seen[pppc.End] {\n\t\tpts = append(pts, pppc.End)\n\t}\n\n\tif len(pts) == 2 {\n\t\t// just a shared line, no points to triangulate.\n\t\treturn []geom.Line{pppc.SharedLine()}, nil\n\t}\n\n\treturn pseudopolygon.Triangulate(pts, pppc.Order)\n}", "func PolyLine(img Arr, points [][]Point, closed bool, color Scalar, thickness, lineType, shift int) {\n\tif len(points) == 0 {\n\t\treturn\n\t}\n\n\tvar cc C.int\n\tif closed {\n\t\tcc = 1\n\t} else {\n\t\tcc = 0\n\t}\n\n\tcvpoints := make([][]C.CvPoint, 0, len(points))\n\tpts := make([]*C.CvPoint, 0, len(points))\n\tnpts := make([]C.int, 0, len(points))\n\n\tfor i := range points {\n\n\t\tif len(points[i]) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\tcvpoints[i] = make([]C.CvPoint, len(points[i]))\n\n\t\tfor j := range points[i] {\n\t\t\tcvpoints[i][j] = C.CvPoint{C.int(points[i][j].X), C.int(points[i][j].Y)}\n\t\t}\n\n\t\tpts = append(pts, &cvpoints[i][0])\n\t\tnpts = append(npts, 
C.int(len(points[i])))\n\n\t}\n\n\tif len(pts) == 0 {\n\t\treturn\n\t}\n\n\tdo(func() {\n\t\tC.cvPolyLine(img.arr(), &pts[0], &npts[0], C.int(len(points)), cc, color.cvScalar(), C.int(thickness), C.int(lineType), C.int(shift))\n\t})\n}", "func iIntersection() {\n\n\tfirst := Tuple{0.00, 0, 0}\n\tiInput = append(iInput, first)\n\tcount := 0\n\tfor k := 0; k < len(ArrInput); k++ {\n\t\tfor jj := 0; jj < ArrInput[k].dist; jj++ {\n\t\t\trads := xyInput[k+1].rad\n\t\t\tgx := iInput[count].gx + (int(math.Sin(rads)) * 1)\n\t\t\tgy := iInput[count].gy + (int(math.Cos(rads)) * 1)\n\n\t\t\tnext := Tuple{rads, gx, gy}\n\t\t\tiInput = append(iInput, next)\n\t\t\tcount++\n\n\t\t}\n\n\t}\n\n\tlog.Println(strconv.Itoa(count))\n}", "func (this *DtNavMesh) GetOffMeshConnectionPolyEndPoints(prevRef, polyRef DtPolyRef, startPos, endPos []float32) DtStatus {\n\tvar salt, it, ip uint32\n\n\tif polyRef == 0 {\n\t\treturn DT_FAILURE\n\t}\n\t// Get current polygon\n\tthis.DecodePolyId(polyRef, &salt, &it, &ip)\n\tif it >= (uint32)(this.m_maxTiles) {\n\t\treturn DT_FAILURE | DT_INVALID_PARAM\n\t}\n\tif this.m_tiles[it].Salt != salt || this.m_tiles[it].Header == nil {\n\t\treturn DT_FAILURE | DT_INVALID_PARAM\n\t}\n\ttile := &this.m_tiles[it]\n\tif ip >= (uint32)(tile.Header.PolyCount) {\n\t\treturn DT_FAILURE | DT_INVALID_PARAM\n\t}\n\tpoly := &tile.Polys[ip]\n\n\t// Make sure that the current poly is indeed off-mesh link.\n\tif poly.GetType() != DT_POLYTYPE_OFFMESH_CONNECTION {\n\t\treturn DT_FAILURE\n\t}\n\t// Figure out which way to hand out the vertices.\n\tidx0 := 0\n\tidx1 := 1\n\n\t// Find link that points to first vertex.\n\tfor i := poly.FirstLink; i != DT_NULL_LINK; i = tile.Links[i].Next {\n\t\tif tile.Links[i].Edge == 0 {\n\t\t\tif tile.Links[i].Ref != prevRef {\n\t\t\t\tidx0 = 1\n\t\t\t\tidx1 = 0\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\t}\n\n\tDtVcopy(startPos, tile.Verts[poly.Verts[idx0]*3:])\n\tDtVcopy(endPos, tile.Verts[poly.Verts[idx1]*3:])\n\n\treturn DT_SUCCESS\n}", "func 
Test_309(t *testing.T) {\n\ttestName := \"Test_309 ExtendPolyLine\"\n\trunStart := time.Now()\n\tfmt.Printf(\"%s\\n\", testName)\n\tdefer func() {\n\t\tVerbose.Printf(\"%s took %v\\n\", testName, time.Since(runStart))\n\t}()\n\tvar (\n\t\tgpstest1 GPS2dList = GPS2dList{\n\t\t\t{Lat: 10, Lon: 10},\n\t\t\t{Lat: 15, Lon: 15},\n\t\t}\n\t\tgpstest2 GPS2dList = GPS2dList{\n\t\t\t//\t\t\t{Lat:10,Lon:10,Up:10},\n\t\t\t//\t\t\t{Lat:15,Lon:15,Up:10},\n\t\t\t{Lat: 36.810202, Lon: -77.025878},\n\t\t\t{Lat: 36.803840, Lon: -76.862869},\n\t\t\t{Lat: 36.814619, Lon: -76.902701},\n\t\t}\n\t\tpl PolyLine\n\t\tp2 PolyLine\n\t\tp3 PolyLine\n\t)\n\tVerbose.Printf(\"Centroid of empty list = %v\\n\", pl.Centroid())\n\tpl = append(pl, []Pointe{Pointe{X: 1.0, Y: 1.0}})\n\tVerbose.Printf(\"Centroid of single point [1,1] = %v\\n\", pl.Centroid())\n\tpl[0] = append(pl[0], Pointe{X: 100.0, Y: 100.0})\n\tVerbose.Printf(\"Centroid of line point [1,1],[100,100] = %v\\n\", pl.Centroid())\n\tVerbose.Printf(\"Expect [50.5,50.5]\\n\\n\")\n\tVerbose.Printf(\"%v\\n\", gpstest1)\n\tVerbose.Printf(\"Centroid of gpslist is %v\\n\", pl.Centroid())\n\tgpsPoly1 := gpstest1.PolyLine()\n\tgpsPoly2 := gpstest2.PolyLine()\n\n\tp2.ExtendByPolyLine(gpsPoly1)\n\tVerbose.Printf(\"len p2 now = %d\\n\", len(p2))\n\tVerbose.Printf(\"Centroid of p2 is %v\\n\", p2.Centroid())\n\n\tp3.ExtendByPolyLine(gpsPoly2)\n}", "func (c *Circle) Points() []*Point {\n\tx, y, dx, dy := c.R-1, 0, 1, 1\n\te := dx - (c.R * 2)\n\n\tpoints := make([]*Point, 0)\n\n\tfor x > y {\n\t\tpoints = append(points, &Point{\n\t\t\tX: c.Center.X + x,\n\t\t\tY: c.Center.Y + y,\n\t\t})\n\t\tpoints = append(points, &Point{\n\t\t\tX: c.Center.X + y,\n\t\t\tY: c.Center.Y + x,\n\t\t})\n\t\tpoints = append(points, &Point{\n\t\t\tX: c.Center.X - y,\n\t\t\tY: c.Center.Y + x,\n\t\t})\n\t\tpoints = append(points, &Point{\n\t\t\tX: c.Center.X - x,\n\t\t\tY: c.Center.Y + y,\n\t\t})\n\t\tpoints = append(points, &Point{\n\t\t\tX: c.Center.X - x,\n\t\t\tY: c.Center.Y 
- y,\n\t\t})\n\t\tpoints = append(points, &Point{\n\t\t\tX: c.Center.X - y,\n\t\t\tY: c.Center.Y - x,\n\t\t})\n\t\tpoints = append(points, &Point{\n\t\t\tX: c.Center.X + y,\n\t\t\tY: c.Center.Y - x,\n\t\t})\n\t\tpoints = append(points, &Point{\n\t\t\tX: c.Center.X + x,\n\t\t\tY: c.Center.Y - y,\n\t\t})\n\n\t\tif e <= 0 {\n\t\t\ty++\n\t\t\te += dy\n\t\t\tdy += 2\n\t\t}\n\n\t\tif e > 0 {\n\t\t\tx--\n\t\t\tdx += 2\n\t\t\te += dx - (c.R * 2)\n\t\t}\n\t}\n\n\treturn points\n}", "func (p thinPoly) Inc(x []int32) thinPoly {\n\tfor i := range x {\n\t\tp[i] += x[i]\n\t}\n\treturn p\n}", "func Within(points []*Point, polygons []PolygonI) []*Point {\n\tresult := []*Point{}\n\tfor _, polygon := range polygons {\n\t\tfor _, point := range points {\n\t\t\tif Inside(point, polygon) {\n\t\t\t\tresult = append(result, point)\n\t\t\t}\n\t\t}\n\t}\n\treturn result\n}", "func NewBoundFromPoints(corner, oppositeCorner *Point) *Bound {\n\tb := &Bound{\n\t\tsw: corner.Clone(),\n\t\tne: corner.Clone(),\n\t}\n\n\tb.Extend(oppositeCorner)\n\treturn b\n}", "func (s *Serverus) ChainInterceptors(inter interface{}) {}", "func (g *Gene) IntervalOfExons() []Coor {\n\tmerged := g.MergeExons()\n\treturn IntervalRegions(merged)\n}", "func (c *curve) PointLen() int {\n\treturn (c.P.BitLen() + 7 + 1) / 8\n}", "func Line(x0, y0, x1, y1 int) []image.Point {\n\tdx := int(math.Abs(float64(x1 - x0)))\n\tdy := int(math.Abs(float64(y1 - y0)))\n\tsx := 0\n\tsy := 0\n\tif x0 < x1 {\n\t\tsx = 1\n\t} else {\n\t\tsx = -1\n\t}\n\n\tif y0 < y1 {\n\t\tsy = 1\n\t} else {\n\t\tsy = -1\n\t}\n\n\terr := dx - dy\n\n\tps := make([]image.Point, 0)\n\tfor {\n\t\tps = append(ps, image.Pt(x0, y0))\n\t\tif x0 == x1 && y0 == y1 {\n\t\t\tbreak\n\t\t}\n\t\te2 := err * 2\n\t\tif e2 > -dy {\n\t\t\terr -= dy\n\t\t\tx0 += sx\n\t\t}\n\t\tif e2 < dx {\n\t\t\terr += dx\n\t\t\ty0 += sy\n\t\t}\n\t}\n\treturn ps\n}", "func (r Ruler) LineSlice(start Point, end Point, l Line) Line {\n\tp1 := r.PointOnLine(l, start)\n\tp2 := r.PointOnLine(l, 
end)\n\n\tif p1.index > p2.index || (p1.index == p2.index && p1.t < p2.t) {\n\t\tp1, p2 = p2, p1\n\t}\n\n\tvar slice Line = []Point{p1.point}\n\n\tleft := p1.index + 1\n\tright := p2.index\n\n\tif l[left] != slice[0] && left <= right {\n\t\tslice = append(slice, l[left])\n\t}\n\n\tfor i := left + 1; i <= right; i++ {\n\t\tslice = append(slice, l[i])\n\t}\n\n\tif l[right] != p2.point {\n\t\tslice = append(slice, p2.point)\n\t}\n\n\treturn slice\n}", "func (r Rectangle) Clip(pts []Point) []Point {\n\tclipped := make([]Point, 0, len(pts))\n\tfor _, pt := range pts {\n\t\tif pt.In(r) {\n\t\t\tclipped = append(clipped, pt)\n\t\t}\n\t}\n\treturn clipped\n}", "func (t *Transcript) WhichExonIntersect(reg Coor) []int {\n\tresult := []int{}\n\tfor i, exon := range t.Exons {\n\t\tif exon.Intersect(reg) {\n\t\t\tresult = append(result, i)\n\t\t}\n\t}\n\treturn result\n}", "func lagrangeInterpolate(points map[int]*big.Int, x int, curve elliptic.Curve) *big.Int {\n\tlog.Printf(\"The points is: %v\", points)\n\n\t// 通过这些坐标点来恢复出多项式\n\tpolynomialClient := polynomial.New(curve.Params().N)\n\tresult := polynomialClient.GetPolynomialByPoints(points)\n\n\t// 秘密就是常数项\n\tsecret := result[len(result)-1]\n\n\tlog.Printf(\"The coefficients of the polynomial is: %v\", result)\n\treturn secret\n}", "func (b *BasicShape) Refine() []Shape {\n\tpanic(\"Refine should only be called on shapes which cannot be intersected: Basic\")\n}", "func linInt(x, y []float64, xVal float64) float64 {\n\t// x vector must be nondecreasing\n\n\tif xVal < x[0] || xVal > x[len(x)-1] {\n\t\treturn nan\n\t}\n\n\t// find out which segment we are in\n\tn := 0\n\tfor i, _ := range x {\n\t\tif xVal > x[i] {\n\t\t\tn = i\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\tif n > len(y) {\n\t\treturn nan\n\t}\n\n\t// interpolate\n\tx0, y0 := x[n], y[n]\n\tx1, y1 := x[n+1], y[n+1]\n\ta := x1 - x0\n\tb := y1 - y0\n\tyVal := b*(xVal-x0)/a + y0\n\treturn yVal\n}", "func (ecpgb *EntityContactPointGroupBy) IntsX(ctx context.Context) []int 
{\n\tv, err := ecpgb.Ints(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func GetYIntersects(p *Point, a, l float64) []*Point {\n\top := float64(1)\n\tad := op / math.Tan(a)\n\thy := math.Sqrt(math.Pow(ad, 2) + math.Pow(op, 2))\n\tq := GetQuadrant(a)\n\tif q == 3 || q == 4 {\n\t\top = -op\n\t\tad = -ad\n\t}\n\ts := int(l / hy)\n\tf := GetFirstYIntersect(p, a)\n\tis := []*Point{f}\n\tfor i := 0; i < s; i++ {\n\t\tx := is[len(is)-1].X + ad\n\t\ty := is[len(is)-1].Y + op\n\t\tis = append(is, NewPoint(x, y))\n\t}\n\treturn is\n}", "func (c Contour) ContainsWnPoly(p Point) bool {\n\tcn := 0\n\tfor i := range c { // edge from c[i] to nextC\n\t\tC := c[i]\n\t\tvar nextC Point\n\t\tif i+1 == len(c) {\n\t\t\tnextC = c[0]\n\t\t} else {\n\t\t\tnextC = c[i+1]\n\t\t}\n\t\tif ((C.Y <= p.Y) && (nextC.Y > p.Y)) || ((C.Y > p.Y) && (nextC.Y <= p.Y)) { // a downward crossing\n\t\t\t// compute the actual edge-ray intersect x-coordinate\n\t\t\tvt := float64(p.Y-C.Y) / (nextC.Y - C.Y)\n\t\t\tif p.X < C.X+vt*(nextC.X-C.X) { // p.Coordinates.X < intersect\n\t\t\t\tcn++ // a valid crossing of y=p.Coordinates[1] right of p.Coordinates[0]\n\t\t\t}\n\t\t}\n\t}\n\treturn (cn&1 == 1) // 0 if even (out), and 1 if odd (in)\n}", "func OfPoints(pts ...[2]float64) Winding { return Order{}.OfPoints(pts...) 
}", "func (gd *Definition) ConatainsPoint(x, y, buf float64) bool {\n\tif x < gd.Eorig-buf {\n\t\treturn false\n\t}\n\tif x > gd.Eorig+float64(gd.Ncol)*gd.Cwidth+buf {\n\t\treturn false\n\t}\n\tif y > gd.Norig+buf {\n\t\treturn false\n\t}\n\tif y < gd.Norig-float64(gd.Nrow)*gd.Cwidth-buf {\n\t\treturn false\n\t}\n\treturn true\n}", "func OverlayXY(value Vec2) *SimpleElement { return newSEVec2(\"overlayXY\", value) }", "func (rg Range) Lines(y0, y1 int) Range {\n\tnrg := rg\n\tnrg.Min.Y = rg.Min.Y + y0\n\tnrg.Max.Y = rg.Min.Y + y1\n\treturn rg.Intersect(nrg)\n}", "func (lgb *LocationGroupBy) IntsX(ctx context.Context) []int {\n\tv, err := lgb.Ints(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (ligb *LineItemGroupBy) IntsX(ctx context.Context) []int {\n\tv, err := ligb.Ints(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (xy xyer) XY(i int) (float64, float64) {\n\treturn xy.X[i], xy.Y[i]\n}", "func OfGeomPoints(points ...geom.Point) Winding { return Order{}.OfGeomPoints(points...) 
}", "func VPCMPESTRI(i, mx, x operand.Op) { ctx.VPCMPESTRI(i, mx, x) }", "func readCoverPoints(file *elf.File, tracePC uint64, traceCmp map[uint64]bool) ([2][]uint64, error) {\n\tvar pcs [2][]uint64\n\ttext := file.Section(\".text\")\n\tif text == nil {\n\t\treturn pcs, fmt.Errorf(\"no .text section in the object file\")\n\t}\n\tdata, err := text.Data()\n\tif err != nil {\n\t\treturn pcs, fmt.Errorf(\"failed to read .text: %v\", err)\n\t}\n\tconst callLen = 5\n\tend := len(data) - callLen + 1\n\tfor i := 0; i < end; i++ {\n\t\tpos := bytes.IndexByte(data[i:end], 0xe8)\n\t\tif pos == -1 {\n\t\t\tbreak\n\t\t}\n\t\tpos += i\n\t\ti = pos\n\t\toff := uint64(int64(int32(binary.LittleEndian.Uint32(data[pos+1:]))))\n\t\tpc := text.Addr + uint64(pos)\n\t\ttarget := pc + off + callLen\n\t\tif target == tracePC {\n\t\t\tpcs[0] = append(pcs[0], pc)\n\t\t} else if traceCmp[target] {\n\t\t\tpcs[1] = append(pcs[1], pc)\n\t\t}\n\t}\n\treturn pcs, nil\n}", "func (ecps *EntityContactPointSelect) IntsX(ctx context.Context) []int {\n\tv, err := ecps.Ints(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (r *MinMaxRange) SplitPoints(n int) []float64 {\n\tdelta := r.Max - r.Min\n\tif delta <= 0 {\n\t\treturn nil\n\t}\n\n\tmin := r.Min\n\tstp := delta / float64(n+1)\n\tres := make([]float64, n)\n\tfor i := 0; i < n; i++ {\n\t\tres[i] = min + stp*float64(i+1)\n\t}\n\treturn res\n}", "func (c Cluster) GetPoints() []int {\n\treturn append([]int(nil), c.indices...)\n}", "func linear(x, x1, x2, y1, y2 float64) float64 {\n slope := (y2 - y1) / (x2 - x1)\n intercept := y1 - x1 * slope\n return x * slope + intercept\n}", "func getLineRanges(src image.Image,\n\tthreshold uint32,\n\temptyLineThreshold float64) []lineRange {\n\tbounds := src.Bounds()\n\tsrcWidth, srcHeight := bounds.Dx(), bounds.Dy()\n\tthreshold16 := threshold * 256\n\n\tvar ranges []lineRange\n\tvar r lineRange\n\n\tmaxDotCount := int(emptyLineThreshold)\n\tif emptyLineThreshold < 1 {\n\t\tmaxDotCount = 
int(float64(srcWidth) * emptyLineThreshold)\n\t}\n\tfor y := 0; y < srcHeight; y++ {\n\t\temptyLine := true\n\t\tdotCount := 0\n\t\tfor x := 0; x < srcWidth; x++ {\n\t\t\tr, g, b, _ := src.At(x, y).RGBA()\n\t\t\tbrightness := getBrightness(r, g, b)\n\t\t\tif brightness < threshold16 {\n\t\t\t\tdotCount++\n\t\t\t\tif dotCount >= maxDotCount {\n\t\t\t\t\temptyLine = false\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif emptyLine {\n\t\t\tif y == 0 {\n\t\t\t\tr = lineRange{start: y, end: y, emptyLine: true}\n\t\t\t} else {\n\t\t\t\tif r.emptyLine {\n\t\t\t\t\tr.end = y\n\t\t\t\t} else {\n\t\t\t\t\tranges = append(ranges, r)\n\t\t\t\t\tr = lineRange{start: y, end: y, emptyLine: true}\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tif y == 0 {\n\t\t\t\tr = lineRange{start: y, end: y, emptyLine: false}\n\t\t\t} else {\n\t\t\t\tif r.emptyLine {\n\t\t\t\t\tranges = append(ranges, r)\n\t\t\t\t\tr = lineRange{start: y, end: y, emptyLine: false}\n\t\t\t\t} else {\n\t\t\t\t\tr.end = y\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\tranges = append(ranges, r)\n\treturn ranges\n}", "func FuncLineString(start, end float64, points int, fn ParamFunc) geom.LineString {\n\tif points < 2 {\n\t\tpanic(\"cannot have a line with less than 2 points\")\n\t}\n\n\tres := (end - start) / (float64(points) - 1)\n\tret := make([][2]float64, points)\n\tt := start\n\n\tfor i := 0; i < points - 1; i++ {\n\t\tret[i] = fn(t)\n\t\tt += res\n\t}\n\n\tret[points - 1] = fn(end)\n\n\treturn ret\n}", "func PolyMin(k float64) MinFunc {\n\treturn func(a, b float64) float64 {\n\t\treturn poly(a, b, k)\n\t}\n}", "func (r *Automaton) GetStartPoints() []int {\n\tpointset := make(map[int]struct{})\n\tpointset[0] = struct{}{}\n\n\tfor s := 0; s < r.nextState; s += 2 {\n\t\ttrans := r.states[s]\n\t\tlimit := trans + 3*r.states[s+1]\n\t\t//System.out.println(\" state=\" + (s/2) + \" trans=\" + trans + \" limit=\" + limit);\n\t\tfor trans < limit {\n\t\t\tmin := r.transitions[trans+1]\n\t\t\tmax := 
r.transitions[trans+2]\n\t\t\t//System.out.println(\" min=\" + min);\n\t\t\tpointset[min] = struct{}{}\n\t\t\tif max < 0x10FFFF {\n\t\t\t\tpointset[max+1] = struct{}{}\n\t\t\t}\n\t\t\ttrans += 3\n\t\t}\n\t}\n\n\tpoints := make([]int, 0, len(pointset))\n\tfor k, _ := range pointset {\n\t\tpoints = append(points, k)\n\t}\n\tsort.Ints(points)\n\treturn points\n}", "func (ema *Ema) GetPoints() []point {\n\treturn ema.points\n}", "func getPoint(x, y []byte) plotter.XYZs {\n\tpts := make(plotter.XYZs, len(x))\n\tfor i := range x {\n\t\tpts[i].X = float64(x[i])\n\t\tpts[i].Y = float64(y[i])\n\t\tpts[i].Z = 0.1\n\n\t}\n\treturn pts\n}", "func (s *BaseAspidaListener) EnterPoints(ctx *PointsContext) {}", "func (p linear) getInfill(layerNr int, outline clipper.Path, holes clipper.Paths, overlap float32) clipper.Paths {\n\tvar result clipper.Paths\n\n\t// clip the paths with the lines using intersection\n\texset := clipper.Paths{outline}\n\n\tco := clipper.NewClipperOffset()\n\tcl := clipper.NewClipper(clipper.IoNone)\n\n\t// generate the ex-set for the overlap (only if needed)\n\tif overlap != 0 {\n\t\tco.AddPaths(exset, clipper.JtSquare, clipper.EtClosedPolygon)\n\t\tco.MiterLimit = 2\n\t\texset = co.Execute(float64(-overlap))\n\n\t\tco.Clear()\n\t\tco.AddPaths(holes, clipper.JtSquare, clipper.EtClosedPolygon)\n\t\tco.MiterLimit = 2\n\t\tholes = co.Execute(float64(overlap))\n\t}\n\n\t// clip the lines by the outline and holes\n\tcl.AddPaths(exset, clipper.PtClip, true)\n\tcl.AddPaths(holes, clipper.PtClip, true)\n\n\tif layerNr%2 == 0 {\n\t\tcl.AddPaths(p.verticalPaths, clipper.PtSubject, false)\n\t} else {\n\t\tcl.AddPaths(p.horizontalPaths, clipper.PtSubject, false)\n\t}\n\n\ttree, ok := cl.Execute2(clipper.CtIntersection, clipper.PftEvenOdd, clipper.PftEvenOdd)\n\tif !ok {\n\t\tfmt.Println(\"getLinearFill failed\")\n\t\treturn nil\n\t}\n\n\tfor _, c := range tree.Childs() {\n\t\tresult = append(result, c.Contour())\n\t}\n\n\treturn result\n}", "func (c *cursor) 
LineTo(points ...[2]float64) []uint32 {\n\treturn c.encodeCmd(uint32(NewCommand(cmdLineTo, len(points))), points)\n}", "func (order Order) OfInt64Points(ipts ...[2]int64) Winding {\n\tpts := make([][2]float64, len(ipts))\n\tfor i := range ipts {\n\t\tpts[i] = [2]float64{\n\t\t\tfloat64(ipts[i][0]),\n\t\t\tfloat64(ipts[i][1]),\n\t\t}\n\t}\n\treturn Orientation(order.YPositiveDown, pts...)\n}", "func PCMPESTRI(i, mx, x operand.Op) { ctx.PCMPESTRI(i, mx, x) }", "func (i *Result) Intersection() []geom.Coord {\n\treturn i.intersection\n}", "func GetOverlappedIds(c *gin.Context) {}", "func PolyInt32(a []int32, t []TermT) TermT {\n\tcount := C.uint32_t(len(a))\n\t//iam: FIXME need to unify the yices errors and the go errors...\n\t// do we want to be nannies here?\n\tif count == 0 {\n\t\treturn TermT(C.yices_zero())\n\t}\n\treturn TermT(C.yices_poly_int32(count, (*C.int32_t)(&a[0]), (*C.term_t)(&t[0])))\n}", "func (p *Profile) Boundaries(src []byte) (boundaries []Boundary) {\n\t// Find maximum count.\n\tmax := 0\n\tfor _, b := range p.Blocks {\n\t\tif b.Count > max {\n\t\t\tmax = b.Count\n\t\t}\n\t}\n\t// Divisor for normalization.\n\tdivisor := math.Log(float64(max))\n\n\t// boundary returns a Boundary, populating the Norm field with a normalized Count.\n\tboundary := func(offset int, start bool, count int) Boundary {\n\t\tb := Boundary{Offset: offset, Start: start, Count: count}\n\t\tif !start || count == 0 {\n\t\t\treturn b\n\t\t}\n\t\tif max <= 1 {\n\t\t\tb.Norm = 0.8 // Profile is in\"set\" mode; we want a heat map. 
Use cov8 in the CSS.\n\t\t} else if count > 0 {\n\t\t\tb.Norm = math.Log(float64(count)) / divisor\n\t\t}\n\t\treturn b\n\t}\n\n\tline, col := 1, 2 // TODO: Why is this 2?\n\tfor si, bi := 0, 0; si < len(src) && bi < len(p.Blocks); {\n\t\tb := p.Blocks[bi]\n\t\tif b.StartLine == line && b.StartCol == col {\n\t\t\tboundaries = append(boundaries, boundary(si, true, b.Count))\n\t\t}\n\t\tif b.EndLine == line && b.EndCol == col {\n\t\t\tboundaries = append(boundaries, boundary(si, false, 0))\n\t\t\tbi++\n\t\t\tcontinue // Don't advance through src; maybe the next block starts here.\n\t\t}\n\t\tif src[si] == '\\n' {\n\t\t\tline++\n\t\t\tcol = 0\n\t\t}\n\t\tcol++\n\t\tsi++\n\t}\n\tsort.Sort(boundariesByPos(boundaries))\n\treturn\n}", "func WrapEndpoints(in svc.Endpoints) svc.Endpoints {\n\n\t// Pass a middleware you want applied to every endpoint.\n\t// optionally pass in endpoints by name that you want to be excluded\n\t// e.g.\n\t// in.WrapAllExcept(authMiddleware, \"Status\", \"Ping\")\n\n\t// Pass in a svc.LabeledMiddleware you want applied to every endpoint.\n\t// These middlewares get passed the endpoints name as their first argument when applied.\n\t// This can be used to write generic metric gathering middlewares that can\n\t// report the endpoint name for free.\n\t// github.com/jjggzz/truss/_example/middlewares/labeledmiddlewares.go for examples.\n\t// in.WrapAllLabeledExcept(errorCounter(statsdCounter), \"Status\", \"Ping\")\n\n\t// How to apply a middleware to a single endpoint.\n\t// in.ExampleEndpoint = authMiddleware(in.ExampleEndpoint)\n\t// 限流\n\tlimitMiddleware := middleware.LimitMiddleware(middleware.LimitDelay, 100)\n\t// 断路器\n\tbreakerMiddleware := middleware.BreakerMiddleware(gobreaker.Settings{})\n\terrorMiddleware := ErrorMiddleware()\n\n\tin.WrapAllExcept(limitMiddleware)\n\tin.WrapAllExcept(breakerMiddleware)\n\tin.WrapAllExcept(errorMiddleware)\n\treturn in\n}", "func (space Space) PointCombine(point1 []float64, weight1 int, point2 []float64, 
weight2 int) []float64 {\n\treturn space.vspace.PointCombine(point1, weight1, point2, weight2)\n}", "func (f ChangeLineSpaceFilter) getLineRanges(src image.Image) lineRanges {\n\tbounds := src.Bounds()\n\tsrcWidth, srcHeight := bounds.Dx(), bounds.Dy()\n\tthreshold16 := f.option.Threshold * 256\n\n\tvar ranges lineRanges\n\tvar r lineRange\n\n\tmaxDotCount := int(f.option.EmptyLineThreshold)\n\tif f.option.EmptyLineThreshold < 1 {\n\t\tmaxDotCount = int(float64(srcWidth) * f.option.EmptyLineThreshold)\n\t}\n\tfor y := 0; y < srcHeight; y++ {\n\t\temptyLine := true\n\t\tdotCount := 0\n\t\tfor x := 0; x < srcWidth; x++ {\n\t\t\tr, g, b, _ := src.At(x, y).RGBA()\n\t\t\tbrightness := getBrightness(r, g, b)\n\t\t\tif brightness < threshold16 {\n\t\t\t\tdotCount++\n\t\t\t\tif dotCount >= maxDotCount {\n\t\t\t\t\temptyLine = false\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif emptyLine {\n\t\t\tif y == 0 {\n\t\t\t\tr = lineRange{start: y, end: y, emptyLine: true}\n\t\t\t} else {\n\t\t\t\tif r.emptyLine {\n\t\t\t\t\tr.end = y\n\t\t\t\t} else {\n\t\t\t\t\tranges = append(ranges, r)\n\t\t\t\t\tr = lineRange{start: y, end: y, emptyLine: true}\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tif y == 0 {\n\t\t\t\tr = lineRange{start: y, end: y, emptyLine: false}\n\t\t\t} else {\n\t\t\t\tif r.emptyLine {\n\t\t\t\t\tranges = append(ranges, r)\n\t\t\t\t\tr = lineRange{start: y, end: y, emptyLine: false}\n\t\t\t\t} else {\n\t\t\t\t\tr.end = y\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\tranges = append(ranges, r)\n\treturn ranges\n}", "func NewPointEndpoints() []*api.Endpoint {\n\treturn []*api.Endpoint{}\n}", "func (b Bounds2) Corner(i int) Point2 {\n\tvar pX, pY float64\n\tif i&1 != 0 {\n\t\tpX = b.pMax.X\n\t} else {\n\t\tpX = b.pMin.X\n\t}\n\n\tif i&2 != 0 {\n\t\tpY = b.pMax.Y\n\t} else {\n\t\tpY = b.pMin.Y\n\t}\n\n\treturn Point2{X: pX, Y: pY}\n}", "func neighbours(loc xy) []xy {\n\tswitch {\n\tcase loc.x > 0 && loc.y > 0:\n\t\treturn []xy{xy{loc.x + 1, loc.y}, xy{loc.x, loc.y + 1}, 
xy{loc.x - 1, loc.y}, xy{loc.x, loc.y - 1}}\n\tcase loc.y > 0:\n\t\treturn []xy{xy{loc.x + 1, loc.y}, xy{loc.x, loc.y + 1}, xy{loc.x, loc.y - 1}}\n\tcase loc.x > 0:\n\t\treturn []xy{xy{loc.x + 1, loc.y}, xy{loc.x, loc.y + 1}, xy{loc.x - 1, loc.y}}\n\tdefault:\n\t\treturn []xy{xy{loc.x + 1, loc.y}, xy{loc.x, loc.y + 1}}\n\t}\n}", "func (c *Aggregator) Points() ([]Point, error) {\n\treturn c.samples, nil\n}", "func (t *Table) Pointers(baseOff int) []int {\n\tvar ptrs []int\n\n\tcur := baseOff + len(t.Elems)*2\n\tfor _, e := range t.Elems {\n\t\tvar p int\n\t\tif len(e) == 0 {\n\t\t\tp = 0\n\t\t} else {\n\t\t\tp = cur\n\t\t\tcur += len(e)\n\t\t}\n\t\tptrs = append(ptrs, p)\n\t}\n\n\treturn ptrs\n}", "func (me *XElemsPolyline) Walk() (err error) {\n\tif fn := WalkHandlers.XElemsPolyline; me != nil {\n\t\tif fn != nil {\n\t\t\tif err = fn(me, true); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tfor _, x := range me.Polylines {\n\t\t\tif err = x.Walk(); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif fn != nil {\n\t\t\tif err = fn(me, false); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (o NurbsPatch) diffPoints(idOld int, xNew []float64) bool {\n\tif math.Abs(xNew[3]-o.ControlPoints[idOld].X[3]) > 0 {\n\t\treturn true\n\t}\n\treturn false\n}", "func (rlgb *RuleLimitGroupBy) IntsX(ctx context.Context) []int {\n\tv, err := rlgb.Ints(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (g SimplePoint) Intersects(o Object) bool {\n\treturn intersectsObjectShared(g, o,\n\t\tfunc(v Polygon) bool {\n\t\t\treturn poly.Point(Position{X: g.X, Y: g.Y, Z: 0}).Intersects(polyExteriorHoles(v.Coordinates))\n\t\t},\n\t\tfunc(v MultiPolygon) bool {\n\t\t\tfor _, c := range v.Coordinates {\n\t\t\t\tif poly.Point(Position{X: g.X, Y: g.Y, Z: 
0}).Intersects(polyExteriorHoles(c)) {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn false\n\t\t},\n\t)\n}", "func IntsConst(v []int64) ConstInts {\n\treturn &constInts{v}\n}", "func (s *Asteroid) getPoints() {\n\ts.points = s.init\n\ts.drawable = makeVertexArrayObj(s.points)\n}", "func Points(scorer interface{ Number() uint8 }) uint8 {\n\tvar points = map[uint8]uint8{1: 11, 3: 10, 8: 2, 9: 3, 10: 4}\n\treturn points[scorer.Number()]\n}", "func WrapEndpoints(in svc.Endpoints) svc.Endpoints {\n\t\n\t// Pass a middleware you want applied to every endpoint.\n\t// optionally pass in endpoints by name that you want to be excluded\n\t// e.g.\n\t// in.WrapAllExcept(authMiddleware, \"Status\", \"Ping\")\n\t\n\t// Pass in a svc.LabeledMiddleware you want applied to every endpoint.\n\t// These middlewares get passed the endpoints name as their first argument when applied.\n\t// This can be used to write generic metric gathering middlewares that can\n\t// report the endpoint name for free.\n\t// github.com/metaverse/truss/_example/middlewares/labeledmiddlewares.go for examples.\n\t// in.WrapAllLabeledExcept(errorCounter(statsdCounter), \"Status\", \"Ping\")\n\t\n\t// How to apply a middleware to a single endpoint.\n\t// in.ExampleEndpoint = authMiddleware(in.ExampleEndpoint)\n\t\n\t//创建限流器 1r/s 每秒请求数\n\tlimiter := rate.NewLimiter(rate.Every(time.Second*1), 10)\n\t\n\t//通过DelayingLimiter中间件,在bookListEndPoint的外层再包裹一层限流的endPoint\n\tlimtMw := ratelimit.NewDelayingLimiter(limiter)\n\tin.GetBookInfoEndpoint = limtMw(in.GetBookInfoEndpoint)\n\tin.GetBookListEndpoint = limtMw(in.GetBookListEndpoint)\n\treturn in\n}", "func (isf intSliceFunctorImpl) Ints() []int {\n\treturn isf.ints\n}", "func (c *container) Polyline(pts ...Point) *Polyline {\n\tp := &Polyline{Points: pts}\n\tc.contents = append(c.contents, p)\n\n\treturn p\n}", "func (r Ruler) PointOnLine(l Line, p Point) PointOnLine {\n\tvar minDist float64 = math.Inf(1)\n\tvar minX, minY, minT, x, y, dx, dy, t 
float64\n\tvar minI int\n\n\tfor i := 0; i < len(l)-1; i++ {\n\n\t\tx = l[i][0]\n\t\ty = l[i][1]\n\t\tdx = (l[i+1][0] - x) * r.kx\n\t\tdy = (l[i+1][1] - y) * r.ky\n\n\t\tif dx != 0 || dy != 0 {\n\n\t\t\tt = ((p[0]-x)*r.kx*dx + (p[1]-y)*r.ky*dy) / (dx*dx + dy*dy)\n\n\t\t\tif t > 1 {\n\t\t\t\tx = l[i+1][0]\n\t\t\t\ty = l[i+1][1]\n\n\t\t\t} else if t > 0 {\n\t\t\t\tx += (dx / r.kx) * t\n\t\t\t\ty += (dy / r.ky) * t\n\t\t\t}\n\t\t}\n\n\t\tdx = (p[0] - x) * r.kx\n\t\tdy = (p[1] - y) * r.ky\n\n\t\tvar sqDist = dx*dx + dy*dy\n\t\tif sqDist < minDist {\n\t\t\tminDist = sqDist\n\t\t\tminX = x\n\t\t\tminY = y\n\t\t\tminI = i\n\t\t\tminT = t\n\t\t}\n\t}\n\n\treturn PointOnLine{\n\t\tpoint: Point{minX, minY},\n\t\tindex: minI,\n\t\tt: math.Max(0, math.Min(1, minT)),\n\t}\n}", "func findNearNeighbor(x []float64, y []float64, points map[int]int, start int) int {\n neighbor := 0\n smDist := 10000000.0\n\n // Loop through all yet visited points to find out the cloesest point to current point.\n for i := 1; i < len(points); i++ {\n if (i != start) && (points[i] != 0) {\n sqSum := math.Pow(x[i] - x[start], 2) + math.Pow(y[i] - y[start], 2)\n dist := math.Sqrt(sqSum)\n if dist < smDist {\n neighbor = i\n smDist = dist\n }\n }\n }\n return neighbor\n}", "func _line(x1o, y1o, x2o, y2o *FloatOrInt) chan FloatPair {\n\tc := make(chan FloatPair)\n\n\tgo func() {\n\t\tx1 := x1o.Normalized()\n\t\ty1 := y1o.Normalized()\n\t\tx2 := x2o.Normalized()\n\t\ty2 := y2o.Normalized()\n\n\t\txdiff := sub(max(x1, x2), min(x1, x2))\n\t\tydiff := sub(max(y1, y2), min(y1, y2))\n\n\t\txdir := -1\n\t\tif lessEqual(x1, x2) {\n\t\t\txdir = 1\n\t\t}\n\n\t\tydir := -1\n\t\tif lessEqual(y1, y2) {\n\t\t\tydir = 1\n\t\t}\n\n\t\tr := max(xdiff, ydiff)\n\n\t\tfor i := 0; i < r.Int()+1; i++ {\n\t\t\tx := x1.Float()\n\t\t\ty := y1.Float()\n\n\t\t\tif ydiff.Bool() {\n\t\t\t\ty += (float64(i) * ydiff.Float()) / r.Float() * float64(ydir)\n\t\t\t}\n\t\t\tif xdiff.Bool() {\n\t\t\t\tx += (float64(i) * xdiff.Float()) / 
r.Float() * float64(xdir)\n\t\t\t}\n\n\t\t\tc <- FloatPair{x, y} // yield\n\t\t}\n\t\tclose(c)\n\n\t}()\n\n\treturn c\n}", "func (self *Rectangle) OffsetPointI(args ...interface{}) *Rectangle{\n return &Rectangle{self.Object.Call(\"offsetPoint\", args)}\n}", "func (m Shape) GetCoordinates() []Line {\n\treturn m.Coordinates\n}", "func (*Secp256k1) PointLen() int { return egPoint.MarshalSize() }", "func BoundingBox_Points(pts [][]float64) []float64 {\n\t// setting opposite default values\n\twest, south, east, north := float64(math.Inf(1)), float64(math.Inf(1)),float64(math.Inf(-1)),float64(math.Inf(-1))\n\n\tfor _, pt := range pts {\n\t\tx, y := pt[0], pt[1]\n\t\t// can only be one condition\n\t\t// using else if reduces one comparison\n\t\tif x < west {\n\t\t\twest = x\n\t\t\n\t\t}\n\t\tif x > east {\n\t\t\teast = x\n\t\t}\n\n\t\tif y < south {\n\t\t\tsouth = y\n\t\t}\n\t\tif y > north {\n\t\t\tnorth = y\n\t\t}\n\t}\n\treturn []float64{west, south, east, north}\n}", "func (l *Level) Around(loc math.Point) []*Tile {\n\tadj := l.Bounds.Clip(math.Adj(loc))\n\n\tneighbors := make([]*Tile, 0, len(adj))\n\n\tfor _, p := range adj {\n\t\tneighbors = append(neighbors, l.At(p))\n\t}\n\treturn neighbors\n}", "func (orth *Orthotope) Intersects(o *Orthotope) int32 {\n\tinT := int32(0)\n\toutT := int32(math.MaxInt32)\n\tfor index, p0 := range o.Point {\n\t\tp1 := o.Delta[index] + p0\n\n\t\tif orth.Delta[index] == 0 {\n\t\t\tif orth.Point[index] < p0 || p1 < orth.Point[index] {\n\t\t\t\treturn -1\n\t\t\t}\n\t\t} else {\n\t\t\tif orth.Delta[index] < 0 {\n\t\t\t\t// Swap p0 and p1 for negative directions.\n\t\t\t\tp0, p1 = p1, p0\n\t\t\t}\n\t\t\tp0T := ((p0 - orth.Point[index]) << ACCURACY) / orth.Delta[index]\n\t\t\tinT = disc.Max(inT, p0T)\n\n\t\t\tp1T := ((p1 - orth.Point[index]) << ACCURACY) / orth.Delta[index]\n\t\t\toutT = disc.Min(outT, p1T)\n\t\t}\n\t}\n\n\tif inT < outT && inT >= 0 {\n\t\treturn inT\n\t}\n\treturn -1\n}", "func (o StreamProcessorOutput) 
PolygonRegionsOfInterest() StreamProcessorPointArrayArrayOutput {\n\treturn o.ApplyT(func(v *StreamProcessor) StreamProcessorPointArrayArrayOutput { return v.PolygonRegionsOfInterest }).(StreamProcessorPointArrayArrayOutput)\n}" ]
[ "0.5369969", "0.5363394", "0.52974355", "0.5246381", "0.5191002", "0.48185128", "0.46456409", "0.45902103", "0.4585371", "0.45481473", "0.45180067", "0.44956204", "0.44434085", "0.44339702", "0.4428231", "0.44268453", "0.4377828", "0.43652183", "0.43572596", "0.43551555", "0.43396905", "0.43117067", "0.42636937", "0.4244943", "0.41953826", "0.41849253", "0.41668543", "0.4155991", "0.41538334", "0.41437495", "0.41435057", "0.4141967", "0.41309434", "0.41164553", "0.4115369", "0.41123563", "0.41093075", "0.4096261", "0.40938035", "0.40910164", "0.40719175", "0.40713865", "0.40692753", "0.40534657", "0.40459406", "0.40447563", "0.40372694", "0.40369743", "0.40360156", "0.39962155", "0.3993789", "0.39914802", "0.39866063", "0.39780536", "0.39768612", "0.39764377", "0.3969254", "0.39652914", "0.39640805", "0.39608464", "0.39607778", "0.39571217", "0.39390662", "0.39360598", "0.39359733", "0.39229637", "0.39093846", "0.39068687", "0.38953927", "0.38944936", "0.38934216", "0.3885146", "0.38822928", "0.38697746", "0.38657743", "0.38592505", "0.3856952", "0.38548213", "0.38446546", "0.3835397", "0.3832403", "0.3826099", "0.38248911", "0.3819719", "0.38179356", "0.38147724", "0.38059682", "0.38050953", "0.38047758", "0.37992474", "0.37968534", "0.37878054", "0.3779596", "0.37778074", "0.3773406", "0.3765794", "0.37654746", "0.3762597", "0.3760146", "0.37579226" ]
0.8265049
0
move returns a new point that has the same properties as the point, but has moved a certain distance dist in direction dir.
func (p point) move(dir direction, dist int) point { var movedPoint point switch dir { case up: movedPoint = point{x: p.x, y: p.y + dist} case down: movedPoint = point{x: p.x, y: p.y - dist} case right: movedPoint = point{x: p.x + dist, y: p.y} case left: movedPoint = point{x: p.x - dist, y: p.y} } movedPoint.wireLen = p.wireLen + dist return movedPoint }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func movePoint(p *Point2D, dx, dy float64) {\n\tp.Move(dx, dy)\n}", "func (p *Point2D) Move(deltaX, deltaY float64) {\n\t// if you want to modify the \"object\" (i.e. the value) you need to pass a pointer\n\t// otherwise you would only get a copy (by-value)\n\n\t// this is actually short-hand for (*p).x and (*p).y. Go does magic dereferencing on struct pointers.\n\tp.x += deltaX\n\tp.y += deltaY\n}", "func (d *droid) move(direction int) int {\n\td.code.PushInput(int64(direction))\n\td.code.Continue()\n\n\tmoveResult := int(d.code.PopOutput())\n\tif moveResult != 0 {\n\t\tif direction == 1 {\n\t\t\td.location.y--\n\t\t} else if direction == 2 {\n\t\t\td.location.y++\n\t\t} else if direction == 3 {\n\t\t\td.location.x--\n\t\t} else {\n\t\t\td.location.x++\n\t\t}\n\t}\n\n\tif moveResult == 2 {\n\t\td.foundTarget = true\n\t\td.oxygenPosition = &point{x: d.location.x, y: d.location.y}\n\t}\n\n\treturn moveResult\n}", "func (path *Path) Move(pt Point) {\n\twriteCommand(&path.buf, \"m\", pt.X, pt.Y)\n}", "func (r *paintingRobot) move() {\n posX, posY := r.position.x, r.position.y\n switch r.direction {\n case up:\n posY -= 1\n case right:\n posX += 1\n case down:\n posY += 1\n case left:\n posX -= 1\n }\n\n r.position = &point{\n x: posX,\n y: posY,\n }\n\n fmt.Println(fmt.Sprintf(\"robot moved to [%d,%d]\", r.position.x, r.position.y))\n}", "func (d *droid) moveToPoint(p *point) {\n\tif !(d.location.x == p.x && d.location.y == p.y) {\n\t\tpath := d.calculatePath(d.location, p)\n\t\tfor _, step := range path {\n\t\t\tmoveResult := d.move(step)\n\t\t\tif moveResult == 0 {\n\t\t\t\tlog.Fatal(\"Move to point can't find path\")\n\t\t\t}\n\t\t}\n\t}\n}", "func (c Circle) Moved(delta Vec) Circle {\n\treturn Circle{\n\t\tCenter: c.Center.Add(delta),\n\t\tRadius: c.Radius,\n\t}\n}", "func (l *Line) Move(direction Vector) Vector {\n\tl.q = l.q.Add(direction)\n\tl.p = l.p.Add(direction)\n\n\treturn l.Center()\n}", "func (c *Constraints) move(dx, dy float32) 
{\n\tc.x.move(dx)\n\tc.y.move(dy)\n}", "func (r Ray3) Moved(dist float64) Vector3 {\n\treturn r.Origin.Plus(r.Dir.Scaled(dist))\n}", "func (e *Engine) Move(dir Direction) (success bool) {\n\tsuccess = false\n\n\tdest1 := e.CurrentState.Figure.Add(dir.Point())\n\tvalid, containsBox := e.CheckDestination(dest1)\n\tif !valid {\n\t\treturn\n\t}\n\tvar dest2 Point\n\tif valid && containsBox {\n\t\tdest2 = dest1.Add(dir.Point())\n\t\tvalid, containsSecBox := e.CheckDestination(dest2)\n\t\tif !valid || containsSecBox {\n\t\t\treturn\n\t\t}\n\t}\n\n\tsuccess = true\n\te.appendState2History(e.CurrentState)\n\n\tif containsBox {\n\t\te.moveBox(dest1, dest2)\n\t}\n\te.CurrentState.Figure = dest1\n\treturn\n}", "func (point Point) Walk(direction Direction) Point {\n\tswitch direction {\n\tcase DirectionTop:\n\t\tpoint.Y++\n\tcase DirectionDown:\n\t\tpoint.Y--\n\tcase DirectionRight:\n\t\tpoint.X++\n\tcase DirectionLeft:\n\t\tpoint.X--\n\t}\n\n\treturn point\n}", "func (s *swimmer) move() {\n\ts.xPos += s.direction()\n}", "func (v Vertex) Move(dx, dy int) {\n\tv.x = v.x + dx\n\tv.y = v.y + dy\n}", "func (l Line) Moved(delta Vec) Line {\n\treturn Line{\n\t\tA: l.A.Add(delta),\n\t\tB: l.B.Add(delta),\n\t}\n}", "func (r *Robot) Move() {\n\tr.Pos.Y = r.Pos.Y + r.Dir.Y\n\tr.Pos.X = r.Pos.X + r.Dir.X\n}", "func (a *Agent) Move(position *vec2.T) bool {\n\tdiff := vec2.Sub(position, a.position)\n\tpositionWithSizeRadius := diff.Normalize().Scale(a.sizeRadius).Add(position)\n\n\tif a.currentTriangle != nil && a.currentTriangle.containsPoint(positionWithSizeRadius) {\n\t\ta.position = position\n\t\treturn true\n\t} else if a.currentTriangle = a.navMesh.Mesh.findTriangleByPoint(positionWithSizeRadius); a.currentTriangle != nil {\n\t\ta.position = position\n\t\treturn true\n\t}\n\treturn false\n}", "func (l *Line) Move(x, y float64) {\n\tl.X += x\n\tl.Y += y\n\tl.X2 += x\n\tl.Y2 += y\n}", "func (mov *Moves) Dir(move ecs.Entity) (image.Point, bool) {\n\tif move.Type().HasAll(movDir) 
{\n\t\treturn mov.dir[move.ID()], true\n\t}\n\treturn image.ZP, false\n}", "func (m *Map) Move(loc Location, d Direction) Location {\n\tRow, Col := m.FromLocation(loc)\n\tswitch d {\n\t\tcase North:\t\tRow -= 1\n\t\tcase South:\t\tRow += 1\n\t\tcase West:\t\tCol -= 1\n\t\tcase East:\t\tCol += 1\n\t\tcase NoMovement: //do nothing\n\t\tdefault: Panicf(\"%v is not a valid direction\", d)\n\t}\n\treturn m.FromRowCol(Row, Col) //this will handle wrapping out-of-bounds numbers\n}", "func (b *Bullet) move() {\n\tb.point = b.point.Add(b.velocity)\n\tb.op.GeoM.Translate(float64(b.velocity.X), float64(b.velocity.Y))\n}", "func (d *Directory) Move(p, name, newPath string) error {\n\tdir, err := d.checkPathExists(p)\n\tif err != nil {\n\t\treturn err\n\t}\n\tnewDir, err := d.checkPathExists(newPath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor i, iNode := range dir.INodes {\n\t\tif iNode.GetName() == name {\n\t\t\td.lock()\n\t\t\tnewDir.INodes = append(newDir.INodes, iNode)\n\t\t\tdir.INodes = append(dir.INodes[:i], dir.INodes[i+1:]...)\n\t\t\td.unlock()\n\t\t\tdir.updateDirectorySize(p)\n\t\t\tdir.updateDirectorySize(newPath)\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn errors.New(\"target doesn't exists: \" + p + name)\n}", "func (r *Render) move(from, to int) int {\n\tfromX, fromY := r.toPos(from)\n\ttoX, toY := r.toPos(to)\n\n\tr.out.CursorUp(fromY - toY)\n\tr.out.CursorBackward(fromX - toX)\n\treturn to\n}", "func (p *Particle) Move() {\n\tp.Position[0] += (p.Direction[0] * p.Speed)\n\tp.Position[1] += (p.Direction[1] * p.Speed)\n}", "func (p *Path) MoveTo(x, y float64) *Path {\n\t//if len(p.d) == 0 && equal(x, 0.0) && equal(y, 0.0) {\n\t//\treturn p\n\t//}\n\tp.i0 = len(p.d)\n\tp.d = append(p.d, moveToCmd, x, y)\n\treturn p\n}", "func Move(zoom, lat, lon float64, pdx int, pdy int) (nlat, nlon float64) {\n\txf, yf := TileNum(int(zoom), lat, lon)\n\tdx := float64(pdx) / TileWidth\n\tdy := float64(pdy) / TileHeight\n\n\treturn latlonFromXY(int(zoom), xf+(dx), yf+(dy))\n}", 
"func (m Matrix) Moved(delta Vec) Matrix {\n\tm[4], m[5] = m[4]+delta.X, m[5]+delta.Y\n\treturn m\n}", "func (r *DefaultRobot) Move(d Direction) error {\n\tr.pos = Navigate(r.pos, d)\n\tr.distanceTravelled++\n\treturn nil\n}", "func (p *PDF) Move(xDelta, yDelta float64) {\n\tp.x, p.y = p.x+xDelta, p.y+yDelta\n\tp.fpdf.MoveTo(p.x, p.y)\n}", "func (r Rect) Moved(delta Vec) Rect {\n\treturn Rect{\n\t\tMin: r.Min.Add(delta),\n\t\tMax: r.Max.Add(delta),\n\t}\n}", "func (p *Pawn) Move(newLocation location.Location) {\n\tp.loc = newLocation\n\tp.hasMoved = true\n}", "func (f *Fs) moveDir(ctx context.Context, id, leaf, directoryID string) (err error) {\n\t// Move the object\n\topts := rest.Opts{\n\t\tMethod: \"PUT\",\n\t\tRootURL: id,\n\t\tNoResponse: true,\n\t}\n\tmove := api.MoveFolder{\n\t\tName: f.opt.Enc.FromStandardName(leaf),\n\t\tParent: directoryID,\n\t}\n\tvar resp *http.Response\n\treturn f.pacer.Call(func() (bool, error) {\n\t\tresp, err = f.srv.CallXML(ctx, &opts, &move, nil)\n\t\treturn shouldRetry(ctx, resp, err)\n\t})\n}", "func (m MoveResult) Move() Move {\n\tif !m.valid {\n\t\tzap.L().Fatal(\"Check if this isValid before accessing the Move()!\")\n\t}\n\treturn m.move\n}", "func (p *Player) Move (direction string, l *Log) int{\n t := time.Now()\n moved := 0\n name := p.Name\n switch direction {\n case \"a\":\n if p.X == 1 {\n l.WrongDirection(p)\n } else if p.X > 1 {\n p.X = p.X - 1\n moved = moved + 1\n l.AddAction(t.Format(\"3:04:05 \")+ name +\" moves West.\")\n }\n case \"s\":\n if p.Y == 5 {\n l.WrongDirection(p)\n } else if p.Y < 5 {\n p.Y = p.Y + 1\n moved = moved + 1\n l.AddAction(t.Format(\"3:04:05 \")+ name +\" moves South.\")\n }\n case \"d\":\n if p.X == 5 {\n l.WrongDirection(p)\n } else if p.X < 5 {\n p.X = p.X + 1\n moved = moved + 1\n l.AddAction(t.Format(\"3:04:05 \")+ name +\" moves East.\")\n }\n case \"w\":\n if p.Y == 1 {\n l.WrongDirection(p)\n } else if p.Y > 1 {\n p.Y = p.Y - 1\n moved = moved + 1\n l.AddAction(t.Format(\"3:04:05 
\")+ name +\" moves North.\")\n }\n case \"x\":\n os.Exit(1)\n }\n return moved\n}", "func (p *Position) Move(hole int) (*Position, *Position, MoveResult, error) {\n\t// validate in range\n\tif hole < 1 || hole > WIDTH() {\n\t\treturn p, nil, BadMove, errors.New(\"hole not in range\")\n\t}\n\n\t// validate hole has stones\n\tstones := p.near().Items[hole]\n\tif stones == 0 {\n\t\treturn p, nil, BadMove, errors.New(\"invalid move\")\n\t}\n\n\t// create delta position\n\tdelta, lastRow, lastHole := deltaPosition(hole, stones)\n\t// fmt.Printf(\"deltaPosition lastRow:%d, lastHole:%d\\n\", lastRow, lastHole)\n\t// combine\n\tresult := p.add(delta)\n\n\t// determina result from last position\n\tmoveResult := EndOfTurn\n\tif lastHole == 0 {\n\t\tmoveResult = RepeatTurn\n\t}\n\n\t// check for steal\n\tif isSteal, opRow, opHole, opCount := result.IsSteal(lastRow, lastHole); isSteal {\n\t\t// create steal position\n\t\tsteal := stealPosition(lastRow, lastHole, opRow, opHole, opCount)\n\t\t// apply\n\t\tresult = result.add(steal)\n\t}\n\n\tif result.IsGameEnd() {\n\t\tmoveResult = EndOfGame\n\t}\n\n\treturn result, delta, moveResult, nil\n}", "func (r *Rook) Move(newLocation location.Location) {\n\tr.loc = newLocation\n\tr.hasMoved = true\n}", "func (db *GeoDB) MoveMember(q *GeoQuery) error {\n\tconn := db.pool.Get()\n\tdefer conn.Close()\n\n\t_, err := db.scripts[\"GEOMOVE\"].Do(\n\t\tconn,\n\t\tTwoKeys,\n\t\tq.FromKey,\n\t\tq.ToKey,\n\t\tq.Member,\n\t)\n\n\treturn err\n}", "func turnDist(p1, p2 Coord) int {\n return int(math.Ceil(float64(dist(p1, p2)) / MOVE_DIST))\n}", "func (f *FileUtil) move(src, dst string) {\n\tsrc = FixPath(src)\n\tdst = FixPath(dst)\n\tif f.Verbose {\n\t\tfmt.Println(\"Moving\", src, \"to\", dst)\n\t}\n\t_, err := f.dbx.Move(files.NewRelocationArg(src, dst))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}", "func (mov *Moves) SetDir(move ecs.Entity, dir image.Point) {\n\tmove.Add(movDir)\n\tmov.dir[move.ID()] = dir\n}", "func (npc *Npc) Move(direction 
Direction) {\n\ttransform := npc.GetComponent(TransformTag).(*TransformComponent)\n\ttransform.MovementQueue.AddStep(direction)\n}", "func (s *State) Move(pos Position, dir Direction) Position {\n\tfor {\n\t\tif !s.CanMove(pos, dir) {\n\t\t\treturn pos\n\t\t}\n\t\tpos = pos.Next(dir)\n\t}\n}", "func (p *Player) move(treasureMap map[[2]int]int) ([2]int, bool) {\n\n\tif p.DirectionTaken == up {\n\t\tnewPlayerPositionXY := [2]int{p.Position[0], p.Position[1] + 1}\n\t\tif treasureMap[newPlayerPositionXY] == entity_obstacle {\n\t\t\tp.DirectionTaken = right\n\t\t} else {\n\t\t\treturn newPlayerPositionXY, true\n\t\t}\n\t}\n\n\tif p.DirectionTaken == right {\n\t\tnewPlayerPositionXY := [2]int{p.Position[0] + 1, p.Position[1]}\n\t\tif treasureMap[newPlayerPositionXY] == entity_obstacle {\n\t\t\tp.DirectionTaken = down\n\t\t} else {\n\t\t\treturn newPlayerPositionXY, true\n\t\t}\n\t}\n\n\tif p.DirectionTaken == down {\n\t\tnewPlayerPositionXY := [2]int{p.Position[0], p.Position[1] - 1}\n\t\tif treasureMap[newPlayerPositionXY] == entity_obstacle {\n\t\t\tp.DirectionTaken = stuck\n\t\t} else {\n\t\t\treturn newPlayerPositionXY, true\n\t\t}\n\t}\n\n\treturn p.Position, false\n}", "func (d *driver) Move(ctx context.Context, source string, dest string) error {\n\tdefer debugTime()()\n\tsourceobj := d.fullPath(source)\n\tsrchash, err := d.shell.ResolvePath(sourceobj)\n\tif err != nil {\n\t\tif strings.HasPrefix(err.Error(), \"no link named\") {\n\t\t\treturn storagedriver.PathNotFoundError{Path: source}\n\t\t}\n\t\treturn err\n\t}\n\n\td.rootlock.Lock()\n\tdefer d.rootlock.Unlock()\n\tnewroot, err := d.shell.Patch(d.roothash, \"rm-link\", source[1:])\n\tif err != nil {\n\t\tif err.Error() == \"merkledag: not found\" {\n\t\t\treturn storagedriver.PathNotFoundError{Path: source}\n\t\t} else {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// remove leading slash\n\tdest = dest[1:]\n\tnewroot, err = d.shell.PatchLink(newroot, dest, srchash, true)\n\tif err != nil {\n\t\treturn 
err\n\t}\n\n\td.roothash = newroot\n\tfmt.Println(\"HASH AFTER MOVE: \", newroot)\n\td.publishHash(newroot)\n\treturn nil\n}", "func (i *Item) Move(x, y int) {\n\ti.X = x\n\ti.Y = y\n}", "func (p *Point3D) Move(deltaX, deltaY, deltaZ float64) {\n\t// Can we leverage existing code?\n\t// Turns out we can! This basically translates to super(deltaX, deltaY)\n\tp.Point2D.Move(deltaX, deltaY)\n\n\t//Now only the 3d part\n\tp.z += deltaZ\n}", "func (s *carrier) move() {\n\ts.position.x += s.direction.x\n\ts.position.y += s.direction.y\n\n\t// is the grid big enough for where we want to go?\n\tif !s.isInGrid() {\n\t\t// correct our position for the new grid size\n\t\ts.position.x += s.grid.size\n\t\ts.position.y += s.grid.size\n\t\t//fmt.Println(s.position, s.direction)\n\t\ts.grid.grow()\n\t}\n}", "func (p *PlayerEntity) Move(offset shared.FloatVector) {\n\tp.position.X += offset.X\n\tp.position.Y += offset.Y\n}", "func (p *Player) Move(x float32, y float32) {\n\tp.position.X = p.position.X + p.movement.X*p.speed\n\tp.position.Y = p.position.Y + p.movement.Y*p.speed\n}", "func (unitImpl *UnitImpl) Move(x float64, y float64) bool {\n\treturn unitImpl.RunOnServer(\"move\", map[string]interface{}{\n\t\t\"x\": x,\n\t\t\"y\": y,\n\t}).(bool)\n}", "func (c *Camera) Move(dir CameraDirection, offset float32) {\n\tvar delta types.Vec3\n\n\tswitch dir {\n\tcase Up:\n\t\tdelta = c.Up.Mul(offset)\n\tcase Down:\n\t\tdelta = c.Up.Mul(-offset)\n\tcase Left:\n\t\tdelta = c.LookAt.Sub(c.Position).Normalize().Cross(c.Up).Mul(-offset)\n\tcase Right:\n\t\tdelta = c.LookAt.Sub(c.Position).Normalize().Cross(c.Up).Mul(offset)\n\tcase Forward:\n\t\tdelta = c.LookAt.Sub(c.Position).Normalize().Mul(offset)\n\tcase Backward:\n\t\tdelta = c.LookAt.Sub(c.Position).Normalize().Mul(-offset)\n\t}\n\n\tc.Position = c.Position.Add(delta)\n\tc.LookAt = c.LookAt.Add(delta)\n\tc.Update()\n}", "func (p *Path) MoveTo(x, y float64) {\n\tp.appendToPath(MoveToCmp, x, y)\n\tp.x = x\n\tp.y = y\n}", "func (d *Dao) 
MoveRelative(\n\tmotorCode int,\n\tdirection int,\n\tspeed int,\n\tposition int,\n) (resp interface{}, err error) {\n\tmotorCodeBytes, err := uint8Bytes(motorCode)\n\tif err != nil {\n\t\treturn resp, err\n\t}\n\tdirectionBytes, err := uint8Bytes(direction)\n\tif err != nil {\n\t\treturn resp, err\n\t}\n\tspeedBytes, err := uint16Bytes(speed)\n\tif err != nil {\n\t\treturn resp, err\n\t}\n\tposBytes, err := uint16Bytes(position)\n\tif err != nil {\n\t\treturn resp, err\n\t}\n\treq := MotorMoveRelativeUnit.Request()\n\toutput, err := sendAck2(d,\n\t\tcomposeBytes(\n\t\t\treq.Bytes(),\n\t\t\tmotorCodeBytes,\n\t\t\tdirectionBytes,\n\t\t\tspeedBytes,\n\t\t\tposBytes,\n\t\t),\n\t\tMotorMoveRelativeUnit.RecResp(),\n\t\tMotorMoveRelativeUnit.ComResp(),\n\t)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn resp, err\n\t}\n\tresp = binary.BigEndian.Uint16(output[3:5])\n\treturn resp, nil\n}", "func (elems *Elements) move(from, to dvid.Point3d, deleteElement bool) (moved *Element, changed bool) {\n\tfor i, elem := range *elems {\n\t\tif from.Equals(elem.Pos) {\n\t\t\tchanged = true\n\t\t\t(*elems)[i].Pos = to\n\t\t\tmoved = (*elems)[i].Copy()\n\t\t\tif deleteElement {\n\t\t\t\t(*elems)[i] = (*elems)[len(*elems)-1] // Delete without preserving order.\n\t\t\t\t*elems = (*elems)[:len(*elems)-1]\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\n\t// Check relationships for any moved points.\n\tfor i, elem := range *elems {\n\t\t// Move any relationship with given pt.\n\t\tfor j, r := range elem.Rels {\n\t\t\tif from.Equals(r.To) {\n\t\t\t\tr.To = to\n\t\t\t\t(*elems)[i].Rels[j] = r\n\t\t\t\tchanged = true\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (d *driver) Move(ctx context.Context, sourcePath string, destPath string) error {\n\tif err := d.copy(ctx, sourcePath, destPath); err != nil {\n\t\treturn err\n\t}\n\treturn d.Delete(ctx, sourcePath)\n}", "func Move(s string) (*shogi.Move, error) {\n\ta := strings.Split(strings.TrimSpace(s), \"\")\n\n\tif len(a) < 4 {\n\t\treturn nil, 
errors.New(\"insufficient length. input = \" + s)\n\t}\n\n\t// is from captured.\n\tif strings.Contains(s, \"*\") {\n\t\tpiece, err := Piece(usi.Piece(a[0]))\n\t\tif err != nil {\n\t\t\tmsg := \"failed to parse captured piece on Move. input = \" + a[0] + \": %w\"\n\t\t\treturn nil, fmt.Errorf(msg, err)\n\t\t}\n\n\t\tsrc := &shogi.Point{Row: -1, Column: -1}\n\n\t\trow, err := parseRow(a[3])\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"failed to parse row. input = %s: %w\", a[3], err)\n\t\t}\n\n\t\tcol, err := parseColumn(a[2])\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"failed to parse column. input = %s: %w\", a[2], err)\n\t\t}\n\n\t\tdst := &shogi.Point{Row: row, Column: col}\n\t\treturn &shogi.Move{\n\t\t\tSource: src,\n\t\t\tDest: dst,\n\t\t\tPieceID: piece,\n\t\t\tIsPromoted: false,\n\t\t}, nil\n\t}\n\n\tsrow, err := parseRow(a[1])\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to parse row. input = %s: %w\", a[1], err)\n\t}\n\n\tscol, err := parseColumn(a[0])\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to parse column. input = %s: %w\", a[0], err)\n\t}\n\n\tdrow, err := parseRow(a[3])\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to parse row. input = %s: %w\", a[3], err)\n\t}\n\n\tdcol, err := parseColumn(a[2])\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to parse column. 
input = %s: %w\", a[2], err)\n\t}\n\n\tsrc := &shogi.Point{Row: srow, Column: scol}\n\tdst := &shogi.Point{Row: drow, Column: dcol}\n\tprm := len(a) == 5 && a[4] == \"+\"\n\n\treturn &shogi.Move{\n\t\tSource: src,\n\t\tDest: dst,\n\t\tPieceID: 0,\n\t\tIsPromoted: prm,\n\t}, nil\n}", "func makeMove(oldRow, oldCol int, dir string) (newRow, newCol int) {\n\tnewRow, newCol = oldRow, oldCol\n\n\t// Switch based on direction, with circular movement if out of bounds\n\tswitch dir {\n\tcase \"UP\":\n\t\tnewRow = newRow - 1\n\t\tif newRow < 0 {\n\t\t\tnewRow = len(maze) - 1\n\t\t}\n\tcase \"DOWN\":\n\t\tnewRow = newRow + 1\n\t\tif newRow == len(maze) {\n\t\t\tnewRow = 0\n\t\t}\n\tcase \"RIGHT\":\n\t\tnewCol = newCol + 1\n\t\tif newCol == len(maze[0]) {\n\t\t\tnewCol = 0\n\t\t}\n\tcase \"LEFT\":\n\t\tnewCol = newCol - 1\n\t\tif newCol < 0 {\n\t\t\tnewCol = len(maze[0]) - 1\n\t\t}\n\t}\n\n\t// If wall, then ignore movement\n\tif maze[newRow][newCol] == '#' {\n\t\tnewRow = oldRow\n\t\tnewCol = oldCol\n\t}\n\n\t// Can use fallthrough keyword to skip the implicit break in the switch statement\n\n\treturn\n}", "func (e *Element) move(offset mat.Vec, horizontal bool) mat.Vec {\n\toff := offset.Add(e.margin.Min).Add(e.Offest)\n\te.Frame = e.size.ToAABB().Moved(off)\n\toff.AddE(e.Padding.Min)\n\toOff := off\n\te.Module.OnFrameChange()\n\te.forChild(FCfg{\n\t\tFilter: IgnoreHidden.Filter,\n\t\tReverse: !e.Horizontal(),\n\t}, func(ch *Element) {\n\t\tif ch.Relative {\n\t\t\tch.move(oOff, false)\n\t\t} else {\n\t\t\toff = ch.move(off, e.Horizontal())\n\t\t}\n\n\t})\n\n\tif horizontal {\n\t\tl, _, r, _ := e.margin.Deco()\n\t\toffset.X += l + r + e.Frame.W()\n\t} else {\n\t\t_, b, _, t := e.margin.Deco()\n\t\toffset.Y += b + t + e.Frame.H()\n\t}\n\n\treturn offset\n}", "func (self Path) Move(path string) error {\n\tif info, err := os.Stat(path); err != nil {\n\t\treturn err\n\t} else {\n\t\tself.Create()\n\t\treturn self.Remove()\n\t}\n}", "func (board *Board) Move(direction string) 
*Board {\n\tswitch direction {\n\tcase UP:\n\t\treturn board.Up()\n\tcase DOWN:\n\t\treturn board.Down()\n\tcase LEFT:\n\t\treturn board.Left()\n\tcase RIGHT:\n\t\treturn board.Right()\n\t}\n\treturn nil\n}", "func NewMoveDirectoryParams() *MoveDirectoryParams {\n\tvar ()\n\treturn &MoveDirectoryParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "func (s *Sprite) Move(dx, dy int) DrawingBuilder {\n\tif s.op == nil {\n\t\ts.err = errors.New(\"add a &ebiten.DrawImageOptions{} to s.op\")\n\t\treturn s\n\t}\n\ts.op.GeoM.Translate(float64(s.x+dx), float64(s.y+dy))\n\treturn s\n}", "func NewMoveOption() *MoveOptions {\n\treturn &MoveOptions{}\n}", "func (dwr *DifferentialWheeledRobot) RollPosition(distLeft, distRight float64, prev Position) Position {\n\n\t// Straight line\n\tif distLeft == distRight {\n\t\treturn Position{\n\t\t\tprev.X + distLeft*math.Cos(prev.Theta),\n\t\t\tprev.Y + distLeft*math.Sin(prev.Theta),\n\t\t\tprev.Theta,\n\t\t}\n\t}\n\n\t// Turning\n\tturnRadius := dwr.BaseWidth * (distRight + distLeft) / (2 * (distRight - distLeft))\n\tangle := (distRight-distLeft)/dwr.BaseWidth + prev.Theta\n\treturn Position{\n\t\tprev.X + turnRadius*(math.Sin(angle)-math.Sin(prev.Theta)),\n\t\tprev.Y - turnRadius*(math.Cos(angle)-math.Cos(prev.Theta)),\n\t\tangle,\n\t}\n\n\t// s := (distLeft + distRight) / 2.0\n\t// theta := (distRight-distLeft)/dwr.BaseWidth + prev.Theta\n\t// x := s*math.Cos(theta) + prev.X\n\t// y := s*math.Sin(theta) + prev.Y\n\n\t// return Position{x, y, theta}\n\n}", "func (r Rectangle) Move(dx, dy int32) Rectangle {\n\treturn Rectangle{r.X + dx, r.Y + dy, r.Width, r.Height}\n}", "func nextMove(screen virtual_machine.Screen, p Point, last_move int) int {\n\tif !isWall(screen, move(toTheRight(last_move), p)) {\n\t\treturn toTheRight(last_move)\n\t} else if !isWall(screen, move(last_move, p)) {\n\t\treturn last_move\n\t} else if !isWall(screen, move(toTheLeft(last_move), p)) {\n\t\treturn toTheLeft(last_move)\n\t} else {\n\t\treturn 
toTheLeft(toTheLeft(last_move))\n\t}\n}", "func (m *Mob) handleMove(now int64) {\n\tnext := now + m.MoveInterval\n\n\tm.mutex.Lock()\n\tm.nextRun = next\n\tm.mutex.Unlock()\n\n\tpos := m.GetPosition()\n\n\tif !pos.IsMoving {\n\t\tif !m.findNewPath(&pos) {\n\t\t\tm.SetState(MobStateFind)\n\t\t\treturn\n\t\t}\n\n\t\tpath, distance := m.findMovePath(&pos)\n\t\tif distance == 0 {\n\t\t\tm.SetState(MobStateFind)\n\t\t\treturn\n\t\t}\n\n\t\tstart := path[int(distance)].(WayPoint)\n\t\tend := path[0].(WayPoint)\n\n\t\tpos.WayPoints = pos.WayPoints[:0]\n\t\tpos.WayPoints = append(pos.WayPoints, start, end)\n\t\tpos.CurrentWayPoint = 0\n\t\tpos.Speed = m.MoveSpeed\n\n\t\tOpenDeadReckoning(&pos)\n\t\tm.SetPosition(&pos)\n\n\t\tcolumn, row := m.cell.GetId()\n\n\t\tpkt := packet.MobMoveBegin(m)\n\t\tm.world.BroadcastPacket(column, row, pkt)\n\t\treturn\n\t}\n\n\tif pos.CurrentX == pos.FinalX && pos.CurrentY == pos.FinalY {\n\t\tpos.IsDeadReckoning = false\n\t} else {\n\t\tDeadReckoning(&pos)\n\t\tm.SetPosition(&pos)\n\t\tm.adjustCell(&pos)\n\t}\n\n\tif pos.IsDeadReckoning {\n\t\tm.SetPosition(&pos)\n\t\treturn\n\t}\n\n\tpos.InitialX = pos.FinalX\n\tpos.InitialY = pos.FinalY\n\tpos.IsMoving = false\n\n\tm.SetPosition(&pos)\n\n\tcolumn, row := m.cell.GetId()\n\n\tpkt := packet.MobMoveEnd(m)\n\tm.world.BroadcastPacket(column, row, pkt)\n\n\tm.SetState(MobStateFind)\n}", "func (w *Window) Move(x, y float64) {\n\tif err := driver.macRPC.Call(\"windows.Move\", nil, struct {\n\t\tID string\n\t\tX float64\n\t\tY float64\n\t}{\n\t\tID: w.ID().String(),\n\t\tX: x,\n\t\tY: y,\n\t}); err != nil {\n\t\tpanic(err)\n\t}\n}", "func (v Data) Move(old, new int) {\n\tif old == new {\n\t\treturn // well\n\t}\n\n\tshifting := -1\n\to, n := old, new\n\tif old > new {\n\t\tshifting = 1\n\t\told, new = new+1, old+1\n\t}\n\n\tcell := v[o]\n\tcopy(v[old:new], v[old-shifting:new-shifting])\n\tv[n] = cell\n}", "func NewMoving(x, y, w, h float64, r render.Renderable, tree *collision.Tree, cid event.CID, 
friction float64) *Moving {\n\tm := Moving{}\n\tcid = cid.Parse(&m)\n\tm.Solid = *NewSolid(x, y, w, h, r, tree, cid)\n\tm.vMoving = vMoving{\n\t\tDelta: physics.NewVector(0, 0),\n\t\tSpeed: physics.NewVector(0, 0),\n\t\tFriction: friction,\n\t}\n\treturn &m\n}", "func getMoveDirection(from, to *point) int {\n\tif from.x == to.x && from.y == to.y+1 {\n\t\treturn 1\n\t} else if from.x == to.x && from.y == to.y-1 {\n\t\treturn 2\n\t} else if from.x == to.x+1 && from.y == to.y {\n\t\treturn 3\n\t} else if from.x == to.x-1 && from.y == to.y {\n\t\treturn 4\n\t} else {\n\t\tlog.Fatalf(\"Points %v and %v are not adjacent\\n\", from, to)\n\t}\n\treturn 0\n}", "func (p *player) move(grid [][]rune) {\r\n\tdir := \"?\"\r\n\tswitch p.Dir {\r\n\tcase dirLeft:\r\n\t\tdir = \"LEFT\"\r\n\t\tp.Col = p.Col - 1\r\n\t\tnextSpace := grid[p.Row][p.Col]\r\n\t\tif nextSpace == '/' {\r\n\t\t\tp.Dir = dirDown\r\n\t\t} else if nextSpace == '\\\\' {\r\n\t\t\tp.Dir = dirUp\r\n\t\t} else if nextSpace == '+' {\r\n\t\t\tswitch p.Xing {\r\n\t\t\tcase 0:\r\n\t\t\t\tp.Dir = dirDown\r\n\t\t\t\tbreak\r\n\t\t\tcase 2:\r\n\t\t\t\tp.Dir = dirUp\r\n\t\t\t\tbreak\r\n\t\t\t}\r\n\t\t\tp.Xing = ((p.Xing + 1) % 3)\r\n\t\t}\r\n\t\tbreak\r\n\tcase dirRight:\r\n\t\tdir = \"RIGHT\"\r\n\t\tp.Col = p.Col + 1\r\n\t\tnextSpace := grid[p.Row][p.Col]\r\n\t\tif nextSpace == '/' {\r\n\t\t\tp.Dir = dirUp\r\n\t\t} else if nextSpace == '\\\\' {\r\n\t\t\tp.Dir = dirDown\r\n\t\t} else if nextSpace == '+' {\r\n\t\t\tswitch p.Xing {\r\n\t\t\tcase 0:\r\n\t\t\t\tp.Dir = dirUp\r\n\t\t\t\tbreak\r\n\t\t\tcase 2:\r\n\t\t\t\tp.Dir = dirDown\r\n\t\t\t\tbreak\r\n\t\t\t}\r\n\t\t\tp.Xing = ((p.Xing + 1) % 3)\r\n\t\t}\r\n\t\tbreak\r\n\tcase dirUp:\r\n\t\tdir = \"UP\"\r\n\t\tp.Row = p.Row - 1\r\n\t\tnextSpace := grid[p.Row][p.Col]\r\n\t\tif nextSpace == '/' {\r\n\t\t\tp.Dir = dirRight\r\n\t\t} else if nextSpace == '\\\\' {\r\n\t\t\tp.Dir = dirLeft\r\n\t\t} else if nextSpace == '+' {\r\n\t\t\tswitch p.Xing {\r\n\t\t\tcase 0:\r\n\t\t\t\tp.Dir 
= dirLeft\r\n\t\t\t\tbreak\r\n\t\t\tcase 2:\r\n\t\t\t\tp.Dir = dirRight\r\n\t\t\t\tbreak\r\n\t\t\t}\r\n\t\t\tp.Xing = ((p.Xing + 1) % 3)\r\n\t\t}\r\n\t\tbreak\r\n\tcase dirDown:\r\n\t\tdir = \"DOWN\"\r\n\t\tp.Row = p.Row + 1\r\n\t\tnextSpace := grid[p.Row][p.Col]\r\n\t\tif nextSpace == '/' {\r\n\t\t\tp.Dir = dirLeft\r\n\t\t} else if nextSpace == '\\\\' {\r\n\t\t\tp.Dir = dirRight\r\n\t\t} else if nextSpace == '+' {\r\n\t\t\tswitch p.Xing {\r\n\t\t\tcase 0:\r\n\t\t\t\tp.Dir = dirRight\r\n\t\t\t\tbreak\r\n\t\t\tcase 2:\r\n\t\t\t\tp.Dir = dirLeft\r\n\t\t\t\tbreak\r\n\t\t\t}\r\n\t\t\tp.Xing = ((p.Xing + 1) % 3)\r\n\t\t}\r\n\t\tbreak\r\n\t}\r\n\tfmt.Printf(\"Player %d moved %s to %d, %d\\n\", p.ID, dir, p.Col, p.Row)\r\n}", "func (e *Entry) Move(newfrag int, flags RenameFlags) error {\n\tfcode := C.CString(e.name)\n\tdefer C.free(unsafe.Pointer(fcode))\n\tcidx := C.int(newfrag)\n\tresult := C.gd_move(e.df.d, fcode, cidx, C.uint(flags))\n\tif result < 0 {\n\t\treturn e.df.Error()\n\t}\n\te.fragment = newfrag\n\treturn nil\n}", "func (t *SimpleChaincode) movePoint(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tif len(args) != 5 {\n\t\treturn shim.Error(\"Incorrect number of arguments. 
Expecting 5\")\n\t}\n\n\tvar transaction MoveInf\n\tvar err error\n\ttransaction.Admin2 = args[0]\n\ttransaction.Student2 = args[1]\n\ttransaction.Password = args[3]\n\ttransaction.Message = args[4]\n\taccountPassword := args[3]\n\t//transaction.Point, err = strconv.Atoi(args[2])\n\n\t// ==== Check if Seller exists ====\n\tbytesAdmin, err := stub.GetState(transaction.Admin2)\n\tif err != nil {\n\t\treturn shim.Error(\"Failed to get Seller: \" + err.Error())\n\t}\n\tif bytesAdmin == nil {\n\t\treturn shim.Error(\"This Admin not exists: \")\n\t}\n\tvar admin Admin\n\n\terr = json.Unmarshal(bytesAdmin, &admin)\n\tif err != nil {\n\t\treturn shim.Error(\"{\\\"Result\\\":\\\"fail\\\",\\\"Message\\\":\\\"Fail to get Admin Account \\\"}\")\n\t}\n\tif admin.Password == accountPassword {\n\t\t// ==== Check if Student exists ====\n\t\tbytesStudent, err := stub.GetState(transaction.Student2)\n\t\tif err != nil {\n\t\t\treturn shim.Error(\"Failed to get Student: \" + err.Error())\n\t\t}\n\t\tif bytesStudent == nil {\n\t\t\treturn shim.Error(\"This Student not exists: \")\n\t\t}\n\t\tvar digitalStudent Student\n\t\terr = json.Unmarshal(bytesStudent, &digitalStudent)\n\t\tif err != nil {\n\t\t\treturn shim.Error(\"Failed to get Student: \" + err.Error())\n\t\t}\n\t\t// ==== Check if Point is a integer ====\n\n\t\t// ==== Move Action ====\n\n\t\t//digitalStudent.Point = digitalStudent.Point + transaction.Point\n\n\t\tvar s int\n\t\ts1, err := strconv.Atoi(digitalStudent.Money)\n\t\ts2, err := strconv.Atoi(args[2])\n\t\ts = s1 + s2\n\t\tdigitalStudent.Money = strconv.Itoa(s) // must change into int????\n\n\t\tDigitalStudentBytes, _ := json.Marshal(digitalStudent)\n\t\terr = stub.PutState(transaction.Student2, []byte(DigitalStudentBytes))\n\t\tif err != nil {\n\t\t\treturn shim.Error(err.Error())\n\t\t}\n\t\treturn shim.Success([]byte(\"{\\\"Result\\\":\\\"MovePointSuccess\\\",\\\"message\\\":{\" + args[4] + \"}}\"))\n\t}\n\treturn 
shim.Error(\"\\\"Result\\\":\\\"fail\\\",\\\"Message\\\":\\\"Incorrect password\\\"\")\n}", "func (p *Player) Move(loc *Location) {\n\tif loc.Id() != p.loc.Id() {\n\t\tlocks := []lock.IdLocker{p, loc, p.loc}\n\t\tlock.All(locks)\n\t\tdefer lock.UnlockAll(locks)\n\n\t\tp.loc.RemovePlayer(p)\n\t\tp.loc = loc\n\t\tloc.AddPlayer(p)\n\t}\n}", "func (tg *TurtleGraphics) getnewpos(angle, distance float64) Vector {\n\td2r := math.Pi / 180\n\top := math.Sin(angle*d2r) * distance\n\tad := math.Cos(angle*d2r) * distance\n\n\tnewp := Vector{op, ad}\n\n\tnewp.X += tg.Pos.X\n\tnewp.Y += tg.Pos.Y\n\n\treturn newp\n}", "func (m *Mob) findMovePath(pos *context.Position) ([]astar.Pather, float64) {\n\tstart := WayPoint{\n\t\tX: pos.InitialX,\n\t\tY: pos.InitialY,\n\t\tworld: m.world,\n\t}\n\n\tend := WayPoint{\n\t\tX: pos.FinalX,\n\t\tY: pos.FinalY,\n\t\tworld: m.world,\n\t}\n\n\tpath, distance, _ := astar.Path(start, end)\n\n\treturn path, distance\n}", "func (ros RealOS) Move(src, dst string) error {\n\treturn os.Rename(src, dst)\n}", "func Move(x, y float32) {\n\tgContext.Cursor.X = x\n\tgContext.Cursor.Y = y\n}", "func (o *WObj) MoveBy(dx, dy float64) {\n\tif dx != 0 || dy != 0 {\n\t\tx := o.Hitbox.Min().X + dx\n\t\ty := o.Hitbox.Min().Y + dy\n\t\to.SetTopLeft(x, y)\n\t}\n}", "func (a *WalkAnimal) Move() string {\n\treturn strings.Join([]string{a.IAnimal.GetName(), \"moves by walk.\"}, \" \")\n}", "func translate_point_1(p Point, distance float64) {\n\tp.x += distance\n\tp.y += distance\n}", "func (m *move) Execute(cfg *config.Config, logger *log.Logger) error {\n\tlogger.Printf(\"Moving %s to %s\\n\", m.args[0], m.args[1])\n\n\tsourcePath := m.args[0]\n\tif expandedPath, err := pathutil.Expand(sourcePath); err == nil {\n\t\tsourcePath = expandedPath\n\t}\n\tdestinationPath := m.args[1]\n\tif expandedPath, err := pathutil.Expand(destinationPath); err == nil {\n\t\tdestinationPath = expandedPath\n\t}\n\n\tdestPathExists := pathutil.Exists(destinationPath)\n\tsrcIsDir, err := 
pathutil.IsDir(sourcePath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdestIsDir, err := pathutil.IsDir(destinationPath)\n\tif err != nil && destPathExists {\n\t\treturn err\n\t}\n\n\tif destPathExists && destIsDir {\n\t\tbasename := filepath.Base(sourcePath)\n\t\treturn os.Rename(sourcePath, filepath.Join(destinationPath, basename))\n\t} else if destPathExists && !srcIsDir && !destIsDir {\n\t\treturn os.Rename(sourcePath, destinationPath)\n\t} else if destPathExists {\n\t\treturn fmt.Errorf(\"%s is a file which cannot be overwritten\", destinationPath)\n\t} else {\n\t\treturn os.Rename(sourcePath, destinationPath)\n\t}\n}", "func (d *driver) Move(ctx context.Context, sourcePath string, destPath string) error {\n\treturn d.Bucket.Move(ctx, sourcePath, destPath)\n}", "func (r *regulator) Move(ctx context.Context, sourcePath string, destPath string) error {\n\tr.enter()\n\tdefer r.exit()\n\n\treturn r.StorageDriver.Move(ctx, sourcePath, destPath)\n}", "func (p Point) Translate(dx, dy float64) Point {\n\treturn Point{p.X + dx, p.Y + dy}\n}", "func (ts *TravelState) Move() {\n\ta := rand.Intn(len(ts.state))\n\tb := rand.Intn(len(ts.state))\n\tts.state[a], ts.state[b] = ts.state[b], ts.state[a]\n}", "func (hc *Hailconfig) Move(oldAlias, newAlias string) error {\n\tif !hc.IsPresent(oldAlias) {\n\t\treturn errors.New(\"old alias is not present\")\n\t}\n\tif hc.IsPresent(newAlias) {\n\t\treturn errors.New(\"new alias is already present\")\n\t}\n\thc.Add(newAlias, hc.Scripts[oldAlias].Command, hc.Scripts[oldAlias].Description)\n\treturn hc.Delete(oldAlias)\n}", "func (elems *ElementsNR) move(from, to dvid.Point3d, deleteElement bool) (moved *ElementNR, changed bool) {\n\tfor i, elem := range *elems {\n\t\tif from.Equals(elem.Pos) {\n\t\t\tchanged = true\n\t\t\t(*elems)[i].Pos = to\n\t\t\tmoved = (*elems)[i].Copy()\n\t\t\tif deleteElement {\n\t\t\t\t(*elems)[i] = (*elems)[len(*elems)-1] // Delete without preserving order.\n\t\t\t\t*elems = 
(*elems)[:len(*elems)-1]\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (a Animal) Move() {\n\tfmt.Println(a.locomotion)\n}", "func distance(point Point) (int) {\n\txDist := point.X\n\tyDist := point.Y\n\n\tif xDist < 0 {\n\t\txDist *= -1\n\t}\n\n\tif yDist < 0 {\n\t\tyDist *= -1\n\t}\n\n\treturn xDist + yDist\n}", "func (fs *EmbedFs) Move(from string, to string) error {\n\treturn ErrNotAvail\n}", "func (vr *vectorRenderer) MoveTo(x, y int) {\n\tvr.p = append(vr.p, fmt.Sprintf(\"M %d %d\", x, y))\n}", "func Position(r geom.Ray, distance float64) geom.Tuple4 {\n\tadd := geom.MultiplyByScalar(r.Direction, distance)\n\tpos := geom.Add(r.Origin, add)\n\treturn pos\n}", "func (cow Cow) Move() {\n\tfmt.Println(cow.locomotion)\n}", "func (chess *Chess) Move(san string) error {\n\tmove, err := chess.SANToMove(san)\n\tif err == nil {\n\t\tchess.makeMove(move)\n\t}\n\treturn err\n}", "func (s *swimmer) direction() int {\n\treturn s.moveDirection\n}", "func (d *droid) moveEveryDirection() {\n\tcurrStep := d.getMovementMapByPoint(d.location)\n\tmoveResult := d.move(1)\n\tif moveResult != 0 {\n\t\tif !d.hasVisitedCurrent() {\n\t\t\td.addStep(currStep)\n\t\t}\n\t\td.moveToPoint(currStep.location)\n\t}\n\tmoveResult = d.move(2)\n\tif moveResult != 0 {\n\t\tif !d.hasVisitedCurrent() {\n\t\t\td.addStep(currStep)\n\t\t}\n\t\td.moveToPoint(currStep.location)\n\t}\n\tmoveResult = d.move(3)\n\tif moveResult != 0 {\n\t\tif !d.hasVisitedCurrent() {\n\t\t\td.addStep(currStep)\n\t\t}\n\t\td.moveToPoint(currStep.location)\n\t}\n\tmoveResult = d.move(4)\n\tif moveResult != 0 {\n\t\tif !d.hasVisitedCurrent() {\n\t\t\td.addStep(currStep)\n\t\t}\n\t\td.moveToPoint(currStep.location)\n\t}\n}", "func (a *Animal) Move() string {\n\treturn \"Locomotion: \" + a.locomotion\n}", "func Move(game *chess.Game, lvl Level) (*chess.Move, error) {\n\tcmd := exec.Command(execPath)\n\tw, err := cmd.StdinPipe()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer w.Close()\n\tr, err := 
cmd.StdoutPipe()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer r.Close()\n\n\tscanner := bufio.NewScanner(r)\n\tch := make(chan *chess.Move)\n\tgo func() {\n\t\tfor scanner.Scan() {\n\t\t\ts := scanner.Text()\n\t\t\tfmt.Println(s)\n\t\t\tif strings.HasPrefix(s, \"bestmove\") {\n\t\t\t\tmoveTxt := parseOutput(s)\n\t\t\t\tch <- getMoveFromText(game, moveTxt)\n\t\t\t}\n\t\t}\n\t}()\n\tif err := cmd.Start(); err != nil {\n\t\treturn nil, err\n\t}\n\tif _, err := io.WriteString(w, fmt.Sprintf(\"setoption name Skill Level value %d\\n\", lvl)); err != nil {\n\t\treturn nil, err\n\t}\n\tif _, err := io.WriteString(w, fmt.Sprintf(\"position fen %s\\n\", game.Position().String())); err != nil {\n\t\treturn nil, err\n\t}\n\tif _, err := io.WriteString(w, \"go movetime 950\\n\"); err != nil {\n\t\treturn nil, err\n\t}\n\ttime.Sleep(time.Second)\n\tif _, err := io.WriteString(w, \"quit\\n\"); err != nil {\n\t\treturn nil, err\n\t}\n\tif err := cmd.Wait(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn <-ch, nil\n}" ]
[ "0.67348295", "0.66218054", "0.6323302", "0.63009393", "0.61992556", "0.5959389", "0.5927495", "0.59226626", "0.59119165", "0.59011596", "0.5887739", "0.5831569", "0.5827473", "0.5818151", "0.5785061", "0.5772917", "0.5739919", "0.5737568", "0.5686841", "0.56842166", "0.5672908", "0.563404", "0.5629922", "0.5629906", "0.55468744", "0.5543407", "0.55344933", "0.5523739", "0.5486467", "0.54534346", "0.544808", "0.5445023", "0.54401696", "0.54318666", "0.5411926", "0.5403406", "0.5401208", "0.5392887", "0.53867894", "0.5378993", "0.53326684", "0.5299154", "0.5274481", "0.5272039", "0.5266176", "0.5260013", "0.52593184", "0.5257118", "0.5247331", "0.5236787", "0.5233461", "0.5227418", "0.52230227", "0.5206039", "0.5195471", "0.51917875", "0.51759434", "0.5140499", "0.5139834", "0.5124862", "0.511326", "0.5101773", "0.50837356", "0.5061206", "0.5060285", "0.5060011", "0.5043336", "0.5033822", "0.5030271", "0.50285506", "0.50243384", "0.5001481", "0.49890214", "0.4982836", "0.49803925", "0.49668178", "0.49624717", "0.49609303", "0.4947939", "0.49395084", "0.49263278", "0.49232966", "0.49231026", "0.4915219", "0.4911504", "0.4906514", "0.48971182", "0.48891917", "0.48863062", "0.4879053", "0.48733336", "0.4872653", "0.48610815", "0.48585734", "0.48569176", "0.4853567", "0.48478496", "0.48377174", "0.48266345", "0.4814222" ]
0.83701855
0
distanceToOrigin returns the Manhattan distance to origin (0, 0).
func (p point) distanceToOrigin() int { return p.distanceToPoint(point{x: 0, y: 0}) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (s *ship) manhattanDistance() int {\n\tabs := func(num int) int {\n\t\tif num < 0 {\n\t\t\treturn -num\n\t\t}\n\t\treturn num\n\t}\n\treturn abs(s.x) + abs(s.y)\n}", "func (d Position2D) GetManhattanDistance(cmpPos Position2D) int {\n\treturn max(d.Row-cmpPos.Row, cmpPos.Row-d.Row) + max(d.Col-cmpPos.Col, cmpPos.Col-d.Col)\n}", "func ManhattanDist(a Location, b Location) int {\n\treturn util.AbsInt(a.Row-b.Row) + util.AbsInt(a.Col-b.Col)\n}", "func CalculateManhattanDistance(directionCountMap map[Direction]int) int {\n\teastWest := math.Abs(float64(directionCountMap[East]) - float64(directionCountMap[West]))\n\tnorthSouth := math.Abs(float64(directionCountMap[North]) - float64(directionCountMap[South]))\n\treturn int(eastWest + northSouth)\n}", "func ManhattanDistance(p1, p2 Point) int {\n\tdx := p1.x - p2.x\n\tif dx < 0 {\n\t\tdx = -dx\n\t}\n\tdy := p1.y - p2.y\n\tif dy < 0 {\n\t\tdy = -dy\n\t}\n\n\treturn dx + dy\n}", "func (v Vector3D) ManhattanDistance(o Vector3D) int {\n\treturn v.Sub(o).ManhattanLength()\n}", "func ManhattanDistance(firstVector, secondVector []float64) (float64, error) {\n\tdistance := 0.\n\tfor ii := range firstVector {\n\t\tdistance += math.Abs(firstVector[ii] - secondVector[ii])\n\t}\n\treturn distance, nil\n}", "func (b *OGame) Distance(origin, destination ogame.Coordinate) int64 {\n\treturn Distance(origin, destination, b.serverData.Galaxies, b.serverData.Systems, b.serverData.DonutGalaxy, b.serverData.DonutSystem)\n}", "func (m match) dist() uint32 {\n\treturn uint32(m.distance - minDistance)\n}", "func (q *DistanceFeatureQuery) Origin(origin interface{}) *DistanceFeatureQuery {\n\tq.origin = origin\n\treturn q\n}", "func (v Vector3D) ManhattanLength() int {\n\treturn AbsInt(v.x) + AbsInt(v.y) + AbsInt(v.z)\n}", "func (p PointI) Manhattan(p2 PointI) int32 {\n\treturn p.VecTo(p2).Manhattan()\n}", "func (w *RandomWorld) Distance(from, to GoWorld.Location) float64 {\n\treturn math.Sqrt(math.Pow(float64(from.X-to.X), 2) + 
math.Pow(float64(from.Y-to.Y), 2))\n}", "func (c card) distance(d card) int {\n\tdist := d.number - c.number\n\tif dist < 0 {\n\t\tdist += 13\n\t}\n\treturn dist\n}", "func (c Creature) dist() int {\n\tx := math.Pow(float64(Hero.X()-c.X()), 2)\n\ty := math.Pow(float64(Hero.Y()-c.Y()), 2)\n\n\treturn int(math.Sqrt(x + y))\n}", "func (c *GMapsClient) GetDistance(ctx context.Context, origin, destination *model.Location) (*model.Distance, error) {\n\to, err := origin.Get()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"GetDistance: %s\", err.Error())\n\t}\n\n\td, err := destination.Get()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"GetDistance: %s\", err.Error())\n\t}\n\n\treq := &maps.DistanceMatrixRequest{\n\t\tOrigins: []string{o},\n\t\tDestinations: []string{d},\n\t\tMode: maps.TravelModeWalking,\n\t\tUnits: maps.UnitsImperial,\n\t}\n\n\tresp, err := c.c.DistanceMatrix(ctx, req)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"GetDistance: %s\", err.Error())\n\t}\n\n\tif len(resp.Rows) == 0 {\n\t\treturn nil, fmt.Errorf(\"GetDistance: No DistanceMatrixRows\")\n\t}\n\n\tdist := &model.Distance{\n\t\tID: destination.ID,\n\t\tDuration: resp.Rows[0].Elements[0].Duration / time.Minute,\n\t\tLength: resp.Rows[0].Elements[0].Distance.HumanReadable,\n\t}\n\n\treturn dist, nil\n}", "func Distance(from, to Coord) float64 {\n\tx := float64(from.X - to.X)\n\ty := float64(from.Y - to.Y)\n\n\treturn math.Sqrt(x*x + y*y)\n}", "func (p Point2D) Manhattan(p2 Point2D) float32 {\n\treturn p.VecTo(p2).Manhattan()\n}", "func (m NoMDEntries) GetMDEntryOriginator() (v string, err quickfix.MessageRejectError) {\n\tvar f field.MDEntryOriginatorField\n\tif err = m.Get(&f); err == nil {\n\t\tv = f.Value()\n\t}\n\treturn\n}", "func (g Coords) Distance() float64 {\n\t// * removed returned variable\n\treturn math.Sqrt(math.Pow(g.X2-g.X1, 2) + math.Pow(g.Y2-g.Y1, 2))\n}", "func NewMDEntryOriginator(val string) MDEntryOriginatorField {\n\treturn 
MDEntryOriginatorField{quickfix.FIXString(val)}\n}", "func (m *Metric) Origin() *Origin {\n\treturn m.source.origin\n}", "func (p *User) CalcDistance(q *User) float64 {\n\n\tdist := 0.5 - math.Cos((q.lat-p.lat)*pi)/2 + math.Cos(p.lat*pi)*math.Cos(q.lat*pi)*(1-math.Cos((q.long-p.long)*pi))/2\n\tdist = 12742 * math.Asin(math.Sqrt(dist))\n\n\treturn dist\n}", "func (m Malkist) CalculateDistance(origins, destinations []string) ([]DistanceMatrix, error) {\n\tendpoint, err := createDistanceMatrixURL(origins, destinations, m.Key)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"URL error: %v\", err.Error())\n\t}\n\n\tres, err := http.Get(endpoint.String())\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"distance calculation error: %v\", err.Error())\n\t}\n\n\tvar body distanceMatrixAPIResponse\n\tjson.NewDecoder(res.Body).Decode(&body)\n\n\tdefer res.Body.Close()\n\n\tif body.Status != \"OK\" {\n\t\treturn nil, fmt.Errorf(\"distance calculation error: %v\", body.Status)\n\t}\n\n\treturn distanceWrapper(body), nil\n}", "func hammingDistance(x int, y int) int {\n\n}", "func (a *SetPermissionArgs) SetOrigin(origin string) *SetPermissionArgs {\n\ta.Origin = &origin\n\treturn a\n}", "func (s *Segment) Distance() float64 {\n\torigin := geodist.Point{Lat: s.Origin.Lat, Long: s.Origin.Long}\n\tdestination := geodist.Point{Lat: s.Destination.Lat, Long: s.Destination.Long}\n\n\tdistance, err := geodist.VincentyDistance(origin, destination)\n\tif err != nil {\n\t\treturn math.NaN()\n\t}\n\n\treturn distance * kmToMiles\n}", "func (self *Chromosome) Distance() (distance int) {\n\tdistance = 0\n\n\tfor i, location := range self.Locations {\n\t\tif i+1 < self.Length() {\n\t\t\tdistance += self.Matrix.DistanceMap[strconv.Itoa(location.Id)+strconv.Itoa(self.Locations[i+1].Id)]\n\t\t} else {\n\t\t\tdistance += self.Matrix.DistanceMap[strconv.Itoa(location.Id)+strconv.Itoa(self.Locations[0].Id)]\n\t\t}\n\t}\n\n\treturn\n}", "func (m NoMDEntries) HasMDEntryOriginator() bool {\n\treturn 
m.Has(tag.MDEntryOriginator)\n}", "func (lut *DistanceLookUpTable) distCalculated(segment int, code byte) bool {\n\treturn lut.calculated[lut.posForSegmentAndCode(segment, code)]\n}", "func (m NoMDEntries) SetMDEntryOriginator(v string) {\n\tm.Set(field.NewMDEntryOriginator(v))\n}", "func (c Hex) Distance(otherC Color) float64 {\n\treturn c.getRGB().Distance(otherC)\n}", "func (s Service) CalculateDistance(ctx *gin.Context) {\n\tvar req DistanceMatrixRequest\n\n\tif err := ctx.Bind(&req); err != nil {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tm := malkist.Malkist{\n\t\tKey: s.Key,\n\t}\n\n\tresult, err := m.CalculateDistance(req.Origins, req.Destinations)\n\tif err != nil {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tctx.JSON(http.StatusOK, result)\n}", "func planetDistance(planet1, planet2 int64) (distance int64) {\n\treturn int64(1000 + 5*math.Abs(float64(planet2-planet1)))\n}", "func (c *Client) DistanceTo(other *Coordinate) time.Duration {\n\tc.mutex.RLock()\n\tdefer c.mutex.RUnlock()\n\n\treturn c.coord.DistanceTo(other)\n}", "func dist(p1, p2 Coord) int {\n return abs(p1.x-p2.x) + abs(p1.y-p2.y)\n}", "func (s *GetDataSetOutput) SetOrigin(v string) *GetDataSetOutput {\n\ts.Origin = &v\n\treturn s\n}", "func (a *GrantPermissionsArgs) SetOrigin(origin string) *GrantPermissionsArgs {\n\ta.Origin = &origin\n\treturn a\n}", "func (u *Unit) distance(c C) int {\n\tdist, _ := u.Game.bfs(u.Pos, c)\n\treturn dist\n}", "func Distance(strand1, strand2 string) (distance int, err error) {\n\tif len(strand1) != len(strand2) {\n\t\treturn 0, errors.New(\"DNA strands have different length\")\n\t}\n\tif len(strand1) == 0 {\n\t\treturn 0, nil\n\t}\n\tfor index := 0; index < len(strand1); index++ {\n\t\tif strand1[index] != strand2[index] {\n\t\t\tdistance++\n\t\t}\n\t}\n\treturn distance, nil\n}", "func (n *GridNode) ActualDistanceToNeighbor(node astar.Node) float64 {\n\tif 
isDiagonal(n, node.(*GridNode)) {\n\t\treturn math.Sqrt2 * n.Cost\n\t}\n\treturn n.Cost\n}", "func (o *Origin) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {\n\tif o == nil {\n\t\treturn 0, errors.New(\"models: no Origin provided for delete\")\n\t}\n\n\tif err := o.doBeforeDeleteHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\n\targs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), originPrimaryKeyMapping)\n\tsql := \"DELETE FROM \\\"origins\\\" WHERE \\\"name\\\"=$1\"\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to delete from origins\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by delete for origins\")\n\t}\n\n\tif err := o.doAfterDeleteHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn rowsAff, nil\n}", "func (s *DataSetEntry) SetOrigin(v string) *DataSetEntry {\n\ts.Origin = &v\n\treturn s\n}", "func getDistance(x, y int) int {\n\treturn int(cityMatrix[x][y])\n}", "func (s *UpdateDataSetOutput) SetOrigin(v string) *UpdateDataSetOutput {\n\ts.Origin = &v\n\treturn s\n}", "func (s *CreateDataSetOutput) SetOrigin(v string) *CreateDataSetOutput {\n\ts.Origin = &v\n\treturn s\n}", "func Distance(z1, z2 *Zip) float64 {\n\tif z1.Code == z2.Code {\n\t\treturn 0.0\n\t}\n\n\ttheta := z1.Longitude - z2.Longitude\n\td := z1.LatitudeSin*z2.LatitudeSin + z1.LatitudeCos*z2.LatitudeCos*cos(d2r*theta)\n\td = math.Acos(d)\n\td = r2d * d\n\td = d * 60 * 1.1515\n\n\treturn d\n}", "func (m *Maps) Distance(c0, c1 types.Coordinate) int {\n\treturn int(math.Abs(float64(c0.X-c1.X)) + math.Abs(float64(c0.Y-c1.Y)))\n}", "func Distance(src_lat, src_long, dest_lat, dest_long, radius float64) float64 {\n\tlat := 
Haversine(ToRadians(dest_lat - src_lat))\n\tlong := Haversine(ToRadians(dest_long - src_long))\n\n\treturn 2 * radius * math.Asin(math.Sqrt(lat+math.Cos(ToRadians(src_lat))*math.Cos(ToRadians(dest_lat))*long))\n}", "func (p Position) DistanceTo(position Position) float64 {\n\tp1 := geojson.Position{p.X, p.Y, p.Z}\n\tp2 := geojson.Position{position.X, position.Y, position.Z}\n\treturn p1.DistanceTo(p2)\n}", "func Distance(dnaOne, dnaTwo string) (distance int, err error) {\n\tif len(dnaOne) != len(dnaTwo) {\n\t\treturn 0, errors.New(\"Stands are not the same length\")\n\t}\n\n\tfor i := range dnaOne {\n\t\tif dnaOne[i] != dnaTwo[i] {\n\t\t\tdistance++\n\t\t}\n\t}\n\n\treturn distance, nil\n}", "func (to *Session) DeleteOrigin(id int, opts RequestOptions) (tc.Alerts, toclientlib.ReqInf, error) {\n\tif opts.QueryParameters == nil {\n\t\topts.QueryParameters = url.Values{}\n\t}\n\topts.QueryParameters.Set(\"id\", strconv.Itoa(id))\n\tvar alerts tc.Alerts\n\treqInf, err := to.del(apiOrigins, opts, &alerts)\n\treturn alerts, reqInf, err\n}", "func (from Dot) DistanceTo(to Dot) (res uint16) {\n\tif !from.Equals(to) {\n\t\tif from.X > to.X {\n\t\t\tres = uint16(from.X - to.X)\n\t\t} else {\n\t\t\tres = uint16(to.X - from.X)\n\t\t}\n\n\t\tif from.Y > to.Y {\n\t\t\tres += uint16(from.Y - to.Y)\n\t\t} else {\n\t\t\tres += uint16(to.Y - from.Y)\n\t\t}\n\t}\n\n\treturn\n}", "func determineDistance(maze maze, source, destination coordinate) int {\n\tfmt.Println(\"Determining distance between\", source, \"and\", destination)\n\tvisited := make(map[coordinate]bool)\n\ttoVisit := []pathResult{pathResult{source, 0}}\n\tfor {\n\t\tnext := toVisit[0]\n\t\ttoVisit = toVisit[1:]\n\t\tvisited[next.coordinate] = true\n\t\tfor _, neighbor := range destinations(next.coordinate) {\n\t\t\tif neighbor == destination {\n\t\t\t\tfmt.Println(\"Found destination at\", 1+next.length)\n\t\t\t\treturn 1 + next.length\n\t\t\t}\n\t\t\tif visited[neighbor] || maze.positions[neighbor] == \"#\" 
{\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\ttoVisit = append(toVisit, pathResult{neighbor, 1 + next.length})\n\t\t}\n\t}\n}", "func (entity Entity) CalculateDistanceTo(target Entity) float64 {\n\tdx := target.X - entity.X\n\tdy := target.Y - entity.Y\n\n\treturn math.Sqrt(dx*dx + dy*dy)\n}", "func Distance(x, y, x2, y2 int32) int32 {\n\n\tdx := x - x2\n\tdy := y - y2\n\tds := (dx * dx) + (dy * dy)\n\treturn int32(math.Sqrt(math.Abs(float64(ds))))\n\n}", "func (w world) distance(p1, p2 location) float64 {\n\ts1, c1 := math.Sincos(rad(p1.lat))\n\ts2, c2 := math.Sincos(rad(p2.lat))\n\tclong := math.Cos(rad(p1.long - p2.long))\n\treturn w.radius * math.Acos(s1*s2+c1*c2*clong)\n}", "func (w world) distance(p1, p2 location) float64 {\n\ts1, c1 := math.Sincos(rad(p1.lat))\n\ts2, c2 := math.Sincos(rad(p2.lat))\n\n\tclong := math.Cos(rad(p1.long - p2.long))\n\n\treturn w.radius * math.Acos(s1*s2+c1*c2*clong)\n}", "func findDistance(here *location, knownLocations []location) int {\n\tvar totalDistance int\n\tfor _, spot := range knownLocations {\n\t\ttotalDistance = totalDistance + manhattanDistance(*here, spot)\n\t}\n\tfmt.Println(\"distance: \", totalDistance)\n\treturn totalDistance\n}", "func (p Vector3) DistanceTo(o Vector3) Coord {\n\tdx := p.X - o.X\n\tdy := p.Y - o.Y\n\tdz := p.Z - o.Z\n\treturn Coord(math.Sqrt(float64(dx*dx + dy*dy + dz*dz)))\n}", "func (r *RumorMessage) GetOrigin() string {\n\treturn r.Origin\n}", "func (loc1 location) distance(loc2 location) float64 {\n\tvar la1, lo1, la2, lo2, r float64\n\tlat1 := loc1.Lat\n\tlon1 := loc1.Long\n\tlat2 := loc2.Lat\n\tlon2 := loc2.Long\n\tla1 = lat1 * math.Pi / 180\n\tlo1 = lon1 * math.Pi / 180\n\tla2 = lat2 * math.Pi / 180\n\tlo2 = lon2 * math.Pi / 180\n\n\tr = 6378100 // Earth radius in METERS\n\n\t// calculate\n\th := hsin(la2-la1) + math.Cos(la1)*math.Cos(la2)*hsin(lo2-lo1)\n\n\treturn math.Round(2 * r * math.Asin(math.Sqrt(h)))\n}", "func (s *GetDataSetOutput) SetOriginDetails(v *OriginDetails) *GetDataSetOutput 
{\n\ts.OriginDetails = v\n\treturn s\n}", "func (s *ListDataSetsInput) SetOrigin(v string) *ListDataSetsInput {\n\ts.Origin = &v\n\treturn s\n}", "func distance(point Point) (int) {\n\txDist := point.X\n\tyDist := point.Y\n\n\tif xDist < 0 {\n\t\txDist *= -1\n\t}\n\n\tif yDist < 0 {\n\t\tyDist *= -1\n\t}\n\n\treturn xDist + yDist\n}", "func (m *CarCheckInOutMutation) Distance() (r float64, exists bool) {\n\tv := m.distance\n\tif v == nil {\n\t\treturn\n\t}\n\treturn *v, true\n}", "func (lhs Histgram) DistanceFrom(rhs interface{ GetValues() []int }) int {\n\n\td := 0.0\n\tlhsValues := lhs.GetValues()\n\trhsValues := rhs.GetValues()\n\n\tfor i, v := range lhsValues {\n\t\td += math.Abs(math.Pow(float64(v), 2) - math.Pow(float64(rhsValues[i]), 2))\n\t}\n\n\treturn int(d)\n}", "func Distance(year, month, day int, now time.Time) time.Duration {\n\tif now.IsZero() {\n\t\tnow = time.Now()\n\t}\n\tnext := time.Date(now.Year()+year, now.Month()+time.Month(month), now.Day()+day, now.Hour(), now.Minute(), now.Second(), now.Nanosecond(), now.Location())\n\treturn next.Sub(now)\n}", "func Distance(a, b string) (int, error) {\n\t//length of the DNA strands must be same to calculate hamming distance.\n\tif len(a) != len(b) {\n\t\treturn -1, errors.New(\"length of string A and B should be the same to calculate hamming distance\")\n\t}\n\thammingDistance := 0\n\tfor i :=0; i<len(a) ;i++ {\n\t\tif a[i] != b[i] {\n\t\t\thammingDistance++\n\t\t}\n\t}\n\treturn hammingDistance, nil\n}", "func (s *DisassociateApprovedOriginInput) SetOrigin(v string) *DisassociateApprovedOriginInput {\n\ts.Origin = &v\n\treturn s\n}", "func (p *SamplePoint2D) PlaneDistance(planePosition float64, dim int) float64 {\n\treturn math.Abs(planePosition - p.Dimension(dim))\n}", "func DistanceBetwnPts(origin, position Point) float64 {\n\torigin = origin.toRadians()\n\tposition = position.toRadians()\n\n\tchange := origin.Delta(position)\n\n\ta := math.Pow(math.Sin(change.Lat/2), 2) + 
math.Cos(origin.Lat)*math.Cos(position.Lat)*math.Pow(math.Sin(change.Lon/2), 2)\n\n\tc := 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a))\n\treturn float64(earthRadiusMetres * c)\n}", "func distance(c1, c2 color.Color) float64 {\n\tr1, b1, g1, _ := c1.RGBA()\n\tr2, b2, g2, _ := c2.RGBA()\n\tdr := int(r1) - int(r2)\n\tdg := int(g1) - int(g2)\n\tdb := int(b1) - int(b2)\n\treturn math.Sqrt(float64(dr*dr) + float64(dg*dg) + float64(db*db))\n}", "func (m *DistanceMutation) Distance() (r string, exists bool) {\n\tv := m._Distance\n\tif v == nil {\n\t\treturn\n\t}\n\treturn *v, true\n}", "func (p Point) Distance(q Point) float64 { // HL\n\treturn math.Hypot(q.X-p.X, q.Y-p.Y)\n}", "func distance(x1, y1, x2, y2 float64) float64 {\n\ta := x2 - x1\n\tb := y2 - y1\n\treturn math.Sqrt(a*a + b*b)\n}", "func digTurnDist(p1, p2 Coord) int {\n return int(math.Ceil(float64(digDist(p1, p2)) / MOVE_DIST))\n}", "func (o RouteOutput) Origin() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Route) pulumi.StringOutput { return v.Origin }).(pulumi.StringOutput)\n}", "func (s *server) getOriginCounter(origin string) (okCnt telemetry.SimpleCounter, errorCnt telemetry.SimpleCounter) {\n\ts.cachedTlmLock.Lock()\n\tdefer s.cachedTlmLock.Unlock()\n\n\tif maps, ok := s.cachedOriginCounters[origin]; ok {\n\t\treturn maps.okCnt, maps.errCnt\n\t}\n\n\tokMap := map[string]string{\"message_type\": \"metrics\", \"state\": \"ok\"}\n\terrorMap := map[string]string{\"message_type\": \"metrics\", \"state\": \"error\"}\n\tokMap[\"origin\"] = origin\n\terrorMap[\"origin\"] = origin\n\tmaps := cachedOriginCounter{\n\t\torigin: origin,\n\t\tok: okMap,\n\t\terr: errorMap,\n\t\tokCnt: tlmProcessed.WithTags(okMap),\n\t\terrCnt: tlmProcessed.WithTags(errorMap),\n\t}\n\n\ts.cachedOriginCounters[origin] = maps\n\ts.cachedOrder = append(s.cachedOrder, maps)\n\n\tif len(s.cachedOrder) > maxOriginCounters {\n\t\t// remove the oldest one from the cache\n\t\tpop := s.cachedOrder[0]\n\t\tdelete(s.cachedOriginCounters, 
pop.origin)\n\t\ts.cachedOrder = s.cachedOrder[1:]\n\t\t// remove it from the telemetry metrics as well\n\t\ttlmProcessed.DeleteWithTags(pop.ok)\n\t\ttlmProcessed.DeleteWithTags(pop.err)\n\t}\n\n\treturn maps.okCnt, maps.errCnt\n}", "func (vec Vector2) Distance(vec2 Vector2) float32 {\n\txd := vec2.X - vec.X\n\tyd := vec2.Y - vec.Y\n\treturn Sqrt(xd*xd + yd*yd)\n}", "func (i I) Distance(i2 I) float64 {\n\treturn i.Subtract(i2).Mag()\n}", "func (_m *Planner) TransitDistance(source *models.Address, destination *models.Address) (int, error) {\n\tret := _m.Called(source, destination)\n\n\tvar r0 int\n\tif rf, ok := ret.Get(0).(func(*models.Address, *models.Address) int); ok {\n\t\tr0 = rf(source, destination)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*models.Address, *models.Address) error); ok {\n\t\tr1 = rf(source, destination)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (p *Particle) Distance(pa *Particle) float64 {\n\treturn p.Position.Distance(pa.Position)\n}", "func (p *hhgPlanner) TransitDistance(appCtx appcontext.AppContext, source *models.Address, destination *models.Address) (int, error) {\n\t// This might get retired after we transition over fully to GHC.\n\tpanic(\"the HHG planner does not need this method and this will be deprecated when the HERE planner is deprecated\")\n}", "func distance(xa, ya, xb, yb int) float64 {\n\tx := math.Abs(float64(xa - xb))\n\ty := math.Abs(float64(ya - yb))\n\treturn math.Sqrt(x*x + y*y)\n}", "func (h *Health) Origin() string {\n\treturn h.origin\n}", "func HeuristicMinDistance(b *Board) int {\n\treturn minDistance(b, Min, false, false) - minDistance(b, Max, false, false)\n}", "func Distance(a, b string) (int, error) {\n\t// If a and b has dissimilar then raise error.\n\tif len(a) != len(b) {\n\t\treturn 0, errors.New(\"a and b must have same length\")\n\t}\n\n\t// Initiate the hamming distance\n\tdistance := 0\n\n\t// Convert to rune to cover 
utf-8 string\n\tstrandA := []rune(a)\n\tstrandB := []rune(b)\n\n\t// Iterate two DNA strands and compare it by characters at the same index.\n\tfor index := range strandA {\n\t\tif strandA[index] != strandB[index] {\n\t\t\tdistance++\n\t\t}\n\t}\n\n\treturn distance, nil\n}", "func (os *OriginChecker) Delete(origin string) {\n\tos.Lock()\n\tdefer os.Unlock()\n\n\tclean, err := cleanOrigin(origin)\n\tif err != nil {\n\t\treturn\n\t}\n\tdelete(os.origins, clean)\n}", "func (vd VectorDistancer) Distance(i, j int) float64 {\n\tvi := vd[i]\n\tvj := vd[j]\n\tdist := 0.0\n\tfor k, vik := range vi {\n\t\tvjk := vj[k]\n\t\tdist += (vik - vjk) * (vik - vjk)\n\t}\n\treturn dist\n}", "func traitDistance(n1, n2 int, pos uint) int {\n\td := extract(n1, pos) - extract(n2, pos)\n\tif d < 0 {\n\t\treturn d * -1\n\t} else {\n\t\treturn d\n\t}\n}", "func Distance(x1, y1, x2, y2 float64) float64 {\n\treturn math.Sqrt(math.Pow(x1 - x2, 2) + math.Pow(y1 - y2, 2))\n}", "func (c Cell) Distance(target Point) s1.ChordAngle {\n\treturn c.distanceInternal(target, true)\n}", "func (id ID) Distance(other ID) (result ID) {\n\tdistance := new(big.Int)\n\tdistance = distance.Xor(id.Int(), other.Int())\n\tresult, _ = NewID(distance.Bytes())\n\treturn\n}", "func Distance(a, b string) (int, error) {\n\tnewA := []rune(a)\n\tnewB := []rune(b)\n\n\tif len(newA) != len(newB) {\n\t\treturn 0, errors.New(\"DNA strands must be of equal length\")\n\t}\n\n\thammingDistance := 0\n\n\tfor k, v := range newA {\n\t\tif newB[k] != v {\n\t\t\thammingDistance++\n\t\t}\n\t}\n\n\treturn hammingDistance, nil\n}", "func (p1 XY) Distance(p2 XY) float64 {\n\tdx, dy := p2.X-p1.X, p2.Y-p1.Y\n\treturn math.Sqrt(float64(dx*dx) + float64(dy*dy))\n}", "func Distance(c1, c2 ogame.Coordinate, universeSize, nbSystems int64, donutGalaxy, donutSystem bool) (distance int64) {\n\tif c1.Galaxy != c2.Galaxy {\n\t\treturn galaxyDistance(c1.Galaxy, c2.Galaxy, universeSize, donutGalaxy)\n\t}\n\tif c1.System != c2.System {\n\t\treturn 
flightSystemDistance(nbSystems, c1.System, c2.System, donutSystem)\n\t}\n\tif c1.Position != c2.Position {\n\t\treturn planetDistance(c1.Position, c2.Position)\n\t}\n\treturn 5\n}", "func Distance(latlon1 Coord, latlon2 Coord) float64 {\n\t// convert to radians\n\t// must cast radius as float to multiply later\n\tvar la1, lo1, la2, lo2, r float64\n\tla1 = latlon1[1] * math.Pi / 180\n\tlo1 = latlon1[0] * math.Pi / 180\n\tla2 = latlon2[1] * math.Pi / 180\n\tlo2 = latlon2[0] * math.Pi / 180\n\n\tr = EarthRadiusMeters\n\n\t// calculate\n\th := hsin(la2-la1) + math.Cos(la1)*math.Cos(la2)*hsin(lo2-lo1)\n\n\treturn 2 * r * math.Asin(math.Sqrt(h))\n}", "func HemmingDistance(s1, s2 string) (int, error) {\n\treturn hemmingDistanceBytes([]byte(s1), []byte(s2))\n}", "func (t *Triangle) MetaballDistBound(d float64) float64 {\n\treturn d\n}" ]
[ "0.66292995", "0.6283916", "0.62757933", "0.60816836", "0.60361654", "0.59825534", "0.5843758", "0.53704447", "0.5285398", "0.51062644", "0.509364", "0.5085574", "0.50114954", "0.49696583", "0.4951331", "0.49022195", "0.49021795", "0.4899322", "0.4876255", "0.4805026", "0.47555062", "0.47494859", "0.4733905", "0.4688199", "0.46711767", "0.46659198", "0.46494326", "0.46472976", "0.45916313", "0.45882255", "0.4584777", "0.4579943", "0.45627177", "0.4560766", "0.45489305", "0.45470473", "0.4522793", "0.45175222", "0.45137626", "0.44809157", "0.44661048", "0.44567904", "0.4437853", "0.44288877", "0.44129202", "0.43731457", "0.43627626", "0.43520784", "0.4344972", "0.4334291", "0.43313017", "0.43194708", "0.4319353", "0.4318257", "0.4305841", "0.43052426", "0.43006724", "0.4297537", "0.429343", "0.42925522", "0.42774788", "0.42565534", "0.42343232", "0.42336798", "0.42276734", "0.42268983", "0.42232263", "0.42185155", "0.4218363", "0.42151392", "0.42116252", "0.42079216", "0.4206467", "0.42003718", "0.41998968", "0.41891062", "0.4188781", "0.41838247", "0.4183536", "0.4180152", "0.41675767", "0.4158766", "0.4146382", "0.4145336", "0.41385153", "0.41350114", "0.41333577", "0.41319415", "0.41290906", "0.41234314", "0.4123144", "0.41223964", "0.41150567", "0.4112999", "0.41089576", "0.41088942", "0.4103089", "0.4100843", "0.4100034", "0.40908983" ]
0.67163694
0
distanceToPoint returns the Manhattan distance to point o.
func (p point) distanceToPoint(o point) int { return abs(abs(p.x)-abs(o.x)) + abs(abs(p.y)-abs(o.y)) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (s *ship) manhattanDistance() int {\n\tabs := func(num int) int {\n\t\tif num < 0 {\n\t\t\treturn -num\n\t\t}\n\t\treturn num\n\t}\n\treturn abs(s.x) + abs(s.y)\n}", "func ManhattanDistance(p1, p2 Point) int {\n\tdx := p1.x - p2.x\n\tif dx < 0 {\n\t\tdx = -dx\n\t}\n\tdy := p1.y - p2.y\n\tif dy < 0 {\n\t\tdy = -dy\n\t}\n\n\treturn dx + dy\n}", "func (d Position2D) GetManhattanDistance(cmpPos Position2D) int {\n\treturn max(d.Row-cmpPos.Row, cmpPos.Row-d.Row) + max(d.Col-cmpPos.Col, cmpPos.Col-d.Col)\n}", "func (v Vector3D) ManhattanDistance(o Vector3D) int {\n\treturn v.Sub(o).ManhattanLength()\n}", "func (p PointI) Manhattan(p2 PointI) int32 {\n\treturn p.VecTo(p2).Manhattan()\n}", "func (p Point2D) Manhattan(p2 Point2D) float32 {\n\treturn p.VecTo(p2).Manhattan()\n}", "func ManhattanDist(a Location, b Location) int {\n\treturn util.AbsInt(a.Row-b.Row) + util.AbsInt(a.Col-b.Col)\n}", "func ManhattanDistance(firstVector, secondVector []float64) (float64, error) {\n\tdistance := 0.\n\tfor ii := range firstVector {\n\t\tdistance += math.Abs(firstVector[ii] - secondVector[ii])\n\t}\n\treturn distance, nil\n}", "func CalculateManhattanDistance(directionCountMap map[Direction]int) int {\n\teastWest := math.Abs(float64(directionCountMap[East]) - float64(directionCountMap[West]))\n\tnorthSouth := math.Abs(float64(directionCountMap[North]) - float64(directionCountMap[South]))\n\treturn int(eastWest + northSouth)\n}", "func distance(point Point) (int) {\n\txDist := point.X\n\tyDist := point.Y\n\n\tif xDist < 0 {\n\t\txDist *= -1\n\t}\n\n\tif yDist < 0 {\n\t\tyDist *= -1\n\t}\n\n\treturn xDist + yDist\n}", "func (p *Point) DistanceToPoint(q Point) float64 {\n\tv := p.Minus(q)\n\treturn v.Length()\n}", "func findDistance(here *location, knownLocations []location) int {\n\tvar totalDistance int\n\tfor _, spot := range knownLocations {\n\t\ttotalDistance = totalDistance + manhattanDistance(*here, spot)\n\t}\n\tfmt.Println(\"distance: \", totalDistance)\n\treturn 
totalDistance\n}", "func (p *kdTreePoint) Distance(other kdtree.Point) float64 {\n\tvar res float64\n\tfor i := 0; i < p.Dim(); i++ {\n\t\ttmp := p.GetValue(i) - other.GetValue(i)\n\t\tres += tmp * tmp\n\t}\n\treturn res\n}", "func (p point) distanceToOrigin() int {\n\treturn p.distanceToPoint(point{x: 0, y: 0})\n}", "func hammingDistance(x int, y int) int {\n\n}", "func (c Creature) dist() int {\n\tx := math.Pow(float64(Hero.X()-c.X()), 2)\n\ty := math.Pow(float64(Hero.Y()-c.Y()), 2)\n\n\treturn int(math.Sqrt(x + y))\n}", "func (v Vector3D) ManhattanLength() int {\n\treturn AbsInt(v.x) + AbsInt(v.y) + AbsInt(v.z)\n}", "func (p Point) Distance(p2 Point) float64 {\n\treturn math.Sqrt(math.Pow(p2.x-p.x, 2) + math.Pow(p2.y-p.y, 2))\n}", "func (p1 Point) Distance(p2 Point) int {\n\tx1 := big.NewInt(int64(p1.X))\n\tx2 := big.NewInt(int64(p2.X))\n\n\ty1 := big.NewInt(int64(p1.Y))\n\ty2 := big.NewInt(int64(p2.Y))\n\n\tdiffX := big.NewInt(0).Sub(x1, x2)\n\tdiffX.Abs(diffX)\n\n\tdiffY := big.NewInt(0).Sub(y1, y2)\n\tdiffY.Abs(diffY)\n\n\tdistance := big.NewInt(0).Add(diffX, diffY)\n\n\treturn int(distance.Int64())\n}", "func (c *cluster) DistanceTo(other Cluster,clusterDistanceAlgo DistanceMeasure,pointDistanceAlgo PointDistance)(distance float64,err error){\n\t// check for nil pointers\n\tif other == nil {\n\t\terr = errors.New(\"pointer to other cluster is nil\")\n\t\treturn\n\t}\n\n\t// check for type compatibility\n\tif reflect.TypeOf(c) == reflect.TypeOf(other) {\n\t\tdistance = clusterDistanceAlgo(c,other,pointDistanceAlgo)\n\t} else {\n\t\tdistance = clusterDistanceAlgo(c,other,pointDistanceAlgo)\n\t\tfmt.Printf(\"obj type:%T\\tparameter type:%T\\n\",c,other)\n\t\t//err = errors.New(\"pointer to other cluster is not of same type as pointer to this cluster obj\")\n\t}\n\treturn\n}", "func (p Point) Distance(p2 Point) float32 {\n\treturn p.VecTo(p2).Len()\n}", "func (p path) findDistanceToIntersection(inter point) int {\n\tdist := 0\n\n\tfor i := 0; i < len(p)-1; i++ 
{\n\t\tfirstPoint := p[i]\n\t\tsecondPoint := p[i+1]\n\t\tif isBetween := inter.isBetweenTwoPoints(firstPoint, secondPoint); isBetween {\n\t\t\tdist += firstPoint.manhattanDistance(inter)\n\t\t\tbreak\n\t\t}\n\t\tdist += firstPoint.manhattanDistance(secondPoint)\n\t}\n\treturn dist\n}", "func (p Point) Distance(c Comparable) float64 {\n\tq := c.(Point)\n\tvar sum float64\n\tfor dim, c := range p {\n\t\td := c - q[dim]\n\t\tsum += d * d\n\t}\n\treturn math.Sqrt(sum)\n}", "func (from Dot) DistanceTo(to Dot) (res uint16) {\n\tif !from.Equals(to) {\n\t\tif from.X > to.X {\n\t\t\tres = uint16(from.X - to.X)\n\t\t} else {\n\t\t\tres = uint16(to.X - from.X)\n\t\t}\n\n\t\tif from.Y > to.Y {\n\t\t\tres += uint16(from.Y - to.Y)\n\t\t} else {\n\t\t\tres += uint16(to.Y - from.Y)\n\t\t}\n\t}\n\n\treturn\n}", "func (c card) distance(d card) int {\n\tdist := d.number - c.number\n\tif dist < 0 {\n\t\tdist += 13\n\t}\n\treturn dist\n}", "func (m match) dist() uint32 {\n\treturn uint32(m.distance - minDistance)\n}", "func distance(x Point, y Point) float64 {\n\treturn math.Sqrt(math.Pow(x[0]-y[0],2) + math.Pow(x[1]-y[1],2))\t \n}", "func (u *Unit) distance(c C) int {\n\tdist, _ := u.Game.bfs(u.Pos, c)\n\treturn dist\n}", "func distance(p1 Point, p2 Point) float64 {\n\tfirst := math.Pow(float64(p2.x-p1.x), 2)\n\tsecond := math.Pow(float64(p2.y-p1.y), 2)\n\treturn math.Sqrt(first + second)\n}", "func (c *Client) DistanceTo(other *Coordinate) time.Duration {\n\tc.mutex.RLock()\n\tdefer c.mutex.RUnlock()\n\n\treturn c.coord.DistanceTo(other)\n}", "func (p Point2) Distance(p2 Point2) float64 {\n\treturn Distance2(p.X(), p.Y(), p2.X(), p2.Y())\n}", "func (p Point) Distance(q Point) float64 { // HL\n\treturn math.Hypot(q.X-p.X, q.Y-p.Y)\n}", "func (p Vector3) DistanceTo(o Vector3) Coord {\n\tdx := p.X - o.X\n\tdy := p.Y - o.Y\n\tdz := p.Z - o.Z\n\treturn Coord(math.Sqrt(float64(dx*dx + dy*dy + dz*dz)))\n}", "func (p *kdTreePoint) PlaneDistance(val float64, i int) float64 {\n\ttmp := 
p.GetValue(i) - val\n\treturn tmp * tmp\n}", "func (p Point) Dist(other Point) int {\n\treturn int(math.Abs(float64(p.X-other.X)) + math.Abs(float64(p.Y-other.Y)))\n}", "func (p *SamplePoint2D) PlaneDistance(planePosition float64, dim int) float64 {\n\treturn math.Abs(planePosition - p.Dimension(dim))\n}", "func dist(p1, p2 Coord) int {\n return abs(p1.x-p2.x) + abs(p1.y-p2.y)\n}", "func determineDistance(maze maze, source, destination coordinate) int {\n\tfmt.Println(\"Determining distance between\", source, \"and\", destination)\n\tvisited := make(map[coordinate]bool)\n\ttoVisit := []pathResult{pathResult{source, 0}}\n\tfor {\n\t\tnext := toVisit[0]\n\t\ttoVisit = toVisit[1:]\n\t\tvisited[next.coordinate] = true\n\t\tfor _, neighbor := range destinations(next.coordinate) {\n\t\t\tif neighbor == destination {\n\t\t\t\tfmt.Println(\"Found destination at\", 1+next.length)\n\t\t\t\treturn 1 + next.length\n\t\t\t}\n\t\t\tif visited[neighbor] || maze.positions[neighbor] == \"#\" {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\ttoVisit = append(toVisit, pathResult{neighbor, 1 + next.length})\n\t\t}\n\t}\n}", "func (c Hex) Distance(otherC Color) float64 {\n\treturn c.getRGB().Distance(otherC)\n}", "func (p Point) Dist(other Point) (out int) {\n\tvar res Point\n\tres = other.Sub(p)\n\tout = abs(res.X)+abs(res.Y)\n\treturn\n}", "func (knn *Knn) distance(p1 []float64, p2 []float64) float64 {\n\tvar value float64 = 0\n\tfor i := 0; i < len(p1); i++ {\n\t\tvalue += math.Pow(p1[i]-p2[i], 2)\n\t}\n\treturn math.Sqrt(value)\n}", "func (p *Particle) Distance(pa *Particle) float64 {\n\treturn p.Position.Distance(pa.Position)\n}", "func getDistance(x, y int) int {\n\treturn int(cityMatrix[x][y])\n}", "func (p Point) Distance2(p2 Point) float32 {\n\treturn p.VecTo(p2).Len2()\n}", "func (t *Taxi) getDistance(l Location) float64 {\n\treturn t.location.DistanceTo(l)\n}", "func (p Position) DistanceTo(position Position) float64 {\n\tp1 := geojson.Position{p.X, p.Y, p.Z}\n\tp2 := 
geojson.Position{position.X, position.Y, position.Z}\n\treturn p1.DistanceTo(p2)\n}", "func (space Space) PointDist(point1 []float64, point2 []float64) float64 {\n\treturn 1 - Cosinus(point1, point2)\n}", "func planetDistance(planet1, planet2 int64) (distance int64) {\n\treturn int64(1000 + 5*math.Abs(float64(planet2-planet1)))\n}", "func (p PointI) Distance(p2 PointI) float32 {\n\treturn p.VecTo(p2).Len()\n}", "func (p *SamplePoint2D) Distance(p2 Point) float64 {\n\tsum := 0.\n\tfor i := 0; i < p.Dimensions(); i++ {\n\t\tsum += math.Pow(p.Dimension(i)-p2.Dimension(i), 2.0)\n\t}\n\treturn math.Sqrt(sum)\n}", "func (p *Plane3D) closestDistance(otherPoint *Vector3D.Vector3D) float64 {\n\n\t// Returns the closest distance from the given point otherPoint to the\n\t// point on the surface in local frame.\n\totherPointLocal := p.transform.toLocal(otherPoint)\n\td := p.closestPointLocal(otherPointLocal)\n\n\t//Returns the distance to the other vector.1\n\n\treturn otherPointLocal.Substract(d).Length()\n}", "func (start GeoPoint) distanceTo(end GeoPoint) float64 {\n\tearthRadius := 6371e3\n\tstartLatRad := start.Lat * math.Pi / 180\n\tendLatRad := end.Lat * math.Pi / 180\n\tdeltaLatRad := (end.Lat - start.Lat) * math.Pi / 180\n\tdeltaLongRad := (end.Long - start.Long) * math.Pi / 180\n\n\ta := math.Sin(deltaLatRad/2)*math.Sin(deltaLatRad/2) +\n\t\tmath.Cos(startLatRad)*math.Cos(endLatRad)*\n\t\t\tmath.Sin(deltaLongRad/2)*math.Sin(deltaLongRad/2)\n\n\tc := 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a))\n\n\treturn earthRadius * c\n}", "func distance(x0, x1, y0, y1 int) float64 {\n\tdistance := math.Sqrt(math.Pow(float64(x1-x0), 2) + math.Pow(float64(y1-y0), 2))\n\treturn distance\n}", "func (n *GridNode) ActualDistanceToNeighbor(node astar.Node) float64 {\n\tif isDiagonal(n, node.(*GridNode)) {\n\t\treturn math.Sqrt2 * n.Cost\n\t}\n\treturn n.Cost\n}", "func (entity Entity) CalculateDistanceTo(target Entity) float64 {\n\tdx := target.X - entity.X\n\tdy := target.Y - 
entity.Y\n\n\treturn math.Sqrt(dx*dx + dy*dy)\n}", "func distance(x1, y1, x2, y2 float64) float64 {\n\ta := x2 - x1\n\tb := y2 - y1\n\treturn math.Sqrt(a*a + b*b)\n}", "func featureDistance(n1, n2 int) int {\n\tvar features int = 0\n\tfor i := 0; i < 5; i++ {\n\t\tf1, f2 := extract(n1, uint(i)), extract(n2, uint(i))\n\t\tif f1 == f2 {\n\t\t\tfeatures++\n\t\t}\n\t}\n\treturn 6 - features\n}", "func (v Vector2) DistanceTo(other Vector) float64 {\r\n\totherv := checkVector2(other)\r\n\treturn v.Subtract(otherv).Length()\r\n}", "func (p *Point) Dist(oth *Point) float64 {\n\treturn math.Sqrt((oth.X-p.X)*(oth.X-p.X) + (oth.Y-p.Y)*(oth.Y-p.Y))\n}", "func (w world) distance(p1, p2 location) float64 {\n\ts1, c1 := math.Sincos(rad(p1.lat))\n\ts2, c2 := math.Sincos(rad(p2.lat))\n\n\tclong := math.Cos(rad(p1.long - p2.long))\n\n\treturn w.radius * math.Acos(s1*s2+c1*c2*clong)\n}", "func (w world) distance(p1, p2 location) float64 {\n\ts1, c1 := math.Sincos(rad(p1.lat))\n\ts2, c2 := math.Sincos(rad(p2.lat))\n\tclong := math.Cos(rad(p1.long - p2.long))\n\treturn w.radius * math.Acos(s1*s2+c1*c2*clong)\n}", "func (p Point2D) Distance(p2 Point2D) float32 {\n\treturn p.VecTo(p2).Len()\n}", "func (p1 XY) Distance(p2 XY) float64 {\n\tdx, dy := p2.X-p1.X, p2.Y-p1.Y\n\treturn math.Sqrt(float64(dx*dx) + float64(dy*dy))\n}", "func Distance(a, b Point) float64 {\n\tcx, cy := cathetus(a, b)\n\treturn math.Sqrt(math.Pow(cx, 2) + math.Pow(cy, 2))\n}", "func (s *GeoDistanceSort) Point(lat, lon float64) *GeoDistanceSort {\n\ts.points = append(s.points, GeoPointFromLatLon(lat, lon))\n\treturn s\n}", "func (p PointI) Distance2(p2 PointI) int32 {\n\treturn p.VecTo(p2).Len2()\n}", "func (self *Chromosome) Distance() (distance int) {\n\tdistance = 0\n\n\tfor i, location := range self.Locations {\n\t\tif i+1 < self.Length() {\n\t\t\tdistance += self.Matrix.DistanceMap[strconv.Itoa(location.Id)+strconv.Itoa(self.Locations[i+1].Id)]\n\t\t} else {\n\t\t\tdistance += 
self.Matrix.DistanceMap[strconv.Itoa(location.Id)+strconv.Itoa(self.Locations[0].Id)]\n\t\t}\n\t}\n\n\treturn\n}", "func (w *RandomWorld) Distance(from, to GoWorld.Location) float64 {\n\treturn math.Sqrt(math.Pow(float64(from.X-to.X), 2) + math.Pow(float64(from.Y-to.Y), 2))\n}", "func (b *OGame) Distance(origin, destination ogame.Coordinate) int64 {\n\treturn Distance(origin, destination, b.serverData.Galaxies, b.serverData.Systems, b.serverData.DonutGalaxy, b.serverData.DonutSystem)\n}", "func (l Location) DistanceTo(otherLoc Location) float64 {\n\tlatDist, lonDist := l.ComponentDistanceTo(otherLoc)\n\treturn math.Sqrt(math.Pow(latDist, 2) + math.Pow(lonDist, 2))\n}", "func traitDistance(n1, n2 int, pos uint) int {\n\td := extract(n1, pos) - extract(n2, pos)\n\tif d < 0 {\n\t\treturn d * -1\n\t} else {\n\t\treturn d\n\t}\n}", "func (p Point3) Distance(p2 Point3) float64 {\n\treturn Distance3(p.X(), p.Y(), p.Z(), p2.X(), p2.Y(), p2.Z())\n}", "func (loc1 location) distance(loc2 location) float64 {\n\tvar la1, lo1, la2, lo2, r float64\n\tlat1 := loc1.Lat\n\tlon1 := loc1.Long\n\tlat2 := loc2.Lat\n\tlon2 := loc2.Long\n\tla1 = lat1 * math.Pi / 180\n\tlo1 = lon1 * math.Pi / 180\n\tla2 = lat2 * math.Pi / 180\n\tlo2 = lon2 * math.Pi / 180\n\n\tr = 6378100 // Earth radius in METERS\n\n\t// calculate\n\th := hsin(la2-la1) + math.Cos(la1)*math.Cos(la2)*hsin(lo2-lo1)\n\n\treturn math.Round(2 * r * math.Asin(math.Sqrt(h)))\n}", "func (s *Segment) Distance() float64 {\n\torigin := geodist.Point{Lat: s.Origin.Lat, Long: s.Origin.Long}\n\tdestination := geodist.Point{Lat: s.Destination.Lat, Long: s.Destination.Long}\n\n\tdistance, err := geodist.VincentyDistance(origin, destination)\n\tif err != nil {\n\t\treturn math.NaN()\n\t}\n\n\treturn distance * kmToMiles\n}", "func digDist(p1, p2 Coord) int {\n return max(abs(p1.x-p2.x)+abs(p1.y-p2.y)-1, 0)\n}", "func (loc Location) StepTo(other Location) (Direction, bool) {\n\tif ManhattanDist(loc, other) > 1 {\n\t\treturn Nowhere, 
false\n\t}\n\n\tswitch other.Row - loc.Row {\n\tcase -1:\n\t\treturn Up, true\n\tcase 1:\n\t\treturn Down, true\n\t}\n\n\tswitch other.Col - loc.Col {\n\tcase -1:\n\t\treturn Left, true\n\tcase 1:\n\t\treturn Right, true\n\t}\n\n\treturn Nowhere, true\n}", "func minCostConnectPoints(points [][]int) int {\n\tif len(points) == 0 {\n\t\treturn 0\n\t}\n\n\tstartingPoint := &Edge{\n\t\tPoint: Point{X: points[0][0], Y: points[0][1]},\n\t\tCost: 0,\n\t}\n\n\tpriorityQueue := PriorityQueue{startingPoint}\n\n\tcost := 0\n\n\tvisitedPoints := make(map[Point]bool)\n\n\tfor len(priorityQueue) > 0 && len(visitedPoints) < len(points) {\n\t\tedge := heap.Pop(&priorityQueue).(*Edge)\n\n\t\tif visitedPoints[edge.Point] {\n\t\t\tcontinue\n\t\t}\n\n\t\tvisitedPoints[edge.Point] = true\n\n\t\tcost += edge.Cost\n\n\t\tfor _, point := range points {\n\t\t\tp := Point{X: point[0], Y: point[1]}\n\n\t\t\tif visitedPoints[p] {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\theap.Push(&priorityQueue, &Edge{\n\t\t\t\tPoint: p,\n\t\t\t\tCost: manhattanDistance(edge.Point, p),\n\t\t\t})\n\t\t}\n\t}\n\n\treturn cost\n}", "func findNearNeighbor(x []float64, y []float64, points map[int]int, start int) int {\n neighbor := 0\n smDist := 10000000.0\n\n // Loop through all yet visited points to find out the cloesest point to current point.\n for i := 1; i < len(points); i++ {\n if (i != start) && (points[i] != 0) {\n sqSum := math.Pow(x[i] - x[start], 2) + math.Pow(y[i] - y[start], 2)\n dist := math.Sqrt(sqSum)\n if dist < smDist {\n neighbor = i\n smDist = dist\n }\n }\n }\n return neighbor\n}", "func ChebyDist(p1, p2 Point) int {\n\treturn Max(Abs(p1.X-p2.X), Abs(p1.Y-p2.Y))\n}", "func (g Coords) Distance() float64 {\n\t// * removed returned variable\n\treturn math.Sqrt(math.Pow(g.X2-g.X1, 2) + math.Pow(g.Y2-g.Y1, 2))\n}", "func (p *Point) Distance(lon, lat float64) uint {\n\tl1, l2 := toRad(p.Lat), toRad(lat)\n\tdLon := toRad(p.Lon) - toRad(lon)\n\tdLat := l1 - l2\n\ts1 := math.Sin(dLat / 2)\n\ts1 *= s1\n\tf1 
:= math.Sin(dLon / 2)\n\ts2 := f1 * f1 * math.Cos(l1) * math.Cos(l2)\n\tv := math.Sqrt(s1 + s2)\n\treturn uint(2 * EarthRadius * math.Asin(v))\n}", "func (p *Plane3D) closestPoint(otherPoint *Vector3D.Vector3D) *Vector3D.Vector3D {\n\n\t// Returns the closest distance from the given point otherPoint to the\n\t// point on the surface in local frame.\n\n\totherPointLocal := p.transform.toLocal(otherPoint)\n\td := p.closestPointLocal(otherPointLocal)\n\treturn p.transform.toWorld(d)\n}", "func getDistance(x1 float64, y1 float64, x2 float64, y2 float64) float64 {\n\treturn math.Sqrt(math.Pow(x1-x2, 2) + math.Pow(y1-y2, 2))\n}", "func (vec Vector2) Distance(vec2 Vector2) float32 {\n\txd := vec2.X - vec.X\n\tyd := vec2.Y - vec.Y\n\treturn Sqrt(xd*xd + yd*yd)\n}", "func (h *hnsw) deleteEntrypoint(node *vertex, denyList helpers.AllowList) error {\n\tif h.isOnlyNode(node, denyList) {\n\t\t// no point in finding another entrypoint if this is the only node\n\t\treturn nil\n\t}\n\n\tnode.Lock()\n\tlevel := node.level\n\tnode.Unlock()\n\n\tnewEntrypoint, level := h.findNewEntrypoint(denyList, level)\n\n\th.Lock()\n\th.entryPointID = newEntrypoint\n\th.currentMaximumLayer = level\n\th.Unlock()\n\th.commitLog.SetEntryPointWithMaxLayer(newEntrypoint, level)\n\n\treturn nil\n}", "func (g *Game) attackMask(from Square) uint64 {\n\tpiece, found := g.board.PieceAt(from)\n\tif !found {\n\t\t// No piece, nothing threatened\n\t\treturn 0\n\t}\n\tpiece = piece.WithArmy(g.armies[ColorIdx(piece.Color())])\n\tswitch piece.Name() {\n\tcase PieceNameClassicKing, PieceNameBasicKing, PieceNameEmpoweredQueen, PieceNameTwoKingsKing:\n\t\treturn dist1Mask[from.Address]\n\tcase PieceNameBasicQueen:\n\t\tdiag := diagMask[from.Address] & g.board.occupiedMask()\n\t\torth := orthMask[from.Address] & g.board.occupiedMask()\n\t\treturn diagAttackMask[from.Address][diag] | orthAttackMask[from.Address][orth]\n\tcase PieceNameBasicBishop:\n\t\tdiag := diagMask[from.Address] & 
g.board.occupiedMask()\n\t\treturn diagAttackMask[from.Address][diag]\n\tcase PieceNameBasicKnight, PieceNameAnimalsKnight:\n\t\treturn knightMask[from.Address]\n\tcase PieceNameBasicRook:\n\t\torth := orthMask[from.Address] & g.board.occupiedMask()\n\t\treturn orthAttackMask[from.Address][orth]\n\tcase PieceNameBasicPawn, PieceNameNemesisPawn:\n\t\tsign := ColorIdx(piece.Color())*2 - 1\n\t\tmask := uint64(0)\n\t\tif from.X() > 0 {\n\t\t\tmask |= 1 << (int(from.Address) - 1 + 8*sign)\n\t\t}\n\t\tif from.X() < 7 {\n\t\t\tmask |= 1 << (int(from.Address) + 1 + 8*sign)\n\t\t}\n\t\treturn mask\n\tcase PieceNameNemesisQueen:\n\t\tdiag := diagMask[from.Address] & g.board.occupiedMask()\n\t\torth := orthMask[from.Address] & g.board.occupiedMask()\n\t\t// Nemesis queen can only threaten kings\n\t\tthreat := ^g.board.occupiedMask() | g.board.pieceMask(TypeKing)\n\t\treturn (diagAttackMask[from.Address][diag] | orthAttackMask[from.Address][orth]) & threat\n\tcase PieceNameEmpoweredBishop, PieceNameEmpoweredKnight, PieceNameEmpoweredRook:\n\t\t// Same-colored adjacent pieces, including the from square.\n\t\townAdjacent := (adjacentMask[from.Address] | from.mask()) & g.board.colorMask(piece.Color())\n\t\tmask := uint64(0)\n\t\tif ownAdjacent&g.board.pieceMask(TypeRook) != 0 {\n\t\t\torth := orthMask[from.Address] & g.board.occupiedMask()\n\t\t\tmask |= orthAttackMask[from.Address][orth]\n\t\t}\n\t\tif ownAdjacent&g.board.pieceMask(TypeBishop) != 0 {\n\t\t\tdiag := diagMask[from.Address] & g.board.occupiedMask()\n\t\t\tmask |= diagAttackMask[from.Address][diag]\n\t\t}\n\t\tif ownAdjacent&g.board.pieceMask(TypeKnight) != 0 {\n\t\t\tmask |= knightMask[from.Address]\n\t\t}\n\t\treturn mask\n\tcase PieceNameReaperQueen:\n\t\tcandidates := ^maskRank[7*ColorIdx(piece.Color())]\n\t\tkings := g.board.pieceMask(TypeKing) & g.board.colorMask(OtherColor(piece.Color()))\n\t\treturn candidates &^ kings\n\tcase PieceNameReaperRook:\n\t\treturn ^g.board.occupiedMask()\n\tcase 
PieceNameAnimalsQueen:\n\t\torth := orthMask[from.Address] & g.board.occupiedMask()\n\t\treturn orthAttackMask[from.Address][orth] | knightMask[from.Address]\n\tcase PieceNameAnimalsBishop:\n\t\tdiag := diagMask[from.Address] & g.board.occupiedMask()\n\t\treturn diagAttackMask[from.Address][diag] & dist2Mask[from.Address]\n\tcase PieceNameAnimalsRook:\n\t\treturn orthAttackMask[from.Address][0] & dist3Mask[from.Address]\n\tdefault:\n\t\tpanic(\"Invalid piece type\")\n\t}\n}", "func distance(b board) (d float64) {\n\tfor k := range dist {\n\t\td += dist[k] * freq[b[k]]\n\t}\n\treturn\n}", "func (p Point) getDist(q Point) float64 {\n\tdx := p.X - q.X\n\tdy := p.Y - q.Y\n\treturn dx*dx + dy*dy\n}", "func Distance(x, y, x2, y2 int32) int32 {\n\n\tdx := x - x2\n\tdy := y - y2\n\tds := (dx * dx) + (dy * dy)\n\treturn int32(math.Sqrt(math.Abs(float64(ds))))\n\n}", "func (e Edge) Distance() float64 {\n\treturn PointerDistance(e.A, e.B)\n}", "func digTurnDist(p1, p2 Coord) int {\n return int(math.Ceil(float64(digDist(p1, p2)) / MOVE_DIST))\n}", "func (vd VectorDistancer) Distance(i, j int) float64 {\n\tvi := vd[i]\n\tvj := vd[j]\n\tdist := 0.0\n\tfor k, vik := range vi {\n\t\tvjk := vj[k]\n\t\tdist += (vik - vjk) * (vik - vjk)\n\t}\n\treturn dist\n}", "func (p *Point) To(other *Point) float64 {\n\tX2 := math.Pow(float64(p.X-other.X), 2)\n\tY2 := math.Pow(float64(p.Y-other.Y), 2)\n\treturn math.Sqrt(X2 + Y2)\n}", "func (gdt *Vector3) DistanceTo(b Vector3) Real {\n\targ0 := gdt.getBase()\n\targ1 := b.getBase()\n\n\tret := C.go_godot_vector3_distance_to(GDNative.api, arg0, arg1)\n\n\treturn Real(ret)\n}", "func DistanceBetweenPoints(p1, p2 *Point) float64 {\n\treturn math.Sqrt(math.Pow(p2.X-p1.X, 2) + math.Pow(p2.Y-p1.Y, 2))\n}", "func MinkowskiDistance(firstVector, secondVector []float64, p float64) (float64, error) {\n\tdistance := 0.\n\tfor ii := range firstVector {\n\t\tdistance += math.Pow(math.Abs(firstVector[ii]-secondVector[ii]), p)\n\t}\n\treturn math.Pow(distance, 
1/p), nil\n}", "func (c Cell) DistanceToEdge(a, b Point) s1.ChordAngle {\n\t// Possible optimizations:\n\t// - Currently the (cell vertex, edge endpoint) distances are computed\n\t// twice each, and the length of AB is computed 4 times.\n\t// - To fix this, refactor GetDistance(target) so that it skips calculating\n\t// the distance to each cell vertex. Instead, compute the cell vertices\n\t// and distances in this function, and add a low-level UpdateMinDistance\n\t// that allows the XA, XB, and AB distances to be passed in.\n\t// - It might also be more efficient to do all calculations in UVW-space,\n\t// since this would involve transforming 2 points rather than 4.\n\n\t// First, check the minimum distance to the edge endpoints A and B.\n\t// (This also detects whether either endpoint is inside the cell.)\n\tminDist := minChordAngle(c.Distance(a), c.Distance(b))\n\tif minDist == 0 {\n\t\treturn minDist\n\t}\n\n\t// Otherwise, check whether the edge crosses the cell boundary.\n\tcrosser := NewChainEdgeCrosser(a, b, c.Vertex(3))\n\tfor i := 0; i < 4; i++ {\n\t\tif crosser.ChainCrossingSign(c.Vertex(i)) != DoNotCross {\n\t\t\treturn 0\n\t\t}\n\t}\n\n\t// Finally, check whether the minimum distance occurs between a cell vertex\n\t// and the interior of the edge AB. 
(Some of this work is redundant, since\n\t// it also checks the distance to the endpoints A and B again.)\n\t//\n\t// Note that we don't need to check the distance from the interior of AB to\n\t// the interior of a cell edge, because the only way that this distance can\n\t// be minimal is if the two edges cross (already checked above).\n\tfor i := 0; i < 4; i++ {\n\t\tminDist, _ = UpdateMinDistance(c.Vertex(i), a, b, minDist)\n\t}\n\treturn minDist\n}", "func (g *Grid) ClosestPointTo(q Point) (Point, int) {\n\tmin := BIG\n\tclosest := Point{}\n\tindex := -1\n\tfor i, p := range g.Points {\n\t\tif p == q {\n\t\t\treturn p, i\n\t\t}\n\t\td := p.ManhattanDistance(q)\n\t\tif d < min {\n\t\t\tclosest = p\n\t\t\tmin = d\n\t\t\tindex = i\n\t\t} else if d == min {\n\t\t\tindex = -1\n\t\t}\n\t}\n\treturn closest, index\n}", "func distance(xa, ya, xb, yb int) float64 {\n\tx := math.Abs(float64(xa - xb))\n\ty := math.Abs(float64(ya - yb))\n\treturn math.Sqrt(x*x + y*y)\n}", "func BenchmarkCryptopanPoint(b *testing.B) {\n\tcpan, err := anonymizer.CryptoPan(testKey)\n\tif err != nil {\n\t\tb.Fatal(\"New(testKey) failed:\", err)\n\t}\n\tb.ResetTimer()\n\n\tpoint := geopoint.Value(75071988303315493)\n\tfor i := 0; i < b.N; i++ {\n\t\t_ = cpan.Anonymize(point)\n\t}\n}" ]
[ "0.69864243", "0.679119", "0.67451143", "0.6473577", "0.6260607", "0.61529875", "0.61317617", "0.6004432", "0.58881074", "0.5762732", "0.5652657", "0.5214538", "0.5203245", "0.51792425", "0.5148506", "0.5065928", "0.505602", "0.50225365", "0.5002041", "0.49768972", "0.49323055", "0.49303418", "0.49296376", "0.4909509", "0.48665223", "0.48529097", "0.4829389", "0.4818055", "0.4758331", "0.47578406", "0.47021893", "0.4663602", "0.4652975", "0.46145594", "0.46097404", "0.4603543", "0.4600276", "0.45895532", "0.45888832", "0.4585833", "0.45776066", "0.45546558", "0.4551296", "0.45494196", "0.45465326", "0.45317134", "0.45275158", "0.45238122", "0.45212284", "0.45167106", "0.45129782", "0.45080614", "0.45059645", "0.44850862", "0.4482974", "0.44548815", "0.4446741", "0.44282654", "0.43802556", "0.4343054", "0.43419895", "0.43305814", "0.43284246", "0.43245965", "0.43218467", "0.43163234", "0.4314554", "0.43046197", "0.42902142", "0.42853674", "0.42848268", "0.42682976", "0.4257212", "0.42504236", "0.42453438", "0.4224682", "0.42111397", "0.42028877", "0.41977304", "0.4196486", "0.41949192", "0.419445", "0.41773096", "0.41746506", "0.41588596", "0.41525784", "0.4145602", "0.41439784", "0.41249502", "0.41155115", "0.4114707", "0.41020474", "0.40998283", "0.40994832", "0.40838245", "0.40805277", "0.4073799", "0.40725073", "0.40628368", "0.40588915" ]
0.64441717
4
abs returns the absolute value of i.
func abs(i int) int { if i < 0 { return -i } return i }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func absInt(i int) int {\n\tif i > 0 {\n\t\treturn i\n\t}\n\treturn i * -1\n}", "func Iabs(a int) int {\n\tif a < 0 {\n\t\treturn -a\n\t}\n\treturn a\n}", "func iAbs(x int) int { if x >= 0 { return x } else { return -x } }", "func abs(val int) int {\n\tif val < 0 {\n\t\treturn -val\n\t}\n\treturn val\n}", "func abs(v int) int {\n\tif v < 0 {\n\t\treturn -v\n\t}\n\treturn v\n}", "func abs(a int) int {\n\tif a < 0 {\n\t\treturn -a\n\t}\n\treturn a\n}", "func abs(a int) int {\n\tif a < 0 {\n\t\treturn -a\n\t}\n\treturn a\n}", "func abs(n int) int {\n\tif n < 0 {\n\t\treturn -n\n\t}\n\treturn n\n}", "func (bi Int) Abs() Int {\n\tif bi.GreaterThanEqual(Zero()) {\n\t\treturn bi.Copy()\n\t}\n\treturn bi.Neg()\n}", "func IAbs(n int) int {\n\tif n < 0 {\n\t\treturn -n\n\t}\n\tif n == 0 {\n\t\treturn 0\n\t}\n\treturn n\n}", "func (i *Number) Absolute() *Number {\n\treturn NewNumber(math.Abs(i.value))\n}", "func abs(n int) int {\n\tif n >= 0 {\n\t\treturn n\n\t}\n\treturn -1 * n\n}", "func (i I) Abs() I {\n\tif i.X < 0 {\n\t\ti.X = -i.X\n\t}\n\tif i.Y < 0 {\n\t\ti.Y = -i.Y\n\t}\n\treturn i\n}", "func abs(x int) int {\r\n\tif x < 0 {\r\n\t\treturn -x\r\n\t}\r\n\treturn x\r\n}", "func abs(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func abs(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func abs(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func abs(x int) int {\n\tif x < 0{\n\t\treturn -x\n\t}\n\treturn x\n}", "func absInt(v int) int {\n\tif v < 0 {\n\t\treturn -v\n\t}\n\treturn v\n}", "func IntAbs(a int) int {\n\tif a < 0 {\n\t\treturn -a\n\t}\n\treturn a\n}", "func abs(n int) int {\n if n < 0 {\n return -n\n }\n return n\n}", "func abs(num int64) int64 {\n\tif num < 0 {\n\t\treturn -num\n\t}\n\treturn num\n}", "func abs(a int) int {\r\n if a < 0 {\r\n return -a\r\n }\r\n return a\r\n}", "func abs(n int) int {\n if n > 0 {\n return n\n }\n\n return -n\n}", "func abs(x int64) int64 {\n\tif x < 0 {\n\t\treturn 
-x\n\t}\n\treturn x\n}", "func abs(x int64) int64 {\n\tif x >= 0 {\n\t\treturn x\n\t}\n\treturn -x\n}", "func Absolute(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func Abs(a int) int {\n\tif a > 0 {\n\t\treturn a\n\t}\n\treturn -a\n}", "func Abs(a int) int {\n\tif a > 0 {\n\t\treturn a\n\t}\n\treturn -a\n}", "func Abs(a int) int {\n\tif a > 0 {\n\t\treturn a\n\t}\n\treturn -a\n}", "func (z *Int) Abs() *Int {\n\tif z.Lt(SignedMin) {\n\t\treturn z\n\t}\n\tz.Sub(zero, z)\n\treturn z\n}", "func Abs(a int) int {\n\tif a < 0 {\n\t\treturn -a\n\t}\n\treturn a\n}", "func IntAbs(x int) int {\n\tif x > 0 {\n\t\treturn x\n\t}\n\n\treturn -x\n}", "func intAbs(n int64) int64 {\n\treturn int64(math.Abs(float64(n)))\n}", "func absInt(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func abs(n int64) int64 {\n\treturn int64(math.Abs(float64(n)))\n}", "func (i Instruction) ToAbsolute() Instruction {\n\treturn i & 0xdf\n}", "func (m mathUtil) AbsInt(value int) int {\n\tif value < 0 {\n\t\treturn -value\n\t}\n\treturn value\n}", "func abs(n int32) int32 {\n\tif n < 0 {\n\t\treturn -n\n\t}\n\treturn n\n}", "func Abs(a int) int {\n\treturn neogointernal.Opcode1(\"ABS\", a).(int)\n}", "func getAbsoluteValue(value int) int {\n\tif value < 0 {\n\t\treturn -value\n\t}\n\treturn value\n}", "func Abs(v int) int {\n\tif v > 0 {\n\t\treturn v\n\t}\n\treturn -v\n}", "func abs(n int) int {\n\ty := n >> 31\n\treturn (n ^ y) - y\n}", "func Abs(operand int) int {\n\tif operand < 0 {\n\t\treturn operand * -1\n\t}\n\treturn operand\n}", "func Abs(n int) int {\n\tif n < 0 {\n\t\treturn -n\n\t}\n\treturn n\n}", "func Abs(n int) int {\n\tif n < 0 {\n\t\treturn -n\n\t}\n\treturn n\n}", "func Abs(n int) int {\n\tif n < 0 {\n\t\treturn -n\n\t}\n\treturn n\n}", "func Abs(n int) int {\n\tif n < 0 {\n\t\treturn -n\n\t}\n\treturn n\n}", "func Abs(n int) int {\n\tif n < 0 {\n\t\treturn -n\n\t}\n\treturn n\n}", "func Absolute(num cty.Value) (cty.Value, error) {\n\treturn 
AbsoluteFunc.Call([]cty.Value{num})\n}", "func getAbs(a int) int {\n if a < 0 {\n return -a\n }\n return a\n}", "func AbsInt(a int) int {\n\tif a < 0 {\n\t\treturn -a\n\t}\n\treturn a\n}", "func Abs(n int) int {\n\tif n >= 0 {\n\t\treturn n\n\t}\n\treturn -n\n}", "func Abs(number int) int {\n\tif number > 0 {\n\t\treturn number\n\t}\n\treturn -number\n}", "func AbsInt(a int) int {\n\tif a < 0 {\n\t\treturn -1 * a\n\t} else {\n\t\treturn a\n\t}\n}", "func Abs(b int) int {\n\tif b < 0 {\n\t\treturn -b\n\t}\n\n\treturn b\n}", "func (z *Int) Abs(x *Int) *Int {}", "func AbsInt(v int) int {\n\tif v < 0 {\n\t\treturn -v\n\t}\n\treturn v\n}", "func Abs(num int) int{\n\tif num > 0{\n\t\treturn num\n\t}\n\treturn (-1)*num\n}", "func Abs(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func Abs(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func Abs(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func Abs(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func Abs(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func Abs(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func Abs(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func Abs(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func Abs(x int) int {\n\tif x > 0 {\n\t\treturn x\n\t}\n\treturn -x\n}", "func abs(x int32) int32 {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func abs(slice []float64) []float64 {\n\tvar newSlice []float64\n\tfor _, value := range slice {\n\t\tif value < 0.0 {\n\t\t\tvalue = math.Abs(value)\n\t\t}\n\t\tnewSlice = append(newSlice, value)\n\t}\n\treturn newSlice\n}", "func AbsInt(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func AbsInt(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func (cf customFloat) abs() customFloat {\n\tif cf < 0 {\n\t\treturn cf * -1\n\t}\n\treturn cf\n}", "func absValueIf(v int64, absolute bool) int64 
{\n\tif absolute && v < 0 {\n\t\tv = -v\n\t}\n\treturn v\n}", "func PrintAbs(v Abser) {\n\ta := v.Abs()\n\tfmt.Println(a)\n}", "func abs(in []complex128, out []complex128) {\n\tfor i := range in {\n\t\tout[i] = complex(cmplx.Abs(in[i]), 0)\n\t}\n}", "func AbsInt(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t} else {\n\t\treturn x\n\t}\n}", "func Abs(z, x *big.Int) *big.Int {\n\treturn z.Abs(x)\n}", "func Abs(n int) int {\n if n < 0 {\n return -n\n }\n return n\n}", "func goAbs(x int) int {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}", "func (f *Float) Abs(x *Float) *Float {\n\tx.doinit()\n\tf.doinit()\n\tC.mpf_abs(&f.i[0], &x.i[0])\n\treturn f\n}", "func Abs(in Res) Res {\n\tsign := in.Output().Copy()\n\tanyvec.GreaterThan(sign, sign.Creator().MakeNumeric(0))\n\tsign.Scale(sign.Creator().MakeNumeric(2))\n\tsign.AddScalar(sign.Creator().MakeNumeric(-1))\n\treturn Mul(in, NewConst(sign))\n}", "func IntAbs(z *big.Int, x *big.Int,) *big.Int", "func opI64Abs(expr *CXExpression, fp int) {\n\tinpV0 := ReadI64(fp, expr.Inputs[0])\n\tsign := inpV0 >> 63\n\toutB0 := (inpV0 ^ sign) - sign\n\tWriteI64(GetOffset_i64(fp, expr.Outputs[0]), outB0)\n}", "func Abs(x float64) float64 {\n\tif x < 0 {\n\t\tx = -x\n\t}\n\treturn x\n}", "func opI16Abs(expr *CXExpression, fp int) {\n\tV0 := ReadI16(fp, expr.Inputs[0])\n\tsign := V0 >> 15\n\toutB0 := (V0 ^ sign) - sign\n\tWriteI16(GetOffset_i16(fp, expr.Outputs[0]), outB0)\n}", "func Abs(a *big.Float) *big.Float {\n\treturn ZeroBigFloat().Abs(a)\n}", "func (self *State)Abs(a any)any{\n self.IncOperations(self.coeff[\"abs\"]+self.off[\"abs\"])\n return wrap1(a,math.Abs)\n}", "func Abs(x float64) float64 {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\tif x == 0 {\n\t\treturn 0 // return correctly abs(-0)\n\t}\n\treturn x\n}", "func Abs(x int) int {\n if x < 0 {\n return -x\n }\n return x\n}", "func funcAbs(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {\n\treturn simpleFunc(vals, enh, math.Abs)\n}", "func Abs(arg 
float64) float64 {\n\treturn math.Abs(arg)\n}", "func mathAbs(ctx phpv.Context, args []*phpv.ZVal) (*phpv.ZVal, error) {\n\tvar z *phpv.ZVal\n\t_, err := core.Expand(ctx, args, &z)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tz, err = z.AsNumeric(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tswitch z.GetType() {\n\tcase phpv.ZtInt:\n\t\ti := z.AsInt(ctx)\n\t\tif i < 0 {\n\t\t\treturn (-i).ZVal(), nil\n\t\t} else {\n\t\t\treturn i.ZVal(), nil\n\t\t}\n\tcase phpv.ZtFloat:\n\t\treturn phpv.ZFloat(math.Abs(float64(z.AsFloat(ctx)))).ZVal(), nil\n\tdefault:\n\t\treturn phpv.ZNull{}.ZVal(), nil\n\t}\n}", "func (v *V) MaxAbs() (f float64, idx int) {\n\tif v.IsNaV() {\n\t\tpanic(ErrNaV)\n\t}\n\tf = NaN()\n\tidx = -1\n\tfor i, e := range v.Data {\n\t\tabs := math.Abs(e)\n\t\tif abs > f || IsNaN(f) {\n\t\t\tf = abs\n\t\t\tidx = i\n\t\t}\n\t}\n\tif IsNaN(f) {\n\t\tidx = -1\n\t\treturn\n\t}\n\treturn f, idx\n}", "func (m *MockFinder) Abs(v []byte) []byte {\n\treturn m.fnd.Abs(v)\n}", "func (v Vector) MaxAbs() float64 {\n\tvar res float64\n\tfor _, x := range v {\n\t\tres = math.Max(res, math.Abs(x))\n\t}\n\treturn res\n}", "func absolute(x int32) int32 {\n\tmask := x >> 31\n\treturn (x + mask) ^ mask\n}", "func (fn *formulaFuncs) ABS(argsList *list.List) formulaArg {\n\tif argsList.Len() != 1 {\n\t\treturn newErrorFormulaArg(formulaErrorVALUE, \"ABS requires 1 numeric argument\")\n\t}\n\targ := argsList.Front().Value.(formulaArg).ToNumber()\n\tif arg.Type == ArgError {\n\t\treturn arg\n\t}\n\treturn newNumberFormulaArg(math.Abs(arg.Number))\n}", "func Abs(x int32) int32 {\n\tif x < 0 {\n\t\treturn -x\n\t}\n\treturn x\n}" ]
[ "0.79870635", "0.7803045", "0.77631706", "0.7357731", "0.73555064", "0.7263729", "0.7263729", "0.7168951", "0.7148395", "0.71423554", "0.71259594", "0.7081049", "0.7076965", "0.70459354", "0.7041281", "0.7041281", "0.7041281", "0.7021915", "0.69966334", "0.69423425", "0.6911024", "0.6907641", "0.69048303", "0.6882799", "0.67948824", "0.6784397", "0.67615515", "0.67409915", "0.67409915", "0.67409915", "0.67323726", "0.6728746", "0.6728441", "0.6727615", "0.66721475", "0.666987", "0.66554683", "0.6650624", "0.66506034", "0.659721", "0.6593671", "0.6588123", "0.6578961", "0.6569658", "0.65568393", "0.65568393", "0.65568393", "0.65568393", "0.65568393", "0.65366125", "0.6531973", "0.65310323", "0.6525651", "0.65246284", "0.65195113", "0.6512692", "0.64732796", "0.647254", "0.6462355", "0.64242405", "0.64242405", "0.64242405", "0.64242405", "0.64242405", "0.64242405", "0.64242405", "0.64242405", "0.6422998", "0.6369103", "0.6304846", "0.6250863", "0.6250863", "0.62449324", "0.6244077", "0.62193584", "0.6205013", "0.6200891", "0.61909056", "0.6189972", "0.61738056", "0.6153702", "0.61136216", "0.60702395", "0.6059942", "0.6014051", "0.6011109", "0.5987813", "0.598776", "0.598516", "0.59811693", "0.59741807", "0.59519", "0.5949076", "0.59489733", "0.59460557", "0.591124", "0.58813643", "0.5880957", "0.58705956" ]
0.8626241
1
unchangingAxis returns what axis and the value of that axis. It assumes that exactly one axis is changing.
func (v segment) unchangingAxis() (val int, xAxis bool) { if v.from.x == v.to.x { return v.from.x, true } return v.from.y, false }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (i *InputHandler) GetAxis(negativeKey, positiveKey Key) float32 {\n\tsum := float32(0)\n\tif i.GetKey(negativeKey) {\n\t\tsum -= 1.0\n\t}\n\tif i.GetKey(positiveKey) {\n\t\tsum += 1.0\n\t}\n\treturn sum\n}", "func (js *joystickState) getAxis(joystick Joystick, axis int) float64 {\n\t// Check that the joystick and axis is valid, return 0 by default.\n\tif js.axis[joystick] == nil || axis >= len(js.axis[joystick]) || axis < 0 {\n\t\treturn 0\n\t}\n\treturn float64(js.axis[joystick][axis])\n}", "func (js *joystickState) getAxis(joystick Joystick, axis int) float64 {\n\t// Check that the joystick and axis is valid, return 0 by default.\n\tif js.axis[joystick] == nil || axis >= len(js.axis[joystick]) || axis < 0 {\n\t\treturn 0\n\t}\n\treturn float64(js.axis[joystick][axis])\n}", "func (m *WorkbookChartAxes) GetValueAxis()(WorkbookChartAxisable) {\n return m.valueAxis\n}", "func UnpackAxis(value int64) UnpackAttr {\n\treturn func(m optionalAttr) {\n\t\tm[\"axis\"] = value\n\t}\n}", "func (i *InputHandler) GetAxis2D(negHorizontalKey, posHorizontalKey, negVerticalKey, posVerticalKey Key) Vector2 {\n\taxis := Vector2{i.GetAxis(negHorizontalKey, posHorizontalKey), i.GetAxis(negVerticalKey, posVerticalKey)}\n\treturn axis.Normalize()\n}", "func (gdt *Vector3) GetAxis(axis Vector3Axis) Real {\n\targ0 := gdt.getBase()\n\targ1 := axis.getBase()\n\n\tret := C.go_godot_vector3_get_axis(GDNative.api, arg0, arg1)\n\n\treturn Real(ret)\n}", "func (m *WorkbookChartAxes) GetCategoryAxis()(WorkbookChartAxisable) {\n return m.categoryAxis\n}", "func (self *SinglePad) Axis(axisCode int) int{\n return self.Object.Call(\"axis\", axisCode).Int()\n}", "func axisMax(val int) (int, int) {\n\tif val < 10 {\n\t\treturn 10, 1\n\t}\n\n\t// If val is less than 100, return val rounded up to the next 10\n\tif val < 100 {\n\t\tx := val % 10\n\t\treturn val + 10 - x, 10\n\t}\n\n\t// If val is less than 500, return val rounded up to the next 50\n\tif val < 500 {\n\t\tx := val % 50\n\t\treturn 
val + 50 - x, 50\n\t}\n\treturn 1000, 100\n}", "func PackAxis(value int64) PackAttr {\n\treturn func(m optionalAttr) {\n\t\tm[\"axis\"] = value\n\t}\n}", "func (m *WorkbookChartAxes) GetSeriesAxis()(WorkbookChartAxisable) {\n return m.seriesAxis\n}", "func (b *BoundingBox) LongestAxis() (axis int) {\n\n\tif b.Dim(0) < b.Dim(1) {\n\t\tif b.Dim(1) < b.Dim(2) {\n\t\t\taxis = 2\n\t\t} else {\n\t\t\taxis = 1\n\t\t}\n\t} else {\n\t\tif b.Dim(0) < b.Dim(2) {\n\t\t\taxis = 2\n\t\t} else {\n\t\t\taxis = 0\n\t\t}\n\t}\n\treturn\n}", "func OneHotAxis(value int64) OneHotAttr {\n\treturn func(m optionalAttr) {\n\t\tm[\"axis\"] = value\n\t}\n}", "func (w *Window) JoystickAxis(js Joystick, axis int) float64 {\n\treturn w.currJoy.getAxis(js, axis)\n}", "func (t *Dense) RollAxis(axis, start int, safe bool) (retVal *Dense, err error) {\n\tdims := t.Dims()\n\n\tif !(axis >= 0 && axis < dims) {\n\t\terr = errors.Errorf(invalidAxis, axis, dims)\n\t\treturn\n\t}\n\n\tif !(start >= 0 && start <= dims) {\n\t\terr = errors.Wrap(errors.Errorf(invalidAxis, axis, dims), \"Start axis is wrong\")\n\t\treturn\n\t}\n\n\tif axis < start {\n\t\tstart--\n\t}\n\n\tif axis == start {\n\t\tretVal = t\n\t\treturn\n\t}\n\n\taxes := BorrowInts(dims)\n\tdefer ReturnInts(axes)\n\n\tfor i := 0; i < dims; i++ {\n\t\taxes[i] = i\n\t}\n\tcopy(axes[axis:], axes[axis+1:])\n\tcopy(axes[start+1:], axes[start:])\n\taxes[start] = axis\n\n\tif safe {\n\t\treturn t.SafeT(axes...)\n\t}\n\terr = t.T(axes...)\n\tretVal = t\n\treturn\n}", "func (q Quat) Axis() Vec3f {\n\treturn Vec3f{q.X, q.Y, q.Z}\n}", "func DequantizeAxis(value int64) DequantizeAttr {\n\treturn func(m optionalAttr) {\n\t\tm[\"axis\"] = value\n\t}\n}", "func (self *SinglePad) ProcessAxisChange(axisState interface{}) {\n self.Object.Call(\"processAxisChange\", axisState)\n}", "func (this *BoundingBox) LongestAxis() int {\n\tid, max := 0, 0.0\n\n\tfor i := range this.Min {\n\t\tl := this.AxisLength(i)\n\t\tif l > max {\n\t\t\tmax = l\n\t\t\tid = 
i\n\t\t}\n\t}\n\n\treturn id\n}", "func (w *Window) JoystickAxis(js Joystick, axis GamepadAxis) float64 {\n\treturn w.currJoy.getAxis(js, int(axis))\n}", "func (m *WorkbookChartAxes) SetValueAxis(value WorkbookChartAxisable)() {\n m.valueAxis = value\n}", "func (crs WebMercator) MajorAxis() float64 {\n\treturn spheroid(crs.GeodeticDatum).MajorAxis()\n}", "func (crs XYZ) MajorAxis() float64 {\n\treturn spheroid(crs.GeodeticDatum).MajorAxis()\n}", "func (graph *Graph) GetDimension() (int, int) {\n\treturn graph.dimX, graph.dimY // contains start point so -1\n}", "func cleanAxis(axis *[]int) *[]int {\n\n\tif len(*axis) < 2 {\n\t\treturn axis\n\t}\n\tlength := len(*axis) - 1\n\tfor i := 0; i < length; i++ {\n\t\tfor j := i + 1; j <= length; j++ {\n\t\t\tif (*axis)[i] == (*axis)[j] {\n\t\t\t\tif j == length {\n\t\t\t\t\t*axis = (*axis)[:j]\n\t\t\t\t} else {\n\t\t\t\t\t*axis = append((*axis)[:j], (*axis)[j+1:]...)\n\t\t\t\t}\n\t\t\t\tlength--\n\t\t\t\tj--\n\t\t\t}\n\t\t}\n\t}\n\treturn axis\n}", "func (joint B2WheelJoint) GetLocalAxisA() B2Vec2 {\n\treturn joint.M_localXAxisA\n}", "func (v *Vector) setAxisValue(axis []bool, val int) error {\n\tif len(axis) < 3 {\n\t\treturn fmt.Errorf(\"|axis| != 3\")\n\t}\n\tif axis[X] {\n\t\tv.X = val\n\t}\n\n\tif axis[Y] {\n\t\tv.Y = val\n\t}\n\n\tif axis[Z] {\n\t\tv.Z = val\n\t}\n\t\n\treturn nil\n}", "func (s *Simplex) getColumn(pivotColumn int) []float64 {\n\tvar columnValues []float64\n\tfor i := 0; i < s.RowsSize; i++ {\n\t\tcolumnValues = append(columnValues, s.Tableau[i][pivotColumn])\n\t}\n\treturn columnValues\n}", "func (crs TransverseMercator) MajorAxis() float64 {\n\treturn spheroid(crs.GeodeticDatum).MajorAxis()\n}", "func (self *SinglePad) AxisI(args ...interface{}) int{\n return self.Object.Call(\"axis\", args).Int()\n}", "func getAbsAxis(v Vector) []bool {\n\taxes := make([]bool, 3)\n\t\n\taxes[X] = v.X == 0\n\taxes[Y] = v.Y == 0\n\taxes[Z] = v.Z == 0\n\t\n\treturn axes\n}", "func (o *UserSessionQueryTileConfiguration) 
GetHasAxisBucketing() bool {\n\tif o == nil || o.HasAxisBucketing == nil {\n\t\tvar ret bool\n\t\treturn ret\n\t}\n\treturn *o.HasAxisBucketing\n}", "func (a *AxisPosition) normalize(gtx layout.Context, axis layout.Axis, elements int, dimensioner Dimensioner) {\n\tif a.First < 0 {\n\t\ta.First = 0\n\t}\n\tif a.First > elements {\n\t\ta.First = elements - 1\n\t}\n\n\tconstraint := axis.Convert(gtx.Constraints.Max).X\n\tfor a.Offset < 0 && a.First > 0 {\n\t\ta.First--\n\t\tdim := dimensioner(axis, a.First, constraint)\n\t\ta.Offset += dim\n\t\ta.OffsetAbs += dim\n\t}\n\tif a.Offset < 0 {\n\t\ta.Offset = 0\n\t}\n\tfor a.Offset > dimensioner(axis, a.First, constraint) && a.First < elements-1 {\n\t\tdim := dimensioner(axis, a.First, constraint)\n\t\ta.First++\n\t\ta.Offset -= dim\n\t\ta.OffsetAbs += dim\n\t}\n}", "func (gdt *Vector3) SetAxis(axis Vector3Axis, val Real) {\n\targ0 := gdt.getBase()\n\targ1 := axis.getBase()\n\targ2 := val.getBase()\n\n\tC.go_godot_vector3_set_axis(GDNative.api, arg0, arg1, arg2)\n}", "func (crs LambertConformalConic2SP) MajorAxis() float64 {\n\treturn spheroid(crs.GeodeticDatum).MajorAxis()\n}", "func UniteAxisRanges(axises []*plot.Axis) {\n\tmin := math.MaxFloat64\n\tmax := -math.MaxFloat64\n\n\tfor _, axis := range axises {\n\t\tmin = math.Min(axis.Min, min)\n\t\tmax = math.Max(axis.Max, max)\n\t}\n\n\tfor _, axis := range axises {\n\t\taxis.Min = min\n\t\taxis.Max = max\n\t}\n\n\treturn\n}", "func (crs Projection) MajorAxis() float64 {\n\treturn spheroid(crs.GeodeticDatum).MajorAxis()\n}", "func (m *TuiModel) GetColumn() int {\n\treturn m.mouseEvent.X / m.CellWidth()\n}", "func (sp *Space) GetXY() (float64, float64) {\n\n\tif len(*sp) > 0 {\n\t\treturn (*sp)[0].GetXY()\n\t}\n\treturn 0, 0\n\n}", "func (j *Joystick) AxisName(stick, axis int) (string, error) {\n\tname_ := C.al_get_joystick_axis_name((*C.ALLEGRO_JOYSTICK)(j), C.int(stick), C.int(axis))\n\tif name_ == nil {\n\t\treturn \"\", fmt.Errorf(\"axis '%d' not found on joystick 
'%s'\", stick, j.Name())\n\t}\n\treturn C.GoString(name_), nil\n}", "func matchPositionInAxis(axis int, draw bool, from *Node, to *Node) {\n\tnodeName := from.Id\n\tfromX := from.CurrLoc.X\n\tfromY := from.CurrLoc.Y\n\ttoX := to.CurrLoc.X\n\ttoY := to.CurrLoc.Y\n\n\tnodeTrail := \"\"\n\tif draw {\n\t\tnodeTrail = \"t\" + nodeName[len(nodeName)-1:]\n\t}\n\n\tnodePlayer := getPlayerState(from.Id)\n\n\tif axis == AXIS_X { // Match X axis.\n\t\ti := fromX\n\t\tincrement := -1\n\t\tif toX > fromX {\n\t\t\tincrement = 1\n\t\t}\n\t\tfor i != toX {\n\t\t\tboard[fromY][i] = nodeTrail\n\t\t\ti = increment + i\n\t\t}\n\t\tboard[fromY][i] = nodePlayer\n\t\tfrom.CurrLoc.X = toX\n\t} else { // Match Y axis.\n\t\ti := fromY\n\t\tincrement := -1\n\t\tif toY > fromY {\n\t\t\tincrement = 1\n\t\t}\n\t\tfor i != toY {\n\t\t\tboard[fromY][i] = nodeTrail\n\t\t\ti = increment + i\n\t\t}\n\t\tboard[i][fromX] = nodePlayer\n\t\tfrom.CurrLoc.Y = toY\n\t}\n}", "func (this *BoundingBox) AxisLength(i int) float64 {\n\tif i < 0 || i > len(this.Min)-1 {\n\t\treturn 0\n\t}\n\treturn math.Abs(this.Min[i] - this.Max[i])\n}", "func (b *BarChart) DataRange() (xmin, xmax, ymin, ymax float64) {\n\n// fmt.Println (\"DataRange start \" )\n\n\n\tcatMin := b.XMin\n\tcatMax := catMin + float64(len(b.Values)-1)\n\n\tvalMin := math.Inf(1)\n\tvalMax := math.Inf(-1)\n\tfor i, val := range b.Values {\n\t\tvalBot := b.stackedOn.BarHeight(i)\n\t\tvalTop := valBot + val\n\t\tvalMin = math.Min(valMin, math.Min(valBot, valTop))\n\t\tvalMax = math.Max(valMax, math.Max(valBot, valTop))\n\t}\n\tif !b.Horizontal { // case normal\n\t\treturn catMin, catMax, valMin, valMax\n\t}\n//\tfmt.Println (\"DataRange valMin\" ,valMin)\n// fmt.Println (\"DataRange valMax\" ,valMax)\n// fmt.Println (\"DataRange catMin\" ,catMin)\n// fmt.Println (\"DataRange catMax\" ,catMax)\n\n// fmt.Println (\"DataRange end \" )\n\n\treturn valMin, valMax, catMin, catMax\n}", "func (crs AlbersEqualAreaConic) MajorAxis() float64 {\n\treturn 
spheroid(crs.GeodeticDatum).MajorAxis()\n}", "func (*XyChart_Axis) Descriptor() ([]byte, []int) {\n\treturn file_google_monitoring_dashboard_v1_xychart_proto_rawDescGZIP(), []int{0, 1}\n}", "func (c *Switch) GetDims() (int, int) {\n\tc.lock.RLock()\n\tw, h := c.subRenderables[c.curRenderable].GetDims()\n\tc.lock.RUnlock()\n\treturn w, h\n}", "func (self *SinglePad) OnAxisCallback() interface{}{\n return self.Object.Get(\"onAxisCallback\")\n}", "func (c Coordinate) Col() int { return c.col }", "func validateAxisTimeChart(v interface{}, k string) (we []string, errors []error) {\n\tvalue := v.(string)\n\tif value != \"right\" && value != \"left\" {\n\t\terrors = append(errors, fmt.Errorf(\"%s not allowed; must be either right or left\", value))\n\t}\n\treturn\n}", "func getOrientation(p Point, q Point, r Point) int {\n\tvar orientation = (q.y-p.y)*(r.x-q.x) - (q.x-p.x)*(r.y-q.y)\n\n\tif orientation == 0 {\n\t\treturn 0\n\t} else if orientation > 0 {\n\t\treturn 1\n\t} else {\n\t\treturn 2\n\t}\n}", "func (p Point) Col() int {\n\treturn p.col\n}", "func (m *WorkbookChartAxes) SetSeriesAxis(value WorkbookChartAxisable)() {\n m.seriesAxis = value\n}", "func (rg Range) Column(x int) Range {\n\tif rg.Min.Shift(x, 0).In(rg) {\n\t\trg.Min.X = rg.Min.X + x\n\t\trg.Max.X = rg.Min.X + 1\n\t} else {\n\t\trg = Range{}\n\t}\n\treturn rg\n}", "func (lc *LineChart) axesDetails(cvs *canvas.Canvas) (*axes.XDetails, *axes.YDetails, error) {\n\treqXHeight := axes.RequiredHeight(lc.maxXValue(), lc.xLabels, lc.opts.xLabelOrientation)\n\typ := &axes.YProperties{\n\t\tMin: lc.yMin,\n\t\tMax: lc.yMax,\n\t\tReqXHeight: reqXHeight,\n\t\tScaleMode: lc.opts.yAxisMode,\n\t}\n\tyd, err := axes.NewYDetails(cvs.Area(), yp)\n\tif err != nil {\n\t\treturn nil, nil, fmt.Errorf(\"NewYDetails => %v\", err)\n\t}\n\n\tconst xMin = 0\n\txMax := lc.maxXValue()\n\txd, err := lc.xDetails(cvs, yd.Start.X, xMin, xMax)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\treturn xd, yd, nil\n}", "func 
UniformDequantizeQuantizationAxis(value int64) UniformDequantizeAttr {\n\treturn func(m optionalAttr) {\n\t\tm[\"quantization_axis\"] = value\n\t}\n}", "func (bbs BollingerBandsSeries) GetYAxis() YAxisType {\n\treturn bbs.YAxis\n}", "func (cs ContinuousSeries) GetYAxis() YAxisType {\n\treturn cs.YAxis\n}", "func (t *Edge) XY() (x, y int) {\n\treturn t.x, t.y\n}", "func (gdt *Vector3) MaxAxis() Int {\n\targ0 := gdt.getBase()\n\n\tret := C.go_godot_vector3_max_axis(GDNative.api, arg0)\n\n\treturn Int(ret)\n}", "func (sheet *SheetData) GetColumn(col int) []interface{} {\n\t//We need to transpose the data\n\tcolData := make([]interface{}, 0)\n\n\t//March over each row\n\tfor r := range sheet.Values {\n\t\t//If it has the column add it\n\t\tif len(sheet.Values[r]) > col+1 {\n\t\t\tcolData = append(colData, sheet.Values[r][col])\n\t\t}\n\n\t}\n\n\treturn colData\n}", "func (m *WorkbookChartAxes) SetCategoryAxis(value WorkbookChartAxisable)() {\n m.categoryAxis = value\n}", "func (self *SinglePad) ProcessAxisChangeI(args ...interface{}) {\n self.Object.Call(\"processAxisChange\", args)\n}", "func (gdt *Vector3) MinAxis() Int {\n\targ0 := gdt.getBase()\n\n\tret := C.go_godot_vector3_min_axis(GDNative.api, arg0)\n\n\treturn Int(ret)\n}", "func (b *BoundingBox) Dim(axis int) float32 {\n\treturn b.Bounds[1][axis] - b.Bounds[0][axis]\n}", "func getMiddle(x int, y int) int {\n\tif x > y {\n\t\treturn ((x - y) / 2) + y\n\t} else {\n\t\treturn ((y - x) / 2) + x\n\t}\n}", "func (r paintingRobot) getGridInfo() (int, int, point) {\n xMin, xMax, yMin, yMax := 0, 0, 0, 0\n for _, p := range r.paintedPoints {\n if p.x > xMax {\n xMax = p.x\n }\n if p.x < xMin {\n xMin = p.x\n }\n if p.y > yMax {\n yMax = p.y\n }\n if p.y < yMin {\n yMin = p.y\n }\n }\n\n return int(math.Abs(float64(xMin))) + xMax + 1, int(math.Abs(float64(yMin))) + yMax + 1,\n point{\n x: int(math.Abs(float64(xMin))),\n y: int(math.Abs(float64(yMin))),\n }\n}", "func (n *node) chooseSplitAxis() int { //TODO Make the 
code prettier\n\t//[CSA 1]\n\t//Entries sorted by Latitude\n\tS_lat := 0.000000 //used to determine the best axis to split on\n\tbestK_lat := 0 //used to determine the best distribution\n\tminOverlap_lat := -1.000000\n\tbest_area_lat := -1.000000\n\tsortByLat := make([]entry, len(n.entries)) // len(sortByLat) == len(n.entries) is needed for copy to work\n\tcopy(sortByLat, n.entries)\n\tsort.Sort(byLat(sortByLat))\n\n\t//Entries sorted by Longitude\n\tS_long := 0.000000 //used to determine the best axis to split on\n\tbestK_long := 0 //used to determine the best distribution\n\tminOverlap_long := -1.000000\n\tbest_area_long := -1.000000\n\tsort.Sort(byLong(n.entries))\n\n\t//For each axis: M - 2m + 2 distributions of the M+1 entries into two groups are determined\n\td := (RTree_M - (2 * RTree_m) + 2)\n\tfor k := 1; k <= d; k++ {\n\t\t//By Latitude\n\t\tLatGroup1 := make([]entry, (RTree_m - 1 + k))\n\t\tLatGroup2 := make([]entry, (RTree_M - len(LatGroup1) + 1))\n\t\tcopy(LatGroup1, sortByLat[:RTree_m-1+k])\n\t\tcopy(LatGroup2, sortByLat[RTree_m-1+k:])\n\t\tlatGoodness := marginOf(LatGroup1) + marginOf(LatGroup2)\n\t\tS_lat += latGoodness\n\t\t// test if this distribution has the best overlap value for latitude\n\t\tmbr1 := mbrOf(LatGroup1...)\n\t\tmbr2 := mbrOf(LatGroup2...)\n\t\tif o := mbr1.OverlapWith(mbr2); o <= minOverlap_lat || minOverlap_lat == -1 {\n\t\t\tif o < minOverlap_lat || minOverlap_lat == -1 {\n\t\t\t\tbestK_lat = k //we have a new best\n\t\t\t\tminOverlap_lat = o\n\t\t\t\tbest_area_lat = mbr1.Area() + mbr2.Area()\n\t\t\t} else { //tie -> keep the distribution with the least area\n\t\t\t\ta_now := mbr1.Area() + mbr2.Area()\n\t\t\t\tif a_now < best_area_lat {\n\t\t\t\t\tbestK_lat = k //we have a new best\n\t\t\t\t\tminOverlap_lat = o\n\t\t\t\t\tbest_area_lat = mbr1.Area() + mbr2.Area()\n\t\t\t\t}\n\t\t\t}\n\t\t} //else don't change the value\n\n\t\t//By Longitude\n\t\tLongGroup1 := make([]entry, (RTree_m - 1 + k))\n\t\tLongGroup2 := make([]entry, 
(RTree_M - len(LongGroup1) + 1))\n\t\tcopy(LongGroup1, n.entries[:RTree_m-1+k])\n\t\tcopy(LongGroup2, n.entries[RTree_m-1+k:])\n\t\tlongGoodness := marginOf(LongGroup1) + marginOf(LongGroup2)\n\t\tS_long += longGoodness\n\t\t// test if this distribution has the best overlap value for longitude\n\t\tmbr1 = mbrOf(LongGroup1...)\n\t\tmbr2 = mbrOf(LongGroup2...)\n\t\tif o := mbr1.OverlapWith(mbr2); o <= minOverlap_long || minOverlap_long == -1 {\n\t\t\tif o < minOverlap_long || minOverlap_long == -1 {\n\t\t\t\tbestK_long = k //we have a new best\n\t\t\t\tminOverlap_long = o\n\t\t\t\tbest_area_long = mbr1.Area() + mbr2.Area()\n\t\t\t} else { //tie -> keep the distribution with the least area\n\t\t\t\ta_now := mbr1.Area() + mbr2.Area()\n\t\t\t\tif a_now < best_area_long {\n\t\t\t\t\tbestK_long = k //we have a new best\n\t\t\t\t\tminOverlap_long = o\n\t\t\t\t\tbest_area_long = mbr1.Area() + mbr2.Area()\n\t\t\t\t}\n\t\t\t}\n\t\t} //else don't change the value\n\t}\n\t//CSA2: Choose the axis with the minimum S as split axis\n\tif S_lat < S_long {\n\t\tn.entries = sortByLat\n\t\treturn bestK_lat\n\t}\n\treturn bestK_long\n}", "func (q Quat) AxisRotation() (Vec3f, float32) {\n\t// Based on: http://glmatrix.net/docs/module-quat.html\n\trad := q.Angle()\n\ts := math32.Sin(rad * 0.5)\n\tif s < Epsilon { // no rotation\n\t\treturn Vec3f{1, 0, 0}, rad\n\t}\n\treturn Vec3f{q.X / s, q.Y / s, q.Z / s}, rad\n}", "func (board *Board) getPreviousXY(x, y int) (x_, y_ int, err error) {\n\tx_ = x - 1\n\ty_ = y\n\tif x_ < 0 {\n\t\tx_ = board.dim_x - 1\n\t\ty_ -= 1\n\t}\n\tif y_ < 0 {\n\t\treturn -1, -1, errors.New(\"Already at first cell\")\n\t} else {\n\t\tif board.cells[y_][x_].fixed {\n\t\t\treturn board.getPreviousXY(x_, y_)\n\t\t}\n\t\treturn x_, y_, nil\n\t}\n}", "func (c *Compound) GetDims() (int, int) {\n\tc.lock.RLock()\n\tw, h := c.subRenderables[c.curRenderable].GetDims()\n\tc.lock.RUnlock()\n\treturn w, h\n}", "func column(k int) int {\n\tswitch c := k / 4; {\n\tcase row(k) == 
0:\n\t\treturn c - 1\n\tcase row(k) == 3 && isLeftHand(k):\n\t\treturn c + 1\n\tdefault:\n\t\treturn c\n\t}\n}", "func (b *box) getFieldValue(x, y int) int {\n\treturn b.values[x+y*3]\n}", "func (b *TicTacToeBoard) leftUpperCoordinate(rowIndex, columnIndex int) (int, int) {\n\tswitch {\n\tcase rowIndex == 0 || columnIndex == 0:\n\t\treturn rowIndex, columnIndex\n\n\tcase rowIndex == 1 || columnIndex == 1:\n\t\treturn rowIndex - 1, columnIndex - 1\n\n\tcase rowIndex == 2 || columnIndex == 2:\n\t\treturn rowIndex - 2, columnIndex - 2\n\n\tdefault: //case rowIndex >= 3 || columnIndex >= 3:\n\t\treturn rowIndex - 3, columnIndex - 3\n\t}\n}", "func (bbs *BollingerBandsSeries) GetBoundedLastValues() (x, y1, y2 float64) {\n\tif bbs.InnerSeries == nil {\n\t\treturn\n\t}\n\tperiod := bbs.GetPeriod()\n\tseriesLength := bbs.InnerSeries.Len()\n\tstartAt := seriesLength - period\n\tif startAt < 0 {\n\t\tstartAt = 0\n\t}\n\n\tvb := seq.NewBufferWithCapacity(period)\n\tfor index := startAt; index < seriesLength; index++ {\n\t\txn, yn := bbs.InnerSeries.GetValues(index)\n\t\tvb.Enqueue(yn)\n\t\tx = xn\n\t}\n\n\tay := seq.Seq{Provider: vb}.Average()\n\tstd := seq.Seq{Provider: vb}.StdDev()\n\n\ty1 = ay + (bbs.GetK() * std)\n\ty2 = ay - (bbs.GetK() * std)\n\n\treturn\n}", "func (m *TransposableMatrix) Get(x, y int) Slot {\n\txm, ym := m.transposeMapped2Base(x, y)\n\tif xm == -1 || ym == -1 {\n\t\treturn Slot{Label: \"~~\"}\n\t}\n\treturn m.m[xm][ym]\n}", "func (crs LonLat) MajorAxis() float64 {\n\treturn spheroid(crs.GeodeticDatum).MajorAxis()\n}", "func (me TGlyphOrientationVerticalValueType) String() string { return xsdt.String(me).String() }", "func (XyChart_Axis_Scale) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_monitoring_dashboard_v1_xychart_proto_rawDescGZIP(), []int{0, 1, 0}\n}", "func (gd *Definition) PointToRowCol(x, y float64) (row, col int) {\n\tvar wg sync.WaitGroup\n\twg.Add(2)\n\n\tif gd.Rotation != 0. 
{\n\t\tlog.Fatalf(\" Definition.PointToRowCol todo\")\n\t}\n\n\trow = -1\n\tcol = -1\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tif y > gd.Norig {\n\t\t\treturn\n\t\t}\n\t\tfor {\n\t\t\trow++\n\t\t\tif row > gd.Nrow {\n\t\t\t\trow = -1 // gd.Nrow +1\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif gd.Norig-float64(row+1)*gd.Cwidth <= y {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}()\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tif x < gd.Eorig {\n\t\t\treturn\n\t\t}\n\t\tfor {\n\t\t\tcol++\n\t\t\tif col > gd.Ncol {\n\t\t\t\tcol = -1 // gd.Ncol +1\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif gd.Eorig+float64(col+1)*gd.Cwidth >= x {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n\n\twg.Wait()\n\treturn\n}", "func ReadDimension(valueStart, nullStart unsafe.Pointer,\n\tindex int, dataType memCom.DataType, enumReverseDict []string, meta *TimeDimensionMeta, cache map[TimeDimensionMeta]map[int64]string) *string {\n\tisTimeDimension := meta != nil\n\t// check for nulls\n\tif *(*uint8)(memAccess(nullStart, index)) == 0 {\n\t\treturn nil\n\t}\n\n\t// determine value width in bytes\n\tvalueBytes := memCom.DataTypeBytes(dataType)\n\tvaluePtr := memAccess(valueStart, valueBytes*index)\n\n\t// read intValue; handle float and signed types\n\tvar intValue int64\n\tvar result string\n\tswitch dataType {\n\tcase memCom.Float32:\n\t\t// in case time dimension value was converted to float for division\n\t\tif isTimeDimension {\n\t\t\tintValue = int64(*(*float32)(valuePtr))\n\t\t} else {\n\t\t\tresult = strconv.FormatFloat(float64(*(*float32)(valuePtr)), 'g', -1, 32)\n\t\t\treturn &result\n\t\t}\n\tcase memCom.Int64, memCom.Int32, memCom.Int16, memCom.Int8, memCom.Bool:\n\t\tswitch valueBytes {\n\t\tcase 8:\n\t\t\tintValue = int64(*(*int64)(valuePtr))\n\t\tcase 4:\n\t\t\tintValue = int64(*(*int32)(valuePtr))\n\t\tcase 2:\n\t\t\tintValue = int64(*(*int16)(valuePtr))\n\t\tcase 1:\n\t\t\tintValue = int64(*(*int8)(valuePtr))\n\t\t}\n\t\tresult = strconv.FormatInt(intValue, 10)\n\t\treturn &result\n\tcase memCom.Uint32, memCom.Uint16, 
memCom.BigEnum, memCom.Uint8, memCom.SmallEnum:\n\t\tswitch valueBytes {\n\t\tcase 4:\n\t\t\tintValue = int64(*(*uint32)(valuePtr))\n\t\tcase 2:\n\t\t\tintValue = int64(*(*uint16)(valuePtr))\n\t\tcase 1:\n\t\t\tintValue = int64(*(*uint8)(valuePtr))\n\t\t}\n\tcase memCom.UUID:\n\t\treturn formatWithDataValue(valuePtr, memCom.UUID)\n\tcase memCom.GeoPoint:\n\t\treturn formatWithDataValue(valuePtr, memCom.GeoPoint)\n\tdefault:\n\t\t// Should never happen.\n\t\treturn nil\n\t}\n\n\t// translate enum case back to string for unsigned types\n\tif intValue >= 0 && intValue < int64(len(enumReverseDict)) {\n\t\tresult = enumReverseDict[int(intValue)]\n\t} else if isTimeDimension {\n\t\tresult = formatTimeDimension(intValue, *meta, cache)\n\t} else {\n\t\tresult = strconv.FormatInt(intValue, 10)\n\t}\n\n\treturn &result\n}", "func Orientation(yPositiveDown bool, pts ...[2]float64) Winding {\n\tmul := int8(1)\n\tif yPositiveDown {\n\t\tmul = -1\n\t}\n\tswitch mul * Orient(pts...) {\n\tcase 0:\n\t\treturn Colinear\n\tcase 1:\n\t\treturn Clockwise\n\tdefault: // -1\n\t\treturn CounterClockwise\n\t}\n}", "func (g *Group) getBounds() (*algebra.Vector, *algebra.Vector) {\n\tvar min *algebra.Vector = nil\n\tvar max *algebra.Vector = nil\n\tif len(g.shapes) == 0 {\n\t\treturn min, max\n\t}\n\tminX := math.Inf(1)\n\tminY := math.Inf(1)\n\tminZ := math.Inf(1)\n\tmaxX := math.Inf(-1)\n\tmaxY := math.Inf(-1)\n\tmaxZ := math.Inf(-1)\n\n\tfor _, shape := range g.shapes {\n\t\ttempMin, tempMax := shape.GetBounds()\n\t\tif tempMin != nil {\n\t\t\tb := GetBoundsTransform(tempMin, tempMax, shape.GetTransform())\n\t\t\ttempMin = b.minimum\n\t\t\ttempMax = b.maximum\n\t\t\ttempMinX := tempMin.Get()[0]\n\t\t\ttempMinY := tempMin.Get()[1]\n\t\t\ttempMinZ := tempMin.Get()[2]\n\t\t\ttempMaxX := tempMax.Get()[0]\n\t\t\ttempMaxY := tempMax.Get()[1]\n\t\t\ttempMaxZ := tempMax.Get()[2]\n\t\t\tminX = math.Min(tempMinX, minX)\n\t\t\tminY = math.Min(tempMinY, minY)\n\t\t\tminZ = math.Min(tempMinZ, 
minZ)\n\t\t\tmaxX = math.Max(tempMaxX, maxX)\n\t\t\tmaxY = math.Max(tempMaxY, maxY)\n\t\t\tmaxZ = math.Max(tempMaxZ, maxZ)\n\t\t}\n\t}\n\tmin = algebra.NewPoint(minX, minY, minZ)\n\tmax = algebra.NewPoint(maxX, maxY, maxZ)\n\treturn min, max\n}", "func UniformQuantizedDotOutputQuantizationAxis(value int64) UniformQuantizedDotAttr {\n\treturn func(m optionalAttr) {\n\t\tm[\"output_quantization_axis\"] = value\n\t}\n}", "func (me TGlyphOrientationHorizontalValueType) String() string { return xsdt.String(me).String() }", "func (f *field) Column() int {\r\n\treturn f.x\r\n}", "func (bbs *BollingerBandsSeries) GetBoundedValues(index int) (x, y1, y2 float64) {\n\tif bbs.InnerSeries == nil {\n\t\treturn\n\t}\n\tif bbs.valueBuffer == nil || index == 0 {\n\t\tbbs.valueBuffer = seq.NewBufferWithCapacity(bbs.GetPeriod())\n\t}\n\tif bbs.valueBuffer.Len() >= bbs.GetPeriod() {\n\t\tbbs.valueBuffer.Dequeue()\n\t}\n\tpx, py := bbs.InnerSeries.GetValues(index)\n\tbbs.valueBuffer.Enqueue(py)\n\tx = px\n\n\tay := seq.New(bbs.valueBuffer).Average()\n\tstd := seq.New(bbs.valueBuffer).StdDev()\n\n\ty1 = ay + (bbs.GetK() * std)\n\ty2 = ay - (bbs.GetK() * std)\n\treturn\n}", "func getFinalPosition(commands []command) (int, int) {\n\thorizontal, depth := 0, 0\n\tfor _, cmd := range commands {\n\t\tswitch cmd.direction {\n\t\tcase \"forward\":\n\t\t\thorizontal += cmd.unit\n\t\tcase \"down\":\n\t\t\tdepth += cmd.unit\n\t\tcase \"up\":\n\t\t\tdepth -= cmd.unit\n\t\t}\n\t}\n\treturn horizontal, depth\n}", "func (e SeqDataAxis) C() C.cudnnSeqDataAxis_t { return C.cudnnSeqDataAxis_t(e) }", "func Adjustment(i, j int) int {\n\tif i > j {\n\t\treturn -1\n\t} else if i < j {\n\t\treturn 1\n\t}\n\treturn 0\n}", "func EncodeAxis(axis int) byte {\n\tvar symbol byte\n\tswitch {\n\tcase axis >= 0 && axis < alphabetLength:\n\t\tsymbol = byte(axis) + 'a'\n\tcase axis >= alphabetLength && axis < 2*alphabetLength:\n\t\tsymbol = byte(axis) - alphabetLength + 'A'\n\tdefault:\n\t\tpanic(\"sgf.EncodeAxis: axis 
out of ranges\")\n\t}\n\n\treturn symbol\n}", "func (l *LineChart) Xaxis() []float64 {\n\treturn l.xaxis\n}", "func UniformQuantizeQuantizationAxis(value int64) UniformQuantizeAttr {\n\treturn func(m optionalAttr) {\n\t\tm[\"quantization_axis\"] = value\n\t}\n}", "func (m *WorkbookRangeBorder) GetSideIndex()(*string) {\n val, err := m.GetBackingStore().Get(\"sideIndex\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*string)\n }\n return nil\n}", "func (s *Simplex) GetRanges() {\n\t/* b = S∗∆b + b∗ ≥ 0\n\tMEANING: S* : Slacks/artificial variables columns of final tableau\n\t\t\t b* : RH of final tableau\n\t*/\n\tif s.Status != \"OPTIMAL\" {\n\t\treturn\n\t}\n\tnonBasicVariables := s.getSlackVariables()\n\n\tvar S [][]float64\n\tfor _, nbv := range nonBasicVariables {\n\t\tS = append(S, s.getColumn(nbv)[1:])\n\t}\n\n\tvar bAsterisk [][]float64\n\tbAsterisk = append(bAsterisk, s.getColumn(s.ColumnsSize - 1)[1:]) //ignoring Z row\n\tbAsterisk = transpose(bAsterisk) //get column returns an array 1xm, transpose will return a matrix mx1\n\n\tvar deltas [][]float64\n\tvar operators [][]string\n\tfor j := 0; j < len(S); j++ {\n\t\tvar deltaB []float64\n\t\tvar deltaBOperator []string\n\t\tfor i := 0; i < len(S[0]); i++ {\n\n\t\t\tvalue := -bAsterisk[i][0] * 1 / S[j][i]\n\t\t\tif S[j][i] < 0.0 {\n\t\t\t\tdeltaBOperator = append(deltaBOperator, \"<=\")\n\t\t\t} else {\n\t\t\t\tdeltaBOperator = append(deltaBOperator, \">=\")\n\t\t\t}\n\t\t\tdeltaB = append(deltaB, value)\n\t\t}\n\t\tdeltas = append(deltas, deltaB)\n\t\toperators = append(operators, deltaBOperator)\n\t}\n\n\theader := \"TODO: INVERT INCREASE DECREASE IF MIN \\n \" + \"ROW\" + \"\\t\" + \"A.INC\" + \"\\t\" + \"A.DEC\"\n\tfmt.Println(header)\n\tfor i := 0; i < len(deltas); i++ {\n\t\tvar allowableIncrease, allowableDecrease float64\n\t\tallowableIncrease = math.Inf(1)\n\t\tallowableDecrease = math.Inf(-1)\n\t\tfor j := 0; j < len(deltas[0]); j++ {\n\t\t\ttemp := deltas[i][j]\n\t\t\tif 
operators[i][j] == \"<=\" {\n\t\t\t\tif temp < allowableIncrease {\n\t\t\t\t\tallowableIncrease = temp\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tif temp > allowableDecrease {\n\t\t\t\t\tallowableDecrease = temp\n\t\t\t\t}\n\t\t\t}\n\n\t\t}\n\t\tline := fmt.Sprintf(\"%d\", i+1) + \"\\t\" + fmt.Sprintf(\"%f\", allowableIncrease) + \"\\t\" + fmt.Sprintf(\"%f\", -1*allowableDecrease)\n\t\tfmt.Println(line)\n\t}\n}", "func (self *TraitPixbuf) GetColorspace() (return__ C.GdkColorspace) {\n\treturn__ = C.gdk_pixbuf_get_colorspace(self.CPointer)\n\treturn\n}", "func (me TgridOriginEnumType) IsUpperLeft() bool { return me == \"upperLeft\" }", "func (m *TransposableMatrix) transposeMapped2Base(xm, ym int) (xb, yb int) {\n\n\tlx := len(m.m) - 1\n\tly := len(m.m[0]) - 1\n\n\txb, yb = -1, -1\n\n\tswitch m.persp {\n\tcase 0:\n\t\txm += m.cx\n\t\tym += m.cy\n\t\tif ym > ly || xm > lx || ym < 0 || xm < 0 {\n\t\t\txb, yb = -1, -1\n\t\t\tbreak\n\t\t}\n\t\t// return m.m[xm][ym]\n\t\txb, yb = xm, ym\n\tcase 1:\n\t\txm -= m.cy\n\t\tym += m.cx\n\t\tif ym > lx || -xm > ly || ym < 0 || -xm < 0 {\n\t\t\txb, yb = -1, -1\n\t\t\tbreak\n\t\t}\n\t\t// return m.m[ym][-xm]\n\t\txb, yb = ym, -xm\n\tcase 2:\n\t\txm -= m.cx\n\t\tym -= m.cy\n\t\tif -xm > lx || -ym > ly || -xm < 0 || -ym < 0 {\n\t\t\txb, yb = -1, -1\n\t\t\tbreak\n\t\t}\n\t\t// return m.m[-xm][-ym]\n\t\txb, yb = -xm, -ym\n\tcase 3:\n\t\txm += m.cy\n\t\tym -= m.cx\n\t\tif -ym > lx || xm > ly || -ym < 0 || xm < 0 {\n\t\t\txb, yb = -1, -1\n\t\t\tbreak\n\t\t}\n\t\t// return m.m[-ym][xm]\n\t\txb, yb = -ym, xm\n\tdefault:\n\t\tpanic(\"allowed: 0 - North - 1 West - 2 South - 3 East\")\n\t}\n\n\treturn\n\n}", "func (enum Enum) Last() (int, string) {\n\tn := len(enum.items)\n\treturn n - 1, enum.items[n-1].value\n}", "func (o *Cell) GetPrevY() int { return o.Y }" ]
[ "0.63404655", "0.62930727", "0.62930727", "0.5801007", "0.54575765", "0.543033", "0.53913295", "0.5386404", "0.5381786", "0.5270984", "0.5190748", "0.51589876", "0.5095693", "0.50670105", "0.50371903", "0.49964777", "0.49689302", "0.4904455", "0.48489913", "0.48389292", "0.48376474", "0.48374248", "0.47837994", "0.47524133", "0.47285637", "0.47161686", "0.4684591", "0.46220815", "0.45721778", "0.45662665", "0.45620817", "0.45500413", "0.45284322", "0.45119703", "0.44798937", "0.4469343", "0.4464374", "0.4452041", "0.44504127", "0.4437067", "0.44289616", "0.44009677", "0.43971094", "0.4392358", "0.43909085", "0.4389717", "0.43863776", "0.43770325", "0.4362942", "0.43390635", "0.43208766", "0.4307965", "0.43036768", "0.4294181", "0.4283507", "0.42797872", "0.4275741", "0.42740282", "0.42733887", "0.42724365", "0.42666405", "0.42596945", "0.4256746", "0.42360017", "0.42342937", "0.42309433", "0.4222033", "0.42009348", "0.41885203", "0.4167504", "0.41618076", "0.41606602", "0.41535416", "0.4153353", "0.4145738", "0.4130636", "0.41292918", "0.41079533", "0.4104291", "0.40842828", "0.40799376", "0.4070629", "0.40617004", "0.40593216", "0.40503544", "0.4039169", "0.40377262", "0.4036765", "0.40364042", "0.40273097", "0.4026251", "0.40232152", "0.40196374", "0.4015381", "0.4007138", "0.4002722", "0.3986184", "0.3978096", "0.39759308", "0.39749968" ]
0.6370387
0
intercepts returns where the segment intercepts segment o. If there is no interception then (0, 0) will be returned. wirelen is not provided.
func (v segment) intercepts(o segment) point { // With the assumption that no interceptions occur when segments are // parallel, and that segments always move either horizontally or // vertically (not both), we can pretty easily check for interceptions. // // First find the values where interception could occur, and what axis for // both segments are changing. I.e. if the segments are horizontal // or vertical. a, axAxis := v.unchangingAxis() b, bxAxis := o.unchangingAxis() if axAxis == bxAxis { // We're assuming that they can't overlap // when they are parallel return point{} } // Check if the first value (x or y) is on the interval of the // same axis of the other segment. Do this for the other value (axis) too. var aCanCollide bool if axAxis { aCanCollide = inRange(a, o.from.x, o.to.x) } else { aCanCollide = inRange(a, o.from.y, o.to.y) } var bCanCollide bool if bxAxis { bCanCollide = inRange(b, v.from.x, v.to.x) } else { bCanCollide = inRange(b, v.from.y, v.to.y) } // If both axes are in range then they collide if aCanCollide && bCanCollide { // Check if a is an x- or y-value if axAxis { return point{x: a, y: b} } return point{x: b, y: a} } return point{x: 0, y: 0} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (w *wire) interceptPoints(o wire) []point {\n\tvar interceptPoints []point\n\tfor i := 1; i < len(w.points); i++ {\n\t\tv1 := segment{\n\t\t\tfrom: w.points[i-1],\n\t\t\tto: w.points[i],\n\t\t}\n\t\tfor u := 1; u < len(o.points); u++ {\n\t\t\tv2 := segment{\n\t\t\t\tfrom: o.points[u-1],\n\t\t\t\tto: o.points[u],\n\t\t\t}\n\t\t\tintercept := v1.intercepts(v2)\n\t\t\tif intercept.x != 0 && intercept.y != 0 {\n\t\t\t\t// Calculate total wire length (both wires combined)\n\t\t\t\tintercept.wireLen = v1.from.wireLen + intercept.distanceToPoint(v1.from) +\n\t\t\t\t\tv2.from.wireLen + intercept.distanceToPoint(v2.from)\n\t\t\t\tinterceptPoints = append(interceptPoints, intercept)\n\t\t\t}\n\t\t}\n\t}\n\treturn interceptPoints\n}", "func (l line2) SlopeIntercept() (float64, float64) {\n\tslope := (l.end.y - l.start.y) / (l.end.x - l.start.x)\n\tintercept := l.start.y - slope*l.start.x\n\treturn slope, intercept\n}", "func (t *Transcript) WhichIntronIntersect(reg Coor) []int {\n\tresult := []int{}\n\tfor i, intron := range t.Introns {\n\t\tif intron.Intersect(reg) {\n\t\t\tresult = append(result, i)\n\t\t}\n\t}\n\treturn result\n}", "func (t *Transcript) WhichExonIntersect(reg Coor) []int {\n\tresult := []int{}\n\tfor i, exon := range t.Exons {\n\t\tif exon.Intersect(reg) {\n\t\t\tresult = append(result, i)\n\t\t}\n\t}\n\treturn result\n}", "func getSlopeIntercept(p1 Point, p2 Point) (slope float64, intercept float64) {\n\tslope = (float64(p2.Y) - float64(p1.Y)) / (float64(p2.X) - float64(p1.X))\n\tintercept = float64(p1.Y) - slope*float64(p1.X)\n\n\treturn slope, intercept\n}", "func (cpu *Mos6502) ind() uint8 {\n\tlowByte := cpu.read(cpu.pc)\n\tcpu.pc++\n\n\thighByte := cpu.read(cpu.pc)\n\tcpu.pc++\n\n\tpointer := (word(highByte) << 8) | word(lowByte)\n\n\tvar a word\n\tif lowByte == 0xff {\n\t\ta = (word(cpu.read(pointer&0xFF00)) << 8) | word(cpu.read(pointer))\n\t} else {\n\t\ta = (word(cpu.read(pointer+1)) << 8) | 
word(cpu.read(pointer))\n\t}\n\tcpu.addressAbsolute = a\n\n\treturn 0\n}", "func SlopeInd(m, xc, yc, xlen float64, lbl string, flip, xlog, ylog bool, args, argsLbl *A) {\n\tif args == nil {\n\t\targs = &A{C: \"k\"}\n\t}\n\targs.NoClip = true\n\tl := 0.5 * xlen\n\tx := []float64{xc - l, xc + l, xc + l, xc - l}\n\ty := []float64{yc - m*l, yc - m*l, yc + m*l, yc - m*l}\n\tif flip {\n\t\tx[1] = xc - l\n\t\ty[1] = yc + m*l\n\t}\n\tdx, dy := x[2]-x[0], y[2]-y[0]\n\td := 0.03 * math.Sqrt(dx*dx+dy*dy)\n\txm := xc - l - d\n\txp := xc + l + d\n\tym := yc + m*l - d\n\typ := yc + m*l + d\n\tyr := yc - m*l + d\n\tys := yc - m*l - d\n\tif xlog {\n\t\tfor i := 0; i < 4; i++ {\n\t\t\tx[i] = math.Pow(10.0, x[i])\n\t\t}\n\t\txc = math.Pow(10.0, xc)\n\t\txm = math.Pow(10.0, xm)\n\t\txp = math.Pow(10.0, xp)\n\t}\n\tif ylog {\n\t\tfor i := 0; i < 4; i++ {\n\t\t\ty[i] = math.Pow(10.0, y[i])\n\t\t}\n\t\tyc = math.Pow(10.0, yc)\n\t\tym = math.Pow(10.0, ym)\n\t\typ = math.Pow(10.0, yp)\n\t\tyr = math.Pow(10.0, yr)\n\t\tys = math.Pow(10.0, ys)\n\t}\n\tPlot(x, y, args)\n\tif lbl != \"\" {\n\t\tif argsLbl == nil {\n\t\t\targsLbl = &A{C: \"k\", Fsz: 6}\n\t\t}\n\t\targsLbl.NoClip = true\n\t\tif flip {\n\t\t\targsLbl.Ha = \"center\"\n\t\t\tif m < 0 {\n\t\t\t\targsLbl.Va = \"top\"\n\t\t\t\tText(xc, ym, \"1\", argsLbl)\n\t\t\t} else {\n\t\t\t\targsLbl.Va = \"bottom\"\n\t\t\t\tText(xc, yp, \"1\", argsLbl)\n\t\t\t}\n\t\t\targsLbl.Ha = \"right\"\n\t\t\targsLbl.Va = \"center\"\n\t\t\tText(xm, yc, lbl, argsLbl)\n\t\t} else {\n\t\t\targsLbl.Ha = \"center\"\n\t\t\tif m < 0 {\n\t\t\t\targsLbl.Va = \"bottom\"\n\t\t\t\tText(xc, yr, \"1\", argsLbl)\n\t\t\t} else {\n\t\t\t\targsLbl.Va = \"top\"\n\t\t\t\tText(xc, ys, \"1\", argsLbl)\n\t\t\t}\n\t\t\targsLbl.Ha = \"left\"\n\t\t\targsLbl.Va = \"center\"\n\t\t\tText(xp, yc, lbl, argsLbl)\n\t\t}\n\t}\n}", "func PSRLO(i, x operand.Op) { ctx.PSRLO(i, x) }", "func (s *lineSegment) overlapsWith(o *lineSegment) *coordinate {\n\n\t// If both are in the same orientation, 
we have division by zero.\n\tif s.vertical && o.vertical || !s.vertical && !o.vertical {\n\t\treturn nil\n\t}\n\t// Create a candidate for overlapping\n\t// https://math.stackexchange.com/questions/375083/given-coordinates-of-beginning-and-end-of-two-intersecting-line-segments-how-do\n\tx := -1 * ((s.xStart-s.xEnd)*(o.xStart*o.yEnd-o.xEnd*o.yStart) - (o.xEnd-o.xStart)*(s.xEnd*s.yStart-s.xStart*s.yEnd)) / ((o.yStart-o.yEnd)*(s.xStart-s.xEnd) - (o.xEnd-o.xStart)*(s.yEnd-s.yStart))\n\ty := -1 * (o.xStart*o.yEnd*s.yStart - o.xStart*o.yEnd*s.yEnd - o.xEnd*o.yStart*s.yStart + o.xEnd*o.yStart*s.yEnd - o.yStart*s.xStart*s.yEnd + o.yStart*s.xEnd*s.yStart + o.yEnd*s.xStart*s.yEnd - o.yEnd*s.xEnd*s.yStart) / (-1*o.xStart*s.yStart + o.xStart*s.yEnd + o.xEnd*s.yStart - o.xEnd*s.yEnd + o.yStart*s.xStart - o.yStart*s.xEnd - o.yEnd*s.xStart + o.yEnd*s.xEnd)\n\t// we never match on (0,0)\n\tif x == 0 && y == 0 {\n\t\treturn nil\n\t}\n\ttest := coordinate{\n\t\tx: x,\n\t\ty: y,\n\t}\n\tif s.includesCoordinate(&test) && o.includesCoordinate(&test) {\n\t\treturn &test\n\t}\n\treturn nil\n}", "func (vm *VM) opIn(instr []uint16) int {\n\ta := instr[0] - 32768\n\tchr, err := vm.r.ReadByte()\n\tif err != nil {\n\t\tvm.Status = err.Error()\n\t\treturn 0\n\t}\n\tvm.registers[a] = uint16(chr)\n\treturn 2\n}", "func (cpu *Cpu) ind() int {\n\taddr := cpu.mem.Read(cpu.pc) & 0xFF\n\tcpu.pc++\n\n\treturn cpu.mem.Read(addr) | (cpu.mem.Read(addr+1) << 8)\n}", "func (cpu *Cpu) indy() int {\n\taddr := cpu.mem.Read(cpu.pc) & 0xFF\n\tcpu.pc++\n\n\tbefore := cpu.mem.Read(cpu.mem.Read(addr) | (cpu.mem.Read(addr+1) << 8))\n\tafter := before + cpu.y\n\n\tcpu.pageBoundaryCrossed(before, after)\n\n\treturn after\n}", "func spanOf(at positioner) posSpan {\n\tswitch x := at.(type) {\n\tcase nil:\n\t\tpanic(\"nil positioner\")\n\tcase posSpan:\n\t\treturn x\n\tcase ast.Node:\n\t\tpos := x.Pos()\n\t\treturn posSpan{pos, pos, x.End()}\n\tcase *operand:\n\t\tif x.expr != nil {\n\t\t\tpos := x.Pos()\n\t\t\treturn 
posSpan{pos, pos, x.expr.End()}\n\t\t}\n\t\treturn posSpan{nopos, nopos, nopos}\n\tdefault:\n\t\tpos := at.Pos()\n\t\treturn posSpan{pos, pos, pos}\n\t}\n}", "func Dotoffset(n *Node, oary []int64, nn **Node) int", "func (sr SplitRegion) Influence(opInfluence OpInfluence, region *core.RegionInfo) {\n\tfor _, p := range region.Peers {\n\t\tinf := opInfluence.GetStoreInfluence(p.GetStoreId())\n\t\tinf.RegionCount++\n\t\tif region.Leader.GetId() == p.GetId() {\n\t\t\tinf.LeaderCount++\n\t\t}\n\t}\n}", "func (a *ALU) InstrXCHInd(R1 bool) {\n\tvar regLoc uint8\n\tvar loc uint8\n\tif R1 {\n\t\tregLoc = a.regAddr(1)\n\t} else {\n\t\tregLoc = a.regAddr(0)\n\t}\n\tloc = a.InternalRAM[regLoc]\n\ta.Accum, a.InternalRAM[loc] = a.InternalRAM[loc], a.Accum\n}", "func iIntersection() {\n\n\tfirst := Tuple{0.00, 0, 0}\n\tiInput = append(iInput, first)\n\tcount := 0\n\tfor k := 0; k < len(ArrInput); k++ {\n\t\tfor jj := 0; jj < ArrInput[k].dist; jj++ {\n\t\t\trads := xyInput[k+1].rad\n\t\t\tgx := iInput[count].gx + (int(math.Sin(rads)) * 1)\n\t\t\tgy := iInput[count].gy + (int(math.Cos(rads)) * 1)\n\n\t\t\tnext := Tuple{rads, gx, gy}\n\t\t\tiInput = append(iInput, next)\n\t\t\tcount++\n\n\t\t}\n\n\t}\n\n\tlog.Println(strconv.Itoa(count))\n}", "func (i *IE) OffendingIE() (uint16, error) {\n\tif i.Type != OffendingIE {\n\t\treturn 0, &InvalidTypeError{Type: i.Type}\n\t}\n\n\tif len(i.Payload) < 2 {\n\t\treturn 0, io.ErrUnexpectedEOF\n\t}\n\n\treturn binary.BigEndian.Uint16(i.Payload[0:2]), nil\n}", "func GetOverlappedIds(c *gin.Context) {}", "func WithInterceptor(v Interceptor) (p Pair) {\n\treturn Pair{Key: \"interceptor\", Value: v}\n}", "func (i I4) SubnetOffset(o int) I4 {\n\treturn I4{ip: i.ip + uint32(o*i.NumIPs()), maskBits: i.maskBits}\n}", "func posInlIndex(xpos src.XPos) int {\n\tpos := Ctxt.PosTable.Pos(xpos)\n\tif b := pos.Base(); b != nil {\n\t\tii := b.InliningIndex()\n\t\tif ii >= 0 {\n\t\t\treturn ii\n\t\t}\n\t}\n\treturn -1\n}", "func (this Interceptor) Intercept(url 
string, exec rack.Middleware) error {\n\tif this[url] != nil {\n\t\treturn PreExistingInterceptorError{url}\n\t}\n\tthis[url] = exec\n\treturn nil\n}", "func OnesCount8Before(n uint8, iBit uint) int {\n\treturn gobits.OnesCount8(n & ((uint8(1) << iBit) - 1))\n}", "func (a line2) IntersectPoint(b line2) (vector2, bool) {\n\tswaped := false\n\tif math.Abs(a.end.y-a.start.y) > math.Abs(a.end.x-a.start.x) {\n\t\tswaped = true\n\t\ta.start.x, a.start.y = a.start.y, a.start.x\n\t\ta.end.x, a.end.y = a.end.y, a.end.x\n\t\tb.start.x, b.start.y = b.start.y, b.start.x\n\t\tb.end.x, b.end.y = b.end.y, b.end.x\n\t}\n\tif a.start.x > a.end.x {\n\t\ta.start, a.end = a.end, a.start\n\t}\n\tif b.start.x > b.end.x {\n\t\tb.start, b.end = b.end, b.start\n\t}\n\t// we are interested in the 'common' parts.\n\tif a.start.x > b.end.x || b.start.x > a.end.x {\n\t\treturn vector2{}, false\n\t}\n\tsa, ia := a.SlopeIntercept()\n\t// shear b to y direction.\n\tb.start.y = b.start.y - (sa * b.start.x) - ia\n\tb.end.y = b.end.y - (sa * b.end.x) - ia\n\tif math.Signbit(b.start.y) == math.Signbit(b.end.y) {\n\t\treturn vector2{}, false\n\t}\n\t// find x if y == 0\n\ttb := math.Abs(b.start.y) / math.Abs(b.end.y-b.start.y)\n\tx := tb*(b.end.x-b.start.x) + b.start.x\n\tif x < a.start.x || a.end.x < x {\n\t\treturn vector2{}, false\n\t}\n\ty := sa*x + ia\n\tif swaped {\n\t\tx, y = y, x\n\t}\n\treturn vector2{x, y}, true\n}", "func (c *Circle) IntersectSegment(s *Segment) *Intersection {\n\t// intersect as if it is a line\n\tintersection := c.IntersectLine(NewLineFromSegment(s))\n\t// based on number of Solutions...\n\tif intersection.SolutionNumber == 0 {\n\t\treturn intersection\n\t}\n\tif intersection.SolutionNumber == 1 {\n\t\tpt := intersection.Solutions[0]\n\t\tif pt.InSegmentRange(s) {\n\t\t\treturn intersection\n\t\t}\n\t\treturn NewIntersection()\n\t}\n\t// solution number == 2\n\tpt1 := intersection.Solutions[0]\n\tpt2 := intersection.Solutions[1]\n\tif pt1.InSegmentRange(s) {\n\t\tif 
pt2.InSegmentRange(s) {\n\t\t\treturn intersection\n\t\t}\n\t\treturn NewIntersection(pt1)\n\t}\n\tif pt2.InSegmentRange(s) {\n\t\treturn NewIntersection(pt2)\n\t}\n\treturn NewIntersection()\n}", "func (self *Chromosome) IndexOf(id int) int {\n\tfor i, val := range self.Locations {\n\t\tif val.Id == id {\n\t\t\treturn i\n\t\t}\n\n\t}\n\n\treturn -1\n}", "func (c *Context) PSRLO(i, x operand.Op) {\n\tc.addinstruction(x86.PSRLO(i, x))\n}", "func (swagger *MgwSwagger) GetOperationInterceptors(apiInterceptor InterceptEndpoint, resourceInterceptor InterceptEndpoint, operations []*Operation, isIn bool) map[string]InterceptEndpoint {\n\tinterceptorOperationMap := make(map[string]InterceptEndpoint)\n\n\tfor _, op := range operations {\n\t\textensionName := constants.XWso2RequestInterceptor\n\t\t// first get operational policies\n\t\toperationInterceptor := op.GetCallInterceptorService(isIn)\n\t\t// if operational policy interceptor not given check operational level swagger extension\n\t\tif !operationInterceptor.Enable {\n\t\t\tif !isIn {\n\t\t\t\textensionName = constants.XWso2ResponseInterceptor\n\t\t\t}\n\t\t\toperationInterceptor = swagger.GetInterceptor(op.GetVendorExtensions(), extensionName, constants.OperationLevelInterceptor)\n\t\t}\n\t\toperationInterceptor.ClusterName = op.iD\n\t\t// if operation interceptor not given\n\t\tif !operationInterceptor.Enable {\n\t\t\t// assign resource level interceptor\n\t\t\tif resourceInterceptor.Enable {\n\t\t\t\toperationInterceptor = resourceInterceptor\n\t\t\t} else if apiInterceptor.Enable {\n\t\t\t\t// if resource interceptor not given add api level interceptor\n\t\t\t\toperationInterceptor = apiInterceptor\n\t\t\t}\n\t\t}\n\t\t// add operation to the list only if an interceptor is enabled for the operation\n\t\tif operationInterceptor.Enable {\n\t\t\tinterceptorOperationMap[strings.ToUpper(op.method)] = operationInterceptor\n\t\t}\n\t}\n\treturn interceptorOperationMap\n\n}", "func (tr *trooper) loc() (x, y, z float64) { 
return tr.part.At() }", "func GetIndVisitor (visitor [] Visitor, finder string) int {\n\tfor i := 0; i < len(visitor); i++ {\n\t\tif visitor[i].Visitorid_string == finder {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func IsCallout(data []byte) (id []byte, consumed int) {\n\tif !bytes.HasPrefix(data, []byte(\"<<\")) {\n\t\treturn nil, 0\n\t}\n\tstart := 2\n\tend := bytes.Index(data[start:], []byte(\">>\"))\n\tif end < 0 {\n\t\treturn nil, 0\n\t}\n\n\tb := data[start : start+end]\n\tb = bytes.TrimSpace(b)\n\ti, err := strconv.Atoi(string(b))\n\tif err != nil {\n\t\treturn nil, 0\n\t}\n\tif i <= 0 {\n\t\treturn nil, 0\n\t}\n\treturn b, start + end + 2 // 2 for >>\n}", "func stealPosition(r int, h int, opRow int, opHole int, opCount int) (p *Position) {\n\tp = ZeroPosition()\n\tp.Row[opRow].Items[opHole] = -opCount\n\tp.Row[r].Items[h] = -1\n\tp.near().Items[0] = opCount + 1\n\treturn\n}", "func (s1 Segment) Intersect(e2 Edge) (Vector, bool) {\n\treturn Intersect(s1, e2)\n}", "func getLayerFirstIx(layer float64) int32 {\r\n\treturn int32((math.Pow(4, layer) - 1) / 3)\r\n}", "func (i *Interest) HopLimit() *uint8 {\n\treturn i.hopLimit\n}", "func (l1 *Line) IntersectSegment(l2 *Line) (Position, error) {\n\tp, err := l1.Intersect(l2)\n\tif err != nil {\n\t\treturn p, err\n\t}\n\tif !l1.Contains(p) || !l2.Contains(p) {\n\t\treturn p, fmt.Errorf(\"Not intersecting segments\")\n\t}\n\treturn p, nil\n}", "func (BlobRangeFinderImpl) getIntersection(r ds3Models.Range, offset int64, length int64) ds3Models.Range {\n var intersection ds3Models.Range\n if r.Start < offset {\n intersection.Start = offset\n } else {\n intersection.Start = r.Start\n }\n\n if r.End < offset + length - 1 {\n intersection.End = r.End\n } else {\n intersection.End = offset + length - 1\n }\n return intersection\n}", "func (r *Router) AppendInterceptor(i func(w http.ResponseWriter, r *http.Request, next http.HandlerFunc)) {\n\tif i == nil {\n\t\treturn\n\t}\n\tr.interceptors = append(r.interceptors, 
i)\n}", "func (i *Result) Intersection() []geom.Coord {\n\treturn i.intersection\n}", "func (s segment) responsibilityPosition(pos float64) (float64, float64) {\n\tif pos < 0.0 {\n\t\tpos = 0.0\n\t}\n\tif pos > float64(s.length()) {\n\t\tpos = float64(s.length())\n\t}\n\tx := float64(s.start.position.X) + float64(s.start.direction.X)*pos\n\ty := float64(s.start.position.Y) + float64(s.start.direction.Y)*pos\n\treturn x, y\n}", "func (p Pipeline) At(i int) OpUnion { return p.Operations[i] }", "func (o *ReceiptResponse) GetBlockIndex() uint32 {\n\tif o == nil {\n\t\tvar ret uint32\n\t\treturn ret\n\t}\n\n\treturn o.BlockIndex\n}", "func (o *WObj) GetMiddle() (float64, float64, int8) {\n\tpnt := o.Hitbox.GetMiddle()\n\treturn pnt.X, pnt.Y, o.layer\n}", "func (ob *ObservableArray) IndexOf(data interface{}) int {\n\treturn ob.o.Call(\"indexOf\", data).Int()\n}", "func (v *Layer) getCompareIndexes() (bottomTreeStart, bottomTreeStop, topTreeStart, topTreeStop int) {\n\tbottomTreeStart = v.CompareStartIndex\n\ttopTreeStop = v.LayerIndex\n\n\tif v.LayerIndex == v.CompareStartIndex {\n\t\tbottomTreeStop = v.LayerIndex\n\t\ttopTreeStart = v.LayerIndex\n\t} else if v.CompareMode == CompareLayer {\n\t\tbottomTreeStop = v.LayerIndex - 1\n\t\ttopTreeStart = v.LayerIndex\n\t} else {\n\t\tbottomTreeStop = v.CompareStartIndex\n\t\ttopTreeStart = v.CompareStartIndex + 1\n\t}\n\n\treturn bottomTreeStart, bottomTreeStop, topTreeStart, topTreeStop\n}", "func getMiddle(x int, y int) int {\n\tif x > y {\n\t\treturn ((x - y) / 2) + y\n\t} else {\n\t\treturn ((y - x) / 2) + x\n\t}\n}", "func (oe *OraErr) Offset() int { return oe.offset }", "func (s *Serverus) ChainInterceptors(inter interface{}) {}", "func C_spatial_segment(sted, prov int) int {\n\tswitch {\n\tcase sted == 0:\n\t\treturn 0\n\tcase sted == 1:\n\t\treturn 1\n\tcase sted == 2:\n\t\treturn 2\n\tcase prov == 4 || prov == 9 || prov == 11 || prov == 12:\n\t\treturn 4\n\tdefault:\n\t\treturn 3\n\t}\n}", "func 
getMatchingBracketIndex(op string, brindex int) (index int) {\n\topenBracketCount := 0\n\tfor i, c := range op[brindex:] {\n\t\tif c == '(' {\n\t\t\topenBracketCount++\n\t\t} else if c == ')' {\n\t\t\topenBracketCount--\n\t\t}\n\n\t\t// if the bracket count is 0, it means the bracket requested\n\t\t// was closed (or never opened)\n\t\tif openBracketCount == 0 {\n\t\t\treturn i\n\t\t}\n\t}\n\n\t// nothing was found\n\treturn 0\n}", "func (bi *BaseInstance) Inbound() *dpdk.Ring {\n\tif bi.input2 != nil {\n\t\treturn bi.input2\n\t}\n\treturn bi.input\n}", "func (a *server) GetIntersectingPath(ir *pb.IntersectionRequest) (*pb.IntersectionResponse, error) {\n\tlog.Debug(\"Looing for intersection for \", ir)\n\tif ir.Staleness == 0 {\n\t\tir.Staleness = 60\n\t}\n\tiq := types.IntersectionQuery{\n\t\tAddr: ir.Address,\n\t\tDst: ir.Dest,\n\t\tSrc: ir.Src,\n\t\tStale: time.Duration(ir.Staleness) * time.Minute,\n\t\tAlias: ir.UseAliases,\n\t\tIgnoreSource: ir.IgnoreSource,\n\t}\n\tres, err := a.opts.trs.FindIntersectingTraceroute(iq)\n\tlog.Debug(\"FindIntersectingTraceroute resp \", res)\n\tif err != nil {\n\t\tif err != repo.ErrNoIntFound {\n\t\t\tlog.Error(err)\n\t\t\treturn nil, err\n\t\t}\n\t\ttoken, err := a.tc.Add(ir)\n\t\tvar iresp *pb.IntersectionResponse\n\t\tif err != nil {\n\t\t\tlog.Error(err)\n\t\t\tiresp = &pb.IntersectionResponse{\n\t\t\t\tType: pb.IResponseType_ERROR,\n\t\t\t\tError: err.Error(),\n\t\t\t}\n\t\t} else {\n\t\t\tiresp = &pb.IntersectionResponse{\n\t\t\t\tType: pb.IResponseType_TOKEN,\n\t\t\t\tToken: token,\n\t\t\t}\n\t\t}\n\n\t\tgo a.fillAtlas(ir.Address, ir.Dest, ir.Staleness)\n\t\treturn iresp, nil\n\t}\n\tintr := &pb.IntersectionResponse{\n\t\tType: pb.IResponseType_PATH,\n\t\tPath: res,\n\t}\n\treturn intr, nil\n}", "func getLineParams(p1, p2 Point) (sT slopeType, slope, intercept float64) {\n\tif p1.X == p2.X {\n\t\t// Check for infinite slope.\n\t\tif p2.Y > p1.Y {\n\t\t\tsT = INFUP\n\t\t} else {\n\t\t\tsT = INFDOWN\n\t\t}\n\n\t\tslope, 
intercept = 0, 0\n\t} else if p1.Y == p2.Y {\n\t\t// check for zero slope\n\t\tif p2.X > p1.X {\n\t\t\tsT = ZERORIGHT\n\t\t} else {\n\t\t\tsT = ZEROLEFT\n\t\t}\n\n\t\tslope, intercept = 0, p1.Y\n\t} else {\n\t\t// 4 classifications of non infinite slope based\n\t\t// on the relative positions of p1 and p2\n\t\tslope, intercept = getSlopeIntercept(p1, p2)\n\t\tif p1.X < p2.X {\n\t\t\tif slope > 0 {\n\t\t\t\tsT = POSRIGHT\n\t\t\t} else {\n\t\t\t\tsT = NEGRIGHT\n\t\t\t}\n\t\t} else {\n\t\t\tif slope > 0 {\n\t\t\t\tsT = POSLEFT\n\t\t\t} else {\n\t\t\t\tsT = NEGLEFT\n\t\t\t}\n\t\t}\n\t}\n\n\treturn sT, slope, intercept\n}", "func (g *Gene) IntervalOfExons() []Coor {\n\tmerged := g.MergeExons()\n\treturn IntervalRegions(merged)\n}", "func (ff *fftag) at(id int) (x, y int) { return id / ff.msize, id % ff.msize }", "func (this *satcluster) getLocation() vectors.Vector2 {\n\tif exestate.OnError(this) {\n\t\treturn vectors.GetEmptyVector2()\n\t}\n\n\t//Interseccion entre dos satelites\n\tpointA, pointB, state := geometry.GetCirclesIntersections(this.getAt(0), this.getAt(1))\n\n\tif !state.IsOk() {\n\t\tthis.RegisterState(state)\n\t\treturn vectors.GetEmptyVector2()\n\t}\n\n\tif pointA.IsEmpty() && pointB.IsEmpty() {\n\t\tthis.RegisterState(exestate.ControlledError(\"No se pudo triengular, no hay interseccion (1) (satellities.satcluster.getLocation)\"))\n\t\treturn vectors.GetEmptyVector2()\n\t}\n\n\tvar intersectsA = (pointA.IsEmpty() == false)\n\tvar intersectsB = (pointB.IsEmpty() == false)\n\n\t//\tequals, _ := vectors.Equals(pointA, pointB)\n\n\t//Para los restantes satelites se verifican las distancias a los puntos de la interseccion\n\tif this.count() > 2 {\n\t\tfor i := 2; i < this.count(); i++ {\n\n\t\t\tintersectsA = intersectsA && !(math.Abs(pointA.DistanceTo(this.getAt(i).Pos)-this.getAt(i).Distance) > 0.005)\n\t\t\tintersectsB = intersectsB && !(math.Abs(pointB.DistanceTo(this.getAt(i).Pos)-this.getAt(i).Distance) > 0.005)\n\n\t\t\tif intersectsA == false && 
intersectsB == false {\n\t\t\t\tthis.RegisterState(exestate.ControlledError(\"No se pudo triangular, no hay interseccion (2) (satellities.satcluster.getLocation)\"))\n\t\t\t\treturn vectors.GetEmptyVector2()\n\t\t\t}\n\t\t}\n\t}\n\n\tif intersectsA && intersectsB {\n\t\tthis.RegisterState(exestate.ControlledError(\"No se pudo triangular, mas de un punto (satellities.satcluster.getLocation)\"))\n\t\treturn vectors.GetEmptyVector2()\n\t}\n\n\tif intersectsA {\n\t\tpointA.Round()\n\t\treturn pointA\n\t} else if intersectsB {\n\t\tpointB.Round()\n\t\treturn pointB\n\t} else {\n\t\tthis.RegisterState(exestate.ControlledError(\"No se pudo triangular\"))\n\t\treturn vectors.GetEmptyVector2()\n\t}\n}", "func (orth *Orthotope) Intersects(o *Orthotope) int32 {\n\tinT := int32(0)\n\toutT := int32(math.MaxInt32)\n\tfor index, p0 := range o.Point {\n\t\tp1 := o.Delta[index] + p0\n\n\t\tif orth.Delta[index] == 0 {\n\t\t\tif orth.Point[index] < p0 || p1 < orth.Point[index] {\n\t\t\t\treturn -1\n\t\t\t}\n\t\t} else {\n\t\t\tif orth.Delta[index] < 0 {\n\t\t\t\t// Swap p0 and p1 for negative directions.\n\t\t\t\tp0, p1 = p1, p0\n\t\t\t}\n\t\t\tp0T := ((p0 - orth.Point[index]) << ACCURACY) / orth.Delta[index]\n\t\t\tinT = disc.Max(inT, p0T)\n\n\t\t\tp1T := ((p1 - orth.Point[index]) << ACCURACY) / orth.Delta[index]\n\t\t\toutT = disc.Min(outT, p1T)\n\t\t}\n\t}\n\n\tif inT < outT && inT >= 0 {\n\t\treturn inT\n\t}\n\treturn -1\n}", "func (c *UrlReplaceHandler) Intercept(pipeline Pipeline, middlewareIndex int, req *http.Request) (*http.Response, error) {\n\treqOption, ok := req.Context().Value(urlReplaceOptionKey).(urlReplaceOptionsInt)\n\tif !ok {\n\t\treqOption = &c.options\n\t}\n\n\tobsOptions := GetObservabilityOptionsFromRequest(req)\n\tctx := req.Context()\n\tvar span trace.Span\n\tif obsOptions != nil {\n\t\tctx, span = otel.GetTracerProvider().Tracer(obsOptions.GetTracerInstrumentationName()).Start(ctx, 
\"UrlReplaceHandler_Intercept\")\n\t\tspan.SetAttributes(attribute.Bool(\"com.microsoft.kiota.handler.url_replacer.enable\", true))\n\t\tdefer span.End()\n\t\treq = req.WithContext(ctx)\n\t}\n\n\tif !reqOption.IsEnabled() || len(reqOption.GetReplacementPairs()) == 0 {\n\t\treturn pipeline.Next(req, middlewareIndex)\n\t}\n\n\treq.URL.Path = ReplacePathTokens(req.URL.Path, reqOption.GetReplacementPairs())\n\n\tif span != nil {\n\t\tspan.SetAttributes(attribute.String(\"http.request_url\", req.RequestURI))\n\t}\n\n\treturn pipeline.Next(req, middlewareIndex)\n}", "func (c testCase) offset() int {\n\tswitch x := c.substrOrOffset.(type) {\n\tcase int:\n\t\treturn x\n\tcase string:\n\t\ti := strings.Index(c.content, x)\n\t\tif i < 0 {\n\t\t\tpanic(fmt.Sprintf(\"%q does not contain substring %q\", c.content, x))\n\t\t}\n\t\treturn i\n\t}\n\tpanic(\"substrOrIndex must be an integer or string\")\n}", "func (lgb *LocationGroupBy) IntsX(ctx context.Context) []int {\n\tv, err := lgb.Ints(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (ch chain) responsibilityPosition(pos float64) (float64, float64, bool) {\n\tlength := 0\n\tfor i := range ch.segments {\n\t\tif float64(length+ch.segments[i].length()) > pos {\n\t\t\tx, y := ch.segments[i].responsibilityPosition(pos - float64(length))\n\t\t\treturn x, y, false\n\t\t}\n\t\tlength = length + ch.segments[i].length()\n\t}\n\tlastSegment := ch.segments[len(ch.segments)-1]\n\tx, y := lastSegment.responsibilityPosition(pos - float64(length) + float64(lastSegment.length()))\n\treturn x, y, true\n}", "func instrMVI(op uint8, c *CPU) uint64 {\n\tdst := insArg3b(op)\n\n\tinsSetreg8(c, dst, insArg8(c))\n\n\tif dst == M {\n\t\treturn 10\n\t}\n\treturn 7\n}", "func (r Ray) LineIntersect(s Ray) (point Vec) {\n\t/*\n\t\tequation is derived from system of equations with\n\t\ttwo unknowns where equations are r.Formula and s.Formula\n\t\tfrom which we can derive x of intersection point\n\n\t\tstarting with:\n\t\t\tr.V.Y*X - 
r.V.X*Y - r.V.Y*r.O.X + r.V.X*r.O.Y = 0\n\t\tand:\n\t\t\ts.V.Y*X - s.V.X*Y - s.V.Y*s.O.X + s.V.X*s.O.Y = 0\n\n\t\tget y from first one:\n\t\t\tr.V.Y*X - r.V.Y*r.O.X + r.V.X*r.O.Y = r.V.X*Y\n\t\t\t(r.V.Y*X - r.V.Y*r.O.X + r.V.X*r.O.Y)/r.V.X = Y\n\n\t\tthen we substitute and get x:\n\t\t\ts.V.Y*X - s.V.X * (r.V.Y*X - r.V.Y*r.O.X + r.V.X*r.O.Y) / r.V.X - s.V.Y*s.O.X + s.V.X*s.O.Y = 0 // * r.V.X\n\t\t\ts.V.Y*X*r.V.X - s.V.X*r.V.Y*X + s.V.X*r.V.Y*r.O.X - s.V.X*r.V.X*r.O.Y - s.V.Y*s.O.X*r.V.X + s.V.X*s.O.Y*r.V.X = 0 // - s.V.Y*X*r.V.X + s.V.X*r.V.Y*X\n\t\t\ts.V.X*r.V.Y*r.O.X - s.V.X*r.V.X*r.O.Y - s.V.Y*s.O.X*r.V.X + s.V.X*s.O.Y*r.V.X = s.V.X*r.V.Y*X - s.V.Y*X*r.V.X // simplify\n\t\t\ts.V.X * (r.V.Y*r.O.X + r.V.X * (s.O.Y - r.O.Y)) - s.V.Y*s.O.X*r.V.X = X * (s.V.X*r.V.Y - s.V.Y*r.V.X) // / (s.V.X*r.V.Y - s.V.Y*r.V.X)\n\t\t\t(s.V.X * (r.V.Y*r.O.X + r.V.X * (s.O.Y - r.O.Y)) - s.V.Y*s.O.X*r.V.X) / (s.V.X*r.V.Y - s.V.Y*r.V.X) = X\n\t*/\n\n\tpoint.X = (s.V.X*(r.V.Y*r.O.X+r.V.X*(s.O.Y-r.O.Y)) - s.V.Y*s.O.X*r.V.X) / (s.V.X*r.V.Y - s.V.Y*r.V.X)\n\n\tif r.V.X == 0 {\n\t\tpoint.Y = s.ProjectX(point.X)\n\t} else {\n\t\tpoint.Y = r.ProjectX(point.X)\n\t}\n\n\treturn\n}", "func (a *APITest) Intercept(interceptor Intercept) *APITest {\n\ta.request.interceptor = interceptor\n\treturn a\n}", "func (s *ShapeOrKeyframed) GetIx() int {\n\tif s == nil || s.Ix == nil {\n\t\treturn 0\n\t}\n\treturn *s.Ix\n}", "func instrSTAX(op uint8, c *CPU) uint64 {\n\tdst := insArg2(op) // actually 1 bit, but the 2nd bit of insArg2 is always 0 in the two STAX instructions\n\n\tif dst == 0 { // STAX B\n\t\tc.Memory[c.BC()] = c.Registers[A]\n\t} else { // STAX D\n\t\tc.Memory[c.DE()] = c.Registers[A]\n\t}\n\n\treturn 7\n}", "func getSplitIndex(length int) int {\n\tif length%2 == 0 {\n\t\treturn length / 2\n\t}\n\n\treturn length/2 + 1\n}", "func (r *Ray) Intersect(o Ray) (Vector, bool) {\n\tconst width = 0.03\n\n\tclampInRange := func(p Vector) (Vector, bool) {\n\t\tdist := r.Origin.Distance(p)\n\t\tif dist < 
r.Mint || dist > r.Maxt {\n\t\t\treturn r.Origin, false\n\t\t}\n\n\t\treturn p, true\n\t}\n\n\tif r.Origin == o.Origin {\n\t\treturn r.Origin, true\n\t}\n\n\td3 := r.Direction.Cross(o.Direction)\n\n\tif !d3.Equals(NewVector(0, 0, 0)) {\n\t\tmatrix := [12]float64{\n\t\t\tr.Direction.X,\n\t\t\t-o.Direction.X,\n\t\t\td3.X,\n\t\t\to.Origin.X - r.Origin.X,\n\n\t\t\tr.Direction.Y,\n\t\t\t-o.Direction.Y,\n\t\t\td3.Y,\n\t\t\to.Origin.Y - r.Origin.Y,\n\n\t\t\tr.Direction.Z,\n\t\t\t-o.Direction.Z,\n\t\t\td3.Z,\n\t\t\to.Origin.Z - r.Origin.Z,\n\t\t}\n\n\t\tresult := solve(matrix, 3, 4)\n\n\t\ta := result[3]\n\t\tb := result[7]\n\t\tc := result[11]\n\n\t\tif a >= 0 && b >= 0 {\n\t\t\tdist := d3.MultiplyScalar(c)\n\t\t\tif dist.Length() <= width {\n\t\t\t\treturn clampInRange(r.At(a))\n\t\t\t}\n\t\t\treturn r.Origin, false\n\t\t}\n\t}\n\n\tdP := o.Origin.Multiply(r.Origin)\n\n\ta2 := r.Direction.Dot(dP)\n\tb2 := o.Direction.Dot(dP.Neg())\n\n\tif a2 < 0 && b2 < 0 {\n\t\tdist := r.Origin.Distance(dP)\n\t\tif dP.Length() <= width {\n\t\t\treturn clampInRange(r.At(dist))\n\t\t}\n\t\treturn r.Origin, false\n\t}\n\n\tp3a := r.Origin.Plus(r.Direction.MultiplyScalar(a2))\n\td3a := o.Origin.Minus(p3a)\n\n\tp3b := r.Origin\n\td3b := o.Origin.Plus(o.Direction.MultiplyScalar(b2)).Minus(p3b)\n\n\tif b2 < 0 {\n\t\tif d3a.Length() <= width {\n\t\t\treturn clampInRange(p3a)\n\t\t}\n\t\treturn r.Origin, false\n\t}\n\n\tif a2 < 0 {\n\t\tif d3b.Length() <= width {\n\t\t\treturn clampInRange(p3b)\n\t\t}\n\t\treturn r.Origin, false\n\t}\n\n\tif d3a.Length() <= d3b.Length() {\n\t\tif d3a.Length() <= width {\n\t\t\treturn clampInRange(p3a)\n\t\t}\n\t\treturn r.Origin, false\n\t}\n\n\tif d3b.Length() <= width {\n\t\treturn clampInRange(p3b)\n\t}\n\n\treturn r.Origin, false\n}", "func LoggingInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {\n\tstart := time.Now()\n\th, err := handler(ctx, req)\n\tp, ok := 
peer.FromContext(ctx)\n\tif !ok {\n\t\tlog.Warn().Msg(\"unable to log grpc request\")\n\n\t\treturn h, err\n\t}\n\n\tlog.Info().\n\t\tStr(\"method\", info.FullMethod).\n\t\tStr(\"latency\", time.Since(start).String()).\n\t\tStr(\"ip\", p.Addr.String()).\n\t\tMsg(\"\")\n\n\treturn h, err\n}", "func (s *SE) IBBSegmentsOffset() uint64 {\n\treturn s.DigestOffset() + s.DigestTotalSize()\n}", "func (dim *Dimensions) indexOf(p Point) int {\n\tx := p.X - dim.BottomLeft.X\n\ty := p.Y - dim.BottomLeft.Y\n\treturn int((x) + (y)*dim.height)\n}", "func (ls *LocationSelect) IntsX(ctx context.Context) []int {\n\tv, err := ls.Ints(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (s *State) GetInterceptAPIKey() string {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\tfor _, key := range s.interceptAPIKeys {\n\t\tif key != \"\" {\n\t\t\treturn key\n\t\t}\n\t}\n\treturn \"\"\n}", "func (*IdentifyCommand) Op() ws.OpCode { return 0 }", "func (l *Line) Offset() int {\n\treturn l.offsets[0]\n}", "func (tv *TextView) WrappedLineNo(pos TextPos) (si, ri int, ok bool) {\n\tif pos.Ln >= len(tv.Renders) {\n\t\treturn 0, 0, false\n\t}\n\treturn tv.Renders[pos.Ln].RuneSpanPos(pos.Ch)\n}", "func DecodeInterest(wire *tlv.Block) (*Interest, error) {\n\tif wire == nil {\n\t\treturn nil, util.ErrNonExistent\n\t}\n\tif len(wire.Subelements()) == 0 {\n\t\twire.Parse()\n\t}\n\n\ti := new(Interest)\n\ti.lifetime = 4000 * time.Millisecond\n\ti.wire = wire\n\tmostRecentElem := 0\n\thasApplicationParameters := false\n\tfor _, elem := range wire.Subelements() {\n\t\tswitch elem.Type() {\n\t\tcase tlv.Name:\n\t\t\tif mostRecentElem >= 1 {\n\t\t\t\treturn nil, errors.New(\"Name is duplicate or out-of-order\")\n\t\t\t}\n\t\t\tname, err := DecodeName(elem)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tmostRecentElem = 1\n\t\t\ti.name = *name\n\t\tcase tlv.CanBePrefix:\n\t\t\tif mostRecentElem >= 2 {\n\t\t\t\treturn nil, errors.New(\"CanBePrefix is duplicate or 
out-of-order\")\n\t\t\t}\n\t\t\tmostRecentElem = 2\n\t\t\ti.canBePrefix = true\n\t\tcase tlv.MustBeFresh:\n\t\t\tif mostRecentElem >= 3 {\n\t\t\t\treturn nil, errors.New(\"MustBeFresh is duplicate or out-of-order\")\n\t\t\t}\n\t\t\tmostRecentElem = 3\n\t\t\ti.mustBeFresh = true\n\t\tcase tlv.ForwardingHint:\n\t\t\tif mostRecentElem >= 4 {\n\t\t\t\treturn nil, errors.New(\"ForwardingHint is duplicate or out-of-order\")\n\t\t\t}\n\t\t\tmostRecentElem = 4\n\t\t\telem.Parse()\n\t\t\tfor _, delegationBlock := range elem.Subelements() {\n\t\t\t\tdelegation, err := DecodeDelegation(delegationBlock)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, errors.New(\"error decoding Delegation\")\n\t\t\t\t}\n\t\t\t\ti.forwardingHint = append(i.forwardingHint, *delegation)\n\t\t\t}\n\t\tcase tlv.Nonce:\n\t\t\tif mostRecentElem >= 5 {\n\t\t\t\treturn nil, errors.New(\"Nonce is duplicate or out-of-order\")\n\t\t\t}\n\t\t\tmostRecentElem = 5\n\t\t\tif len(elem.Value()) != 4 {\n\t\t\t\treturn nil, errors.New(\"error decoding Nonce\")\n\t\t\t}\n\t\t\ti.nonce = make([]byte, 4)\n\t\t\tcopy(i.nonce, elem.Value())\n\t\tcase tlv.InterestLifetime:\n\t\t\tif mostRecentElem >= 6 {\n\t\t\t\treturn nil, errors.New(\"InterestLifetime is duplicate or out-of-order\")\n\t\t\t}\n\t\t\tmostRecentElem = 6\n\t\t\tlifetime, err := tlv.DecodeNNIBlock(elem)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.New(\"error decoding InterestLifetime\")\n\t\t\t}\n\t\t\ti.lifetime = time.Duration(lifetime) * time.Millisecond\n\t\tcase tlv.HopLimit:\n\t\t\tif mostRecentElem >= 7 {\n\t\t\t\treturn nil, errors.New(\"HopLimit is duplicate or out-of-order\")\n\t\t\t}\n\t\t\tmostRecentElem = 7\n\t\t\tif len(elem.Value()) != 1 {\n\t\t\t\treturn nil, errors.New(\"error decoding HopLimit\")\n\t\t\t}\n\t\t\ti.hopLimit = new(uint8)\n\t\t\t*i.hopLimit = elem.Value()[0]\n\t\tcase tlv.ApplicationParameters:\n\t\t\tif mostRecentElem >= 8 {\n\t\t\t\treturn nil, errors.New(\"ApplicationParameters is duplicate or 
out-of-order\")\n\t\t\t}\n\t\t\tmostRecentElem = 8\n\t\t\thasApplicationParameters = true\n\t\t\ti.parameters = append(i.parameters, elem)\n\t\tdefault:\n\t\t\tif !hasApplicationParameters && tlv.IsCritical(elem.Type()) {\n\t\t\t\treturn nil, tlv.ErrUnrecognizedCritical\n\t\t\t} else if hasApplicationParameters {\n\t\t\t\ti.parameters = append(i.parameters, elem)\n\t\t\t}\n\t\t\t// If non-critical and not after ApplicationParameters, ignore\n\t\t}\n\t}\n\n\tif len(i.nonce) == 0 {\n\t\ti.nonce = make([]byte, 4)\n\t\tbinary.LittleEndian.PutUint32(i.nonce, rand.Uint32())\n\t}\n\n\t// If has ApplicationParameters, verify parameters digest component\n\tif hasApplicationParameters {\n\t\t_, paramsDigest := i.name.Find(tlv.ParametersSha256DigestComponent)\n\t\tif paramsDigest == nil {\n\t\t\treturn nil, errors.New(\"has ApplicationParameters but missing ParametersSha256DigestComponent\")\n\t\t}\n\t\t// Hash parameters\n\t\th := sha256.New()\n\t\tfor _, param := range i.parameters {\n\t\t\tparamWire, err := param.Wire()\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.New(\"error wire encoding application parameter of type 0x\" + strconv.FormatUint(uint64(param.Type()), 16))\n\t\t\t}\n\t\t\th.Write(paramWire)\n\t\t}\n\t\tgeneratedHash := h.Sum(nil)\n\n\t\t// Verify hash\n\t\tif !bytes.EqualFold(paramsDigest.Value(), generatedHash) {\n\t\t\treturn nil, errors.New(\"ParametersSha256DigestComponent did not match hash of application parameters\")\n\t\t}\n\t}\n\n\treturn i, nil\n}", "func OpenTracingServerInterceptor(parentSpan opentracing.Span) grpc.UnaryServerInterceptor {\n\ttracingInterceptor := otgrpc.OpenTracingServerInterceptor(\n\t\t// Use the globally installed tracer\n\t\topentracing.GlobalTracer(),\n\t\t// Log full payloads along with trace spans\n\t\totgrpc.LogPayloads(),\n\t)\n\tif parentSpan == nil {\n\t\treturn tracingInterceptor\n\t}\n\tspanContext := parentSpan.Context()\n\treturn func(ctx context.Context, req interface{}, info 
*grpc.UnaryServerInfo,\n\t\thandler grpc.UnaryHandler) (interface{}, error) {\n\n\t\tmd, ok := metadata.FromIncomingContext(ctx)\n\t\tif !ok {\n\t\t\tmd = metadata.New(nil)\n\t\t}\n\t\tcarrier := metadataReaderWriter{md}\n\t\t_, err := opentracing.GlobalTracer().Extract(opentracing.HTTPHeaders, carrier)\n\t\tif err == opentracing.ErrSpanContextNotFound {\n\t\t\tcontract.IgnoreError(opentracing.GlobalTracer().Inject(spanContext, opentracing.HTTPHeaders, carrier))\n\t\t}\n\t\treturn tracingInterceptor(ctx, req, info, handler)\n\t}\n\n}", "func (p *pipeline) IndexOf(comp func(Handler) bool) int {\n\n\thead := p.head\n\n\tfor i := 0; ; i++ {\n\t\tif comp(head.handler) {\n\t\t\treturn i\n\t\t}\n\t\tif head = head.next; head == nil {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn -1\n}", "func (o *Bspline) findSpan(t float64) int {\n\t// Piegl & Tiller: A2.1 p68\n\tn := o.NumBasis()\n\tif t >= o.T[n] {\n\t\treturn n - 1\n\t}\n\tif t <= o.T[o.p] {\n\t\treturn o.p\n\t}\n\tlow, high, mid := o.p, n, (o.p+n)/2\n\tfor t < o.T[mid] || t >= o.T[mid+1] {\n\t\tif t < o.T[mid] {\n\t\t\thigh = mid\n\t\t} else {\n\t\t\tlow = mid\n\t\t}\n\t\tmid = (low + high) / 2\n\t}\n\treturn mid\n}", "func (m *middleware) InterceptOperation(ctx context.Context, next gqlgen.OperationHandler) gqlgen.ResponseHandler {\n\toperations := make([]*Operation, 0)\n\tctx = SaveToContext(ctx, &operations)\n\n\treturn next(ctx)\n}", "func (q *QQwry) getMiddleOffset(start uint32, end uint32) uint32 {\n\trecords := ((end - start) / INDEX_LEN) >> 1\n\treturn start + records*INDEX_LEN\n}", "func findLLVMHighlight(f *ir.Func, prim *primitive.Primitive) ([][2]int, error) {\n\t// Line number ranges to highlight (1-based line numbers, inclusive).\n\tvar lineRanges [][2]int\n\tfor _, blockName := range prim.Nodes {\n\t\tblock, err := findBlock(f, blockName)\n\t\tif err != nil {\n\t\t\treturn nil, errors.WithStack(err)\n\t\t}\n\t\tlineRange := findBlockLineRange(f, block)\n\t\tlineRanges = append(lineRanges, 
lineRange)\n\t}\n\treturn lineRanges, nil\n}", "func (_Bridge *BridgeCaller) GetInterchainRequests(opts *bind.CallOpts, b []byte) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _Bridge.contract.Call(opts, out, \"getInterchainRequests\", b)\n\treturn *ret0, err\n}", "func OpenTracingMiddleware(handlerPackageName string) gin.HandlerFunc {\n\tif !opentracing.IsGlobalTracerRegistered() {\n\t\tfmt.Println(\"OpenTracing global tracer ever register yet\")\n\t\treturn nil\n\t}\n\treturn func(c *gin.Context) {\n\t\tvar span opentracing.Span\n\t\t// Extract trace context from HTTP Header\n\t\ttraceContext, err := opentracing.GlobalTracer().Extract(\n\t\t\topentracing.HTTPHeaders, opentracing.HTTPHeadersCarrier(c.Request.Header))\n\t\tvar opts []opentracing.StartSpanOption\n\t\tif err == nil {\n\t\t\topts = append(opts, opentracing.ChildOf(traceContext))\n\t\t}\n\t\tspan = opentracing.StartSpan(unknownOperation, opts...)\n\t\tspan.SetBaggageItem(BaggageRequestID, c.GetString(CtxRequestID))\n\n\t\tc.Set(CtxTraceSpan, span)\n\t\t// Keep going on next handlers\n\t\tc.Next()\n\n\t\t// Getting operation name from context if manually set\n\t\toperationName := c.GetString(CtxTraceOperationName)\n\t\tif operationName == \"\" {\n\t\t\toperationName = getHandlerName(handlerPackageName, c.HandlerNames())\n\t\t}\n\t\tspan.SetOperationName(operationName)\n\t\tc.Set(CtxTraceOperationName, operationName)\n\t\t// Set HTTP related information\n\t\tspan.SetTag(TagHTTPURL, c.Request.URL.String())\n\t\tspan.SetTag(TagHTTPMethod, c.Request.Method)\n\t\tspan.SetTag(TagHTTPUserAgent, c.Request.UserAgent())\n\t\tspan.SetTag(TagHTTPClientIP, c.ClientIP())\n\t\tspan.SetTag(TagHTTPStatusCode, c.Writer.Status())\n\t\t// Report to server\n\t\tspan.Finish()\n\t}\n}", "func instrMOV(op uint8, c *CPU) uint64 {\n\tsrc := insArg3(op)\n\tdst := insArg3b(op)\n\n\tinsSetreg8(c, dst, insGetreg8(c, src))\n\n\tif src == M || dst == M {\n\t\treturn 7\n\t}\n\treturn 5\n}", "func (c 
*otApplyContext) ligateInput(count int, matchPositions [maxContextLength]int,\n\tmatchLength int, ligGlyph fonts.GID, totalComponentCount uint8) {\n\tbuffer := c.buffer\n\n\tbuffer.mergeClusters(buffer.idx, buffer.idx+matchLength)\n\n\t/* - If a base and one or more marks ligate, consider that as a base, NOT\n\t* ligature, such that all following marks can still attach to it.\n\t* https://github.com/harfbuzz/harfbuzz/issues/1109\n\t*\n\t* - If all components of the ligature were marks, we call this a mark ligature.\n\t* If it *is* a mark ligature, we don't allocate a new ligature id, and leave\n\t* the ligature to keep its old ligature id. This will allow it to attach to\n\t* a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,\n\t* and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA with a\n\t* ligature id and component value of 2. Then if SHADDA,FATHA form a ligature\n\t* later, we don't want them to lose their ligature id/component, otherwise\n\t* GPOS will fail to correctly position the mark ligature on top of the\n\t* LAM,LAM,HEH ligature. See:\n\t* https://bugzilla.gnome.org/show_bug.cgi?id=676343\n\t*\n\t* - If a ligature is formed of components that some of which are also ligatures\n\t* themselves, and those ligature components had marks attached to *their*\n\t* components, we have to attach the marks to the new ligature component\n\t* positions! Now *that*'s tricky! And these marks may be following the\n\t* last component of the whole sequence, so we should loop forward looking\n\t* for them and update them.\n\t*\n\t* Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a\n\t* 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature\n\t* id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature\n\t* form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to\n\t* the new ligature with a component value of 2.\n\t*\n\t* This in fact happened to a font... 
See:\n\t* https://bugzilla.gnome.org/show_bug.cgi?id=437633\n\t */\n\n\tisBaseLigature := buffer.Info[matchPositions[0]].isBaseGlyph()\n\tisMarkLigature := buffer.Info[matchPositions[0]].isMark()\n\tfor i := 1; i < count; i++ {\n\t\tif !buffer.Info[matchPositions[i]].isMark() {\n\t\t\tisBaseLigature = false\n\t\t\tisMarkLigature = false\n\t\t\tbreak\n\t\t}\n\t}\n\tisLigature := !isBaseLigature && !isMarkLigature\n\n\tklass, ligID := uint16(0), uint8(0)\n\tif isLigature {\n\t\tklass = tt.Ligature\n\t\tligID = buffer.allocateLigID()\n\t}\n\tlastLigID := buffer.cur(0).getLigID()\n\tlastNumComponents := buffer.cur(0).getLigNumComps()\n\tcomponentsSoFar := lastNumComponents\n\n\tif isLigature {\n\t\tbuffer.cur(0).setLigPropsForLigature(ligID, totalComponentCount)\n\t\tif buffer.cur(0).unicode.generalCategory() == nonSpacingMark {\n\t\t\tbuffer.cur(0).setGeneralCategory(otherLetter)\n\t\t}\n\t}\n\n\t// ReplaceGlyph_with_ligature\n\tc.setGlyphPropsExt(ligGlyph, klass, true, false)\n\tbuffer.replaceGlyphIndex(ligGlyph)\n\n\tfor i := 1; i < count; i++ {\n\t\tfor buffer.idx < matchPositions[i] {\n\t\t\tif isLigature {\n\t\t\t\tthisComp := buffer.cur(0).getLigComp()\n\t\t\t\tif thisComp == 0 {\n\t\t\t\t\tthisComp = lastNumComponents\n\t\t\t\t}\n\t\t\t\tnewLigComp := componentsSoFar - lastNumComponents +\n\t\t\t\t\tmin8(thisComp, lastNumComponents)\n\t\t\t\tbuffer.cur(0).setLigPropsForMark(ligID, newLigComp)\n\t\t\t}\n\t\t\tbuffer.nextGlyph()\n\t\t}\n\n\t\tlastLigID = buffer.cur(0).getLigID()\n\t\tlastNumComponents = buffer.cur(0).getLigNumComps()\n\t\tcomponentsSoFar += lastNumComponents\n\n\t\t/* Skip the base glyph */\n\t\tbuffer.skipGlyph()\n\t}\n\n\tif !isMarkLigature && lastLigID != 0 {\n\t\t/* Re-adjust components for any marks following. 
*/\n\t\tfor i := buffer.idx; i < len(buffer.Info); i++ {\n\t\t\tif lastLigID != buffer.Info[i].getLigID() {\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tthisComp := buffer.Info[i].getLigComp()\n\t\t\tif thisComp == 0 {\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tnewLigComp := componentsSoFar - lastNumComponents +\n\t\t\t\tmin8(thisComp, lastNumComponents)\n\t\t\tbuffer.Info[i].setLigPropsForMark(ligID, newLigComp)\n\t\t}\n\t}\n}", "func (ligb *LineItemGroupBy) IntsX(ctx context.Context) []int {\n\tv, err := ligb.Ints(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func GetLineIndex(v *gocui.View) int {\n\t_, cy := v.Cursor()\n\treturn cy\n}", "func findGiverReceiver(p []int) ([]int, bool) {\n\t// start from back and look for intersection\n\ts := p[len(p)-1] // smallest\n\tsi := len(p) - 1 // smallest index\n\n\tfor i := len(p) - 2; i >= 1; i-- {\n\t\tif p[i] != s {\n\t\t\tsi = i + 1\n\t\t\tbreak\n\t\t}\n\t}\n\n\tfor i := (si - 1); i >= 1; i-- {\n\t\tif (p[i] - p[si]) >= 2 {\n\t\t\treturn []int{i, si}, true\n\t\t}\n\t}\n\n\treturn []int{}, false\n}", "func findClosestIntersection(wires []Wire, grid Grid) int {\n\tintersections := make(map[aocutils.Vector2D]int)\n\tfor _, wire := range wires {\n\t\tfor dist, pos := range wire.Positions {\n\t\t\tif grid.Cell(pos) == 'X' {\n\t\t\t\tintersections[pos] = intersections[pos] + dist + 1\n\t\t\t}\n\t\t}\n\t}\n\n\tvar closestIntersection int\n\tfor _, distance := range intersections {\n\t\tif closestIntersection == 0 {\n\t\t\tclosestIntersection = distance\n\t\t}\n\t\tfmt.Println(\"Intersection found with distance\", distance)\n\t\tclosestIntersection = aocutils.MinInt(closestIntersection, distance)\n\t}\n\n\treturn closestIntersection\n}", "func (fw *IPtables) extractSegmentID(addr uint64) uint64 {\n\tendpointBits := fw.networkConfig.EndpointBits()\n\tsegmentBits := fw.networkConfig.SegmentBits()\n\tsid := (addr >> endpointBits) & ((1 << segmentBits) - 1)\n\treturn sid\n}", "func (i I) Dot(i2 I) int {\n\treturn i.X*i2.X + 
i.Y*i2.Y\n}", "func (o *SearchLine) GetLine() int32 {\n\tif o == nil || o.Line == nil {\n\t\tvar ret int32\n\t\treturn ret\n\t}\n\treturn *o.Line\n}", "func (s *Segment) Limits() (oldest, newest Offset) {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\n\tswitch len(s.chunks) {\n\tcase 0:\n\t\treturn ZeroOffset, ZeroOffset\n\tcase 1:\n\t\toffset := s.chunks[0].Offset()\n\t\treturn offset, offset\n\t}\n\toldest, newest = s.chunks[0].Offset(), s.chunks[len(s.chunks)-1].Offset()\n\treturn oldest, newest\n}", "func New() *Interceptor {\n\treturn &Interceptor{}\n}", "func GetLine(view *gocui.View) int {\n\tif view != nil {\n\t\t_, oy := view.Origin()\n\t\t_, cy := view.Cursor()\n\t\treturn oy + cy\n\t}\n\treturn 0\n}", "func (o *ReceiptResponse) GetBlockIndexOk() (*uint32, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.BlockIndex, true\n}", "func (p *Code) Line(ip int) (file string, line int) {\n\n\tidx := sort.Search(len(p.lines), func(i int) bool {\n\t\treturn ip < p.lines[i].ip\n\t})\n\tif idx < len(p.lines) {\n\t\tt := p.lines[idx]\n\t\treturn t.file, t.line\n\t}\n\treturn \"\", 0\n}" ]
[ "0.6160073", "0.5302833", "0.5037876", "0.47532725", "0.4733476", "0.46008787", "0.45846316", "0.43068388", "0.42624182", "0.42509186", "0.42478302", "0.42192173", "0.42065093", "0.4164991", "0.41622105", "0.414886", "0.41471472", "0.4143421", "0.412669", "0.41088027", "0.41033345", "0.40821975", "0.40612638", "0.4055664", "0.40437365", "0.40373853", "0.40294123", "0.40293124", "0.40240914", "0.40100694", "0.40083608", "0.40022182", "0.39964864", "0.39952245", "0.39792502", "0.39611614", "0.3959512", "0.39542887", "0.39536595", "0.39522794", "0.39490205", "0.39415693", "0.39386007", "0.39305454", "0.39242193", "0.39235446", "0.39192602", "0.3915697", "0.39129135", "0.39089298", "0.3907361", "0.39041758", "0.39034596", "0.38982904", "0.38945752", "0.388575", "0.3884906", "0.38848615", "0.38751212", "0.3874353", "0.38619965", "0.38562617", "0.38371456", "0.3826563", "0.3824116", "0.3819395", "0.38187262", "0.3808034", "0.3801071", "0.37975976", "0.3795164", "0.37937188", "0.3783734", "0.37797344", "0.37772098", "0.3775962", "0.3773427", "0.37673378", "0.37561008", "0.37558776", "0.37549564", "0.37513557", "0.37506622", "0.37503532", "0.37502298", "0.37499365", "0.37459368", "0.37445623", "0.37375623", "0.37352178", "0.3733894", "0.3723893", "0.37220666", "0.37184146", "0.37136784", "0.3711323", "0.37085298", "0.37084678", "0.3706689", "0.3699477" ]
0.561719
1
inRange returns whether true if a >= val = val <= a.
func inRange(val, a, b int) bool { return val >= a && val <= b || val >= b && val <= a }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func InRange(val, min, max float64) float64 {\n\tif val < min {\n\t\treturn min\n\t} else if val > max {\n\t\treturn max\n\t}\n\treturn val\n}", "func InRange(value, left, right float64) bool {\n\tif left > right {\n\t\tleft, right = right, left\n\t}\n\treturn value >= left && value <= right\n}", "func within(value, low, high int) bool {\n\treturn value >= low && value <= high\n}", "func ValueIsInRange(candidate int, lowerBound int, upperBound int) bool {\n\treturn lowerBound <= candidate && candidate < upperBound\n}", "func inRange(r ipRange, ipAddress net.IP) bool {\n\t// strcmp type byte comparison\n\tif bytes.Compare(ipAddress, r.start) >= 0 && bytes.Compare(ipAddress, r.end) < 0 {\n\t\treturn true\n\t}\n\treturn false\n}", "func InRange(n int, r []int) bool {\n\treturn len(r) == 2 && (r[0] <= n && n <= r[1])\n}", "func (p Point) In(rg Range) bool {\n\treturn p.X >= rg.Min.X && p.X < rg.Max.X && p.Y >= rg.Min.Y && p.Y < rg.Max.Y\n}", "func (i *Number) IsInRange(start, end Number) bool {\n\treturn i.value <= end.value && i.value >= start.value\n}", "func (v *intChain) IsInRange(min, max int) IntChainer {\n f := func() bool {\n if min > max || min == max {\n return false\n }\n if v.Num >= min && v.Num <= max {\n return true\n }\n \n return false\n }\n v.chains = append(v.chains, f)\n\n return v\n}", "func (s StatusCode) In(r StatusCodeRange) bool {\n\treturn r.Min <= s && s <= r.Max\n}", "func inRangeEntries(v int, entries []*RangeEntry) bool {\n\tfor _, re := range entries {\n\t\tif re.StartVendorID <= v && v <= re.EndVendorID {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func between(start, elt, end *big.Int, inclusive bool) bool {\n\tif end.Cmp(start) > 0 {\n\t\treturn (start.Cmp(elt) < 0 && elt.Cmp(end) < 0) || (inclusive && elt.Cmp(end) == 0)\n\t}\n\treturn start.Cmp(elt) < 0 || elt.Cmp(end) < 0 || (inclusive && elt.Cmp(end) == 0)\n}", "func (c *capacity) isInRange(y resource.Quantity) bool {\n\tif c.lower.IsZero() && c.upper.Cmp(y) >= 0 
{\n\t\t// [0, a] y\n\t\treturn true\n\t}\n\tif c.upper.IsZero() && c.lower.Cmp(y) <= 0 {\n\t\t// [b, 0] y\n\t\treturn true\n\t}\n\tif !c.lower.IsZero() && !c.upper.IsZero() {\n\t\t// [a, b] y\n\t\treturn c.lower.Cmp(y) <= 0 && c.upper.Cmp(y) >= 0\n\t}\n\treturn false\n}", "func (r *Range) Contains(num int64) bool {\n\treturn num >= r.Min && num <= r.Max\n}", "func inRange(point *geo.Point, req *geo.Request) bool {\n\tleft := math.Min(float64(req.Lo.Longitude), float64(req.Hi.Longitude))\n\tright := math.Max(float64(req.Lo.Longitude), float64(req.Hi.Longitude))\n\ttop := math.Max(float64(req.Lo.Latitude), float64(req.Hi.Latitude))\n\tbottom := math.Min(float64(req.Lo.Latitude), float64(req.Hi.Latitude))\n\n\tif float64(point.Longitude) >= left &&\n\t\tfloat64(point.Longitude) <= right &&\n\t\tfloat64(point.Latitude) >= bottom &&\n\t\tfloat64(point.Latitude) <= top {\n\t\treturn true\n\t}\n\treturn false\n}", "func (r Range) ContainsRange(r1 Range) bool {\n\treturn r1[0] >= r[0] && r1[1] <= r[1]\n}", "func BetweenIn(x float64, a, b float64) bool {\n\tmin, max := math.Min(a, b), math.Max(a, b)\n\treturn min <= a && a <= max\n}", "func InRangeInt(val, min, max int) int {\n\tif val < min {\n\t\treturn min\n\t} else if val > max {\n\t\treturn max\n\t}\n\treturn val\n}", "func InRange(tf *token.File, pos token.Pos) bool {\n\tsize := tf.Pos(tf.Size())\n\treturn int(pos) >= tf.Base() && pos <= size\n}", "func inRange(key, left, right, max uint64) bool {\n\tcurrent := left\n\n\tfor ; current != right; current = (current + 1) % max {\n\t\tif current == key {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func (self *CircularTrack) InRange(l, r float64) bool {\n\treturn l < r\n}", "func (v Vec2) In(lo, hi Vec2) bool {\n\treturn lo[0] <= v[0] && lo[1] <= v[1] && v[0] < hi[0] && v[1] < hi[1]\n}", "func (r *Range) CheckInRange(val float64) bool {\n\tif val >= r.minVal && val <= r.maxVal {\n\t\treturn true\n\t}\n\treturn false\n}", "func (rg Range) In(r Range) bool 
{\n\treturn rg.Intersect(r) == rg\n}", "func (dt *DateTime) InRange(min, max time.Time) *DateTime {\n\topChain := dt.chain.enter(\"InRange()\")\n\tdefer opChain.leave()\n\n\tif opChain.failed() {\n\t\treturn dt\n\t}\n\n\tif !((dt.value.After(min) || dt.value.Equal(min)) &&\n\t\t(dt.value.Before(max) || dt.value.Equal(max))) {\n\t\topChain.fail(AssertionFailure{\n\t\t\tType: AssertInRange,\n\t\t\tActual: &AssertionValue{dt.value},\n\t\t\tExpected: &AssertionValue{AssertionRange{min, max}},\n\t\t\tErrors: []error{\n\t\t\t\terrors.New(\"expected: time point is within given range\"),\n\t\t\t},\n\t\t})\n\t}\n\n\treturn dt\n}", "func (r Range) Contains(v T) bool {\n\treturn r.Max >= v && r.Min <= v\n}", "func (fr *FileRing) InRange(x int64) bool {\n\tfr.lock.RLock()\n\tdefer fr.lock.RUnlock()\n\treturn InRange(x, fr.start, fr.end, M)\n}", "func (su *SliceNumber) InRange() error {\n\tvar si interface{}\n\n\tswitch {\n\tcase len(su.Uint) > 0:\n\t\tsi = su.Uint\n\tcase len(su.Uint16) > 0:\n\t\tsi = su.Uint16\n\tcase len(su.Uint32) > 0:\n\t\tsi = su.Uint32\n\tcase len(su.Uint64) > 0:\n\t\tsi = su.Uint64\n\tcase len(su.Int) > 0:\n\t\tsi = su.Int\n\tcase len(su.Int16) > 0:\n\t\tsi = su.Int16\n\tcase len(su.Int32) > 0:\n\t\tsi = su.Int32\n\tcase len(su.Int64) > 0:\n\t\tsi = su.Int64\n\t}\n\n\tif !su.IsSorted {\n\t\tsu.Sort(false)\n\t\treturn su.InRange()\n\t}\n\n\terr := su.SliceRange.Range(si)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func RangeIn(low, hi int) int {\n\trand.Seed(time.Now().UnixNano())\n\treturn low + rand.Intn(hi-low)\n}", "func fieldInRange(field int, min, max int) bool {\n\treturn (min <= field) && (field <= max)\n}", "func (ggt Globegridtile) ContainsRange(gp Gridpoint, dist float64) bool {\n\ttop := gp.MoveTo(0, dist)\n\tright := gp.MoveTo(90, dist)\n\tbottom := gp.MoveTo(180, dist)\n\tleft := gp.MoveTo(270, dist)\n\n\treturn ggt.ContainsLatLon(top.lat, top.lon) &&\n\t\tggt.ContainsLatLon(right.lat, right.lon) 
&&\n\t\tggt.ContainsLatLon(bottom.lat, bottom.lon) &&\n\t\tggt.ContainsLatLon(left.lat, left.lon)\n}", "func (p Point) In(r Rectangle) bool {\n\treturn p.X >= r.Min.X && p.X < r.Max.X &&\n\t\tp.Y >= r.Min.Y && p.Y < r.Max.Y\n}", "func (list *List) withinRange(index int) bool {\n\treturn index >= 0 && index < list.size\n}", "func (segment *Segment) isInSegment(value float64) bool {\n\treturn value >= segment.start && value <= segment.end\n}", "func (list *ArrayList) withinRange(index int) bool {\n\treturn index >= 0 && index < list.size\n}", "func RangeFromValIn(vs ...float64) predicate.Property {\n\tv := make([]interface{}, len(vs))\n\tfor i := range v {\n\t\tv[i] = vs[i]\n\t}\n\treturn predicate.Property(func(s *sql.Selector) {\n\t\t// if not arguments were provided, append the FALSE constants,\n\t\t// since we can't apply \"IN ()\". This will make this predicate falsy.\n\t\tif len(v) == 0 {\n\t\t\ts.Where(sql.False())\n\t\t\treturn\n\t\t}\n\t\ts.Where(sql.In(s.C(FieldRangeFromVal), v...))\n\t})\n}", "func (p Point) In(r Rectangle) bool {\n\treturn r.Min.X <= p.X && p.X < r.Max.X &&\n\t\tr.Min.Y <= p.Y && p.Y < r.Max.Y\n}", "func (s Ranges) IsIn(index uint64) bool {\n\tfor _, r := range s {\n\t\tstartIdx := r.Offset\n\t\tendIdx := r.Offset + r.Length\n\t\t// `startIdx` is inclusive, while `endIdx` is exclusive.\n\t\t// The same as usual slice indices works:\n\t\t//\n\t\t// slice[startIdx:endIdx]\n\n\t\tif startIdx <= index && index < endIdx {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func within(p, q, r float64) bool {\n\treturn (p <= q) && (q <= r) || (r <= q) && (q <= p)\n}", "func (e Pos) In(r Rect) bool {\n\treturn r.Min.I <= e.I && e.I < r.Max.I && r.Min.J <= e.J && e.J < r.Max.J\n}", "func (r Rectangle) In(s Rectangle) bool {\n\tif r.Empty() {\n\t\treturn true\n\t}\n\t// Note that r.Max is an exclusive bound for r, so that r.In(s)\n\t// does not require that r.Max.In(s).\n\treturn s.Min.X <= r.Min.X && r.Max.X <= s.Max.X &&\n\t\ts.Min.Y <= r.Min.Y 
&& r.Max.Y <= s.Max.Y\n}", "func InRange(format, version, versionRange string) (bool, error) {\n\tversionParser, exists := GetParser(format)\n\tif !exists {\n\t\treturn false, ErrUnknownVersionFormat\n\t}\n\n\tin, err := versionParser.InRange(version, versionRange)\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\"Format\": format, \"Version\": version, \"Range\": versionRange}).Error(err)\n\t}\n\treturn in, err\n}", "func (r Range) ContainsRange(or Range) bool {\n\treturn r.Offset <= or.Offset && or.EndOffset <= r.EndOffset\n}", "func InRange32(val, min, max float32) float32 {\n\tif val < min {\n\t\treturn min\n\t} else if val > max {\n\t\treturn max\n\t}\n\treturn val\n}", "func Between(val any, min, max int64) bool {\n\tintVal, err := mathutil.Int64(val)\n\tif err != nil {\n\t\treturn false\n\t}\n\n\treturn intVal >= min && intVal <= max\n}", "func (x IntRange) ContainsIntRange(y IntRange) bool {\n\tif y.Empty() {\n\t\treturn true\n\t}\n\tif (x[0] != nil) && (y[0] == nil || x[0].Cmp(y[0]) > 0) {\n\t\treturn false\n\t}\n\tif (x[1] != nil) && (y[1] == nil || x[1].Cmp(y[1]) < 0) {\n\t\treturn false\n\t}\n\treturn true\n}", "func (r ranges) valid(num uint64) bool {\n\tfor _, currentRange := range r {\n\t\tif num <= currentRange.upper && num >= currentRange.lower {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func (f *fragment) rangeBetween(bitDepth uint, predicateMin, predicateMax int64) (*Row, error) {\n\tb := f.row(bsiExistsBit)\n\n\t// Convert predicates to unsigned values.\n\tupredicateMin, upredicateMax := uint64(predicateMin), uint64(predicateMax)\n\tif predicateMin < 0 {\n\t\tupredicateMin = uint64(-predicateMin)\n\t}\n\tif predicateMax < 0 {\n\t\tupredicateMax = uint64(-predicateMax)\n\t}\n\n\t// Handle positive-only values.\n\tif predicateMin >= 0 {\n\t\treturn f.rangeBetweenUnsigned(b.Difference(f.row(bsiSignBit)), bitDepth, upredicateMin, upredicateMax)\n\t}\n\n\t// Handle negative-only values. 
Swap unsigned min/max predicates.\n\tif predicateMax < 0 {\n\t\treturn f.rangeBetweenUnsigned(b.Intersect(f.row(bsiSignBit)), bitDepth, upredicateMax, upredicateMin)\n\t}\n\n\t// If predicate crosses positive/negative boundary then handle separately and union.\n\tpos, err := f.rangeLTUnsigned(b.Difference(f.row(bsiSignBit)), bitDepth, upredicateMax, true)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tneg, err := f.rangeLTUnsigned(b.Intersect(f.row(bsiSignBit)), bitDepth, upredicateMin, true)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn pos.Union(neg), nil\n}", "func (day DayRange) within(other DayRange) bool {\n\tif day.Wday == other.Wday && day.StartsAt >= other.StartsAt && day.EndsAt <= other.EndsAt {\n\t\treturn true\n\t}\n\treturn false\n}", "func inBound(x int, y int, length int, width int) bool {\n\tif 0 <= x && x < length && y >= 0 && y < width {\n\t\treturn true\n\t}\n\treturn false\n}", "func RangeToValIn(vs ...float64) predicate.Property {\n\tv := make([]interface{}, len(vs))\n\tfor i := range v {\n\t\tv[i] = vs[i]\n\t}\n\treturn predicate.Property(func(s *sql.Selector) {\n\t\t// if not arguments were provided, append the FALSE constants,\n\t\t// since we can't apply \"IN ()\". 
This will make this predicate falsy.\n\t\tif len(v) == 0 {\n\t\t\ts.Where(sql.False())\n\t\t\treturn\n\t\t}\n\t\ts.Where(sql.In(s.C(FieldRangeToVal), v...))\n\t})\n}", "func (r Rectangle) In(r1 Rectangle) bool {\n\tif r.Empty() {\n\t\treturn true\n\t}\n\tif r1.Empty() {\n\t\treturn false\n\t}\n\treturn r1.Min.X <= r.Min.X && r.Max.X <= r1.Max.X &&\n\t\tr1.Min.Y <= r.Min.Y && r.Max.Y <= r1.Max.Y\n}", "func In(r rune, ranges ...*unicode.RangeTable,) bool", "func rangeCodeTest(val int) func(int) bool {\n\treturn func(code int) bool {\n\t\tdiff := code - val\n\t\treturn diff >= 0 && diff < 100\n\t}\n}", "func inBounds(row, column int) bool {\n\tif row < 0 || row >= rows {\n\t\treturn false\n\t}\n\tif column < 0 || column >= columns {\n\t\treturn false\n\t}\n\treturn true\n}", "func (r intRange) Include(n int) bool {\n\treturn r.min <= n && n <= r.max\n}", "func (s *BasePlSqlParserListener) EnterRange_values_clause(ctx *Range_values_clauseContext) {}", "func IsPartOfRange(r rune, rng []RuneRange) bool {\n\tfor _, v := range rng {\n\t\tif r >= v.Start && r <= v.End {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func (a AddrRange) Contains(addr Address) bool {\n\treturn a.Start <= addr && addr < a.End\n}", "func Range(str string, params ...string) bool {\n\tif len(params) == 2 {\n\t\tvalue, _ := ToFloat(str)\n\t\tmin, _ := ToFloat(params[0])\n\t\tmax, _ := ToFloat(params[1])\n\t\treturn InRange(value, min, max)\n\t}\n\n\treturn false\n}", "func (list *DoublyLinkedList) inRange(index int) bool {\n\treturn index >= 0 && index < list.size && list.size != 0\n}", "func (r *Range) Contains(addr net.IP) bool {\n\tif err := canonicalizeIP(&addr); err != nil {\n\t\treturn false\n\t}\n\n\tsubnet := (net.IPNet)(r.Subnet)\n\n\t// Not the same address family\n\tif len(addr) != len(r.Subnet.IP) {\n\t\treturn false\n\t}\n\n\t// Not in network\n\tif !subnet.Contains(addr) {\n\t\treturn false\n\t}\n\n\t// We ignore nils here so we can use this function as we initialize the 
range.\n\tif r.RangeStart != nil {\n\t\t// Before the range start\n\t\tif ip.Cmp(addr, r.RangeStart) < 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\tif r.RangeEnd != nil {\n\t\tif ip.Cmp(addr, r.RangeEnd) > 0 {\n\t\t\t// After the range end\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func cgoInRange(p unsafe.Pointer, start, end uintptr) bool {\n\treturn start <= uintptr(p) && uintptr(p) < end\n}", "func (of *openFiles) InRange(fh uint64) bool {\n\treturn uint8(fh) == of.mark\n}", "func (l *IntList) Range(f func(value int) bool) {\n\tx := l.head.getNextNode()\n\tfor x != nil {\n\t\tif !f(x.value) {\n\t\t\tbreak\n\t\t}\n\t\tx = x.getNextNode()\n\t}\n}", "func (r Range) Contains(offset int) bool {\n\treturn r.Offset <= offset && offset <= r.EndOffset\n}", "func (v *ValueRange) HasLowerBound() bool {\n\treturn v.lowerEndPoint != nil\n}", "func (ggt Globegridtile) ContainedInRange(gp Gridpoint, dist float64) bool {\n\td1 := DistanceBetween(gp, NewGridpoint(\"tlc\", ggt.min_lat, ggt.min_lon))\n\td2 := DistanceBetween(gp, NewGridpoint(\"trc\", ggt.min_lat, ggt.max_lon))\n\td3 := DistanceBetween(gp, NewGridpoint(\"blc\", ggt.max_lat, ggt.min_lon))\n\td4 := DistanceBetween(gp, NewGridpoint(\"brc\", ggt.max_lat, ggt.max_lon))\n\n\treturn (d1 <= dist && d2 <= dist && d3 <= dist && d4 <= dist)\n}", "func outOfRange(sc *stmtctx.StatementContext, min, max, val *types.Datum) (int, error) {\n\tresult, err := val.CompareDatum(sc, min)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tif result < 0 {\n\t\treturn result, nil\n\t}\n\tresult, err = val.CompareDatum(sc, max)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tif result > 0 {\n\t\treturn result, nil\n\t}\n\treturn 0, nil\n}", "func Between(x float64, a, b float64) bool {\n\tmin, max := math.Min(a, b), math.Max(a, b)\n\treturn min < a && a < max\n}", "func (r *Request) inRange(host *Host, hosts []*Host) bool {\n\tcheck := false\n\n\tfor i := range hosts {\n\t\tif hosts[i].HostUrl == host.HostUrl {\n\t\t\tcheck = 
true\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn check\n}", "func (ggt Globegridtile) IntersectsRange(gp Gridpoint, dist float64) bool {\n\t// First easy bit, if the gp is inside the ggt...\n\tif ggt.ContainsLatLon(gp.lat, gp.lon) {\n\t\treturn true\n\t}\n\n\t// OK so the gp is outside the ggt. Either a corner is in range, or a side is in range.\n\n\t// Check if any of the corners are...\n\td1 := DistanceBetween(gp, NewGridpoint(\"tlc\", ggt.min_lat, ggt.min_lon))\n\td2 := DistanceBetween(gp, NewGridpoint(\"trc\", ggt.min_lat, ggt.max_lon))\n\td3 := DistanceBetween(gp, NewGridpoint(\"blc\", ggt.max_lat, ggt.min_lon))\n\td4 := DistanceBetween(gp, NewGridpoint(\"brc\", ggt.max_lat, ggt.max_lon))\n\n\tif d1 <= dist ||\n\t\td2 <= dist ||\n\t\td3 <= dist ||\n\t\td4 <= dist {\n\t\treturn true\n\t}\n\n\t// Lastly we check sides...\n\n\tif gp.lat >= ggt.min_lat && gp.lat < ggt.max_lat {\n\t\t// lat is within range, so use gp.lat\n\t\tif gp.lon > ggt.max_lon {\n\t\t\td := DistanceBetween(gp, NewGridpoint(\"maxlon\", gp.lat, ggt.max_lon))\n\t\t\tif d <= dist {\n\t\t\t\treturn true\n\t\t\t}\n\t\t} else if gp.lon < ggt.min_lon {\n\t\t\td := DistanceBetween(gp, NewGridpoint(\"minlon\", gp.lat, ggt.min_lon))\n\t\t\tif d <= dist {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t}\n\n\tif gp.lon >= ggt.min_lon && gp.lon < ggt.max_lon {\n\t\t// lon is within range, so use gp.lon\n\t\tif gp.lat > ggt.max_lat {\n\t\t\td := DistanceBetween(gp, NewGridpoint(\"maxlat\", ggt.max_lat, gp.lon))\n\t\t\tif d <= dist {\n\t\t\t\treturn true\n\t\t\t}\n\t\t} else if gp.lat < ggt.min_lat {\n\t\t\td := DistanceBetween(gp, NewGridpoint(\"minlon\", ggt.min_lat, gp.lon))\n\t\t\tif d <= dist {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn false\n}", "func LessThan(upper Value) *ValueRange {\n\treturn &ValueRange{\n\t\tupperEndPoint: &EndPoint{value: upper, boundType: BoundTypeOpen},\n\t}\n}", "func (blueprintr Range) Accepts(x float64) bool {\n\treturn blueprintr.Min <= x && (blueprintr.Max == 0 || x <= 
blueprintr.Max)\n}", "func (g Grid) WithinBounds(c Vec) bool {\n\tN, M := g.Size()\n\treturn 0 <= c.i && c.i < N && 0 <= c.j && c.j < M\n}", "func RuneInRange(start, end rune) Parser {\n\treturn AnnotateStyleable(\n\t\tMatchRune(func(r rune) bool {\n\t\t\treturn start <= r && r <= end\n\t\t}),\n\t\t\"RuneInRange\",\n\t\tfunc(style int) string {\n\t\t\treturn fmt.Sprintf(\"[%s-%s]\", string(start), string(end))\n\t\t})\n}", "func (i I) In(a, b I) bool {\n\tif a.X > b.X {\n\t\tif i.X <= b.X || i.X > a.X {\n\t\t\treturn false\n\t\t}\n\t} else {\n\t\tif i.X >= b.X || i.X < a.X {\n\t\t\treturn false\n\t\t}\n\t}\n\tif a.Y > b.Y {\n\t\tif i.Y <= b.Y || i.Y > a.Y {\n\t\t\treturn false\n\t\t}\n\t} else {\n\t\tif i.Y >= b.Y || i.Y < a.Y {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func check(Val int, arr []int, info []bool) bool {\n\tfor i := len(info) - 1; i >= 0; i-- {\n\t\tif info[i] && Val >= arr[i] || !info[i] && Val <= arr[i] {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func Within(param string, min float64, max float64) error {\n\tf, err := strconv.ParseFloat(param, 64)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"invalid float param %s\", param)\n\t}\n\tif f < min {\n\t\treturn fmt.Errorf(\"%0.2f is less than minimum %0.2f\", f, min)\n\t}\n\tif f > max {\n\t\treturn fmt.Errorf(\"%0.2f is more than maximum %0.2f\", f, max)\n\t}\n\treturn nil\n}", "func overlapBounds(amin, amax, bmin, bmax Vector3) bool {\n\toverlap := true\n\tif amin[0] > bmax[0] || amax[0] < bmin[0] {\n\t\toverlap = false\n\t}\n\tif amin[1] > bmax[1] || amax[1] < bmin[1] {\n\t\toverlap = false\n\t}\n\tif amin[2] > bmax[2] || amax[2] < bmin[2] {\n\t\toverlap = false\n\t}\n\treturn overlap\n}", "func (v RangeInt) Test(value int) bool {\n\treturn TestInt(v.min, v.max, value, v.minExclusive, v.maxExclusive)\n}", "func (ranges Ranges) Less(i, j int) bool {\n if ranges[i].Start == ranges[j].Start {\n return ranges[i].End < ranges[j].End\n }\n return ranges[i].Start < ranges[j].Start\n}", 
"func (key Key) BetweenEndInclusive(start Key, end Key) bool {\n\ts, e := uint64(start), uint64(end)\n\tk := uint64(key)\n\tif s > config.maxKey || e > config.maxKey || k > config.maxKey {\n\t\tpanic(\"MaxKey constraint has been violated!\")\n\t}\n\tif s == e {\n\t\treturn true // Full sweep - all keys are in range.\n\t}\n\tif s > e { // Interval wraps - if key is lt end or gt start, it is in interval\n\t\treturn s < k || k <= e\n\t} else {\n\t\treturn (s < k && k <= e)\n\t}\n}", "func (rg Range) Intersect(r Range) Range {\n\tif rg.Max.X > r.Max.X {\n\t\trg.Max.X = r.Max.X\n\t}\n\tif rg.Max.Y > r.Max.Y {\n\t\trg.Max.Y = r.Max.Y\n\t}\n\tif rg.Min.X < r.Min.X {\n\t\trg.Min.X = r.Min.X\n\t}\n\tif rg.Min.Y < r.Min.Y {\n\t\trg.Min.Y = r.Min.Y\n\t}\n\tif rg.Min.X >= rg.Max.X || rg.Min.Y >= rg.Max.Y {\n\t\treturn Range{}\n\t}\n\treturn rg\n}", "func (m *Block) Within(minT, maxT int64) bool {\n\t// NOTE: Block intervals are half-open: [MinTime, MaxTime).\n\treturn m.MinTime <= maxT && minT < m.MaxTime\n}", "func InIntSlice(a int, list []int) bool {\n\tfor _, v := range list {\n\t\tif a == v {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func RangeAnd(low, high, n int, f pargo.RangePredicate) bool {\n\tvar recur func(int, int, int) bool\n\trecur = func(low, high, n int) (result bool) {\n\t\tswitch {\n\t\tcase n == 1:\n\t\t\treturn f(low, high)\n\t\tcase n > 1:\n\t\t\tbatchSize := ((high - low - 1) / n) + 1\n\t\t\thalf := n / 2\n\t\t\tmid := low + batchSize*half\n\t\t\tif mid >= high {\n\t\t\t\treturn f(low, high)\n\t\t\t}\n\t\t\tvar b1 bool\n\t\t\tvar p interface{}\n\t\t\tvar wg sync.WaitGroup\n\t\t\twg.Add(1)\n\t\t\tgo func() {\n\t\t\t\tdefer func() {\n\t\t\t\t\twg.Done()\n\t\t\t\t\tp = recover()\n\t\t\t\t}()\n\t\t\t\tb1 = recur(mid, high, n-half)\n\t\t\t}()\n\t\t\tif !recur(low, mid, half) {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\twg.Wait()\n\t\t\tif p != nil {\n\t\t\t\tpanic(p)\n\t\t\t}\n\t\t\treturn b1\n\t\tdefault:\n\t\t\tpanic(fmt.Sprintf(\"invalid number of 
batches: %v\", n))\n\t\t}\n\t}\n\treturn recur(low, high, internal.ComputeNofBatches(low, high, n))\n}", "func (qc *qualityControl) assertRange(f string, v float64, min float64, max float64) {\n\tif v < min || v > max {\n\t\tqc.errs = append(qc.errs, fmt.Errorf(\"range check, %f < (%s) < %f, failed for value: %f\", min, f, max, v))\n\t}\n}", "func TestRangeIterator(t *testing.T) {\n\ttree := NewSplayTree()\n\titems := []Item{Int(2), Int(4), Int(6), Int(1), Int(5), Int(3), Int(0)}\n\ttree.InsertAll(items)\n\tfor lkup := range items {\n\t\ttree.Lookup(Int(lkup))\n\t\tlower := Int(2)\n\t\tupper := Int(4)\n\t\titer := tree.RangeIterator(lower, upper)\n\t\tfor item := iter(); item != nil; item = iter() {\n\t\t\tif item.Less(lower) || upper.Less(item) {\n\t\t\t\tt.Errorf(\"RangeIterator item %v ![%v, %v]\", item, lower, upper)\n\t\t\t}\n\t\t}\n\t\tlower = Int(-10)\n\t\tupper = Int(-1)\n\t\titer = tree.RangeIterator(lower, upper)\n\t\tfor item := iter(); item != nil; item = iter() {\n\t\t\tif item.Less(lower) || upper.Less(item) {\n\t\t\t\tt.Errorf(\"RangeIterator item %v ![%v, %v]\", item, lower, upper)\n\t\t\t}\n\t\t}\n\t\tlower = Int(-1)\n\t\tupper = Int(3)\n\t\titer = tree.RangeIterator(lower, upper)\n\t\tfor item := iter(); item != nil; item = iter() {\n\t\t\tif item.Less(lower) || upper.Less(item) {\n\t\t\t\tt.Errorf(\"RangeIterator item %v ![%v, %v]\", item, lower, upper)\n\t\t\t}\n\t\t}\n\t\tlower = Int(3)\n\t\tupper = Int(9)\n\t\titer = tree.RangeIterator(lower, upper)\n\t\tfor item := iter(); item != nil; item = iter() {\n\t\t\tif item.Less(lower) || upper.Less(item) {\n\t\t\t\tt.Errorf(\"RangeIterator item %v ![%v, %v]\", item, lower, upper)\n\t\t\t}\n\t\t}\n\t\tlower = Int(9)\n\t\tupper = Int(29)\n\t\titer = tree.RangeIterator(lower, upper)\n\t\tfor item := iter(); item != nil; item = iter() {\n\t\t\tif item.Less(lower) || upper.Less(item) {\n\t\t\t\tt.Errorf(\"RangeIterator item %v ![%v, %v]\", item, lower, upper)\n\t\t\t}\n\t\t}\n\t}\n}", "func (r Range) 
Overlaps(r1 Range) bool {\n\treturn r[0] <= r1[1] && r[1] >= r1[0]\n}", "func Open(lower Value, upper Value) *ValueRange {\n\tif lower.Compare(upper) != -1 {\n\t\tpanic(\"lower needs to be smaller than upper\")\n\t}\n\n\treturn &ValueRange{\n\t\tlowerEndPoint: &EndPoint{value: lower, boundType: BoundTypeOpen},\n\t\tupperEndPoint: &EndPoint{value: upper, boundType: BoundTypeOpen},\n\t}\n}", "func (A Coor) Inside(B Coor) bool {\n\tif A.Start >= B.Start && A.End <= B.End {\n\t\treturn true\n\t} else {\n\t\treturn false\n\t}\n}", "func containsXY(minx, miny, maxx, maxy, x, y float64) bool {\n\treturn (x >= minx) && (x <= maxx) && (y >= miny) && (y <= maxy)\n}", "func IntRange(min, max int) int {\n\tif min > max {\n\t\tmin, max = max, min\n\t}\n\n\treturn Int(max-min) + min\n}", "func (s *BasejossListener) EnterRangeVal(ctx *RangeValContext) {}", "func (r *Range) IsValid(v Value) bool {\n\tif r.Kind() != v.Kind() {\n\t\treturn false\n\t}\n\tswitch r.Kind() {\n\tcase Integer:\n\t\treturn v.Int() >= r.Start.Int() && v.Int() < r.End.Int()\n\tcase Real:\n\t\treturn v.Float() >= r.Start.Float() && v.Float() < r.End.Float()\n\tdefault:\n\t\tpanic(r)\n\t}\n}", "func (v *ValueRange) Contains(value Value) bool {\n\treturn v.Compare(value) == 0\n}", "func RectWithin(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY float64) bool {\n\trv := !(aMinX < bMinX || aMinY < bMinY || aMaxX > bMaxX || aMaxY > bMaxY)\n\treturn rv\n}", "func (b ValExprBuilder) Between(from interface{}, to interface{}) BoolExprBuilder {\n\treturn b.makeRangeCond(astBetween, from, to)\n}", "func ECDH_IN_RANGE(S []byte) bool {\n\tr := NewBIGints(CURVE_Order)\n\ts := FromBytes(S)\n\tif s.iszilch() {\n\t\treturn false\n\t}\n if Comp(s,r)>=0 {\n\t\treturn false\n\t}\n\treturn true\n}" ]
[ "0.8011385", "0.7935231", "0.78916985", "0.76261526", "0.7270945", "0.72031903", "0.7193918", "0.7107037", "0.70942944", "0.6980476", "0.697944", "0.69790214", "0.69614846", "0.69448054", "0.6925115", "0.68934387", "0.68205553", "0.67717755", "0.67509115", "0.6705743", "0.6649745", "0.66217685", "0.66109395", "0.6580321", "0.65386075", "0.6524916", "0.6449482", "0.63777095", "0.636307", "0.6346053", "0.6339621", "0.6294985", "0.6286289", "0.6285378", "0.62774074", "0.6252149", "0.6250888", "0.6220059", "0.62083316", "0.61940414", "0.6161138", "0.6156428", "0.6142927", "0.6138545", "0.6133084", "0.6122201", "0.6114412", "0.6100223", "0.6082805", "0.6063366", "0.6047323", "0.60462195", "0.60317767", "0.60043067", "0.59776604", "0.5955905", "0.5955721", "0.5954151", "0.59222984", "0.5899201", "0.5888", "0.5881736", "0.58811307", "0.5864442", "0.5851327", "0.58253133", "0.5803668", "0.5785046", "0.57812506", "0.5770001", "0.5742652", "0.57417345", "0.57333565", "0.5720924", "0.57120943", "0.5705741", "0.56717116", "0.5662615", "0.5654372", "0.56452084", "0.56309247", "0.56284803", "0.5612281", "0.5593677", "0.55859035", "0.5567421", "0.5566521", "0.55636877", "0.5554659", "0.5553193", "0.55459744", "0.5540939", "0.5536676", "0.55353993", "0.55320054", "0.5531927", "0.5528657", "0.55269784", "0.5523938", "0.5514944" ]
0.87714565
0
Run executes the pull command.
func (c *PullCommand) Run(args []string) int { cmdFlags := flag.NewFlagSet("pull", flag.ContinueOnError) cmdFlags.Usage = func() { c.UI.Output(c.Help()) } config := c.Config cmdFlags.StringVar(&config.Secret, "secret", config.Secret, "") cmdFlags.StringVar(&config.TargetDirectory, "target", config.TargetDirectory, "") cmdFlags.StringVar(&config.Encoding, "encoding", config.Encoding, "") cmdFlags.StringVar(&config.Format, "format", config.Format, "") req := new(phrase.DownloadRequest) cmdFlags.StringVar(&req.Tag, "tag", "", "") var updatedSince string cmdFlags.StringVar(&updatedSince, "updated-since", "", "") cmdFlags.BoolVar(&req.ConvertEmoji, "convert-emoji", false, "") cmdFlags.BoolVar(&req.SkipUnverifiedTranslations, "skip-unverified-translations", false, "") cmdFlags.BoolVar(&req.IncludeEmptyTranslations, "include-empty-translations", false, "") if err := cmdFlags.Parse(args); err != nil { return 1 } if updatedSince != "" { var err error req.UpdatedSince, err = time.Parse(timeFormat, updatedSince) if err != nil { c.UI.Error(fmt.Sprintf("Error parsing updated-since (%s), format should be YYYYMMDDHHMMSS", updatedSince)) return 1 } } if config.Format == "" { config.Format = defaultDownloadFormat } c.API.AuthToken = config.Secret req.Encoding = config.Encoding req.Format = config.Format if err := config.Valid(); err != nil { c.UI.Error(err.Error()) return 1 } err := c.fetch(req, cmdFlags.Args()) if err != nil { c.UI.Error(fmt.Sprintf("Error encountered fetching the locales:\n\t%s", err.Error())) return 1 } return 0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (p *PullCommand) runPull(args []string) error {\n\treturn pullMissingImage(context.Background(), p.cli.Client(), args[0], true)\n}", "func (config *ReleaseCommandConfig) Run() error {\n\n\tgit, err := gitpkg.GetGit()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = release(git)\n\n\treturn err\n}", "func executePull(location string) {\n\tfmt.Println(\"Pulling from \" + location + \" ...\")\n}", "func (c *PushCommand) Run(args []string) int {\n\n\treturn 0\n}", "func (o *Options) Run() error {\n\terr := o.Validate()\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to validate options\")\n\t}\n\n\terr = o.upgradeRepository(o.TargetGitURL)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to create Pull Request on repository %s\", o.TargetGitURL)\n\t}\n\treturn nil\n}", "func (c *config) pull(remote string, branch string) (output string, err error) {\n\tlog.Printf(\"pulling: %v/%v\", remote, branch)\n\n\tdefaultCommand := []string{\"pull\", remote, branch}\n\n\treturn c.command(defaultCommand...)\n}", "func (c *PruneCommand) Run() error {\n\n\tif c.fs.NArg() < 2 {\n\t\treturn fmt.Errorf(\"usage: prune <username> <password> ::(auth to dockerhub.io)\")\n\t}\n\n\tauth, err := readAuth()\n\tif err != nil {\n\t\treturn errors.New(\"Could not find auth file. 
Please run `login` command\")\n\t}\n\n\treturn app.Prune(*auth, c.fs.Arg(0), c.fs.Arg(1))\n}", "func (r *Runner) Run(ctx context.Context, remoteName string, remoteUrl string) error {\n\tlog.Infof(\"running git-remote-dgit on remote %s with url %s\", remoteName, remoteUrl)\n\n\t// get the named remote as reported by git, but then\n\t// create a new remote with only the url specified\n\t// this is for cases when a remote has multiple urls\n\t// specified for push / fetch\n\tnamedRemote, err := r.local.Remote(remoteName)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = namedRemote.Config().Validate()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Invalid remote config: %v\", err)\n\t}\n\n\tremote := git.NewRemote(r.local.Storer, &config.RemoteConfig{\n\t\tName: namedRemote.Config().Name,\n\t\tFetch: namedRemote.Config().Fetch,\n\t\tURLs: []string{remoteUrl},\n\t})\n\n\tstdinReader := bufio.NewReader(r.stdin)\n\n\tfor {\n\t\tvar err error\n\n\t\tcommand, err := stdinReader.ReadString('\\n')\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tcommand = strings.TrimSpace(command)\n\t\tcommandParts := strings.Split(command, \" \")\n\n\t\tlog.Infof(\"received command on stdin %s\", command)\n\n\t\targs := strings.TrimSpace(strings.TrimPrefix(command, commandParts[0]))\n\t\tcommand = commandParts[0]\n\n\t\tswitch command {\n\t\tcase \"capabilities\":\n\t\t\tr.respond(strings.Join([]string{\n\t\t\t\t\"*push\",\n\t\t\t\t\"*fetch\",\n\t\t\t}, \"\\n\") + \"\\n\")\n\t\t\tr.respond(\"\\n\")\n\t\tcase \"list\":\n\t\t\trefs, err := remote.List(&git.ListOptions{})\n\n\t\t\tif err == transport.ErrRepositoryNotFound && args == \"for-push\" {\n\t\t\t\tr.respond(\"\\n\")\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif err == transport.ErrRepositoryNotFound {\n\t\t\t\treturn fmt.Errorf(msg.RepoNotFound)\n\t\t\t}\n\n\t\t\tif err == transport.ErrEmptyRemoteRepository || len(refs) == 0 {\n\t\t\t\tr.respond(\"\\n\")\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif err != nil {\n\t\t\t\treturn 
err\n\t\t\t}\n\n\t\t\tvar head string\n\n\t\t\tlistResponse := make([]string, len(refs))\n\t\t\tfor i, ref := range refs {\n\t\t\t\tlistResponse[i] = fmt.Sprintf(\"%s %s\", ref.Hash(), ref.Name())\n\n\t\t\t\t// TODO: set default branch in repo chaintree which\n\t\t\t\t// would become head here\n\t\t\t\t//\n\t\t\t\t// if master head exists, use that\n\t\t\t\tif ref.Name() == \"refs/heads/master\" {\n\t\t\t\t\thead = ref.Name().String()\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tsort.Slice(listResponse, func(i, j int) bool {\n\t\t\t\treturn strings.Split(listResponse[i], \" \")[1] < strings.Split(listResponse[j], \" \")[1]\n\t\t\t})\n\n\t\t\t// if head is empty, use last as default\n\t\t\tif head == \"\" {\n\t\t\t\thead = strings.Split(listResponse[len(listResponse)-1], \" \")[1]\n\t\t\t}\n\n\t\t\tr.respond(\"@%s HEAD\\n\", head)\n\t\t\tr.respond(\"%s\\n\", strings.Join(listResponse, \"\\n\"))\n\t\t\tr.respond(\"\\n\")\n\t\tcase \"push\":\n\t\t\trefSpec := config.RefSpec(args)\n\n\t\t\tendpoint, err := transport.NewEndpoint(remote.Config().URLs[0])\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tauth, err := r.auth()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tlog.Debugf(\"auth for push: %s %s\", auth.Name(), auth.String())\n\n\t\t\terr = remote.PushContext(ctx, &git.PushOptions{\n\t\t\t\tRemoteName: remote.Config().Name,\n\t\t\t\tRefSpecs: []config.RefSpec{refSpec},\n\t\t\t\tAuth: auth,\n\t\t\t})\n\n\t\t\tif err == transport.ErrRepositoryNotFound {\n\t\t\t\terr = nil // reset err back to nil\n\t\t\t\tclient, err := dgit.Default()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\n\t\t\t\t_, err = client.CreateRepoTree(ctx, endpoint, auth, os.Getenv(\"DGIT_OBJ_STORAGE\"))\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\n\t\t\t\t// Retry push now that repo exists\n\t\t\t\terr = remote.PushContext(ctx, &git.PushOptions{\n\t\t\t\t\tRemoteName: remote.Config().Name,\n\t\t\t\t\tRefSpecs: []config.RefSpec{refSpec},\n\t\t\t\t\tAuth: 
auth,\n\t\t\t\t})\n\t\t\t}\n\n\t\t\tdst := refSpec.Dst(plumbing.ReferenceName(\"*\"))\n\t\t\tif err != nil && err != git.NoErrAlreadyUpToDate {\n\t\t\t\tr.respond(\"error %s %s\\n\", dst, err.Error())\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tr.respond(\"ok %s\\n\", dst)\n\t\t\tr.respond(\"\\n\")\n\t\tcase \"fetch\":\n\t\t\tsplitArgs := strings.Split(args, \" \")\n\t\t\tif len(splitArgs) != 2 {\n\t\t\t\treturn fmt.Errorf(\"incorrect arguments for fetch, received %s, expected 'hash refname'\", args)\n\t\t\t}\n\n\t\t\trefName := plumbing.ReferenceName(splitArgs[1])\n\n\t\t\trefSpecs := []config.RefSpec{}\n\n\t\t\tlog.Debugf(\"remote fetch config %v\", remote.Config().Name)\n\n\t\t\tfor _, fetchRefSpec := range remote.Config().Fetch {\n\t\t\t\tif !fetchRefSpec.Match(refName) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\n\t\t\t\tnewRefStr := \"\"\n\t\t\t\tif fetchRefSpec.IsForceUpdate() {\n\t\t\t\t\tnewRefStr += \"+\"\n\t\t\t\t}\n\t\t\t\tnewRefStr += refName.String() + \":\" + fetchRefSpec.Dst(refName).String()\n\n\t\t\t\tnewRef := config.RefSpec(newRefStr)\n\n\t\t\t\tif err := newRef.Validate(); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\n\t\t\t\tlog.Debugf(\"attempting to fetch on %s\", newRef.String())\n\t\t\t\trefSpecs = append(refSpecs, newRef)\n\t\t\t}\n\n\t\t\terr := remote.FetchContext(ctx, &git.FetchOptions{\n\t\t\t\tRemoteName: remote.Config().Name,\n\t\t\t\tRefSpecs: refSpecs,\n\t\t\t})\n\t\t\tif err != nil && err != git.NoErrAlreadyUpToDate {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tlog.Debugf(\"fetch complete\")\n\t\t\tr.respond(\"\\n\")\n\t\t// Connect can be used for upload / receive pack\n\t\t// case \"connect\":\n\t\t// \tr.respond(\"fallback\\n\")\n\t\tcase \"\": // command stream terminated, return out\n\t\t\treturn nil\n\t\tdefault:\n\t\t\treturn fmt.Errorf(\"Command '%s' not handled\", command)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (cmd PullCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {\n\tap := 
cmd.createArgParser()\n\thelp, usage := cli.HelpAndUsagePrinters(commandStr, pullShortDesc, pullLongDesc, pullSynopsis, ap)\n\tapr := cli.ParseArgs(ap, args, help)\n\tbranch := dEnv.RepoState.CWBHeadRef()\n\n\tvar verr errhand.VerboseError\n\tvar remoteName string\n\tif apr.NArg() > 1 {\n\t\tverr = errhand.BuildDError(\"\").SetPrintUsage().Build()\n\t} else {\n\t\tif apr.NArg() == 1 {\n\t\t\tremoteName = apr.Arg(0)\n\t\t}\n\n\t\tvar refSpecs []ref.RemoteRefSpec\n\t\trefSpecs, verr = dEnv.GetRefSpecs(remoteName)\n\n\t\tif verr == nil {\n\t\t\tif len(refSpecs) == 0 {\n\t\t\t\tverr = errhand.BuildDError(\"error: no refspec for remote\").Build()\n\t\t\t} else {\n\t\t\t\tremote := dEnv.RepoState.Remotes[refSpecs[0].GetRemote()]\n\n\t\t\t\tfor _, refSpec := range refSpecs {\n\t\t\t\t\tif remoteTrackRef := refSpec.DestRef(branch); remoteTrackRef != nil {\n\t\t\t\t\t\tverr = pullRemoteBranch(ctx, dEnv, remote, branch, remoteTrackRef)\n\n\t\t\t\t\t\tif verr != nil {\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn HandleVErrAndExitCode(verr, usage)\n}", "func (cmd *DownCmd) Run(cobraCmd *cobra.Command, args []string) {\n\tlog.StartFileLogging()\n\n\tconfig := configutil.GetConfig(false)\n\n\treleaseName := *config.DevSpace.Release.Name\n\tkubectl, err := kubectl.NewClient()\n\n\tif err != nil {\n\t\tlog.Fatalf(\"Unable to create new kubectl client: %s\", err.Error())\n\t}\n\n\tclient, err := helmClient.NewClient(kubectl, false)\n\n\tif err != nil {\n\t\tlog.Fatalf(\"Unable to initialize helm client: %s\", err.Error())\n\t}\n\n\tlog.StartWait(\"Deleting release \" + releaseName)\n\tres, err := client.DeleteRelease(releaseName, true)\n\tlog.StopWait()\n\n\tif res != nil && res.Info != \"\" {\n\t\tlog.Donef(\"Successfully deleted release %s: %s\", releaseName, res.Info)\n\t} else if err != nil {\n\t\tlog.Donef(\"Error deleting release %s: %s\", releaseName, err.Error())\n\t} else {\n\t\tlog.Donef(\"Successfully deleted release %s\", 
releaseName)\n\t}\n}", "func (o *Options) Run() error {\n\terr := o.Validate()\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to validate options\")\n\t}\n\n\tpr, err := o.discoverPullRequest()\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to discover pull request\")\n\t}\n\n\tlog.Logger().Infof(\"found PullRequest %s\", pr.Link)\n\n\tpreview, _, err := previews.GetOrCreatePreview(o.PreviewClient, o.Namespace, pr, o.PreviewHelmfile)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to upsert the Preview resource in namespace %s\", o.Namespace)\n\t}\n\tlog.Logger().Infof(\"upserted preview %s\", preview.Name)\n\n\treturn o.helmfileSyncPreview(pr, preview)\n}", "func Run(cmd *cobra.Command, args []string) {\n\tvar repo *dbt.DBTRepoServer\n\n\tif configFile != \"\" {\n\t\tr, err := dbt.NewRepoServer(configFile)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Failed to create reposerver from file: %s\", err)\n\t\t}\n\n\t\trepo = r\n\n\t} else {\n\t\trepo = &dbt.DBTRepoServer{\n\t\t\tAddress: address,\n\t\t\tPort: port,\n\t\t\tServerRoot: serverRoot,\n\t\t}\n\t}\n\n\tif repo == nil {\n\t\tlog.Fatalf(\"Failed to initialize reposerver object. Cannot continue.\")\n\t}\n\n\terr := repo.RunRepoServer()\n\tif err != nil {\n\t\tlog.Fatalf(\"Error running server: %s\", err)\n\t}\n}", "func pullCmd(c *cli.Context) error {\n\tvar fqRegistries []string\n\n\targs := c.Args()\n\tif len(args) == 0 {\n\t\tlogrus.Errorf(\"an image name must be specified\")\n\t\treturn nil\n\t}\n\tif len(args) > 1 {\n\t\tlogrus.Errorf(\"too many arguments. 
Requires exactly 1\")\n\t\treturn nil\n\t}\n\timage := args[0]\n\tsrcRef, err := alltransports.ParseImageName(image)\n\tif err != nil {\n\t\tfqRegistries, err = getRegistriesToTry(image)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t} else {\n\t\tfqRegistries = append(fqRegistries, srcRef.DockerReference().String())\n\t}\n\truntime, err := getRuntime(c)\n\tdefer runtime.Shutdown(false)\n\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"could not create runtime\")\n\t}\n\tfor _, fqname := range fqRegistries {\n\t\tfmt.Printf(\"Trying to pull %s...\", fqname)\n\t\tif err := runtime.PullImage(fqname, c.Bool(\"all-tags\"), os.Stdout); err != nil {\n\t\t\tfmt.Printf(\" Failed\\n\")\n\t\t} else {\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn errors.Errorf(\"error pulling image from %q\", image)\n}", "func (gen *Generator) Pull() error {\n\tLog.Info(\"pull\", fmt.Sprintf(\"performing git pull in: %s\", gen.Template.Directory))\n\tGitPull := templates.CommandOptions{\n\t\tCmd: \"git pull\",\n\t\tDir: gen.Template.Directory.ToString(),\n\t\tUseStdOut: true,\n\t}\n\t_, err := templates.Run(GitPull)\n\treturn err\n}", "func (c *updateCmd) Run(_ *buildChild) error {\n\treturn nil\n}", "func runCmd() {\n\tgoPing()\n}", "func (b *binding) Pull(ctx context.Context, remote, local string) error {\n\treturn b.Command(\"pull\", remote, local).Run(ctx)\n}", "func (cmd *DownloadFirmwareCommand) Run(c *client.Client, args []string) error {\n\tvar path string\n\tif len(args) > 0 {\n\t\tpath = args[0]\n\t} else {\n\t\tpath = fmt.Sprintf(\"/firmware/%v/download\", cmd.FirmwareID)\n\t}\n\tlogger := goa.NewLogger(log.New(os.Stderr, \"\", log.LstdFlags))\n\tctx := goa.WithLogger(context.Background(), logger)\n\tresp, err := c.DownloadFirmware(ctx, path)\n\tif err != nil {\n\t\tgoa.LogError(ctx, \"failed\", \"err\", err)\n\t\treturn err\n\t}\n\n\tgoaclient.HandleResponse(c.Client, resp, cmd.PrettyPrint)\n\treturn nil\n}", "func Run() error {\n\treturn command.Execute()\n}", "func Run() 
{\n\toptsUser := &github.RepositoryListOptions{\n\t\tAffiliation: viper.GetString(\"affiliation\"),\n\t\tDirection: viper.GetString(\"direction\"),\n\t\tListOptions: github.ListOptions{PerPage: viper.GetInt(\"count\")},\n\t\tSort: viper.GetString(\"sort\"),\n\t\tType: viper.GetString(\"type\"),\n\t\tVisibility: viper.GetString(\"visibility\"),\n\t}\n\n\toptsOrg := &github.RepositoryListByOrgOptions{\n\t\tDirection: viper.GetString(\"direction\"),\n\t\tListOptions: github.ListOptions{PerPage: viper.GetInt(\"count\")},\n\t\tSort: viper.GetString(\"sort\"),\n\t\tType: viper.GetString(\"type\"),\n\t}\n\n\tvar queryFunc func(client *github.Client, name string, page int) ([]*github.Repository, *github.Response, error)\n\n\tif viper.GetBool(\"user\") {\n\t\tqueryFunc = userQueryFunc(optsUser)\n\t} else {\n\t\tqueryFunc = orgQueryFunc(optsOrg)\n\t}\n\n\trepos := queryRepos(\n\t\tnewClient(viper.GetString(\"token\")),\n\t\tviper.GetString(\"prefix\"),\n\t\tviper.GetString(\"name\"),\n\t\tqueryFunc,\n\t)\n\n\tinternal.RenderTemplate(\n\t\tviper.GetString(\"prefix\"),\n\t\tviper.GetString(\"name\"),\n\t\tconvertRepos(repos),\n\t\tviper.GetString(\"template\"),\n\t\tos.Stdout,\n\t)\n}", "func Run(state *core.BuildState, label core.AnnotatedOutputLabel, args []string, remote, env, inTmp bool, dir, overrideCmd string) {\n\tprepareRun()\n\n\trun(context.Background(), state, label, args, false, false, remote, env, false, inTmp, dir, overrideCmd)\n}", "func (p *Publisher) Run() error {\n\tspecResponse, err := p.sdAPI.PostCommand(p.commandSpec)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Post failed: %v\", err)\n\t}\n\n\terr = p.tagCommand(specResponse)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Tag failed: %v\", err)\n\t}\n\n\t// Published successfully\n\t// Show version number of command published by sd-cmd\n\tfmt.Println(specResponse.Version)\n\n\treturn nil\n}", "func Run() {\n\tcmd.Execute()\n}", "func Run(c *cli.Context) {\n\tport, err := osExpandAndVerifyGlobal(c, 
\"port\")\n\tif err != nil {\n\t\tfail(err)\n\t}\n\tcommand, err := osExpandAndVerify(c, \"command\")\n\tif err != nil {\n\t\tfail(err)\n\t}\n\thost, err := osExpandAndVerify(c, \"host\")\n\tif err != nil {\n\t\tfail(err)\n\t}\n\tuser, err := osExpandAndVerify(c, \"user\")\n\tif err != nil {\n\t\tfail(err)\n\t}\n\tconnection := c.String(\"connection\")\n\tif connection == ansible.ConnectionWinRM {\n\t\tpassword, err := osExpandAndVerify(c, \"password\")\n\t\tif err != nil {\n\t\t\tfail(err)\n\t\t}\n\t\terr = winrm.RemoteWinRmCommand(user, password, host, port, command, nil, nil, \"\")\n\t} else {\n\t\tprivatekey, err := osExpandAndVerify(c, \"privatekey\")\n\t\tif err != nil {\n\t\t\tfail(err)\n\t\t}\n\t\tenvVars := make(map[string]string)\n\t\terr = ssh.RemoteSSHCommand(user, privatekey, host, port, command, envVars)\n\t}\n\tif err != nil {\n\t\tlog.Err(\"Failed: %v\", err)\n\t}\n}", "func Run(_conn db.Conn, _dk docker.Client, _role db.Role) {\n\tconn = _conn\n\tdk = _dk\n\trole = _role\n\n\timageSet := map[string]struct{}{}\n\tfor _, image := range imageMap {\n\t\timageSet[image] = struct{}{}\n\t}\n\n\tfor image := range imageSet {\n\t\tgo dk.Pull(image)\n\t}\n\n\tswitch role {\n\tcase db.Master:\n\t\trunMaster()\n\tcase db.Worker:\n\t\trunWorker()\n\t}\n}", "func (d *downloadCommand) Run(args []string) int {\n\tif len(args) < 2 {\n\t\tfmt.Println(\"The download command expects exactly two arguments.\")\n\t\tfmt.Printf(\"%s\\n\", d.Help())\n\t\treturn 1\n\t}\n\tproduct := args[0]\n\tversion := args[1]\n\n\tproductURL, err := url.Parse(hcrelease.ReleasesURL)\n\tif err != nil {\n\t\treturn 1\n\t}\n\tproductURL.Path = \"index.json\"\n\n\tproducts, err := hcrelease.GetProducts(productURL.String())\n\tif err != nil {\n\t\tfmt.Printf(\"failed to retrieve product details: %v\", err)\n\t\treturn 1\n\t}\n\n\trelease, err := products.GetRelease(product)\n\tif err != nil {\n\t\tfmt.Printf(\"failed to retrieve release details: %v\", err)\n\t\treturn 1\n\t}\n\n\tver, err := 
release.GetVersion(version)\n\tif err != nil {\n\t\tfmt.Printf(\"failed to retrieve version details: %v\", err)\n\t\treturn 1\n\t}\n\n\tbuild, err := ver.GetBuild(runtime.GOOS, runtime.GOARCH)\n\tif err != nil {\n\t\tfmt.Printf(\"failed to retrieve build details: %v\", err)\n\t\treturn 1\n\t}\n\n\toutFile, err := os.Create(build.Filename)\n\tif err != nil {\n\t\treturn 1\n\t}\n\tdefer outFile.Close()\n\n\terr = build.Download(outFile)\n\tif err != nil {\n\t\tfmt.Printf(\"failed to download build: %v\", err)\n\t\treturn 1\n\t}\n\n\treturn 0\n}", "func (v *VersionCommand) Run(args []string) {\n\tapiClient := v.cli.Client()\n\n\tresult, err := apiClient.SystemVersion()\n\tif err != nil {\n\t\tfmt.Fprintf(os.Stderr, \"failed to get system version: %v\\n\", err)\n\t\treturn\n\t}\n\n\tv.cli.Print(result)\n}", "func (step *FetchUpstreamStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {\n\treturn repo.Logging.FetchUpstream(step.BranchName)\n}", "func (c *ReleaseLatestCommand) Run(args []string) int {\n\tcmdFlags := flag.NewFlagSet(\"release latest\", flag.ContinueOnError)\n\tcmdFlags.StringVarP(&c.sourceType, \"source-type\", \"s\", \"github\", \"A type of release data source\")\n\n\tif err := cmdFlags.Parse(args); err != nil {\n\t\tc.UI.Error(fmt.Sprintf(\"failed to parse arguments: %s\", err))\n\t\treturn 1\n\t}\n\n\tif len(cmdFlags.Args()) != 1 {\n\t\tc.UI.Error(fmt.Sprintf(\"The command expects 1 argument, but got %d\", len(cmdFlags.Args())))\n\t\tc.UI.Error(c.Help())\n\t\treturn 1\n\t}\n\n\tc.source = cmdFlags.Arg(0)\n\n\tr, err := newRelease(c.sourceType, c.source)\n\tif err != nil {\n\t\tc.UI.Error(err.Error())\n\t\treturn 1\n\t}\n\n\tv, err := release.Latest(context.Background(), r)\n\tif err != nil {\n\t\tc.UI.Error(err.Error())\n\t\treturn 1\n\t}\n\n\tc.UI.Output(v)\n\treturn 0\n}", "func Run(url, updateCmd string) {\n\tt := time.NewTicker(1 * time.Minute)\n\tfor {\n\t\t<-t.C\n\n\t\tr, err := http.Head(url)\n\t\tif err != nil 
{\n\t\t\tlogrus.Errorln(err)\n\t\t\tcontinue\n\t\t} else if r.StatusCode >= 300 {\n\t\t\tlogrus.Errorf(\"HEAD request returned status code %d\", r.StatusCode)\n\t\t\tcontinue\n\t\t}\n\n\t\tlm, err := time.Parse(time.RFC1123, r.Header.Get(\"Last-Modified\"))\n\t\tif err != nil {\n\t\t\tlogrus.Errorln(err)\n\t\t\tcontinue\n\t\t}\n\n\t\tif !lm.After(meta.BuildTime.Add(5 * time.Minute)) { // buffer for compile and upload time\n\t\t\tcontinue\n\t\t}\n\n\t\tlogrus.Println(\"updating binary...\")\n\n\t\terr = updateBin(url)\n\t\tif err != nil {\n\t\t\tlogrus.Errorln(err)\n\t\t\t// todo: may need to recover here, if stuck without binary\n\t\t\tcontinue\n\t\t}\n\n\t\tparts := strings.Fields(updateCmd)\n\t\terr = exec.Command(parts[0], parts[1:]...).Start()\n\t\tif err != nil {\n\t\t\tlogrus.Errorln(err)\n\t\t\tcontinue\n\t\t}\n\n\t\treturn // stop checking for updates in case restart takes a while\n\t}\n}", "func (c *RestoreCommand) Run(args []string) int {\n\tif len(args) != 1 {\n\t\tc.UI.Error(\"You need to specify a restore file path from base of bucket\")\n\t\treturn 1\n\t}\n\n\tc.UI.Info(fmt.Sprintf(\"v%v: Starting Consul Snapshot\", c.Version))\n\tresponse := restore.Runner(args[0])\n\treturn response\n}", "func Run(w io.Writer, workdir, outdir, platform string) error {\n\tif err := packages.Install(w); err != nil {\n\t\treturn err\n\t}\n\tif err := rump.PrepareRumpRepo(w, workdir); err != nil {\n\t\treturn err\n\t}\n\tif err := rump.BuildRump(w, workdir, outdir, platform); err != nil {\n\t\treturn err\n\t}\n\tif err := rump.ApplyPatches(filepath.Join(outdir, \"rumprun\"), platform); err != nil {\n\t\treturn err\n\t}\n\tif err := rump.BuildRump(w, workdir, outdir, platform); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func pullExample() string {\n\treturn `$ pouch images\nIMAGE ID IMAGE NAME SIZE\nbbc3a0323522 docker.io/library/busybox:latest 703.14 KB\n$ pouch pull docker.io/library/redis:alpine\n$ pouch images\nIMAGE ID IMAGE NAME SIZE\nbbc3a0323522 
docker.io/library/busybox:latest 703.14 KB\n0153c5db97e5 docker.io/library/redis:alpine 9.63 MB`\n}", "func (c *jcliPluginFetchCmd) Run(cmd *cobra.Command, args []string) (err error) {\n\tvar userHome string\n\tif userHome, err = homedir.Dir(); err != nil {\n\t\treturn\n\t}\n\n\tpluginRepo := fmt.Sprintf(\"%s/.jenkins-cli/plugins-repo\", userHome)\n\tc.output = cmd.OutOrStdout()\n\n\tvar r *git.Repository\n\tif r, err = git.PlainOpen(pluginRepo); err == nil {\n\t\tvar w *git.Worktree\n\t\tif w, err = r.Worktree(); err != nil {\n\t\t\treturn\n\t\t}\n\n\t\tif c.Reset {\n\t\t\tif err = w.Reset(&git.ResetOptions{\n\t\t\t\tMode: git.HardReset,\n\t\t\t}); err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\terr = w.Pull(c.getPullOptions())\n\t\tif err == git.NoErrAlreadyUpToDate {\n\t\t\terr = nil // consider it's ok\n\t\t}\n\t} else {\n\t\tcloneOptions := c.getCloneOptions()\n\t\t_, err = git.PlainClone(pluginRepo, false, cloneOptions)\n\t}\n\treturn\n}", "func (d *DockerHTTP) Run(definition shared.Definition, rootfsDir string) error {\n\tabsRootfsDir, err := filepath.Abs(rootfsDir)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// If DOCKER_REGISTRY_BASE is not set it's used default https://registry-1.docker.io\n\treturn dcapi.DownloadAndUnpackImage(definition.Source.URL, absRootfsDir, &dcapi.DownloadOpts{\n\t\tRegistryBase: os.Getenv(\"DOCKER_REGISTRY_BASE\"),\n\t\tKeepLayers: false,\n\t})\n}", "func (cmd *UpdatePostCommand) Run(c *client.Client, args []string) error {\n\tvar path string\n\tif len(args) > 0 {\n\t\tpath = args[0]\n\t} else {\n\t\tpath = fmt.Sprintf(\"/posts/%v\", cmd.PostID)\n\t}\n\tvar payload client.UpdatePostPayload\n\tif cmd.Payload != \"\" {\n\t\terr := json.Unmarshal([]byte(cmd.Payload), &payload)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to deserialize payload: %s\", err)\n\t\t}\n\t}\n\tlogger := goa.NewLogger(log.New(os.Stderr, \"\", log.LstdFlags))\n\tctx := goa.WithLogger(context.Background(), logger)\n\tresp, err := 
c.UpdatePost(ctx, path, &payload)\n\tif err != nil {\n\t\tgoa.LogError(ctx, \"failed\", \"err\", err)\n\t\treturn err\n\t}\n\n\tgoaclient.HandleResponse(c.Client, resp, PrettyPrint)\n\treturn nil\n}", "func (cmd *ShowPostCommand) Run(c *client.Client, args []string) error {\n\tvar path string\n\tif len(args) > 0 {\n\t\tpath = args[0]\n\t} else {\n\t\tpath = fmt.Sprintf(\"/posts/%v\", cmd.PostID)\n\t}\n\tlogger := goa.NewLogger(log.New(os.Stderr, \"\", log.LstdFlags))\n\tctx := goa.WithLogger(context.Background(), logger)\n\tresp, err := c.ShowPost(ctx, path)\n\tif err != nil {\n\t\tgoa.LogError(ctx, \"failed\", \"err\", err)\n\t\treturn err\n\t}\n\n\tgoaclient.HandleResponse(c.Client, resp, PrettyPrint)\n\treturn nil\n}", "func (c *CmdGitMdget) Run() error {\n\tcli, err := GetGitClient(c.G())\n\tctx := context.Background()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar res []keybase1.GitRepoResult\n\tif len(c.folder) > 0 {\n\t\tfolder, err := ParseTLF(c.folder)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tres, err = cli.GetGitMetadata(ctx, folder)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\tres, err = cli.GetAllGitMetadata(ctx)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tjsonStr, err := json.MarshalIndent(res, \"\", \" \")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Println(string(jsonStr))\n\treturn nil\n}", "func Run(conf *Config, fs sys.FS, env sys.Env, storageDriver storagedriver.StorageDriver) error {\n\tlog.Debug(\"Running git hook\")\n\n\tbuilderKey, err := builderconf.GetBuilderKey()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tkubeClient, err := client.NewInCluster()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"couldn't reach the api server (%s)\", err)\n\t}\n\n\tscanner := bufio.NewScanner(os.Stdin)\n\tfor scanner.Scan() {\n\t\tline := scanner.Text()\n\t\toldRev, newRev, refName, err := readLine(line)\n\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"reading STDIN (%s)\", err)\n\t\t}\n\n\t\tlog.Debug(\"read 
[%s,%s,%s]\", oldRev, newRev, refName)\n\n\t\t// if we're processing a receive-pack on an existing repo, run a build\n\t\tif strings.HasPrefix(conf.SSHOriginalCommand, \"git-receive-pack\") {\n\t\t\tif err := build(conf, storageDriver, kubeClient, fs, env, builderKey, newRev); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn scanner.Err()\n}", "func Pull(dir, url, auth string, stdout, stderr io.Writer) error {\n\tvar user, password string\n\tif auth != \"\" {\n\t\ta := strings.Split(auth, \":\")\n\t\tuser, password = a[0], a[1]\n\t}\n\n\tswitch true {\n\tcase strings.HasSuffix(url, \".git\"):\n\t\treturn pullGit(dir, url, user, password, stdout, stderr)\n\tdefault:\n\t\treturn errors.New(\"unknown repo type\")\n\t}\n}", "func (l *Launcher) Run(args []string) error {\n\tclient, err := dockerclient.NewClient(l.dockerSocket, \"\", nil, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\tl.client = client\n\terr = l.pullImage()\n\tif err != nil {\n\t\treturn err\n\t}\n\tresp, err := l.createContainer(args)\n\tif err != nil {\n\t\treturn err\n\t}\n\tcontainerID := resp.ID\n\tfmt.Fprintf(l.Out, \"starting debug container...\\n\")\n\terr = l.startContainer(containerID)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer l.cleanContainer(containerID)\n\terr = l.attachToContainer(containerID)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (sshConfig *SSHConfig) Run(cmd string) (string, error) {\n\tb, err1 := sshConfig.rawRun(cmd)\n\treturn string(b), err1\n}", "func (cmd *DownloadCommand) Run(c *client.Client, args []string) error {\n\tvar (\n\t\tfnf func(context.Context, string) (int64, error)\n\t\tfnd func(context.Context, string, string) (int64, error)\n\n\t\trpath = args[0]\n\t\toutfile = cmd.OutFile\n\t\tlogger = goa.NewLogger(log.New(os.Stderr, \"\", log.LstdFlags))\n\t\tctx = goa.WithLogger(context.Background(), logger)\n\t\terr error\n\t)\n\n\tif rpath[0] != '/' {\n\t\trpath = \"/\" + rpath\n\t}\n\tif rpath == \"/swagger.json\" {\n\t\tfnf 
= c.DownloadSwaggerJSON\n\t\tif outfile == \"\" {\n\t\t\toutfile = \"swagger.json\"\n\t\t}\n\t\tgoto found\n\t}\n\tif rpath == \"/swagger.yaml\" {\n\t\tfnf = c.DownloadSwaggerYaml\n\t\tif outfile == \"\" {\n\t\t\toutfile = \"swagger.yaml\"\n\t\t}\n\t\tgoto found\n\t}\n\treturn fmt.Errorf(\"don't know how to download %s\", rpath)\nfound:\n\tctx = goa.WithLogContext(ctx, \"file\", outfile)\n\tif fnf != nil {\n\t\t_, err = fnf(ctx, outfile)\n\t} else {\n\t\t_, err = fnd(ctx, rpath, outfile)\n\t}\n\tif err != nil {\n\t\tgoa.LogError(ctx, \"failed\", \"err\", err)\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (cmd *GetImageProjectCommand) Run(c *client.Client, args []string) error {\n\tvar path string\n\tif len(args) > 0 {\n\t\tpath = args[0]\n\t} else {\n\t\tpath = fmt.Sprintf(\"/projects/%v/media\", cmd.ProjectID)\n\t}\n\tlogger := goa.NewLogger(log.New(os.Stderr, \"\", log.LstdFlags))\n\tctx := goa.WithLogger(context.Background(), logger)\n\tresp, err := c.GetImageProject(ctx, path)\n\tif err != nil {\n\t\tgoa.LogError(ctx, \"failed\", \"err\", err)\n\t\treturn err\n\t}\n\n\tgoaclient.HandleResponse(c.Client, resp, cmd.PrettyPrint)\n\treturn nil\n}", "func Cmd(method, source string, args Options) ([]byte, error) {\n\treturn fetch.Cmd(fetch.Request{\n\t\tMethod: method,\n\t\tURL: fmt.Sprintf(\n\t\t\t\"http://api.pullword.com/%s.php?source=%s&param1=%d&param2=%d\",\n\t\t\tmethod,\n\t\t\tsource,\n\t\t\targs.Threshold,\n\t\t\targs.Debug,\n\t\t),\n\t})\n}", "func (c *TestCommand) Run() error {\n\tlocalPath, err := c.build()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfile, err := os.Open(localPath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer file.Close()\n\n\tftpConn, err := c.config.DialFtp()\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer ftpConn.Close()\n\n\t_, name := filepath.Split(localPath)\n\tdronePath, err := ftpConn.Upload(file, name)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer ftpConn.Del(name)\n\n\ttelnetConn, err := c.config.DialTelnet()\n\tif 
err != nil {\n\t\treturn err\n\t}\n\tdefer telnetConn.Close()\n\n\tcmd := fmt.Sprintf(\"chmod +x %s && %s\", dronePath, dronePath)\n\treturn telnetConn.Exec(cmd, os.Stdout)\n}", "func RunCmd() *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"run [script path]\",\n\t\tShort: \"Clones multiple repositories, run a script in that directory, and creates a PR with those changes.\",\n\t\tLong: runHelp,\n\t\tArgs: cobra.ExactArgs(1),\n\t\tPreRunE: logFlagInit,\n\t\tRunE: run,\n\t}\n\n\tcmd.Flags().StringP(\"branch\", \"B\", \"multi-gitter-branch\", \"The name of the branch where changes are committed.\")\n\tcmd.Flags().StringP(\"base-branch\", \"\", \"\", \"The branch which the changes will be based on.\")\n\tcmd.Flags().StringP(\"pr-title\", \"t\", \"\", \"The title of the PR. Will default to the first line of the commit message if none is set.\")\n\tcmd.Flags().StringP(\"pr-body\", \"b\", \"\", \"The body of the commit message. Will default to everything but the first line of the commit message if none is set.\")\n\tcmd.Flags().StringP(\"commit-message\", \"m\", \"\", \"The commit message. 
Will default to title + body if none is set.\")\n\tcmd.Flags().StringSliceP(\"reviewers\", \"r\", nil, \"The username of the reviewers to be added on the pull request.\")\n\tcmd.Flags().StringSliceP(\"team-reviewers\", \"\", nil, \"Github team names of the reviewers, in format: 'org/team'\")\n\tcmd.Flags().StringSliceP(\"assignees\", \"a\", nil, \"The username of the assignees to be added on the pull request.\")\n\tcmd.Flags().IntP(\"max-reviewers\", \"M\", 0, \"If this value is set, reviewers will be randomized.\")\n\tcmd.Flags().IntP(\"max-team-reviewers\", \"\", 0, \"If this value is set, team reviewers will be randomized\")\n\tcmd.Flags().IntP(\"concurrent\", \"C\", 1, \"The maximum number of concurrent runs.\")\n\tcmd.Flags().BoolP(\"skip-pr\", \"\", false, \"Skip pull request and directly push to the branch.\")\n\tcmd.Flags().StringSliceP(\"skip-repo\", \"s\", nil, \"Skip changes on specified repositories, the name is including the owner of repository in the format \\\"ownerName/repoName\\\".\")\n\tcmd.Flags().BoolP(\"interactive\", \"i\", false, \"Take manual decision before committing any change. 
Requires git to be installed.\")\n\tcmd.Flags().BoolP(\"dry-run\", \"d\", false, \"Run without pushing changes or creating pull requests.\")\n\tcmd.Flags().StringP(\"conflict-strategy\", \"\", \"skip\", `What should happen if the branch already exist.\nAvailable values:\n skip: Skip making any changes to the existing branch and do not create a new pull request.\n replace: Replace the existing content of the branch by force pushing any new changes, then reuse any existing pull request, or create a new one if none exist.\n`)\n\tcmd.Flags().BoolP(\"draft\", \"\", false, \"Create pull request(s) as draft.\")\n\t_ = cmd.RegisterFlagCompletionFunc(\"conflict-strategy\", func(cmd *cobra.Command, _ []string, _ string) ([]string, cobra.ShellCompDirective) {\n\t\treturn []string{\"skip\", \"replace\"}, cobra.ShellCompDirectiveNoFileComp\n\t})\n\tcmd.Flags().StringSliceP(\"labels\", \"\", nil, \"Labels to be added to any created pull request.\")\n\tcmd.Flags().StringP(\"author-name\", \"\", \"\", \"Name of the committer. If not set, the global git config setting will be used.\")\n\tcmd.Flags().StringP(\"author-email\", \"\", \"\", \"Email of the committer. 
If not set, the global git config setting will be used.\")\n\tconfigureGit(cmd)\n\tconfigurePlatform(cmd)\n\tconfigureRunPlatform(cmd, true)\n\tconfigureLogging(cmd, \"-\")\n\tconfigureConfig(cmd)\n\tcmd.Flags().AddFlagSet(outputFlag())\n\n\treturn cmd\n}", "func Run(args ...string) (output string, err error) {\n\tvar cmd = exec.Command(\"git\", args...)\n\tbts, err := cmd.CombinedOutput()\n\tif err != nil {\n\t\treturn \"\", errors.New(string(bts))\n\t}\n\treturn string(bts), err\n}", "func (a *App) Run(cmd string, w io.Writer) error {\n\ta.Log(fmt.Sprintf(\"running '%s'\", cmd), \"tsuru\")\n\tsource := \"[ -f /home/application/apprc ] && source /home/application/apprc\"\n\tcd := \"[ -d /home/application/current ] && cd /home/application/current\"\n\tcmd = fmt.Sprintf(\"%s; %s; %s\", source, cd, cmd)\n\treturn a.run(cmd, w)\n}", "func (r *Repo) Pull() error {\n\tr.Lock()\n\tdefer r.Unlock()\n\n\treturn timeout(*cmdTimeout, func() error {\n\t\treturn r.Master.VCS.Download(r.Path)\n\t})\n}", "func (cli *CLI) Run(args []string) int {\n\n\tvar (\n\t\towner string\n\t\trepo string\n\t\ttoken string\n\n\t\tcommitish string\n\t\tname string\n\t\tbody string\n\t\tdraft bool\n\t\tprerelease bool\n\n\t\tparallel int\n\n\t\trecreate bool\n\t\treplace bool\n\t\tsoft bool\n\n\t\tstat bool\n\t\tversion bool\n\t\tdebug bool\n\n\t\tgeneratenotes bool\n\t)\n\n\tflags := flag.NewFlagSet(Name, flag.ContinueOnError)\n\tflags.SetOutput(cli.errStream)\n\tflags.Usage = func() {\n\t\tfmt.Fprint(cli.errStream, helpText)\n\t}\n\n\tflags.StringVar(&owner, \"username\", \"\", \"\")\n\tflags.StringVar(&owner, \"owner\", \"\", \"\")\n\tflags.StringVar(&owner, \"u\", \"\", \"\")\n\n\tflags.StringVar(&repo, \"repository\", \"\", \"\")\n\tflags.StringVar(&repo, \"r\", \"\", \"\")\n\n\tflags.StringVar(&token, \"token\", os.Getenv(EnvGitHubToken), \"\")\n\tflags.StringVar(&token, \"t\", os.Getenv(EnvGitHubToken), \"\")\n\n\tflags.StringVar(&commitish, \"commitish\", \"\", 
\"\")\n\tflags.StringVar(&commitish, \"c\", \"\", \"\")\n\n\tflags.StringVar(&name, \"name\", \"\", \"\")\n\tflags.StringVar(&name, \"n\", \"\", \"\")\n\n\tflags.StringVar(&body, \"body\", \"\", \"\")\n\tflags.StringVar(&body, \"b\", \"\", \"\")\n\n\tflags.BoolVar(&draft, \"draft\", false, \"\")\n\tflags.BoolVar(&prerelease, \"prerelease\", false, \"\")\n\n\tflags.IntVar(&parallel, \"parallel\", defaultParallel, \"\")\n\tflags.IntVar(&parallel, \"p\", defaultParallel, \"\")\n\n\tflags.BoolVar(&recreate, \"delete\", false, \"\")\n\tflags.BoolVar(&recreate, \"recreate\", false, \"\")\n\n\tflags.BoolVar(&replace, \"replace\", false, \"\")\n\n\tflags.BoolVar(&soft, \"soft\", false, \"\")\n\n\tflags.BoolVar(&version, \"version\", false, \"\")\n\tflags.BoolVar(&version, \"v\", false, \"\")\n\n\tflags.BoolVar(&debug, \"debug\", false, \"\")\n\n\tflags.BoolVar(&generatenotes, \"generatenotes\", false, \"\")\n\n\t// Deprecated\n\tflags.BoolVar(&stat, \"stat\", false, \"\")\n\n\t// Parse flags\n\tif err := flags.Parse(args[1:]); err != nil {\n\t\treturn ExitCodeParseFlagsError\n\t}\n\n\tif debug {\n\t\tos.Setenv(EnvDebug, \"1\")\n\t\tDebugf(\"Run as DEBUG mode\")\n\t}\n\n\t// Show version and check latest version release\n\tif version {\n\t\tfmt.Fprint(cli.outStream, OutputVersion())\n\t\treturn ExitCodeOK\n\t}\n\n\tparsedArgs := flags.Args()\n\tDebugf(\"parsed args : %s\", parsedArgs)\n\tvar tag, path string\n\tswitch len(parsedArgs) {\n\tcase 1:\n\t\ttag, path = parsedArgs[0], \"\"\n\tcase 2:\n\t\ttag, path = parsedArgs[0], parsedArgs[1]\n\tdefault:\n\t\tPrintRedf(cli.errStream,\n\t\t\t\"Invalid number of arguments: you must set a git TAG and optionally a PATH.\\n\")\n\t\treturn ExitCodeBadArgs\n\t}\n\n\t// Extract github repository owner username.\n\t// If it's not provided via command line flag, read it from .gitconfig\n\t// (github user or git user).\n\tif len(owner) == 0 {\n\t\torigin, err := gitconfig.OriginURL()\n\t\tif err == nil {\n\t\t\towner = 
retrieveOwnerName(origin)\n\t\t}\n\t\tif len(owner) == 0 {\n\t\t\towner, err = gitconfig.GithubUser()\n\t\t\tif err != nil {\n\t\t\t\towner, err = gitconfig.Username()\n\t\t\t}\n\n\t\t\tif err != nil {\n\t\t\t\tPrintRedf(cli.errStream,\n\t\t\t\t\t\"Failed to set up ghr: repository owner name not found\\n\")\n\t\t\t\tfmt.Fprintf(cli.errStream,\n\t\t\t\t\t\"Please set it via `-u` option.\\n\\n\"+\n\t\t\t\t\t\t\"You can set default owner name in `github.username` or `user.name`\\n\"+\n\t\t\t\t\t\t\"in `~/.gitconfig` file\\n\")\n\t\t\t\treturn ExitCodeOwnerNotFound\n\t\t\t}\n\t\t}\n\t}\n\tDebugf(\"Owner: %s\", owner)\n\n\t// Extract repository name from files.\n\t// If not provided, read it from .git/config file.\n\tif len(repo) == 0 {\n\t\tvar err error\n\t\trepo, err = gitconfig.Repository()\n\t\tif err != nil {\n\t\t\tPrintRedf(cli.errStream,\n\t\t\t\t\"Failed to set up ghr: repository name not found\\n\")\n\t\t\tfmt.Fprintf(cli.errStream,\n\t\t\t\t\"ghr reads it from `.git/config` file. Change directory to \\n\"+\n\t\t\t\t\t\"repository root directory or setup git repository.\\n\"+\n\t\t\t\t\t\"Or set it via `-r` option.\\n\")\n\t\t\treturn ExitCodeRepoNotFound\n\t\t}\n\t}\n\tDebugf(\"Repository: %s\", repo)\n\n\t// If GitHub API token is not provided via command line flag\n\t// or env var then read it from .gitconfig file.\n\tif len(token) == 0 {\n\t\tvar err error\n\t\ttoken, err = gitconfig.GithubToken()\n\t\tif err != nil {\n\t\t\tPrintRedf(cli.errStream, \"Failed to set up ghr: token not found\\n\")\n\t\t\tfmt.Fprintf(cli.errStream,\n\t\t\t\t\"To use ghr, you need a GitHub API token.\\n\"+\n\t\t\t\t\t\"Please set it via `%s` env var or `-t` option.\\n\\n\"+\n\t\t\t\t\t\"If you don't have one, visit official doc (goo.gl/jSnoI)\\n\"+\n\t\t\t\t\t\"and get it first.\\n\",\n\t\t\t\tEnvGitHubToken)\n\t\t\treturn ExitCodeTokenNotFound\n\t\t}\n\t}\n\tDebugf(\"Github API Token: %s\", maskString(token))\n\n\t// Set Base GitHub API URL. 
Base URL can also be provided via env var for use with GHE.\n\tbaseURLStr := defaultBaseURL\n\tif urlStr := os.Getenv(EnvGitHubAPI); len(urlStr) != 0 {\n\t\tbaseURLStr = urlStr\n\t}\n\tDebugf(\"Base GitHub API URL: %s\", baseURLStr)\n\n\tif parallel <= 0 {\n\t\tparallel = runtime.NumCPU()\n\t}\n\tDebugf(\"Parallel factor: %d\", parallel)\n\n\tlocalAssets, err := LocalAssets(path)\n\tif err != nil {\n\t\tPrintRedf(cli.errStream,\n\t\t\t\"Failed to find assets from %s: %s\\n\", path, err)\n\t\treturn ExitCodeError\n\t}\n\tDebugf(\"Number of file to upload: %d\", len(localAssets))\n\n\t// Create a GitHub client\n\tgitHubClient, err := NewGitHubClient(owner, repo, token, baseURLStr)\n\tif err != nil {\n\t\tPrintRedf(cli.errStream, \"Failed to construct GitHub client: %s\\n\", err)\n\t\treturn ExitCodeError\n\t}\n\n\tghr := GHR{\n\t\tGitHub: gitHubClient,\n\t\toutStream: cli.outStream,\n\t}\n\n\tDebugf(\"Name: %s\", name)\n\n\t// Prepare create release request\n\treq := &github.RepositoryRelease{\n\t\tName: github.String(name),\n\t\tTagName: github.String(tag),\n\t\tPrerelease: github.Bool(prerelease),\n\t\tDraft: github.Bool(draft),\n\t\tTargetCommitish: github.String(commitish),\n\t\tBody: github.String(body),\n\t\tGenerateReleaseNotes: github.Bool(generatenotes),\n\t}\n\n\tctx := context.TODO()\n\n\tif soft {\n\t\t_, err := ghr.GitHub.GetRelease(ctx, *req.TagName)\n\n\t\tif err == nil {\n\t\t\tfmt.Fprintf(cli.outStream, \"ghr aborted since tag `%s` already exists\\n\", *req.TagName)\n\t\t\treturn ExitCodeOK\n\t\t}\n\n\t\tif !errors.Is(err, ErrReleaseNotFound) {\n\t\t\tPrintRedf(cli.errStream, \"Failed to get GitHub release: %s\\n\", err)\n\t\t\treturn ExitCodeError\n\t\t}\n\t}\n\n\trelease, err := ghr.GitHub.GetDraftRelease(ctx, tag)\n\tif err != nil {\n\t\tPrintRedf(cli.errStream, \"Failed to get draft release: %s\\n\", err)\n\t\treturn ExitCodeError\n\t}\n\tif release == nil {\n\t\trelease, err = ghr.CreateRelease(ctx, req, recreate)\n\t\tif err != nil 
{\n\t\t\tPrintRedf(cli.errStream, \"Failed to create GitHub release page: %s\\n\", err)\n\t\t\treturn ExitCodeError\n\t\t}\n\t}\n\n\tif replace {\n\t\terr := ghr.DeleteAssets(ctx, *release.ID, localAssets, parallel)\n\t\tif err != nil {\n\t\t\tPrintRedf(cli.errStream, \"Failed to delete existing assets: %s\\n\", err)\n\t\t\treturn ExitCodeError\n\t\t}\n\t}\n\n\t// FIXME(tcnksm): More ideal way to change this\n\t// This is for Github enterprise\n\tif err := ghr.GitHub.SetUploadURL(*release.UploadURL); err != nil {\n\t\tfmt.Fprintf(cli.errStream, \"Failed to set upload URL %s: %s\\n\", *release.UploadURL, err)\n\t\treturn ExitCodeError\n\t}\n\n\terr = ghr.UploadAssets(ctx, *release.ID, localAssets, parallel)\n\tif err != nil {\n\t\tPrintRedf(cli.errStream, \"Failed to upload one of assets: %s\\n\", err)\n\t\treturn ExitCodeError\n\t}\n\n\tif !draft {\n\t\t_, err := ghr.GitHub.EditRelease(ctx, *release.ID, &github.RepositoryRelease{\n\t\t\tDraft: github.Bool(false),\n\t\t})\n\t\tif err != nil {\n\t\t\tPrintRedf(cli.errStream, \"Failed to publish release: %s\\n\", err)\n\t\t\treturn ExitCodeError\n\t\t}\n\t}\n\n\treturn ExitCodeOK\n}", "func (c *TwitterCommand) Run(args []string) int {\n\tvar url bool\n\n\tflags := flag.NewFlagSet(\"twitter\", flag.ContinueOnError)\n\tflags.BoolVar(&url, \"url\", false, \"Get account page URL\")\n\n\tif err := flags.Parse(args); err != nil {\n\t\treturn 1\n\t}\n\n\toutput := \"@a_know\"\n\n\tif url {\n\t\toutput = \"https://twitter.com/a_know\"\n\t}\n\n\tfmt.Fprintln(c.OutStream, output)\n\treturn 0\n}", "func (o *StepUpdateCommand) Run() error {\n\treturn o.Cmd.Help()\n}", "func (r *Puller) Pull(image string) error {\n\tcmd := exec.Command(\"docker\", \"pull\", image)\n\tvar out bytes.Buffer\n\terr := utils.ExecL(cmd, &out, log.WithField(trace.Component, constants.ComponentSystem))\n\tif err != nil {\n\t\treturn trace.Wrap(err, out.String())\n\t}\n\treturn nil\n}", "func (lpc *ListPartCommand) RunCommand() error 
{\n\tlpc.lpOption.encodingType, _ = GetString(OptionEncodingType, lpc.command.options)\n\tsrcBucketUrL, err := GetCloudUrl(lpc.command.args[0], lpc.lpOption.encodingType)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif srcBucketUrL.object == \"\" {\n\t\treturn fmt.Errorf(\"object name is empty\")\n\t}\n\n\tlpc.lpOption.cloudUrl = *srcBucketUrL\n\tlpc.lpOption.uploadId = lpc.command.args[1]\n\n\treturn lpc.ListPart()\n}", "func (cmd *CfRecycleCmd) Run(cliConnection plugin.CliConnection, args []string) {\n\tif args[0] == \"recycle\" {\n\t\tcmd.RecycleCommand(cliConnection, args)\n\t}\n}", "func (d Driver) Run(name, confTarget, hostVolume string, args []string) error {\n\td.containerID = fmt.Sprintf(\"maestro-%s\", name)\n\td.confTarget = confTarget\n\td.hostVolume = hostVolume\n\td.cmd = args\n\tneedToPull, checkErr := d.needToPull(context.Background())\n\tif checkErr != nil {\n\t\treturn checkErr\n\t}\n\tif needToPull {\n\t\tpullErr := d.pull(context.Background())\n\t\tif pullErr != nil {\n\t\t\treturn pullErr\n\t\t}\n\t}\n\tneedToRemoveOld, removalID, checkRemoveErr := d.needToRemove(context.Background())\n\tif checkRemoveErr != nil {\n\t\treturn checkRemoveErr\n\t}\n\tif needToRemoveOld {\n\t\tremoveErr := d.remove(context.Background(), removalID)\n\t\tif removeErr != nil {\n\t\t\treturn removeErr\n\t\t}\n\t}\n\tcreateErr := d.create(context.Background())\n\tif createErr != nil {\n\t\treturn createErr\n\t}\n\treturn d.start(context.Background())\n}", "func Run(args ...string) (string, error) {\n\t// TODO: use exex.CommandContext here and refactor.\n\t/* #nosec */\n\tcmd := exec.Command(\"git\", args...)\n\tlog.WithField(\"args\", args).Debug(\"running git\")\n\tbts, err := cmd.CombinedOutput()\n\tlog.WithField(\"output\", string(bts)).\n\t\tDebug(\"git result\")\n\tif err != nil {\n\t\treturn \"\", errors.New(string(bts))\n\t}\n\treturn string(bts), nil\n}", "func Run(c *deis.Client, appID string, command string) (api.AppRunResponse, error) {\n\treq := 
api.AppRunRequest{Command: command}\n\tbody, err := json.Marshal(req)\n\n\tif err != nil {\n\t\treturn api.AppRunResponse{}, err\n\t}\n\n\tu := fmt.Sprintf(\"/v2/apps/%s/run\", appID)\n\n\tres, reqErr := c.Request(\"POST\", u, body)\n\tif reqErr != nil && !deis.IsErrAPIMismatch(reqErr) {\n\t\treturn api.AppRunResponse{}, reqErr\n\t}\n\n\tarr := api.AppRunResponse{}\n\n\tif err = json.NewDecoder(res.Body).Decode(&arr); err != nil {\n\t\treturn api.AppRunResponse{}, err\n\t}\n\n\treturn arr, reqErr\n}", "func (am *AutogitManager) Pull(\n\tctx context.Context, srcTLF *libkbfs.TlfHandle, srcRepo, branchName string,\n\tdstTLF *libkbfs.TlfHandle, dstDir string) (\n\tdoneCh <-chan struct{}, err error) {\n\tam.log.CDebugf(ctx, \"Autogit pull request from %s/%s:%s to %s/%s\",\n\t\tsrcTLF.GetCanonicalPath(), srcRepo, branchName,\n\t\tdstTLF.GetCanonicalPath(), dstDir)\n\tdefer func() {\n\t\tam.deferLog.CDebugf(ctx, \"Pull request processed: %+v\", err)\n\t}()\n\n\treq := resetReq{\n\t\tsrcTLF, srcRepo, branchName, dstTLF, dstDir, make(chan struct{}),\n\t}\n\treturn am.queueReset(ctx, req)\n}", "func (src *SecretsRemoveCommand) Run(args []string) int {\n\tsrc.NewFlagSet()\n\n\tsrc.FlagSet.StringVar(&src.flagSecretID, flagSecretID, \"\", \"\")\n\tsrc.FlagSet.StringVar(&src.flagSecretID, flagSecretIDDeprecated, \"\", \"\")\n\tsrc.FlagSet.StringVar(&src.flagSecretName, flagSecretNameIdentifier, \"\", \"\")\n\tsrc.FlagSet.StringVar(&src.flagSecretName, flagSecretNameIdentifierDeprecated, \"\", \"\")\n\n\tif err := src.SecretsBaseCommand.run(args); err != nil {\n\t\tsrc.UI.Error(err.Error())\n\t\treturn 1\n\t}\n\n\tif err := src.removeSecret(); err != nil {\n\t\tsrc.UI.Error(err.Error())\n\t\treturn 1\n\t}\n\n\treturn 0\n}", "func PullCommand() cli.Command {\n\treturn cli.Command{\n\t\tName: \"pull\",\n\t\tUsage: \"Pull an image from an Amazon ECR repository.\",\n\t\tArgsUsage: PullImageFormat,\n\t\tBefore: ecscli.BeforeApp,\n\t\tAction: ImagePull,\n\t\tFlags: 
[]cli.Flag{\n\t\t\tcli.StringFlag{\n\t\t\t\tName: ecscli.RegistryIdFlag,\n\t\t\t\tUsage: \"[Optional] Specifies the the Amazon ECR registry ID to pull the image from. By default, images are pulled from the current AWS account.\",\n\t\t\t},\n\t\t},\n\t}\n}", "func (cmd *ListFirmwareCommand) Run(c *client.Client, args []string) error {\n\tvar path string\n\tif len(args) > 0 {\n\t\tpath = args[0]\n\t} else {\n\t\tpath = \"/firmware\"\n\t}\n\tlogger := goa.NewLogger(log.New(os.Stderr, \"\", log.LstdFlags))\n\tctx := goa.WithLogger(context.Background(), logger)\n\tresp, err := c.ListFirmware(ctx, path, stringFlagVal(\"module\", cmd.Module), intFlagVal(\"page\", cmd.Page), intFlagVal(\"pageSize\", cmd.PageSize), stringFlagVal(\"profile\", cmd.Profile))\n\tif err != nil {\n\t\tgoa.LogError(ctx, \"failed\", \"err\", err)\n\t\treturn err\n\t}\n\n\tgoaclient.HandleResponse(c.Client, resp, cmd.PrettyPrint)\n\treturn nil\n}", "func Run(argv []string) (err error) {\n\tenvFlag := cli.StringFlag{\n\t\tName: \"env\",\n\t\tValue: \"dev\",\n\t\tUsage: \"Specify Space environment\",\n\t}\n\n\tdownloadCommand := cli.Command{\n\t\tName: \"pull\",\n\t\tAliases: []string{\"download\"},\n\t\tUsage: \"Download file from Space\",\n\t\tArgsUsage: \"Space object's name\",\n\t\tFlags: []cli.Flag{\n\t\t\t&envFlag,\n\t\t\t&cli.StringFlag{\n\t\t\t\tName: \"output\",\n\t\t\t\tAliases: []string{\"o\"},\n\t\t\t\tUsage: \"Output file, otherwise use object's name\",\n\t\t\t\tValue: \"\",\n\t\t\t},\n\t\t},\n\t\tAction: downloadAction,\n\t}\n\n\tlistInternalCommand := cli.Command{\n\t\tName: \"list-internal\",\n\t\tUsage: \"List available buckets or objects in Space. 
Not a good idea for production bucket.\",\n\t\tArgsUsage: \"If given, list all objects in {bucket}/{prefix}, otherwise list all buckets\",\n\t\tHideHelp: true,\n\t\tHidden: true,\n\t\tFlags: []cli.Flag{\n\t\t\t&envFlag,\n\t\t},\n\t\tAction: listInternalAction,\n\t}\n\n\tlistCommand := cli.Command{\n\t\tName: \"list\",\n\t\tUsage: \"List available objects in Space.\",\n\t\tArgsUsage: \"Prefix\",\n\t\tFlags: []cli.Flag{\n\t\t\t&envFlag,\n\t\t},\n\t\tAction: listAction,\n\t}\n\n\tpushCommand := cli.Command{\n\t\tName: \"push\",\n\t\tAliases: []string{\"upload\"},\n\t\tUsage: \"Upload file/folder to Space\",\n\t\tArgsUsage: \"File or folder path to upload\",\n\t\tFlags: []cli.Flag{\n\t\t\t&envFlag,\n\t\t\t&cli.BoolFlag{\n\t\t\t\tName: \"recursive\",\n\t\t\t\tAliases: []string{\"r\"},\n\t\t\t\tUsage: \"Upload a folder recursively\",\n\t\t\t\tValue: false,\n\t\t\t},\n\t\t\t&cli.StringFlag{\n\t\t\t\tName: \"prefix\",\n\t\t\t\tAliases: []string{\"p\"},\n\t\t\t\tUsage: \"Object name's prefix.\",\n\t\t\t\tValue: \"\",\n\t\t\t},\n\t\t\t&cli.StringFlag{\n\t\t\t\tName: \"tags\",\n\t\t\t\tAliases: []string{\"t\"},\n\t\t\t\tUsage: \"Add tags, e.g. 
\\\"version: 0.0, type: app\\\"\",\n\t\t\t\tValue: \"\",\n\t\t\t},\n\t\t},\n\t\tAction: pushAction,\n\t}\n\n\tremoveCommand := cli.Command{\n\t\tName: \"remove\",\n\t\tAliases: []string{\"rm\"},\n\t\tUsage: \"Remove file(s) in Space\",\n\t\tArgsUsage: \"Files to be removed\",\n\t\tFlags: []cli.Flag{\n\t\t\t&envFlag,\n\t\t},\n\t\tAction: removeAction,\n\t}\n\n\tapp := &cli.App{\n\t\tName: \"space\",\n\t\tUsage: \"Work with Space and assets\",\n\t\tCommands: []*cli.Command{\n\t\t\t&downloadCommand,\n\t\t\t&listInternalCommand,\n\t\t\t&listCommand,\n\t\t\t&pushCommand,\n\t\t\t&removeCommand,\n\t\t},\n\t}\n\n\terr = app.Run(argv)\n\treturn err\n}", "func (mon *SocketMonitor) Run(command []byte) ([]byte, error) {\n\t// Just call RunWithFile with no file\n\treturn mon.RunWithFile(command, nil)\n}", "func (cmd *ProjectGetIDPictureCommand) Run(c *client.Client, args []string) error {\n\tvar path string\n\tif len(args) > 0 {\n\t\tpath = args[0]\n\t} else {\n\t\tpath = fmt.Sprintf(\"/projects/%v/picture\", cmd.ProjectID)\n\t}\n\tlogger := goa.NewLogger(log.New(os.Stderr, \"\", log.LstdFlags))\n\tctx := goa.WithLogger(context.Background(), logger)\n\tresp, err := c.ProjectGetIDPicture(ctx, path)\n\tif err != nil {\n\t\tgoa.LogError(ctx, \"failed\", \"err\", err)\n\t\treturn err\n\t}\n\n\tgoaclient.HandleResponse(c.Client, resp, cmd.PrettyPrint)\n\treturn nil\n}", "func (r *RemoteExecutor) Run(name string, args ...string) error {\n\t_, err := r.RunWithStdout(name, args...)\n\treturn err\n}", "func Pull(c Config) {\n\n\tUpdate(c)\n\n}", "func Run(ctx context.Context /*clusterConfigs config.K8S,*/, k8sRepoConfig config.KubernetesConfigsRepo, slashcmd, txt, caller string) (messages []string, err error) {\n\tcommand.DeployWorker.Set(k8sRepoConfig.Git)\n\tif slashcmd != ACCEPTED_SLASHCMD {\n\t\treturn []string{\"call help\"}, errors.Errorf(\"%s is not a supported slash command\", slashcmd)\n\t}\n\ttxtParts := strings.Split(txt, \" \")\n\tif len(txtParts) == 0 {\n\t\t// TODO send 
help\n\t\treturn []string{\"call help\"}, nil\n\t}\n\n\tcmd := txtParts[0]\n\tswitch cmd {\n\t// case \"list\":\n\t// \tmessages, err = command.List(ctx, clusterConfigs, txtParts[1:])\n\t// case \"info\":\n\t// \tmessages, err = command.Info(ctx, clusterConfigs, txtParts[1:])\n\tcase \"deploy\":\n\t\tmessages, err = command.Deploy(ctx, k8sRepoConfig, txtParts[1:], txt, \"+\"+caller)\n\tcase \"release\":\n\t\tmessages, err = command.Release(ctx, k8sRepoConfig, txtParts[1:], txt, \"+\"+caller)\n\tdefault:\n\t\tif isBowie(txtParts) {\n\t\t\tmessages = command.Bowie()\n\t\t} else {\n\t\t\tmessages = []string{\"call help\"}\n\t\t\terr = errors.Errorf(\"command(%s) is not supported\", cmd)\n\t\t}\n\t}\n\treturn messages, err\n}", "func (uc *UpdateCmd) Run(ctx context.Context, client hcapi2.Client, cmd *cobra.Command, args []string) error {\n\n\tidOrName := args[0]\n\tresource, _, err := uc.Fetch(ctx, client, cmd, idOrName)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// resource is an interface that always has a type, so the interface is never nil\n\t// (i.e. == nil) is always false.\n\tif reflect.ValueOf(resource).IsNil() {\n\t\treturn fmt.Errorf(\"%s not found: %s\", uc.ResourceNameSingular, idOrName)\n\t}\n\n\t// The inherited commands should not need to parse the flags themselves\n\t// or use the cobra command, therefore we fill them in a map here and\n\t// pass the map then to the update method. 
A caller can/should rely on\n\t// the map to contain all the flag keys that were specified.\n\tflags := make(map[string]pflag.Value, cmd.Flags().NFlag())\n\tcmd.Flags().VisitAll(func(flag *pflag.Flag) {\n\t\tflags[flag.Name] = flag.Value\n\t})\n\n\tif err := uc.Update(ctx, client, cmd, resource, flags); err != nil {\n\t\treturn fmt.Errorf(\"updating %s %s failed: %s\", uc.ResourceNameSingular, idOrName, err)\n\t}\n\n\tfmt.Printf(\"%s %v updated\\n\", uc.ResourceNameSingular, idOrName)\n\treturn nil\n}", "func Run(ctx *cli.Context) {\n\tif ctx.Bool(\"debug\") {\n\t\tlog.SetLevel(log.DebugLevel)\n\t}\n\tlog.SetLevel(log.DebugLevel)\n\n\td, err := bridge.NewDriver(version, ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\th := network.NewHandler(d)\n\th.ServeUnix(pluginName, 0)\n}", "func run(command string, args ...string) []byte {\n\n\toutput, _ := exec.Command(command, args...).CombinedOutput()\n\treturn output\n}", "func Run() error {\n\tcommand := &commander.Command{\n\t\tUsageLine: os.Args[0],\n\t\tShort: \"go_todo\",\n\t}\n\n\tcommand.Subcommands = []*commander.Command{\n\t\ttodoList(todoFilename),\n\t\ttodoSave(todoFilename),\n\t\ttodoStatus(todoFilename),\n\t\ttodoDelete(todoFilename),\n\t}\n\n\terr := command.Dispatch(os.Args[1:])\n\tif err != nil {\n\t\tfmt.Printf(\"%v\\n\", err)\n\t\tos.Exit(1)\n\t}\n\n\treturn err\n}", "func (gc *TemplateCommand) Run(args []string, w io.Writer) error {\n\tif len(args) == 0 {\n\t\treturn errExpectedSubcommand\n\t}\n\n\tsubcommand := args[0]\n\tswitch subcommand {\n\tcase \"list\":\n\t\treturn gc.list(w)\n\tcase \"copy\":\n\t\treturn gc.fetch(w, os.WriteFile, args[1:])\n\t}\n\treturn errUnknownSubcommand\n}", "func Run(args ...string) ([]byte, error) {\n\treturn RunInPath(\"\", args...)\n}", "func (cmd *DownloadCommand) Run(c *client.Client, args []string) error {\n\tvar (\n\t\tfnf func(context.Context, string) (int64, error)\n\t\tfnd func(context.Context, string, string) (int64, error)\n\n\t\trpath = args[0]\n\t\toutfile = 
cmd.OutFile\n\t\tlogger = goa.NewLogger(log.New(os.Stderr, \"\", log.LstdFlags))\n\t\tctx = goa.WithLogger(context.Background(), logger)\n\t\terr error\n\t)\n\n\tif rpath[0] != '/' {\n\t\trpath = \"/\" + rpath\n\t}\n\tif rpath == \"/swagger.json\" {\n\t\tfnf = c.DownloadSwaggerJSON\n\t\tif outfile == \"\" {\n\t\t\toutfile = \"swagger.json\"\n\t\t}\n\t\tgoto found\n\t}\n\tif strings.HasPrefix(rpath, \"/\") {\n\t\tfnd = c.Download\n\t\trpath = rpath[1:]\n\t\tif outfile == \"\" {\n\t\t\t_, outfile = path.Split(rpath)\n\t\t}\n\t\tgoto found\n\t}\n\tif strings.HasPrefix(rpath, \"/swagger-ui/\") {\n\t\tfnd = c.DownloadSwaggerUI\n\t\trpath = rpath[12:]\n\t\tif outfile == \"\" {\n\t\t\t_, outfile = path.Split(rpath)\n\t\t}\n\t\tgoto found\n\t}\n\treturn fmt.Errorf(\"don't know how to download %s\", rpath)\nfound:\n\tctx = goa.WithLogContext(ctx, \"file\", outfile)\n\tif fnf != nil {\n\t\t_, err = fnf(ctx, outfile)\n\t} else {\n\t\t_, err = fnd(ctx, rpath, outfile)\n\t}\n\tif err != nil {\n\t\tgoa.LogError(ctx, \"failed\", \"err\", err)\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func Run(opts ...cli.Option) error {\n\treturn cmd.Run(opts...)\n}", "func (c Command) Run(args ...string) error {\n\treturn c.builder().Run(args...)\n}", "func Run(c *Config, token string, outputDir string) {\n\tclient := connect(c)\n\n\t// Authentication happens by sending the secret token via metadata\n\tmd := metadata.Pairs(shared.SecretKey, c.Secret)\n\n\tstream, err := client.Get(metadata.NewContext(context.Background(), md), &api.GetRequest{\n\t\tToken: token,\n\t})\n\tshared.ExitOnError(err, \"Unable to initiate Receive: %v\", grpc.ErrorDesc(err))\n\n\tfor {\n\t\tres, err := stream.Recv()\n\t\tif err != nil {\n\t\t\tshared.ExitOnError(err, \"Unable to receive data: %v\", grpc.ErrorDesc(err))\n\t\t}\n\t\tfmt.Println(\"received\", res.Type)\n\t}\n}", "func run(command string, args ...string) []byte {\n\toutput, err := exec.Command(command, args...).Output()\n\tcheck(err)\n\treturn output\n}", 
"func (service Service) PullCommand() Command {\n\treturn Command{\n\t\tMain: \"docker\",\n\t\tArgs: []string{\"pull\", service.Image},\n\t}\n}", "func (c *Cmd) Run() error {\n\treturn c.runInnerCommand()\n}", "func (c *LocalCmd) Run() error {\n\treturn runCmd(c.cmd, c.args, c.env, ioutil.Discard, ioutil.Discard)\n}", "func Pull(entity *RepoEntity, options PullOptions) (err error) {\n\t// here we configure pull operation\n\t// default mode is go-git (this may be configured)\n\tpullCmdMode = pullCmdModeNative\n\tpullTryCount = 0\n\n\tswitch pullCmdMode {\n\tcase pullCmdModeLegacy:\n\t\terr = pullWithGit(entity, options)\n\t\treturn err\n\tcase pullCmdModeNative:\n\t\terr = pullWithGoGit(entity, options)\n\t\treturn err\n\t}\n\treturn nil\n}", "func (h *distroIDGetTeardownHandler) Run(ctx context.Context) gimlet.Responder {\n\td, err := h.sc.FindDistroById(h.distroID)\n\tif err != nil {\n\t\treturn gimlet.MakeJSONErrorResponder(errors.Wrapf(err, \"Database error for find() by distro id '%s'\", h.distroID))\n\t}\n\n\tapiDistro := &model.APIDistro{}\n\tif err = apiDistro.BuildFromService(d); err != nil {\n\t\treturn gimlet.MakeJSONInternalErrorResponder(errors.Wrap(err, \"API error converting from distro.Distro to model.APIDistro\"))\n\t}\n\n\treturn gimlet.NewJSONResponse(apiDistro.Teardown)\n}", "func (dm *DotnetModule) runCmd() error {\n\tcmd, err := dm.createCmd()\n\tif err != nil {\n\t\treturn err\n\t}\n\t// To prevent NuGet prompting for credentials\n\terr = os.Setenv(\"NUGET_EXE_NO_PROMPT\", \"true\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = io.RunCmd(cmd)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (s *imageBuilderServer) Pull(ctx context.Context, _ *api.Empty) (*api.Response, error) {\n\n\t// TODO: save git repo params and clone\n\n\treturn &okResponse, nil\n}", "func (cmd *DeletePostCommand) Run(c *client.Client, args []string) error {\n\tvar path string\n\tif len(args) > 0 {\n\t\tpath = args[0]\n\t} else {\n\t\tpath = 
fmt.Sprintf(\"/posts/%v\", cmd.PostID)\n\t}\n\tlogger := goa.NewLogger(log.New(os.Stderr, \"\", log.LstdFlags))\n\tctx := goa.WithLogger(context.Background(), logger)\n\tresp, err := c.DeletePost(ctx, path)\n\tif err != nil {\n\t\tgoa.LogError(ctx, \"failed\", \"err\", err)\n\t\treturn err\n\t}\n\n\tgoaclient.HandleResponse(c.Client, resp, PrettyPrint)\n\treturn nil\n}", "func (suc *SecretsUpdateCommand) Run(args []string) int {\n\tsuc.NewFlagSet()\n\n\tsuc.FlagSet.StringVar(&suc.flagSecretID, flagSecretID, \"\", \"\")\n\tsuc.FlagSet.StringVar(&suc.flagSecretID, flagSecretIDDeprecated, \"\", \"\")\n\tsuc.FlagSet.StringVar(&suc.flagSecretName, flagSecretNameIdentifier, \"\", \"\")\n\tsuc.FlagSet.StringVar(&suc.flagSecretName, flagSecretNameIdentifierDeprecated, \"\", \"\")\n\tsuc.FlagSet.StringVar(&suc.flagSecretValue, flagSecretValue, \"\", \"\")\n\n\tif err := suc.SecretsBaseCommand.run(args); err != nil {\n\t\tsuc.UI.Error(err.Error())\n\t\treturn 1\n\t}\n\n\tif err := suc.updateSecret(); err != nil {\n\t\tsuc.UI.Error(err.Error())\n\t\treturn 1\n\t}\n\n\treturn 0\n}", "func (c *Command) Run(source, message string) error {\n\tfactory := imgsource.NewImgSrcFactory(source)\n\timgSrc := factory.GetImgSrc()\n\terr := imgSrc.AddMessage(message)\n\tif err != nil {\n\t\tfmt.Fprintf(c.ErrStream, \"error happened. 
error: %s\\n\", err)\n\t\treturn err\n\t}\n\treturn nil\n}", "func Fetch() {\n\terr := RunCommand(\"git\", \"fetch\", \"--prune\")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "func (db Database) Run(cmd interface{}, result interface{}) os.Error {\n\tcursor, err := db.Conn.Find(db.Name+\".$cmd\", cmd, runFindOptions)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar d BSONData\n\tif err := cursor.Next(&d); err != nil {\n\t\treturn err\n\t}\n\n\tvar r CommandResponse\n\tif err := Decode(d.Data, &r); err != nil {\n\t\treturn err\n\t}\n\tif err := r.Error(); err != nil {\n\t\treturn err\n\t}\n\n\tif result != nil {\n\t\tif err := d.Decode(result); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (a App) Run() error {\n\tdeal, err := FetchDeal(a.URL)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Could not fetch deal: %v\", err)\n\t}\n\n\tNewPrinter(a.Out).Print(deal)\n\treturn nil\n}", "func (c *Cmd) Run() error {\n\treturn c.Cmd.Run()\n}", "func (o *Options) Run() error {\n\tscmClient, err := o.Validate()\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to validate options\")\n\t}\n\n\tfullName := scm.Join(o.Owner, o.Name)\n\n\tctx := context.Background()\n\n\treleaseInput := &scm.ReleaseInput{\n\t\tDescription: o.Description,\n\t\tTitle: o.Title,\n\t\tPrerelease: o.PreRelease,\n\t\tTag: o.Tag,\n\t}\n\n\trelease, _, err := scmClient.Releases.FindByTag(ctx, fullName, o.Tag)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to find release %s %s\", fullName, o.Tag)\n\t}\n\t_, _, err = scmClient.Releases.Update(ctx, fullName, release.ID, releaseInput)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to update release %s %s, id: %v\", fullName, o.Tag, release.ID)\n\t}\n\treturn nil\n}", "func (r *RunCommand) Run(params *params.Params, ioStreams *genericclioptions.IOStreams) error {\n\t// ran into some data race conditions during unit test with this starting up, but pod events\n\t// coming in before we completed initialization 
below\n\tr.watchLock.Lock()\n\t// resource using GenerateName, which will provice a unique instance\n\tbr := &buildv1alpha1.BuildRun{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tGenerateName: fmt.Sprintf(\"%s-\", r.buildName),\n\t\t},\n\t\tSpec: *r.buildRunSpec,\n\t}\n\tflags.SanitizeBuildRunSpec(&br.Spec)\n\n\tclientset, err := params.ShipwrightClientSet()\n\tif err != nil {\n\t\treturn err\n\t}\n\tbr, err = clientset.ShipwrightV1alpha1().BuildRuns(params.Namespace()).Create(r.cmd.Context(), br, metav1.CreateOptions{})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif !r.follow {\n\t\tfmt.Fprintf(ioStreams.Out, \"BuildRun created %q for build %q\\n\", br.GetName(), r.buildName)\n\t\treturn nil\n\t}\n\n\tr.ioStreams = ioStreams\n\tkclientset, err := params.ClientSet()\n\tif err != nil {\n\t\treturn err\n\t}\n\tr.buildRunName = br.Name\n\tif r.shpClientset, err = params.ShipwrightClientSet(); err != nil {\n\t\treturn err\n\t}\n\n\t// instantiating a pod watcher with a specific label-selector to find the indented pod where the\n\t// actual build started by this subcommand is being executed, including the randomized buildrun\n\t// name\n\tlistOpts := metav1.ListOptions{LabelSelector: fmt.Sprintf(\n\t\t\"build.shipwright.io/name=%s,buildrun.shipwright.io/name=%s\",\n\t\tr.buildName,\n\t\tbr.GetName(),\n\t)}\n\tr.pw, err = reactor.NewPodWatcher(r.Cmd().Context(), kclientset, listOpts, params.Namespace())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tr.pw.WithOnPodModifiedFn(r.onEvent)\n\t// cannot defer with unlock up top because r.pw.Start() blocks; but the erroring out above kills the\n\t// cli invocation, so it does not matter\n\tr.watchLock.Unlock()\n\t_, err = r.pw.Start()\n\treturn err\n}", "func (runner *SSHRunner) Run(command string) (string, error) {\n\treturn runner.runSSHCommandFromDriver(command, false)\n}", "func (cmd *ReceiveCommand) Run() error {\n\tclient, err := cmd.WebsocketClientFactory.Client()\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer 
client.Close()\n\n\tif err := client.Receive(cmd.Timeout, func(message []byte) error {\n\t\t_, err := cmd.OutputWriter.Write(append(message, '\\n'))\n\t\treturn err\n\t}); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (cmd *RefreshUserCommand) Run(c *client.Client, args []string) error {\n\tvar path string\n\tif len(args) > 0 {\n\t\tpath = args[0]\n\t} else {\n\t\tpath = \"/refresh\"\n\t}\n\tvar payload client.RefreshUserPayload\n\tif cmd.Payload != \"\" {\n\t\terr := json.Unmarshal([]byte(cmd.Payload), &payload)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to deserialize payload: %s\", err)\n\t\t}\n\t}\n\tlogger := goa.NewLogger(log.New(os.Stderr, \"\", log.LstdFlags))\n\tctx := goa.WithLogger(context.Background(), logger)\n\tresp, err := c.RefreshUser(ctx, path, &payload)\n\tif err != nil {\n\t\tgoa.LogError(ctx, \"failed\", \"err\", err)\n\t\treturn err\n\t}\n\n\tgoaclient.HandleResponse(c.Client, resp, cmd.PrettyPrint)\n\treturn nil\n}", "func (c *PsCmd) Run(cli *CLI, logWriters *LogWriters) (err error) {\n\tvar aids []int\n\tif c.AccountID == 0 {\n\t\ts := NewSpinner(\"Looking up accounts\", logWriters)\n\t\ts.Start()\n\n\t\tas, err := api.Accounts()\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unable to look up accounts: %w\", err)\n\t\t}\n\t\tfor _, a := range as {\n\t\t\taids = append(aids, a.ID)\n\t\t}\n\n\t\ts.Stop()\n\t} else {\n\t\taids = append(aids, c.AccountID)\n\t}\n\n\tvar targets [][]int\n\tfor _, id := range aids {\n\t\tif c.AppID == 0 {\n\t\t\ts := NewSpinner(\"Looking up applications\", logWriters)\n\t\t\ts.Start()\n\n\t\t\tas, err := api.Applications(id)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"unable to look up applications: %w\", err)\n\t\t\t}\n\t\t\tfor _, a := range as {\n\t\t\t\ttargets = append(targets, []int{id, a.ID})\n\t\t\t}\n\n\t\t\ts.Stop()\n\t\t} else {\n\t\t\ttargets = append(targets, []int{id, c.AppID})\n\t\t}\n\t}\n\n\tif c.Watch {\n\t\tticker := time.NewTicker(c.Interval)\n\t\tfor ; true; 
<-ticker.C {\n\t\t\terr = pollAndOutput(cli, targets, c.AppPath,logWriters)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t} else {\n\t\terr = pollAndOutput(cli, targets, c.AppPath,logWriters)\n\t\treturn err\n\t}\n\n\treturn nil\n}" ]
[ "0.7724834", "0.7001258", "0.6845062", "0.6809112", "0.6801887", "0.65916026", "0.65858054", "0.6569583", "0.6555816", "0.6421461", "0.6414631", "0.64013046", "0.63939244", "0.6381241", "0.631539", "0.6242557", "0.6239383", "0.6225722", "0.6219979", "0.6218955", "0.61963075", "0.6171137", "0.6138994", "0.61363846", "0.6130139", "0.6096791", "0.60859776", "0.60693985", "0.6066218", "0.6053879", "0.6036024", "0.60344034", "0.6034264", "0.6032755", "0.6013311", "0.6007515", "0.59951234", "0.5981018", "0.59791905", "0.5978467", "0.59631276", "0.59603304", "0.59500957", "0.5945344", "0.59420335", "0.59382576", "0.59264493", "0.5925757", "0.59236336", "0.5912614", "0.59119105", "0.5881311", "0.5880354", "0.58749706", "0.5867423", "0.5866714", "0.5860598", "0.5857775", "0.58505446", "0.58477354", "0.58434373", "0.584175", "0.5840943", "0.58347815", "0.5830225", "0.582659", "0.5826083", "0.5822203", "0.58206034", "0.5811477", "0.5807747", "0.580669", "0.5795997", "0.5793313", "0.5790796", "0.57859164", "0.57851", "0.57837284", "0.5781939", "0.578068", "0.5765768", "0.57603335", "0.5760174", "0.57520777", "0.5749241", "0.5743665", "0.57431245", "0.57421064", "0.57351464", "0.57312405", "0.5727997", "0.5724996", "0.571918", "0.5718337", "0.5706847", "0.57024544", "0.5686583", "0.5677223", "0.5669725", "0.5660785" ]
0.8103023
0
Help displays available options for the pull command.
func (c *PullCommand) Help() string { helpText := ` Usage: phrase pull [options] [LOCALE] Download the translation files in the current project. Options: --format=yml See documentation for list of allowed formats --target=./phrase/locales Target folder to store locale files --tag=foo Limit results to a given tag instead of all translations --updated-since=YYYYMMDDHHMMSS Limit results to translations updated after the given date (UTC) --include-empty-translations Include empty translations in the result --convert-emoji Convert Emoji symbols --encoding=utf-8 Convert .strings or .properties with alternate encoding --skip-unverified-translations Skip unverified translations in the result --secret=YOUR_AUTH_TOKEN The Auth Token to use for this operation instead of the saved one (optional) ` return strings.TrimSpace(helpText) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (cmd PullCmd) Description() string {\n\treturn \"Fetch from a dolt remote data repository and merge.\"\n}", "func (d *downloadCommand) Help() string {\n\thelp := `Usage: hashicorp-releases download <product> <version>`\n\treturn help\n}", "func help() {\n\tlog.Infoln(\"#: the number of the peer you want to connect to\")\n\tlog.Infoln(\"r: refresh peer list\")\n\tlog.Infoln(\"q: quit pcp\")\n\tlog.Infoln(\"?: this help message\")\n}", "func PrintHelp() {\n fs := setupFlags(&options{})\n fs.Usage()\n}", "func (get *BaseCommandGetOperation) Help() string {\n\treturn \"\"\n}", "func GetHelp() string {\n\tmsg := \"List of available commands\\n /status - returns validator status, voting power, current block height \" +\n\t\t\"and network block height\\n /peers - returns number of connected peers\\n /node - return status of caught-up\\n\" +\n\t\t\"/balance - returns the current balance of your account \\n /list - list out the available commands\"\n\n\treturn msg\n}", "func (c *Subcommand) Help(flags *flag.FlagSet) {\n\tfmt.Printf(\"%s\\n\\n%s\\n\\n\", c.shortHelp, c.longHelp)\n\tflags.PrintDefaults()\n}", "func (c *PullCommand) Synopsis() string {\n\treturn \"Download the translation files in the current project\"\n}", "func (up *BaseProvisionUpOperation) Help() string {\n\treturn \"\"\n}", "func (c *ReleaseLatestCommand) Help() string {\n\thelpText := `\nUsage: tfupdate release latest [options] <SOURCE>\n\nArguments\n SOURCE A path of release data source.\n Valid format depends on --source-type option.\n - github or gitlab:\n owner/repo\n e.g. terraform-providers/terraform-provider-aws\n - tfregistryModule\n namespace/name/provider\n e.g. terraform-aws-modules/vpc/aws\n - tfregistryProvider (experimental)\n namespace/type\n e.g. 
hashicorp/aws\n\nOptions:\n -s --source-type A type of release data source.\n Valid values are\n - github (default)\n - gitlab\n - tfregistryModule\n - tfregistryProvider (experimental)\n`\n\treturn strings.TrimSpace(helpText)\n}", "func (c *GetAddressInfoCommand) Help() string {\n\treturn `Usage: wallet api getaddressinfo [options...]\nOptions:\n\t-address address\n`\n}", "func (c *UptimeCommand) Help() string {\n\thelpText := `\nUsage: pingdom uptime [options]\n Runs uptime report on Pingdom for specified period on all checks tagged\n with given tags.\nOptions:\n -period PeriodName Default: Today\n Options: %s\n -tags tag1,tag2 If specified, only include checks with one of these tags\n`\n\treturn strings.TrimSpace(fmt.Sprintf(helpText, strings.Join(c.GetPeriodOptions(), \", \")))\n}", "func (x *MultipleCommand) Help() string {\n\treturn \"\"\n}", "func (c *GetCommand) Help() string {\n\thelpText := `\nUsage :\n\twikible get [options]\n\n\tGet the wiki pages to code.\n\nOptions:\n\t-i parent id\n\t-a wiki address\n`\n\treturn strings.TrimSpace(helpText)\n}", "func (cli *CLI) Help(base Command, name string, args []string) (string, error) {\n\tb := &bytes.Buffer{}\n\terr := cli.printHelp(NewOutput(b), base, name, args)\n\treturn b.String(), err\n}", "func (vm *VM) Help(cmd *cobra.Command, args []string) {\n\n\tcli := ui.NewCLI(vm.Config)\n\tversionMap := map[string]string{\"ReleaseVersion\": ReleaseVersion, \"GitHash\": GitHash}\n\n\tif len(args) == 0 {\n\t\tfmt.Println(cli.Render(\"vmUsage\", versionMap))\n\t\treturn\n\t}\n\n\thelpType := strings.ToLower(args[0])\n\tswitch helpType {\n\tcase \"scanners\":\n\t\tfmt.Println(cli.Render(\"scannersUsage\", versionMap))\n\n\tcase \"agent-groups\":\n\t\tfmt.Print(cli.Render(\"agentGroupsUsage\", versionMap))\n\tcase \"agents\":\n\t\tfmt.Print(cli.Render(\"agentsUsage\", versionMap))\n\tcase \"export-vulns\":\n\t\tfmt.Print(cli.Render(\"ExportVulnsHelp\", versionMap))\n\n\tdefault:\n\t\tfmt.Println(cli.Render(\"vmUsage\", 
versionMap))\n\t}\n\n\treturn\n}", "func help(cfg cli.Config) {\n\tfmt.Printf(`\n usage:\n scan ip/host [option]\n options:\n -p port-range or port number Specified range or port number (default is %s)\n -c TCP connect scan (default is TCP SYN scan)\n -4 Force IPv4\n -6 Force IPv6\n example:\n scan 8.8.8.8 -p 53\n scan www.google.com -p 1-500\n scan freebsd.org -6\n\t`,\n\t\tcfg.Scan.Port)\n}", "func (c *Ping) Help() string {\n\treturn `Usage: PING [message] Returns PONG if no argument is provided, otherwise return a copy of the argument as a bulk.`\n}", "func PrintHelp() {\n\tfmt.Printf(\"[+] Author: brax (https://github.com/braaaax/gfz)\\n\")\n\tfmt.Printf(\"\\nUsage: gfz [options] <url>\\n\")\n\tfmt.Printf(\"Keyword: FUZZ, ..., FUZnZ wherever you put these keywords gfuzz will replace them with the values of the specified payload.\\n\\n\")\n\tfmt.Printf(\"Options:\\n\")\n\tfmt.Println(\"-h/--help : This help.\")\n\tfmt.Println(\"-w wordlist : Specify a wordlist file (alias for -z file,wordlist).\")\n\tfmt.Println(\"-z file/range/list,PAYLOAD : Where PAYLOAD is FILENAME or 1-10 or \\\"-\\\" separated sequence.\")\n\tfmt.Println(\"--hc/hl/hw/hh N[,N]+ : Hide responses with the specified code, lines, words, or chars.\")\n\tfmt.Println(\"--sc/sl/sw/sh N[,N]]+ : Show responses with the specified code, lines, words, or chars.\")\n\tfmt.Println(\"-t N : Specify the number of concurrent connections (10 default).\")\n\tfmt.Println(\"--post : Specify POST request method.\")\n\tfmt.Println(\"--post-form key=FUZZ : Specify form value eg key=value.\")\n\t// fmt.Println(\"--post-multipart file.FUZZ : Fuzz filename for file uploads.\")\n\tfmt.Println(\"-p IP:PORT : Specify proxy.\") // TODO: need better cmdline parse for two URLs\n\tfmt.Println(\"-b COOKIE : Specify cookie.\")\n\tfmt.Println(\"-ua USERAGENT : Specify user agent.\")\n\tfmt.Println(\"--password PASSWORD : Specify password for basic web auth.\")\n\tfmt.Println(\"--username USERNAME : Specify 
username.\")\n\tfmt.Println(\"--no-follow : Don't follow HTTP(S) redirections.\")\n\tfmt.Println(\"--no-color : Monotone output. (use for windows\")\n\tfmt.Println(\"--print-body : Print response body to stdout.\")\n\tfmt.Println(\"-k : Strict TLS connections (skip verify=false opposite of curl).\")\n\tfmt.Println(\"-q : No output.\")\n\tfmt.Println(\"-H : Add headers. (e.g. Key:Value)\")\n\tfmt.Printf(\"\\n\")\n\tfmt.Println(\"Examples: gfz -w users.txt -w pass.txt --sc 200 http://www.site.com/log.asp?user=FUZZ&pass=FUZ2Z\")\n\tfmt.Println(\" gfz -z file,default/common.txt -z list,-.php http://somesite.com/FUZZFUZ2Z\")\n\tfmt.Println(\" gfz -t 32 -w somelist.txt https://someTLSsite.com/FUZZ\")\n\tfmt.Println(\" gfz --print-body --sc 200 --post-form \\\"name=FUZZ\\\" -z file,somelist.txt http://somesite.com/form\")\n\tfmt.Println(\" gfz --post -b mycookie -ua normalbrowser --username admin --password FUZZ -z list,admin-password http://somesite.com\")\n}", "func Help() string {\n\treturn \"Commands are detailled here : https://github.com/eze-kiel/irc-bot/blob/master/README.md\"\n}", "func DisplayCommandHelp(b *Brute, m *discordgo.MessageCreate, cmd *Command) {\n\tif cmd.HelpFunc != nil {\n\t\tcmd.HelpFunc(b, b.Session, m)\n\t\treturn\n\t}\n\n\tif cmd.HelpStr != \"\" {\n\t\t_, err := b.Session.ChannelMessageSend(m.ChannelID, cmd.HelpStr)\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"Failed to send message: %v\\n\", err)\n\t\t}\n\t\treturn\n\t}\n\n\t_, err := b.Session.ChannelMessageSend(m.ChannelID, fmt.Sprintf(\"No usage info defined for `%s`. 
Here - grab some beers while waiting for Mak to add them :beers:\", cmd.Cmd))\n\tif err != nil {\n\t\tfmt.Printf(\"Failed to send message: %v\\n\", err)\n\t}\n}", "func (app *App) ShowHelp(cmdName string) {\n\n\tfindLongestOption := func(options []Option) int {\n\t\tmax := 0\n\t\tfor _, opt := range options {\n\t\t\tlength := 0\n\t\t\tif opt.Value != \"\" {\n\t\t\t\tlength = len(opt.Key) + 1 + len(opt.Value)\n\t\t\t} else {\n\t\t\t\tlength = len(opt.Key)\n\t\t\t}\n\t\t\tif length > max {\n\t\t\t\tmax = length\n\t\t\t}\n\t\t}\n\t\treturn max\n\t}\n\n\toptionFormatStr := \"\"\n\n\tformatOption := func(opt Option) string {\n\t\tif opt.Value != \"\" {\n\t\t\tpair := fmt.Sprintf(\"%v=%v\", opt.Key, opt.Value)\n\t\t\treturn fmt.Sprintf(optionFormatStr, pair)\n\t\t} else {\n\t\t\treturn fmt.Sprintf(optionFormatStr, opt.Key)\n\t\t}\n\t}\n\n\tshowOptions := func(options []Option) {\n\t\tlongest := findLongestOption(options)\n\t\toptionFormatStr = fmt.Sprintf(\" -%%-%vv\", longest)\n\t\tfmt.Printf(\"\\n\")\n\t\tfor _, opt := range options {\n\t\t\tfmt.Printf(\"%v\", formatOption(opt))\n\t\t\twriteBody(opt.Description, 3, 6+longest)\n\t\t}\n\t}\n\n\tcmd := app.find(cmdName)\n\tif cmd != nil {\n\t\tcmdAndArgs := cmd.Name + \" \" + formatCmdArgs(cmd.Args)\n\t\tfmt.Printf(\"\\n%v\\n\\n\", cmdAndArgs)\n\t\twriteBody(cmd.ShortDescription()+\".\", 2, 2)\n\t\tif cmd.ExtraDescription() != \"\" {\n\t\t\twriteBody(cmd.ExtraDescription(), 2, 2)\n\t\t}\n\t\tif len(cmd.Options) != 0 {\n\t\t\tshowOptions(cmd.Options)\n\t\t}\n\t} else {\n\t\tlongestCmd := 0\n\t\tfor _, c := range app.Commands {\n\t\t\tif len(c.Name) > longestCmd {\n\t\t\t\tlongestCmd = len(c.Name)\n\t\t\t}\n\t\t}\n\t\tcmdFormatStr := fmt.Sprintf(\" %%-%vv %%v\\n\", longestCmd)\n\t\tif app.Description != \"\" {\n\t\t\tfmt.Printf(\"\\n%v\\n\\n\", app.Description)\n\t\t}\n\t\tfor _, c := range app.Commands {\n\t\t\tfmt.Printf(cmdFormatStr, c.Name, c.ShortDescription())\n\t\t}\n\t\tif len(app.Options) != 0 
{\n\t\t\tshowOptions(app.Options)\n\t\t}\n\t}\n\n}", "func help() {\r\n fmt.Printf(\"ORIGAMI\\n\")\r\n fmt.Printf(\"\\tA web app that checks the toner levels of printers at the Elizabethtown College campus.\\n\\n\")\r\n fmt.Printf(\"USAGE\\n\")\r\n fmt.Printf(\"\\tUsage: origami [-f filepath | -h]\\n\\n\")\r\n fmt.Printf(\"OPTIONS\\n\")\r\n fmt.Printf(\"\\t-f: specify the filepath of the config file (\\\"./origami.conf\\\" by default)\\n\")\r\n fmt.Printf(\"\\t-h: this menu\\n\\n\")\r\n fmt.Printf(\"AUTHOR\\n\")\r\n fmt.Printf(\"\\tRory Dudley (aka pinecat: https://github.com/pinecat/origamiv2)\\n\\n\")\r\n fmt.Printf(\"EOF\\n\")\r\n}", "func Help() {\n\tlog.Println(\"\")\n\tlog.Println(\"Commands:\")\n\tlog.Println(\" Init : Create an empty deps.json\")\n\tlog.Println(\" Add [nickname] : Add a dependency (interactive)\")\n\tlog.Println(\" Install : Install all the dependencies listed in deps.json (default)\")\n\tlog.Println(\" Update [nickname] [branch] : Update [nickname] to use the latest commit in [branch]\")\n\tlog.Println(\" Self-Upgrade : Upgrade depman to the latest version on the master branch\")\n\tlog.Println(\" Help : Display this help\")\n\tlog.Println(\" Show-Frozen : Show dependencies as resolved to commit IDs\")\n\tlog.Println(\"\")\n\tlog.Println(\"Example: depman --verbose install\")\n\tlog.Println(\"\")\n\t//log.Println(\" freeze : For each dependency change tag and branch versions to commits (not yet implemented)\")\n\tlog.Println(\"Options:\")\n\tflag.PrintDefaults()\n}", "func (p *plugin) cmdHelp(w irc.ResponseWriter, r *irc.Request, params cmd.ParamList) {\n\tproto.PrivMsg(w, r.Target, TextHelpDisplay, r.SenderName)\n}", "func (src *SecretsRemoveCommand) Help() string {\n\treturn `Remove a secret from your Realm Application.\n\nUsage:\n realm-cli secrets remove --name [string] [options]\n realm-cli secrets remove --id [string] [options]\n\nREQUIRED:\n --name [string] OR --id [string]\n\tThe name or ID of your secret.\n` 
+\n\t\tsrc.SecretsBaseCommand.Help()\n}", "func (c *Config) getHelp() {\n\tcm := cmds.Command{\n\t\tName: \"help\",\n\t\tDescription: \"prints information about how to use pod\",\n\t\tEntrypoint: helpFunction,\n\t\tCommands: nil,\n\t}\n\t// first add all the options\n\tc.ForEach(func(ifc opt.Option) bool {\n\t\to := fmt.Sprintf(\"Parallelcoin Pod All-in-One Suite\\n\\n\")\n\t\tvar dt details\n\t\tswitch ii := ifc.(type) {\n\t\tcase *binary.Opt:\n\t\t\tdt = details{ii.GetMetadata().Name, ii.Option, ii.Description, fmt.Sprint(ii.Def), ii.Aliases,\n\t\t\t\tii.Documentation,\n\t\t\t}\n\t\tcase *list.Opt:\n\t\t\tdt = details{ii.GetMetadata().Name, ii.Option, ii.Description, fmt.Sprint(ii.Def), ii.Aliases,\n\t\t\t\tii.Documentation,\n\t\t\t}\n\t\tcase *float.Opt:\n\t\t\tdt = details{ii.GetMetadata().Name, ii.Option, ii.Description, fmt.Sprint(ii.Def), ii.Aliases,\n\t\t\t\tii.Documentation,\n\t\t\t}\n\t\tcase *integer.Opt:\n\t\t\tdt = details{ii.GetMetadata().Name, ii.Option, ii.Description, fmt.Sprint(ii.Def), ii.Aliases,\n\t\t\t\tii.Documentation,\n\t\t\t}\n\t\tcase *text.Opt:\n\t\t\tdt = details{ii.GetMetadata().Name, ii.Option, ii.Description, fmt.Sprint(ii.Def), ii.Aliases,\n\t\t\t\tii.Documentation,\n\t\t\t}\n\t\tcase *duration.Opt:\n\t\t\tdt = details{ii.GetMetadata().Name, ii.Option, ii.Description, fmt.Sprint(ii.Def), ii.Aliases,\n\t\t\t\tii.Documentation,\n\t\t\t}\n\t\t}\n\t\tcm.Commands = append(cm.Commands, cmds.Command{\n\t\t\tName: dt.option,\n\t\t\tDescription: dt.desc,\n\t\t\tEntrypoint: func(ifc interface{}) (e error) {\n\t\t\t\to += fmt.Sprintf(\"Help information about %s\\n\\n\\toption name:\\n\\t\\t%s\\n\\taliases:\\n\\t\\t%s\\n\\tdescription:\\n\\t\\t%s\\n\\tdefault:\\n\\t\\t%v\\n\",\n\t\t\t\t\tdt.name, dt.option, dt.aliases, dt.desc, dt.def,\n\t\t\t\t)\n\t\t\t\tif dt.documentation != \"\" {\n\t\t\t\t\to += \"\\tdocumentation:\\n\\t\\t\" + dt.documentation + \"\\n\\n\"\n\t\t\t\t}\n\t\t\t\tfmt.Fprint(os.Stderr, 
o)\n\t\t\t\treturn\n\t\t\t},\n\t\t\tCommands: nil,\n\t\t},\n\t\t)\n\t\treturn true\n\t},\n\t)\n\t// next add all the commands\n\tc.Commands.ForEach(func(cm cmds.Command) bool {\n\t\t\n\t\treturn true\n\t}, 0, 0,\n\t)\n\tc.Commands = append(c.Commands, cm)\n\treturn\n}", "func (c *PushCommand) Help() string {\n\treturn `\nUsage: filegate push [options] file\n ` + c.Synopsis() + `.\nOptions:\n --signaling-server=signaling_server Signaling endpoint, used by peers to exchange session description.\n --stun-servers=stun_server_1,stun_server_2,... STUN servers for traversal of NAT gateways.\n`\n}", "func (sbc *SecretsBaseCommand) Help() string {\n\treturn `\nOPTIONAL:\n --app-id [string]\n\tThe App ID for your app (i.e. the name of your app followed by a unique suffix, like \"my-app-nysja\").\n\tRequired if not being run from within a realm project directory.` +\n\t\tsbc.ProjectCommand.Help()\n}", "func (a *SLCommand) Help(command string) string {\n\tvar help string\n\n\tglobal := `\n -username \"...\" Sofleyer Username (env: IMAGES_SL_USERNAME)\n -api-key \"...\" Softlayer API Key (env: IMAGES_SL_API_KEY)\n`\n\tswitch command {\n\tcase \"modify\":\n\t\thelp = newModifyFlags().helpMsg\n\tcase \"list\":\n\t\thelp = newListFlags().helpMsg\n\tcase \"delete\":\n\t\thelp = newDeleteFlags().helpMsg\n\tcase \"copy\":\n\t\thelp = newCopyFlags().helpMsg\n\tdefault:\n\t\treturn \"no help found for command \" + command\n\t}\n\n\thelp += global\n\treturn help\n}", "func (slc *SecretsListCommand) Help() string {\n\treturn `List secrets from your Realm Application.\n\nUsage: realm-cli secrets list [options]\n` +\n\t\tslc.SecretsBaseCommand.Help()\n}", "func (c *MigrateCommand) Help(ctx context.Context, req *proto.HelpRequest, rsp *proto.HelpResponse) error {\n\t// Usage should include the name of the command\n\trsp.Usage = \"migrate\"\n\trsp.Description = \"This is an example bot command as a micro service which echos the message\"\n\treturn nil\n}", "func (g *Getter) PrintHelp(indent 
string) {\n\tfmt.Println(indent, \"The get command downloads and installs a Fyne application.\")\n\tfmt.Println(indent, \"A single parameter is required to specify the Go package, as with \\\"go get\\\"\")\n}", "func pullExample() string {\n\treturn `$ pouch images\nIMAGE ID IMAGE NAME SIZE\nbbc3a0323522 docker.io/library/busybox:latest 703.14 KB\n$ pouch pull docker.io/library/redis:alpine\n$ pouch images\nIMAGE ID IMAGE NAME SIZE\nbbc3a0323522 docker.io/library/busybox:latest 703.14 KB\n0153c5db97e5 docker.io/library/redis:alpine 9.63 MB`\n}", "func (c *Client) Help(cmd string) (string, error) {\n\tvar result string\n\terr := c.runCommand(&result, \"help\", cmd)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn result, nil\n}", "func showHelp(a *artifact.Artifact) {\n\tlog.Printf(\"Help!\")\n}", "func displayHelp(subcommand ...string) {\n\tswitch subcommand[0] {\n\tcase \"\":\n\t\tfmt.Println(MainHelp)\n\tcase \"run\":\n\t\tfmt.Println(RunHelp)\n\tcase \"build\":\n\t\tfmt.Println(BuildHelp)\n\tcase \"test\":\n\t\tfmt.Println(TestHelp)\n\tcase \"deps\":\n\t\tfmt.Println(DepsHelp)\n\tdefault:\n\t\tfmt.Println(MainHelp)\n\t}\n}", "func (c *ServeCommand) Help() string {\n\thelpText := `\nUsage: spored serve [options] ...\n\n Spored Server.\n\nOptions:\n\n -config=config.yml Configuration file to use.\n -env=development Environment.\n`\n\n\treturn strings.TrimSpace(helpText)\n}", "func PrintHelp() {\n\tfmt.Print(usage)\n}", "func (a activePlugin) Help() string {\n\treturn \"/url-shorten {url}\"\n}", "func PrintHelp() {\n\n\tfmt.Println(`usage: gpress [--decompress] [--verbose] --sourcefile <file>\n\n--decompress decompress file\n--help display help\n--sourcefile file to be compressed or decompressed\n--verbose add debug output\n--version display program version`)\n}", "func showCmdUsage(cmd *RunCmd) {\n\tvar shell = \"\"\n\t//noinspection GoBoolExpressions\n\tif config.ShowCmdShells {\n\t\tshell = fmt.Sprintf(\" (%s)\", cmd.Shell())\n\t}\n\tif !cmd.EnableHelp() 
{\n\t\tfmt.Fprintf(config.ErrOut, \"%s%s: No help available.\\n\", cmd.Name, shell)\n\t\treturn\n\t}\n\t// Usages\n\t//\n\tfor i, usage := range cmd.Config.Usages {\n\t\tor := \"or\"\n\t\tif i == 0 {\n\t\t\tfmt.Fprintf(config.ErrOut, \"Usage:\\n\")\n\t\t\tor = \" \" // 2 spaces\n\t\t}\n\t\tpad := strings.Repeat(\" \", len(cmd.Name)-1)\n\t\tif usage[0] == '(' {\n\t\t\tfmt.Fprintf(config.ErrOut, \" %s %s\\n\", pad, usage)\n\t\t} else {\n\t\t\tfmt.Fprintf(config.ErrOut, \" %s %s %s\\n\", or, cmd.Name, usage)\n\t\t}\n\t}\n\thasHelpShort := false\n\thasHelpLong := false\n\tfor _, opt := range cmd.Config.Opts {\n\t\tif opt.Short == 'h' {\n\t\t\thasHelpShort = true\n\t\t}\n\t\tif opt.Long == \"help\" {\n\t\t\thasHelpLong = true\n\t\t}\n\t}\n\t// Options\n\t//\n\tif len(cmd.Config.Opts) > 0 {\n\t\tfmt.Fprintln(config.ErrOut, \"Options:\")\n\t\tif !hasHelpShort || !hasHelpLong {\n\t\t\tswitch {\n\t\t\tcase !hasHelpShort && hasHelpLong:\n\t\t\t\tfmt.Fprintln(config.ErrOut, \" -h\")\n\t\t\tcase hasHelpShort && !hasHelpLong:\n\t\t\t\tfmt.Fprintln(config.ErrOut, \" --help\")\n\t\t\tdefault:\n\t\t\t\tfmt.Fprintln(config.ErrOut, \" -h, --help\")\n\t\t\t}\n\t\t\tfmt.Fprintln(config.ErrOut, \" Show full help screen\")\n\t\t}\n\t}\n\tfor _, opt := range cmd.Config.Opts {\n\t\tb := &strings.Builder{}\n\t\tb.WriteString(\" \")\n\t\tif opt.Short != 0 {\n\t\t\tb.WriteRune('-')\n\t\t\tb.WriteRune(opt.Short)\n\t\t}\n\t\tif opt.Long != \"\" {\n\t\t\tif opt.Short != 0 {\n\t\t\t\tb.WriteString(\", \")\n\t\t\t}\n\t\t\tb.WriteString(\"--\")\n\t\t\tb.WriteString(opt.Long)\n\t\t}\n\t\tif opt.Value != \"\" {\n\t\t\tb.WriteRune(' ')\n\t\t\tb.WriteRune('<')\n\t\t\tb.WriteString(opt.Value)\n\t\t\tb.WriteRune('>')\n\t\t}\n\t\tif opt.Desc != \"\" {\n\t\t\tif opt.Short != 0 && opt.Long == \"\" && opt.Value == \"\" {\n\t\t\t\tb.WriteString(\" \")\n\t\t\t} else {\n\t\t\t\tb.WriteString(\"\\n \")\n\t\t\t}\n\t\t\tb.WriteString(opt.Desc)\n\t\t}\n\t\tfmt.Fprintln(config.ErrOut, b.String())\n\t}\n}", "func 
(ScrapeHostResourceUtilization) Help() string {\n\treturn \"Collect info from SYS.M_HOST_RESOURCE_UTILIZATION\"\n}", "func CommandHelp(args []string) {\n\tif len(os.Args) > 2 {\n\t\tHelp(os.Args[2])\n\t} else {\n\t\tHelp(\"\")\n\t}\n}", "func printCLIHelp() {\n\tfmt.Println(\"Valid Commands:\")\n\tfmt.Println(\"(if not joined) join intro\")\n\tfmt.Println(\"(if not joined) join [port_number]\")\n\tfmt.Println(\"(if joined) leave\")\n\tfmt.Println(\"(if joined) members\")\n\tfmt.Println(\"(if joined) id\")\n\tfmt.Println(\"(if joined) gossip\")\n\tfmt.Println(\"(if joined) all-to-all\")\n\tfmt.Println(\"(if joined) put [filepath]\")\n\tfmt.Println(\"(if joined) get [filename]\")\n\tfmt.Println(\"(if joined) delete [filename]\")\n\tfmt.Println(\"(if joined) ls [filename]\")\n\tfmt.Println(\"(if joined) store\")\n\tfmt.Println(\"(all scenarios) exit\")\n}", "func (c *TwitterCommand) Help() string {\n\treturn \"Usage: a-know twitter\"\n}", "func (c *Command) Help() string {\n\treturn \"\"\n}", "func (sc *SecretsCommand) Help() string {\n\treturn sc.Synopsis()\n}", "func helpCommand(c *WebsocketConnection, a []string) {\n\tc.WritePrivateMessage(getHelpMessage())\n}", "func (c *NodeInfoCommand) Help() string {\n\treturn `Usage: wallet api nodeinfo\n`\n}", "func (c *Client) Help(cmd ...string) ([]string, error) {\n\tswitch len(cmd) {\n\tcase 0:\n\t\treturn c.Exec(\"help\")\n\tcase 1:\n\t\treturn c.ExecCmd(NewCmd(\"help\").WithArgs(cmd[0]))\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"more than one cmd specified\")\n\t}\n}", "func Help(cmdInfo CommandInfo) {\n\tif len(cmdInfo.CmdOps) == 1 {\n\t\t// When user only writes: ?help\n\t\tprettyPrintHelp(\n\t\t\t\"Error\",\n\t\t\t\"You must query a valid command.\",\n\t\t\tformat(\n\t\t\t\tcreateFields(\"EXAMPLE\", cmdInfo.Prefix+\"help search\", true),\n\t\t\t),\n\t\t\tcmdInfo,\n\t\t\t14886454,\n\t\t)\n\t\treturn\n\t}\n\tfull := strings.Join(cmdInfo.CmdOps[1:], \" \")\n\tif !find(full, cmdInfo) 
{\n\t\tprettyPrintHelp(\n\t\t\tfull,\n\t\t\t\"Command Not Found\",\n\t\t\tformat(\n\t\t\t\tcreateFields(\"To List All Commands:\", cmdInfo.Prefix+\"list\", true),\n\t\t\t),\n\t\t\tcmdInfo,\n\t\t\t14886454,\n\t\t)\n\t\treturn\n\t}\n\t// Valid commands\n\tswitch full {\n\tcase \"search\":\n\t\tprettyPrintHelp(\n\t\t\t\"Search\",\n\t\t\t\"Search will look up an item from New Horizon's bug and fish database.\",\n\t\t\tformat(\n\t\t\t\tcreateFields(\"EXAMPLE\", cmdInfo.Prefix+\"search emperor butterfly\", true),\n\t\t\t\tcreateFields(\"EXAMPLE\", cmdInfo.Prefix+\"search north bug\", true),\n\t\t\t),\n\t\t\tcmdInfo,\n\t\t\t9410425,\n\t\t)\n\tcase \"list\":\n\t\tprettyPrintHelp(\n\t\t\t\"List\",\n\t\t\t\"List will show all commands the bot understands.\",\n\t\t\tformat(\n\t\t\t\tcreateFields(\"EXAMPLE\", cmdInfo.Prefix+\"list\", true),\n\t\t\t),\n\t\t\tcmdInfo,\n\t\t\t9410425,\n\t\t)\n\tcase \"pong\":\n\t\tprettyPrintHelp(\n\t\t\t\"Pong\",\n\t\t\t\"Playing with pong.\",\n\t\t\tformat(\n\t\t\t\tcreateFields(\"EXAMPLE\", cmdInfo.Prefix+\"pong\", true),\n\t\t\t),\n\t\t\tcmdInfo,\n\t\t\t9410425,\n\t\t)\n\t}\n}", "func Cmd(method, source string, args Options) ([]byte, error) {\n\treturn fetch.Cmd(fetch.Request{\n\t\tMethod: method,\n\t\tURL: fmt.Sprintf(\n\t\t\t\"http://api.pullword.com/%s.php?source=%s&param1=%d&param2=%d\",\n\t\t\tmethod,\n\t\t\tsource,\n\t\t\targs.Threshold,\n\t\t\targs.Debug,\n\t\t),\n\t})\n}", "func ShowHelp() {\n\tfmt.Printf(\"%v\\n\", helpText)\n}", "func (a API) Help(cmd *btcjson.HelpCmd) (e error) {\n\tRPCHandlers[\"help\"].Call <-API{a.Ch, cmd, nil}\n\treturn\n}", "func (c *RestoreCommand) Help() string {\n\treturn `\nUsage: consul-snapshot restore filename.backup\n\nStarts a restore process\n`\n}", "func (c *ImportRawKeyCommand) Help() string {\n\treturn `Usage: keygen api importrawkey [options...]\nOptions:\n -key private key\n -pass passphrase\n`\n}", "func help() {\n\tfmt.Println(\"\\n--------------Command--------------\")\n\tfmt.Println(\"1. 
status\")\n\tfmt.Println(\"2. input [tipe identitas: string] [nomor identitas: integer]\")\n\tfmt.Println(\"3. leave [nomor loker: integer]\")\n\tfmt.Println(\"4. find [nomor identitas: integer]\")\n\tfmt.Println(\"5. search [tipe identitas: string]\")\n\tfmt.Println(\"6. exit\")\n\tfmt.Println(\"--------------End Command--------------\\n\")\n}", "func (ScrapeVserver) Help() string {\n\treturn \"Collect Netapp Vserver info;\"\n}", "func (suc *SecretsUpdateCommand) Help() string {\n\treturn `Update a secret for your Realm Application.\n\nUsage:\n realm-cli secrets update --name [string] --value [string] [options]\n realm-cli secrets update --id [string] --value [string] [options]\n\nREQUIRED:\n --name [string] OR --id [string]\n\tThe name or ID of your secret.\n\n --value [string]\n\tThe value that your secret is being updated to.\n` +\n\t\tsuc.SecretsBaseCommand.Help()\n}", "func usage() {\n\tfmt.Printf(\"%s\", helpString)\n}", "func DisplayHelp(b *Brute, m *discordgo.MessageCreate, c []*Command) {\n\tif c == nil || len(c) == 0 {\n\t\tmessage := \"Supported commands:\\n\"\n\t\tfor _, cmd := range b.Commands {\n\t\t\tmessage = fmt.Sprintf(\"%s\\n`%s%s`%s\", message, b.Prefix, cmd.Cmd[0], getAliases(cmd.Cmd, b.Prefix))\n\t\t}\n\n\t\tmessage = fmt.Sprintf(\"%s\\n\\nUse `%shelp [command]` for more info about the concrete command\", message, b.Prefix)\n\n\t\t_, err := b.Session.ChannelMessageSend(m.ChannelID, message)\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"Failed to send message: %v\\n\", err)\n\t\t}\n\n\t\treturn\n\t}\n\n\tfor _, cmd := range c {\n\t\tif cmd != nil {\n\t\t\tDisplayCommandHelp(b, m, cmd)\n\t\t}\n\t}\n}", "func (c *Command) Help() string {\n\t// Some commands with subcommands (kv/snapshot) call this without initializing\n\t// any flags first, so exit early to avoid a panic\n\tif c.flagSet == nil {\n\t\treturn \"\"\n\t}\n\treturn c.helpFlagsFor(c.flagSet)\n}", "func (flags *Flags) Help() string {\n\treturn \"\"\n}", "func (v *ValidateCmd) Help() string 
{\n\treturn `\nChecks the currently logged in account and verifies its ability to access TFE API. \nIf successful, emits a notification to macOS with the name of the user. \n`\n}", "func printHelp() {\n\t// print help using the flag package default\n\tflag.Usage()\n\t// add the two trailing arguments for source and dest\n\tfmt.Fprintf(os.Stderr, \" source: The source of the copy, either a local file path or an s3 path like s3:bucket:/path\\n\")\n\tfmt.Fprintf(os.Stderr, \" destination: The destination of the copy, in the same format as source (above)\\n\")\n\tfmt.Fprintf(os.Stderr, \"\\nBoth source and destination are required, and one must be an s3 path, another must be a local path\\n\\n\")\n}", "func (c *VsmStatsCommand) Help() string {\n\thelpText := `\nUsage: maya volume stats <volname> \n\n Display Volume Stats information including running status\n and Read/Write.\n\nVolume Stats Options:\n -json \n Output stats in json format\n\n`\n\treturn strings.TrimSpace(helpText)\n}", "func Help(prefix string) string {\n\treturn fmt.Sprintf(\"usage: %s [command] [command_args...]\\n\", prefix) +\n\t\t`Available commands:\n - **help** - shows this message\n\t- **next** - shows information about the next race\n\t- **last** - shows information about the last race\n\t- **current** - shows races for the current season\n\t- **results** - shows information about results\n\t\t- **results circuit <circuit>** - shows historical information about the winners at a given circuit for the last years\n\t\t- **results driver <driver>** - shows last results for a driver\n`\n}", "func Help(args []string) {\n\tpretty.Println(\n\t\t\"[Impatience - Help]:\\n\",\n\t\t\"\t- Avaliable sub commands:\\n\",\n\t\t\"\t\tº launch\\n\",\n\t\t\"\t\tº init\\n\",\n\t\t\"\t\tº help\\n\",\n\t\t\"------------------------------------------\\n\\n\",\n\t\t// Launch\n\t\t\"# command \\\"launch\\\": \\n\",\n\t\t\"Launches a new web server.\\n\",\n\t\t\"\t--address, -a server address\\n\",\n\t\t\"\t--cache, -s 
cache strategy, as of now only \\\"cookie\\\" is valid\\n\",\n\t\t\"\t--config, -c path for a JSON configuration\\n\",\n\t\t\"\t--node, -n path to node_modules root\\n\",\n\t\t\"\t--node-ext file extensions that shall be analyzed looking for node libraries\\n\",\n\t\t\"\t--port, -p TCP port the server shall be launched in\\n\",\n\t\t\"\t--root, -r public root that shall be served by Impatience\\n\",\n\t\t\"\t--ts enable ts support, you may specify the path to tsconfig\\n\",\n\t)\n}", "func PrintCmdHelp(toolName string, command Command) {\n\tbw := bufio.NewWriter(os.Stdout)\n\n\tdata := struct {\n\t\tToolName string\n\t\tCmdUsageLine string\n\t\tCmdLong string\n\t}{\n\t\ttoolName,\n\t\tcommand.OptionInfo().UsageLine,\n\t\tcommand.OptionInfo().Long,\n\t}\n\n\tfgutil.RenderTemplate(bw, tplCmdHelp, data)\n\tbw.Flush()\n}", "func (UninstallCommand) Help() string {\n\treturn heredoc.Docf(`\n\t\tUninstalls a given Helm release.\n\n\t\tIt removes all of the resources associated with the last release of the chart\n\t\tas well as the release history, freeing it up for future use.\n\n\t\tUsage:\n\t\t helm uninstall RELEASE_NAME [...] [flags]\n\n\t\tAliases:\n\t\t uninstall, del, delete, un\n\n\t\tFlags:\n\t\t%s\n\t`, indent.String(renderSupportedFlags(SupportedUninstallFlags{}), 4))\n}", "func (cmd RemoteCmd) Description() string {\n\treturn \"Manage set of tracked repositories.\"\n}", "func showHelp(exitStatus int) {\n\tfmt.Print(help.Help)\n\n\tos.Exit(exitStatus)\n}", "func (p *ReminderPlugin) Help(bot *bruxism.Bot, service bruxism.Service, message bruxism.Message, detailed bool) []string {\n\thelp := []string{\n\t\tbruxism.CommandHelp(service, \"reminder\", \"<time> <reminder>\", \"Sets a reminder that is sent after the provided time.\")[0],\n\t\tbruxism.CommandHelp(service, \"reminderlist\", \"\", \"List all active reminders.\")[0],\n\t\tbruxism.CommandHelp(service, \"reminderdelete\", \"<index>\", \"Deletes a reminder by index. 
eg: reminderdelete 0\")[0],\n\t}\n\tif detailed {\n\t\thelp = append(help, []string{\n\t\t\t\"Examples: \",\n\t\t\tp.randomReminder(service),\n\t\t\tp.randomReminder(service),\n\t\t}...)\n\t}\n\treturn help\n}", "func (c *DisplayCommand) Help() string {\n\thelpText := `\nUsage: blinky display [options]\n\tDisplay system informations\nOptions:\n\t--debug Debug mode enabled\n`\n\treturn strings.TrimSpace(helpText)\n}", "func (s *GlobalLookupFactory) Help() string {\n\treturn \"\"\n}", "func (c VerifyCmd) Help() string {\n\thelpText := `\nUsage: gtm verify <version-constraint>\n\n Check if gtm satisfies a Semantic Version 2.0 constraint.\n`\n\treturn strings.TrimSpace(helpText)\n}", "func (pull *LibcomposePullProperty) Description() string {\n\treturn \"When building, pull all images before using them?\"\n}", "func Help(args []string, cmds []*command.Command) int {\n\tvar (\n\t\tcmd string\n\t)\n\tif len(args) >= 3 {\n\t\tcmd = args[2]\n\t}\n\t// Prints the help if the command exist.\n\tfor _, c := range cmds {\n\t\tif c.Name() == cmd {\n\t\t\treturn c.Usage()\n\t\t}\n\t}\n\tif cmd == \"\" {\n\t\tfmt.Fprintf(os.Stderr, \"missing help command. Usage:\\n\\n\\t$ bw help [command]\\n\\nAvailable help commands\\n\\n\")\n\t\tvar usage []string\n\t\tfor _, c := range cmds {\n\t\t\tname := c.Name()\n\t\t\tfor i := len(name); i < 12; i++ {\n\t\t\t\tname += \" \"\n\t\t\t}\n\t\t\tusage = append(usage, fmt.Sprintf(\"\\t%s\\t- %s\\n\", name, c.Short))\n\t\t}\n\t\tsort.Strings(usage)\n\t\tfor _, u := range usage {\n\t\t\tfmt.Fprint(os.Stderr, u)\n\t\t}\n\t\tfmt.Fprintln(os.Stderr, \"\")\n\t\treturn 0\n\t}\n\tfmt.Fprintf(os.Stderr, \"help command %q not recognized. 
Usage:\\n\\n\\t$ bw help\\n\\n\", cmd)\n\treturn 2\n}", "func (p *Plugin) Help() {\n\tfmt.Println(\"\tgump: Initialize an empty .version.sh file.\")\n\tfmt.Println(\"\tgump:user/repo: Initialize a .version.sh file downloaded from github.com/user/repo/.version.sh.\")\n}", "func (vm *VM) ExportVulnsHelp(cmd *cobra.Command, args []string) {\n\tfmt.Printf(\"tiogo version %s (%s)\", ReleaseVersion, GitHash)\n\tif vm.Config.VM.Log.IsLevelEnabled(log.DebugLevel) {\n\t\tfmt.Println(spew.Print(vm.Config))\n\t}\n\n\tcli := ui.NewCLI(vm.Config)\n\tfmt.Println(cli.Render(\"exportVulnsUsage\", nil))\n\n\treturn\n}", "func PrintHelp() {\n\n\thelpString := `\n Usage: ./nexus-repository-cli.exe [option] [parameters...]\n\n [options]\n -list\n List the repositories in Nexus. Optional parameters: repoType, repoPolicy\n -create\n Create a repository in Nexus. Required parameter: repoId, repoType, provider, repoPolicy (only for maven2). Optional parameter: exposed\n -delete\n Delete a repository in Nexus. Required parameter: repoId\n -addRepoToGroup\n Add a reposirory to a group repository. Required parameters: repoId, repositories\n\n [parameters]\n -nexusUrl string\n Nexus server URL (default \"http://localhost:8081/nexus\")\n -exposed\n Set this flag to expose the repository in nexus.\n -username string\n Username for authentication\n -password string\n Password for authentication\n -repoId string\n ID of the Repository\n -repoType string\n Type of a repository. Possible values : hosted/proxy/group\n -repoPolicy string\n Policy of the hosted repository. Possible values : snapshot/release\n -provider string\n Repository provider. 
Possible values: maven2/npm/nuget\n -remoteStorageUrl string\n Remote storage url to proxy in Nexus\n -repositories string\n Comma separated value of repositories to be added to a group.\n -verbose\n Set this flag for Debug logs.\n\t`\n\n\tflag.Usage = func() {\n\t\tfmt.Fprintf(os.Stderr, helpString)\n\t}\n}", "func (c *initCommand) Help() string {\n\treturn initHelp\n}", "func (srv *Server) Help() string {\n\thelp := \"Usage: docker COMMAND [arg...]\\n\\nA self-sufficient runtime for linux containers.\\n\\nCommands:\\n\"\n\tfor _, cmd := range [][]string{\n\t\t{\"attach\", \"Attach to a running container\"},\n\t\t{\"commit\", \"Create a new image from a container's changes\"},\n\t\t{\"diff\", \"Inspect changes on a container's filesystem\"},\n\t\t{\"export\", \"Stream the contents of a container as a tar archive\"},\n\t\t{\"history\", \"Show the history of an image\"},\n\t\t{\"images\", \"List images\"},\n\t\t{\"import\", \"Create a new filesystem image from the contents of a tarball\"},\n\t\t{\"info\", \"Display system-wide information\"},\n\t\t{\"inspect\", \"Return low-level information on a container\"},\n\t\t{\"kill\", \"Kill a running container\"},\n\t\t{\"login\", \"Register or Login to the docker registry server\"},\n\t\t{\"logs\", \"Fetch the logs of a container\"},\n\t\t{\"port\", \"Lookup the public-facing port which is NAT-ed to PRIVATE_PORT\"},\n\t\t{\"ps\", \"List containers\"},\n\t\t{\"pull\", \"Pull an image or a repository from the docker registry server\"},\n\t\t{\"push\", \"Push an image or a repository to the docker registry server\"},\n\t\t{\"restart\", \"Restart a running container\"},\n\t\t{\"rm\", \"Remove a container\"},\n\t\t{\"rmi\", \"Remove an image\"},\n\t\t{\"run\", \"Run a command in a new container\"},\n\t\t{\"start\", \"Start a stopped container\"},\n\t\t{\"stop\", \"Stop a running container\"},\n\t\t{\"tag\", \"Tag an image into a repository\"},\n\t\t{\"version\", \"Show the docker version information\"},\n\t\t{\"wait\", \"Block 
until a container stops, then print its exit code\"},\n\t} {\n\t\thelp += fmt.Sprintf(\" %-10.10s%s\\n\", cmd[0], cmd[1])\n\t}\n\treturn help\n}", "func usage() {\n\tfor _, key := range commandKeys {\n\t\tfmt.Printf(\"%v\\n\", commands[key])\n\t}\n\n}", "func usage() {\n\tfor _, key := range commandKeys {\n\t\tfmt.Printf(\"%v\\n\", commands[key])\n\t}\n\n}", "func (c *GetOfflineCommand) Help() string {\n\tif len(c.helpText) == 0 {\n\t\tt, _ := template.New(\"GetOfflineCommandHelp\").Parse(getCommandHelp)\n\t\tparams := getCommandHelpParams{cliutil.SsmCliName, getCommand, sendCommand, cliutil.FormatFlag(getCommandCommandID), cliutil.FormatFlag(getCommandDetails)}\n\t\tbuf := new(bytes.Buffer)\n\t\tt.Execute(buf, params)\n\t\tc.helpText = buf.String()\n\t}\n\treturn c.helpText\n}", "func (c *RunCommand) Help() string {\n\thelpText := `\nsource_fileをコンパイル後、problem_noで指定された番号の問題のテストを実行する\n\nUsage:\n\tgoyuki run problem_no source_file\n\nOptions:\n\t-language=lang, -l\t\t実行する言語を指定します (デフォルト 拡張子から判別)\n\t-validater=validater, -V テストの一致方法を指定します (デフォルト diff validater)\n\t-verbose, -vb\t\tコンパイル時、実行時の標準出力、標準エラー出力を表示する\n\t-place=n, -p\t\t\t出力される数値を小数点以下n桁に丸める (float validater時のみ) (0<=n<=15)\n\n\n`\n\treturn strings.TrimSpace(helpText)\n}", "func ShowCmdHelp(cmd *RunCmd) {\n\tvar shell = \"\"\n\t//noinspection GoBoolExpressions\n\tif config.ShowCmdShells {\n\t\tshell = fmt.Sprintf(\" (%s)\", cmd.Shell())\n\t}\n\n\tif !cmd.EnableHelp() {\n\t\tfmt.Fprintf(config.ErrOut, \"%s%s: No help available.\\n\", cmd.Name, shell)\n\t\treturn\n\t}\n\tfmt.Fprintf(config.ErrOut, \"%s%s:\\n\", cmd.Name, shell)\n\t// Desc\n\t//\n\tif len(cmd.Config.Desc) > 0 {\n\t\tfor _, desc := range cmd.Config.Desc {\n\t\t\tfmt.Fprintf(config.ErrOut, \" %s\\n\", desc)\n\t\t}\n\t\t// } else {\n\t\t// \tfmt.Fprintf(errOut, \"%s:\\n\", cmd.name)\n\t}\n\tshowCmdUsage(cmd)\n}", "func showHelp(s string) {\n var commands = setHelpCommands()\n fmt.Println(\"gobash> showing help\")\n switch s {\n case \"all\", 
\"help\", \"h\", \"?\":\n color.Green(\"%v\\n\", commands[\"help\"])\n color.Green(\"%v\\n\", commands[\"cd\"])\n color.Green(\"%v\\n\", commands[\"ls\"])\n color.Green(\"%v\\n\", commands[\"exec\"])\n color.Green(\"%v\\n\", commands[\"exit\"])\n case \"cd\":\n color.Green(\"%v\\n\", commands[\"cd\"])\n case \"ls\":\n color.Green(\"%v\\n\", commands[\"ls\"])\n case \"exec\":\n color.Green(\"%v\\n\", commands[\"exec\"])\n case \"exit\":\n color.Green(\"%v\\n\", commands[\"exit\"])\n default:\n color.Red(\"help command unrecognized\")\n }\n}", "func OptionsHelpTemplate() string {\n\treturn \"\"\n}", "func (cp *CmdExit) Help() string {\n\treturn `exit\n\tTerminates the process and returns control to the shell.`\n}", "func (c *DeleteCommand) Help() string {\n\thelpText := `\nUsage: sloppy delete [OPTIONS] PROJECT[/SERVICE[/APP]]\n\n Deletes the given project, service or application\n\nOptions:\n\n -f, --force=false Force the deletion of a given project, service or an app\n\nExamples:\n\n sloppy delete letschat\n sloppy delete -f letschat/frontend\n sloppy delete letschat/frontend/apache\n`\n\treturn strings.TrimSpace(helpText)\n}", "func List(repo name.Repository, options ...Option) ([]string, error) {\n\to, err := makeOptions(options...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn newPuller(o).List(o.context, repo)\n}", "func printHelp(parser *flags.Parser) {\n\tparser.WriteHelp(os.Stderr)\n\tos.Exit(0)\n}", "func ShowHelp() {\n\tfmt.Println(`Usage of ./kube-ipam:\n -help\n Display usage help information of kube-ipam.\n -outputconf string\n Generate the configuration files required by different CNI plug-ins.(Use with \"macvlan | ipvlan | kube-router | bridge | flannel\")\n -version\n Display software version information of kube-ipam.\n `)\n}", "func PullOptions(opts ...PullOption) *PullSettings {\n\toptions := &PullSettings{\n\t\tLimit: -1,\n\t}\n\n\tfor _, opt := range opts {\n\t\topt(options)\n\t}\n\treturn options\n}", "func (ScrapeBrokerStatus) Help() 
string {\n\treturn \"Scrape information from brokerStatusQuery\"\n}", "func (fv *UDPAddrs) Help() string {\n\treturn \"a UDP address\"\n}", "func Usage() {\n\t// To embed the bot user and password comment the line above and uncomment the line below\n\tfmt.Printf(\"Usage: %v -i <ip address> -p <port> -d <domain name>\\n\", runAs)\n\tflag.PrintDefaults()\n}" ]
[ "0.67475915", "0.6715028", "0.6536846", "0.6511913", "0.63175184", "0.62976", "0.6272036", "0.62529284", "0.6177238", "0.6158495", "0.61454433", "0.613495", "0.6116337", "0.6108647", "0.6097167", "0.6093562", "0.6082107", "0.60775214", "0.6075261", "0.60701317", "0.6062361", "0.60329705", "0.60326374", "0.6031859", "0.6026639", "0.6018327", "0.6010704", "0.60063404", "0.6005279", "0.6004181", "0.600377", "0.5994041", "0.59932774", "0.5988601", "0.59769595", "0.5976748", "0.59672207", "0.5960101", "0.595585", "0.5948359", "0.5946635", "0.59464884", "0.592177", "0.59170663", "0.59014434", "0.58986366", "0.5889633", "0.5881884", "0.5874312", "0.586951", "0.5863855", "0.5843945", "0.5838042", "0.5828985", "0.5822384", "0.5817133", "0.5813989", "0.58095694", "0.5799928", "0.57908213", "0.5773593", "0.5761448", "0.57567745", "0.5754472", "0.57366383", "0.5735627", "0.5734887", "0.57338744", "0.5730857", "0.5729456", "0.57291895", "0.57282585", "0.5728194", "0.57233787", "0.571692", "0.5700233", "0.56900877", "0.5682619", "0.5681778", "0.5675187", "0.56728625", "0.5662894", "0.565907", "0.56487346", "0.56427246", "0.56427246", "0.5636505", "0.563122", "0.56267226", "0.5622589", "0.56138104", "0.5612574", "0.56061053", "0.5599219", "0.5595074", "0.55923754", "0.5579033", "0.55677164", "0.556376", "0.55635226" ]
0.77002186
0
Synopsis displays a synopsis of the pull command.
func (c *PullCommand) Synopsis() string { return "Download the translation files in the current project" }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (cmd PullCmd) Description() string {\n\treturn \"Fetch from a dolt remote data repository and merge.\"\n}", "func (c *SyncCommand) Synopsis() string {\n\treturn \"Pull latest on all branches in all repositories\"\n}", "func (c *PullCommand) Help() string {\n\thelpText := `\n\tUsage: phrase pull [options] [LOCALE]\n\n\t Download the translation files in the current project.\n\n\tOptions:\n\n --format=yml See documentation for list of allowed formats\n --target=./phrase/locales Target folder to store locale files\n --tag=foo Limit results to a given tag instead of all translations\n --updated-since=YYYYMMDDHHMMSS Limit results to translations updated after the given date (UTC)\n --include-empty-translations Include empty translations in the result\n --convert-emoji Convert Emoji symbols\n --encoding=utf-8 Convert .strings or .properties with alternate encoding\n --skip-unverified-translations Skip unverified translations in the result\n --secret=YOUR_AUTH_TOKEN The Auth Token to use for this operation instead of the saved one (optional)\n\t`\n\treturn strings.TrimSpace(helpText)\n}", "func pullExample() string {\n\treturn `$ pouch images\nIMAGE ID IMAGE NAME SIZE\nbbc3a0323522 docker.io/library/busybox:latest 703.14 KB\n$ pouch pull docker.io/library/redis:alpine\n$ pouch images\nIMAGE ID IMAGE NAME SIZE\nbbc3a0323522 docker.io/library/busybox:latest 703.14 KB\n0153c5db97e5 docker.io/library/redis:alpine 9.63 MB`\n}", "func (c *ReleaseLatestCommand) Synopsis() string {\n\treturn \"Get the latest release version\"\n}", "func (d *downloadCommand) Synopsis() string {\n\treturn \"Download a specific version of a product.\"\n}", "func (c *GetCommand) Synopsis() string {\n\treturn \"Getting the wiki tree to code\"\n}", "func (c *PushCommand) Synopsis() string {\n\treturn \"Sends resource directly to the peer\"\n}", "func (suc *SecretsUpdateCommand) Synopsis() string {\n\treturn \"Update a secret for your Realm App.\"\n}", "func (pull *LibcomposePullProperty) 
Description() string {\n\treturn \"When building, pull all images before using them?\"\n}", "func (src *SecretsRemoveCommand) Synopsis() string {\n\treturn \"Remove a secret from your Realm App.\"\n}", "func CmdPullStory(c *cli.Context) {\n\n\tfrom := c.String(\"source\")\n\tsource, err := gitutil.LookupBranchSource(from, true)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\t// Get repo instance\n\troot, _ := os.Getwd()\n\trepo, err := gitutil.GetRepo(root)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tvar remoteName string\n\n\t// Extract source's remote and branch names\n\tsources := strings.Split(source, \"/\")\n\tif len(sources) == 1 {\n\t\tremoteName = \"origin\"\n\t} else {\n\t\tremoteName = sources[0]\n\t}\n\n\t// Fetch from repo before pulling\n\tfmt.Printf(\"Fetching most recent with remote: `%s`\\n\", remoteName)\n\tif err = gitutil.Fetch(repo, remoteName); err != nil {\n\t\t// do not fail entire app even if fetch fails\n\t\tlog.Println(err)\n\t}\n\n\tfmt.Printf(\"Merging %s into local branch\\n\", source)\n\terr = gitutil.Pull(repo, source)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "func (d *downloadCommand) Help() string {\n\thelp := `Usage: hashicorp-releases download <product> <version>`\n\treturn help\n}", "func (c *ServeCommand) Synopsis() string {\n\treturn \"Spored Server\"\n}", "func executePull(location string) {\n\tfmt.Println(\"Pulling from \" + location + \" ...\")\n}", "func (slc *SecretsListCommand) Synopsis() string {\n\treturn \"List secrets from your Realm App.\"\n}", "func (c *TwitterCommand) Synopsis() string {\n\treturn \"Display a-know's Twitter account name\"\n}", "func (c *UptimeCommand) Synopsis() string {\n\treturn \"Generate uptime report based on provided arguments\"\n}", "func (cmd *DatasetDownload) Description() string { return cmd.Synopsis() }", "func (c *SetupCommand) Synopsis() string {\n\treturn \"Utility to setup the elos command line interface\"\n}", "func Cmd(method, source string, args Options) ([]byte, 
error) {\n\treturn fetch.Cmd(fetch.Request{\n\t\tMethod: method,\n\t\tURL: fmt.Sprintf(\n\t\t\t\"http://api.pullword.com/%s.php?source=%s&param1=%d&param2=%d\",\n\t\t\tmethod,\n\t\t\tsource,\n\t\t\targs.Threshold,\n\t\t\targs.Debug,\n\t\t),\n\t})\n}", "func (c *Command) Synopsis() string {\n\treturn \"\"\n}", "func (c *UpdateCommand) Synopsis() string {\n\treturn \"Reread the model configuration and rebuild the working model\"\n}", "func (cmd RemoteCmd) Description() string {\n\treturn \"Manage set of tracked repositories.\"\n}", "func usage() {\n\tfmt.Printf(\"%s\", helpString)\n}", "func (c *RestoreCommand) Help() string {\n\treturn `\nUsage: consul-snapshot restore filename.backup\n\nStarts a restore process\n`\n}", "func execSynopsis(_ int, p *gop.Context) {\n\targs := p.GetArgs(1)\n\tret := doc.Synopsis(args[0].(string))\n\tp.Ret(1, ret)\n}", "func (c *DisplayCommand) Synopsis() string {\n\treturn \"Display system informations\"\n}", "func (c *PullCommand) Run(args []string) int {\n\tcmdFlags := flag.NewFlagSet(\"pull\", flag.ContinueOnError)\n\tcmdFlags.Usage = func() { c.UI.Output(c.Help()) }\n\n\tconfig := c.Config\n\n\tcmdFlags.StringVar(&config.Secret, \"secret\", config.Secret, \"\")\n\tcmdFlags.StringVar(&config.TargetDirectory, \"target\", config.TargetDirectory, \"\")\n\tcmdFlags.StringVar(&config.Encoding, \"encoding\", config.Encoding, \"\")\n\tcmdFlags.StringVar(&config.Format, \"format\", config.Format, \"\")\n\n\treq := new(phrase.DownloadRequest)\n\tcmdFlags.StringVar(&req.Tag, \"tag\", \"\", \"\")\n\tvar updatedSince string\n\tcmdFlags.StringVar(&updatedSince, \"updated-since\", \"\", \"\")\n\tcmdFlags.BoolVar(&req.ConvertEmoji, \"convert-emoji\", false, \"\")\n\tcmdFlags.BoolVar(&req.SkipUnverifiedTranslations, \"skip-unverified-translations\", false, \"\")\n\tcmdFlags.BoolVar(&req.IncludeEmptyTranslations, \"include-empty-translations\", false, \"\")\n\n\tif err := cmdFlags.Parse(args); err != nil {\n\t\treturn 1\n\t}\n\n\tif updatedSince != 
\"\" {\n\t\tvar err error\n\t\treq.UpdatedSince, err = time.Parse(timeFormat, updatedSince)\n\t\tif err != nil {\n\t\t\tc.UI.Error(fmt.Sprintf(\"Error parsing updated-since (%s), format should be YYYYMMDDHHMMSS\", updatedSince))\n\t\t\treturn 1\n\t\t}\n\t}\n\n\tif config.Format == \"\" {\n\t\tconfig.Format = defaultDownloadFormat\n\t}\n\n\tc.API.AuthToken = config.Secret\n\treq.Encoding = config.Encoding\n\treq.Format = config.Format\n\n\tif err := config.Valid(); err != nil {\n\t\tc.UI.Error(err.Error())\n\t\treturn 1\n\t}\n\n\terr := c.fetch(req, cmdFlags.Args())\n\tif err != nil {\n\t\tc.UI.Error(fmt.Sprintf(\"Error encountered fetching the locales:\\n\\t%s\", err.Error()))\n\t\treturn 1\n\t}\n\treturn 0\n}", "func (c *SystemCommand) Synopsis() string {\n\treturn \"Display system informations\"\n}", "func Help() string {\n\treturn \"Commands are detailled here : https://github.com/eze-kiel/irc-bot/blob/master/README.md\"\n}", "func (c *SSCommand) Synopsis() string {\n\treturn \"Snapshot instance & create AMI\"\n}", "func (c *ReleaseLatestCommand) Help() string {\n\thelpText := `\nUsage: tfupdate release latest [options] <SOURCE>\n\nArguments\n SOURCE A path of release data source.\n Valid format depends on --source-type option.\n - github or gitlab:\n owner/repo\n e.g. terraform-providers/terraform-provider-aws\n - tfregistryModule\n namespace/name/provider\n e.g. terraform-aws-modules/vpc/aws\n - tfregistryProvider (experimental)\n namespace/type\n e.g. 
hashicorp/aws\n\nOptions:\n -s --source-type A type of release data source.\n Valid values are\n - github (default)\n - gitlab\n - tfregistryModule\n - tfregistryProvider (experimental)\n`\n\treturn strings.TrimSpace(helpText)\n}", "func (sc *SecretsCommand) Synopsis() string {\n\treturn \"Add or remove secrets for your Realm App.\"\n}", "func (c *LockCommand) Synopsis() string {\n\treturn \"Update dependency lock files\"\n}", "func (c *GetAddressInfoCommand) Synopsis() string {\n\treturn c.synopsis\n}", "func PullCommand() cli.Command {\n\treturn cli.Command{\n\t\tName: \"pull\",\n\t\tUsage: \"Pull an image from an Amazon ECR repository.\",\n\t\tArgsUsage: PullImageFormat,\n\t\tBefore: ecscli.BeforeApp,\n\t\tAction: ImagePull,\n\t\tFlags: []cli.Flag{\n\t\t\tcli.StringFlag{\n\t\t\t\tName: ecscli.RegistryIdFlag,\n\t\t\t\tUsage: \"[Optional] Specifies the the Amazon ECR registry ID to pull the image from. By default, images are pulled from the current AWS account.\",\n\t\t\t},\n\t\t},\n\t}\n}", "func (i *ImageInspectCommand) example() string {\n\treturn `$ pouch image inspect docker.io/library/busybox\n{\n \"CreatedAt\": \"2017-12-21 04:30:57\",\n \"Digest\": \"sha256:bbc3a03235220b170ba48a157dd097dd1379299370e1ed99ce976df0355d24f0\",\n \"ID\": \"bbc3a0323522\",\n \"Name\": \"docker.io/library/busybox:latest\",\n \"Size\": 720019,\n \"Tag\": \"latest\"\n}`\n}", "func usage() {\n\tdoc := heredoc.Doc(`\n\t\tExample:\n\t\t./koro docker <name> address add 127.0.0.3/24 dev lo\n\t`)\n\tfmt.Print(doc)\n}", "func (c *config) pull(remote string, branch string) (output string, err error) {\n\tlog.Printf(\"pulling: %v/%v\", remote, branch)\n\n\tdefaultCommand := []string{\"pull\", remote, branch}\n\n\treturn c.command(defaultCommand...)\n}", "func (c *PushCommand) Help() string {\n\treturn `\nUsage: filegate push [options] file\n ` + c.Synopsis() + `.\nOptions:\n --signaling-server=signaling_server Signaling endpoint, used by peers to exchange session description.\n 
--stun-servers=stun_server_1,stun_server_2,... STUN servers for traversal of NAT gateways.\n`\n}", "func PrintUsage() {\n\tfmt.Fprintln(os.Stdout, \"Usage: gitio [-code=] url\\nIf you will be use any code, set code flag\")\n}", "func (g *Getter) PrintHelp(indent string) {\n\tfmt.Println(indent, \"The get command downloads and installs a Fyne application.\")\n\tfmt.Println(indent, \"A single parameter is required to specify the Go package, as with \\\"go get\\\"\")\n}", "func (sac *SecretsAddCommand) Synopsis() string {\n\treturn \"Add a secret to your Realm App.\"\n}", "func (c *Ping) Help() string {\n\treturn `Usage: PING [message] Returns PONG if no argument is provided, otherwise return a copy of the argument as a bulk.`\n}", "func (d docker) Pull(ctx context.Context, image string) error {\n\tr, err := d.CLI.ImagePull(ctx, image, types.ImagePullOptions{})\n\tif err != nil {\n\t\treturn err\n\t}\n\tio.Copy(os.Stdout, r)\n\tdefer r.Close()\n\treturn nil\n}", "func UsageCommands() string {\n\treturn `want-go (get-simple-card-list|get-card-info|post-card-info|put-card-info|delete-card-info)\n`\n}", "func pullCmd(c *cli.Context) error {\n\tvar fqRegistries []string\n\n\targs := c.Args()\n\tif len(args) == 0 {\n\t\tlogrus.Errorf(\"an image name must be specified\")\n\t\treturn nil\n\t}\n\tif len(args) > 1 {\n\t\tlogrus.Errorf(\"too many arguments. 
Requires exactly 1\")\n\t\treturn nil\n\t}\n\timage := args[0]\n\tsrcRef, err := alltransports.ParseImageName(image)\n\tif err != nil {\n\t\tfqRegistries, err = getRegistriesToTry(image)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t} else {\n\t\tfqRegistries = append(fqRegistries, srcRef.DockerReference().String())\n\t}\n\truntime, err := getRuntime(c)\n\tdefer runtime.Shutdown(false)\n\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"could not create runtime\")\n\t}\n\tfor _, fqname := range fqRegistries {\n\t\tfmt.Printf(\"Trying to pull %s...\", fqname)\n\t\tif err := runtime.PullImage(fqname, c.Bool(\"all-tags\"), os.Stdout); err != nil {\n\t\t\tfmt.Printf(\" Failed\\n\")\n\t\t} else {\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn errors.Errorf(\"error pulling image from %q\", image)\n}", "func ShowUsage() {\n\tprintln(\"Usage:\\n\\thref_links <html source file>\\n\\tRead content from stdin: href_links -\\ne.g. curl https://example.com/ | href_links -\\nBy default utility tries read ./home.html\")\n\n}", "func (c *NodeInfoCommand) Synopsis() string {\n\treturn c.synopsis\n}", "func (pull *LibcomposePullProperty) Usage() api_usage.Usage {\n\treturn api_property.Usage_Optional()\n}", "func (c *VersionCommand) Usage() {\n\tfmt.Println(`\nPrints the version.\n\nUsage:\n\n\tlitestream version\n`[1:])\n}", "func (cmd *DatasetDownload) Synopsis() string {\n\treturn \"Download results from a running job.\"\n}", "func (sc *SecretsCommand) Help() string {\n\treturn sc.Synopsis()\n}", "func (service Service) PullCommand() Command {\n\treturn Command{\n\t\tMain: \"docker\",\n\t\tArgs: []string{\"pull\", service.Image},\n\t}\n}", "func (cli Client) ImagePull(options v1alpha1.OCIPullOptions) (v1alpha1.OCIPullResponse, error) {\n\n\tpullFlags := []command.Flag{\n\t\t// Buildah registry auth in format username[:password]\n\t\t{Name: \"creds\", Value: options.RegistryAuth, Short: false, OmitEmpty: true},\n\t}\n\n\tcmd := 
command.Builder(\"buildah\").Command(\"pull\").Flags(pullFlags...).Args(options.Ref).Build()\n\tcli.Logger.WithField(\"cmd\", cmd).Debugln(\"executing pull with command\")\n\n\tstdout, stderr, err := execute(&cmd)\n\tif err != nil {\n\t\tcli.Logger.WithError(err).Errorln(\"error building image...\")\n\t\treturn v1alpha1.OCIPullResponse{}, err\n\t}\n\treturn v1alpha1.OCIPullResponse{\n\t\tBody: stdout,\n\t\tExec: &cmd,\n\t\tStderr: stderr,\n\t}, nil\n}", "func (a *RESTAgent) Synopsis() string {\n\treturn \"Boot REST API agent\"\n}", "func (c *TwitterCommand) Help() string {\n\treturn \"Usage: a-know twitter\"\n}", "func (src *SecretsRemoveCommand) Help() string {\n\treturn `Remove a secret from your Realm Application.\n\nUsage:\n realm-cli secrets remove --name [string] [options]\n realm-cli secrets remove --id [string] [options]\n\nREQUIRED:\n --name [string] OR --id [string]\n\tThe name or ID of your secret.\n` +\n\t\tsrc.SecretsBaseCommand.Help()\n}", "func PrintHelp() {\n\tfmt.Print(usage)\n}", "func (r *Repository) Pull(o *PullOptions) error {\n\tif err := o.Validate(); err != nil {\n\t\treturn err\n\t}\n\n\tremote, err := r.Remote(o.RemoteName)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err = remote.Connect(); err != nil {\n\t\treturn err\n\t}\n\n\tdefer remote.Disconnect()\n\n\thead, err := remote.Ref(o.ReferenceName, true)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err = remote.Connect(); err != nil {\n\t\treturn err\n\t}\n\n\tdefer remote.Disconnect()\n\n\terr = remote.Fetch(&FetchOptions{\n\t\tDepth: o.Depth,\n\t})\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn r.createReferences(head)\n}", "func (c *DialDeleteCommand) usage() {\n\tfmt.Println(`\nDelete an existing dial.\n\nUsage:\n\n\twtf dial delete DIAL_ID\n`[1:])\n}", "func (c *initCommand) Synopsis() string {\n\treturn initSynopsis\n}", "func describe(s selection, args []string) {\n\tfmt.Println(runWithStdin(s.archive(), \"guru\", \"-modified\", \"describe\", s.pos()))\n}", "func 
help() {\r\n fmt.Printf(\"ORIGAMI\\n\")\r\n fmt.Printf(\"\\tA web app that checks the toner levels of printers at the Elizabethtown College campus.\\n\\n\")\r\n fmt.Printf(\"USAGE\\n\")\r\n fmt.Printf(\"\\tUsage: origami [-f filepath | -h]\\n\\n\")\r\n fmt.Printf(\"OPTIONS\\n\")\r\n fmt.Printf(\"\\t-f: specify the filepath of the config file (\\\"./origami.conf\\\" by default)\\n\")\r\n fmt.Printf(\"\\t-h: this menu\\n\\n\")\r\n fmt.Printf(\"AUTHOR\\n\")\r\n fmt.Printf(\"\\tRory Dudley (aka pinecat: https://github.com/pinecat/origamiv2)\\n\\n\")\r\n fmt.Printf(\"EOF\\n\")\r\n}", "func (b *binding) Pull(ctx context.Context, remote, local string) error {\n\treturn b.Command(\"pull\", remote, local).Run(ctx)\n}", "func Usage() {\n\tblue := color.New(color.FgBlue)\n\tcyan := color.New(color.FgCyan)\n\tyellow := color.New(color.FgYellow)\n\n\tblueBold := blue.Add(color.Bold)\n\n\tfmt.Printf(\"Ultralist v%s, simple task management for tech folks.\\n\", VERSION)\n\n\tblueBold.Println(\"\\nAdding todos\")\n\tfmt.Println(\"the 'a' command adds todos.\")\n\tfmt.Println(\"You can also optionally specify a due date.\")\n\tfmt.Println(\"Specify a due date by putting 'due <date>' at the end, where <date> is in (tod|today|tom|tomorrow|mon|tue|wed|thu|fri|sat|sun)\")\n\tfmt.Println(\"\\nExamples for adding a todo:\")\n\tyellow.Println(\"\\tultralist a Meeting with @bob about +importantPrject due today\")\n\tyellow.Println(\"\\tultralist a +work +verify did @john fix the build\\\\?\")\n\n\tblueBold.Println(\"\\nListing todos\")\n\tfmt.Println(\"When listing todos, you can filter and group the output.\")\n\tfmt.Println()\n\n\tfmt.Println(\"ultralist l due (tod|today|tom|tomorrow|overdue|this week|next week|last week|mon|tue|wed|thu|fri|sat|sun|none)\")\n\tfmt.Println(\"ultralist l overdue\")\n\tfmt.Println()\n\n\tcyan.Println(\"Filtering by date:\")\n\tyellow.Println(\"\\tultralist l due tod\")\n\tfmt.Println(\"\\tlists all todos due 
today\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist l due tom\")\n\tfmt.Println(\"\\tlists all todos due tomorrow\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist l due mon\")\n\tfmt.Println(\"\\tlists all todos due monday\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist l overdue\")\n\tfmt.Println(\"\\tlists all todos where the due date is in the past\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist agenda\")\n\tfmt.Println(\"\\tlists all todos where the due date is today or in the past\")\n\tfmt.Println()\n\n\tyellow.Println(\"\\tultralist l completed (tod|today|this week)\")\n\tfmt.Println(\"\\tshow completed todos.\")\n\tfmt.Println()\n\n\tyellow.Println(\"\\tultralist l completed (tod|today)\")\n\tfmt.Println(\"\\tlists all todos that were completed today\")\n\tfmt.Println()\n\n\tyellow.Println(\"\\tultralist l completed this week\")\n\tfmt.Println(\"\\tlists all todos that were completed this week\")\n\tfmt.Println()\n\n\tcyan.Println(\"Grouping:\")\n\tfmt.Println(\"You can group todos by context or project.\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist l by c\")\n\tfmt.Println(\"\\tlists all todos grouped by context\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist l by p\")\n\tfmt.Println(\"\\tlists all todos grouped by project\")\n\tfmt.Println()\n\n\tcyan.Println(\"Grouping and filtering:\")\n\tfmt.Println(\"Of course, you can combine grouping and filtering to get a nice formatted list.\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist l due today by c\")\n\tfmt.Println(\"\\tlists all todos due today grouped by context\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist l +project due this week by c\")\n\tfmt.Println(\"\\tlists all todos due today for +project, grouped by context\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist l @frank due tom by p\")\n\tfmt.Println(\"\\tlists all todos due tomorrow concerining @frank for +project, grouped by project\")\n\tfmt.Println()\n\n\tblueBold.Println(\"\\nCompleting and 
uncompleting \")\n\tfmt.Println(\"Complete and Uncomplete a todo by its Id:\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist c 33\")\n\tfmt.Println(\"\\tCompletes a todo with id 33\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist uc 33\")\n\tfmt.Println(\"\\tUncompletes a todo with id 33\")\n\tfmt.Println()\n\n\tblueBold.Println(\"\\nPrioritizing\")\n\tfmt.Println(\"Todos have a priority flag, which will make them bold when listed.\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist p 33\")\n\tfmt.Println(\"\\tPrioritizes a todo with id 33\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist up 33\")\n\tfmt.Println(\"\\tUn-prioritizes a todo with id 33\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist l p\")\n\tfmt.Println(\"\\tlist all priority todos\")\n\tfmt.Println()\n\n\tblueBold.Println(\"\\nArchiving\")\n\tfmt.Println(\"You can archive todos once they are done, or if you might come back to them.\")\n\tfmt.Println(\"By default, todo will only show unarchived todos.\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist ar 33\")\n\tfmt.Println(\"\\tArchives a todo with id 33\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist ac\")\n\tfmt.Println(\"\\tArchives all completed todos\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist l archived\")\n\tfmt.Println(\"\\tlist all archived todos\")\n\tfmt.Println()\n\n\tblueBold.Println(\"\\nEditing due dates\")\n\tyellow.Println(\"\\tultralist e 33 due mon\")\n\tfmt.Println(\"\\tEdits the todo with 33 and sets the due date to this coming Monday\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist e 33 due none\")\n\tfmt.Println(\"\\tEdits the todo with 33 and removes the due date\")\n\tfmt.Println()\n\n\tblueBold.Println(\"\\nDeleting\")\n\tyellow.Println(\"\\tultralist d 33\")\n\tfmt.Println(\"\\tDeletes a todo with id 33\")\n\tfmt.Println()\n\n\tblueBold.Println(\"\\nManipulating notes\")\n\tyellow.Println(\"\\tultralist ln\")\n\tfmt.Println(\"\\tlists all todos with their 
notes\")\n\tyellow.Println(\"\\tultralist an 12 check http://this.web.site\")\n\tfmt.Println(\"\\tAdds notes \\\"check http://this.web.site\\\" to the todo with id 12\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist n 12\")\n\tfmt.Println(\"\\tLists notes of the todo with id 12\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist dn 12 3\")\n\tfmt.Println(\"\\tDeletes the 3rd note of the todo with id 12\")\n\tfmt.Println()\n\tyellow.Println(\"\\tultralist en 12 3 check http://that.web.site\")\n\tfmt.Println(\"\\tEditing the 3rd note of the todo with id 12 to \\\"http://that.web.site\\\" \")\n\tfmt.Println()\n\n\tblueBold.Println(\"\\nGarbage Collection\")\n\tyellow.Println(\"\\tultralist gc\")\n\tfmt.Println(\"\\tDeletes all archived todos.\")\n\tfmt.Println()\n\n\tblueBold.Println(\"\\nSyncing with ultralist.io\")\n\tfmt.Println(\"\\tPlease see https://docs.ultralist.io/service.html for instructions for syncing.\")\n\tfmt.Println()\n\n\tfmt.Println(\"Ultralist was lovingly crafted by Grant Ammons (https://twitter.com/gammons).\")\n\tfmt.Println(\"For full documentation, please visit http://docs.ultralist.io\")\n}", "func (c *HDKeyCommand) Synopsis() string {\n\treturn c.synopsis\n}", "func showHelp(a *artifact.Artifact) {\n\tlog.Printf(\"Help!\")\n}", "func usage() {\n\tfmt.Fprintf(stderr, \"Usage: %s [<option> ...] [<cmd> [<arg> ...]]\\n\",\n\t\tos.Args[0])\n\n\tfmt.Fprintln(stderr, `\nIf invoked with the -start option, a moss server is started.\nOtherwise, the given command and its arguments are sent to the server.\nSpecifying no command is equivalent to specifying the 'status' command.\n\nOn server start, commands are read from ~/.mossrc or ~/.config/mossrc.`)\n\n\tfmt.Fprint(stderr, `\nCommands:\n add <item> ... append items to the playlist\n assoc <regexp> <cmd> associate cmd with items that match regexp\n clear [<regexp>] remove items matching regexp, or all [deprecated]\n del <index> ... remove items from the playlist\n index <regexp> ... 
print indices of items matching each regexp\n insert <item> ... insert items after the current item\n kill stop the server and current command\n ls print the current playlist\n lsassoc print the list of command associations\n mv <index> ... <index> move items to the last given index\n next step forward in the playlist\n pause suspend the current command\n play [<index>] resume current command or start command at index\n prev step backward in the playlist\n status [<format>] print information about the current state\n stop kill the current command\n toggle toggle between play and pause states\n unassoc <regexp> ... remove the command association for each regexp\n`)\n\n\tfmt.Fprintln(stderr, `\nThe status command's <format> argument supports the sequences %c, %f, %i, %n,\n%p, %s, and %t, which are replaced with current command, absolute file path of\ncurrent item, index of current item, playlist length, command process ID,\nstatus, and current item, respectively.\n`)\n\n\tfmt.Fprintln(stderr, \"Options:\")\n\tflag.PrintDefaults()\n}", "func (pull *LibcomposePullProperty) Label() string {\n\treturn \"Pull\"\n}", "func (suc *SecretsUpdateCommand) Help() string {\n\treturn `Update a secret for your Realm Application.\n\nUsage:\n realm-cli secrets update --name [string] --value [string] [options]\n realm-cli secrets update --id [string] --value [string] [options]\n\nREQUIRED:\n --name [string] OR --id [string]\n\tThe name or ID of your secret.\n\n --value [string]\n\tThe value that your secret is being updated to.\n` +\n\t\tsuc.SecretsBaseCommand.Help()\n}", "func Help(prefix string) string {\n\treturn fmt.Sprintf(\"usage: %s [command] [command_args...]\\n\", prefix) +\n\t\t`Available commands:\n - **help** - shows this message\n\t- **next** - shows information about the next race\n\t- **last** - shows information about the last race\n\t- **current** - shows races for the current season\n\t- **results** - shows information about results\n\t\t- **results circuit 
<circuit>** - shows historical information about the winners at a given circuit for the last years\n\t\t- **results driver <driver>** - shows last results for a driver\n`\n}", "func (r *Repo) Pull() error {\n\tr.Lock()\n\tdefer r.Unlock()\n\n\treturn timeout(*cmdTimeout, func() error {\n\t\treturn r.Master.VCS.Download(r.Path)\n\t})\n}", "func (db *DB) Pull(url, ref string) error {\n\tif ref == \"\" {\n\t\tref = db.ref\n\t}\n\trefspec := fmt.Sprintf(\"%s:%s\", ref, db.ref)\n\tfmt.Printf(\"Creating anonymous remote url=%s refspec=%s\\n\", url, refspec)\n\tremote, err := db.repo.CreateAnonymousRemote(url, refspec)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer remote.Free()\n\tif err := remote.Fetch(nil, nil, fmt.Sprintf(\"libpack.pull %s %s\", url, refspec)); err != nil {\n\t\treturn err\n\t}\n\treturn db.Update()\n}", "func help() {\n\tlog.Infoln(\"#: the number of the peer you want to connect to\")\n\tlog.Infoln(\"r: refresh peer list\")\n\tlog.Infoln(\"q: quit pcp\")\n\tlog.Infoln(\"?: this help message\")\n}", "func (c *GetCommand) Help() string {\n\thelpText := `\nUsage :\n\twikible get [options]\n\n\tGet the wiki pages to code.\n\nOptions:\n\t-i parent id\n\t-a wiki address\n`\n\treturn strings.TrimSpace(helpText)\n}", "func (p *PullCommand) runPull(args []string) error {\n\treturn pullMissingImage(context.Background(), p.cli.Client(), args[0], true)\n}", "func (cmd InspectCmd) Description() string {\n\treturn \"Inspects a Dolt Database and collects stats.\"\n}", "func (cmd MergeCmd) Description() string {\n\treturn \"Merge a branch.\"\n}", "func (sac *SecretsAddCommand) Help() string {\n\treturn `Add a secret to your Realm Application.\n\nUsage: realm-cli secrets add --name [string] --value [string] [options]\n\nREQUIRED:\n --name [string]\n\tThe name of your secret.\n\n --value [string]\n\tThe value of your secret.\n` +\n\t\tsac.SecretsBaseCommand.Help()\n}", "func PrintHelp() {\n\n\thelpString := `\n Usage: ./nexus-repository-cli.exe [option] 
[parameters...]\n\n [options]\n -list\n List the repositories in Nexus. Optional parameters: repoType, repoPolicy\n -create\n Create a repository in Nexus. Required parameter: repoId, repoType, provider, repoPolicy (only for maven2). Optional parameter: exposed\n -delete\n Delete a repository in Nexus. Required parameter: repoId\n -addRepoToGroup\n Add a reposirory to a group repository. Required parameters: repoId, repositories\n\n [parameters]\n -nexusUrl string\n Nexus server URL (default \"http://localhost:8081/nexus\")\n -exposed\n Set this flag to expose the repository in nexus.\n -username string\n Username for authentication\n -password string\n Password for authentication\n -repoId string\n ID of the Repository\n -repoType string\n Type of a repository. Possible values : hosted/proxy/group\n -repoPolicy string\n Policy of the hosted repository. Possible values : snapshot/release\n -provider string\n Repository provider. Possible values: maven2/npm/nuget\n -remoteStorageUrl string\n Remote storage url to proxy in Nexus\n -repositories string\n Comma separated value of repositories to be added to a group.\n -verbose\n Set this flag for Debug logs.\n\t`\n\n\tflag.Usage = func() {\n\t\tfmt.Fprintf(os.Stderr, helpString)\n\t}\n}", "func (up *BaseProvisionUpOperation) Help() string {\n\treturn \"\"\n}", "func (c *DeleteCommand) Synopsis() string {\n\treturn \"Delete a project, a service or an application\"\n}", "func godoc(s selection, args []string) {\n\tfmt.Println(runWithStdin(s.archive(), \"gogetdoc\", \"-modified\", \"-pos\", s.pos()))\n}", "func (gen *Generator) Pull() error {\n\tLog.Info(\"pull\", fmt.Sprintf(\"performing git pull in: %s\", gen.Template.Directory))\n\tGitPull := templates.CommandOptions{\n\t\tCmd: \"git pull\",\n\t\tDir: gen.Template.Directory.ToString(),\n\t\tUseStdOut: true,\n\t}\n\t_, err := templates.Run(GitPull)\n\treturn err\n}", "func (cmd PullCmd) Name() string {\n\treturn \"pull\"\n}", "func help() 
{\n\tfmt.Println(\"\\n--------------Command--------------\")\n\tfmt.Println(\"1. status\")\n\tfmt.Println(\"2. input [tipe identitas: string] [nomor identitas: integer]\")\n\tfmt.Println(\"3. leave [nomor loker: integer]\")\n\tfmt.Println(\"4. find [nomor identitas: integer]\")\n\tfmt.Println(\"5. search [tipe identitas: string]\")\n\tfmt.Println(\"6. exit\")\n\tfmt.Println(\"--------------End Command--------------\\n\")\n}", "func (get *BaseCommandGetOperation) Help() string {\n\treturn \"\"\n}", "func (c *ImportRawKeyCommand) Synopsis() string {\n\treturn c.synopsis\n}", "func (slc *SecretsListCommand) Help() string {\n\treturn `List secrets from your Realm Application.\n\nUsage: realm-cli secrets list [options]\n` +\n\t\tslc.SecretsBaseCommand.Help()\n}", "func Usage() {\n\t// To embed the bot user and password comment the line above and uncomment the line below\n\tfmt.Printf(\"Usage: %v -i <ip address> -p <port> -d <domain name>\\n\", runAs)\n\tflag.PrintDefaults()\n}", "func (c *VsmStatsCommand) Synopsis() string {\n\treturn \"Display VSM Stats\"\n}", "func Pull(dir, url, auth string, stdout, stderr io.Writer) error {\n\tvar user, password string\n\tif auth != \"\" {\n\t\ta := strings.Split(auth, \":\")\n\t\tuser, password = a[0], a[1]\n\t}\n\n\tswitch true {\n\tcase strings.HasSuffix(url, \".git\"):\n\t\treturn pullGit(dir, url, user, password, stdout, stderr)\n\tdefault:\n\t\treturn errors.New(\"unknown repo type\")\n\t}\n}", "func Synopsis(text string) string {\n\tvar p Package\n\treturn p.Synopsis(text)\n}", "func NewCommandPull() *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"pull\",\n\t\tShort: \"Pull a resource from a file or from stdin.\",\n\t\tLong: `YAML or JSON formats are accepted.`,\n\t}\n\n\tcmd.AddCommand(NewCommandPullStudy())\n\t//\tcmd.AddCommand(NewCommandPullModel())\n\n\treturn cmd\n}", "func Pull(c Config) {\n\n\tUpdate(c)\n\n}", "func (b *BotCommands) help(m *tb.Message) {\n\tb.Bot.Reply(m, `\n\tHello, So You Want To Search 
[Nyaa](https://nyaa.si/) huh? Let me help you with that. So, Here are the commands that you can use right now -\n\n\t/latest _<page no which should be a number>_\n\t*This command fetches you the latest animes available on nyaa on the page as certified.*\n\tNote - If no digit is specified, I fetch the 1 Page Results For You ~ UwU ~.\n\n\t/anime _<search term which can contain whitespaces>_\n\t*This command searches for the search term and fetches all the first page results for you.*\n\tNote - Remember, The term should atleast match for what you are looking for.\n\n\t`)\n}", "func pullMaster(opts RepoOpts) {\n\tcmd := exec.Command(\"git\", \"pull\", \"origin\", \"master\")\n\tcmd.Dir = opts.path\n\t_, err := cmd.Output()\n\tCheckIfError(err)\n\tfmt.Println(\"Pulled in Master: for \" + opts.path)\n}", "func help(cfg cli.Config) {\n\tfmt.Printf(`\n usage:\n scan ip/host [option]\n options:\n -p port-range or port number Specified range or port number (default is %s)\n -c TCP connect scan (default is TCP SYN scan)\n -4 Force IPv4\n -6 Force IPv6\n example:\n scan 8.8.8.8 -p 53\n scan www.google.com -p 1-500\n scan freebsd.org -6\n\t`,\n\t\tcfg.Scan.Port)\n}" ]
[ "0.7215579", "0.69286245", "0.66183007", "0.6583596", "0.6178542", "0.6172102", "0.6038832", "0.6024933", "0.58987576", "0.586869", "0.5860437", "0.58348995", "0.5785484", "0.57617813", "0.5743024", "0.57364047", "0.57247514", "0.5713961", "0.5707902", "0.57045203", "0.5675228", "0.5640683", "0.5639063", "0.5608886", "0.55733407", "0.5545326", "0.5541399", "0.5536424", "0.5535502", "0.5503917", "0.5499914", "0.5486503", "0.54814273", "0.54810804", "0.54781514", "0.5474779", "0.5472751", "0.5465289", "0.54639935", "0.54407245", "0.54258", "0.54172295", "0.54069424", "0.5395331", "0.5395256", "0.53870386", "0.5383543", "0.5381347", "0.53787655", "0.5377998", "0.5363008", "0.536189", "0.5358371", "0.5357744", "0.53575444", "0.5356246", "0.5326305", "0.5313874", "0.53094614", "0.5307545", "0.52994573", "0.5296571", "0.52794117", "0.52648675", "0.5257157", "0.52554697", "0.5250427", "0.5247688", "0.52476424", "0.5247044", "0.52441037", "0.52379596", "0.52363306", "0.52346814", "0.5223258", "0.5217802", "0.5206852", "0.51972044", "0.5193839", "0.5193688", "0.5188494", "0.51872146", "0.5184524", "0.51719975", "0.51716524", "0.5166776", "0.51654595", "0.51603806", "0.51599103", "0.51562726", "0.5150859", "0.513842", "0.5136236", "0.5119698", "0.51086974", "0.5108527", "0.51030326", "0.51014394", "0.5091997", "0.50915134" ]
0.77371424
0
add depth to all children
func (fi *finalizeFileInfo) addProperties(depth int) { fi.depth = depth for _, e := range fi.children { e.parent = fi e.addProperties(depth + 1) } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func fixFields(n, parent *node, depth int) {\n\tn.parent = parent\n\tn.depth = depth\n\tfor _, c := range n.children {\n\t\tfixFields(c, n, depth+1)\n\t}\n}", "func (t *FaultDomainTree) Depth() int {\n\tif t == nil {\n\t\treturn 0\n\t}\n\n\tdepth := 0\n\tfor _, c := range t.Children {\n\t\tchildMax := c.Depth() + 1\n\t\tif childMax > depth {\n\t\t\tdepth = childMax\n\t\t}\n\t}\n\treturn depth\n}", "func addParents(e *yang.Entry) {\n\tfor _, c := range e.Dir {\n\t\tc.Parent = e\n\t\taddParents(c)\n\t}\n}", "func (c *Clac) Depth() error {\n\treturn c.Push(value.Int(len(c.Stack())))\n}", "func Depth(index uint) (depth uint) {\n\tindex++\n\tfor (index & 1) == 0 {\n\t\tdepth++\n\t\tindex = rightShift(index)\n\t}\n\treturn\n}", "func (t *ASCIITree) Add(children ...*ASCIITree) {\n\tfor _, child := range children {\n\t\tchild.Parent = t\n\t}\n\tt.children = append(t.children, children...)\n}", "func (o *Options) MaxDepth() int { return o.maxDepth }", "func (mParams *EncodingMatrixLiteral) Depth(actual bool) (depth int) {\n\tif actual {\n\t\tdepth = len(mParams.ScalingFactor)\n\t} else {\n\t\tfor i := range mParams.ScalingFactor {\n\t\t\tfor range mParams.ScalingFactor[i] {\n\t\t\t\tdepth++\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func recursiveAddChildrenCapSum(tree *BlockTree, hash *wire.Hash, cap *big.Int) {\n\tfor _, childNode := range tree.children[*hash] {\n\t\tchildNode.CapSum.Add(childNode.CapSum, cap)\n\t\trecursiveAddChildrenCapSum(tree, childNode.Hash, cap)\n\t}\n}", "func (tree Tree) add(path string) int {\n\t// fmt.Println(\"adding \", path, \"to\", tree)\n\tcomponents := strings.Split(path[1:], \"/\", -1)\n\tadditions := 0\n\tpos := tree\n\tfor _, c := range components {\n\t\tchild, ok := pos[c]\n\t\tif !ok {\n\t\t\tchild = newTree()\n\t\t\tpos[c] = child\n\t\t\tadditions++\n\t\t}\n\t\tpos = child\n\t}\n\t// fmt.Printf(\"add %s -> %#v : %d\\n\", path, components, additions)\n\treturn additions\n}", "func processChildren(theReader *dwarf.Reader, depth int, 
canSkip bool) {\n\n\n\n\t// Process the children\n\tif (canSkip) {\n\t\ttheReader.SkipChildren();\n\t} else {\n\t\tfor {\n\t\t\ttheChild := readNextEntry(theReader);\n\t\t\tif (theChild == nil || theChild.Tag == 0) {\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\t\n\t\t\tprocessEntry(theReader, depth, theChild);\n\t\t}\n\t}\n}", "func (w *Walker) incrementActiveChildIndex() {\n\tif w.ActiveChildIndex()+1 <= w.ActiveNode().ChildTotal() {\n\t\tw.childIndex[w.currentDepth]++\n\t}\n}", "func trickleDepthInfo(node *h.FSNodeOverDag, maxlinks int) (depth int, repeatNumber int) {\n\tn := node.NumChildren()\n\n\tif n < maxlinks {\n\t\t// We didn't even added the initial `maxlinks` leaf nodes (`FillNodeLayer`).\n\t\treturn 0, 0\n\t}\n\n\tnonLeafChildren := n - maxlinks\n\t// The number of non-leaf child nodes added in `fillTrickleRec` (after\n\t// the `FillNodeLayer` call).\n\n\tdepth = nonLeafChildren/depthRepeat + 1\n\t// \"Deduplicate\" the added `depthRepeat` sub-graphs at each depth\n\t// (rounding it up since we may be on an unfinished depth with less\n\t// than `depthRepeat` sub-graphs).\n\n\trepeatNumber = nonLeafChildren % depthRepeat\n\t// What's left after taking full depths of `depthRepeat` sub-graphs\n\t// is the current `repeatNumber` we're at (this fractional part is\n\t// what we rounded up before).\n\n\treturn\n}", "func WithChildren() LogReadOption { return LogReadOption{withChildren: true} }", "func (svc *batchService) UpdateDepth(id []byte, depth uint8, normalisedBalance *big.Int) error {\n\tb, err := svc.storer.Get(id)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"get: %w\", err)\n\t}\n\terr = svc.storer.Put(b, normalisedBalance, depth)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"put: %w\", err)\n\t}\n\n\tsvc.logger.Debugf(\"batch service: updated depth of batch id %s from %d to %d\", hex.EncodeToString(b.ID), b.Depth, depth)\n\treturn nil\n}", "func (t *tree) RenderTree(out io.Writer, prefix []string, node *node) {\n\tvar lvlPrefix string\n\tfor i, v := range 
node.children {\n\t\tif i == len(node.children)-1 {\n\t\t\tlvlPrefix = lastPrefix\n\t\t\tio.WriteString(out, strings.Join(prefix, \"\"))\n\t\t\tio.WriteString(out, endLine)\n\t\t} else {\n\t\t\tlvlPrefix = defPrefix\n\t\t\tio.WriteString(out, strings.Join(prefix, \"\"))\n\t\t\tio.WriteString(out, startLine)\n\t\t}\n\t\tio.WriteString(out, v.name)\n\t\tio.WriteString(out, eof)\n\t\tif v.isDir {\n\t\t\tt.RenderTree(out, append(prefix, lvlPrefix), v)\n\t\t}\n\t}\n\tprefix = prefix[:len(prefix)-1]\n}", "func (o *Dig) SetDepth(v int32) {\n\to.Depth = v\n}", "func walkContainersRek(container *Container, ch chan<- *DepthContainer, depth int) {\n\tif container == nil {\n\t\treturn\n\t}\n\n\tch <- &DepthContainer{\n\t\tCont: container,\n\t\tD: depth,\n\t}\n\n\tfor c := container.Child; c != nil; c = c.Next {\n\t\twalkContainersRek(c, ch, depth+1)\n\t}\n}", "func (c *component) drawChildren(mx, my int) {\n\tfor _, child := range c.children {\n\t\tr := child.GetBounds()\n\t\timg := child.Draw(mx, my)\n\t\tif img != nil {\n\t\t\tdraw.Draw(c.Image, r, img, image.ZP, draw.Over)\n\t\t}\n\t}\n}", "func walk(path string, node *Node, creatable, removable, isGOAPI bool) {\n\tfiles := listFiles(path)\n\n\tfor _, filename := range files {\n\t\tfpath := filepath.Join(path, filename)\n\n\t\tfio, _ := os.Lstat(fpath)\n\n\t\tchild := Node{\n\t\t\tId: filepath.ToSlash(fpath), // jQuery API can't accept \"\\\", so we convert it to \"/\"\n\t\t\tName: filename,\n\t\t\tPath: filepath.ToSlash(fpath),\n\t\t\tRemovable: removable,\n\t\t\tIsGoAPI: isGOAPI,\n\t\t\tChildren: []*Node{}}\n\t\tnode.Children = append(node.Children, &child)\n\n\t\tif nil == fio {\n\t\t\tlogger.Warnf(\"Path [%s] is nil\", fpath)\n\n\t\t\tcontinue\n\t\t}\n\n\t\tif fio.IsDir() {\n\t\t\tchild.Type = \"d\"\n\t\t\tchild.Creatable = creatable\n\t\t\tchild.IconSkin = \"ico-ztree-dir \"\n\t\t\tchild.IsParent = true\n\n\t\t\twalk(fpath, &child, creatable, removable, isGOAPI)\n\t\t} else {\n\t\t\tchild.Type = 
\"f\"\n\t\t\tchild.Creatable = creatable\n\t\t\text := filepath.Ext(fpath)\n\n\t\t\tchild.IconSkin = getIconSkin(ext)\n\t\t}\n\t}\n\n\treturn\n}", "func (node *TreeNode) MaxDepthIterativeDfs() int {\n\tif node == nil {\n\t\treturn 0\n\t}\n\n\ttype Item struct {\n\t\tnode *TreeNode\n\t\tdepth int\n\t}\n\n\tstack := []Item{{node: node, depth: 1}}\n\n\tdepth := 0\n\n\tfor len(stack) > 0 {\n\t\titem := stack[len(stack)-1]\n\t\tstack = stack[:len(stack)-1]\n\n\t\tif item.depth > depth {\n\t\t\tdepth = item.depth\n\t\t}\n\n\t\tif item.node.Left != nil {\n\t\t\tstack = append(stack, Item{node: item.node.Left, depth: depth + 1})\n\t\t}\n\n\t\tif item.node.Right != nil {\n\t\t\tstack = append(stack, Item{node: item.node.Right, depth: depth + 1})\n\t\t}\n\t}\n\n\treturn depth\n}", "func (n *TreeBuilderNode) NumChildren() int { return len(n.Nodes) }", "func setDepth(p0 int, pool []huffmanTree, depth []byte, max_depth int) bool {\n\tvar stack [16]int\n\tvar level int = 0\n\tvar p int = p0\n\tassert(max_depth <= 15)\n\tstack[0] = -1\n\tfor {\n\t\tif pool[p].index_left_ >= 0 {\n\t\t\tlevel++\n\t\t\tif level > max_depth {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tstack[level] = int(pool[p].index_right_or_value_)\n\t\t\tp = int(pool[p].index_left_)\n\t\t\tcontinue\n\t\t} else {\n\t\t\tdepth[pool[p].index_right_or_value_] = byte(level)\n\t\t}\n\n\t\tfor level >= 0 && stack[level] == -1 {\n\t\t\tlevel--\n\t\t}\n\t\tif level < 0 {\n\t\t\treturn true\n\t\t}\n\t\tp = stack[level]\n\t\tstack[level] = -1\n\t}\n}", "func (v *dir) generateChildren(dirmap *dirMap) []plugin.Entry {\n\tdirmap.mux.RLock()\n\tdefer dirmap.mux.RUnlock()\n\n\tparent := dirmap.mp[v.path]\n\tentries := make([]plugin.Entry, 0, len(parent))\n\tfor name, attr := range parent {\n\t\tsubpath := v.path + \"/\" + name\n\t\tif attr.Mode().IsDir() {\n\t\t\tnewEntry := newDir(name, attr, v.impl, subpath)\n\t\t\tnewEntry.SetTTLOf(plugin.ListOp, ListTTL)\n\t\t\tif d, ok := dirmap.mp[subpath]; ok && d != nil 
{\n\t\t\t\tnewEntry.dirmap = dirmap\n\t\t\t\tnewEntry.Prefetched()\n\t\t\t\tnewEntry.DisableCachingFor(plugin.ListOp)\n\t\t\t}\n\t\t\tentries = append(entries, newEntry)\n\t\t} else {\n\t\t\tnewEntry := newFile(name, attr, v.impl, subpath)\n\t\t\tnewEntry.dirmap = dirmap\n\t\t\tentries = append(entries, newEntry)\n\t\t}\n\t}\n\treturn entries\n}", "func appendRec(ctx context.Context, fsn *h.FSNodeOverDag, db *h.DagBuilderHelper, maxDepth int) (*h.FSNodeOverDag, uint64, error) {\n\tif maxDepth == 0 || db.Done() {\n\t\treturn fsn, fsn.FileSize(), nil\n\t}\n\n\t// Get depth of this 'tree'\n\tdepth, repeatNumber := trickleDepthInfo(fsn, db.Maxlinks())\n\tif depth == 0 {\n\t\t// If direct blocks not filled...\n\t\tif err := db.FillNodeLayer(fsn); err != nil {\n\t\t\treturn nil, 0, err\n\t\t}\n\t\tdepth++\n\t}\n\t// TODO: Same as `appendFillLastChild`, when is this case possible?\n\n\t// If at correct depth, no need to continue\n\tif depth == maxDepth {\n\t\treturn fsn, fsn.FileSize(), nil\n\t}\n\n\tif err := appendFillLastChild(ctx, fsn, depth, repeatNumber, db); err != nil {\n\t\treturn nil, 0, err\n\t}\n\n\t// after appendFillLastChild, our depth is now increased by one\n\tif !db.Done() {\n\t\tdepth++\n\t}\n\n\t// Now, continue filling out tree like normal\n\tfor i := depth; i < maxDepth && !db.Done(); i++ {\n\t\tfor j := 0; j < depthRepeat && !db.Done(); j++ {\n\t\t\tnextChild := db.NewFSNodeOverDag(ft.TFile)\n\t\t\tchildNode, childFileSize, err := fillTrickleRec(db, nextChild, i)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, 0, err\n\t\t\t}\n\n\t\t\tif err := fsn.AddChild(childNode, childFileSize, db); err != nil {\n\t\t\t\treturn nil, 0, err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn fsn, fsn.FileSize(), nil\n}", "func (d *dir) createDirectories() {\n\tfor totalDirectories < directoriesToCreate {\n\t\tnewDir := &dir{\n\t\t\tname: fileName(),\n\t\t\tdepth: d.depth + 1,\n\t\t\tparent: d,\n\t\t}\n\t\td.children = append(d.children, newDir)\n\t\ttotalDirectories++\n\t\tswitch 
randSource.Intn(4) {\n\t\tcase 0:\n\t\t\tif d.depth < maxDepth {\n\t\t\t\tnewDir.createDirectories()\n\t\t\t}\n\t\tcase 1:\n\t\t\treturn\n\t\t}\n\t}\n}", "func (self *treeNode) addNode(tree *TTree, parent *treeNode, nodes []*treeNode, i int, isHook bool) *treeNode {\n\tif len(parent.Children) == 0 {\n\t\tparent.Children = make([]*treeNode, 0)\n\t}\n\n\t// 如果:找到[已经注册]的分支节点则从该节继续[查找/添加]下一个节点\n\tfor _, n := range parent.Children {\n\t\tif n.Equal(nodes[i]) {\n\t\t\t// 如果:插入的节点层级已经到末尾,则为该节点注册路由\n\t\t\tif i == len(nodes)-1 {\n\t\t\t\t// 原始路由会被替换\n\t\t\t\tif isHook {\n\t\t\t\t\tn.Route.CombineHandler(nodes[i].Route)\n\t\t\t\t} else {\n\t\t\t\t\tn.Route = nodes[i].Route\n\t\t\t\t\ttree.Count.Inc()\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn n\n\t\t}\n\t}\n\n\t// 如果:该节点没有对应分支则插入同级的nodes为新的分支\n\tparent.Children = append(parent.Children, nodes[i])\n\tsort.Sort(parent.Children)\n\treturn nodes[i]\n}", "func (bt *BinaryTree) DepthIte() int {\n\tdepth := 0\n\tcurLevel := []*BinaryTree{bt}\n\tnewLevel := make([]*BinaryTree, 0, 2)\n\tidx := 0\n\tcurLen := len(curLevel)\n\n\tfor curLen > 0 {\n\t\tnode := curLevel[idx]\n\t\tif node.Left != nil {\n\t\t\tnewLevel = append(newLevel, node.Left)\n\t\t}\n\t\tif node.Right != nil {\n\t\t\tnewLevel = append(newLevel, node.Right)\n\t\t}\n\n\t\tidx++\n\t\tif idx == curLen {\n\t\t\t// end of current level; switch\n\t\t\tdepth++\n\t\t\tcurLevel = newLevel\n\t\t\tnewLevel = make([]*BinaryTree, 0, curLen*2)\n\t\t\tidx = 0\n\t\t\tcurLen = len(curLevel)\n\t\t}\n\t}\n\n\treturn depth\n}", "func (n *Node) Depth() int {\n\tif n.Parent == nil {\n\t\treturn 0\n\t}\n\treturn n.Parent.Depth() + 1\n}", "func (w *Walker) extendPath(child NavigableNode) {\n\tw.currentDepth++\n\n\t// Extend the slices if needed (doubling its capacity).\n\tif w.currentDepth >= len(w.path) {\n\t\tw.path = append(w.path, make([]NavigableNode, len(w.path))...)\n\t\tw.childIndex = append(w.childIndex, make([]uint, len(w.childIndex))...)\n\t\t// TODO: Check the performance of this grow 
mechanism.\n\t}\n\n\t// `child` now becomes the `ActiveNode()`.\n\tw.path[w.currentDepth] = child\n\tw.childIndex[w.currentDepth] = 0\n}", "func (self *TileSprite) SetChildrenA(member []DisplayObject) {\n self.Object.Set(\"children\", member)\n}", "func (t *layerTree) children(ctx context.Context, parent *Image, all bool) ([]*Image, error) {\n\tif parent.TopLayer() == \"\" {\n\t\tif isManifestList, _ := parent.IsManifestList(ctx); isManifestList {\n\t\t\treturn nil, nil\n\t\t}\n\t}\n\n\tparentID := parent.ID()\n\tparentOCI, err := t.toOCI(ctx, parent)\n\tif err != nil {\n\t\tif ErrorIsImageUnknown(err) {\n\t\t\treturn nil, nil\n\t\t}\n\t\treturn nil, err\n\t}\n\n\t// checkParent returns true if child and parent are in such a relation.\n\tcheckParent := func(child *Image) (bool, error) {\n\t\tif parentID == child.ID() {\n\t\t\treturn false, nil\n\t\t}\n\t\tchildOCI, err := t.toOCI(ctx, child)\n\t\tif err != nil {\n\t\t\tif ErrorIsImageUnknown(err) {\n\t\t\t\treturn false, nil\n\t\t\t}\n\t\t\treturn false, err\n\t\t}\n\t\t// History check.\n\t\treturn areParentAndChild(parentOCI, childOCI), nil\n\t}\n\n\tvar children []*Image\n\n\t// Empty images are special in that they do not have any physical layer\n\t// but yet can have a parent-child relation. Hence, compare the\n\t// \"parent\" image to all other known empty images.\n\tif parent.TopLayer() == \"\" {\n\t\tfor i := range t.emptyImages {\n\t\t\tempty := t.emptyImages[i]\n\t\t\tisParent, err := checkParent(empty)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif isParent {\n\t\t\t\tchildren = append(children, empty)\n\t\t\t\tif !all {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn children, nil\n\t}\n\n\tparentNode, exists := t.nodes[parent.TopLayer()]\n\tif !exists {\n\t\t// Note: erroring out in this case has turned out having been a\n\t\t// mistake. 
Users may not be able to recover, so we're now\n\t\t// throwing a warning to guide them to resolve the issue and\n\t\t// turn the errors non-fatal.\n\t\tlogrus.Warnf(\"Layer %s not found in layer tree. The storage may be corrupted, consider running `podman system reset`.\", parent.TopLayer())\n\t\treturn children, nil\n\t}\n\n\t// addChildrenFrom adds child images of parent to children. Returns\n\t// true if any image is a child of parent.\n\taddChildrenFromNode := func(node *layerNode) (bool, error) {\n\t\tfoundChildren := false\n\t\tfor i, childImage := range node.images {\n\t\t\tisChild, err := checkParent(childImage)\n\t\t\tif err != nil {\n\t\t\t\treturn foundChildren, err\n\t\t\t}\n\t\t\tif isChild {\n\t\t\t\tfoundChildren = true\n\t\t\t\tchildren = append(children, node.images[i])\n\t\t\t\tif all {\n\t\t\t\t\treturn foundChildren, nil\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn foundChildren, nil\n\t}\n\n\t// First check images where parent's top layer is also the parent\n\t// layer.\n\tfor _, childNode := range parentNode.children {\n\t\tfound, err := addChildrenFromNode(childNode)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif found && all {\n\t\t\treturn children, nil\n\t\t}\n\t}\n\n\t// Now check images with the same top layer.\n\tif _, err := addChildrenFromNode(parentNode); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn children, nil\n}", "func (ms Mounts) mountDepth(i int) int {\n\tancestorCount := 0\n\tcurrent := ms[i]\n\tfor found := true; found; {\n\t\tfound = false\n\t\tfor _, mnt := range ms {\n\t\t\tif mnt.ID == current.Parent {\n\t\t\t\tancestorCount++\n\t\t\t\tcurrent = mnt\n\t\t\t\tfound = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\treturn ancestorCount\n}", "func (self *Manager) fixupChildren(placeHolder *PlaceHolder, logger Logger) {\n\tname := logger.GetName()\n\tfor e := placeHolder.Loggers.Front(); e != nil; e = e.Next() {\n\t\tl, _ := e.Value.(Logger)\n\t\tparent := l.GetParent()\n\t\tif !strings.HasPrefix(parent.GetName(), 
name) {\n\t\t\tlogger.SetParent(parent)\n\t\t\tl.SetParent(logger)\n\t\t}\n\t}\n}", "func appendChildren(parent element, rslt *result, lines []string, i *int, l int, src *source, opts *Options, f *File) error {\n\tfor *i < l {\n\t\t// Fetch a line.\n\t\tln := newLine(*i+1, lines[*i], opts, f)\n\n\t\t// Check if the line is a child of the parent.\n\t\tok, err := ln.childOf(parent)\n\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif !ok {\n\t\t\treturn nil\n\t\t}\n\n\t\tchild, err := newElement(ln, rslt, src, parent, opts)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tparent.AppendChild(child)\n\n\t\t*i++\n\n\t\tif child.CanHaveChildren() {\n\t\t\tif err := appendChildren(child, rslt, lines, i, l, src, opts, f); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func fillTrickleRec(db *h.DagBuilderHelper, node *h.FSNodeOverDag, maxDepth int) (filledNode ipld.Node, nodeFileSize uint64, err error) {\n\t// Always do this, even in the base case\n\tif err := db.FillNodeLayer(node); err != nil {\n\t\treturn nil, 0, err\n\t}\n\n\t// For each depth in [1, `maxDepth`) (or without limit if `maxDepth` is -1,\n\t// initial call from `Layout`) add `depthRepeat` sub-graphs of that depth.\n\tfor depth := 1; maxDepth == -1 || depth < maxDepth; depth++ {\n\t\tif db.Done() {\n\t\t\tbreak\n\t\t\t// No more data, stop here, posterior append calls will figure out\n\t\t\t// where we left off.\n\t\t}\n\n\t\tfor repeatIndex := 0; repeatIndex < depthRepeat && !db.Done(); repeatIndex++ {\n\n\t\t\tchildNode, childFileSize, err := fillTrickleRec(db, db.NewFSNodeOverDag(ft.TFile), depth)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, 0, err\n\t\t\t}\n\n\t\t\tif err := node.AddChild(childNode, childFileSize, db); err != nil {\n\t\t\t\treturn nil, 0, err\n\t\t\t}\n\t\t}\n\t}\n\n\t// Get the final `dag.ProtoNode` with the `FSNode` data encoded inside.\n\tfilledNode, err = node.Commit()\n\tif err != nil {\n\t\treturn nil, 0, err\n\t}\n\n\treturn filledNode, 
node.FileSize(), nil\n}", "func (menu *Menu) AddToTree(menuNode *Menu) {\n\n\tif menuNode.ParentId == 0 {\n\n\t\tmenu.Children = append(menu.Children, menuNode)\n\t} else if menuNode.ParentId == menu.ID {\n\n\t\tmenu.Children = append(menu.Children, menuNode)\n\n\t} else {\n\n\t\tfor _, u := range menu.Children {\n\n\t\t\tu.AddToTree(menuNode)\n\t\t}\n\n\t}\n\n}", "func (cm *CategoryMap) Tree(offset int) (root []*Category) {\n\tfor _, i := range cm.Index() {\n\t\tc := cm.Get(i)\n\t\t// Are we at the root of the tree?\n\t\tif c.Parent == offset {\n\t\t\troot = append(root, c)\n\t\t\tcontinue\n\t\t}\n\t\tp := cm.Get(c.Parent)\n\t\t// Append Category to its parent's children\n\t\tp.Children = append(p.Children, c)\n\t}\n\treturn\n}", "func (self *Graphics) SetChildrenA(member []DisplayObject) {\n self.Object.Set(\"children\", member)\n}", "func swapAtDepth(root *node, depth int32, current int32) {\n\tif root == nil {\n\t\treturn\n\t}\n\tif current%depth == 0 {\n\t\troot.left, root.right = root.right, root.left\n\t}\n\tswapAtDepth(root.left, depth, current+1)\n\tswapAtDepth(root.right, depth, current+1)\n}", "func (w *WidgetImplement) Draw(self Widget, ctx *canvas.Context) {\n\tif debugFlag {\n\t\tctx.SetStrokeWidth(1.0)\n\t\tctx.BeginPath()\n\t\tctx.Rect(float32(w.x)-0.5, float32(w.y)-0.5, float32(w.w)+1.0, float32(w.h)+1.0)\n\t\tctx.SetStrokeColor(canvas.RGBA(255, 0, 0, 255))\n\t\tctx.Stroke()\n\t}\n\n\tif len(w.children) == 0 {\n\t\treturn\n\t}\n\tctx.Translate(float32(w.x), float32(w.y))\n\t// draw depth 0 items\n\tvar drawLater widgetsAsc = make([]Widget, 0, len(w.children))\n\tfor _, child := range w.children {\n\t\tif child.Visible() {\n\t\t\tdepth := child.Depth()\n\t\t\tif depth == 0 {\n\t\t\t\tcx, cy := child.Position()\n\t\t\t\tcw, ch := child.Size()\n\t\t\t\tif !self.IsClipped(cx, cy, cw, ch) {\n\t\t\t\t\tchild.Draw(child, ctx)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tdrawLater = append(drawLater, child)\n\t\t\t}\n\t\t}\n\t}\n\t// draw by depth 
order\n\tsort.Sort(drawLater)\n\tfor _, child := range drawLater {\n\t\tcx, cy := child.Position()\n\t\tcw, ch := child.Size()\n\t\tif !self.IsClipped(cx, cy, cw, ch) {\n\t\t\tchild.Draw(child, ctx)\n\t\t}\n\t}\n\tctx.Translate(-float32(w.x), -float32(w.y))\n}", "func (t *Transform) SetDepth(depth float64) *Transform {\n\tt.Depth = depth\n\treturn t\n}", "func (node *AStarNode) Depth() int {\n\tscore := 0\n\ttmpNode := node\n\tfor tmpNode != nil {\n\t\tscore++\n\t\ttmpNode = tmpNode.previous\n\t}\n\treturn score\n}", "func (m *Set) SetChildren(value []Termable)() {\n err := m.GetBackingStore().Set(\"children\", value)\n if err != nil {\n panic(err)\n }\n}", "func (n Node) getDepth() int {\n\t// if there is no Parent element\n\t// we reached the top level\n\tif n.Parent == nil {\n\t\treturn 0\n\t}\n\n\treturn n.Parent.getDepth() + 1\n}", "func Children(n Node) []Node { return n.n().children }", "func TestBuildChildren2(t *testing.T) {\n\trand.Seed(time.Now().UnixNano())\n\tvar k = 6\n\tvar dim = 2\n\tvar pNum = 5\n\tvar nNum = 10\n\tvar its []item.Item\n\n\t// positive side\n\tfor i := 0; i < pNum; i++ {\n\t\tv := make([]float32, 2)\n\t\tfor d := 0; d < dim; d++ {\n\t\t\tv[d] = rand.Float32()\n\t\t}\n\t\titem.Normalize(v)\n\t\tits = append(its, item.Item{\n\t\t\tID: int64(i),\n\t\t\tVec: v,\n\t\t})\n\t}\n\n\t// negative side\n\tfor i := 0; i < nNum; i++ {\n\t\tv := make([]float32, 2)\n\t\tfor d := 0; d < dim; d++ {\n\t\t\tv[d] = -rand.Float32()\n\t\t}\n\t\titem.Normalize(v)\n\t\tits = append(its, item.Item{\n\t\t\tID: int64(i + pNum),\n\t\t\tVec: v,\n\t\t})\n\t}\n\n\tn := Node{\n\t\tVec: []float32{\n\t\t\t1, 0,\n\t\t},\n\t}\n\n\tn.buildChildren(its, k, 2, &sync.Map{})\n\n\tleftChild := n.Children[0]\n\tassert.Equal(t, true, leftChild.IsLeaf())\n\tassert.Equal(t, []int64{0, 1, 2, 3, 4}, leftChild.Leaf)\n\n\trightChild := n.Children[1]\n\tassert.Equal(t, false, rightChild.IsLeaf())\n\tassert.Equal(t, 2, len(rightChild.Children))\n}", "func setDepthAndEntryCount(ds 
*dataset.Dataset, data qfs.File, mu *sync.Mutex, done chan error) {\n\tdefer data.Close()\n\n\ter, err := dsio.NewEntryReader(ds.Structure, data)\n\tif err != nil {\n\t\tlog.Debug(err.Error())\n\t\tdone <- fmt.Errorf(\"error reading data values: %s\", err.Error())\n\t\treturn\n\t}\n\n\tentries := 0\n\t// baseline of 1 for the original closure\n\tdepth := 1\n\tvar ent dsio.Entry\n\tfor {\n\t\tif ent, err = er.ReadEntry(); err != nil {\n\t\t\tlog.Debug(err.Error())\n\t\t\tbreak\n\t\t}\n\t\t// get the depth of this entry, update depth if larger\n\t\tif d := getDepth(ent.Value, 1); d > depth {\n\t\t\tdepth = d\n\t\t}\n\t\tentries++\n\t}\n\tif err.Error() != \"EOF\" {\n\t\tdone <- fmt.Errorf(\"error reading values at entry %d: %s\", entries, err.Error())\n\t\treturn\n\t}\n\n\tmu.Lock()\n\tds.Structure.Entries = entries\n\tds.Structure.Depth = depth\n\tmu.Unlock()\n\n\tdone <- nil\n}", "func (c *ProjectsLogServicesIndexesListCall) Depth(depth int64) *ProjectsLogServicesIndexesListCall {\n\tc.params_.Set(\"depth\", fmt.Sprintf(\"%v\", depth))\n\treturn c\n}", "func TestBuildChildren1(t *testing.T) {\n\trand.Seed(time.Now().UnixNano())\n\tvar k = 6\n\tvar dim = 2\n\tvar num = 5\n\tvar its []item.Item\n\n\t// positive side\n\tfor i := 0; i < num; i++ {\n\t\tv := make([]float32, 2)\n\t\tfor d := 0; d < dim; d++ {\n\t\t\tv[d] = rand.Float32()\n\t\t}\n\t\titem.Normalize(v)\n\t\tits = append(its, item.Item{\n\t\t\tID: int64(i),\n\t\t\tVec: v,\n\t\t})\n\t}\n\n\t// negative side\n\tfor i := 0; i < num; i++ {\n\t\tv := make([]float32, 2)\n\t\tfor d := 0; d < dim; d++ {\n\t\t\tv[d] = -rand.Float32()\n\t\t}\n\t\titem.Normalize(v)\n\t\tits = append(its, item.Item{\n\t\t\tID: int64(num + i),\n\t\t\tVec: v,\n\t\t})\n\t}\n\n\tn := Node{\n\t\tVec: []float32{\n\t\t\t1, 0,\n\t\t},\n\t}\n\n\tn.buildChildren(its, k, 2, &sync.Map{})\n\n\tleftChild := n.Children[0]\n\tassert.Equal(t, true, leftChild.IsLeaf())\n\tassert.Equal(t, []int64{0, 1, 2, 3, 4}, leftChild.Leaf)\n\n\trightChild := 
n.Children[1]\n\tassert.Equal(t, true, rightChild.IsLeaf())\n\tassert.Equal(t, []int64{5, 6, 7, 8, 9}, rightChild.Leaf)\n}", "func auxPrintTree(act *Account, level int, indent string) {\n\tfmt.Printf(\"%s[%s] %s (%s)\\n\", strings.Repeat(indent, level), strings.ToUpper(act.Type.label), act.Name, act.Currency)\n\n\tfor _, child := range act.Children {\n\t\tauxPrintTree(child, level+1, indent)\n\t}\n}", "func (tree *Tree23) Depths() (int, int) {\n\treturn tree.minmaxDepth(tree.root)\n}", "func (e *FakeElement) AddChildren(children ...Element) Element {\n\tfor _, c := range children {\n\t\tif d, ok := c.(*FakeElement); ok {\n\t\t\te.Children = append(e.Children, d)\n\t\t\td.parent = e\n\t\t}\n\t}\n\treturn e\n}", "func (t *Tree) Level() int {\n\treturn t.Depth() + 1\n}", "func (l *hierLayer) updateState(cmap map[*replicationElement]string) {\n\t//go through the map and build the tree\n\tl.root = &DfsTreeElement{name: \"/\", fileType: \"dir\", path: \"\", content: \"\",parent:nil,}\n\tstack := lls.New()\n\n\n\t//policy used here is skip\n\n\tstack.Push(l.root)\n\t// untill stack empty\n\tfor !stack.Empty() {\n\t\t// \tpop stack call el\n\t\tra, _ := stack.Pop()\n\t\tel := ra.(*DfsTreeElement)\n\n\t\tif el.fileType == \"dir\" {\n\t\t\tfor _, i := range getChildren(el, cmap) {\n\t\t\t\tii := i\n\t\t\t\tstack.Push(&ii)\n\t\t\t\tel.children = append(el.children, &ii)\n\t\t\t}\n\t\t\t// fmt.Println(el.getPath(), el.children)\n\n\t\t}\n\n\t}\n\n\t//last step is to send the interface layer with update state\n\tl.updateInterface()\n\n}", "func (n *node) dfs(iter func(parent func(int) *node, node *node)) {\n\tparentStack := []*node{n}\n\tfullIter(n.children, func(_ string, child *node) {\n\t\tchild.dfsInner(&parentStack, iter)\n\t})\n}", "func Children(index, depth uint) ([]uint, error) {\n\tif index&1 == 0 {\n\t\treturn nil, errors.New(\"No children\")\n\t}\n\n\tif depth == 0 {\n\t\tdepth = Depth(index)\n\t}\n\toffset := Offset(index, depth) * 2\n\n\treturn 
[]uint{Index(depth-1, offset), Index(depth-1, offset+1)}, nil\n}", "func WithDepth(depth int) Option {\n\treturn func(o *outputOpts) (*outputOpts, error) {\n\t\tif depth < 0 {\n\t\t\treturn nil, fmt.Errorf(\"invalid depth value %d, value must be >= 0\", depth)\n\t\t}\n\t\tc := o.copy()\n\t\tc.depth = depth\n\t\treturn c, nil\n\t}\n}", "func (n *Node) Depth() int {\n\treturn n.depth\n}", "func (e *dataUsageEntry) addChild(hash dataUsageHash) {\n\tif _, ok := e.Children[hash.Key()]; ok {\n\t\treturn\n\t}\n\tif e.Children == nil {\n\t\te.Children = make(dataUsageHashMap, 1)\n\t}\n\te.Children[hash.Key()] = struct{}{}\n}", "func (t *Tree) AddIteratively(data int) (err error) {\n\tt.Total += data\n\tt.NodeCount++\n\tif data < 0 {\n\t\treturn ErrPositiveIntegers\n\t}\n\tNodeToAdd := Node{Data: data}\n\tif t.Root == nil {\n\t\tt.Root = &NodeToAdd\n\t\treturn\n\t}\n\tt.addIteratively(t.Root, NodeToAdd)\n\tif t.Root != nil {\n\t\tnewTree := rebalance(t)\n\t\tif newTree.Root != nil {\n\t\t\t*t = newTree\n\t\t}\n\n\t}\n\treturn\n}", "func (t *DiskTree) addChild(root *disknode, edgename, key string, value []string) {\n\tnewnode := new(disknode)\n\tnewnode.Value = value\n\tnewnode.Edgename = edgename\n\tnewnode.Key = key\n\tnewnode.Hash = smash(key)\n\tnewnode.Parent = root.Hash\n\tif root.Children == nil {\n\t\troot.Children = make(map[string]string)\n\t}\n\troot.Children[string(edgename[0])] = newnode.Hash\n\tdebugf(\"add disk child %+v\\n\", newnode)\n\tt.write(root)\n\tt.write(newnode)\n}", "func (a node) Tree(root treeprint.Tree) error {\n\tsort.Sort(a)\n\tbranch := root\n\tvar err error\n\n\t// generate a node for the Resource\n\tif a.RNode != nil {\n\t\tbranch, err = a.p.doResource(a.RNode, \"Resource\", root)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// attach children to the branch\n\tfor _, n := range a.children {\n\t\tif err := n.Tree(branch); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func auxPrintTree(a *Account, level int, indent 
string) {\n\tfmt.Printf(\"%s[%s] %s (%s)\\n\", strings.Repeat(indent, level), a.Type, a.Name, a.Currency)\n\n\tfor _, child := range a.Children {\n\t\tauxPrintTree(child, level+1, indent)\n\t}\n}", "func (tree *Tree) persist(root *caching.Node, parent *caching.Node, updateOps chan *caching.Node) {\n\t//if it is a leaf its size is already given\n\tfor _, child := range root.Children {\n\t\tnode, err := tree.GetNode(child.Name)\n\t\t//TODO: handle this somehow\n\t\tif err != nil {\n\t\t}\n\t\ttree.persist(node, root, updateOps)\n\t}\n\tupdateOps <- root\n}", "func Iterate(input interface{}, action *Action, depth int) interface{} {\n\t// TODO: add in cycle detection\n\n\t// Initialize on first call\n\tif action.Processed == nil {\n\t\taction.Processed = make(map[string]Parameters, 0)\n\t\taction.Path = *NewStack()\n\t\taction.Path.Push(\"root\")\n\t}\n\n\t// Some types of not implemented\n\tif IsDifficult(input) {\n\t\tlog.Fatal(\"Can't deal with this\", \"input\", input)\n\t}\n\n\t// Short cut if specified\n\tif action.IgnorePrimitives && IsPrimitive(input) {\n\t\treturn input\n\t}\n\n\tparent := action.Path.StringPeekN(0)\n\taction.Path.Push(action.Name)\n\n\tif IsPointer(input) {\n\t\t// TODO: Needs to be checked.\n\t\tif !IsNilValue(input) {\n\t\t\tinput = reflect.ValueOf(input).Elem().Interface()\n\t\t}\n\t\taction.IsPointer = true\n\t} else {\n\t\taction.IsPointer = false\n\t}\n\n\tif action.Processed[parent].Children == nil {\n\t\taction.Processed[parent] = Parameters{make(map[string]interface{}, 0)}\n\t}\n\n\t// Walk the children first -- post-order traversal\n\tif IsContainer(input) {\n\n\t\t// Save the original values\n\t\tname := action.Path.StringPeekN(0)\n\t\tpointer := action.IsPointer\n\n\t\tchildren := GetChildren(input)\n\t\tfor i := 0; i < len(children); i++ {\n\t\t\taction.Name = children[i].Name + fmt.Sprintf(\"-%d\", depth)\n\n\t\t\tIterate(children[i].Value, action, depth+1)\n\n\t\t\t// Restore the action values, since they were 
overwritten\n\t\t\taction.Name = name\n\t\t\taction.IsPointer = pointer\n\t\t}\n\t}\n\n\tresult := action.ProcessField(action, input)\n\taction.Path.Pop()\n\n\treturn result\n}", "func FatallnDepth(depth int, args ...interface{}) {\n\tlogging.printlnDepth(severity.FatalLog, logging.logger, logging.filter, depth, args...)\n}", "func (d *Graph) buildTree() {\n\tconfig := build.Default\n\n\t// For each package, look for the dependencies and build out a tree\n\tfor p := range d.Pkgs {\n\t\tpkg, _ := config.Import(d.Pkgs[p], d.SrcDir, build.AllowBinary)\n\t\timports := pkg.Imports\n\n\t\t// Iterate through the imports and build our tree\n\t\tfor i := range imports {\n\t\t\t// The full path of our current import\n\t\t\tpath := imports[i]\n\n\t\t\t// When dealing with multiple packages, we can't assume that imports\n\t\t\t// are unique. Thus the nodes may already exist and we shouldn't do any work\n\t\t\tif d.Nodes[path] != nil {\n\t\t\t\td.Nodes[path].IsDuplicate = true\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// Ignore the GO standard library imports\n\t\t\tif _, ok := stdlib[strings.Split(path, \"/\")[0]]; ok && !d.StdLib {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// Keep track when traversing the path\n\t\t\tvar currentNode = &Node{\n\t\t\t\tPath: path,\n\t\t\t\tIsDep: true,\n\t\t\t\tIsCoreDep: strings.HasPrefix(path, strings.Split(d.Pkgs[p], \"/\")[0]),\n\t\t\t}\n\n\t\t\t// Keep track of the number of dependencies\n\t\t\td.TotalDeps++\n\n\t\t\t// Link our dependency node to its ancestors\n\t\t\tfor path != \"\" {\n\t\t\t\t// Constant time lookup to all of our nodes\n\t\t\t\t// based on their full path string\n\t\t\t\td.Nodes[path] = currentNode\n\n\t\t\t\t// Keep popping off the tip of the path\n\t\t\t\tpath, _ = filepath.Split(path)\n\n\t\t\t\tif len(path) > 0 {\n\t\t\t\t\t// Trailing slash in file path causes issues, remove it\n\t\t\t\t\tif strings.HasSuffix(path, \"/\") {\n\t\t\t\t\t\tpath = path[:len(path)-1]\n\t\t\t\t\t}\n\n\t\t\t\t\t// Create nodes for all directory 
paths if they don't exist\n\t\t\t\t\tif d.Nodes[path] == nil {\n\t\t\t\t\t\tcurrentNode.addParent(&Node{\n\t\t\t\t\t\t\tPath: path,\n\t\t\t\t\t\t})\n\n\t\t\t\t\t\t// Change the current node to the newly created item\n\t\t\t\t\t\tcurrentNode = currentNode.Parent\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// Otherwise, assume the common ancestor already has it's tree built\n\t\t\t\t\t\tcurrentNode.addParent(d.Nodes[path])\n\t\t\t\t\t\tcurrentNode = nil\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\t// currentNode will be nil if there was already a common ancestor --\n\t\t\t// which means the root node already exists for that import path\n\t\t\tif currentNode != nil {\n\t\t\t\td.RootNode.addChild(currentNode)\n\t\t\t}\n\t\t}\n\t}\n}", "func (g *Graph) UpdateRootDepth(rootDepth uint64) {\n\tvar pendingDepth []*sortByDepthTX\n\tvar pendingSeed []*sortBySeedTX\n\n\tg.Lock()\n\n\tg.rootDepth = rootDepth\n\n\tfor id, depth := range g.missing {\n\t\tif rootDepth > sys.MaxDepthDiff+depth {\n\t\t\tdelete(g.missing, id)\n\n\t\t\tif g.indexer != nil {\n\t\t\t\tg.indexer.Remove(hex.EncodeToString(id[:]))\n\t\t\t}\n\n\t\t\tg.resolveChildren(id)\n\n\t\t\tdelete(g.children, id)\n\t\t}\n\t}\n\n\tfor id, depth := range g.incomplete {\n\t\tif rootDepth > sys.MaxDepthDiff+depth {\n\t\t\tdelete(g.incomplete, id)\n\t\t\tdelete(g.transactions, id)\n\n\t\t\tif g.indexer != nil {\n\t\t\t\tg.indexer.Remove(hex.EncodeToString(id[:]))\n\t\t\t}\n\n\t\t\tg.resolveChildren(id)\n\n\t\t\tdelete(g.children, id)\n\t\t}\n\n\t}\n\n\tg.eligibleIndex.Ascend(func(i btree.Item) bool {\n\t\tif rootDepth <= i.(*sortByDepthTX).Depth {\n\t\t\treturn true\n\t\t}\n\n\t\tpendingDepth = append(pendingDepth, i.(*sortByDepthTX))\n\n\t\treturn true\n\t})\n\n\tg.seedIndex.Ascend(func(i btree.Item) bool {\n\t\tif rootDepth < i.(*sortBySeedTX).Depth {\n\t\t\treturn true\n\t\t}\n\n\t\tpendingSeed = append(pendingSeed, i.(*sortBySeedTX))\n\n\t\treturn true\n\t})\n\n\tfor _, i := range pendingDepth 
{\n\t\tg.eligibleIndex.Delete(i)\n\t}\n\n\tfor _, i := range pendingSeed {\n\t\tg.seedIndex.Delete(i)\n\t}\n\n\tg.Unlock()\n}", "func (t *Tree) AddRecusively(data int) (err error) {\n\tt.Total += data\n\tt.NodeCount++\n\tif data < 0 {\n\t\treturn ErrPositiveIntegers\n\t}\n\tNodeToAdd := Node{Data: data}\n\tif t.Root == nil {\n\t\tt.Root = &NodeToAdd\n\t\treturn\n\t}\n\tt.addRecusively(t.Root, NodeToAdd)\n\tif t.Root != nil {\n\t\tnewTree := rebalance(t)\n\t\tif newTree.Root != nil {\n\t\t\t*t = newTree\n\t\t}\n\n\t}\n\treturn\n}", "func ListOfDepths(n *Node) []*Node {\n\n\tal := make([]*Node, 0)\n\tq := Queue{}\n\tq.Enqueue(n)\n\n\t// repeat until queue for a given level is not empty (reach bottom of the tree)\n\tfor !q.IsEmpty() {\n\n\t\t// All nodes of the current level of the tree.\n\t\tlevelNodes := len(q.Nodes)\n\n\t\t// head for a linked list that will hold all nodes of the current level\n\t\tvar head *Node\n\n\t\t// last node of a linked list for chaining new nodes\n\t\tvar tail *Node\n\n\t\t// go through all nodes\n\t\tfor levelNodes > 0 {\n\n\t\t\t// remove one\n\t\t\tn := q.Dequeue()\n\n\t\t\t// point head & tail to it if this is a first node on this level\n\t\t\tif head == nil {\n\t\t\t\thead = n\n\t\t\t\ttail = n\n\t\t\t} else {\n\t\t\t\t// chain to tail\n\t\t\t\ttail.Next = n\n\n\t\t\t\t// re-point to tail to the last node\n\t\t\t\ttail = tail.Next\n\t\t\t}\n\n\t\t\t// add children of current node for processing on the next level\n\t\t\tif n.Left != nil {\n\t\t\t\tq.Enqueue(n.Left)\n\t\t\t}\n\t\t\tif n.Right != nil {\n\t\t\t\tq.Enqueue(n.Right)\n\t\t\t}\n\n\t\t\tlevelNodes--\n\t\t}\n\n\t\t// add a head of the linked list for the current level to a slice of level linked lists\n\t\tal = append(al, head)\n\n\t}\n\treturn al\n}", "func (r *PerformanceResults) findAllChildrenGraphLookup(parent string, maxDepth int, tags []string) error {\n\tconf, session, err := cedar.GetSessionWithConfig(r.env)\n\tif err != nil {\n\t\treturn 
errors.WithStack(err)\n\t}\n\tdefer session.Close()\n\n\tmatch := bson.M{\"$match\": bson.M{\"_id\": parent}}\n\tgraphLookup := bson.M{\n\t\t\"$graphLookup\": bson.M{\n\t\t\t\"from\": perfResultCollection,\n\t\t\t\"startWith\": \"$\" + \"_id\",\n\t\t\t\"connectFromField\": \"_id\",\n\t\t\t\"connectToField\": bsonutil.GetDottedKeyName(\"info\", \"parent\"),\n\t\t\t\"as\": \"children\",\n\t\t},\n\t}\n\tif maxDepth >= 0 {\n\t\tfields := graphLookup[\"$graphLookup\"].(bson.M)\n\t\tfields[\"maxDepth\"] = maxDepth\n\t}\n\tvar project bson.M\n\tif len(tags) > 0 {\n\t\tproject = bson.M{\n\t\t\t\"$project\": bson.M{\n\t\t\t\t\"_id\": 0,\n\t\t\t\t\"children\": bson.M{\n\t\t\t\t\t\"$filter\": bson.M{\n\t\t\t\t\t\t\"input\": \"$\" + \"children\",\n\t\t\t\t\t\t\"as\": \"child\",\n\t\t\t\t\t\t\"cond\": bson.M{\n\t\t\t\t\t\t\t\"$eq\": []interface{}{\n\t\t\t\t\t\t\t\ttags,\n\t\t\t\t\t\t\t\t\"$$\" + bsonutil.GetDottedKeyName(\"child\", \"info\", \"tags\"),\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\t} else {\n\t\tproject = bson.M{\"$project\": bson.M{\"_id\": 0, \"children\": 1}}\n\t}\n\tpipeline := []bson.M{\n\t\tmatch,\n\t\tgraphLookup,\n\t\tproject,\n\t}\n\tpipe := session.DB(conf.DatabaseName).C(perfResultCollection).Pipe(pipeline)\n\titer := pipe.Iter()\n\tdefer iter.Close()\n\n\tdoc := struct {\n\t\tChildren []PerformanceResult `bson:\"children,omitempty\"`\n\t}{}\n\tfor iter.Next(&doc) {\n\t\tr.Results = append(r.Results, doc.Children...)\n\t}\n\tif err = iter.Err(); err != nil {\n\t\treturn errors.Wrap(err, \"problem getting children\")\n\t}\n\treturn nil\n}", "func check(menus menuMap, members flagMap, maxDepth, id, depth int) bool {\n\tif depth > maxDepth {\n\t\treturn false\n\t}\n\tif members[id] {\n\t\treturn false\n\t}\n\n\tmembers[id] = true\n\tdepth++\n\n\tfor _, childID := range menus[id].ChildIDs {\n\t\tif !check(menus, members, maxDepth, childID, depth) {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func (d 
*dataUsageCache) flatten(root dataUsageEntry) dataUsageEntry {\n\tfor id := range root.Children {\n\t\te := d.Cache[id]\n\t\tif len(e.Children) > 0 {\n\t\t\te = d.flatten(e)\n\t\t}\n\t\troot.merge(e)\n\t}\n\troot.Children = nil\n\treturn root\n}", "func children(e *yang.Entry) []*yang.Entry {\n\tvar entries []*yang.Entry\n\n\tfor _, e := range e.Dir {\n\t\tif e.RPC == nil {\n\t\t\tentries = append(entries, e)\n\t\t}\n\t}\n\treturn entries\n}", "func (d *dataUsageCache) copyWithChildren(src *dataUsageCache, hash dataUsageHash, parent *dataUsageHash) {\n\tif d.Cache == nil {\n\t\td.Cache = make(map[string]dataUsageEntry, 100)\n\t}\n\te, ok := src.Cache[hash.String()]\n\tif !ok {\n\t\treturn\n\t}\n\td.Cache[hash.Key()] = e\n\tfor ch := range e.Children {\n\t\tif ch == hash.Key() {\n\t\t\tlogger.LogIf(GlobalContext, errors.New(\"dataUsageCache.copyWithChildren: Circular reference\"))\n\t\t\treturn\n\t\t}\n\t\td.copyWithChildren(src, dataUsageHash(ch), &hash)\n\t}\n\tif parent != nil {\n\t\tp := d.Cache[parent.Key()]\n\t\tp.addChild(hash)\n\t\td.Cache[parent.Key()] = p\n\t}\n}", "func InfoDepth(depth int, args ...interface{}) {\n\tcurrentLogger.InfoDepth(context.Background(), defaultDepth+depth, args...)\n}", "func levelOrder(root *TreeNode) [][]int {\n\tvar (\n\t\tvalues *[]int\n\t)\n\tif root == nil {\n\t\treturn nil\n\t}\n\tret := make([][]int, 0)\n\tnext := []*TreeNode{root}\n\tfor {\n\t\tif len(next) != 0 {\n\t\t\tnext, values = children(next)\n\t\t\tret = append(ret, *values)\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn ret\n}", "func (a *LocalActivations) Depth() int {\n\treturn len(a.activations)\n}", "func (r *RelativePath) Depth() int {\n\treturn len(r.stack)\n}", "func (r *PerformanceResults) findAllChildren(parent string, depth int) error {\n\tif depth == 0 {\n\t\treturn nil\n\t}\n\n\tsearch := bson.M{bsonutil.GetDottedKeyName(\"info\", \"parent\"): parent}\n\tconf, session, err := cedar.GetSessionWithConfig(r.env)\n\tif err != nil {\n\t\treturn 
errors.WithStack(err)\n\t}\n\tdefer session.Close()\n\ttemp := []PerformanceResult{}\n\terr = session.DB(conf.DatabaseName).C(perfResultCollection).Find(search).All(&temp)\n\tif err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\n\tcatcher := grip.NewCatcher()\n\tfor _, result := range temp {\n\t\t// look into that parent\n\t\tcatcher.Add(r.findAllChildren(result.ID, depth-1))\n\t}\n\n\tr.Results = append(r.Results, temp...)\n\n\treturn errors.WithStack(catcher.Resolve())\n}", "func (n *Node) recurseSemantics() {\n\tn.semantics()\n\tif len(n.Children) == 0 {\n\t\treturn\n\t}\n\tfor _, child := range n.Children {\n\t\tchild.recurseSemantics()\n\t}\n}", "func (n *Node) AddChildren(children []*Node) {\n\tfor _, child := range children {\n\t\tn.AddChild(child)\n\t}\n}", "func (di *directoryInfo) walk() {\n\tif di.regexp == nil {\n\t\treturn\n\t}\n\n\tdi.mu.Lock()\n\tdefer di.mu.Unlock()\n\n\tdi.root.Walk(func(fi fileinfo.FileInfo) error {\n\t\tif fi.IsDir() {\n\t\t\treturn nil\n\t\t}\n\n\t\t// append\n\t\tif _, _, err := di.addPath(fi); err != nil {\n\t\t\tlog.Println(err)\n\t\t}\n\n\t\treturn nil\n\t})\n}", "func (e *Entity) AddChild(childs ...*Entity) { // Tribute to Childs from The Thing\n\tfor _, c := range childs {\n\t\t// TODO: Growth code could be optimized out of this loop\n\t\tcurrentCap := cap(e.Children)\n\t\tif len(e.Children) == currentCap {\n\t\t\tnewChildren := make([]*Entity, currentCap, currentCap*2)\n\t\t\tcopy(newChildren, e.Children)\n\t\t\te.Children = append(newChildren, c)\n\t\t} else {\n\t\t\te.Children = append(e.Children, c)\n\t\t}\n\t}\n}", "func (l *Logger) configure() {\n\tfor _, child := range l.children {\n\t\tif child.Threshold == Undefined {\n\t\t\tchild.Threshold = l.Threshold\n\t\t}\n\t\tchild.configure()\n\t}\n}", "func (tn *TreeNode) AddDeep(el Element, separator string) {\n\tparent := tn\n\tnames := strings.Split(el.Name, separator)\n\tfor _, name := range names {\n\t\ttrn := NewTreeNode(name, el.Value)\n\t\tparent = 
parent.Add(trn)\n\t}\n}", "func InfolnDepth(depth int, args ...interface{}) {\n\tcurrentLogger.InfolnDepth(context.Background(), defaultDepth+depth, args...)\n}", "func DepthMask(flag bool) {\n\tsyscall.Syscall(gpDepthMask, 1, boolToUintptr(flag), 0, 0)\n}", "func dfs(nodes []node, target string, currNodeIndex, depth int) int {\n\tcurrNode := nodes[currNodeIndex]\n\tif currNode.name == target {\n\t\treturn depth\n\t}\n\tfor _, child := range nodes[currNodeIndex].children {\n\t\tresult := dfs(nodes, target, getNodeIndex(nodes, child), depth+1)\n\t\tif result != -1 {\n\t\t\treturn result\n\t\t}\n\t}\n\treturn -1\n}", "func (n *Node) Level() int {\n\treturn n.Depth() + 1\n}", "func (c *Reader) addParents(filepath string) {\n\tdir, f := path.Split(filepath)\n\tif dir == \"\" {\n\t\tdir = \"/\"\n\t} else {\n\t\tdir = strings.TrimSuffix(dir, \"/\")\n\t}\n\tdebug(fmt.Sprintf(\"adding %q as a child of %q\", f, dir))\n\t// TODO(asjoyner): handle file + directory collisions\n\tif parent, ok := c.nodes[dir]; !ok {\n\t\t// if the parent node doesn't yet exist, initialize it\n\t\tc.nodes[dir] = Node{\n\t\t\tFilename: dir,\n\t\t\tChildren: map[string]bool{f: true},\n\t\t}\n\t} else {\n\t\tparent.Children[f] = true\n\t}\n\tif dir != \"/\" {\n\t\tc.addParents(dir)\n\t}\n}", "func minDepth(root *TreeNode) int {\n\tif root == nil {\n\t\treturn 0\n\t}\n\tres := 0\n\tqueue := []*TreeNode{root}\n\tfor len(queue) > 0 {\n\t\tsize := len(queue)\n\t\tres++\n\t\t// fmt.Println(queue[0].Val)\n\t\tfor i := 0; i < size; i++ {\n\t\t\t// node := queue[0]\n\t\t\t// queue = queue[1:]\n\t\t\tnode := queue[i]\n\t\t\tif node.Left == nil && node.Right == nil {\n\t\t\t\treturn res\n\t\t\t}\n\t\t\tif node.Left != nil {\n\t\t\t\tqueue = append(queue, node.Left)\n\t\t\t}\n\t\t\tif node.Right != nil {\n\t\t\t\tqueue = append(queue, node.Right)\n\t\t\t}\n\t\t}\n\t\t// ! 
一次弹出,提高性能\n\t\tqueue = queue[size:]\n\t}\n\treturn res\n}", "func (a *GetFlattenedDocumentArgs) SetDepth(depth int) *GetFlattenedDocumentArgs {\n\ta.Depth = &depth\n\treturn a\n}", "func (t *Tree) Depth() int {\n\tvar d int\n\tt.BFS(func(n *Node) {\n\t\tdep := n.Depth()\n\t\tif dep > d {\n\t\t\td = dep\n\t\t}\n\t})\n\treturn d\n}", "func (node *Node) getChildTree(nodes *[]*Node) {\n\t*nodes = append(*nodes, node)\n\tfor _, child := range node.Children {\n\t\tchild.getChildTree(nodes)\n\t}\n}", "func (s *stencilOverdraw) storeNewDepthValues(ctx context.Context,\n\tcb CommandBuilder,\n\tgs *api.GlobalState,\n\tst *State,\n\ta arena.Arena,\n\tdevice VkDevice,\n\tqueue VkQueue,\n\tcmdBuffer VkCommandBuffer,\n\trenderInfo renderInfo,\n\talloc func(v ...interface{}) api.AllocResult,\n\taddCleanup func(func()),\n\tout transform.Writer,\n) error {\n\tif renderInfo.depthIdx == ^uint32(0) {\n\t\treturn nil\n\t}\n\trpInfo := st.RenderPasses().Get(renderInfo.renderPass)\n\tdaInfo := rpInfo.AttachmentDescriptions().Get(renderInfo.depthIdx)\n\n\tif daInfo.StoreOp() != VkAttachmentStoreOp_VK_ATTACHMENT_STORE_OP_STORE {\n\t\treturn nil\n\t}\n\n\tfbInfo := st.Framebuffers().Get(renderInfo.framebuffer)\n\n\toldImageView := fbInfo.ImageAttachments().Get(uint32(fbInfo.ImageAttachments().Len() - 1))\n\tnewImageView := fbInfo.ImageAttachments().Get(renderInfo.depthIdx)\n\n\toldImageDesc := imageDesc{\n\t\toldImageView.Image(),\n\t\toldImageView.SubresourceRange(),\n\t\trpInfo.AttachmentDescriptions().Get(uint32(fbInfo.ImageAttachments().Len() - 1)).FinalLayout(),\n\t\tVkImageAspectFlagBits_VK_IMAGE_ASPECT_DEPTH_BIT,\n\t}\n\tnewImageDesc := imageDesc{\n\t\tnewImageView.Image(),\n\t\tnewImageView.SubresourceRange(),\n\t\tdaInfo.FinalLayout(),\n\t\tVkImageAspectFlagBits_VK_IMAGE_ASPECT_DEPTH_BIT,\n\t}\n\treturn s.transferDepthValues(ctx, cb, gs, st, a,\n\t\tdevice, queue, cmdBuffer,\n\t\tfbInfo.Width(), fbInfo.Height(),\n\t\toldImageDesc, newImageDesc,\n\t\talloc, addCleanup, out)\n}", 
"func (self *TNode) addnode(aParent *TNode, aNodes []*TNode, i int, aIsHook bool) *TNode {\n\tif len(aParent.Children) == 0 {\n\t\taParent.Children = make([]*TNode, 0)\n\t}\n\n\t// 如果:找到[已经注册]的分支节点则从该节继续[查找/添加]下一个节点\n\tfor _, n := range aParent.Children {\n\t\tif n.Equal(aNodes[i]) {\n\t\t\t// 如果:插入的节点层级已经到末尾,则为该节点注册路由\n\t\t\tif i == len(aNodes)-1 {\n\t\t\t\t// 原始路由会被替换\n\t\t\t\tif aIsHook {\n\t\t\t\t\tn.Route.CombineController(aNodes[i].Route)\n\t\t\t\t} else {\n\t\t\t\t\tn.Route = aNodes[i].Route\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn n\n\t\t}\n\t}\n\n\t// 如果:该节点没有对应分支则插入同级的aNodes为新的分支\n\taParent.Children = append(aParent.Children, aNodes[i])\n\tsort.Sort(aParent.Children)\n\treturn aNodes[i]\n}", "func (repo *Repo) depthReached(i int) bool {\n\tif repo.Manager.Opts.Depth != 0 && repo.Manager.Opts.Depth == i {\n\t\tlog.Warnf(\"Exceeded depth limit (%d)\", i)\n\t\treturn true\n\t}\n\treturn false\n}", "func (ns *Namespace) populateDescendants(d map[string]bool) {\n\tfor _, c := range ns.ChildNames() {\n\t\tif d[c] {\n\t\t\tcontinue\n\t\t}\n\t\td[c] = true\n\t\tcns := ns.forest.Get(c)\n\t\tcns.populateDescendants(d)\n\t}\n}" ]
[ "0.5680863", "0.5596721", "0.550769", "0.5493143", "0.54508895", "0.5405676", "0.5312346", "0.53112936", "0.5299934", "0.5266087", "0.52549666", "0.5252434", "0.52466196", "0.5237852", "0.52225965", "0.52033013", "0.5199025", "0.518541", "0.51852065", "0.5169843", "0.5168419", "0.5163856", "0.51494217", "0.51039326", "0.51005954", "0.50584173", "0.5041225", "0.5040312", "0.50386614", "0.5031396", "0.5030796", "0.5028837", "0.50190663", "0.5013093", "0.49988604", "0.49978426", "0.49942952", "0.49929887", "0.4987857", "0.4975603", "0.4955639", "0.49498904", "0.4949375", "0.49471816", "0.49469328", "0.49235576", "0.49160856", "0.49149206", "0.4912511", "0.49100614", "0.4900277", "0.49000594", "0.48895973", "0.4879162", "0.48709485", "0.486028", "0.48524883", "0.48327902", "0.4830912", "0.48264667", "0.48261154", "0.48244718", "0.48147795", "0.48128736", "0.48065", "0.47921842", "0.47881407", "0.47849837", "0.47835383", "0.47833183", "0.47768116", "0.4776376", "0.47731513", "0.47679487", "0.47673225", "0.47526634", "0.47491175", "0.47450963", "0.4742898", "0.4741928", "0.47344822", "0.47221324", "0.47218046", "0.4719113", "0.47143364", "0.47117013", "0.470806", "0.4703033", "0.4702672", "0.47007868", "0.47001234", "0.46980897", "0.4696482", "0.46923918", "0.46815917", "0.46771514", "0.46749482", "0.46734607", "0.4673435", "0.46697024" ]
0.640293
0
New returns a new instance of an echo HTTP server
func New() Server { return &echoServer{ Instance: echo.New(), } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func New(address string, branch string, secret string, logger *logrus.Logger) http.Handler {\n\tproto := \"tcp\"\n\taddr := address\n\tif strings.HasPrefix(addr, \"unix:\") {\n\t\tproto = \"unix\"\n\t\taddr = addr[5:]\n\t}\n\treturn &Server{\n\t\tproto: proto,\n\t\taddress: addr,\n\t\tbranch: branch,\n\t\tsecret: secret,\n\t\tlogger: logger,\n\t}\n}", "func New(host, port string, h http.Handler) *WebServer {\n\tvar ws WebServer\n\n\tws.Addr = net.JoinHostPort(host, port)\n\tws.Handler = h\n\n\treturn &ws\n}", "func New(app application.App) *http.Server {\n\te := echo.New()\n\tlog := logger.New()\n\n\tb := binder.New()\n\te.Binder = b\n\n\te.Use(metrics.Middleware(app.Metrics))\n\te.Use(logger.Middleware())\n\te.Use(recovery.Middleware())\n\n\tsentryecho.RegisterErrorHandlerWithOptions(e, sentryecho.Options{\n\t\tReporter: &app.Sentry,\n\t\tEnableCustomErrorMessages: true,\n\t})\n\n\thealth.RegisterRoutes(e)\n\tclusters.RegisterRoutes(e, app)\n\n\tsrv := &http.Server{\n\t\tAddr: fmt.Sprintf(\":%d\", app.Config.Port),\n\t\tHandler: e,\n\t\tReadTimeout: 65 * time.Second,\n\t\tWriteTimeout: 65 * time.Second,\n\t}\n\n\tgraceful := signals.Setup()\n\n\tgo func() {\n\t\t<-graceful\n\t\terr := srv.Shutdown(context.Background())\n\t\tif err != nil {\n\t\t\tlog.Err(err).Error(\"server shutdown\")\n\t\t}\n\t}()\n\n\treturn srv\n}", "func New() HelloServer {\n\thttp.DefaultServeMux = new(http.ServeMux)\n\treturn HelloServer{\n\t\t&http.Server{\n\t\t\tAddr: \":7100\",\n\t\t},\n\t}\n}", "func New() *Server {\n\tsv := &Server{\n\t\tE: echo.New(),\n\t\tH: handlers.New(),\n\t}\n\tsv.routes()\n\treturn sv\n}", "func New(c *controller.Controller) *Server {\n\ts := &Server{\n\t\te: echo.New(),\n\t\tc: c,\n\t}\n\n\t// Middleware\n\ts.e.Use(middleware.Logger())\n\ts.e.Use(middleware.Recover())\n\n\ts.populateRoutes()\n\n\treturn s\n}", "func New() *echo.Echo {\n\te := 
echo.New()\n\n\te.Use(middleware.Recover())\n\te.Use(middleware.CORS())\n\te.Use(middleware.LoggerWithConfig(middleware.LoggerConfig{Format: loggerFormat()}))\n\n\tGetRoutes(e)\n\n\te.HideBanner = true\n\te.Logger.Fatal(e.Start(\":\" + \"9090\"))\n\treturn e\n}", "func New(config Configuration, storage storage.Storage, groups map[string]groups.Group) *HTTPServer {\n\treturn &HTTPServer{\n\t\tConfig: config,\n\t\tStorage: storage,\n\t\tGroups: groups,\n\t}\n}", "func New(server *http.Server) (*Server, error) {\n\tlistener, err := zerodown.Listen(\"tcp\", server.Addr)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &Server{\n\t\tserver: server,\n\t\tlistener: listener,\n\t}, nil\n}", "func New(addr string, host app.HostService, collector *metrics.Collector) app.Server {\n\treturn &server{\n\t\tsrv: telnet.Server{Addr: addr, Handler: nil},\n\t\thost: host,\n\t\tcollector: collector,\n\t}\n}", "func New(h *handler.Handler, c *config.Config) {\n\ttokenAuth = jwtauth.New(\"HS256\", []byte(c.Token), nil)\n\tr := chi.NewRouter()\n\ts := &server{\n\t\thand: h,\n\t\trouter: r,\n\t\taddress: c.Address,\n\t}\n\ts.makeHandlers()\n\ts.startServer()\n}", "func NewHTTP(config *Config, logger kitlog.Logger) *HTTP {\n\tlogger = kitlog.With(logger, \"module\", \"http\")\n\n\tsrv := &http.Server{\n\t\tAddr: config.Addr,\n\t\tReadTimeout: time.Duration(config.ServerTimeout) * time.Second,\n\t\tWriteTimeout: time.Duration(2*config.ServerTimeout) * time.Second,\n\t}\n\n\treturn &HTTP{\n\t\tlogger: logger,\n\t\tsrv: srv,\n\t\tConfig: config,\n\t}\n}", "func New(host, port string, handlers handler.Param) *Server {\n\tfor path, hndl := range handlers {\n\t\thttp.HandleFunc(path, hndl)\n\t}\n\n\ttr := &http.Transport{\n\t\tTLSClientConfig: &tls.Config{InsecureSkipVerify: true},\n\t}\n\treturn &Server{\n\t\tHost: host,\n\t\tPort: port,\n\t\tHandlers: handlers,\n\t\tListener: nil,\n\t\tClient: &http.Client{Transport: tr},\n\t\tSecureClient: nil,\n\t\tCertificate: 
&certificate.CACertificate{},\n\t}\n}", "func New(au auth.Service) *Server {\n\ts := &Server{\n\t\tAuth: au,\n\t}\n\n\te := echo.New()\n\te.Use(middleware.Logger())\n\te.Use(middleware.Recover())\n\tapiG := e.Group(\"/api\")\n\th := authHandler{s: au}\n\th.addGroup(apiG)\n\n\ts.Host = e\n\treturn s\n}", "func NewHTTP(port uint16, pachClientFactory func(ctx context.Context) *client.APIClient) *HTTP {\n\tmux := http.NewServeMux()\n\thandler := &Server{\n\t\tpachClientFactory: pachClientFactory,\n\t}\n\tmux.Handle(\"/archive/\", CSRFWrapper(handler))\n\tmux.Handle(\"/healthz\", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tw.WriteHeader(http.StatusOK)\n\t\tw.Write([]byte(\"healthy\\n\")) //nolint:errcheck\n\t}))\n\treturn &HTTP{\n\t\tmux: mux,\n\t\tserver: &http.Server{\n\t\t\tAddr: fmt.Sprintf(\":%d\", port),\n\t\t\tHandler: mux,\n\t\t},\n\t}\n}", "func New(addr string) *Server {\n\treturn &Server{\n\t\tServer: &http.Server{\n\t\t\tAddr: addr,\n\t\t},\n\t\tlogger: defaultLogger,\n\t}\n}", "func New() *Server {\n\treturn &Server{srv: &http.Server{\n\t\tReadTimeout: 5 * time.Second,\n\t\tWriteTimeout: 5 * time.Second,\n\t}}\n}", "func New(o *Options, logger *zap.Logger) *Server {\n\thttpClient := &http.Client{\n\t\tTimeout: o.HTTPClientTimeout,\n\t}\n\n\treturn &Server{\n\t\toptions: o,\n\t\thttpClient: httpClient,\n\t\trouter: mux.NewRouter(),\n\t\tlogger: logger,\n\t}\n}", "func New(opt *Options) (server *Server, err error) {\n\tserver = &Server{\n\t\toptions: opt,\n\t\tServer: http.Server{\n\t\t\tAddr: fmt.Sprintf(\"0.0.0.0:%v\", opt.Config.Port),\n\t\t\tErrorLog: slog.New(log.New().Writer(), \"server\", 0),\n\t\t},\n\t}\n\n\tserver.Handler, err = server.createHandler()\n\treturn\n}", "func New(info Info) *WebServer {\n\trouter := bone.New()\n\t// Add more to this later on\n\treturn &WebServer{info.Listen + \":\" + info.Port, router}\n}", "func New() *Server {\n\tws := &Server{}\n\tws.controls = make([]control.Control, 0)\n\tws.middleware = 
make([]middle.Handler, 0)\n\tws.staticFiles = make(map[string]string)\n\tws.router = mux.NewRouter()\n\n\thttp.Handle(\"/\", ws.router)\n\treturn ws\n}", "func New(address string) *Server {\n\n\tserver := &Server{\n\t\taddress: address,\n\t}\n\n\thttp.HandleFunc(\"/upload\", uploadFileHandler())\n\n\tfs := http.FileServer(http.Dir(uploadPath))\n\thttp.Handle(\"/files/\", http.StripPrefix(\"/files\", fs))\n\n\tlog.Println(\"Starting server at port :8080\")\n\n\tlog.Fatal(http.ListenAndServe(\":\"+server.address, nil))\n\n\treturn server\n}", "func New(config conf.Config) *echo.Echo {\n\tsrv := echo.New()\n\tsrv.HideBanner = true\n\tsrv.HidePort = true\n\tsrv.Use(contextMiddleware(config))\n\tsrv.Use(loggerMiddleware(config))\n\tsrv.Use(cleanupMiddleware())\n\tsrv.Use(errorMiddleware())\n\t// Serve static content from the static folder\n\tsrv.Static(\"/\", \"static\")\n\tsrv.GET(pingEndpoint(config), pingHandler)\n\tsrv.POST(mergeEndpoint(config), mergeHandler)\n\tif config.DisableGoogleChrome() && config.DisableUnoconv() {\n\t\treturn srv\n\t}\n\tif !config.DisableGoogleChrome() {\n\t\tsrv.POST(htmlEndpoint(config), htmlHandler)\n\t\tsrv.POST(urlEndpoint(config), urlHandler)\n\t\tsrv.POST(markdownEndpoint(config), markdownHandler)\n\t}\n\tif !config.DisableUnoconv() {\n\t\tsrv.POST(officeEndpoint(config), officeHandler)\n\t}\n\tif !config.DisableInkscape() {\n\t\tsrv.POST(inkscapeEndpoint(config), inkscapeHandler)\n\t}\n\treturn srv\n}", "func New(trapHost string) *Server {\n\tmux := gmux.NewRouter()\n\tvar websockets []SocketClient\n\treturn &Server{\n\t\tTrapHost: trapHost,\n\t\tWebsockets: websockets,\n\t\tmux: mux,\n\t}\n}", "func NewServer() *Server {}", "func New(\n\taddr string,\n\thandler Handler,\n\tlog *log.Logger,\n\tworkersCount uint8,\n) (srv *Server) {\n\tsrv = &Server{\n\t\taddr: addr,\n\t\thandler: handler,\n\t\tlog: log,\n\t\tClients: newClients(),\n\t\tchStop: make(chan bool, 1),\n\t\tchRequest: make(chan *tRequest, 
workersCount),\n\t}\n\n\treturn\n}", "func New() *echo.Echo {\n\te := echo.New()\n\n\t// Middlewares\n\te.Use(middleware.Logger())\n\te.Use(middleware.CORS())\n\te.Use(middleware.RequestID())\n\te.Use(middleware.Recover())\n\t// Validator\n\te.Validator = validator.New()\n\t// Error handling\n\te.HTTPErrorHandler = apperror.HTTPErrorHandler\n\n\treturn e\n}", "func New() (IServer, error) {\n\turlString, ok := os.LookupEnv(\"TESTTRACK_CLI_URL\")\n\tif !ok {\n\t\treturn nil, errors.New(\"TESTTRACK_CLI_URL must be set\")\n\t}\n\n\turl, err := url.ParseRequestURI(urlString)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Server{url: url}, nil\n}", "func New(options Options) Server {\n\treturn &server{\n\t\tbindAddress: options.BindAddress,\n\t\tmux: http.NewServeMux(),\n\t}\n}", "func New(config *Config) *Server {\n\ts := &Server{\n\t\tconfig: config,\n\t\trouter: chi.NewRouter(),\n\t\tlogger: newLogger(config.LogDebug),\n\t}\n\n\treturn s\n}", "func NewHTTP(host string, port int) Static {\n\treturn Static{\n\t\tprotocol: ProtocolHTTP,\n\t\thost: host,\n\t\tport: port,\n\t}\n}", "func newHTTPServer(appConfig config.AppConfig, logger services.Logger) services.HTTPServer {\n\treturn services.NewDefaultHTTPServer(appConfig.Port, logger)\n}", "func New(router simplehttp.Router, logger *log.Logger) *Server {\n\treturn FromHTTPServer(&http.Server{}, router, logger)\n}", "func New(c Config) *http.Server {\n\n\thandler := &RateLimitHandler{\n\t\trecords: map[string]*Record{},\n\t\tlimit: c.Limit,\n\t\twindow: c.Window,\n\t}\n\n\ts := &http.Server{\n\t\tAddr: fmt.Sprintf(\"0.0.0.0:%d\", c.Port),\n\t\tHandler: handler,\n\t}\n\treturn s\n}", "func New(options *Options) (*HTTPServer, error) {\n\tvar h HTTPServer\n\tEnableUpload = options.EnableUpload\n\tEnableVerbose = options.Verbose\n\tfolder, err := filepath.Abs(options.Folder)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif _, err := os.Stat(folder); os.IsNotExist(err) {\n\t\treturn nil, errors.New(\"path does not 
exist\")\n\t}\n\toptions.Folder = folder\n\tvar dir http.FileSystem\n\tdir = http.Dir(options.Folder)\n\tif options.Sandbox {\n\t\tdir = SandboxFileSystem{fs: http.Dir(options.Folder), RootFolder: options.Folder}\n\t}\n\th.layers = h.loglayer(http.FileServer(dir))\n\tif options.BasicAuthUsername != \"\" || options.BasicAuthPassword != \"\" {\n\t\th.layers = h.loglayer(h.basicauthlayer(http.FileServer(dir)))\n\t}\n\th.options = options\n\n\treturn &h, nil\n}", "func New(host string) *service {\n\treturn &service{\n\t\thost: host,\n\t\tmux: http.NewServeMux(),\n\t}\n}", "func New(addr string) (*Server, error) {\n\ts := &Server{\n\t\taddr: addr,\n\t\tshutdownTimeout: time.Minute,\n\t\tSessions: make(chan *Session),\n\t}\n\ts.hs = &http.Server{Handler: s}\n\tln, err := net.Listen(\"tcp\", s.addr)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ts.ln = ln\n\ts.ListeningAddr = fmt.Sprintf(\":%d\", s.ln.Addr().(*net.TCPAddr).Port)\n\treturn s, nil\n}", "func (s *Server) New() (*http.Server, error) {\n\taddr := s.Address\n\tif addr == \"\" {\n\t\taddr = defaultAddr\n\t}\n\th, err := s.Handler()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsrv := &http.Server{\n\t\tHandler: h,\n\t\tAddr: addr,\n\t}\n\treturn srv, nil\n}", "func New() Server {\n\trouter := chi.NewRouter()\n\n\trouter.Use(middleware.Timeout(3 * time.Second))\n\trouter.Use(middleware.Logger)\n\trouter.Use(middleware.Recoverer)\n\trouter.Use(middleware.RealIP)\n\trouter.Use(middleware.RequestID)\n\trouter.Use(middleware.Throttle(1000))\n\trouter.Use(middleware.NoCache)\n\trouter.Use(middleware.SetHeader(\"Content-Type\", \"application/json\"))\n\n\treturn &server{Router: router}\n}", "func New() *Server {\n\ts := &Server{}\n\ts.e = echo.New()\n\ts.e.Use(middleware.Logger())\n\ts.e.Use(middleware.Recover())\n\ts.e.Use(middleware.CORS())\n\ts.initConfig()\n\ts.dbConnection()\n\ts.migrate()\n\ts.registerController()\n\treturn s\n}", "func newHTTPServer(address string, tlsConfig *tls.Config, handler http.Handler) 
*http.Server {\n\tserver := &http.Server{\n\t\tAddr: address,\n\t\tTLSConfig: tlsConfig,\n\t\tHandler: handler,\n\t}\n\treturn server\n}", "func New(store *vehicle.Store, mngr *extlookup.Manager, cnf config.Config) *WebServer {\n\treturn &WebServer{time.Now(), store, mngr, cnf}\n}", "func New(c *Config) *Server {\n\ts := &Server{\n\t\techo: echo.New(),\n\t\ttasksConfig: c.Tasks,\n\t}\n\n\te := s.echo\n\te.Logger.SetLevel(glog.INFO)\n\t//e.Use(middleware.Logger())\n\n\tssoConf := &sso.OAuth2Config{\n\t\tJWTAuthConfig: sso.JWTAuthConfig{\n\t\t\tPrivateKey: c.PrivateKey,\n\t\t},\n\t\tOAuth2: &oauth2.Config{\n\t\t\tClientID: c.OAuthClientID,\n\t\t\tClientSecret: c.OAuthClientSecret,\n\t\t\tScopes: []string{\"user:email\"},\n\t\t\tEndpoint: sso.GithubEnterpriseEndpoint(githubEnterpriseDomain),\n\t\t},\n\t\tEmailLookupFunc: sso.GithubEnterpriseEmailLookup(githubEnterpriseDomain),\n\t\tNoAuthn: true,\n\t}\n\n\te.Use(sso.OAuth2FromConfig(ssoConf))\n\n\tapi := e.Group(\"/api\")\n\tapi.Use(sso.JWTAuthFromConfig(&ssoConf.JWTAuthConfig))\n\ttasks := api.Group(\"/tasks\")\n\ttasks.GET(\"\", s.getTasks)\n\ttasks.POST(\"/:name/run\", s.runTask)\n\n\tnames := s.tasksConfig.TaskNames()\n\tfor _, n := range names {\n\t\te.GET(fmt.Sprintf(\"/%s\", n), serveStatic(\"build/index.html\"))\n\t}\n\n\te.GET(\"/*\", serveStatic(\"build\"))\n\n\treturn s\n}", "func New(s *service.Service) (engine *bm.Engine, err error) {\n\tvar (\n\t\tcfg struct {\n\t\t\tbm.ServerConfig\n\t\t\tCrossDomains []string\n\t\t}\n\t\tct paladin.TOML\n\t)\n\tif err = paladin.Get(\"http.toml\").Unmarshal(&ct); err != nil {\n\t\treturn\n\t}\n\tif err = ct.Get(\"Server\").UnmarshalTOML(&cfg); err != nil {\n\t\treturn\n\t}\n\tengine = bm.DefaultServer(&cfg.ServerConfig)\n\tengine.Use(s.As.CORS(cfg.CrossDomains))\n\tengine.Use(gzip.Gzip(gzip.DefaultCompression))\n\tinitRouter(engine, s)\n\terr = engine.Start()\n\treturn\n}", "func New(config Configuration, servicesConfig services.Configuration, groupsChannel chan 
[]groups.Group, contentChannel chan content.RuleContentDirectory) *HTTPServer {\n\treturn &HTTPServer{\n\t\tConfig: config,\n\t\tServicesConfig: servicesConfig,\n\t\tGroupsChannel: groupsChannel,\n\t\tContentChannel: contentChannel,\n\t}\n}", "func New(view *rom.View) *Server {\n\trouter := vestigo.NewRouter()\n\n\ts := &Server{\n\t\trom: view,\n\t\thttpServer: &http.Server{\n\t\t\tAddr: \"127.0.0.1:8064\",\n\t\t\tReadTimeout: 5 * time.Second,\n\t\t\tWriteTimeout: 10 * time.Second,\n\t\t\tIdleTimeout: 15 * time.Second,\n\t\t\tHandler: router,\n\t\t},\n\t\tstatic: packr.NewBox(\"../front/dist\"),\n\t\trouter: router,\n\t}\n\n\ts.setupRoutes()\n\n\treturn s\n}", "func New(logger *zap.Logger) *HTTP {\n\th := &HTTP{\n\t\tlogger: logger,\n\t\tcreateClientFunc: createHTTPClient,\n\t}\n\n\treturn h\n}", "func NewHTTP(cfg config.Config) *HTTP {\n\tclient := &http.Client{\n\t\tTimeout: cfg.Timeout,\n\t}\n\treturn &HTTP{\n\t\tclient: client,\n\t\tconfig: cfg,\n\t}\n}", "func New(ctx context.Context, cfg *config.Configuration) (*http.Server, error) {\n\tvar err error\n\n\tonce.Do(func() {\n\t\t// Initialize application\n\t\tapp = &application{\n\t\t\tcfg: cfg,\n\t\t}\n\n\t\t// Initialize core context\n\t\tapp.server, err = setup(ctx, cfg)\n\t})\n\n\t// Return server\n\treturn app.server, err\n}", "func New(middleware ...Handler) *Server {\n\tdebugPrintWARNINGNew()\n\tserv := &Server{\n\t\trouter: make(tree.Trees, 0, 9),\n\t\tnotFound: []Handler{default404Handler},\n\t\tnoMethod: []Handler{default405Handler},\n\t\tmiddleware: middleware,\n\t\tRedirectTrailingSlash: true,\n\t\tRedirectFixedPath: false,\n\t\tMaxMultipartMemory: defaultMultipartMemory,\n\t}\n\n\tserv.pool.New = func() interface{} {\n\t\treturn serv.allocateContext()\n\t}\n\treturn serv\n}", "func New(address string) *Server {\n\treturn &Server{\n\t\taddress: address,\n\t\thandlerGet: NewGetHandler(&get.Getter{}),\n\t\thandlerList: NewListHandler(&list.Lister{}),\n\t\thandlerNotFound: 
notFoundHandler,\n\t\thandlerRegister: NewRegisterHandler(&register.Registerer{}),\n\t}\n}", "func New(e *goastarter.Endpoints, uh goagrpc.UnaryHandler) *Server {\n\treturn &Server{\n\t\tAddH: NewAddHandler(e.Add, uh),\n\t}\n}", "func New(address string) *Server {\n connection, err := net.Dial(\"tcp\", address)\n if err != nil {\n return nil;\n }\n reader := bufio.NewReader(connection)\n\n\treturn &Server{address: address, connection: connection, reader: reader}\n}", "func New(sigs chan os.Signal) *Server {\n\ts := &Server{mux: http.NewServeMux(), sigs: sigs}\n\n\tif s.logger == nil {\n\t\ts.logger = log.New(os.Stdout, \"\", 0)\n\t}\n\n\ts.db = store.NewStore()\n\n\ts.mux.HandleFunc(\"/\", s.latencyMiddleware(s.index))\n\ts.mux.HandleFunc(\"/hash/\", s.latencyMiddleware(s.hash))\n\ts.mux.HandleFunc(\"/shutdown/\", s.latencyMiddleware(s.shutdown))\n\ts.mux.HandleFunc(\"/stats/\", s.stats)\n\n\treturn s\n}", "func New(opts ...Option) *Server {\n\tsrv := &Server{\n\t\tserver: goproxy.NewProxyHttpServer(),\n\t\tdialer: &net.Dialer{\n\t\t\tTimeout: 30 * time.Second,\n\t\t\tKeepAlive: 30 * time.Second,\n\t\t},\n\t}\n\tsrv.server.Tr = &http.Transport{\n\t\tDialContext: srv.dialContext,\n\t\tTLSHandshakeTimeout: 10 * time.Second,\n\t\tResponseHeaderTimeout: 10 * time.Second,\n\t\tExpectContinueTimeout: 1 * time.Second,\n\t}\n\tsrv.server.ConnectDial = srv.dial\n\tfor _, opt := range opts {\n\t\topt(srv)\n\t}\n\t// srv.server.OnRequest().HandleConnectFunc(srv.onConnect)\n\tsrv.server.OnRequest().DoFunc(srv.onRequest)\n\tsrv.server.OnResponse().DoFunc(srv.onResponse)\n\treturn srv\n}", "func New(cfg *Config) *Server {\n\tdefaultConfig(cfg)\n\tlog.Printf(\"%+v\\n\", cfg)\n\treturn &Server{\n\t\tcfg: cfg,\n\t\thandlers: make([]connectionHandler, cfg.Count),\n\t\tevents: make(chan eventWithData, cfg.Count),\n\t}\n}", "func New(opts ...Option) (*Server, error) {\n\tconfig, err := buildConfig(opts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlogger := config.logger\n\tif 
logger == nil {\n\t\tvar err error\n\n\t\tlogger, err = log.New(log.WithName(config.name))\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tserver := &http.Server{\n\t\tAddr: config.address,\n\t\tHandler: config.handler,\n\t}\n\n\treturn &Server{\n\t\tname: config.name,\n\t\tlogger: logger,\n\t\tserver: server,\n\t}, nil\n}", "func New(appStateUpdater env.AppStateUpdater, config libkbfs.Config) (\n\ts *Server, err error) {\n\tlogger := config.MakeLogger(\"HTTP\")\n\ts = &Server{\n\t\tappStateUpdater: appStateUpdater,\n\t\tconfig: config,\n\t\tlogger: logger,\n\t\tvlog: config.MakeVLogger(logger),\n\t}\n\tif s.fs, err = lru.New(fsCacheSize); err != nil {\n\t\treturn nil, err\n\t}\n\tif err = s.restart(); err != nil {\n\t\treturn nil, err\n\t}\n\tctx, cancel := context.WithCancel(context.Background())\n\tgo s.monitorAppState(ctx)\n\ts.cancel = cancel\n\tlibmime.Patch(additionalMimeTypes)\n\treturn s, nil\n}", "func New(swaggerStore string, hugoStore string, runMode string, externalIP string, hugoDir string) (*Server, error) {\n\t// Return a new struct\n\treturn &Server{\n\t\tServiceMap: make(map[string]string),\n\t\tSwaggerStore: swaggerStore,\n\t\tHugoStore: hugoStore,\n\t\tRunMode: runMode,\n\t\tExternalIP: externalIP,\n\t\tHugoDir: hugoDir,\n\t}, nil\n}", "func New(opt *common.Options) error {\n\tif opt.Address != \"\" {\n\t\tif opt.Daemon {\n\t\t\treturn daemon.New(opt)\n\t\t}\n\n\t\tserver.Run(opt)\n\t} else if opt.Check {\n\t\tif isConnected() {\n\t\t\tchecker.Do(opt)\n\n\t\t\tif opt.Output != \"\" {\n\t\t\t\tdefer opt.Result.Close()\n\t\t\t}\n\t\t} else {\n\t\t\treturn errors.New(\"no internet connection\")\n\t\t}\n\t} else {\n\t\treturn errors.New(\"no action needed\")\n\t}\n\n\treturn nil\n}", "func New(c *Config, logger *zap.Logger) *Server {\n\treturn &Server{\n\t\tlogger,\n\t}\n}", "func New(configs *Configs, router *mux.Router) Server {\n\tserver := &ServerImpl{\n\t\tConfigs: configs,\n\t\tRouter: router,\n\t\tHTTPServer: 
newHTTPServer(configs, router),\n\t\tpingEndpoint: configs.PingEndpoint,\n\t\thealthcheckEndpoint: configs.HealthcheckEndpoint,\n\t\tshutdownEndpoint: configs.ShutdownEndpoint,\n\t}\n\tif server.pingEndpoint == \"\" {\n\t\tserver.pingEndpoint = DefaultPingEndpoint\n\t}\n\tif server.healthcheckEndpoint == \"\" {\n\t\tserver.healthcheckEndpoint = DefaultHealthcheckEndpoint\n\t}\n\tif server.shutdownEndpoint == \"\" {\n\t\tserver.shutdownEndpoint = DefaultShutdownEndpoint\n\t}\n\n\trouter.Path(server.pingEndpoint).Name(server.pingEndpoint).Methods(\"GET\").HandlerFunc(server.handleFuncPing)\n\trouter.Path(server.healthcheckEndpoint).Name(server.healthcheckEndpoint).Methods(\"GET\").HandlerFunc(server.handleFuncHealthcheck)\n\trouter.Path(server.shutdownEndpoint).Name(server.shutdownEndpoint).Methods(\"GET\").HandlerFunc(server.handleFuncShutdown)\n\n\treturn server\n}", "func New(config *configuration.Config, vs *library.Library, auth *auth.Manager) *Server {\n\treturn &Server{\n\t\tBase: subapp.NewBase(AppName),\n\t\tconfig: config,\n\t\tlibrary: vs,\n\t\tauthManager: auth,\n\t\trender: render.New(),\n\t}\n}", "func New(storage Storage) Server {\n\ts := &server{\n\t\tstorage: storage,\n\t\tr: chi.NewMux(),\n\t}\n\ts.routes()\n\treturn s\n}", "func New(url string) (*Handler, error) {\n\tc, err := golf.NewClient()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = c.Dial(url)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tl, err := c.NewLogger()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Handler{\n\t\tlogger: l,\n\t\tclient: c,\n\t}, nil\n}", "func New(url string) (*Handler, error) {\n\tc, err := golf.NewClient()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = c.Dial(url)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tl, err := c.NewLogger()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Handler{\n\t\tlogger: l,\n\t\tclient: c,\n\t}, nil\n}", "func New(obs *observance.Obs, CORSOrigins string, timeout ...string) (*echo.Echo, 
chan struct{}, error) {\n\ttimeoutDuration := defaultTimeout\n\tif len(timeout) > 0 {\n\t\tparsedTimeout, err := time.ParseDuration(timeout[0])\n\t\tif err != nil {\n\t\t\treturn nil, nil, fmt.Errorf(\"timeout could not be parsed: %w\", err)\n\t\t}\n\t\ttimeoutDuration = parsedTimeout\n\t}\n\n\techoServer := echo.New()\n\n\t// Configure Echo.\n\techoServer.HideBanner = true\n\techoServer.HidePort = true\n\n\techoServer.Server.ReadTimeout = timeoutDuration\n\techoServer.Server.WriteTimeout = timeoutDuration\n\techoServer.Server.ReadHeaderTimeout = timeoutDuration\n\tdefaultIdleTimeout := 120 * time.Second\n\tif defaultIdleTimeout > timeoutDuration {\n\t\techoServer.Server.IdleTimeout = defaultIdleTimeout\n\t}\n\t// By default, the value of ReadTimeout is used.\n\t// See https://pkg.go.dev/net/http#Server\n\n\techoServer.HTTPErrorHandler = HTTPErrorHandler(obs)\n\techoServer.Binder = &bindValidator{}\n\techoServer.Validator = NewValidator()\n\techoServer.Logger = Logger{obs.Logger}\n\techoServer.DisableHTTP2 = true\n\n\techoServer.Pre(middleware.RemoveTrailingSlash())\n\techoServer.Use(middleware.Secure())\n\techoServer.Use(middleware.Recover())\n\n\tif CORSOrigins != \"\" {\n\t\torigins := strings.Split(CORSOrigins, \",\")\n\t\techoServer.Use(middleware.CORSWithConfig(middleware.CORSConfig{\n\t\t\tAllowOrigins: origins,\n\t\t}))\n\t}\n\n\t// Set up graceful shutdown.\n\tconnsClosed := make(chan struct{})\n\tsc := make(chan os.Signal, 1)\n\tgo func() {\n\t\ts := <-sc\n\t\tobs.Logger.WithField(\"signal\", s).Warn(\"shutting down gracefully\")\n\n\t\tc, cancel := context.WithTimeout(context.Background(), 9*time.Second)\n\t\tdefer cancel()\n\n\t\terr := echoServer.Shutdown(c)\n\t\tif err != nil {\n\t\t\tobs.Logger.Error(err)\n\t\t}\n\t\tclose(connsClosed)\n\t}()\n\tsignal.Notify(sc, syscall.SIGINT, syscall.SIGTERM)\n\n\treturn echoServer, connsClosed, nil\n}", "func New(h http.Handler, opts *Options) *Server {\n\tsrv := &Server{handler: h}\n\tif opts != nil 
{\n\t\tsrv.reqlog = opts.RequestLogger\n\t\tsrv.te = opts.TraceExporter\n\t\tfor _, c := range opts.HealthChecks {\n\t\t\tsrv.healthHandler.Add(c)\n\t\t}\n\t\tsrv.sampler = opts.DefaultSamplingPolicy\n\t\tsrv.driver = opts.Driver\n\t}\n\treturn srv\n}", "func New(s *service.Service) (engine *bm.Engine) {\n\tvar (\n\t\thc struct {\n\t\t\tServer *bm.ServerConfig\n\t\t}\n\t)\n\tif err := paladin.Get(\"http.toml\").UnmarshalTOML(&hc); err != nil {\n\t\tif err != paladin.ErrNotExist {\n\t\t\tpanic(err)\n\t\t}\n\t}\n\tsvc = s\n\tengine = bm.DefaultServer(hc.Server)\n\tinitRouter(engine)\n\tif err := engine.Start(); err != nil {\n\t\tpanic(err)\n\t}\n\treturn\n}", "func New(body string, statusCode int) *FakeHTTPServer {\n\treturn &FakeHTTPServer{\n\t\tserver: httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tw.Write([]byte(body))\n\t\t\tw.WriteHeader(statusCode)\n\t\t})),\n\t}\n}", "func New(t *testing.T, requests []ExpectedRequest) *httptest.Server {\n\th := mockHandler(t, requests)\n\treturn httptest.NewServer(h)\n}", "func New(t *testing.T, requests []ExpectedRequest) *httptest.Server {\n\th := mockHandler(t, requests)\n\treturn httptest.NewServer(h)\n}", "func New(e *todo.Endpoints, uh goagrpc.UnaryHandler) *Server {\n\treturn &Server{\n\t\tGetH: NewGetHandler(e.Get, uh),\n\t\tListH: NewListHandler(e.List, uh),\n\t\tAddH: NewAddHandler(e.Add, uh),\n\t\tRemoveH: NewRemoveHandler(e.Remove, uh),\n\t}\n}", "func New(pipeName string, hnd daemon.Handler) *Server {\n\treturn nil\n}", "func New(auth0Creds auth0creds.Auth0Creds, mongoDBURL string, port int) TradingPostServer {\n\treturn &httpServer{\n\t\tauth0Creds: auth0Creds,\n\t\tport: port,\n\t\tmongoDBURL: mongoDBURL,\n\t}\n}", "func New(opts ...Option) (*Server, error) {\n\n\t// init server\n\tserver := &Server{\n\t\trouter: httprouter.New(),\n\t}\n\n\t// init cors\n\tcors := cors.AllowAll()\n\n\t// add middleware for all handlers\n\tserver.srv.Handler = 
cors.Handler(WithLog(server.router))\n\n\t// append options of the server\n\tfor _, opt := range opts {\n\t\topt(server)\n\t}\n\n\t// add healthcheck endpoint\n\tserver.Handle(http.MethodGet, \"/\", healthcheck)\n\n\treturn server, nil\n}", "func New() *Server {\n\treturn &Server{make([]*websocket.Conn, 0, 10)}\n}", "func New(sto store.Service) *server {\n\ts := &server{sto: sto}\n\n\trouter := mux.NewRouter()\n\n\trouter.Handle(\"/todo\", allowedMethods(\n\t\t[]string{\"OPTIONS\", \"GET\", \"POST\"},\n\t\thandlers.MethodHandler{\n\t\t\t\"GET\": http.HandlerFunc(s.getTodos),\n\t\t\t\"POST\": http.HandlerFunc(s.createTodo),\n\t\t}))\n\n\trouter.Handle(\"/todo/{id}\", idMiddleware(allowedMethods(\n\t\t[]string{\"OPTIONS\", \"GET\", \"PUT\", \"PATCH\", \"DELETE\"},\n\t\thandlers.MethodHandler{\n\t\t\t\"GET\": http.HandlerFunc(s.getTodo),\n\t\t\t\"PUT\": http.HandlerFunc(s.putTodo),\n\t\t\t\"PATCH\": http.HandlerFunc(s.patchTodo),\n\t\t\t\"DELETE\": http.HandlerFunc(s.deleteTodo),\n\t\t})))\n\n\ts.handler = limitBody(defaultHeaders(router))\n\n\treturn s\n}", "func New(e *calc.Endpoints, uh goagrpc.UnaryHandler) *Server {\n\treturn &Server{\n\t\tAddH: NewAddHandler(e.Add, uh),\n\t}\n}", "func newServer(config Config) *http.Server {\n\treturn &http.Server{\n\t\tAddr: fmt.Sprintf(\":%s\", config.Port),\n\t\tHandler: newRouter(config),\n\t}\n}", "func New(bind string) *Server {\n\treturn &Server{bind}\n}", "func New(proxy autocomplete.AVSProxy, cache autocomplete.Database) *Server {\n\treturn &Server{\n\t\tproxy: proxy,\n\t\tcache: cache,\n\t\tlogger: log.New(ioutil.Discard, \"\", 0),\n\t}\n}", "func New(cfg *config.Config, cache cache.Cacher, tr *http.Transport) *HttpHandler {\n\treturn &HttpHandler{handler.Handler{cache, cfg, tr}}\n}", "func New(c *config.Config) *Server {\n\tdefaultAppPort := 8080\n\n\t// Set the port the webserver will listen on\n\tif c.AppPort == 0 {\n\t\tif envAppPort := os.Getenv(\"APP_PORT\"); envAppPort != \"\" {\n\t\t\tvar err 
error\n\t\t\tc.AppPort, err = strconv.Atoi(envAppPort)\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatalln(\"An error occurred while trying to read the APP_PORT environment variable:\", err)\n\t\t\t}\n\t\t} else {\n\t\t\tc.AppPort = defaultAppPort\n\t\t}\n\t}\n\n\t// Set the redirect URL\n\tc.OAuth2Config.RedirectURL = fmt.Sprintf(\"http://localhost:%d%s\", c.AppPort, callbackURI)\n\tif config.DebugMode {\n\t\tlog.Println(\"RedirectURL:\", c.OAuth2Config.RedirectURL)\n\t}\n\n\ts := &Server{\n\t\tconfig: c,\n\t\tcontext: context.Background(),\n\t\thttpServer: &http.Server{Addr: fmt.Sprintf(\":%d\", c.AppPort)},\n\t}\n\n\thttp.HandleFunc(\"/\", s.handleIndexPage)\n\thttp.HandleFunc(\"/organisation\", s.handleOrganisationPage)\n\thttp.HandleFunc(\"/invoices\", s.handleInvoicePage)\n\thttp.HandleFunc(\"/refresh\", s.handleTokenRefreshRequest)\n\thttp.HandleFunc(\"/segfault\", s.handleSegfaultRequest)\n\thttp.HandleFunc(loginPath, s.redirectToAuthorisationEndpoint)\n\thttp.HandleFunc(callbackURI, s.handleOAuthCallback)\n\n\treturn s\n}", "func New(addr string) *Server {\n if addr == \"\" {\n addr = DefaultAddr\n }\n return &Server{\n addr: DefaultAddr,\n ds: newDataStore(),\n done: make(chan struct{}),\n }\n}", "func New(addr string, port int) *Server {\n\tctx, cancel := context.WithCancel(context.Background())\n\treturn &Server{\n\t\taddr: addr,\n\t\tport: port,\n\t\tctx: ctx,\n\t\tctxCancel: cancel,\n\t}\n}", "func New(host, port string) *Yeelight {\n\ty := &Yeelight{host: host, port: port}\n\treturn y\n}", "func New(b *board.Board, ps [2]match.Player) *HTTPServer {\n ms := new(vector.Vector)\n ms.Push(match.New(b, ps))\n return &HTTPServer{ms}\n}", "func New(ctx context.Context, conf Config) *Server {\n\tsvc := &rpc.Service{}\n\n\ttwirpServer := garo.NewAgentConfigurationServiceServer(svc, nil)\n\tapi := configureAPI(twirpServer, conf.Logger)\n\n\tsrv := http.Server{\n\t\tAddr: conf.Addr,\n\t\tHandler: api,\n\t}\n\n\treturn &Server{&srv, ctx, conf}\n}", "func New(port 
string) *Server {\n\treturn &Server{\n\t\tport: port,\n\t\tmanager: endly.New(),\n\t}\n}", "func New(e *step.Endpoints, uh goagrpc.UnaryHandler) *Server {\n\treturn &Server{\n\t\tListH: NewListHandler(e.List, uh),\n\t\tAddH: NewAddHandler(e.Add, uh),\n\t\tRemoveH: NewRemoveHandler(e.Remove, uh),\n\t\tUpdateH: NewUpdateHandler(e.Update, uh),\n\t}\n}", "func NewHTTP() *HTTP {\n\treturn HTTPPool.Get().(*HTTP)\n}", "func New(database *gorm.DB, logger *lumberjack.Logger) (*Server, error) {\n\tserver := Server{echo.New()}\n\n\t// Middleware\n\tserver.HTTPErrorHandler = func(err error, context echo.Context) {\n\t\tmessage := err.Error()\n\t\tstatusCode := context.Response().Status\n\t\tcontext.JSON(statusCode, map[string]map[string]interface{}{ // sub level mapping\n\t\t\t\"error\": {\n\t\t\t\t\"message\": message,\n\t\t\t},\n\t\t})\n\t}\n\n\tserver.Pre(middleware.RemoveTrailingSlash())\n\tserver.Use(middleware.Logger())\n\tserver.Use(middleware.Recover())\n\tserver.Use(middleware.CORS())\n\tserver.Use(middleware.LoggerWithConfig(middleware.LoggerConfig{Output: logger})) // Server header\n\n\t// Setup Controller\n\tInitializeControllers(database)\n\n\t// Setup Routers\n\tInitializeRouters(server)\n\n\treturn &server, nil\n}", "func New() *Cmd {\n\tapp := cli.NewApp()\n\tapp.Name = \"server\"\n\tapp.Author = \"\"\n\tapp.Usage = \"server\"\n\tapp.Description = \"A server for a chat app\"\n\tapp.Flags = globalFlags\n\n\tapp.Before = func(c *cli.Context) error {\n\t\treturn nil\n\t}\n\n\tapp.Action = func(c *cli.Context) error {\n\t\taddr := c.String(\"address\")\n\t\techo := c.Bool(\"echo\")\n\n\t\tsrv := server.NewServer(addr, echo)\n\t\tsrv.Run()\n\t\treturn nil\n\t}\n\n\treturn &Cmd{App: app}\n}", "func New(listenAddr string, apiConfig APIConfig) (*Server, error) {\n\tserver := &Server{\n\t\tlistenAddr: listenAddr,\n\t\tapiConfig: apiConfig,\n\t}\n\n\tc := cors.New(cors.Options{\n\t\tAllowedOrigins: []string{\"http://127.0.0.1:5353\"},\n\t})\n\n\t// setup 
server\n\tserver.router = httprouter.New()\n\thandlers := alice.New(\n\t\tc.Handler,\n\t\t//context.ClearHandler,\n\t\t//addContextHandler,\n\t\tmakeTimeoutHandler(server.apiConfig.APITimeout),\n\t\tloggingHandler,\n\t\trecoverHandler,\n\t)\n\t// serve static content\n\tstatic := http.StripPrefix(\"/static/\", http.FileServer(http.Dir(\"static\")))\n\tserver.router.Handler(http.MethodGet, \"/static/*path\", handlers.Then(neuterDirectoryListing(static)))\n\n\t// setup robots.txt\n\tserver.router.Handler(http.MethodGet, \"/robots.txt\", handlers.ThenFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\thttp.ServeFile(w, r, \"static/robots.txt\")\n\t}))\n\n\t// add rate limiting after static handler\n\tserver.handlers = handlers.Append(makeThrottleHandler(\n\t\tserver.apiConfig.APIRequestsPerMinute,\n\t\tserver.apiConfig.APIRequestsBurst,\n\t\tserver.apiConfig.APIMaxRequestHistory,\n\t))\n\n\t//server.router.NotFound = notFoundJSON\n\treturn server, nil\n}", "func New(mws ...Middleware) *Router {\n\tr := &Router{\n\t\tparent: nil,\n\t\thostrm: newHostMatcher(),\n\t\tmiddlewares: Middlewares{},\n\t\tnamedMiddlewares: make(map[string]Middlewares),\n\t\tpool: newCtxPool(),\n\t}\n\tr.Use(mws...)\n\tr.Configure(\n\t\tWithLogger(lionLogger),\n\t\tWithServer(&http.Server{\n\t\t\tReadTimeout: 5 * time.Second,\n\t\t\tWriteTimeout: 10 * time.Second,\n\t\t}),\n\t)\n\treturn r\n}", "func New(bindAddress string, tableauEndpoint string) *http.Server {\n\treturn &http.Server{\n\t\tAddr: bindAddress,\n\t\tHandler: TabAuth{\n\t\t\t&Client{tableauEndpoint, &http.Client{}},\n\t\t\taccounts(),\n\t\t},\n\t}\n}", "func New(address string) *server {\n\tlog.Println(\"Creating server with address\", address)\n\tserver := &server{\n\t\taddress: address,\n\t}\n\n\tserver.SetNewClientCB(func(c *Client) {})\n\tserver.SetNewMessageCB(func(c *Client, message string) {})\n\tserver.SetClientConnectionClosedCB(func(c *Client, err error) {})\n\n\treturn server\n}", "func newTestServer(logStore 
logstore.LogStore) *HTTPServer {\n\t// note: address doesn't matter since we will use httptest server\n\tserver := NewHTTP(&Config{BindAddress: \"127.0.0.1:8080\"}, logStore)\n\treturn server\n}", "func New(st Storage) *Server {\n\tsrv := &Server{\n\t\tst: st,\n\t}\n\tsrv.setupRouter()\n\treturn srv\n}" ]
[ "0.71665865", "0.71398956", "0.7123811", "0.7121933", "0.7121786", "0.708344", "0.7081363", "0.70667744", "0.7039178", "0.7034454", "0.6998099", "0.6891638", "0.6876491", "0.68591356", "0.6854021", "0.6827344", "0.6814313", "0.6813561", "0.68123925", "0.6777675", "0.677665", "0.6767749", "0.6760412", "0.67491674", "0.6734992", "0.67346007", "0.6726607", "0.67202526", "0.6713791", "0.6713051", "0.67102855", "0.6682929", "0.66825616", "0.6682097", "0.66684717", "0.66462255", "0.6629878", "0.6626528", "0.66239", "0.66216964", "0.6619436", "0.66189593", "0.65972155", "0.65954226", "0.65932244", "0.658441", "0.6583331", "0.65801144", "0.6570392", "0.65577596", "0.6549561", "0.653755", "0.6535563", "0.6518547", "0.6507647", "0.6497342", "0.6494998", "0.64792573", "0.6474411", "0.64672506", "0.6455421", "0.6452847", "0.64485997", "0.6447708", "0.6447034", "0.6447034", "0.64434636", "0.6441394", "0.64255023", "0.6424299", "0.64087105", "0.64087105", "0.64084053", "0.6405935", "0.64028174", "0.6399553", "0.63860184", "0.63747126", "0.6371757", "0.63711894", "0.6352923", "0.6351475", "0.6350386", "0.63497394", "0.6345454", "0.63443565", "0.6332934", "0.63260514", "0.63169545", "0.6300138", "0.6296809", "0.62936187", "0.62829167", "0.6282782", "0.6280513", "0.62698925", "0.6251708", "0.62477124", "0.62373495", "0.6233194" ]
0.7557735
0
Start the web server
func (s *echoServer) Start() { e := s.Instance // Currently this server is only used for the core API so the logic below // is fine here. If we need to expand this to be used in multiple locations // the below can be done via first-class functions e.Pre(middleware.HTTPSRedirect()) e.Use(middleware.RequestID()) e.Use(middleware.Recover()) e.Use(middleware.LoggerWithConfig(middleware.LoggerConfig{ Output: logger.Instance().Logger().Writer(), })) router.Setup(e) port := utils.GetVariable(consts.API_PORT) port = fmt.Sprintf(":%s", port) certDir := utils.GetVariable(consts.CERT_DIR) e.Logger.Fatal(e.StartTLS(port, fmt.Sprintf("%s/%s", certDir, utils.GetVariable(consts.API_CERT)), fmt.Sprintf("%s/%s", certDir, utils.GetVariable(consts.API_KEY)))) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Start() {\n\twebServer.Engine.Run(\":\" + strconv.Itoa(cfg.Read().App.WebServerPort))\n}", "func StartWebserver() {\n\tlog.Fatal(http.ListenAndServe(\":8080\", nil))\n}", "func (ws *WebServer) Start() {\n\tlog.Logger.Info(\"Launching webserver\")\n\tlastRun := &run.Result{}\n\n\ttemplate, err := sysutil.CreateTemplate(serverTemplatePath)\n\tif err != nil {\n\t\tws.Errors <- err\n\t\treturn\n\t}\n\n\tm := mux.NewRouter()\n\taddStatusEndpoints(m)\n\tstatusPageHandler := &StatusPageHandler{\n\t\ttemplate,\n\t\tlastRun,\n\t\tws.Clock,\n\t}\n\thttp.Handle(\"/\", statusPageHandler)\n\tm.PathPrefix(\"/static/\").Handler(http.StripPrefix(\"/static/\", http.FileServer(http.Dir(\"/static\"))))\n\tforceRunHandler := &ForceRunHandler{\n\t\tws.RunQueue,\n\t}\n\tm.PathPrefix(\"/api/v1/forceRun\").Handler(forceRunHandler)\n\tm.PathPrefix(\"/\").Handler(statusPageHandler)\n\n\tgo func() {\n\t\tfor result := range ws.RunResults {\n\t\t\t*lastRun = result\n\t\t}\n\t}()\n\n\terr = http.ListenAndServe(fmt.Sprintf(\":%v\", ws.ListenPort), m)\n\tws.Errors <- err\n}", "func StartWeb(hcfg Cfg, appcfg []Cfg) {\n\thostCfg = hcfg\n\tvar port = hcfg.Port\n\tc := cors.New(cors.Options{\n\t\tAllowedOrigins: []string{\"*\"},\n\t})\n\n\tlog.Println(\"Port .. 
\" + port)\n\trouter := http.NewServeMux()\n\trouter.Handle(\"/\"+hcfg.Project+\"/file/\", GetResource(hcfg))\n\tfor c := range appcfg {\n\t\trouter.Handle(\"/\"+appcfg[c].Project+\"/\", AppIndex(appcfg[c]))\n\t\trouter.Handle(\"/\"+appcfg[c].Project+\"/file/\", GetResource(appcfg[c]))\n\t\trouter.Handle(\"/\"+appcfg[c].Project+\"/dload\", Dload(appcfg[c]))\n\t}\n\trouter.Handle(\"/\", Index())\n\trouter.Handle(\"/config\", getConfig())\n\trouter.Handle(\"/validate\", Validate())\n\trouter.Handle(\"/transform\", Transform())\n\trouter.Handle(\"/verify\", DocVerify())\n\trouter.Handle(\"/rebuild\", Rebuild())\n\trouter.Handle(\"/rebuildall\", RebuildAll())\n\tflag.StringVar(&listenAddr, \"listen-addr\", port, \"server listen address\")\n\tflag.Parse()\n\tlogger := log.New(os.Stdout, \"http: \", log.LstdFlags)\n\tlogger.Println(\"Starting HTTP Server. .. \")\n\tnextRequestID := func() string {\n\t\treturn fmt.Sprintf(\"%d\", time.Now().UnixNano())\n\t}\n\tserver := &http.Server{\n\t\tAddr: listenAddr,\n\t\tHandler: tracing(nextRequestID)(logging(logger)(c.Handler(router))),\n\t\tErrorLog: logger,\n\t\tReadTimeout: 5 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tIdleTimeout: 15 * time.Second,\n\t}\n\tdone := make(chan bool)\n\tquit := make(chan os.Signal, 1)\n\tsignal.Notify(quit, os.Interrupt)\n\tgo func() {\n\t\t<-quit\n\t\tlogger.Println(\"Server is shutting down...\")\n\t\tatomic.StoreInt32(&healthy, 0)\n\n\t\tctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)\n\t\tdefer cancel()\n\n\t\tserver.SetKeepAlivesEnabled(false)\n\t\tif err := server.Shutdown(ctx); err != nil {\n\t\t\tlogger.Fatalf(\"Could not gracefully shutdown the server: %v\\n\", err)\n\t\t}\n\t\tclose(done)\n\t}()\n\tlogger.Println(\"Server is ready to handle requests at\", listenAddr)\n\tatomic.StoreInt32(&healthy, 1)\n\tif err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {\n\t\tlogger.Fatalf(\"Could not listen on %s: %v\\n\", listenAddr, 
err)\n\t}\n\t<-done\n\tlogger.Println(\"Server stopped\")\n}", "func (w *Webserver) Start() error {\n\n\t// listenAndServe the server\n\tgo func() {\n\t\tw.logger.Infof(\"Http server listening at %d!\", w.config.Port)\n\t\terr := w.listenAndServe()\n\t\tif err != nil && err != http.ErrServerClosed {\n\t\t\tw.logger.Errorw(fmt.Sprintf(\"webserver listening at port [%v] stopped\", w.config.Port), \"error\", err.Error())\n\t\t}\n\t}()\n\n\treturn nil\n}", "func StartWebServer(port int) {\n\tsetuphandlers()\n\tportstring := fmt.Sprintf(\":%d\", port)\n\tfmt.Println(\"Running on \", portstring)\n\tlog.Fatal(http.ListenAndServe(portstring, nil))\n}", "func (web *WebServer) Start() {\n\tlog.Println(http.ListenAndServe(web.listen, web.router))\n}", "func main() {\n\twebserver.ServerStart()\n\twebserver.ServerRequest()\n}", "func startWebserver() {\n\tip := \"localhost:1025\"\n\n\thttp.Handle(\"/static/\", http.StripPrefix(\"/static/\", http.FileServer(http.Dir(\"./static\"))))\n\t//router.GET(\"/\", IndexHandler)\n\thttp.HandleFunc(\"/\", IndexHandler)\n\thttp.HandleFunc(\"/quiz\", QuizHandler)\n\thttp.HandleFunc(\"/result\", ResultHandler)\n\thttp.HandleFunc(\"/squish\", SquishHandler)\n\thttp.HandleFunc(\"/rowCollapse\", RowCollapseHandler)\n\thttp.HandleFunc(\"/scroll\", ScrollHandler)\n\thttp.HandleFunc(\"/click\", ClickHandler)\n\n\tfmt.Println(\"running on \" + ip)\n\tlog.Fatal(http.ListenAndServe(ip, nil))\n}", "func StartWebServer(pubSub *pubsub.PubSub) {\n\t// setup web server\n\te := echo.New()\n\te.HideBanner = true\n\te.Use(middleware.Logger())\n\n\t// disable CORS on the web server if desired\n\tdisableCORS = viper.GetBool(\"server_settings.disablecors\")\n\tif disableCORS {\n\t\tlogger.Warn(\"Running in disabled CORS mode. This is very dangerous! 
Be careful!\")\n\t\te.Use(middleware.CORSWithConfig(middleware.CORSConfig{\n\t\t\tAllowOrigins: []string{\"*\"},\n\t\t\tAllowHeaders: []string{echo.HeaderOrigin, echo.HeaderContentType, echo.HeaderAccept},\n\t\t}))\n\t}\n\n\tc, _ := handlers.NewContainer()\n\n\t// GetLogstationName - Get Logstation Name\n\te.GET(\"/settings/logstation-name\", c.GetLogstationName)\n\n\t// GetSettingsSyntax - Get Syntax Colors\n\te.GET(\"/settings/syntax\", c.GetSettingsSyntax)\n\n\t// package up the built web files and serve them to the clients\n\tfsys, err := fs.Sub(webServerFiles, \"web/dist\")\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error loading the web files into the server. error msg: %s\", err))\n\t}\n\tfileHandler := http.FileServer(http.FS(fsys))\n\te.GET(\"/*\", echo.WrapHandler(fileHandler))\n\n\t// pass message broker channel into websocket handler\n\twsHandlerChan := func(c echo.Context) error {\n\t\treturn WebSocketHandler(c, pubSub)\n\t}\n\te.GET(\"/ws\", wsHandlerChan)\n\n\t// start the web server\n\te.Logger.Fatal(e.Start(viper.GetString(\"server_settings.webserveraddress\") + \":\" + viper.GetString(\"server_settings.webserverport\")))\n}", "func StartWebServer(port string) {\n\tlogrus.Infof(\"Starting Web Server Port[%v] \\n\", port)\n\n\t// init routes\n\tr := NewRouter()\n\thttp.Handle(\"/\", r)\n\n\terr := http.ListenAndServe(\":\" + port, nil)\n\tif err != nil {\n\t\tlogrus.Printf(\"Error starting server %v\", err.Error())\n\t}\n\n}", "func Start(port int32) {\n\tvar (\n\t\taddress string\n\t\t//\tclientFS = http.Dir(\"/webclient\")\n\t\terr error\n\t)\n\taddress = fmt.Sprintf(\":%d\", port)\n\thttp.Handle(\"/client\", http.StripPrefix(\"/client\", http.FileServer(http.Dir(\"./client\"))))\n\tlog.Infof(\"Starting webserver on port %d\", port)\n\terr = http.ListenAndServe(address, nil)\n\tlog.Fatal(err)\n}", "func (ws *WebServer) Start() error {\n\tif ws.server != nil {\n\t\treturn fmt.Errorf(\"WebServer already 
running\")\n\t}\n\n\tlog.Logger(\"webserver\").Info(\"Launching\")\n\n\ttemplatePath := ws.TemplatePath\n\tif templatePath == \"\" {\n\t\ttemplatePath = defaultServerTemplatePath\n\t}\n\ttemplate, err := createTemplate(templatePath)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tm := mux.NewRouter()\n\taddStatusEndpoints(m)\n\tstatusPageHandler := &StatusPageHandler{\n\t\tws.Authenticator,\n\t\tws.Clock,\n\t\tws.DiffURLFormat,\n\t\tws.KubeClient,\n\t\ttemplate,\n\t\tws.StatusTimeout,\n\t}\n\tforceRunHandler := &ForceRunHandler{\n\t\tws.Authenticator,\n\t\tws.KubeClient,\n\t\tws.RunQueue,\n\t}\n\tm.PathPrefix(\"/static/\").Handler(http.StripPrefix(\"/static/\", http.FileServer(http.Dir(\"static\"))))\n\tm.PathPrefix(\"/api/v1/forceRun\").Handler(forceRunHandler)\n\tm.PathPrefix(\"/\").Handler(statusPageHandler)\n\n\tws.server = &http.Server{\n\t\tAddr: fmt.Sprintf(\":%v\", ws.ListenPort),\n\t\tHandler: m,\n\t\tErrorLog: log.Logger(\"http.Server\").StandardLogger(nil),\n\t}\n\n\tgo func() {\n\t\tif err = ws.server.ListenAndServe(); err != nil {\n\t\t\tif !errors.Is(err, http.ErrServerClosed) {\n\t\t\t\tlog.Logger(\"webserver\").Error(\"Shutdown\", \"error\", err)\n\t\t\t}\n\t\t\tlog.Logger(\"webserver\").Info(\"Shutdown\")\n\t\t}\n\t}()\n\n\treturn nil\n}", "func Start(addr string) {\n\tf = NewServer()\n\thttp.HandleFunc(\"/bayeux\", serveWs)\n\thttp.HandleFunc(\"/\", serveOther)\n\n\t// serve static assets workaround\n\t//http.Handle(\"/file/\", http.StripPrefix(\"/file\", http.FileServer(http.Dir(\"/Users/paul/go/src/github.com/pcrawfor/fayego/runner\"))))\n\n\terr := http.ListenAndServe(addr, nil)\n\tif err != nil {\n\t\tfmt.Println(\"Fatal error \", err.Error())\n\t\tos.Exit(1)\n\t}\n}", "func (w *web) Run() {\n\tw.ready()\n\n\taddr := fmt.Sprintf(\"%s:%s\", w.Host, w.Port)\n\tLogger.Info(\"Starting web server at: %s\", addr)\n\tw.instance.Run(standard.New(addr))\n}", "func (web Web) Run() {\n\tlog.Println(\"Starting webserver\")\n\n\t//Serve static files\n\tfs := 
http.FileServer(http.Dir(\"static/voipathon\"))\n\thttp.Handle(\"/\", fs)\n\n\tfsTestClient := http.FileServer(http.Dir(\"static/testclient\"))\n\thttp.Handle(\"/testclient/\", http.StripPrefix(\"/testclient/\", fsTestClient))\n\n\thttp.HandleFunc(\"/ws\", web.registerClient)\n\tlog.Println(\"Waiting for connections\")\n\tlog.Fatal(http.ListenAndServe(\":4242\", nil))\n}", "func StartWebServer(music *Music, address string) {\n\tvar err error\n\tvar ip string\n\tif len(address) == 0 {\n\t\tip = \"127.0.0.1\"\n\t}\n\tparts := strings.Split(address, \":\")\n\tif len(parts[0]) > 0 {\n\t\tip = parts[0]\n\t}\n\tport := 4444\n\tif len(parts) > 1 && len(parts[1]) > 0 {\n\t\tport, err = strconv.Atoi(parts[1])\n\t\tif err != nil {\n\t\t\tfmt.Fprintln(os.Stderr, \"Invalid port number:\", parts[1])\n\t\t\tos.Exit(1)\n\t\t}\n\t}\n\n\tbind := fmt.Sprintf(\"%s:%d\", ip, port)\n\tif len(ip) == 0 {\n\t\tfmt.Println(\"Listening on port\", port)\n\t} else {\n\t\tfmt.Printf(\"Listening on http://%s/\\n\", bind)\n\t}\n\n\tweb := NewWeb(music)\n\tmusic.quietMode = true\n\terr = http.ListenAndServe(bind, web)\n\tif err != nil {\n\t\tfmt.Fprintln(os.Stderr, \"Unable to start web server:\", err)\n\t\tos.Exit(1)\n\t}\n}", "func main() {\n\tserver := server.NewHTTPServer()\n\tserver.Start(3000)\n}", "func main() {\n\tservice.StartWebServer(\"8081\")\n}", "func (s *WebServer) Start() error {\n\treturn s.ListenAndServe()\n}", "func StartApplicatin() {\n\tmapUrls()\n\trouter.Run(\":8080\")\n}", "func (ss *StreamerServer) StartWebServer(bindAddr string) {\n\tmux := ss.webServerHandlers(bindAddr)\n\tsrv := &http.Server{\n\t\tAddr: bindAddr,\n\t\tHandler: mux,\n\t}\n\n\tglog.Info(\"Web server listening on \", bindAddr)\n\tsrv.ListenAndServe()\n}", "func (w *WebServer) Start() {\n\tw.start()\n}", "func WebStartUp() {\n\thttp.HandleFunc(\"/\", servePage)\n\thttp.ListenAndServe(\":8080\", nil)\n}", "func startWebserver(input string) {\n ip := \"130.240.170.62:1025\"\n\t//router := 
httprouter.New()\n\thttp.Handle(\"/static/\", http.StripPrefix(\"/static/\", http.FileServer(http.Dir(\"./static\"))))\n\t//router.GET(\"/\", IndexHandler)\n\thttp.HandleFunc(\"/\", IndexHandler)\n\thttp.HandleFunc(\"/about\", AboutHandler)\n\thttp.HandleFunc(\"/catmagic\", CatMagicHandler)\n http.HandleFunc(\"/toplist/rate\", ToplistRateHandler)\n http.HandleFunc(\"/toplist/comment\", ToplistCommentHandler)\n http.HandleFunc(\"/toplist/favorite\", ToplistFavoriteHandler)\n http.HandleFunc(\"/toplist/latest\", LatestPhotosHandler)\n\t//http.HandleFunc(\"/toplist\", TopListHandler)\n\thttp.HandleFunc(\"/photo/\", PhotoHandler)\n\thttp.HandleFunc(\"/login\", LoginHandler)\n http.HandleFunc(\"/mypage/\", MyPageHandler)\n\n\t//var input int\n\t//fmt.Scan(&input)\n\tif input == \"1\" {\n\t\tfmt.Println(\"running on\", ip)\n\t\tlog.Fatal(http.ListenAndServe(ip, nil))\n\t} else {\n\t\tfmt.Println(\"running on localhost:1025\")\n\t\tlog.Fatal(http.ListenAndServe(\"localhost:1025\", nil))\n\n\t}\n}", "func (s *Server) Start() {\n\tlog.Println(\"Web server started at \" + s.configurationService.Address())\n\tlog.Fatal(http.ListenAndServe(s.configurationService.Address(), s.router()))\n}", "func (display *WebDisplay) LaunchWebServer() {\n\n\thttp.HandleFunc(\"/\", htmlPageHandler)\n\thttp.HandleFunc(\"/image/\", func(w http.ResponseWriter, r *http.Request) { display.imageHandler(w, r) })\n\n\tlog.Print(\"Server listening on 8080\")\n\tlog.Fatal(http.ListenAndServe(\":8080\", nil))\n}", "func (ser *Server) Start() error {\n\tlog.Printf(\"System webapp start at %s\", ser.addr)\n\treturn manners.ListenAndServe(ser.addr, ser.m)\n}", "func startServer() {\n\tapi, err := gobroem.NewAPI(options.db)\n\tif err != nil {\n\t\tlog.Fatal(\"can not open db\", err)\n\t}\n\n\thttp.ListenAndServe(\n\t\tfmt.Sprintf(\"%s:%d\", options.host, options.port),\n\t\tapi.Handler(\"/\", \"/static/\"),\n\t)\n}", "func (wsv *web) run() {\n\tdefer wsv.doCloseDone.Done()\n\tdefer 
wsv.isRun.Store(false)\n\tdefer func() {\n\t\tif wsv.conf.Socket == \"\" {\n\t\t\treturn\n\t\t}\n\t\tif wsv.conf.Mode == \"unix\" || wsv.conf.Mode == \"unixpacket\" {\n\t\t\t_ = os.Remove(wsv.conf.Socket)\n\t\t}\n\t}()\n\n\t// Configure net/http web server\n\twsv.server = wsv.loadConfiguration()\n\tif wsv.err != nil {\n\t\treturn\n\t}\n\n\t// Configure keep alives of web server\n\tif wsv.conf.KeepAliveDisable {\n\t\twsv.server.SetKeepAlivesEnabled(false)\n\t}\n\t// Begin serve\n\twsv.err = wsv.server.Serve(wsv.listener)\n}", "func StartServer() {\n\thandlePesquisa()\n\n\tlog.Info.Println(\"WebServer started...\")\n\thttp.ListenAndServe(\":8080\", httpLogger.WriteLog(http.DefaultServeMux, os.Stdout))\n}", "func startServer() {\n\t// index file\n\thttp.HandleFunc(\"/\", func(w http.ResponseWriter, r *http.Request) {\n\t\thttp.Redirect(w, r, \"/static/\", http.StatusFound)\n\t}) //设置访问的路由\n\n\t// static file\n\thttp.HandleFunc(\"/static/\", func(w http.ResponseWriter, r *http.Request) {\n\t\thttp.ServeFile(w, r, r.URL.Path[1:])\n\t})\n\n\t// other logic handlers\n\thttp.HandleFunc(\"/rank\", rank)\n\thttp.HandleFunc(\"/top\", top)\n\t//\thttp.HandleFunc(\"/update\", update)\n\n\terr := http.ListenAndServe(\":9090\", nil) //设置监听的端口\n\tif err != nil {\n\t\tlog.Fatal(\"ListenAndServe: \", err)\n\t}\n}", "func runServer() {\n\t// listen and serve on 0.0.0.0:8080 (for windows \"localhost:8080\")\n\tlog.Fatalln(router.Run(fmt.Sprintf(\":%s\", env.AppPort)))\n}", "func startHTTP(h *HTTP) {\n\th.Server.ErrorLog.Printf(\"starting server on '%s'...\", h.Options.Addr.String())\n\th.events <- h.Server.ListenAndServe()\n}", "func (srv Web) Start() error {\n\tfmt.Printf(\"Starting service on port %s\\n\", srv.Settings.Port)\n\treturn http.ListenAndServe(srv.Settings.Port, srv.Router())\n}", "func (o *HttpServer) Start() error {\n\turi := fmt.Sprintf(\"%s:%d\", o.Host, o.Port)\n\tlog.Printf(\"[HTTP] Server listen on %s\\n\", uri)\n\treturn o.Server.ListenAndServe()\n}", "func 
StartWebserver(port string) {\n\tlog.Info(\"Starting service at port: \" + port)\n\tr := routes.NewRouter()\n\tsrv := &http.Server{\n\t\tAddr: \":\" + port,\n\t\tHandler: r,\n\t\t// Good practice: enforce timeouts for servers you create!\n\t\tWriteTimeout: 15 * time.Second,\n\t\tReadTimeout: 15 * time.Second,\n\t}\n\n\tgo func() {\n\t\t// service connections\n\t\tif err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {\n\t\t\tlog.Fatalf(\"listen: %s\\n\", err)\n\t\t}\n\t}()\n\n\t// gracefule shutdown\n\t// Wait for interrupt signal to gracefully shutdown the server with\n\t// a timeout of 5 seconds.\n\tquit := make(chan os.Signal)\n\tsignal.Notify(quit, os.Interrupt)\n\t<-quit\n\tlog.Info(\"Shutdown Server ...\")\n\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancel()\n\tif err := srv.Shutdown(ctx); err != nil {\n\t\tlog.Fatal(\"Server Shutdown:\", err)\n\t}\n\tlog.Info(\"Server exiting\")\n}", "func Start(port string) {\n\tgo startHTTPServer(port)\n}", "func (s *server) Run() error {\n\ts.logger.Info(\"starting http server\", logger.String(\"addr\", s.server.Addr))\n\ts.server.Handler = s.gin\n\t// Open listener.\n\ttrackedListener, err := conntrack.NewTrackedListener(\"tcp\", s.addr, s.r)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn s.server.Serve(trackedListener)\n}", "func startHTTP(handlers http.Handler, s Server) {\n\tfmt.Println(time.Now().Format(\"2006-01-02 03:04:05 PM\"), \"Running HTTP \"+httpAddress(s))\n\n\t// Start the HTTP listener\n\tlog.Fatal(http.ListenAndServe(httpAddress(s), handlers))\n}", "func startServer(port string, handler http.Handler) {\n\terr := http.ListenAndServe(port, handler)\n\tif err != nil {\n\t\tlogger.Fatal(\"ListenAndServe: \", err)\n\t}\n}", "func start_HTTP(handler http.Handler, server Server) {\n\tfmt.Println(time.Now().Format(\"2006-01-02 03:04:05 PM\"), \"Running HTTP \"+get_http_address(server))\n\n\t// Start the HTTP 
listener\n\tlog.Fatal(http.ListenAndServe(get_http_address(server), handler))\n}", "func InitWebServer() {\n\thttp.ListenAndServe(\":8080\", initRouter())\n}", "func (s *Server) Start() {\n\tlog.Infof(\"Starting http server on port %d...\", s.port)\n\n\tgo s.server.ListenAndServe()\n}", "func main() {\n\thttp.ListenAndServe(\"127.0.0.1:8080\", NewServer())\n}", "func startHTTPServer(ch chan<- bool) {\n\tserver := http.Server{\n\t\tAddr: \":80\",\n\t}\n\tlog.Println(\"HTTP server started (listening on port 80).\")\n\tlog.Println(\"HTTP server stopped with error:\", server.ListenAndServe())\n\tch <- true\n}", "func Start() {\n\taddr := fmt.Sprintf(\"%v:%v\", webConfig.Ip4address, webConfig.Ip4port)\n\tserver := &http.Server{\n\t\tAddr: addr,\n\t\tHandler: nil,\n\t\tReadTimeout: 60 * time.Second,\n\t\tWriteTimeout: 60 * time.Second,\n\t}\n\n\t// We don't use ListenAndServe because it lacks a way to close the listener\n\tlog.LogInfo(\"HTTP listening on TCP4 %v\", addr)\n\tvar err error\n\tlistener, err = net.Listen(\"tcp\", addr)\n\tif err != nil {\n\t\tlog.LogError(\"HTTP failed to start TCP4 listener: %v\", err)\n\t\t// TODO More graceful early-shutdown procedure\n\t\tpanic(err)\n\t}\n\n\terr = server.Serve(listener)\n\tif shutdown {\n\t\tlog.LogTrace(\"HTTP server shutting down on request\")\n\t} else if err != nil {\n\t\tlog.LogError(\"HTTP server failed: %v\", err)\n\t}\n}", "func (ms *MarvinServer) Start() {\n\tgo startWebsockets()\n\n\tlog.Printf(\"Started Marvin HTTP Server on %v:%v\\n\", ms.host, ms.port)\n\trouter := createRouter(ms)\n\thttpError := netHTTP.ListenAndServe(ms.host+\":\"+strconv.Itoa(ms.port), router)\n\n\tif httpError != nil {\n\t\tlog.Fatal(httpError)\n\t\treturn\n\t}\n}", "func (s *Server) Start() error {\n\tlog.Printf(\"Hey there! 
I'm up and running, and can be accessed at: http://localhost:%d\\n\", s.config.AppPort)\n\treturn s.httpServer.ListenAndServe()\n}", "func (s *Server) Start() error {\n\treturn http.ListenAndServe(\":8000\", s.router())\n}", "func (server *Server) Start() {\n\tmux := http.NewServeMux()\n\n\tfileServer := server.attachStaticFileServer(mux)\n\tserver.attachSystemJSRewriteHandler(mux)\n\tserver.attachCustomHandlers(mux)\n\n\tif server.hub != nil {\n\t\t// add HMR support\n\t\tserver.attachIndexInjectionListener(mux, fileServer)\n\t\tserver.attachWebSocketListeners(mux, server.hub)\n\t\tgo server.hub.run()\n\t}\n\n\tserver.srv = &http.Server{\n\t\tAddr: makeServerAddress(server.port),\n\t\tHandler: mux,\n\t}\n\n\tif err := server.srv.ListenAndServe(); err != nil {\n\t\tpanic(err)\n\t}\n}", "func (s *HttpServer) Start() error {\n\n\tif err := http.ListenAndServe(s.Config.Host+\":\"+s.Config.HTTPPort, s.ServeMux); err != nil {\n\t\tlog.Error().Err(err).Msg(\"Unable to start http server\")\n\t\treturn err\n\t}\n\treturn nil\n}", "func (app *Application) Start() error {\n\treturn app.Server.Start()\n}", "func (hSvr *HTTPServer) Start(_ context.Context) error {\n\tgo func() {\n\t\tif err := hSvr.svr.ListenAndServe(); err != nil && err != http.ErrServerClosed {\n\t\t\tlog.L().Fatal(\"Node failed to serve.\", zap.Error(err))\n\t\t}\n\t}()\n\treturn nil\n}", "func (hs *HttpServer) Start() (err error) {\n\tpanic(\"todo - StartServer\")\n\n\t// Start listening to the server port\n\n\t// Accept connection from client\n\n\t// Spawn a go routine to handle request\n\n}", "func startWebAdmin(s *server) {\n\n\tlogger := shared.NewLogger(&s.config.WebAdmin.Logger)\n\n\ts.webadmin = webadmin.New(s.config.WebAdmin, applicationName, logger)\n\n\t// Enable showing indexpage on / that shows all possible routes\n\ts.webadmin.Router.GET(\"/\", webadmin.ShowAllRoutes(s.webadmin.Router, applicationName))\n\ts.webadmin.Router.GET(webadmin.LivenessCheckPath, 
webadmin.LivenessProbe)\n\ts.webadmin.Router.GET(webadmin.ReadinessCheckPath, s.readiness.ReadinessProbe)\n\ts.webadmin.Router.GET(webadmin.MetricsPath, gin.WrapH(promhttp.Handler()))\n\ts.webadmin.Router.GET(webadmin.ConfigDumpPath, webadmin.ShowStartupConfiguration(s.config))\n\n\ts.webadmin.Start()\n}", "func (e *Engine) Run(port int64) {\n\thttp.HandleFunc(\"/\", handleRequest)\n\tstartHTTPServer(port)\n}", "func RunWeb() {\n\twebserver.New(Config).Run()\n}", "func main() {\n\tfmt.Println(\"APPLICATION BEGIN\")\n\twebserver := new(service.Webserver)\n\tregisterConfig()\n\tregisterErrors()\n\tregisterAllApis()\n\tregisterInitFunc()\n\toverrideConfByEnvVariables()\n\twebserver.Start()\n}", "func Run() {\n\trouter := getRouter()\n\ts := &http.Server{\n\t\tAddr: \"0.0.0.0:8080\",\n\t\tHandler: router,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tMaxHeaderBytes: 1 << 20,\n\t}\n\ts.ListenAndServe()\n}", "func main() {\n\n\t// Dynamic port (used by Heroku for example)\n\tport := os.Getenv(\"PORT\")\n\tif port == \"\" {\n\t\tlog.Println(\"$PORT must be set, using 5000 as default...\")\n\t\tport = \"5000\"\n\t}\n\n\thttp.HandleFunc(\"/\", makeHandler(homeHandler))\n\thttp.HandleFunc(\"/view/\", makeHandler(viewHandler))\n\thttp.HandleFunc(\"/edit/\", makeHandler(editHandler))\n\thttp.HandleFunc(\"/save/\", makeHandler(saveHandler))\n\n\t// For static files\n\thttp.Handle(\"/static/\", http.StripPrefix(\"/static/\", http.FileServer(http.Dir(\"assets/\"))))\n\n\tlog.Println(\"\")\n\tlog.Println(\"Server started... 
listening on port \" + port)\n\tlog.Println(\"URL: http://localhost:\" + port + \"/\")\n\tlog.Println(\"\")\n\n\tlog.Fatal(http.ListenAndServe(\":\"+port, nil))\n}", "func (s *Server) Run() {\n\tlog.Printf(\"[INFO] activate rest server on port %v\", s.Port)\n\tlog.Fatal(http.ListenAndServe(fmt.Sprintf(\"%v:%v\", s.address, s.Port), s.routes()))\n}", "func main() {\n\t// setup and start the webserver.\n\tportString := fmt.Sprintf(\":%d\", config.Port)\n\n\tinitTemplates(config.TemplatesDir)\n\n\t// custom handler with strict url pattern matching\n\tvh := server.NewStrictHandler()\n\tvh.NotFoundHandler = notFoundHandler\n\tvh.HandlePatterns([]string{\"/\", \"/index.html\"}, indexHandler)\n\tvh.HandlePattern(\"/time/\", server.LimitRequests(timeHandler))\n\tvh.HandlePattern(\"/login/\", loginHandler)\n\tvh.HandlePattern(\"/logout/\", logoutHandler)\n\tvh.HandlePattern(\"/about/\", aboutHandler)\n\tvh.HandlePattern(\"/monitor/\", server.MonitorHandler)\n\tvh.ServeStaticFile(\"/css/style.css\", config.TemplatesDir+\"/style.css\")\n\n\tlog.Infof(\"Timeserver listening on 0.0.0.0%s\", portString)\n\terr := http.ListenAndServe(portString, vh)\n\n\tif err != nil {\n\t\tlog.Critical(\"TimeServer Failure: \", err)\n\t}\n\tlog.Info(\"Timeserver exiting..\")\n}", "func (s *Server) Run() {\n\tgo func() {\n\t\t// start serving\n\t\tif err := s.httpServer.ListenAndServe(); err != nil {\n\t\t\tlog.Errora(err)\n\t\t}\n\t}()\n}", "func StartHTTPD() {\n\tsession.Global.Close()\n\tsession.Global = session.NewCookieManagerOptions(session.NewInMemStore(), &session.CookieMngrOptions{AllowHTTP: true})\n\tdefer session.Global.Close()\n\n\trouter := httprouter.New()\n\n\trouter.GET(\"/\", AuthenticationRequired(IndexHandler))\n\trouter.GET(\"/api/feeds\", AuthenticationRequired(FeedsHandler))\n\trouter.GET(\"/api/folders\", AuthenticationRequired(FoldersHandler))\n\trouter.POST(\"/api/feeds/:uuid/read\", AuthenticationRequired(ItemReadHandler))\n\trouter.POST(\"/api/folders/:folder/read\", 
AuthenticationRequired(FolderReadHandler))\n\n\tstaticDir := fmt.Sprintf(\"%s/static\", viper.GetString(\"web.installationpath\"))\n\trouter.ServeFiles(\"/static/*filepath\", http.Dir(staticDir))\n\n\thostport := fmt.Sprintf(\"%s:%d\", viper.GetString(\"web.host\"), viper.GetInt(\"web.port\"))\n\tlog.Printf(\"Starting web server on %s\", hostport)\n\tlog.Fatal(http.ListenAndServe(hostport, router))\n}", "func Start(ss *ServiceStore) {\n\trouter := httprouter.New()\n\n\trouter.GET(\"/\", ss.root)\n\trouter.GET(\"/monitor\", ss.monitor)\n\t//router.GET(\"/market/:id\", ss.market)\n\trouter.GET(\"/markets\", ss.markets)\n\trouter.GET(\"/wallets\", ss.wallets)\n\trouter.GET(\"/orderbook\", ss.orderbook)\n\trouter.GET(\"/ws\", ss.websocket)\n\n\t// serve static assets\n\trouter.ServeFiles(\"/web/js/*filepath\", http.Dir(\"web/js\"))\n\trouter.ServeFiles(\"/web/css/*filepath\", http.Dir(\"web/css\"))\n\trouter.ServeFiles(\"/web/vendor/*filepath\", http.Dir(\"web/vendor\"))\n\n\t// start the web server\n\tlog.Println(\"Web server ready.\")\n\tlog.Fatal(http.ListenAndServe(\":5000\", router))\n}", "func (s *Server) Start() {\n\tserver := http.Server{\n\t\tAddr: s.Port,\n\t\tHandler: handlers.LoggingHandler(s.Logger, s.Router),\n\t}\n\n\tfmt.Println(\"Running\")\n\tserver.ListenAndServe()\n}", "func main() {\n\t// command line flags\n\tport := flag.Int(\"port\", 12000, \"port to serve on\")\n\tdir := flag.String(\"directory\", \"web/\", \"directory of web files\")\n\tflag.Parse()\n\t\n\tconnections = make(map[*websocket.Conn]bool)\n\t// handle all requests by serving a file of the same name\n\tfs := http.Dir(*dir)\n\tfileHandler := http.FileServer(fs)\n\thttp.Handle(\"/\", fileHandler)\n\thttp.HandleFunc(\"/ws\", wsHandler)\n\n\tlog.Printf(\"Running on port %d\\n\", *port)\n\n\taddr := fmt.Sprintf(\"127.0.0.1:%d\", *port)\n\t// this call blocks -- the progam runs here forever\n\terr := http.ListenAndServe(addr, nil)\n\tfmt.Println(err.Error())\n}", "func Run(h http.Handler) 
{\n\tsrv := createServer(h)\n\tgo gracefullyShutDownOnSignal(srv, context.Background())\n\tif err := srv.ListenAndServe(); err != http.ErrServerClosed {\n\t\tlog.Fatalf(\"Unable to to start server: %v\", err)\n\t}\n}", "func (s *server) startWith(readTimeout int64, writeTimeout int64) error\t{\n\ts.sock = &http.Server{\n\t\tAddr: \":\"+ strconv.Itoa(int(s.port)),\n\t\tHandler: s,\n\t\tReadTimeout: time.Duration(readTimeout),\n\t\tWriteTimeout: time.Duration(writeTimeout),\n\t\tMaxHeaderBytes: 1 << 20,\n\t}\n\n\tlogger.Infof(\" Go WebServer started at Port %d \", s.port )\n\treturn s.sock.ListenAndServe();\n}", "func (s *Server) Run() error {\n\t// start fetcher, reporter and doc generator in goroutines\n\tgo s.fetcher.Run()\n\tgo s.reporter.Run()\n\tgo s.docGenerator.Run()\n\n\t// start webserver\n\tlistenAddress := s.listenAddress\n\tif listenAddress == \"\" {\n\t\tlistenAddress = DefaultAddress\n\t}\n\n\tr := mux.NewRouter()\n\n\t// register ping api\n\tr.HandleFunc(\"/_ping\", pingHandler).Methods(\"GET\")\n\n\t// github webhook API\n\tr.HandleFunc(\"/events\", s.gitHubEventHandler).Methods(\"POST\")\n\n\t// travisCI webhook API\n\tr.HandleFunc(\"/ci_notifications\", s.ciNotificationHandler).Methods(\"POST\")\n\n\tlogrus.Infof(\"start http server on address %s\", listenAddress)\n\treturn http.ListenAndServe(listenAddress, r)\n}", "func Run() {\n\n\tgo func() {\n\t\terrors := setupTemplates(\"server/templates\")\n\t\tif errors != nil {\n\t\t\tfmt.Println(errors)\n\t\t}\n\t}()\n\n\tfmt.Println(\"Starting server...\")\n\thttp.HandleFunc(\"/\", index)\n\thttp.HandleFunc(\"/view\", view)\n\thttp.Handle(\"/static/\", http.StripPrefix(\"/static/\", http.FileServer(http.Dir(\"server/static/\"))))\n\thttp.ListenAndServe(\":8080\", nil)\n}", "func (p *program) startHTTP() (err error) {\n\tp.httpServer = &http.Server{\n\t\tAddr: \":8002\",\n\t\tHandler: Router,\n\t\tWriteTimeout: time.Second * 150000,\n\t\tReadTimeout: time.Second * 150000,\n\t\tIdleTimeout: time.Second * 
600000,\n\t}\n\n\tp.Logger.Debug(\"http server start -->\", p.httpServer.Addr)\n\n\tif err := p.httpServer.ListenAndServe(); err != nil && err != http.ErrServerClosed {\n\t\tp.Logger.Error(\"start http server error\", err)\n\t}\n\tp.Logger.Debug(\"http server end\")\n\n\treturn\n}", "func StartApp() {\n\turlMappings()\n\trouter.Run(\"localhost:8080\")\n}", "func (s ServerHTTP) Start(c *cli.Context) error {\n\tcfgFile := c.Args().First()\n\n\tvar config = &server.Config{}\n\t_, err := toml.DecodeFile(cfgFile, config)\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tsrv, err := server.Start(config)\n\tif err != nil {\n\t\treturn err\n\t}\n\ts.server = srv\n\treturn nil\n}", "func StartHTTP() {\n\tvar mux = routes.GetRoutes()\n\tvar addr = \":\" + config.PORT\n\tvar server = &http.Server{\n\t\tAddr: addr,\n\t\tHandler: mux,\n\t\tReadTimeout: config.READ_TIMEOUT * time.Second,\n\t\tMaxHeaderBytes: 1 << 20,\n\t}\n\tprintln(\"Listening on \" + addr)\n\tprintln(server.ListenAndServe())\n}", "func startHTTPListener() {\n\thttp.ListenAndServe(\":\"+GetConfig().Port, nil)\n}", "func (server Server) Run() error {\n\terr := server.supervisor.SpawnClient()\n\tif err != nil {\n\t\tserver.logger.Fatalf(\"Error in starting client: %s\", err)\n\t}\n\n\tgo listenForSMS(server.upstreamChannel, server.logger)\n\tserver.logger.Info(\"Listening for SMS\")\n\tserver.logger.Info(\"Starting Webserver\")\n\n\treturn server.webserver.Server.ListenAndServe()\n}", "func (s *HttpServer) Run() {\n\n\tgo s.httpServer()\n\t<-s.quitChan\n}", "func (s *server) start(addr string) {\n\t// Set http handlers\n\thttp.HandleFunc(\"/\", s.wsHandler)\n\n\t// Start server\n\tif err := http.ListenAndServe(addr, nil); err != nil {\n\t\ts.log.Fatal(\"error starting http server\", zap.Error(err))\n\t}\n}", "func Run() error {\n\tvar err error\n\n\ts := NewServer()\n\tport := \":1729\"\n\tfmt.Printf(\"Listening on %s...\\n\", port)\n\thttp.ListenAndServe(port, s)\n\n\treturn err\n}", "func (s *Server) Start() 
error {\n\ts.RegisterHTTPHandlers()\n\tlog.Print(fmt.Sprintf(\"Listening HTTP on: %s\", s.url))\n\n\thandler := CORSWrap(s.router)\n\treturn http.ListenAndServe(s.url, handler)\n}", "func StartHTTPServer(log *logging.Logger, port int, reportDir string, service Service) {\n\tm := macaron.New()\n\tm.Use(macaron.Logger())\n\tm.Use(macaron.Recovery())\n\tm.Use(macaron.Static(\"\",\n\t\tmacaron.StaticOptions{\n\t\t\tSkipLogging: false,\n\t\t\tFileSystem: bindata.Static(bindata.Options{\n\t\t\t\tAsset: templates.Asset,\n\t\t\t\tAssetDir: templates.AssetDir,\n\t\t\t\tAssetInfo: templates.AssetInfo,\n\t\t\t\tAssetNames: templates.AssetNames,\n\t\t\t\tPrefix: \"\",\n\t\t\t}),\n\t\t},\n\t))\n\tm.Use(macaron.Static(reportDir,\n\t\tmacaron.StaticOptions{\n\t\t\tPrefix: \"reports\",\n\t\t\tSkipLogging: false,\n\t\t},\n\t))\n\tm.Use(macaron.Renderer(macaron.RenderOptions{\n\t\tFuncs: []template.FuncMap{\n\t\t\ttemplate.FuncMap{\n\t\t\t\t\"cssReady\": cssReady,\n\t\t\t\t\"cssTestOK\": cssTestOK,\n\t\t\t\t\"formatTime\": formatTime,\n\t\t\t},\n\t\t},\n\t\tTemplateFileSystem: bindata.Templates(bindata.Options{\n\t\t\tAsset: templates.Asset,\n\t\t\tAssetDir: templates.AssetDir,\n\t\t\tAssetInfo: templates.AssetInfo,\n\t\t\tAssetNames: templates.AssetNames,\n\t\t\tPrefix: \"\",\n\t\t}),\n\t}))\n\tm.Map(log)\n\tm.Map(service)\n\n\tm.Get(\"/\", indexPage)\n\tm.Get(\"/test/:name\", testPage)\n\tm.Get(\"/test/:name/pause\", testPausePage)\n\tm.Get(\"/test/:name/resume\", testResumePage)\n\tm.Get(\"/test/:name/logs\", testLogs)\n\tm.Get(\"/logs/:machine/:mode\", logsPage)\n\tm.Get(\"/chaos\", chaosPage)\n\tm.Get(\"/chaos/pause\", chaosPausePage)\n\tm.Get(\"/chaos/resume\", chaosResumePage)\n\tm.Get(\"/chaos/:id/enable\", chaosActionEnablePage)\n\tm.Get(\"/chaos/:id/disable\", chaosActionDisablePage)\n\n\taddr := fmt.Sprintf(\"0.0.0.0:%d\", port)\n\tlog.Infof(\"HTTP server listening on %s\", addr)\n\tgo func() {\n\t\tif err := http.ListenAndServe(addr, m); err != nil 
{\n\t\t\tlog.Fatalf(\"Failed to start listener: %#v\", err)\n\t\t}\n\t}()\n}", "func (s *Server) Run() error {\n\tmux := s.createServeMux()\n\tlog.WithField(\"address\", \"http://\"+s.hostPort).Info(\"Starting\")\n\treturn http.ListenAndServe(s.hostPort, mux)\n}", "func (s *Server) Run() error {\n\tmux := s.createServeMux()\n\tlog.WithField(\"address\", \"http://\"+s.hostPort).Info(\"Starting\")\n\treturn http.ListenAndServe(s.hostPort, mux)\n}", "func Run() error {\n\tgo server.ListenAndServe()\n\t// TODO: Improve error handling\n\treturn nil\n}", "func (s httpServer) Run(h http.Handler) {\n\ts.srv.Handler = h\n\tgo s.srv.ListenAndServe()\n}", "func (sw *Switcher) Start() {\n\tsw.Server.Start()\n}", "func (s *Server) Start() error {\n\treturn http.ListenAndServe(s.listenAddr, s.router)\n}", "func (h *Server) Run() {\n\n\th.g.StartServer()\n}", "func (a *App) Start() {\n\ta.router.GET(\"/\", a.Index)\n\tfmt.Println(\"Now listening on localhost:4100\")\n\tlog.Fatal(http.ListenAndServe(\":4100\", a.router))\n}", "func ServerStart(port string) (string, error) {\n\n\t// List of view handlers\n\thandlerStrings = append(handlerStrings, \"/\", \"/blockchain/view/<ID>\", \"/garage/view/<ID>\", \"serviceevent/add/\", \"/vehicle/view/<ID>\")\n\n\thttp.HandleFunc(\"/\", defaultHandler) // Each call to \"/\" will invoke defaultHandler\n\thttp.HandleFunc(\"/blockchain/view/\", blockchainViewHandler)\n\thttp.HandleFunc(\"/garage/view/\", garageViewHandler)\n\thttp.HandleFunc(\"/serviceevent/add/\", writeServiceEventHandler)\n\thttp.HandleFunc(\"/vehicle/view/\", vehicleViewHandler)\n\n\t//log.Fatal(http.ListenAndServe(\"localhost:\"+port, nil))\n\treturn \"Started on: \" + port, http.ListenAndServe(\"localhost:\"+port, nil)\n\n}", "func (s *Server) Run(addr string) {\n\tfmt.Println(\"Listening to port 8080\")\n\tlog.Fatal(http.ListenAndServe(addr, s.Router))\n}", "func StartWebServer(addr url.URL, readTimeout, writeTimeout int, handler http.Handler) *Server {\n stopc := 
make(chan struct{})\n srv := &Server{\n addrURL: addr,\n httpServer: &http.Server{\n Addr: addr.Host,\n Handler: handler,\n ReadTimeout: time.Duration(readTimeout) * time.Second,\n WriteTimeout: time.Duration(writeTimeout) * time.Second,\n },\n stopc: stopc,\n donec: make(chan struct{}),\n }\n listener, err := net.Listen(\"tcp\", addr.Host)\n if err != nil {\n Error(err.Error())\n }\n go func() {\n defer func() {\n if err := recover(); err != nil {\n Warn(\n \"shutting down server with err \",\n Field(\"error\", fmt.Sprintf(`(%v)`, err)),\n )\n os.Exit(0)\n }\n close(srv.donec)\n }()\n if err := srv.httpServer.Serve(listener); err != nil && err != http.ErrServerClosed {\n Fatal(\n \"shutting down server with err \",\n Field(\"error\", err),\n )\n }\n }()\n return srv\n}", "func (sw *SimpleWebServer) Serve() error {\n\tif sw.running {\n\t\treturn fmt.Errorf(\"already running\")\n\t}\n\tsw.running = true\n\tgo func() {\n\t\t_ = sw.ListenAndServe()\n\t}()\n\n\treturn nil\n}", "func (s *Server) Start() {\n\tlog.Println(\"Starting webhook receiver on port 8080...\")\n\terr := http.ListenAndServe(\":8080\", nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"Couldn't start server: %s\", err)\n\t}\n}", "func (s *Rest) Run(httpPort int) {\n\tlog.Printf(\"[INFO] activate rest HTTP server on port %d\", httpPort)\n\n\trouter := s.routes()\n\n\ts.lock.Lock()\n\ts.httpServer = &http.Server{\n\t\tAddr: fmt.Sprintf(\":%d\", httpPort),\n\t\tHandler: router,\n\t\tReadHeaderTimeout: 5 * time.Second,\n\t\tWriteTimeout: 5 * time.Second,\n\t\tIdleTimeout: 30 * time.Second,\n\t}\n\n\ts.lock.Unlock()\n\n\terr := s.httpServer.ListenAndServe()\n\n\tlog.Printf(\"[WARN] http server terminated, %s\", err)\n}", "func (s *Server) Start() error {\n\taddress := s.Address\n\tif address == \"\" {\n\t\taddress = \"0.0.0.0\"\n\t}\n\taddr := fmt.Sprintf(\"%s:%d\", address, s.Port)\n\ts.httpServer = &http.Server{\n\t\tAddr: addr,\n\t\tHandler: s.Routes(),\n\t}\n\treturn s.httpServer.ListenAndServe()\n}", "func 
startHTTPServer(app *AppContext) *http.Server {\n\n\tsrv := &http.Server{\n\t\tAddr: \":\" + app.configFile.loggerPort,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t}\n\tgo func() {\n\t\tif err := srv.ListenAndServe(); err != nil {\n\t\t\tlog.Debugf(\"Httpserver: ListenAndServe(): %s\", err)\n\t\t}\n\t}()\n\tlog.Debugf(\"HTTP Server started, listening on :%s\", app.configFile.loggerPort)\n\treturn srv // returning reference so caller can call Shutdown()\n}", "func (s *Server) Start() error {\n\ts.router = configureRouter(s.Config.StaticDir)\n\ts.Logger.Printf(\"serving %v at /static/\", s.Config.StaticDir)\n\ts.httpListener = s.configureHTTPListener()\n\n\tgo func() {\n\t\terr := s.httpListener.ListenAndServe()\n\t\tif err != nil {\n\t\t\t//Normal graceful shutdown error\n\t\t\tif err.Error() == \"http: Server closed\" {\n\t\t\t\ts.Logger.Info(err)\n\t\t\t} else {\n\t\t\t\ts.Logger.Fatal(err)\n\t\t\t}\n\t\t}\n\t}()\n\ts.Logger.Printf(\"listening on %v\", s.Config.Address)\n\treturn nil\n}", "func (s *server) Run(addr string) error {\n\treturn http.ListenAndServe(addr, s.handler)\n}" ]
[ "0.79586124", "0.77553374", "0.7676101", "0.7660521", "0.76436174", "0.7618598", "0.7560046", "0.7443683", "0.7432194", "0.74232006", "0.7418631", "0.74167246", "0.7392946", "0.7371495", "0.7233208", "0.723225", "0.72229004", "0.7216855", "0.7203143", "0.71656066", "0.714282", "0.7118954", "0.7099989", "0.70840806", "0.7065695", "0.7054065", "0.70489573", "0.70428014", "0.7029494", "0.7017287", "0.70105875", "0.70042783", "0.6995337", "0.69940275", "0.6987285", "0.6976213", "0.69562435", "0.69216216", "0.69117975", "0.69111186", "0.6893108", "0.68816817", "0.6877763", "0.6849511", "0.68479794", "0.6847415", "0.6841859", "0.6831578", "0.68196625", "0.6813607", "0.68120766", "0.6805632", "0.6797608", "0.67914236", "0.6787334", "0.67815524", "0.6775637", "0.67699176", "0.67692465", "0.67652285", "0.6764851", "0.6764226", "0.67600906", "0.67570597", "0.6746112", "0.6744344", "0.67395926", "0.67376834", "0.6735472", "0.67192185", "0.6708064", "0.6706585", "0.67018604", "0.66975206", "0.6694573", "0.6694308", "0.6691957", "0.6686036", "0.668393", "0.66823864", "0.6679382", "0.667501", "0.667014", "0.66617686", "0.66617686", "0.66517305", "0.66509724", "0.6643413", "0.66416687", "0.66382873", "0.6636135", "0.6635714", "0.66328686", "0.66325754", "0.6630917", "0.6622771", "0.6610225", "0.66092956", "0.6608506", "0.65981346", "0.65930337" ]
0.0
-1
Hey returns bob's response to what talked to him
func Hey(remark string) string { r := strings.TrimSpace(remark) empty := len(r) == 0 question := strings.HasSuffix(r, "?") yell := strings.ToUpper(r) == r && strings.ContainsAny(r, allUppercaseLetters) switch { case empty: return emptyReply case question && yell: return questionYellReply case question: return questonReply case yell: return yellReply default: return othersReply } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func greet (name string) string {\n var reply string\n\n switch name {\n case \"Olivia\":\n reply = \"Hey! How was your sleep. Did you have any nice dreams?\"\n case \"Kirsten\":\n reply = \"Hey K! How was your day?\"\n default:\n reply = \"Have a good one!\"\n }\n return reply\n}", "func ReplyBye() *Reply { return &Reply{221, []string{\"Bye\"}, nil} }", "func Hey(greeting string) (answer string) {\n\n\tgreeting = strings.TrimSpace(greeting)\n\n\tswitch {\n\n\t\tcase greeting == \"\":\n\t\t\tanswer = \"Fine. Be that way!\"\n\n\t\tcase yelled(greeting, unicode.IsUpper) && !yelled(greeting, unicode.IsLower):\n\t\t\tanswer = \"Whoa, chill out!\"\n\n\t\tcase greeting[len(greeting)-1] == '?':\n\t\t\tanswer = \"Sure.\"\n\n\t\tdefault:\n\t\t\tanswer = \"Whatever.\"\n\t}\n\n\treturn\n\n}", "func Hey(remark string) string {\n\tremark = strings.TrimSpace(remark)\n\n\tswitch {\n\tcase silence(remark):\n\t\treturn responseToSilence\n\tcase yellingQuestion(remark):\n\t\treturn responseToYellingQuestion\n\tcase question(remark):\n\t\treturn responseToQuestion\n\tcase shouting(remark):\n\t\treturn responseToShouting\n\tdefault:\n\t\treturn defaultResponse\n\t}\n}", "func handler(ctx iris.Context) {\n\toutputs, err := ctx.CallFunc(\"greet\", \"Gophers\")\n\tif err != nil {\n\t\tctx.StopWithError(iris.StatusInternalServerError, err)\n\t\treturn\n\t}\n\n\tresponse := outputs[0].Interface().(string)\n\tctx.WriteString(response)\n}", "func Hey(input string) string {\n\tquestionRe := regexp.MustCompile(\"[?]+[\\t\\n\\f\\r ]*$\")\n\tdigitsRe := regexp.MustCompile(\"[[:digit:]]+\")\n\tsilenceRe := regexp.MustCompile(\"^[\\n\\r\\t ]+$\")\n\tnormalRe := regexp.MustCompile(\"[[:alpha:]]+\")\n\n\tif input == strings.ToUpper(input) &&\n\t\tlen(normalRe.FindAllStringSubmatch(input, -1)) > 0 {\n\t\treturn responseYelling\n\t} else if len(questionRe.FindAllStringSubmatch(input, -1)) > 0 {\n\t\treturn responseQuestion\n\t} else if len(silenceRe.FindAllStringSubmatch(input, -1)) > 0 
&&\n\t\tlen(digitsRe.FindAllStringSubmatch(input, -1)) == 0 || input == \"\" {\n\t\treturn responseSilence\n\t}\n\n\treturn responseDefault\n}", "func You(name, from string) (string, error) {\n\treturn makeRequest(\"you\", name, from)\n}", "func (g Greeter) SayHello(ctx context.Context, req *pb.HelloRequest) (*pb.HelloReply, error) {\n\treturn &pb.HelloReply {\n\t\t\tMessage: req.Name + \" is my special guy.\",\n\t\t}, nil\n}", "func Hey(remark string) string {\n\tvar response string\n\tremark = strings.TrimSpace(remark)\n\tswitch {\n\tcase remark == \"\":\n\t\tresponse = \"Fine. Be that way!\"\n\tcase remark == strings.ToUpper(remark) && remark != strings.ToLower(remark) && strings.HasSuffix(remark, \"?\"):\n\t\tresponse = \"Calm down, I know what I'm doing!\"\n\tcase remark == strings.ToUpper(remark) && remark != strings.ToLower(remark):\n\t\tresponse = \"Whoa, chill out!\"\n\tcase strings.HasSuffix(remark, \"?\"):\n\t\tresponse = \"Sure.\"\n\tdefault:\n\t\tresponse = \"Whatever.\"\n\t}\n\treturn response\n}", "func (id *Hi) SayHi(c frame.Context) error {\n\treturn c.String(\"success\")\n}", "func (g Greeter) Greet(name *string, reply *string) error {\n\t*reply = \"Hello \" + *name\n\treturn nil\n}", "func Hey(remark string) string {\n\tsanitized := strings.TrimSpace(remark)\n\n\tif len(sanitized) <= 0 {\n\t\treturn ResponseEmpty\n\t}\n\n\tif isQuestion(sanitized) {\n\t\tif isAllCaps(sanitized) {\n\t\t\treturn ResponseYellingQuestion\n\t\t} else {\n\t\t\treturn ResponseGeneralQuestion\n\t\t}\n\t} else if isAllCaps(sanitized) {\n\t\treturn ResponseGeneralYelling\n\t}\n\n\treturn ResponseDefault\n}", "func GetResponse(c context.Context, facebookUser string, message string) string {\r\n\tresponse := \"Hello \" + message\r\n\treturn response\r\n}", "func (h *Human) SayHi() {\n\tfmt.Printf(\"Hi, I am %s you can call me on %s\\n\", h.name, h.phone)\n}", "func (id *Hello) SayHello(c frame.Context) error {\n\tvar res hello.HelloResponse\n\tres.Message = 
\"welcome~~~\"\n\tlog.Info(\"SayHello receiver....\", c.Bizid(), c.Header().Get(\"A\"))\n\treturn c.JSON(&res)\n}", "func Hi(name string) string {\n\treturn fmt.Sprintf(\"%s, %s\", greetings.HI, name)\n}", "func Hey(remark string) string {\n\tcleanRemark := strings.TrimSpace(remark)\n\tdisposition := \"default\"\n\thasLowercase, _ := regexp.Compile(\"[a-z]\")\n\thasUppercase, _ := regexp.Compile(\"[A-Z]\")\n\n\tif len(cleanRemark) == 0 {\n\t\tdisposition = \"empty\"\n\t} else if cleanRemark[len(cleanRemark)-1:] == \"?\" {\n\t\tdisposition = \"query\"\n\t}\n\n\tif hasUppercase.MatchString(remark) && !hasLowercase.MatchString(remark) {\n\t\tdisposition = \"yelling\"\n\t}\n\n\treturn responses[disposition]\n}", "func (p *Person) SayHi() {\n\tfmt.Printf(\"Hi, I'm %s. You can call me on %s.\\n\", p.name, p.phone)\n}", "func handleHello(args[]interface{}){\n\tm := args[0].(*msg.Hello)\n\ta := args[1].(gate.Agent)\n\tlog.Debug(\"Received <%v>\", m.Name) //~ Print all. (Not just m.Name)\n\tswitch randNo.Intn(2){\n\tcase 0:\n\t\tlog.Debug(\"msg.Hello sent\")\n\t\ta.WriteMsg(&msg.Hello{\n\t\t\tName:\"God\",\n\t\t})\n\tdefault:\n\t\tlog.Debug(\"msg.Gate sent\")\n\t\ta.WriteMsg(&msg.Gate{\n\t\t\tHost:\"www.blizzard.com\",\n\t\t})\n\t}\n}", "func say_message(n string) string {\n\t\treturn \"hello my name is \" + n\n}", "func (this *SmtpWriter) SayHello() error {\n\terr := this.SendResponse(smtpconstants.SMTP_WELCOME_MESSAGE)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlog.Println(\"libmailslurper: INFO - Reading data from client connection...\")\n\treturn nil\n}", "func (b *bot) say(m dggchat.Message, s *dggchat.Session) {\n\tif !isMod(m.Sender) || !strings.HasPrefix(m.Message, \"!say\") {\n\t\treturn\n\t}\n\n\t// message itself can contain spaces\n\tparts := strings.SplitN(m.Message, \" \", 2)\n\tif len(parts) != 2 {\n\t\treturn\n\t}\n\tb.sendMessageDedupe(parts[1], s)\n}", "func (s* server) SayHello(ctx context.Context, req *pb.HelloRequest) (*pb.HelloReply, 
error){\n\n\tfmt.Printf(\"%s req, %s once\\n\",req , req.Name)\n\tresp := pb.HelloReply{\n\t\tMessage: \"adfadf\",\n\t}\n\treturn &resp, nil\n}", "func (*server) Greet(ctx context.Context, req *greetpb.GreetRequest) (*greetpb.GreetResponse, error) {\n\t//to implement return hello and first name\n\t//the GreetRequest struct \"req\" has a Greeting struct. Greeting struct holds first name and last name\n\tfmt.Printf(\"Greet function was invoked with %v \\n\", req)\n\tfirstName := req.GetGreeting().GetFirstName()\n\t//form a GreetResponse\n\tresult := \"Hello \" + firstName\n\t//include & to dereference the pointer to the GreetResponse\n\tres := &greetpb.GreetResponse{\n\t\tResult: result,\n\t}\n\treturn res, nil\n\n}", "func (client *Client) Say(targetName string, text string) {\n\toverhead := client.PrivmsgOverhead(targetName, false)\n\tcuts := ircutil.CutMessage(text, overhead)\n\n\tfor _, cut := range cuts {\n\t\tclient.SendQueuedf(\"PRIVMSG %s :%s\", targetName, cut)\n\t}\n}", "func (h Response) Tell(message string) {\n\th.emit(\":tell\", strings.Replace(strings.Replace(message, \"\\n\", \" \", -1), \"\\t\", \"\", -1))\n}", "func (s *Greeter) SayHello(\n\tctx context.Context,\n\treq *proto.HelloRequest,\n) (*proto.HelloResponse, error) {\n\treturn &proto.HelloResponse{\n\t\tMessage: \"Hello \" + req.Name + \", this is greetings from simple micro server\",\n\t}, nil\n}", "func (p person) say() {\n\tfmt.Println(\"Hello, my name is:\", p.name)\n}", "func (r *Hello) SayHello(rw http.ResponseWriter) error {\n\tg := Hello{\n\t\tCmd: r.Cmd,\n\t\tText: r.Text,\n\t}\n\n\treturn web.JSONResponse(g, rw)\n}", "func (sa *secretAgent) speak() {\n\tfmt.Println(\"I'm a secret agent - this is my name: \", sa.name)\n}", "func Hey(s string) (out string) {\n\ts = strings.TrimSpace(s)\n\tif s == \"\" {\n\t\treturn \"Fine. 
Be that way!\"\n\t}\n\tif strings.ToUpper(s) == s && strings.ToLower(s) != s {\n\t\treturn \"Whoa, chill out!\"\n\t}\n\tif strings.HasSuffix(s, \"?\") {\n\t\treturn \"Sure.\"\n\t}\n\n\treturn \"Whatever.\"\n}", "func Hello(w http.ResponseWriter, r *http.Request) {\n\tresp := ResponseMessage{Message: \"hello world\"}\n\tres_json, _ := json.Marshal(resp)\n\tio.WriteString(w, string(res_json))\n}", "func (s *doorServer) GetHello(c context.Context, knock *a.Knock) (*a.Reply, error) {\n\n\tif knock == nil {\n\t\treturn nil, fmt.Errorf(\"nothing received, wont respond\")\n\t}\n\n\tr := a.Reply{Reply: false, ReplyMessage: s.knockFailureMsg}\n\n\tif knock.KnockDoor {\n\t\tr.Reply = true\n\t\tr.ReplyMessage = \"Hello!\"\n\t}\n\n\treturn &r, nil\n}", "func (p *person) speak() {\n\tfmt.Println(\"I'm a person - this is my name: \", p.name)\n}", "func (h *BotSnack) RespondText(c *gomatrix.Client, ev *gomatrix.Event, user, post string) error {\n\tu := NameRE.ReplaceAllString(user, \"$1\")\n\tif ToMe(u, post) {\n\t\treturn SendText(c, ev.RoomID, h.Process(ev.Sender, post))\n\t}\n\treturn nil\n}", "func GetHelloMessage()(string){\n\treturn quote.Hello();\n}", "func AboutToSayIt(ctx context.Context, m messageService.MessageServiceClient, text string) (*messageService.Response, error) {\n\trequest := &messageService.Request{\n\t\tText: text,\n\t\tSubtext: \"New Message\",\n\t}\n\tr, err := m.SayIt(ctx, request)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn r, nil\n}", "func (*server) Greet(context context.Context, r *greetpb.GreetRequest) (*greetpb.GreetResponse, error) {\n\tfmt.Printf(\"Greet func was invoked with request: %v/n\", r)\n\tfirstName := r.GetGreeting().GetFirstName()\n\t//creating the response\n\tresponse := greetpb.GreetResponse{\n\t\tResponse: \"Hello\" + firstName,\n\t}\n\n\treturn &response, nil\n}", "func (g *Greeter) Hello(ctx context.Context, req *greeter.HelloRequest, resp *greeter.HelloResponse) error {\n\thost, _ := os.Hostname()\n\tresp.Greeting = 
fmt.Sprintf(\"Hello %s from %s\", req.Name, host)\n\tfmt.Println(\"Responing with \" + resp.Greeting)\n\n\treturn nil\n}", "func (p Person) SayMyName() {\n\trand.Seed(time.Now().UnixNano())\n\tr := rand.Intn(len(p.Nicknames))\n\tfmt.Printf(\"SayMyName - %s\\n\", p.Nicknames[r])\n}", "func (s *Server) SayHello(ctx context.Context, r *proto.HelloRequest) (*proto.HelloResponse, error) {\n\tlog.Printf(\"received: %v\", r.Name)\n\n\treturn &proto.HelloResponse{\n\t\tMessage: \"pong\",\n\t}, nil\n}", "func (p person) speak() {\n\tfmt.Println(\"uptown funky-wunk\")\n}", "func isSexy(person string, c chan string) {\n\t// random processing time (0~9 sec)\n\ttime.Sleep(time.Second * time.Duration(rand.Intn(10)))\n\t// send message through channel\n\tc <- \"sexy \" + person\n}", "func (s *Server) SayHello(ctx context.Context, in *PingMessage) (*PingMessage, error) {\n\tlog.Printf(\"Receive message %s from %s\", in.Message, in.Sender)\n\treturn &PingMessage{Sender: s.Name, Message: fmt.Sprintf(\"Hey %s, nice to here from you !\", in.Sender)}, nil\n}", "func SayHello() string {\n\treturn \"Hello wolrd $$!@%#$@!#\"\n}", "func (c *Conn) Say(room, message string) error {\n\tif !c.inRoom(room) {\n\t\treturn errors.New(\"You are not in that room\")\n\t}\n\tr := c.server.rooms.get(room)\n\tif r == nil {\n\t\t// should never happen\n\t\treturn errors.New(\"You were in a room that did not exist\")\n\t}\n\tr.Announce(message, c.username)\n\treturn nil\n}", "func Chatbot(w http.ResponseWriter, r *http.Request) {\n\tp, err := retrieveMessageProperties(r)\n\tif err != nil {\n\t\tlog.Print(err)\n\t\treturn\n\t}\n\tresp, err := generateResponseMessage(p)\n\tif err != nil {\n\t\tlog.Print(err)\n\t\treturn\n\t}\n\tif err := respondToChat(resp, p); err != nil {\n\t\tlog.Print(err)\n\t\treturn\n\t}\n}", "func (g *Greeter) Greet(ctx context.Context, r *greeter.GreetRequest) (*greeter.GreetResponse, error) {\n\tmsg := fmt.Sprintf(\"%s %s\", r.GetGreeting(), r.GetName())\n\tif g.Exclaim {\n\t\tmsg 
+= \"!\"\n\t} else {\n\t\tmsg += \".\"\n\t}\n\treturn &greeter.GreetResponse{Response: msg}, nil\n}", "func (r *RestFulHello) Sayhi(b *rf.Context) {\n\tresult := struct {\n\t\tName string\n\t}{}\n\terr := b.ReadEntity(&result)\n\tif err != nil {\n\t\tb.Write([]byte(err.Error() + \":Hello Guest, this is an error\"))\n\t\treturn\n\t}\n\tb.Write([]byte(result.Name + \":Hello Guest\"))\n\treturn\n}", "func Hey(remark string) string {\n\tremark = strings.TrimSpace(remark)\n\tif isShouting(remark) && isQuestion(remark) {\n\t\treturn \"Calm down, I know what I'm doing!\"\n\t}\n\n\tif isQuestion(remark) {\n\t\treturn \"Sure.\"\n\t}\n\n\tif isShouting(remark) {\n\t\treturn \"Whoa, chill out!\"\n\t}\n\n\tif isSilent(remark) {\n\t\treturn \"Fine. Be that way!\"\n\t}\n\n\treturn \"Whatever.\"\n}", "func Answer(question string) string {\n\n\tswitch question {\n\tcase \"ping\":\n\t\treturn \"pong\"\n\tdefault:\n\t\treturn random()\n\t}\n\n}", "func greet(w http.ResponseWriter, req *http.Request) {\n\tfmt.Fprintf(w, \"Welcome\")\n}", "func sayPolo(chatID int64, text string) error {\n\t// Create the request body struct\n\treqBody := &sendMessageReqBody{\n\t\tChatID: chatID,\n\t\tText: Reverse(text),\n\t}\n\t// Create the JSON body from the struct\n\treqBytes, err := json.Marshal(reqBody)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Send a post request with your token\n\tres, err := http.Post(\"https://api.telegram.org/bot\"+tgBotToken+\"/sendMessage\", \"application/json\", bytes.NewBuffer(reqBytes))\n\tif err != nil {\n\t\treturn err\n\t}\n\tif res.StatusCode != http.StatusOK {\n\t\treturn errors.New(\"unexpected status\" + res.Status)\n\t}\n\n\tfmt.Println(\"Reply: \", reqBody.Text)\n\treturn nil\n}", "func (s *Server) SayHello(ctx context.Context, in *domain.HelloRequest) (*domain.HelloResponse, error) {\n\tlogger.SugaredLogger.Infof(\"Name to greet: %s\", in.Name)\n\treturn &domain.HelloResponse{\n\t\tGreeting: buildGreet(in.Name),\n\t}, nil\n}", "func (man *Person) sayHi() 
string{\n\t//update new name. since this is de-reference so the operation affects memory address\n\tman.name = \"Jr. \" + man.name;\n\treturn \"My name is \" + man.name\n}", "func (res *greeter) greet() {\n\tfmt.Println(res.greeting, res.name)\n\t// We can also pass the reference for changing the actual value itself\n\t// rather than it's copy.\n\tres.name = \"Anagha\"\n}", "func (e *ExtendedConnection) answer(msg string) {\n\te.Emit(\"answer\", &Answer{msg})\n}", "func (h Response) Ask(prompt, reprompt string) {\n\th.emit(\":ask\", prompt, reprompt)\n}", "func (m *Messenger) Response(to int64) *Response {\n\treturn &Response{\n\t\tto: Recipient{to},\n\t\ttoken: m.token,\n\t}\n}", "func Hi(name string) string {\n\treturn fmt.Sprintf(\"Hi, %s\", name)\n}", "func greetHuman(h human) {\n\th.speak()\n\tfmt.Printf(\"Hi %v (%T), nice to meet you\\n\", h, h)\n\t// We are going to use assertion to greet different types of person:\n\tswitch h.(type) {\n\tcase character:\n\t\tfmt.Printf(\"it's so awesome to get to meet youm %v, I've heard great things about you.\\n\", h.(character).first)\n\tcase knightRadiant:\n\t\tfmt.Printf(\"Wooow, %v, it's so awesome to get to meet someone from the Knight Radiants!! 
Even more, one of the %v.\\n\", h.(knightRadiant).first, h.(knightRadiant).order)\n\t}\n}", "func Hey(remark string) string {\n\trr := []rune(remark)\n\tl := len(remark)\n\tvar lower, upper, space int\n\n\tfor _, c := range rr {\n\t\tswitch {\n\t\tcase unicode.IsLower(c):\n\t\t\tlower++\n\t\tcase unicode.IsUpper(c):\n\t\t\tupper++\n\t\tcase unicode.IsSpace(c):\n\t\t\tspace++\n\t\t}\n\t}\n\n\t// outputs\n\t// silence or prolonged silence\n\tif l == 0 || space == l {\n\t\treturn sayNothing\n\t}\n\t// check for question\n\tif strings.HasSuffix(strings.TrimRight(remark, \" \"), \"?\") {\n\t\t// check for yelling question\n\t\tif upper > 0 && lower == 0 {\n\t\t\treturn yellingQuestion\n\t\t} // else is a question\n\t\treturn question\n\t}\n\t// check for yelling\n\tif upper > 0 && lower == 0 {\n\t\treturn yelling\n\t}\n\n\treturn anything\n}", "func Cowsay(w http.ResponseWriter, r *http.Request) {\n\t\t\tlog := log15.New()\n\t\t\tdefer r.Body.Close()\n\t\t\terr := r.ParseForm()\n\n\t\t\tif err != nil {\n\t\t\t\tlog.Error(\n\t\t\t\t\t\"Cannot parse form\",\n\t\t\t\t\t\"Error\", err,\n\t\t\t\t\t\"Form\", fmt.Sprintf(\"%+v\", r.Form),\n\t\t\t\t)\n\t\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\ttext := r.PostFormValue(\"text\")\n\n\t\t\tsay, err := cowsay.Say(\n\t\t\t\tcowsay.Phrase(text),\n\t\t\t\tcowsay.Type(\"default\"),\n\t\t\t)\n\n\t\t\tif err != nil {\n\t\t\t\tlog.Error(\n\t\t\t\t\t\"Cowsay error\",\n\t\t\t\t\t\"Error\", err,\n\t\t\t\t)\n\t\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tcodeMark := []byte(\"```\")\n\t\t\tout := append(codeMark, say...)\n\t\t\tout = append(out, codeMark...)\n\t\t\tback := CowsayResponse{\n\t\t\t\tResponse_type: \"in_channel\",\n\t\t\t\tText: string(out),\n\t\t\t}\n\n\t\t\tresp, err := json.Marshal(back)\n\t\t\tif err != nil {\n\t\t\t\tlog.Error(\n\t\t\t\t\t\"Cannot marshal response\",\n\t\t\t\t\t\"Error\", err,\n\t\t\t\t\t\"Response\", 
string(resp),\n\t\t\t\t)\n\t\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tw.Header().Set(\"content-type\", \"application/json\")\n\t\t\tw.WriteHeader(http.StatusOK)\n\t\t\tw.Write(resp)\n\n}", "func Hey(remark string) string {\n\tremark = strings.TrimSpace(remark)\n\n\tswitch {\n\tcase isUppercase(remark) && isQuestion(remark):\n\t\treturn \"Calm down, I know what I'm doing!\"\n\tcase isUppercase(remark):\n\t\treturn \"Whoa, chill out!\"\n\tcase isQuestion(remark):\n\t\treturn \"Sure.\"\n\tcase isSilent(remark):\n\t\treturn \"Fine. Be that way!\"\n\tdefault:\n\t\treturn \"Whatever.\"\n\t}\n}", "func getThanks(message *client.Message) (string, error) {\n\n\tif message.ForwardInfo == nil {\n\t\treturn \"\", errors.New(\"not a forward\")\n\t}\n\n\tif message.ForwardInfo.Origin.MessageForwardOriginType() == client.TypeMessageForwardOriginChannel {\n\t\treturn \"\", errors.New(l.GetString(l.COMMANDS_THANK_CANT_THANK_CHANNELS))\n\t}\n\n\tif message.ForwardInfo.Origin.MessageForwardOriginType() == client.TypeMessageForwardOriginHiddenUser {\n\t\treturn fmt.Sprintf(l.GetString(l.COMMANDS_THANK_THANK_CAPTION), message.ForwardInfo.Origin.(*client.MessageForwardOriginHiddenUser).SenderName), nil\n\t}\n\n\tif message.ForwardInfo.Origin.MessageForwardOriginType() != client.TypeMessageForwardOriginUser {\n\t\treturn \"\", errors.New(l.GetString(l.COMMANDS_THANK_UNSUPPORTED_FORWARD_TYPE))\n\t}\n\n\tfwd := message.ForwardInfo.Origin.(*client.MessageForwardOriginUser)\n\tuser, err := api.GetUserByID(fwd.SenderUserId)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tif user.Type.UserTypeType() == client.TypeUserTypeBot {\n\t\treturn \"\", errors.New(l.GetString(l.COMMANDS_THANK_CANT_THANK_BOTS))\n\t}\n\n\treturn fmt.Sprintf(l.GetString(l.COMMANDS_THANK_THANK_CAPTION), user.FirstName), nil\n\n}", "func (h Hello) Hello(ctx context.Context, request *example.HelloRequest) (*example.HelloResponse, error) {\n\treturn 
&example.HelloResponse{\n\t\tResponse: fmt.Sprintf(\"Greetings! You said: %s\", request.Text),\n\t}, nil\n}", "func (bb *BasicBot) Say(msg string) error {\n\tif \"\" == msg {\n\t\treturn errors.New(\"BasicBot.Say: msg was empty.\")\n\t}\n\n\t// check if message is too large for IRC\n\tif len(msg) > 512 {\n\t\treturn errors.New(\"BasicBot.Say: msg exceeded 512 bytes\")\n\t}\n\t\n\t_, err := bb.conn.Write([]byte(fmt.Sprintf(\"PRIVMSG #%s :%s\\r\\n\", bb.Channel, msg)))\n\tif nil != err {\n\t\treturn err\n\t}\n\treturn nil\n}", "func Hey(remark string) string {\n\tif isSilence(remark) {\n\t\treturn \"Fine. Be that way!\"\n\t}\n\n\tif hasLetters(remark) {\n\t\tif isYelling(remark) {\n\t\t\tif isQuestion(remark) {\n\t\t\t\treturn \"Calm down, I know what I'm doing!\"\n\t\t\t}\n\n\t\t\treturn \"Whoa, chill out!\"\n\t\t}\n\t}\n\n\tif isQuestion(remark) {\n\t\treturn \"Sure.\"\n\t}\n\n\treturn \"Whatever.\"\n}", "func (s *server) Greet(ctx context.Context, req *greetpb.GreetRequest) (*greetpb.GreetResponse, error) {\n\tfmt.Printf(\"Greet function was invoked with : %v\\n\", req)\n\tfirstName := req.GetGreeting().GetFirstName()\n\tlastName := req.GetGreeting().GetLastName()\n\tresult := \"Hello \" + firstName + \" \" + lastName + \"\\n\"\n\tres := &greetpb.GreetResponse{Result: result}\n\treturn res, nil\n}", "func Say(client *ircutil.Client, command *ircutil.Command,\n\tmessage *ircutil.Message) {\n\tircutil.SendPrivmsg(client, message.Args[0], strings.Join(message.Args[1:],\n\t\t\" \"))\n}", "func response(ctx *fiber.Ctx, status int, msg fiber.Map) error {\n\treturn ctx.Status(status).JSON(msg)\n}", "func (eb englishBot) getGreeting() string {\n\treturn \"Hi there\"\n}", "func (eb englishBot) getGreeting() string {\n\treturn \"Hello There!\"\n}", "func say(key string) string {\n\tphrases := make(map[string]string)\n\tphrases[\"greeting_v1\"] = \"Hi! 👋 I'm a %s and together we can create some nice stickers!\"\n\tphrases[\"greeting_v2\"] = \"Hi! 
👋 I'm a %s, here to make custom stickers\"\n\tphrases[\"greeting_v3\"] = \"Hi! 👋 I'm a %s, chatbot for making custom stickers\"\n\tphrases[\"to_start_send_image\"] = \"To start making a sticker like the one shown below please send me a photo (jpeg or png format, not smaller than 150x150px and not bigger than 3500x2400px, 5Mb max)\"\n\tphrases[\"botName\"] = \"StickerBot\"\n\tphrases[\"perfect_one\"] = \"This one is perfect. I will save it in Templates in case you'll want to reuse it.\"\n\tphrases[\"replace_photo_or_provide_text\"] = \"If you'ld like to replace the image please click \\\"Replace image\\\". If it's Ok, please send me the text for your sticker (%s symbols max).\"\n\tphrases[\"replace_image\"] = \"Replace image\"\n\tphrases[\"resend_image\"] = \"Ok, please send me another one. Remember that it should be in jpeg or png format, not smaller than 150x150px and not bigger than 3500x2400px, 5Mb max.\"\n\tphrases[\"bad_image\"] = \"Sorry but this won't work. Please send me an image in jpeg or png format, not smaller than 150x150px and not bigger than 3500x2400px, 5Mb max.\"\n\tphrases[\"bad_phrase\"] = \"Sorry but your phrase should be longer than %s symbols. Please try again.\"\n\tphrases[\"didnt_get_that\"] = \"Sorry but I didn't get that.\"\n\n\treturn phrases[key]\n}", "func (e *Employee) SayHi() {\n\tfmt.Printf(\"Hi, I am %s, I work in %s. 
Call me on %s\\n\", e.name,\n\t\te.company, e.phone)\n}", "func (a AstonMartin) sayHiToBond() {\n fmt.Println(\"Hi Bond, James Bond!\")\n}", "func Hey(remark string) string {\n\tr := &Remark{value: strings.Trim(remark, \" \")}\n\n\treturn r.respond()\n}", "func Hi(name string) string {\n\treturn fmt.Sprintf(\"Hi, %s!\", name)\n}", "func Hey(remark string) string {\n\tremark = strings.TrimSpace(remark)\n\tresult := \"\"\n\tswitch {\n\tcase isQuestion(remark) && isShouting(remark):\n\t\tresult = \"Calm down, I know what I'm doing!\"\n\tcase isQuestion(remark) && !isShouting(remark):\n\t\tresult = \"Sure.\"\n\tcase !isQuestion(remark) && isShouting(remark):\n\t\tresult = \"Whoa, chill out!\"\n\tcase isEmpty(remark):\n\t\tresult = \"Fine. Be that way!\"\n\tdefault:\n\t\tresult = \"Whatever.\"\n\t}\n\treturn result\n}", "func (*server) Greet(ctx context.Context, req *greetpb.GreetRequest) (*greetpb.GreetResponse, error) {\n\tlog.Println(\"Greet function invoked\")\n\t// Get the greeting message\n\t// From that we can get the first name\n\t// We want to do this through the api from our pregenerated code\n\tfirstName := req.GetGreeting().GetFirstName()\n\tresult := \"Hello \" + firstName\n\t// Build our response\n\tresponse := &greetpb.GreetResponse{\n\t\tResult: result,\n\t}\n\t// Return the response\n\treturn response, nil\n}", "func respondToRaw(sock mangos.Socket) {\n\n\tmsg, err := sock.RecvMsg()\n\n\tif err != nil {\n\t\treturn\n\t}\n\n\tlog.Println(string(msg.Body))\n\n}", "func MainHandler(resp http.ResponseWriter, _ *http.Request) {\r\n resp.Write([]byte(\"Hi there! I'm PoGoBot!\"))\r\n}", "func Hey(input string) (output string) {\n\tinput = strings.TrimSpace(input)\n\tif input == strings.ToUpper(input) && strings.ContainsAny(input, \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\") {\n\t\treturn \"Whoa, chill out!\"\n\t}\n\tif strings.HasSuffix(input, \"?\") {\n\t\treturn \"Sure.\"\n\t}\n\tif input == \"\" {\n\t\treturn \"Fine. 
Be that way!\"\n\t}\n\treturn \"Whatever.\"\n}", "func SayHello(name string) string {\n\treturn \"Hello \" + name\n}", "func (MockGreeterServer) SayHello(context.Context, *HelloRequest) (*HelloReply, error) {\n\tvar res HelloReply\n\tif err := faker.FakeData(&res); err != nil {\n\t\treturn nil, err\n\t}\n\treturn &res, nil\n}", "func (*server) Greet(ctx context.Context, req *greetpb.GreetRequest) (*greetpb.GreetResponse, error) {\n\tfmt.Printf(\"Greet function was invoked with %v\", req)\n\n\t// get value from request\n\tfirstName := req.GetGreeting().GetFirstName()\n\n\t// operation\n\tresult := \"hello \" + firstName\n\n\t// create response\n\tres := &greetpb.GreetResponse{\n\t\tResult: result,\n\t}\n\treturn res, nil\n}", "func (s *server) Greeter(ctx context.Context, request *greeter.Greeting) (*greeter.Response, error) {\n\t// Get this pods name\n\tpodName := os.Getenv(\"POD_NAME\")\n\n\t// Send back this pod's name\n\treturn &greeter.Response{Response: podName}, nil\n}", "func (conn *Conn) Who(nick string) { conn.Raw(WHO + \" \" + nick) }", "func (h *facts) simpleResponse(message slack.Msg, text string) {\n\tif text == \"\" {\n\t\treturn\n\t}\n\tr := new(plugin.SlackResponse)\n\tr.Channel = message.Channel\n\tr.Options = append(r.Options, slack.MsgOptionText(text, false))\n\th.sink <- r\n}", "func (cmd *Command) Respond(response *Response) {\n\tcmd.response <- response\n}", "func Hey(remark string) string {\n\tr := strings.Trim(remark, \" \\t\\n\\r\")\n\tif r == \"\" {\n\t\treturn \"Fine. 
Be that way!\"\n\t}\n\tif IsShoutingQuestion(r) {\n\t\treturn \"Calm down, I know what I'm doing!\"\n\t}\n\tif IsShouting(r) {\n\t\treturn \"Whoa, chill out!\"\n\t}\n\tif IsQuestion(r) {\n\t\treturn \"Sure.\"\n\t}\n\treturn \"Whatever.\"\n}", "func Hey(remark string) string {\n\tremark = strings.TrimSpace(remark)\n\tisQuestion := strings.HasSuffix(remark, \"?\")\n\tisUpper := strings.ToUpper(remark) == remark\n\tisNoLetters := isUpper && strings.ToLower(remark) == remark\n\n\tswitch {\n\tcase isUpper && isQuestion && !isNoLetters:\n\t\treturn \"Calm down, I know what I'm doing!\"\n\tcase isUpper && !isNoLetters:\n\t\treturn \"Whoa, chill out!\"\n\tcase isQuestion:\n\t\treturn \"Sure.\"\n\tcase remark == \"\":\n\t\treturn \"Fine. Be that way!\"\n\tdefault:\n\t\treturn \"Whatever.\"\n\t}\n}", "func printGreeting(b bot) {\n\tfmt.Println(b.getGreeting())\n}", "func GreetUser(w http.ResponseWriter, r *http.Request, p httprouter.Params) {\n\n\tu := Resp{}\n\tu2 := Req{}\n\tjson.NewDecoder(r.Body).Decode(&u2)\n\n\tvar buffer bytes.Buffer\n\tbuffer.WriteString(\"Hello, \")\n\tbuffer.WriteString(u2.Name)\n\tbuffer.WriteString(\"!\")\n\tu.Greeting = buffer.String()\n\n\t// Marshal provided interface into JSON structure\n\tuj, _ := json.Marshal(u)\n\n\t// Write content-type, statuscode, payload\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(201)\n\tfmt.Fprintf(w, \"%s\", uj)\n}", "func (g *Greeter) Hello(ctx context.Context, req *proto.HelloRequest, rsp *proto.HelloResponse) error {\n\trsp.Greeting = \"Hello \" + req.Name\n\treturn nil\n}", "func (r *RestFulHello) Sayhello(b *rf.Context) {\n\tid := b.ReadPathParameter(\"userid\")\n\tlog.Printf(\"get user id: \" + id)\n\tb.Write([]byte(fmt.Sprintf(\"user %s from %d\", id, num)))\n}", "func callGreet() {\n\tc := make(chan string)\n\tgo Greet(c) //start new goroutine,ready state\n\tc <- \"zhang sir\"\n}", "func Handle(req []byte) string {\n\tcurrentTweet := tweet{}\n\n\tif err := json.Unmarshal(req, 
&currentTweet); err != nil {\n\t\treturn fmt.Sprintf(\"Unable to unmarshal event: %s\", err.Error())\n\t}\n\n\tif strings.Contains(currentTweet.Text, \"RT\") ||\n\t\tcurrentTweet.Text == \"alexellisuk_bot\" ||\n\t\tcurrentTweet.Username == \"colorisebot\" ||\n\t\tcurrentTweet.Username == \"scmsFaAS\" ||\n\t\tcurrentTweet.Username == \"openfaas\" {\n\t\treturn \"filtered the tweet out\"\n\t}\n\n\tdiscordURL := readSecret(\"twitter-discord-webhook-url\")\n\tdiscordMsg := discordMessage{\n\t\tContent: \"@\" + currentTweet.Username + \": \" + currentTweet.Text + \" (via \" + currentTweet.Link + \")\",\n\t\tUsername: \"@\" + currentTweet.Username,\n\t}\n\n\tbodyBytes, _ := json.Marshal(discordMsg)\n\thttpReq, err := http.NewRequest(http.MethodPost, discordURL, bytes.NewReader(bodyBytes))\n\tif err != nil {\n\t\tif err != nil {\n\t\t\tfmt.Fprintf(os.Stderr, \"resErr: %s\", err)\n\t\t\tos.Exit(1)\n\t\t}\n\t}\n\n\thttpReq.Header.Set(\"Content-Type\", \"application/json\")\n\n\tres, err := http.DefaultClient.Do(httpReq)\n\tif err != nil {\n\t\tfmt.Fprintf(os.Stderr, \"resErr: %s\", err)\n\t\tos.Exit(1)\n\t}\n\n\tif res.Body != nil {\n\t\tdefer res.Body.Close()\n\t}\n\n\tbodyRes, _ := ioutil.ReadAll(res.Body)\n\n\tif res.StatusCode != http.StatusAccepted &&\n\t\tres.StatusCode != http.StatusOK &&\n\t\tres.StatusCode != http.StatusNoContent {\n\t\tfmt.Fprintf(os.Stderr, \"unexpected status code: %d, body: %s\", res.StatusCode, string(bodyRes))\n\t\tos.Exit(1)\n\t}\n\n\treturn fmt.Sprintf(\"tweet forwarded [%d]\", res.StatusCode)\n}", "func (s *session) respondOK(format string, args ...interface{}) error {\n\treturn s.writer.PrintfLine(fmt.Sprintf(\"+OK %s\", format), args...)\n}", "func SayHi(name string) {\n fmt.Printf(\"Hi, %s\\n\", name)\n}" ]
[ "0.6887991", "0.6805804", "0.6371363", "0.63041806", "0.6247341", "0.60800767", "0.6031572", "0.6002164", "0.5963795", "0.59617203", "0.59487236", "0.5942278", "0.59154445", "0.5886197", "0.58427376", "0.5841882", "0.5812266", "0.5799674", "0.57988113", "0.5794499", "0.57636154", "0.5757245", "0.5754814", "0.57537985", "0.5737521", "0.571686", "0.57150143", "0.5710275", "0.56996405", "0.5694824", "0.5690012", "0.5672307", "0.5671814", "0.5665923", "0.56476146", "0.56211525", "0.56082845", "0.56053466", "0.5605226", "0.5604117", "0.5589505", "0.5587063", "0.5570717", "0.5562534", "0.55469793", "0.55465454", "0.55432224", "0.5536117", "0.5526833", "0.5526756", "0.55242485", "0.5520883", "0.55181706", "0.55092126", "0.55069685", "0.55061555", "0.5503038", "0.5499513", "0.5499357", "0.5498243", "0.54957455", "0.54929495", "0.54911244", "0.548515", "0.5483098", "0.5478637", "0.547395", "0.5472631", "0.5457804", "0.5447924", "0.5446592", "0.54451954", "0.5444776", "0.54424363", "0.5442407", "0.54415005", "0.54384357", "0.5437717", "0.54324996", "0.54310083", "0.5430652", "0.5430282", "0.5426404", "0.54233265", "0.5420302", "0.5419836", "0.5416053", "0.54160506", "0.5404262", "0.5403648", "0.5402142", "0.53990895", "0.5397159", "0.5394298", "0.5388584", "0.53856397", "0.5377592", "0.53749686", "0.53667355", "0.53664225" ]
0.59117913
13
NewPodmanDriver returns a new DriverPlugin implementation
func NewPodmanDriver(logger hclog.Logger) drivers.DriverPlugin { ctx, cancel := context.WithCancel(context.Background()) return &Driver{ eventer: eventer.NewEventer(ctx, logger), config: &PluginConfig{}, tasks: newTaskStore(), ctx: ctx, signalShutdown: cancel, logger: logger.Named(pluginName), } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewPodmanDriver(logger hclog.Logger) drivers.DriverPlugin {\n\tctx, cancel := context.WithCancel(context.Background())\n\tlogger = logger.Named(pluginName)\n\treturn &Driver{\n\t\teventer: eventer.NewEventer(ctx, logger),\n\t\tconfig: &Config{},\n\t\ttasks: newTaskStore(),\n\t\tctx: ctx,\n\t\tsignalShutdown: cancel,\n\t\tlogger: logger,\n\t}\n}", "func newPluginProvider(pluginBinDir string, provider kubeletconfig.CredentialProvider) (*pluginProvider, error) {\n\tmediaType := \"application/json\"\n\tinfo, ok := runtime.SerializerInfoForMediaType(codecs.SupportedMediaTypes(), mediaType)\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"unsupported media type %q\", mediaType)\n\t}\n\n\tgv, ok := apiVersions[provider.APIVersion]\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"invalid apiVersion: %q\", provider.APIVersion)\n\t}\n\n\tclock := clock.RealClock{}\n\n\treturn &pluginProvider{\n\t\tclock: clock,\n\t\tmatchImages: provider.MatchImages,\n\t\tcache: cache.NewExpirationStore(cacheKeyFunc, &cacheExpirationPolicy{clock: clock}),\n\t\tdefaultCacheDuration: provider.DefaultCacheDuration.Duration,\n\t\tlastCachePurge: clock.Now(),\n\t\tplugin: &execPlugin{\n\t\t\tname: provider.Name,\n\t\t\tapiVersion: provider.APIVersion,\n\t\t\tencoder: codecs.EncoderForVersion(info.Serializer, gv),\n\t\t\tpluginBinDir: pluginBinDir,\n\t\t\targs: provider.Args,\n\t\t\tenvVars: provider.Env,\n\t\t\tenviron: os.Environ,\n\t\t},\n\t}, nil\n}", "func NewPlugin(opts ...Option) *Plugin {\n\tp := &Plugin{}\n\n\tp.SetName(\"generator\")\n\tp.KVStore = &etcd.DefaultPlugin\n\tp.KVScheduler = &kvscheduler.DefaultPlugin\n\n\tfor _, o := range opts {\n\t\to(p)\n\t}\n\n\tp.Setup()\n\n\treturn p\n}", "func NewPlugin(namespace string, dfn plugin.Definition, cfg *plugin.WorkerConfig) *Plugin {\n\treturn &Plugin{\n\t\tName: dfn.Name,\n\t\tUUID: gouuid.NewV4(),\n\t\tResultType: dfn.ResultType,\n\t\tPodSpec: &dfn.PodSpec,\n\t\tNamespace: namespace,\n\t\tConfig: cfg,\n\t}\n}", "func newPlugin() (p 
*slackscot.Plugin) {\n\tp = new(slackscot.Plugin)\n\tp.Name = \"tester\"\n\tp.Commands = []slackscot.ActionDefinition{{\n\t\tMatch: func(m *slackscot.IncomingMessage) bool {\n\t\t\treturn strings.HasPrefix(m.NormalizedText, \"make\")\n\t\t},\n\t\tUsage: \"make `<something>`\",\n\t\tDescription: \"Have the test bot make something for you\",\n\t\tAnswer: func(m *slackscot.IncomingMessage) *slackscot.Answer {\n\t\t\treturn &slackscot.Answer{Text: \"Ready\"}\n\t\t},\n\t}}\n\n\treturn p\n}", "func NewPlugin(proto, path string, params ...string) *Plugin {\n\tif proto != \"unix\" && proto != \"tcp\" {\n\t\tpanic(\"Invalid protocol. Specify 'unix' or 'tcp'.\")\n\t}\n\tp := &Plugin{\n\t\texe: path,\n\t\tproto: proto,\n\t\tparams: params,\n\t\tinitTimeout: 2 * time.Second,\n\t\texitTimeout: 2 * time.Second,\n\t\thandler: NewDefaultErrorHandler(),\n\t\tmeta: meta(\"pingo\" + randstr(5)),\n\t\tobjsCh: make(chan *objects),\n\t\tconnCh: make(chan *conn),\n\t\tkillCh: make(chan *waiter),\n\t\texitCh: make(chan struct{}),\n\t}\n\treturn p\n}", "func New(_ runtime.Object, h framework.Handle) (framework.Plugin, error) {\n\treturn &PodState{handle: h}, nil\n}", "func newDriver() *driver {\n\treturn &driver{\n\t\tnetworks: map[string]*bridgeNetwork{},\n\t\tportAllocator: portallocator.Get(),\n\t}\n}", "func newPluginContainer() PluginContainer {\n\treturn new(pluginContainer)\n}", "func newPluginContainer() PluginContainer {\n\treturn new(pluginContainer)\n}", "func NewPluginCommand(cmd *cobra.Command, dockerCli *client.DockerCli) {\n}", "func (p *PodmanTestIntegration) Podman(args []string) *PodmanSessionIntegration {\n\tpodmanSession := p.PodmanBase(args)\n\treturn &PodmanSessionIntegration{podmanSession}\n}", "func NewPlugin(plugins func() discovery.Plugins, choices selector.Options) instance.Plugin {\n\tbase := &internal.Base{\n\t\tPlugins: plugins,\n\t\tChoices: choices,\n\t\tSelectFunc: SelectOne,\n\t}\n\treturn &impl{\n\t\tPlugin: base.Init(),\n\t}\n}", "func New(cfg *Config, 
logger logger.Logger, registerer prometheus.Registerer) (*Plugin, error) {\n\tservice := &Plugin{\n\t\tcfg: cfg,\n\t\tregisterer: registerer,\n\t\tLogger: logger.NewLogger(\"simplePlugin\"),\n\t}\n\treturn service, nil\n}", "func NewDriver() godfish.Driver { return &driver{} }", "func (p *PodmanTestIntegration) Podman(args []string) *PodmanSessionIntegration {\n\tpodmanSession := p.PodmanBase(args, false, false)\n\treturn &PodmanSessionIntegration{podmanSession}\n}", "func NewPlugin() container.Plugin {\n\treturn &plugin{}\n}", "func Init(config Config) pdfium.Pool {\n\t// Create an hclog.Logger\n\tlogger := hclog.New(&hclog.LoggerOptions{\n\t\tName: \"plugin\",\n\t\tOutput: os.Stdout,\n\t\tLevel: hclog.Debug,\n\t})\n\n\tvar handshakeConfig = plugin.HandshakeConfig{\n\t\tProtocolVersion: 1,\n\t\tMagicCookieKey: \"BASIC_PLUGIN\",\n\t\tMagicCookieValue: \"hello\",\n\t}\n\n\t// pluginMap is the map of plugins we can dispense.\n\tvar pluginMap = map[string]plugin.Plugin{\n\t\t\"pdfium\": &commons.PdfiumPlugin{},\n\t}\n\n\t// If we don't have a log callback, make the callback no-op.\n\tif config.LogCallback == nil {\n\t\tconfig.LogCallback = func(s string) {}\n\t}\n\n\tfactory := pool.NewPooledObjectFactory(\n\t\tfunc(goctx.Context) (interface{}, error) {\n\t\t\tnewWorker := &worker{}\n\n\t\t\tclient := plugin.NewClient(&plugin.ClientConfig{\n\t\t\t\tHandshakeConfig: handshakeConfig,\n\t\t\t\tPlugins: pluginMap,\n\t\t\t\tCmd: exec.Command(config.Command.BinPath, config.Command.Args...),\n\t\t\t\tLogger: logger,\n\t\t\t\tStartTimeout: config.Command.StartTimeout,\n\t\t\t})\n\n\t\t\trpcClient, err := client.Client()\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\traw, err := rpcClient.Dispense(\"pdfium\")\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tpdfium := raw.(commons.Pdfium)\n\n\t\t\tpong, err := pdfium.Ping()\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tif pong != \"Pong\" {\n\t\t\t\treturn nil, 
errors.New(\"Wrong ping/pong result\")\n\t\t\t}\n\n\t\t\tnewWorker.pluginClient = client\n\t\t\tnewWorker.rpcClient = rpcClient\n\t\t\tnewWorker.plugin = pdfium\n\n\t\t\treturn newWorker, nil\n\t\t}, nil, func(ctx goctx.Context, object *pool.PooledObject) bool {\n\t\t\tworker := object.Object.(*worker)\n\t\t\tif worker.pluginClient.Exited() {\n\t\t\t\tconfig.LogCallback(\"Worker exited\")\n\t\t\t\treturn false\n\t\t\t}\n\n\t\t\terr := worker.rpcClient.Ping()\n\t\t\tif err != nil {\n\t\t\t\tconfig.LogCallback(fmt.Sprintf(\"Error on RPC ping: %s\", err.Error()))\n\t\t\t\treturn false\n\t\t\t}\n\n\t\t\tpong, err := worker.plugin.Ping()\n\t\t\tif err != nil {\n\t\t\t\tconfig.LogCallback(fmt.Sprintf(\"Error on plugin ping:: %s\", err.Error()))\n\t\t\t\treturn false\n\t\t\t}\n\n\t\t\tif pong != \"Pong\" {\n\t\t\t\terr = errors.New(\"Wrong ping/pong result\")\n\t\t\t\tconfig.LogCallback(fmt.Sprintf(\"Error on plugin ping:: %s\", err.Error()))\n\t\t\t\treturn false\n\t\t\t}\n\n\t\t\treturn true\n\t\t}, nil, nil)\n\tp := pool.NewObjectPoolWithDefaultConfig(goctx.Background(), factory)\n\tp.Config = &pool.ObjectPoolConfig{\n\t\tBlockWhenExhausted: true,\n\t\tMinIdle: config.MinIdle,\n\t\tMaxIdle: config.MaxIdle,\n\t\tMaxTotal: config.MaxTotal,\n\t\tTestOnBorrow: true,\n\t\tTestOnReturn: true,\n\t\tTestOnCreate: true,\n\t}\n\n\tp.PreparePool(goctx.Background())\n\n\tmultiThreadedMutex.Lock()\n\tdefer multiThreadedMutex.Unlock()\n\n\tpoolRef := uuid.New()\n\n\t// Create a new PDFium pool.\n\tnewPool := &pdfiumPool{\n\t\tpoolRef: poolRef.String(),\n\t\tinstanceRefs: map[string]*pdfiumInstance{},\n\t\tlock: &sync.Mutex{},\n\t\tworkerPool: p,\n\t}\n\n\tpoolRefs[newPool.poolRef] = newPool\n\n\treturn newPool\n}", "func (*manager) PluginType() string { return base.PluginTypeDriver }", "func newPod(busChan MsgChan, opts *podOpts) *Pod {\n\tp := &Pod{\n\t\tonFuncLock: sync.RWMutex{},\n\t\tmessageChan: make(chan Message, defaultPodChanSize),\n\t\tfeedbackChan: make(chan Message, 
defaultPodChanSize),\n\t\tbusChan: busChan,\n\t\tmessageFilter: newMessageFilter(),\n\t\topts: opts,\n\t\tdead: &atomic.Value{},\n\t}\n\n\t// do some \"delayed setup\"\n\tp.opts.replayOnce = sync.Once{}\n\tp.dead.Store(false)\n\n\tp.start()\n\n\treturn p\n}", "func newPodManager(kClient kubernetes.Interface, policy osdnPolicy, overlayMTU uint32, routableMTU uint32, ovs *ovsController) *podManager {\n\tpm := newDefaultPodManager()\n\tpm.kClient = kClient\n\tpm.policy = policy\n\tpm.overlayMTU = overlayMTU\n\tpm.routableMTU = routableMTU\n\tpm.podHandler = pm\n\tpm.ovs = ovs\n\treturn pm\n}", "func NewPlugin() (shared.Plugin, error) {\n\treturn instance, nil\n}", "func Plugin(replayLayout *device.MemoryLayout) compiler.Plugin {\n\treturn &replayer{replayLayout: replayLayout}\n}", "func (kl *Kubelet) newVolumeMounterFromPlugins(spec *volume.Spec, pod *v1.Pod, opts volume.VolumeOptions) (volume.Mounter, error) {\n\tplugin, err := kl.volumePluginMgr.FindPluginBySpec(spec)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"can't use volume plugins for %s: %v\", spec.Name(), err)\n\t}\n\tphysicalMounter, err := plugin.NewMounter(spec, pod, opts)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to instantiate mounter for volume: %s using plugin: %s with a root cause: %v\", spec.Name(), plugin.GetPluginName(), err)\n\t}\n\tklog.V(10).InfoS(\"Using volume plugin for mount\", \"volumePluginName\", plugin.GetPluginName(), \"volumeName\", spec.Name())\n\treturn physicalMounter, nil\n}", "func NewPlugin() (*Plugin, error) {\n\tstore := NewStore()\n\tdockerClient, err := NewDockerClient(store)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create a docker client: %v\", err)\n\t}\n\treporter := NewReporter(store)\n\tplugin := &Plugin{\n\t\treporter: reporter,\n\t\tclients: []containerClient{\n\t\t\tdockerClient,\n\t\t},\n\t}\n\tfor _, client := range plugin.clients {\n\t\tgo client.Start()\n\t}\n\treturn plugin, nil\n}", "func NewDevicePluginManager(lister 
DeviceLister) *DevicePluginManager {\n\tstopCh := make(chan struct{})\n\n\t// First important signal channel is the os signal channel. We only care about (somewhat) small subset of available signals.\n\tsigs := make(chan os.Signal, 1)\n\tsignal.Notify(sigs, syscall.SIGTERM, syscall.SIGQUIT, syscall.SIGINT)\n\n\t// The other important channel is filesystem notification channel, responsible for watching device plugin directory.\n\tfsWatcher, _ := fsnotify.NewWatcher()\n\tfsWatcher.Add(pluginapi.DevicePluginPath)\n\n\tdpm := &DevicePluginManager{\n\t\tstopCh: stopCh,\n\t\tsignalCh: sigs,\n\t\tfsWatcher: fsWatcher,\n\t}\n\n\tgo dpm.handleSignals()\n\n\t// We can now move to functionality: first, the initial pool of devices is needed.\n\tdevices := lister.Discover()\n\n\t// As we use the pool to initialize device plugins (the actual gRPC servers) themselves.\n\tfor deviceClass, deviceIDs := range *devices {\n\t\tdpm.plugins = append(dpm.plugins, lister.NewDevicePlugin(deviceClass, deviceIDs))\n\t}\n\n\treturn dpm\n}", "func NewDriver(cfg *config.Config) *Driver {\n\tdriver := &Driver{\n\t\tcfg: cfg,\n\t}\n\n\treturn driver\n}", "func NewPlugin(next http.HandlerFunc) http.HandlerFunc {\n\tp, err := plugin.Open(\"plugin.so\")\n\tif err != nil {\n\t\tif strings.HasPrefix(err.Error(), \"plugin.Open\") {\n\t\t\tfmt.Printf(\"error: could not open plugin file 'plugin.so': %v\\n\", err)\n\t\t}\n\t\treturn next\n\t}\n\tf, err := p.Lookup(\"Handler\")\n\tif err != nil {\n\t\tfmt.Printf(\"error: could not find plugin Handler function %v\\n\", err)\n\t\treturn next\n\t}\n\tpluginFn, ok := f.(func(http.HandlerFunc) http.HandlerFunc)\n\tif !ok {\n\t\tfmt.Println(\"error: plugin Handler function should be 'func(http.HandlerFunc) http.HandlerFunc'\")\n\t\treturn next\n\t}\n\treturn pluginFn(next)\n}", "func NewPlugin(opts ...Option) *Plugin {\n\tp := &Plugin{}\n\n\tp.PluginName = \"service-label\"\n\n\tfor _, o := range opts {\n\t\to(p)\n\t}\n\n\treturn p\n}", "func New() (*Plugin, 
error) {\n\treturn &Plugin{\n\t\tHandler: admission.NewHandler(admission.Create, admission.Update),\n\t}, nil\n}", "func NewDriver(client *redis.Client, prefix string) *Driver {\n\treturn &Driver{\n\t\tclient: client,\n\t\tprefix: prefix,\n\t}\n}", "func New() *Plugin {\n\treturn &Plugin{}\n}", "func newMongoDriver(name string, opts MongoDBOptions) (*mongoDriver, error) {\n\thost, _ := os.Hostname() // nolint\n\n\tif err := opts.Validate(); err != nil {\n\t\treturn nil, errors.Wrap(err, \"invalid mongo driver options\")\n\t}\n\n\treturn &mongoDriver{\n\t\tname: name,\n\t\topts: opts,\n\t\tinstanceID: fmt.Sprintf(\"%s.%s.%s\", name, host, uuid.New()),\n\t}, nil\n}", "func NewVolumeDriver(mountDir string, driverName string) *VolumeDriver {\n\tvar d *VolumeDriver\n\n\tmountRoot = mountDir\n\n\td = &VolumeDriver{\n\t\trefCounts: refcount.NewRefCountsMap(),\n\t}\n\n\td.mountIDtoName = make(map[string]string)\n\td.refCounts.Init(d, mountDir, driverName)\n\n\tlog.WithFields(log.Fields{\n\t\t\"version\": version,\n\t}).Info(\"vSphere shared plugin started \")\n\n\treturn d\n}", "func NewDriver() *Driver {\n\treturn &Driver{\n\t\tVMDriver: &drivers.VMDriver{\n\t\t\tBaseDriver: &drivers.BaseDriver{},\n\t\t\tCPU: DefaultCPUs,\n\t\t\tMemory: DefaultMemory,\n\t\t},\n\t}\n}", "func NewPlugin(context context.T) (*Plugin, error) {\n\tvar plugin Plugin\n\n\tplugin.context = context\n\tplugin.birdwatcherfacade = facade.NewBirdwatcherFacade(context)\n\tplugin.localRepository = localpackages.NewRepository()\n\tplugin.packageServiceSelector = selectService\n\tplugin.isDocumentArchive = false\n\n\treturn &plugin, nil\n}", "func newPod(name string) *corev1.Pod {\n\treturn &corev1.Pod{\n\t\tTypeMeta: metav1.TypeMeta{},\n\t\tObjectMeta: metav1.ObjectMeta{Name: name},\n\t\tSpec: corev1.PodSpec{},\n\t\tStatus: corev1.PodStatus{},\n\t}\n}", "func NewPluginBuilder(ctx *runctx.RunContext) (shared.PluginBuilder, error) {\n\t// We're a host. 
Start by launching the plugin process.\n\tlogrus.SetOutput(os.Stdout)\n\n\tbuilders := map[string]shared.PluginBuilder{}\n\n\tfor _, a := range ctx.Cfg.Build.Artifacts {\n\t\tp := a.BuilderPlugin.Name\n\t\tif _, ok := builders[p]; ok {\n\t\t\tcontinue\n\t\t}\n\t\tcmd := exec.Command(p)\n\t\tif _, ok := SkaffoldCorePluginExecutionMap[p]; ok {\n\t\t\texecutable, err := os.Executable()\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.Wrap(err, \"getting executable path\")\n\t\t\t}\n\t\t\tcmd = exec.Command(executable)\n\t\t\tcmd.Env = append(os.Environ(), []string{fmt.Sprintf(\"%s=%s\", constants.SkaffoldPluginKey, constants.SkaffoldPluginValue),\n\t\t\t\tfmt.Sprintf(\"%s=%s\", constants.SkaffoldPluginName, p)}...)\n\t\t}\n\n\t\tclient := plugin.NewClient(&plugin.ClientConfig{\n\t\t\tStderr: os.Stderr,\n\t\t\tSyncStderr: os.Stderr,\n\t\t\tSyncStdout: os.Stdout,\n\t\t\tManaged: true,\n\t\t\tHandshakeConfig: shared.Handshake,\n\t\t\tPlugins: shared.PluginMap,\n\t\t\tCmd: cmd,\n\t\t})\n\n\t\tlogrus.Debugf(\"Starting plugin with command: %+v\", cmd)\n\n\t\t// Connect via RPC\n\t\trpcClient, err := client.Client()\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"connecting via rpc\")\n\t\t}\n\t\tlogrus.Debugf(\"plugin started.\")\n\t\t// Request the plugin\n\t\traw, err := rpcClient.Dispense(p)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"requesting rpc plugin\")\n\t\t}\n\t\tpluginBuilder := raw.(shared.PluginBuilder)\n\t\tbuilders[p] = pluginBuilder\n\t}\n\n\tb := &Builder{\n\t\tBuilders: builders,\n\t}\n\n\tlogrus.Debugf(\"Calling Init() for all plugins.\")\n\tif err := b.Init(ctx); err != nil {\n\t\tplugin.CleanupClients()\n\t\treturn nil, err\n\t}\n\treturn b, nil\n}", "func NewPlugin(name string, fn GenerateConfigsFunc) *Plugin {\n\treturn &Plugin{name: name, generate: fn}\n}", "func InitDockPlugin(netplugin *plugin.NetPlugin) error {\n\tnetPlugin = netplugin\n\thostname, err := os.Hostname()\n\tif err != nil {\n\t\tlog.Fatalf(\"Could not 
retrieve hostname: %v\", err)\n\t}\n\n\tvar (\n\t\ttenantName = \"default\"\n\t)\n\n\tlog.Debugf(\"Configuring router\")\n\n\trouter := mux.NewRouter()\n\ts := router.Headers(\"Accept\", \"application/vnd.docker.plugins.v1.1+json\").\n\t\tMethods(\"POST\").Subrouter()\n\n\tdispatchMap := map[string]func(http.ResponseWriter, *http.Request){\n\t\t\"/Plugin.Activate\": activate(hostname),\n\t\t\"/Plugin.Deactivate\": deactivate(hostname),\n\t\t\"/NetworkDriver.GetCapabilities\": getCapability(),\n\t\t\"/NetworkDriver.CreateNetwork\": createNetwork(),\n\t\t\"/NetworkDriver.DeleteNetwork\": deleteNetwork(),\n\t\t\"/NetworkDriver.CreateEndpoint\": createEndpoint(tenantName, hostname),\n\t\t\"/NetworkDriver.DeleteEndpoint\": deleteEndpoint(tenantName, hostname),\n\t\t\"/NetworkDriver.EndpointOperInfo\": endpointInfo,\n\t\t\"/NetworkDriver.Join\": join(),\n\t\t\"/NetworkDriver.Leave\": leave(),\n\t}\n\n\tfor dispatchPath, dispatchFunc := range dispatchMap {\n\t\ts.HandleFunc(dispatchPath, logHandler(dispatchPath, dispatchFunc))\n\t}\n\n\ts.HandleFunc(\"/NetworkDriver.{*}\", unknownAction)\n\n\tdriverPath := path.Join(pluginPath, driverName) + \".sock\"\n\tos.Remove(driverPath)\n\tos.MkdirAll(pluginPath, 0700)\n\n\tgo func() {\n\t\tl, err := net.ListenUnix(\"unix\", &net.UnixAddr{Name: driverPath, Net: \"unix\"})\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tlog.Infof(\"docker plugin listening on %s\", driverPath)\n\t\thttp.Serve(l, router)\n\t\tl.Close()\n\t\tlog.Infof(\"docker plugin closing %s\", driverPath)\n\t}()\n\n\treturn nil\n}", "func newPod(ctx context.Context, cl client.Client, ns, name, image string, cmd []string) (*corev1.Pod, error) {\n\tc := corev1.Container{\n\t\tName: name,\n\t\tImage: image,\n\t\tCommand: cmd,\n\t}\n\tp := &corev1.Pod{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: name,\n\t\t\tNamespace: ns,\n\t\t},\n\t\tSpec: corev1.PodSpec{\n\t\t\tContainers: []corev1.Container{c},\n\t\t\t// Kill the pod immediately so it exits quickly on 
deletion.\n\t\t\tTerminationGracePeriodSeconds: pointer.Int64Ptr(0),\n\t\t},\n\t}\n\tif err := cl.Create(ctx, p); err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create pod %s/%s: %v\", p.Namespace, p.Name, err)\n\t}\n\treturn p, nil\n}", "func NewDriver(root string) *Driver {\n\treturn &Driver{\n\t\troot: root,\n\t}\n}", "func New(tstore plugins.TstoreClient, settings []backend.PluginSetting, dataDir string) (*usermdPlugin, error) {\n\t// Create plugin data directory\n\tdataDir = filepath.Join(dataDir, usermd.PluginID)\n\terr := os.MkdirAll(dataDir, 0700)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &usermdPlugin{\n\t\ttstore: tstore,\n\t\tdataDir: dataDir,\n\t}, nil\n}", "func New(cfg provider.InitConfig) (*P, error) {\n\tif err := os.MkdirAll(unitDir, 0750); err != nil {\n\t\treturn nil, err\n\t}\n\tm, err := manager.New(unitDir, false)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tp := &P{m: m}\n\tswitch system.ID() {\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"unsupported system\")\n\tcase \"debian\", \"ubuntu\":\n\t\tp.pkg = new(packages.DebianPackageManager)\n\n\t\t// Just installed pre-requisites instead of pointing to the docs.\n\t\tklog.Infof(\"Installing %s, to prevent installed daemons from starting\", \"policyrcd-script-zg2\")\n\t\tok, err := p.pkg.Install(\"policyrcd-script-zg2\", \"\")\n\t\tif err != nil {\n\t\t\tklog.Warningf(\"Failed to install %s, %s. 
Continuing anyway\", \"policyrcd-script-zg2\", err)\n\t\t}\n\t\tif ok {\n\t\t\tklog.Infof(\"%s is already installed\", \"policyrcd-script-zg2\")\n\t\t}\n\n\tcase \"arch\":\n\t\tp.pkg = new(packages.ArchlinuxPackageManager)\n\tcase \"noop\":\n\t\tp.pkg = new(packages.NoopPackageManager)\n\t}\n\n\tp.rm = cfg.ResourceManager\n\tp.DaemonPort = cfg.DaemonPort\n\tp.ClusterDomain = cfg.KubeClusterDomain\n\n\tif cfg.ConfigPath == \"\" {\n\t\treturn p, nil\n\t}\n\n\tclientset, err := nodeutil.ClientsetFromEnv(cfg.ConfigPath)\n\tif err != nil {\n\t\treturn p, err\n\t}\n\t// Get new clientset.\n\tw := newWatcher(clientset)\n\tgo func() {\n\t\tif err := w.run(p); err != nil {\n\t\t\tklog.Fatal(err)\n\t\t}\n\t}()\n\tp.w = w\n\treturn p, nil\n}", "func New(config *runtime.Unknown, handle framework.FrameworkHandle) (framework.Plugin, error) {\n\tpodLister := handle.SharedInformerFactory().Core().V1().Pods().Lister()\n\tew := NewEtcdWrapper()\n\tconf := ctrl.GetConfigOrDie()\n\tclient, err := runtimeClient.New(conf, runtimeClient.Options{\n\t\tScheme: scheme,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tam := &Antman{\n\t\tframeworkHandle: handle,\n\t\tpodLister: podLister,\n\t\tclock: util.RealClock{},\n\t\tetcdWrapper: ew,\n\t\truntimeCli: client,\n\t}\n\tpodInformer := handle.SharedInformerFactory().Core().V1().Pods().Informer()\n\tpodInformer.AddEventHandler(\n\t\tcache.FilteringResourceEventHandler{\n\t\t\tFilterFunc: func(obj interface{}) bool {\n\t\t\t\tswitch t := obj.(type) {\n\t\t\t\tcase *v1.Pod:\n\t\t\t\t\treturn true\n\t\t\t\tcase cache.DeletedFinalStateUnknown:\n\t\t\t\t\tif _, ok := t.Obj.(*v1.Pod); ok {\n\t\t\t\t\t\treturn true\n\t\t\t\t\t}\n\t\t\t\t\treturn false\n\t\t\t\tdefault:\n\t\t\t\t\treturn false\n\t\t\t\t}\n\t\t\t},\n\t\t\tHandler: cache.ResourceEventHandlerFuncs{\n\t\t\t\tUpdateFunc: am.podUpdate,\n\t\t\t},\n\t\t},\n\t)\n\treturn am, nil\n}", "func New() (d *Driver) {\n\treturn &Driver{}\n}", "func (o PluginDnsNsReg) NewPlugin(ctx 
*core.PluginCtx, initJson []byte) (*core.PluginBase, error) {\n\treturn NewDnsNs(ctx, initJson)\n}", "func newPodForCR(cr *sdewanv1alpha1.Sdewan) *corev1.Pod {\n\tlabels := map[string]string{\n\t\t\"app\": cr.Name,\n\t}\n\tpriv := true\n\tvar netmaps []map[string]interface{}\n\tfor _, net := range cr.Spec.Networks {\n\t\tnetmaps = append(netmaps, map[string]interface{}{\n\t\t\t\"name\": net.Name,\n\t\t\t\"interface\": net.Interface,\n\t\t\t\"defaultGateway\": fmt.Sprintf(\"%t\", net.DefaultGateway),\n\t\t})\n\t}\n\tnetjson, _ := json.MarshalIndent(netmaps, \"\", \" \")\n\tvolumes := []corev1.Volume{\n\t\t{\n\t\t\tName: cr.Name,\n\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\tConfigMap: &corev1.ConfigMapVolumeSource{\n\t\t\t\t\tLocalObjectReference: corev1.LocalObjectReference{Name: cr.Name},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\treturn &corev1.Pod{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: cr.Name,\n\t\t\tNamespace: cr.Namespace,\n\t\t\tLabels: labels,\n\t\t\tAnnotations: map[string]string{\n\t\t\t\t\"k8s.v1.cni.cncf.io/networks\": `[{ \"name\": \"ovn-networkobj\"}]`,\n\t\t\t\t\"k8s.plugin.opnfv.org/nfn-network\": `{ \"type\": \"ovn4nfv\", \"interface\": ` + string(netjson) + \"}\",\n\t\t\t},\n\t\t},\n\t\tSpec: corev1.PodSpec{\n\t\t\tContainers: []corev1.Container{\n\t\t\t\t{\n\t\t\t\t\tName: \"sdewan\",\n\t\t\t\t\tImage: OpenwrtTag,\n\t\t\t\t\tCommand: []string{\"/bin/sh\", \"/tmp/sdewan/entrypoint.sh\"},\n\t\t\t\t\tImagePullPolicy: corev1.PullIfNotPresent,\n\t\t\t\t\tSecurityContext: &corev1.SecurityContext{\n\t\t\t\t\t\tPrivileged: &priv,\n\t\t\t\t\t},\n\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: cr.Name,\n\t\t\t\t\t\t\tReadOnly: true,\n\t\t\t\t\t\t\tMountPath: \"/tmp/sdewan\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tNodeSelector: map[string]string{\"kubernetes.io/hostname\": cr.Spec.Node},\n\t\t\tVolumes: volumes,\n\t\t},\n\t}\n}", "func newDriverV2(options *DriverOptions) *DriverV2 
{\n\tklog.Warning(\"Using DriverV2\")\n\tdriver := DriverV2{}\n\tdriver.Name = options.DriverName\n\tdriver.Version = driverVersion\n\tdriver.NodeID = options.NodeID\n\tdriver.VolumeAttachLimit = options.VolumeAttachLimit\n\tdriver.volumeLocks = volumehelper.NewVolumeLocks()\n\tdriver.perfOptimizationEnabled = options.EnablePerfOptimization\n\tdriver.cloudConfigSecretName = options.CloudConfigSecretName\n\tdriver.cloudConfigSecretNamespace = options.CloudConfigSecretNamespace\n\tdriver.customUserAgent = options.CustomUserAgent\n\tdriver.userAgentSuffix = options.UserAgentSuffix\n\tdriver.useCSIProxyGAInterface = options.UseCSIProxyGAInterface\n\tdriver.enableOtelTracing = options.EnableOtelTracing\n\tdriver.ioHandler = azureutils.NewOSIOHandler()\n\tdriver.hostUtil = hostutil.NewHostUtil()\n\n\ttopologyKey = fmt.Sprintf(\"topology.%s/zone\", driver.Name)\n\treturn &driver\n}", "func createPlugin(pkg parse.Package, errChan chan error) {\n\tcmd := exec.Command(\"go\", \"build\", \"-buildmode=plugin\",\n\t\t\"-o\", \"./\"+path.Join(PluginFolder, pkg.Name+\".so\"),\n\t\t\"./\"+path.Join(PluginFolder, pkg.Path))\n\n\t// create output buffer for error logging\n\tvar outBuf bytes.Buffer\n\tcmd.Stdout = &outBuf\n\tcmd.Stderr = &outBuf\n\n\t// copy env and turn off go modules\n\tcmd.Env = os.Environ()\n\tcmd.Env = append(cmd.Env, \"GO111MODULE=off\")\n\n\terr := cmd.Run()\n\tif err != nil {\n\t\terr = fmt.Errorf(\"Failed to build plugin for package:%s\\n%s\", pkg.Name, outBuf.String())\n\t}\n\n\terrChan <- err\n}", "func NewDriver(root string, client *pilosa.Client) *Driver {\n\treturn &Driver{\n\t\troot: root,\n\t\tclient: client,\n\t}\n}", "func NewPluginRunner(plugin interface{}, opts ...RunnerOption) *cobra.Command {\n\tk := &PluginRunner{\n\t\tplugin: plugin,\n\t\tconfig: func(*cobra.Command, []string) ([]byte, error) { return nil, nil },\n\t\tgenerate: func() (resmap.ResMap, error) { return resmap.New(), nil },\n\t\ttransform: func(resmap.ResMap) error { return nil 
},\n\t\tprint: asYaml,\n\t}\n\n\t// Setup the command run stages\n\tk.cmd = &cobra.Command{\n\t\tPreRunE: k.preRun,\n\t\tRunE: k.run,\n\t\tPostRunE: k.postRun,\n\t}\n\n\t// Establish generate and transform functions\n\tif p, ok := plugin.(resmap.Generator); ok {\n\t\tk.generate = p.Generate\n\t}\n\tif p, ok := plugin.(resmap.Transformer); ok {\n\t\tk.generate = k.newResMapFromStdin\n\t\tk.transform = p.Transform\n\t}\n\n\t// Apply the runner options\n\tfor _, opt := range opts {\n\t\topt(k)\n\t}\n\n\treturn k.cmd\n}", "func (o PluginDnsCReg) NewPlugin(ctx *core.PluginCtx, initJson []byte) (*core.PluginBase, error) {\n\treturn NewDnsClient(ctx, initJson)\n}", "func NewPlugin(name string, config *common.PluginConfig) (*ipamPlugin, error) {\n\t// Setup base plugin.\n\tplugin, err := cni.NewPlugin(name, config.Version)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Setup address manager.\n\tam, err := ipam.NewAddressManager()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Create IPAM plugin.\n\tipamPlg := &ipamPlugin{\n\t\tPlugin: plugin,\n\t\tam: am,\n\t}\n\n\tconfig.IpamApi = ipamPlg\n\n\treturn ipamPlg, nil\n}", "func NewPlugin(name string, path string, args []string, config skyconfig.Configuration) Plugin {\n\tfactory := transportFactories[name]\n\tif factory == nil {\n\t\tpanic(fmt.Errorf(\"unable to find plugin transport '%v'\", name))\n\t}\n\tp := Plugin{\n\t\ttransport: factory.Open(path, args, config),\n\t\tgatewayMap: map[string]*router.Gateway{},\n\t}\n\treturn p\n}", "func plugin_init() unsafe.Pointer {\n\t// If your plugin needs to do any initialization, you can\n\t// either put it here or implement a Load() method.\n\t// ...\n\n\t// Then you must call the following function.\n\treturn nbdkit.PluginInitialize(pluginName, &DiskPlugin{})\n}", "func newPodForPolicy(name, ns string, node *corev1.Node) *corev1.Pod {\n\tlabels := map[string]string{\n\t\t\"appName\": name,\n\t\t\"appNamespace\": ns,\n\t}\n\ttrueVal := true\n\thostVolTypeDir := 
corev1.HostPathDirectory\n\treturn &corev1.Pod{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: GetInstallerPodName(name, ns, node),\n\t\t\tNamespace: config.GetOperatorNamespace(),\n\t\t\tLabels: labels,\n\t\t},\n\t\tSpec: corev1.PodSpec{\n\t\t\tContainers: []corev1.Container{\n\t\t\t\t{\n\t\t\t\t\tName: \"policy-installer\",\n\t\t\t\t\t// TODO(jaosorior): Add udica image or equivalent to a\n\t\t\t\t\t// more formal registry\n\t\t\t\t\tImage: \"quay.io/jaosorior/udica\",\n\t\t\t\t\tCommand: []string{\"/bin/sh\"},\n\t\t\t\t\tArgs: []string{\"-c\", \"semodule -vi /tmp/policy/*.cil /usr/share/udica/templates/*cil;\"},\n\t\t\t\t\tSecurityContext: &corev1.SecurityContext{\n\t\t\t\t\t\tPrivileged: &trueVal,\n\t\t\t\t\t},\n\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"fsselinux\",\n\t\t\t\t\t\t\tMountPath: \"/sys/fs/selinux\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"etcselinux\",\n\t\t\t\t\t\t\tMountPath: \"/etc/selinux\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"varlibselinux\",\n\t\t\t\t\t\t\tMountPath: \"/var/lib/selinux\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"policyvolume\",\n\t\t\t\t\t\t\tMountPath: \"/tmp/policy\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t// This container needs to keep running so we can run the uninstall script.\n\t\t\t\t{\n\t\t\t\t\tName: \"policy-uninstaller\",\n\t\t\t\t\t// NOTE(jaosorior): Add udica image or equivalent to a\n\t\t\t\t\t// more formal registry\n\t\t\t\t\tImage: \"quay.io/jaosorior/udica\",\n\t\t\t\t\tCommand: []string{\"/bin/sh\"},\n\t\t\t\t\tArgs: []string{\"-c\", \"trap 'exit 0' SIGINT SIGTERM; while true; do sleep infinity; done;\"},\n\t\t\t\t\tLifecycle: &corev1.Lifecycle{\n\t\t\t\t\t\tPreStop: &corev1.Handler{\n\t\t\t\t\t\t\tExec: &corev1.ExecAction{\n\t\t\t\t\t\t\t\tCommand: []string{\"/bin/sh\", \"-c\", fmt.Sprintf(\"semodule -vr '%s'\", GetPolicyName(name, 
ns))},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tSecurityContext: &corev1.SecurityContext{\n\t\t\t\t\t\tPrivileged: &trueVal,\n\t\t\t\t\t},\n\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"fsselinux\",\n\t\t\t\t\t\t\tMountPath: \"/sys/fs/selinux\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"etcselinux\",\n\t\t\t\t\t\t\tMountPath: \"/etc/selinux\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"varlibselinux\",\n\t\t\t\t\t\t\tMountPath: \"/var/lib/selinux\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"policyvolume\",\n\t\t\t\t\t\t\tMountPath: \"/tmp/policy\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t// NOTE(jaosorior): use another service account\n\t\t\tServiceAccountName: \"security-profiles-operator\",\n\t\t\tRestartPolicy: corev1.RestartPolicyNever,\n\t\t\tNodeName: node.Name,\n\t\t\tVolumes: []corev1.Volume{\n\t\t\t\t{\n\t\t\t\t\tName: \"fsselinux\",\n\t\t\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\t\t\tHostPath: &corev1.HostPathVolumeSource{\n\t\t\t\t\t\t\tPath: \"/sys/fs/selinux\",\n\t\t\t\t\t\t\tType: &hostVolTypeDir,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tName: \"etcselinux\",\n\t\t\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\t\t\tHostPath: &corev1.HostPathVolumeSource{\n\t\t\t\t\t\t\tPath: \"/etc/selinux\",\n\t\t\t\t\t\t\tType: &hostVolTypeDir,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tName: \"varlibselinux\",\n\t\t\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\t\t\tHostPath: &corev1.HostPathVolumeSource{\n\t\t\t\t\t\t\tPath: \"/var/lib/selinux\",\n\t\t\t\t\t\t\tType: &hostVolTypeDir,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tName: \"policyvolume\",\n\t\t\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\t\t\tConfigMap: &corev1.ConfigMapVolumeSource{\n\t\t\t\t\t\t\tLocalObjectReference: corev1.LocalObjectReference{\n\t\t\t\t\t\t\t\tName: GetPolicyConfigMapName(name, 
ns),\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tTolerations: []corev1.Toleration{\n\t\t\t\t{\n\t\t\t\t\tKey: \"node-role.kubernetes.io/master\",\n\t\t\t\t\tOperator: \"Exists\",\n\t\t\t\t\tEffect: \"NoSchedule\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func NewPlugin(name, version, endpointType string) (*Plugin, error) {\n\t// Setup base plugin.\n\tplugin, err := common.NewPlugin(name, version)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Plugin{\n\t\tPlugin: plugin,\n\t\tEndpointType: endpointType,\n\t}, nil\n}", "func New(b bot.Bot) *BeersPlugin {\n\tif _, err := b.DB().Exec(`create table if not exists untappd (\n\t\t\tid integer primary key,\n\t\t\tuntappdUser string,\n\t\t\tchannel string,\n\t\t\tlastCheckin integer,\n\t\t\tchanNick string\n\t\t);`); err != nil {\n\t\tlog.Fatal().Err(err)\n\t}\n\tp := &BeersPlugin{\n\t\tb: b,\n\t\tc: b.Config(),\n\t\tdb: b.DB(),\n\n\t\tuntapdCache: make(map[int]bool),\n\t}\n\n\tp.register()\n\tb.Register(p, bot.Help, p.help)\n\n\tp.registerWeb()\n\n\ttoken := p.c.Get(\"Untappd.Token\", \"NONE\")\n\tif token == \"NONE\" || token == \"\" {\n\t\tlog.Error().Msgf(\"No untappd token. 
Checking disabled.\")\n\t\treturn p\n\t}\n\n\tfor _, channel := range p.c.GetArray(\"Untappd.Channels\", []string{}) {\n\t\tgo p.untappdLoop(b.DefaultConnector(), channel)\n\t}\n\n\treturn p\n}", "func NewDriver(baseURL string, token string) (*Driver, error) {\n\traw, err := hype.New(baseURL)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\td := &Driver{\n\t\traw,\n\t\ttoken,\n\t\thype.NewHeader(\"Accept\", \"application/json\"),\n\t\thype.NewHeader(\"Content-Type\", \"application/json\"),\n\t\thype.NewHeader(\"User-Agent\", \"fbz/0.1.0 (https://github.com/ess/fbz)\"),\n\t}\n\n\treturn d, nil\n}", "func NewDriver(ctx context.Context, prefix string) workers.Driver {\n\tdriver, err := redis.NewDriver(\n\t\tctx,\n\t\tredis.WithQueuePrefix(prefix),\n\t\tredis.WithRedisPool(client),\n\t)\n\tassert.Nil(err)\n\treturn driver\n}", "func NewWidgetDevicePlugin(resourceConfigs string, resourceName string) *WidgetDevicePlugin {\n\tlog.Debugf(\"other instance of GetWidgetDevices\")\n\tdevices, err := GetWidgetDevices(resourceConfigs)\n\tif err != nil {\n\t\tlog.Errorf(\"Error detecting widget devices: %v\", err)\n\t\treturn nil\n\t}\n\n\t// because we can run multiple instances of this plugin\n\t// we need to uniquely name the socket for each\n\tencodedResourceName := base64.StdEncoding.EncodeToString([]byte(resourceName))\n\tlog.Debugf(\"Base64 encoded Resource Name: %s\", encodedResourceName)\n\n\tvar devs []*pluginapi.Device\n\tdevMap := make(map[string]Device)\n\tfor _, device := range devices {\n\t\t// id := device.RdmaDevice.Name\n\t\tid := device.Name\n\t\tdevs = append(devs, &pluginapi.Device{\n\t\t\tID: id,\n\t\t\tHealth: pluginapi.Healthy,\n\t\t})\n\t\tdevMap[id] = device\n\t}\n\n\treturn &WidgetDevicePlugin{\n\t\t// masterNetDevice: resourceConfigs,\n\t\tsocket: fmt.Sprintf(serverSock, encodedResourceName),\n\t\tdevs: devs,\n\t\tdevices: devMap,\n\t\tstop: make(chan interface{}),\n\t\thealth: make(chan *pluginapi.Device),\n\t}\n}", "func New(L *lua.LState, rootDir, 
cacheDir, pluginName, host, dbname, user, password string, port int, params map[string]string) error {\n\tpool := plugins.NewPool(rootDir, cacheDir)\n\tconn := &plugins.Connection{\n\t\tHost: host,\n\t\tDBName: dbname,\n\t\tPort: port,\n\t\tUserName: user,\n\t\tPassword: password,\n\t\tParams: params,\n\t}\n\tconnections := make(map[string]*plugins.Connection)\n\tconnections[`target`] = conn\n\tconnections[`storage`] = conn\n\tf := &framework{\n\t\tpool: pool,\n\t\tpluginName: pluginName,\n\t\thost: pluginName,\n\t\tsecrets: secrets.New(``),\n\t}\n\tpool.RegisterHost(f.host, connections)\n\tud := L.NewUserData()\n\tud.Value = f\n\tL.SetMetatable(ud, L.GetTypeMetatable(`testing_framework_ud`))\n\tL.SetGlobal(\"tested_plugin\", ud)\n\treturn nil\n}", "func New() *Plugin {\n\treturn &Plugin{metricsConfigs: make(map[string]configReader.Metric)}\n}", "func NewOTelPlugin(db *DB, opts ...Option) (Plugin, error) {\n\tc, err := newConfig(opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tp := &otelPlugin{\n\t\ttracerProvider: c.tracerProvider,\n\t\tmeterProvider: c.meterProvider,\n\t\toperationName: c.operationName,\n\t\tspanNameFormatter: c.spanNameFormatter,\n\t\ttracer: c.tracer,\n\t\tmeter: c.meter,\n\t\tmetricDuration: c.metricDuration,\n\t}\n\n\tswitch dialector := db.Dialector.(type) {\n\tcase *dialectmysql.Dialector:\n\t\tcfg, err := mysql.ParseDSN(dialector.DSN)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tcfg.Passwd = \"\"\n\n\t\tnetPeerIP, netPeerPort, err := parseAddr(cfg.Addr)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tp.attrs = append(p.attrs, semconv.DBSystemMySQL)\n\t\tp.attrs = append(p.attrs, semconv.DBConnectionStringKey.String(cfg.FormatDSN()))\n\t\tp.attrs = append(p.attrs, semconv.DBUserKey.String(cfg.User))\n\t\tp.attrs = append(p.attrs, semconv.NetPeerIPKey.String(netPeerIP))\n\t\tp.attrs = append(p.attrs, semconv.NetPeerPortKey.Int(netPeerPort))\n\t\tp.attrs = append(p.attrs, parseNetTransport(cfg.Net))\n\t\tp.attrs = 
append(p.attrs, semconv.DBNameKey.String(cfg.DBName))\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"unsupported dialector type\")\n\t}\n\n\treturn p, nil\n}", "func New(config config.Config) Plugins {\n\tpkg := Plugins{\n\t\tclient: http.NewClient(config),\n\t\tpager: &http.LinkHeaderPager{},\n\t}\n\n\treturn pkg\n}", "func New(options ...func(*Plugin)) config.Plugin {\n\tp := &Plugin{\n\t\tcache: &configCache{},\n\t}\n\tfor _, opt := range options {\n\t\topt(p)\n\t}\n\n\treturn p\n}", "func NewDockerVolumePlugin(options *Options) (*DockerVolumePlugin, error) {\n\thostUrl := GetHostURL(hostName, port)\n\tvar err error\n\n\tdvp := &DockerVolumePlugin{\n\t\tstripK8sOpts: options.StripK8sFromOptions,\n\t\tclient: connectivity.NewHTTPClientWithTimeout(hostUrl, dvpSocketTimeout),\n\t\tListOfStorageResourceOptions: options.ListOfStorageResourceOptions,\n\t\tFactorForConversion: options.FactorForConversion,\n\t}\n\n\tif options.SupportsCapabilities {\n\t\t// test connectivity\n\t\t_, err = dvp.Capabilities()\n\t\tif err != nil {\n\t\t\treturn dvp, err\n\t\t}\n\t}\n\n\treturn dvp, nil\n\n}", "func (opts Options) New(req *pluginpb.CodeGeneratorRequest) (*Plugin, error) {\n\tgen := &Plugin{\n\t\tRequest: req,\n\t\tFilesByPath: make(map[string]*File),\n\t\tfileReg: new(protoregistry.Files),\n\t\tenumsByName: make(map[protoreflect.FullName]*Enum),\n\t\tmessagesByName: make(map[protoreflect.FullName]*Message),\n\t\topts: opts,\n\t}\n\n\tpackageNames := make(map[string]GoPackageName) // filename -> package name\n\timportPaths := make(map[string]GoImportPath) // filename -> import path\n\tfor _, param := range strings.Split(req.GetParameter(), \",\") {\n\t\tvar value string\n\t\tif i := strings.Index(param, \"=\"); i >= 0 {\n\t\t\tvalue = param[i+1:]\n\t\t\tparam = param[0:i]\n\t\t}\n\t\tswitch param {\n\t\tcase \"\":\n\t\t\t// Ignore.\n\t\tcase \"module\":\n\t\t\tgen.module = value\n\t\tcase \"paths\":\n\t\t\tswitch value {\n\t\t\tcase \"import\":\n\t\t\t\tgen.pathType = 
pathTypeImport\n\t\t\tcase \"source_relative\":\n\t\t\t\tgen.pathType = pathTypeSourceRelative\n\t\t\tdefault:\n\t\t\t\treturn nil, fmt.Errorf(`unknown path type %q: want \"import\" or \"source_relative\"`, value)\n\t\t\t}\n\t\tcase \"annotate_code\":\n\t\t\tswitch value {\n\t\t\tcase \"true\", \"\":\n\t\t\t\tgen.annotateCode = true\n\t\t\tcase \"false\":\n\t\t\tdefault:\n\t\t\t\treturn nil, fmt.Errorf(`bad value for parameter %q: want \"true\" or \"false\"`, param)\n\t\t\t}\n\t\tdefault:\n\t\t\tif param[0] == 'M' {\n\t\t\t\timpPath, pkgName := splitImportPathAndPackageName(value)\n\t\t\t\tif pkgName != \"\" {\n\t\t\t\t\tpackageNames[param[1:]] = pkgName\n\t\t\t\t}\n\t\t\t\tif impPath != \"\" {\n\t\t\t\t\timportPaths[param[1:]] = impPath\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif opts.ParamFunc != nil {\n\t\t\t\tif err := opts.ParamFunc(param, value); err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// When the module= option is provided, we strip the module name\n\t// prefix from generated files. 
This only makes sense if generated\n\t// filenames are based on the import path.\n\tif gen.module != \"\" && gen.pathType == pathTypeSourceRelative {\n\t\treturn nil, fmt.Errorf(\"cannot use module= with paths=source_relative\")\n\t}\n\n\t// Figure out the import path and package name for each file.\n\t//\n\t// The rules here are complicated and have grown organically over time.\n\t// Interactions between different ways of specifying package information\n\t// may be surprising.\n\t//\n\t// The recommended approach is to include a go_package option in every\n\t// .proto source file specifying the full import path of the Go package\n\t// associated with this file.\n\t//\n\t// option go_package = \"google.golang.org/protobuf/types/known/anypb\";\n\t//\n\t// Alternatively, build systems which want to exert full control over\n\t// import paths may specify M<filename>=<import_path> flags.\n\tfor _, fdesc := range gen.Request.ProtoFile {\n\t\t// The \"M\" command-line flags take precedence over\n\t\t// the \"go_package\" option in the .proto source file.\n\t\tfilename := fdesc.GetName()\n\t\timpPath, pkgName := splitImportPathAndPackageName(fdesc.GetOptions().GetGoPackage())\n\t\tif importPaths[filename] == \"\" && impPath != \"\" {\n\t\t\timportPaths[filename] = impPath\n\t\t}\n\t\tif packageNames[filename] == \"\" && pkgName != \"\" {\n\t\t\tpackageNames[filename] = pkgName\n\t\t}\n\t\tswitch {\n\t\tcase importPaths[filename] == \"\":\n\t\t\t// The import path must be specified one way or another.\n\t\t\treturn nil, fmt.Errorf(\n\t\t\t\t\"unable to determine Go import path for %q\\n\\n\"+\n\t\t\t\t\t\"Please specify either:\\n\"+\n\t\t\t\t\t\"\\t• a \\\"go_package\\\" option in the .proto source file, or\\n\"+\n\t\t\t\t\t\"\\t• a \\\"M\\\" argument on the command line.\\n\\n\"+\n\t\t\t\t\t\"See %v for more information.\\n\",\n\t\t\t\tfdesc.GetName(), goPackageDocURL)\n\t\tcase !strings.Contains(string(importPaths[filename]), \".\") 
&&\n\t\t\t!strings.Contains(string(importPaths[filename]), \"/\"):\n\t\t\t// Check that import paths contain at least a dot or slash to avoid\n\t\t\t// a common mistake where import path is confused with package name.\n\t\t\treturn nil, fmt.Errorf(\n\t\t\t\t\"invalid Go import path %q for %q\\n\\n\"+\n\t\t\t\t\t\"The import path must contain at least one period ('.') or forward slash ('/') character.\\n\\n\"+\n\t\t\t\t\t\"See %v for more information.\\n\",\n\t\t\t\tstring(importPaths[filename]), fdesc.GetName(), goPackageDocURL)\n\t\tcase packageNames[filename] == \"\":\n\t\t\t// If the package name is not explicitly specified,\n\t\t\t// then derive a reasonable package name from the import path.\n\t\t\t//\n\t\t\t// NOTE: The package name is derived first from the import path in\n\t\t\t// the \"go_package\" option (if present) before trying the \"M\" flag.\n\t\t\t// The inverted order for this is because the primary use of the \"M\"\n\t\t\t// flag is by build systems that have full control over the\n\t\t\t// import paths all packages, where it is generally expected that\n\t\t\t// the Go package name still be identical for the Go toolchain and\n\t\t\t// for custom build systems like Bazel.\n\t\t\tif impPath == \"\" {\n\t\t\t\timpPath = importPaths[filename]\n\t\t\t}\n\t\t\tpackageNames[filename] = cleanPackageName(path.Base(string(impPath)))\n\t\t}\n\t}\n\n\t// Consistency check: Every file with the same Go import path should have\n\t// the same Go package name.\n\tpackageFiles := make(map[GoImportPath][]string)\n\tfor filename, importPath := range importPaths {\n\t\tif _, ok := packageNames[filename]; !ok {\n\t\t\t// Skip files mentioned in a M<file>=<import_path> parameter\n\t\t\t// but which do not appear in the CodeGeneratorRequest.\n\t\t\tcontinue\n\t\t}\n\t\tpackageFiles[importPath] = append(packageFiles[importPath], filename)\n\t}\n\tfor importPath, filenames := range packageFiles {\n\t\tfor i := 1; i < len(filenames); i++ {\n\t\t\tif a, b := 
packageNames[filenames[0]], packageNames[filenames[i]]; a != b {\n\t\t\t\treturn nil, fmt.Errorf(\"Go package %v has inconsistent names %v (%v) and %v (%v)\",\n\t\t\t\t\timportPath, a, filenames[0], b, filenames[i])\n\t\t\t}\n\t\t}\n\t}\n\n\t// The extracted types from the full import set\n\ttypeRegistry := newExtensionRegistry()\n\tfor _, fdesc := range gen.Request.ProtoFile {\n\t\tfilename := fdesc.GetName()\n\t\tif gen.FilesByPath[filename] != nil {\n\t\t\treturn nil, fmt.Errorf(\"duplicate file name: %q\", filename)\n\t\t}\n\t\tf, err := newFile(gen, fdesc, packageNames[filename], importPaths[filename])\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tgen.Files = append(gen.Files, f)\n\t\tgen.FilesByPath[filename] = f\n\t\tif err = typeRegistry.registerAllExtensionsFromFile(f.Desc); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tfor _, filename := range gen.Request.FileToGenerate {\n\t\tf, ok := gen.FilesByPath[filename]\n\t\tif !ok {\n\t\t\treturn nil, fmt.Errorf(\"no descriptor for generated file: %v\", filename)\n\t\t}\n\t\tf.Generate = true\n\t}\n\n\t// Create fully-linked descriptors if new extensions were found\n\tif typeRegistry.hasNovelExtensions() {\n\t\tfor _, f := range gen.Files {\n\t\t\tb, err := proto.Marshal(f.Proto.ProtoReflect().Interface())\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\terr = proto.UnmarshalOptions{Resolver: typeRegistry}.Unmarshal(b, f.Proto)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t}\n\treturn gen, nil\n}", "func NewDriver(nodeID string, endpoint string, synoOption *options.SynologyOptions) (Driver, error) {\n\tglog.Infof(\"Driver: %v\", DriverName)\n\n\tsession, _, err := Login(synoOption)\n\tif err != nil {\n\t\tglog.V(3).Infof(\"Failed to login: %v\", err)\n\t\treturn nil, err\n\t}\n\n\td := &driver{\n\t\tendpoint: endpoint,\n\t\tsynologyHost: synoOption.Host,\n\t\tsession: *session,\n\t}\n\n\tcsiDriver := csicommon.NewCSIDriver(DriverName, version, 
nodeID)\n\tcsiDriver.AddControllerServiceCapabilities(\n\t\t[]csi.ControllerServiceCapability_RPC_Type{\n\t\t\tcsi.ControllerServiceCapability_RPC_LIST_VOLUMES,\n\t\t\tcsi.ControllerServiceCapability_RPC_CREATE_DELETE_VOLUME,\n\t\t\tcsi.ControllerServiceCapability_RPC_PUBLISH_UNPUBLISH_VOLUME,\n\t\t\tcsi.ControllerServiceCapability_RPC_EXPAND_VOLUME,\n\t\t})\n\tcsiDriver.AddVolumeCapabilityAccessModes(\n\t\t[]csi.VolumeCapability_AccessMode_Mode{csi.VolumeCapability_AccessMode_SINGLE_NODE_WRITER})\n\n\td.csiDriver = csiDriver\n\n\treturn d, nil\n}", "func NewPlugin(service Service) (*Plugin, error) {\n\tif service == nil {\n\t\treturn nil, fmt.Errorf(\"Service Interface Required.\")\n\t}\n\n\treturn &Plugin{\n\t\tService: service,\n\t}, nil\n}", "func NewDevicePluginHandlerImpl(updateCapacityFunc func(v1.ResourceList)) (*DevicePluginHandlerImpl, error) {\n\tglog.V(2).Infof(\"Creating Device Plugin Handler\")\n\thandler := &DevicePluginHandlerImpl{\n\t\tallDevices: make(map[string]sets.String),\n\t\tallocatedDevices: make(map[string]podDevices),\n\t}\n\n\tdeviceManagerMonitorCallback := func(resourceName string, added, updated, deleted []*pluginapi.Device) {\n\t\tvar capacity = v1.ResourceList{}\n\t\tkept := append(updated, added...)\n\t\tif _, ok := handler.allDevices[resourceName]; !ok {\n\t\t\thandler.allDevices[resourceName] = sets.NewString()\n\t\t}\n\t\t// For now, DevicePluginHandler only keeps track of healthy devices.\n\t\t// We can revisit this later when the need comes to track unhealthy devices here.\n\t\tfor _, dev := range kept {\n\t\t\tif dev.Health == pluginapi.Healthy {\n\t\t\t\thandler.allDevices[resourceName].Insert(dev.ID)\n\t\t\t} else {\n\t\t\t\thandler.allDevices[resourceName].Delete(dev.ID)\n\t\t\t}\n\t\t}\n\t\tfor _, dev := range deleted {\n\t\t\thandler.allDevices[resourceName].Delete(dev.ID)\n\t\t}\n\t\tcapacity[v1.ResourceName(resourceName)] = *resource.NewQuantity(int64(handler.allDevices[resourceName].Len()), 
resource.DecimalSI)\n\t\tupdateCapacityFunc(capacity)\n\t}\n\n\tmgr, err := deviceplugin.NewManagerImpl(pluginapi.KubeletSocket, deviceManagerMonitorCallback)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Failed to initialize device plugin manager: %+v\", err)\n\t}\n\n\thandler.devicePluginManager = mgr\n\thandler.devicePluginManagerMonitorCallback = deviceManagerMonitorCallback\n\t// Loads in allocatedDevices information from disk.\n\terr = handler.readCheckpoint()\n\tif err != nil {\n\t\tglog.Warningf(\"Continue after failing to read checkpoint file. Device allocation info may NOT be up-to-date. Err: %v\", err)\n\t}\n\treturn handler, nil\n}", "func newTestKubelet(t *testing.T, controllerAttachDetachEnabled bool) *TestKubelet {\n\timageList := []kubecontainer.Image{\n\t\t{\n\t\t\tID: \"abc\",\n\t\t\tRepoTags: []string{\"k8s.gcr.io:v1\", \"k8s.gcr.io:v2\"},\n\t\t\tSize: 123,\n\t\t},\n\t\t{\n\t\t\tID: \"efg\",\n\t\t\tRepoTags: []string{\"k8s.gcr.io:v3\", \"k8s.gcr.io:v4\"},\n\t\t\tSize: 456,\n\t\t},\n\t}\n\n\treturn newTestKubeletWithImageList(t, imageList, controllerAttachDetachEnabled, true /*initFakeVolumePlugin*/)\n}", "func NewDriver(name string, loader func(interface{}) error) (Driver, error) {\n\tfactorysMu.RLock()\n\tfactoryi, ok := factories[name]\n\tfactorysMu.RUnlock()\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"file: unknown driver %q (forgotten import?)\", name)\n\t}\n\treturn factoryi(loader)\n}", "func PluginSpecDir() string {\n\treturn filepath.Join(os.Getenv(\"programdata\"), \"docker\", \"plugins\")\n}", "func NewMyDockerVolumePlugin(path string) (*MyDockerVolumePlugin, error) {\n\tmdnp := &MyDockerVolumePlugin{\n\t\trootPathOnDisk: path,\n\t\tmetadata: make(map[string]*volumeInfo),\n\t}\n\treturn mdnp, nil\n}", "func NewProbe(config *config.Config, opts Opts) (*Probe, error) {\n\topts.normalize()\n\n\tctx, cancel := context.WithCancel(context.Background())\n\n\tp := &Probe{\n\t\tOpts: opts,\n\t\tConfig: config,\n\t\tctx: ctx,\n\t\tcancelFnc: 
cancel,\n\t\tStatsdClient: opts.StatsdClient,\n\t\tdiscarderRateLimiter: rate.NewLimiter(rate.Every(time.Second/5), 100),\n\t\tevent: &model.Event{},\n\t\tPlatformProbe: PlatformProbe{\n\t\t\tonStart: make(chan *procmon.ProcessStartNotification),\n\t\t\tonStop: make(chan *procmon.ProcessStopNotification),\n\t\t},\n\t}\n\tresolvers, err := resolvers.NewResolvers(config, p.StatsdClient)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tp.resolvers = resolvers\n\n\tp.fieldHandlers = &FieldHandlers{resolvers: resolvers}\n\n\t// be sure to zero the probe event before everything else\n\tp.zeroEvent()\n\n\treturn p, nil\n}", "func NewPodUpgrade() *PodUpgrade {\n\treturn &PodUpgrade{}\n}", "func newDefaultPodManager() *podManager {\n\treturn &podManager{\n\t\trunningPods: make(map[string]*runningPod),\n\t\trequests: make(chan *cniserver.PodRequest, 20),\n\t\treattachPods: make(map[string]*corev1.Pod),\n\t}\n}", "func ManaPlugin() *node.Plugin {\n\tonce.Do(func() {\n\t\tmanaPlugin = node.NewPlugin(PluginName, node.Enabled, configureManaPlugin, runManaPlugin)\n\t})\n\treturn manaPlugin\n}", "func newPodEventRecorder(cs kubernetes.Interface, ns string, podName string) Recorder {\n\tvar recorder Recorder\n\tpod, err := cs.CoreV1().Pods(ns).Get(podName, metav1.GetOptions{})\n\tif err != nil {\n\t\tglog.Errorf(\"failed to get pod with name %s for event recorder: %v\", podName, err)\n\t\trecorder = &FakeRecorder{}\n\t} else {\n\t\trecorder = NewRecorder(cs, ns, pod)\n\t}\n\treturn recorder\n}", "func NewDriver(cfg *configv1.InfrastructureStatus, clnt client.Client) Driver {\n\n\tctx := context.Background()\n\tvar driver Driver\n\n\tif cfg.PlatformStatus.Type == \"AWS\" {\n\t\tdriver = s3.NewDriver(ctx, cfg, clnt)\n\t}\n\n\tif cfg.PlatformStatus.Type == \"GCP\" {\n\t\tdriver = gcs.NewDriver(ctx, cfg, clnt)\n\t}\n\n\treturn driver\n}", "func NewDriver(config dbmate.DriverConfig) dbmate.Driver {\n\treturn &Driver{\n\t\tmigrationsTableName: config.MigrationsTableName,\n\t\tdatabaseURL: 
config.DatabaseURL,\n\t\tlog: config.Log,\n\t}\n}", "func New(next goproxy.Plugin, cache FileCache) goproxy.Plugin {\n\treturn &plugin{next: next, cache: cache}\n}", "func NewMyDockerNetworkPlugin(scope string) (*MyDockerNetworkPlugin, error) {\n\tmdnp := &MyDockerNetworkPlugin{\n\t\tscope: scope, // TODO(Krish): local vs global?\n\t}\n\treturn mdnp, nil\n}", "func NewDriver(p *Periph) *Driver {\n\treturn &Driver{p: p, timeoutRx: -1, timeoutTx: -1}\n}", "func New(path string) (Plugin, error) {\n\tplugin, err := plugin.Open(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tp, err := plugin.Lookup(\"Plugin\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn p.(Plugin), nil\n}", "func Plugin() *node.Plugin {\n\tonce.Do(func() {\n\t\tplugin = node.NewPlugin(PluginName, node.Enabled, configure)\n\t})\n\treturn plugin\n}", "func NewPlugin() (*Plugin, error) {\n\treporter := NewReporter()\n\tplugin := &Plugin{\n\t\treporter: reporter,\n\t}\n\treturn plugin, nil\n}", "func New(c *Config) *manager {\n\tctx, cancel := context.WithCancel(context.Background())\n\treturn &manager{\n\t\tlogger: c.Logger.Named(\"driver_mgr\"),\n\t\tstate: c.State,\n\t\tctx: ctx,\n\t\tcancel: cancel,\n\t\tloader: c.Loader,\n\t\tpluginConfig: c.PluginConfig,\n\t\tupdater: c.Updater,\n\t\teventHandlerFactory: c.EventHandlerFactory,\n\t\tinstances: make(map[string]*instanceManager),\n\t\treattachConfigs: make(map[loader.PluginID]*pstructs.ReattachConfig),\n\t\tallowedDrivers: c.AllowedDrivers,\n\t\tblockedDrivers: c.BlockedDrivers,\n\t\treadyCh: make(chan struct{}),\n\t}\n}", "func (pm *pluginManager) getPlugin(name string) (NodePlugin, error) {\n\tif p, ok := pm.plugins[name]; ok {\n\t\treturn p, nil\n\t}\n\n\tpc, err := pm.pg.Get(name, DockerCSIPluginCap, plugingetter.Lookup)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpa, ok := pc.(plugingetter.PluginAddr)\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"plugin does not implement PluginAddr interface\")\n\t}\n\n\tp := 
pm.newNodePluginFunc(name, pc, pa, pm.secrets)\n\tpm.plugins[name] = p\n\treturn p, nil\n}", "func NewPluginMount(description string, destination string, name string, options []string, settable []string, source string, type_ string) *PluginMount {\n\tthis := PluginMount{}\n\tthis.Description = description\n\tthis.Destination = destination\n\tthis.Name = name\n\tthis.Options = options\n\tthis.Settable = settable\n\tthis.Source = source\n\tthis.Type = type_\n\treturn &this\n}", "func CreateConftestPlugin(version string) jenkinsv1.Plugin {\n\tbinaries := extensions.CreateBinaries(func(p extensions.Platform) string {\n\t\tgoos := p.Goos\n\t\tgoarch := strings.ToLower(p.Goarch)\n\t\tif goarch == \"amd64\" {\n\t\t\tgoarch = \"x86_64\"\n\t\t}\n\t\text := \".tar.gz\"\n\t\tif p.IsWindows() {\n\t\t\text = \".zip\"\n\t\t}\n\t\treturn fmt.Sprintf(\"https://github.com/open-policy-agent/conftest/releases/download/v%s/conftest_%s_%s_%s%s\", version, version, goos, goarch, ext)\n\t})\n\n\tplugin := jenkinsv1.Plugin{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: ConftestPluginName,\n\t\t},\n\t\tSpec: jenkinsv1.PluginSpec{\n\t\t\tSubCommand: \"kube-score\",\n\t\t\tBinaries: binaries,\n\t\t\tDescription: \"kube score binary\",\n\t\t\tName: ConftestPluginName,\n\t\t\tVersion: version,\n\t\t},\n\t}\n\treturn plugin\n}", "func PluginFactory(ctx context.Context, pluginName string, sys pluginutil.LookRunnerUtil, logger log.Logger) (Database, error) {\n\t// Look for plugin in the plugin catalog\n\tpluginRunner, err := sys.LookupPlugin(ctx, pluginName, consts.PluginTypeDatabase)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tnamedLogger := logger.Named(pluginName)\n\n\tvar transport string\n\tvar db Database\n\tif pluginRunner.Builtin {\n\t\t// Plugin is builtin so we can retrieve an instance of the interface\n\t\t// from the pluginRunner. 
Then cast it to a Database.\n\t\tdbRaw, err := pluginRunner.BuiltinFactory()\n\t\tif err != nil {\n\t\t\treturn nil, errwrap.Wrapf(\"error initializing plugin: {{err}}\", err)\n\t\t}\n\n\t\tvar ok bool\n\t\tdb, ok = dbRaw.(Database)\n\t\tif !ok {\n\t\t\treturn nil, fmt.Errorf(\"unsupported database type: %q\", pluginName)\n\t\t}\n\n\t\ttransport = \"builtin\"\n\n\t} else {\n\t\t// create a DatabasePluginClient instance\n\t\tdb, err = NewPluginClient(ctx, sys, pluginRunner, namedLogger, false)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// Switch on the underlying database client type to get the transport\n\t\t// method.\n\t\tswitch db.(*DatabasePluginClient).Database.(type) {\n\t\tcase *gRPCClient:\n\t\t\ttransport = \"gRPC\"\n\t\t}\n\n\t}\n\n\ttypeStr, err := db.Type()\n\tif err != nil {\n\t\treturn nil, errwrap.Wrapf(\"error getting plugin type: {{err}}\", err)\n\t}\n\n\t// Wrap with metrics middleware\n\tdb = &databaseMetricsMiddleware{\n\t\tnext: db,\n\t\ttypeStr: typeStr,\n\t}\n\n\t// Wrap with tracing middleware\n\tif namedLogger.IsTrace() {\n\t\tdb = &databaseTracingMiddleware{\n\t\t\tnext: db,\n\t\t\tlogger: namedLogger.With(\"transport\", transport),\n\t\t}\n\t}\n\n\treturn db, nil\n}", "func NewPlugin() *Auth {\n\tplugin := &Auth{\n\t\tname: PluginName,\n\t}\n\treturn plugin\n}", "func NewDriver(name string) (*App, error) {\n\treturn newApp(\"driver.\" + name)\n}", "func newPodForCR(cr *operatorv1alpha1.Minecraft) *corev1.Pod {\n\tvar envVars []corev1.EnvVar\n\tenvVars = []corev1.EnvVar {\n\t\tcorev1.EnvVar {\n\t\tName: \"EULA\",\n\t\tValue: cr.Spec.Uela,\n\t\t},\n\t}\n\n\tvar fsType int64\n\tfsType = int64(1000)\n\n\tlabels := map[string]string{\n\t\t\"app\": cr.Name,\n \"version\": cr.Spec.Version,\n\t\t\"uela\": cr.Spec.Uela,\n\t}\n\treturn &corev1.Pod{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: cr.Name + \"-pod\",\n\t\t\tNamespace: cr.Namespace,\n\t\t\tLabels: labels,\n\t\t},\n\t\tSpec: corev1.PodSpec{\n\t\t\tSecurityContext: 
&corev1.PodSecurityContext{\n\t\t\t\tFSGroup: &fsType,\n\t\t\t\tRunAsUser: &fsType,\n\t\t\t},\n\t\t\tContainers: []corev1.Container{\n\t\t\t\t{\n\t\t\t\t\tName: \"minecraft\",\n\t\t\t\t\tImage: \"hoeghh/minecraft:\" + cr.Spec.Version,\n\t\t\t\t\tPorts: []corev1.ContainerPort{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tContainerPort: 25565,\n\t\t\t\t\t\t\tName: \"minecraft\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tEnv: envVars,\n\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"minecraft-volume\",\n\t\t\t\t\t\t\tMountPath: \"/minecraft-data\",\n\t\t\t\t\t\t\tReadOnly: false,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tVolumes: []corev1.Volume{\n\t\t\t\t{\n\t\t\t\t\tName: \"minecraft-volume\",\n\t\t\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\t\t\tPersistentVolumeClaim: &corev1.PersistentVolumeClaimVolumeSource{\n\t\t\t\t\t\t\tClaimName: cr.Name + \"-pvc\", //cr.Spec.StorageClassName\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func NewDriver(options *DriverOptions) *Driver {\n\td := Driver{\n\t\tvolLockMap: util.NewLockMap(),\n\t\tsubnetLockMap: util.NewLockMap(),\n\t\tvolumeLocks: newVolumeLocks(),\n\t\tcloudConfigSecretName: options.CloudConfigSecretName,\n\t\tcloudConfigSecretNamespace: options.CloudConfigSecretNamespace,\n\t\tcustomUserAgent: options.CustomUserAgent,\n\t\tuserAgentSuffix: options.UserAgentSuffix,\n\t\tblobfuseProxyEndpoint: options.BlobfuseProxyEndpoint,\n\t\tenableBlobfuseProxy: options.EnableBlobfuseProxy,\n\t\tblobfuseProxyConnTimout: options.BlobfuseProxyConnTimout,\n\t\tenableBlobMockMount: options.EnableBlobMockMount,\n\t\tallowEmptyCloudConfig: options.AllowEmptyCloudConfig,\n\t\tenableGetVolumeStats: options.EnableGetVolumeStats,\n\t\tmountPermissions: options.MountPermissions,\n\t}\n\td.Name = options.DriverName\n\td.Version = driverVersion\n\td.NodeID = options.NodeID\n\n\td.DefaultControllerServer.Driver = &d.CSIDriver\n\td.DefaultIdentityServer.Driver = 
&d.CSIDriver\n\td.DefaultNodeServer.Driver = &d.CSIDriver\n\n\tvar err error\n\tgetter := func(key string) (interface{}, error) { return nil, nil }\n\tif d.accountSearchCache, err = azcache.NewTimedcache(time.Minute, getter); err != nil {\n\t\tklog.Fatalf(\"%v\", err)\n\t}\n\treturn &d\n}", "func New(b bot.Bot) *LeftpadPlugin {\n\tp := &LeftpadPlugin{\n\t\tbot: b,\n\t\tconfig: b.Config(),\n\t}\n\tb.RegisterRegexCmd(p, bot.Message, leftpadRegex, p.leftpadCmd)\n\treturn p\n}", "func NewCambriconDevicePlugin(o Options) *CambriconDevicePlugin {\n\tdevs, devsInfo := getDevices(o.Mode, int(o.VirtualizationNum))\n\treturn &CambriconDevicePlugin{\n\t\tdevs: devs,\n\t\tdevsInfo: devsInfo,\n\t\tsocket: serverSock,\n\t\tstop: make(chan interface{}),\n\t\thealth: make(chan *pluginapi.Device),\n\t\tdeviceList: newDeviceList(),\n\t\tnodeHostname: o.NodeName,\n\t\toptions: o,\n\t}\n}" ]
[ "0.82380116", "0.60484475", "0.6010558", "0.5986228", "0.5868032", "0.58672225", "0.58576405", "0.58369595", "0.5824131", "0.5824131", "0.5811279", "0.5768812", "0.57687277", "0.5760002", "0.57332784", "0.57026", "0.5667394", "0.5626511", "0.56003225", "0.55854213", "0.5554125", "0.5552047", "0.55462945", "0.554223", "0.55388457", "0.5532813", "0.5521976", "0.549818", "0.54851365", "0.54693747", "0.54262286", "0.5423749", "0.5407599", "0.53660077", "0.53562635", "0.534731", "0.5337522", "0.5329109", "0.5312725", "0.5298115", "0.5271199", "0.52707744", "0.52705646", "0.52613336", "0.5259397", "0.5256064", "0.5232483", "0.52240086", "0.5215809", "0.52149504", "0.52130693", "0.5205653", "0.51885366", "0.51866156", "0.5182889", "0.5174101", "0.51740724", "0.5171137", "0.5168389", "0.51597893", "0.51535684", "0.51503503", "0.51477313", "0.51256514", "0.51129496", "0.5111229", "0.51071537", "0.5087403", "0.5083336", "0.50812316", "0.5080557", "0.5058066", "0.50475353", "0.503839", "0.50350493", "0.50313765", "0.50232196", "0.50211746", "0.5014932", "0.5013085", "0.50122243", "0.50113803", "0.5008564", "0.500648", "0.4997702", "0.4987697", "0.49853054", "0.4984603", "0.4980656", "0.49536157", "0.4953289", "0.49512637", "0.49292645", "0.49210584", "0.49204513", "0.4915959", "0.49119115", "0.49117416", "0.4910976", "0.4906165" ]
0.81757015
1
PluginInfo returns metadata about the podman driver plugin
func (d *Driver) PluginInfo() (*base.PluginInfoResponse, error) { return pluginInfo, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func PluginInfo() map[string]string {\n\treturn map[string]string{\n\t\t\"pluginAPIVersion\": \"0.1.0\",\n\t\t\"type\": \"connector.tcp\",\n\t\t\"id\": \"example-tcp-connector\",\n\t\t\"description\": \"Example TCP Connector Plugin\",\n\t}\n}", "func PluginInfo() map[string]string {\n\treturn map[string]string{\n\t\t\"pluginAPIVersion\": \"0.1.0\",\n\t\t\"type\": \"connector.tcp\",\n\t\t\"id\": \"mysql\",\n\t\t\"description\": \"returns an authenticated connection to a MySQL database\",\n\t}\n}", "func _GetPluginInfo(pluginObj *plugin.Plugin) (map[string]string, error) {\n\trawPluginInfo, err := pluginObj.Lookup(\"PluginInfo\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpluginInfo := *rawPluginInfo.(*map[string]string)\n\n\treturn pluginInfo, nil\n}", "func GetPluginInfo(plugin *examplePlugin) cache.PluginInfo {\n\treturn cache.PluginInfo{\n\t\tSocketPath: plugin.endpoint,\n\t}\n}", "func (s *identityServer) GetPluginInfo(ctx context.Context, r *csi.GetPluginInfoRequest) (*csi.GetPluginInfoResponse, error) {\n\tklog.V(4).Infof(\"IdentityServer.GetPluginInfo called with %+v\", r)\n\tm := map[string]string{\"url\": driverRepo}\n\treturn &csi.GetPluginInfoResponse{\n\t\tName: driverName,\n\t\tVendorVersion: s.driverVersion,\n\t\tManifest: m,\n\t}, nil\n}", "func (m *Manager) PluginInfo(modulePath string) compat.Info {\n\tm.mutex.RLock()\n\tdefer m.mutex.RUnlock()\n\n\tif p, ok := m.plugins[modulePath]; ok {\n\t\treturn p.PluginInfo()\n\t}\n\tfmt.Println(\"Could not get plugin info for\", modulePath)\n\treturn compat.Info{\n\t\tName: \"UNKNOWN\",\n\t\tModulePath: modulePath,\n\t\tDescription: \"Oops something went wrong\",\n\t}\n}", "func (k *xyzProvider) GetPluginInfo(context.Context, *pbempty.Empty) (*pulumirpc.PluginInfo, error) {\n\treturn &pulumirpc.PluginInfo{\n\t\tVersion: k.version,\n\t}, nil\n}", "func GetGotifyPluginInfo() plugin.Info {\n\treturn plugin.Info{\n\t\tModulePath: \"github.com/gotify/plugin-template\",\n\t\tVersion: \"1.0.0\",\n\t\tAuthor: 
\"Your Name\",\n\t\tWebsite: \"https://gotify.net/docs/plugin\",\n\t\tDescription: \"An example plugin with travis-ci building\",\n\t\tLicense: \"MIT\",\n\t\tName: \"gotify/plugin-template\",\n\t}\n}", "func (p Plugin) Info(ctx context.Context, module string, vsn string) ([]byte, error) {\n\tresp, err := p.c.GetInfo(ctx, &stpb.GetModuleRequest{Module: module, Version: vsn})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn resp.GetData(), nil\n}", "func (*manager) PluginType() string { return base.PluginTypeDevice }", "func (m *RdmaDevPlugin) GetInfo(ctx context.Context, rqt *registerapi.InfoRequest) (*registerapi.PluginInfo, error) {\n\tpluginInfoResponse := &registerapi.PluginInfo{\n\t\tType: registerapi.DevicePlugin,\n\t\tName: m.resourceName,\n\t\tEndpoint: filepath.Join(sockDir, m.socketName),\n\t\tSupportedVersions: []string{\"v1alpha1\", \"v1beta1\"},\n\t}\n\treturn pluginInfoResponse, nil\n}", "func (e *examplePlugin) GetInfo(ctx context.Context, req *registerapi.InfoRequest) (*registerapi.PluginInfo, error) {\n\treturn &registerapi.PluginInfo{\n\t\tType: e.pluginType,\n\t\tName: e.pluginName,\n\t\tEndpoint: e.endpoint,\n\t\tSupportedVersions: e.versions,\n\t}, nil\n}", "func (*manager) PluginType() string { return base.PluginTypeDriver }", "func (*PluginInfoResponse) Descriptor() ([]byte, []int) {\n\treturn file_odpf_optimus_plugins_base_proto_rawDescGZIP(), []int{1}\n}", "func (c *DlaasPlugin) GetMetadata() plugin.PluginMetadata {\n\n\tcommands := make([]plugin.Command, len(metadata.Commands))\n\n\tfor index, command := range metadata.Commands {\n\t\tcommands[index] = plugin.Command{\n\t\t\tNamespace: command.Namespace,\n\t\t\tName: command.Name,\n\t\t\tDescription: command.Description,\n\t\t\tUsage: command.Usage,\n\t\t\tFlags: command.PluginFlags,\n\t\t}\n\t}\n\n\treturn plugin.PluginMetadata{\n\t\tName: pluginName,\n\t\tVersion: plugin.VersionType{\n\t\t\tMajor: 1,\n\t\t\tMinor: 0,\n\t\t\tBuild: 0,\n\t\t},\n\t\tMinCliVersion: 
plugin.VersionType{\n\t\t\tMajor: 0,\n\t\t\tMinor: 4,\n\t\t\tBuild: 0,\n\t\t},\n\t\tNamespaces: []plugin.Namespace{\n\t\t\t{\n\t\t\t\tName: dlNamespace,\n\t\t\t\tDescription: \"Manage deep learning models on Bluemix\",\n\t\t\t},\n\t\t},\n\t\tCommands: commands,\n\t}\n}", "func (i *IdentityService) GetPluginInfo(context.Context, *csi.GetPluginInfoRequest) (*csi.GetPluginInfoResponse, error) {\n\treturn &csi.GetPluginInfoResponse{\n\t\tName: VendorName,\n\t\tVendorVersion: VendorVersion,\n\t}, nil\n}", "func (i *IdentityService) GetPluginInfo(context.Context, *csi.GetPluginInfoRequest) (*csi.GetPluginInfoResponse, error) {\n\treturn &csi.GetPluginInfoResponse{\n\t\tName: VendorName,\n\t\tVendorVersion: VendorVersion,\n\t}, nil\n}", "func (p *plugin) VendorInfo() *spi.VendorInfo {\n\treturn &spi.VendorInfo{\n\t\tInterfaceSpec: spi.InterfaceSpec{\n\t\t\tName: \"infrakit-instance-vSphere\",\n\t\t\tVersion: \"0.6.0\",\n\t\t},\n\t\tURL: \"https://github.com/docker/infrakit\",\n\t}\n}", "func (c *BasicPlugin) GetMetadata() plugin.PluginMetadata {\n\treturn plugin.PluginMetadata{\n\t\tName: \"firehose-analyzer\",\n\t\tVersion: plugin.VersionType{\n\t\t\tMajor: 1,\n\t\t\tMinor: 0,\n\t\t\tBuild: 0,\n\t\t},\n\t\tMinCliVersion: plugin.VersionType{\n\t\t\tMajor: 6,\n\t\t\tMinor: 7,\n\t\t\tBuild: 0,\n\t\t},\n\t\tCommands: []plugin.Command{\n\t\t\t{\n\t\t\t\tName: \"firehose-analyzer\",\n\t\t\t\tHelpText: \"Displays basic firehose metrics for troubleshooting scaling issues\",\n\n\t\t\t\t// UsageDetails is optional\n\t\t\t\t// It is used to show help of usage of each command\n\t\t\t\tUsageDetails: plugin.Usage{\n\t\t\t\t\tUsage: \"firehose-analyzer\\n cf firehose-analyzer\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func (p *HTML5Plugin) GetMetadata() plugin.PluginMetadata {\n\tmetadata := plugin.PluginMetadata{\n\t\tName: \"html5-plugin\",\n\t\tVersion: parseSemver(Version),\n\t\tMinCliVersion: plugin.VersionType{Major: 6, Minor: 7, Build: 0},\n\t}\n\tfor _, command := range 
Commands {\n\t\tmetadata.Commands = append(metadata.Commands, command.GetPluginCommand())\n\t}\n\treturn metadata\n}", "func DescribePlugins() string {\n\tpl := ListPlugins()\n\n\tstr := \"Server types:\\n\"\n\tfor _, name := range pl[\"server_types\"] {\n\t\tstr += \" \" + name + \"\\n\"\n\t}\n\n\tstr += \"\\nCaddyfile loaders:\\n\"\n\tfor _, name := range pl[\"caddyfile_loaders\"] {\n\t\tstr += \" \" + name + \"\\n\"\n\t}\n\n\tif len(pl[\"event_hooks\"]) > 0 {\n\t\tstr += \"\\nEvent hook plugins:\\n\"\n\t\tfor _, name := range pl[\"event_hooks\"] {\n\t\t\tstr += \" hook.\" + name + \"\\n\"\n\t\t}\n\t}\n\n\tif len(pl[\"clustering\"]) > 0 {\n\t\tstr += \"\\nClustering plugins:\\n\"\n\t\tfor _, name := range pl[\"clustering\"] {\n\t\t\tstr += \" \" + name + \"\\n\"\n\t\t}\n\t}\n\n\tstr += \"\\nOther plugins:\\n\"\n\tfor _, name := range pl[\"others\"] {\n\t\tstr += \" \" + name + \"\\n\"\n\t}\n\n\treturn str\n}", "func (e *executor) Describe(ctx context.Context) (*api.NodeDescription, error) {\n\tinfo, err := e.backend.SystemInfo()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tplugins := map[api.PluginDescription]struct{}{}\n\taddPlugins := func(typ string, names []string) {\n\t\tfor _, name := range names {\n\t\t\tplugins[api.PluginDescription{\n\t\t\t\tType: typ,\n\t\t\t\tName: name,\n\t\t\t}] = struct{}{}\n\t\t}\n\t}\n\n\t// add v1 plugins\n\taddPlugins(\"Volume\", info.Plugins.Volume)\n\t// Add builtin driver \"overlay\" (the only builtin multi-host driver) to\n\t// the plugin list by default.\n\taddPlugins(\"Network\", append([]string{\"overlay\"}, info.Plugins.Network...))\n\taddPlugins(\"Authorization\", info.Plugins.Authorization)\n\taddPlugins(\"Log\", info.Plugins.Log)\n\n\t// add v2 plugins\n\tv2Plugins, err := e.backend.PluginManager().List(filters.NewArgs())\n\tif err == nil {\n\t\tfor _, plgn := range v2Plugins {\n\t\t\tfor _, typ := range plgn.Config.Interface.Types {\n\t\t\t\tif typ.Prefix != \"docker\" || !plgn.Enabled 
{\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tplgnTyp := typ.Capability\n\t\t\t\tswitch typ.Capability {\n\t\t\t\tcase \"volumedriver\":\n\t\t\t\t\tplgnTyp = \"Volume\"\n\t\t\t\tcase \"networkdriver\":\n\t\t\t\t\tplgnTyp = \"Network\"\n\t\t\t\tcase \"logdriver\":\n\t\t\t\t\tplgnTyp = \"Log\"\n\t\t\t\t}\n\n\t\t\t\tplugins[api.PluginDescription{\n\t\t\t\t\tType: plgnTyp,\n\t\t\t\t\tName: plgn.Name,\n\t\t\t\t}] = struct{}{}\n\t\t\t}\n\t\t}\n\t}\n\n\tpluginFields := make([]api.PluginDescription, 0, len(plugins))\n\tfor k := range plugins {\n\t\tpluginFields = append(pluginFields, k)\n\t}\n\n\tsort.Sort(sortedPlugins(pluginFields))\n\n\t// parse []string labels into a map[string]string\n\tlabels := map[string]string{}\n\tfor _, l := range info.Labels {\n\t\tstringSlice := strings.SplitN(l, \"=\", 2)\n\t\t// this will take the last value in the list for a given key\n\t\t// ideally, one shouldn't assign multiple values to the same key\n\t\tif len(stringSlice) > 1 {\n\t\t\tlabels[stringSlice[0]] = stringSlice[1]\n\t\t}\n\t}\n\n\tdescription := &api.NodeDescription{\n\t\tHostname: info.Name,\n\t\tPlatform: &api.Platform{\n\t\t\tArchitecture: info.Architecture,\n\t\t\tOS: info.OSType,\n\t\t},\n\t\tEngine: &api.EngineDescription{\n\t\t\tEngineVersion: info.ServerVersion,\n\t\t\tLabels: labels,\n\t\t\tPlugins: pluginFields,\n\t\t},\n\t\tResources: &api.Resources{\n\t\t\tNanoCPUs: int64(info.NCPU) * 1e9,\n\t\t\tMemoryBytes: info.MemTotal,\n\t\t\tGeneric: convert.GenericResourcesToGRPC(info.GenericResources),\n\t\t},\n\t}\n\n\treturn description, nil\n}", "func (p *Plugin) GetMetadata() plugin.PluginMetadata {\n\treturn plugin.PluginMetadata{\n\t\tName: \"copy-autoscaler\",\n\t\tVersion: plugin.VersionType{\n\t\t\tMajor: 0,\n\t\t\tMinor: 2,\n\t\t\tBuild: 1,\n\t\t},\n\t\tMinCliVersion: plugin.VersionType{\n\t\t\tMajor: 6,\n\t\t\tMinor: 7,\n\t\t\tBuild: 0,\n\t\t},\n\t\tCommands: []plugin.Command{\n\t\t\t{\n\t\t\t\tName: \"copy-autoscaler\",\n\t\t\t\tHelpText: \"Plugin to copy the 
autoscaler settings\",\n\n\t\t\t\t// UsageDetails is optional\n\t\t\t\t// It is used to show help of usage of each command\n\t\t\t\tUsageDetails: plugin.Usage{\n\t\t\t\t\tUsage: \"$ cf copy-autoscaler helloworld --find\\n\" +\n\t\t\t\t\t\t\" $ cf copy-autoscaler helloworld --export autoscaler-settings.json\\n\" +\n\t\t\t\t\t\t\" $ cf copy-autoscaler helloworld --import autoscaler-settings.json\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func (*OCR2Config_Plugins) Descriptor() ([]byte, []int) {\n\treturn file_pkg_noderpc_proto_feeds_manager_proto_rawDescGZIP(), []int{4, 2}\n}", "func (c *KibanaMeAppPlugin) GetMetadata() plugin.PluginMetadata {\n\tversionParts := strings.Split(string(VERSION), \".\")\n\tmajor, _ := strconv.Atoi(versionParts[0])\n\tminor, _ := strconv.Atoi(versionParts[1])\n\tpatch, _ := strconv.Atoi(strings.TrimSpace(versionParts[2]))\n\n\treturn plugin.PluginMetadata{\n\t\tName: \"kibana-me-logs\",\n\t\tVersion: plugin.VersionType{\n\t\t\tMajor: major,\n\t\t\tMinor: minor,\n\t\t\tBuild: patch,\n\t\t},\n\t\tCommands: []plugin.Command{\n\t\t\tplugin.Command{\n\t\t\t\tName: \"kibana-me-logs\",\n\t\t\t\tHelpText: \"open kibana-me-logs for an application\",\n\n\t\t\t\tUsageDetails: plugin.Usage{\n\t\t\t\t\tUsage: \"kibana-me-logs <app-name>\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func (p *googleCloudProvider) GetPluginInfo(context.Context, *empty.Empty) (*rpc.PluginInfo, error) {\n\treturn &rpc.PluginInfo{\n\t\tVersion: p.version,\n\t}, nil\n}", "func (p *googleCloudProvider) GetPluginInfo(context.Context, *empty.Empty) (*rpc.PluginInfo, error) {\n\treturn &rpc.PluginInfo{\n\t\tVersion: p.version,\n\t}, nil\n}", "func GetNotifierPlugin(name string) (string, NotifierPluginNewFunc, bool) {\n info, ok := registeredNotifierPlugins[name]\n return info.notifierPluginFilePath, info.notifierPluginNewFunc, ok\n}", "func PluginSupports(supportedVersions ...string) PluginInfo {\n\tif len(supportedVersions) < 1 {\n\t\tpanic(\"programmer error: you must support 
at least one version\")\n\t}\n\treturn &pluginInfo{\n\t\tCNIVersion_: Current(),\n\t\tSupportedVersions_: supportedVersions,\n\t}\n}", "func (c *Repeater) GetMetadata() plugin.PluginMetadata {\n\treturn plugin.PluginMetadata{\n\t\tName: name,\n\t\tVersion: c.GetVersionType(),\n\t\tCommands: []plugin.Command{\n\t\t\tplugin.Command{\n\t\t\t\tName: name,\n\t\t\t\tHelpText: \"Run the identified command on every app in a space. If the app name is a parameter in the command, use '{}'\",\n\t\t\t\tUsageDetails: plugin.Usage{\n\t\t\t\t\tUsage: fmt.Sprintf(\"cf %s [--org|--global] scale {} -i 2\", name),\n\t\t\t\t\tOptions: map[string]string{\n\t\t\t\t\t\t\"org\": \"Run the identified command on every app in the org, instead of the space\",\n\t\t\t\t\t\t\"global\": \"Run the idenitified command on every app globally, instead of the space\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func (*PluginMetaReply) Descriptor() ([]byte, []int) {\n\treturn file_plugin_proto_rawDescGZIP(), []int{13}\n}", "func GetPluginDetail() *plugins.Plugin {\n\treturn common.GetPluginDetail()\n}", "func RoutesInfo() *Plugin {\n\treturn &Plugin{}\n}", "func PluginSpecDir() string {\n\treturn filepath.Join(os.Getenv(\"programdata\"), \"docker\", \"plugins\")\n}", "func (pm *Manager) Inspect(name string) (tp types.Plugin, err error) {\n\tp, err := pm.get(name)\n\tif err != nil {\n\t\treturn tp, err\n\t}\n\treturn p.p, nil\n}", "func (*PluginInfoRequest) Descriptor() ([]byte, []int) {\n\treturn file_odpf_optimus_plugins_base_proto_rawDescGZIP(), []int{0}\n}", "func (m *Manager) pluginMap(id string) map[string]plugin.Plugin {\n\tpmap := map[string]plugin.Plugin{}\n\n\t// for _, pinfo := range m.Plugins {\n\t// \tpmap[pinfo.ID] = m.pluginImpl\n\t// }\n\n\tpmap[id] = m.pluginImpl\n\n\treturn pmap\n}", "func GetPluginMap(reserved *PluginOpts) map[string]plugin.Plugin {\n\tvar greeterObj greeter.Greeter\n\n\tif reserved != nil {\n\t\tgreeterObj.F = reserved.Greeter\n\t}\n\n\treturn 
map[string]plugin.Plugin{\n\t\tgreeter.InterfaceName: &greeterObj,\n\t}\n}", "func (p *Prometheus) PluginName() string {\n\treturn \"prometheus\"\n}", "func Meta() *plugin.PluginMeta {\n\treturn plugin.NewPluginMeta(\n\t\tPluginName,\n\t\tVersion,\n\t\tplugin.CollectorPluginType,\n\t\t[]string{plugin.SnapGOBContentType},\n\t\t[]string{plugin.SnapGOBContentType},\n\t\tplugin.ConcurrencyCount(1),\n\t)\n}", "func getAvailablePluginInfo(restPluginsMap restPlugins) []restPluginsAvailable {\n\tvar availablePluginsMap []restPluginsAvailable\n\tfor _, plugin := range restPluginsMap.Plugins {\n\t\tlog.Debug(\"getting: \", plugin.Name, \", available info\")\n\t\tavailablePluginURL := baseURL + \"available/\" + plugin.Key + \"-key\"\n\t\tlog.Debug(\"requesting URL: \" + availablePluginURL)\n\t\treq, err := http.NewRequest(\"GET\", availablePluginURL, nil)\n\t\tif err != nil {\n\t\t\tlog.Error(\"http.NewRequest returned an error:\", err)\n\t\t}\n\n\t\tlog.Debug(\"add authorization header to the request\")\n\t\treq.Header.Add(\"Authorization\", bearer)\n\n\t\tlog.Debug(\"make request... 
get back a response\")\n\t\tres, err := http.DefaultClient.Do(req)\n\t\tif err != nil {\n\t\t\tlog.Error(\"http.DefaultClient.Do returned an error:\", err)\n\t\t}\n\t\tdefer res.Body.Close()\n\n\t\tif res.StatusCode != 200 {\n\t\t\tlog.Debug(\"response status code: \", res.StatusCode, \" continuing to next plugin\")\n\t\t\tcontinue\n\t\t}\n\n\t\tlog.Debug(\"get the body out of the response\")\n\t\tbody, err := ioutil.ReadAll(res.Body)\n\t\tif err != nil {\n\t\t\tlog.Error(\"ioutil.ReadAll returned an error:\", err)\n\t\t}\n\n\t\tif len(body) < 1 {\n\t\t\tlog.Debug(\"body was empty, continue to next plugin\")\n\t\t\tcontinue\n\t\t}\n\n\t\tlog.Debug(\"create temp map object\")\n\t\tvar tempMap restPluginsAvailable\n\n\t\tlog.Debug(\"unmarshal (turn unicode back into a string) request body into map structure\")\n\t\terr = json.Unmarshal(body, &tempMap)\n\t\tif err != nil {\n\t\t\tlog.Error(\"error Unmarshalling: \", err)\n\t\t\tlog.Info(\"Problem unmarshalling the following string: \", string(body))\n\t\t}\n\n\t\t// add the enabled value from the plugin map to the available map\n\t\ttempMap.Enabled = plugin.Enabled\n\n\t\tlog.Debug(\"adding plugin: \", tempMap.Name, \", and Key: \", tempMap.Key)\n\t\tavailablePluginsMap = append(availablePluginsMap, tempMap)\n\n\t}\n\n\treturn availablePluginsMap\n}", "func (CfRecycleCmd) GetMetadata() plugin.PluginMetadata {\n\treturn plugin.PluginMetadata{\n\t\tName: PluginName,\n\t\tVersion: plugin.VersionType{\n\t\t\tMajor: 1,\n\t\t\tMinor: 0,\n\t\t\tBuild: 0,\n\t\t},\n\t\tCommands: []plugin.Command{\n\t\t\t{\n\t\t\t\tName: \"recycle\",\n\t\t\t\tHelpText: CfRecyclePluginHelpText,\n\t\t\t},\n\t\t},\n\t}\n}", "func (e *PluginExec) GetVersionInfo(pluginPath string) (version.PluginInfo, error) {\n\targs := &Args{\n\t\tCommand: \"VERSION\",\n\n\t\t// set fake values required by plugins built against an older version of skel\n\t\tNetNS: \"dummy\",\n\t\tIfName: \"dummy\",\n\t\tPath: \"dummy\",\n\t}\n\tstdin := 
[]byte(fmt.Sprintf(`{\"cniVersion\":%q}`, version.Current()))\n\tstdoutBytes, err := e.RawExec.ExecPlugin(pluginPath, stdin, args.AsEnv())\n\tif err != nil {\n\t\tif err.Error() == \"unknown CNI_COMMAND: VERSION\" {\n\t\t\treturn version.PluginSupports(\"0.1.0\"), nil\n\t\t}\n\t\treturn nil, err\n\t}\n\n\treturn e.VersionDecoder.Decode(stdoutBytes)\n}", "func ForPlugin(pluginName string) PluginConfig {\n\treturn &pluginConfig{pluginName: pluginName}\n}", "func metainfo(obj MilpaObject) (string, string, error) {\n\tv := reflect.ValueOf(obj)\n\tif v.Kind() != reflect.Ptr {\n\t\treturn \"\", \"\", fmt.Errorf(\"Expected pointer, but got %v\",\n\t\t\tv.Type().Name())\n\t}\n\tv = v.Elem()\n\tif v.Kind() != reflect.Struct {\n\t\treturn \"\", \"\", fmt.Errorf(\"Expected struct, but got %v: %v (%#v)\",\n\t\t\tv.Kind(), v.Type().Name(), v.Interface())\n\t}\n\tkind := v.FieldByName(\"Kind\")\n\tif !kind.IsValid() {\n\t\treturn \"\", \"\", fmt.Errorf(\"%v does not have TypeMeta\", v)\n\t}\n\tapiversion := v.FieldByName(\"APIVersion\")\n\tif !apiversion.IsValid() {\n\t\treturn \"\", \"\", fmt.Errorf(\"%v does not have TypeMeta\", v)\n\t}\n\treturn kind.String(), apiversion.String(), nil\n}", "func (p *servicePlugin) Name() string { return \"protorpc\" }", "func (*PluginOptions) Descriptor() ([]byte, []int) {\n\treturn file_odpf_optimus_plugins_base_proto_rawDescGZIP(), []int{2}\n}", "func (dm *DCOSMetadata) Description() string {\n\treturn \"Plugin for adding metadata to dcos-specific metrics\"\n}", "func NewPodmanDriver(logger hclog.Logger) drivers.DriverPlugin {\n\tctx, cancel := context.WithCancel(context.Background())\n\tlogger = logger.Named(pluginName)\n\treturn &Driver{\n\t\teventer: eventer.NewEventer(ctx, logger),\n\t\tconfig: &Config{},\n\t\ttasks: newTaskStore(),\n\t\tctx: ctx,\n\t\tsignalShutdown: cancel,\n\t\tlogger: logger,\n\t}\n}", "func newPluginProvider(pluginBinDir string, provider kubeletconfig.CredentialProvider) (*pluginProvider, error) {\n\tmediaType := 
\"application/json\"\n\tinfo, ok := runtime.SerializerInfoForMediaType(codecs.SupportedMediaTypes(), mediaType)\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"unsupported media type %q\", mediaType)\n\t}\n\n\tgv, ok := apiVersions[provider.APIVersion]\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"invalid apiVersion: %q\", provider.APIVersion)\n\t}\n\n\tclock := clock.RealClock{}\n\n\treturn &pluginProvider{\n\t\tclock: clock,\n\t\tmatchImages: provider.MatchImages,\n\t\tcache: cache.NewExpirationStore(cacheKeyFunc, &cacheExpirationPolicy{clock: clock}),\n\t\tdefaultCacheDuration: provider.DefaultCacheDuration.Duration,\n\t\tlastCachePurge: clock.Now(),\n\t\tplugin: &execPlugin{\n\t\t\tname: provider.Name,\n\t\t\tapiVersion: provider.APIVersion,\n\t\t\tencoder: codecs.EncoderForVersion(info.Serializer, gv),\n\t\t\tpluginBinDir: pluginBinDir,\n\t\t\targs: provider.Args,\n\t\t\tenvVars: provider.Env,\n\t\t\tenviron: os.Environ,\n\t\t},\n\t}, nil\n}", "func getPluginInfos(r *http.Request, pullLatest, requireSubfolder bool) ([]Plugin, string, string, string, error) {\n\terr := r.ParseForm()\n\tif err != nil {\n\t\treturn nil, \"\", \"\", \"\", err\n\t}\n\n\trepo := strings.TrimSpace(r.Form.Get(\"clone_url\"))\n\tversion := strings.TrimSpace(r.Form.Get(\"version\"))\n\tsubfolder := strings.TrimSpace(r.Form.Get(\"subfolder\"))\n\tif repo == \"\" {\n\t\treturn nil, \"\", \"\", \"\", fmt.Errorf(\"missing required field(s)\")\n\t}\n\n\t// assume root of repository if no subfolder given, and if one is required\n\tif requireSubfolder && subfolder == \"\" {\n\t\tsubfolder = \".\"\n\t}\n\n\t// get list of plugins\n\tinfos, err := allPluginInfos(repo, version, subfolder, pullLatest)\n\tif err != nil {\n\t\treturn nil, \"\", \"\", \"\", fmt.Errorf(\"getting plugin list: %v\", err)\n\t}\n\n\t// reject as error if any plugin name is not unique within this repo,\n\t// since it is otherwise impossible to distinguish one from another;\n\t// plugins don't have IDs until they're in the 
database\n\tif duplicate, dupName := anyDuplicatePluginName(infos); duplicate {\n\t\treturn nil, \"\", \"\", \"\",\n\t\t\tfmt.Errorf(\"plugin name '%s' is not unique within repo %s\", dupName, repo)\n\t}\n\n\treturn infos, repo, version, subfolder, nil\n}", "func (p *ProtocGenGoPlugin) Name() string {\n\treturn ProtocGenGoPluginName\n}", "func podInfo(pod v1.Pod) (result string) {\n\tpodname := pod.Name\n\tpodstatus := strings.ToLower(string(pod.Status.Phase))\n\timages := \"\"\n\tfor _, container := range pod.Spec.Containers {\n\t\timages += fmt.Sprintf(\"%v \", container.Image)\n\t}\n\tresult += fmt.Sprintf(\"pod [%v] is %v and uses image(s) %v\\n\", podname, podstatus, images)\n\treturn result\n}", "func (p *Plugins) ParsePlugin(name, tag string) (*Plugin, error) {\n\tscript := `\n\tvar plugin = require('` + name + `')\n\tvar pjson = require('` + name + `/package.json')\n\n\tplugin.name = pjson.name\n\tplugin.version = pjson.version\n\n\tconsole.log(JSON.stringify(plugin))`\n\tcmd, done := p.RunScript(script)\n\tcmd.Stderr = Stderr\n\toutput, err := cmd.Output()\n\tdone()\n\n\tif err != nil {\n\t\treturn nil, merry.Errorf(\"Error installing plugin %s\", name)\n\t}\n\tvar plugin Plugin\n\tplugin.UpdatedAt = time.Now()\n\tplugin.Tag = tag\n\terr = json.Unmarshal(output, &plugin)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Error parsing plugin: %s\\n%s\\n%s\\nIs this a real CLI plugin?\", name, err, string(output))\n\t}\n\tif len(plugin.Commands) == 0 {\n\t\treturn nil, fmt.Errorf(\"Invalid plugin. 
No commands found.\")\n\t}\n\tfor _, command := range plugin.Commands {\n\t\tif command == nil {\n\t\t\tcontinue\n\t\t}\n\t\tcommand.Plugin = plugin.Name\n\t\tcommand.Help = strings.TrimSpace(command.Help)\n\t}\n\tp.addToCache(&plugin)\n\treturn &plugin, nil\n}", "func GetPluginOuts(plugins []*PluginConfiguration) map[string]string {\n\touts := make(map[string]string)\n\tfor _, plugin := range plugins {\n\t\tif plugin.Out == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\touts[plugin.Label.String()] = plugin.Out\n\t}\n\treturn outs\n}", "func MarshalPlugin(plugin types.Plugin, opts ...MarshalOption) ([]byte, error) {\n\tif err := plugin.CheckAndSetDefaults(); err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\tcfg, err := CollectOptions(opts)\n\tif err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\tswitch plugin := plugin.(type) {\n\tcase *types.PluginV1:\n\t\tif !cfg.PreserveResourceID {\n\t\t\tcopy := *plugin\n\t\t\tcopy.SetResourceID(0)\n\t\t\tplugin = &copy\n\t\t}\n\t\tvar buf bytes.Buffer\n\t\terr := (&jsonpb.Marshaler{}).Marshal(&buf, plugin)\n\t\tif err != nil {\n\t\t\treturn nil, trace.Wrap(err)\n\t\t}\n\t\treturn buf.Bytes(), nil\n\tdefault:\n\t\treturn nil, trace.BadParameter(\"unsupported plugin resource %T\", plugin)\n\t}\n}", "func GetPluginData(pluginID string) (restClient.Plugin, *errors.Error) {\n\tvar plugin restClient.Plugin\n\n\tconn, err := common.GetDBConnection(common.OnDisk)\n\tif err != nil {\n\t\treturn plugin, errors.PackError(err.ErrNo(), \"error while trying to connect to DB: \", err.Error())\n\t}\n\n\tplugindata, err := conn.Read(\"Plugin\", pluginID)\n\tif err != nil {\n\t\treturn plugin, errors.PackError(err.ErrNo(), \"error while trying to fetch plugin data: \", err.Error())\n\t}\n\n\tif err := json.Unmarshal([]byte(plugindata), &plugin); err != nil {\n\t\treturn plugin, errors.PackError(errors.JSONUnmarshalFailed, err)\n\t}\n\n\tbytepw, errs := common.DecryptWithPrivateKey([]byte(plugin.Password))\n\tif errs != nil {\n\t\treturn 
restClient.Plugin{}, errors.PackError(errors.DecryptionFailed, \"error: \"+pluginID+\" plugin password decryption failed: \"+errs.Error())\n\t}\n\tplugin.Password = bytepw\n\n\treturn plugin, nil\n}", "func (plugin *ExamplePlugin) String() string {\n\treturn PluginName\n}", "func (s *OidcService) GetOIDCProviderPluginDescriptorCommand(input *GetOIDCProviderPluginDescriptorCommandInput) (output *models.DescriptorView, resp *http.Response, err error) {\n\tpath := \"/oidc/provider/descriptors/{pluginType}\"\n\tpath = strings.Replace(path, \"{pluginType}\", input.PluginType, -1)\n\n\top := &request.Operation{\n\t\tName: \"GetOIDCProviderPluginDescriptorCommand\",\n\t\tHTTPMethod: \"GET\",\n\t\tHTTPPath: path,\n\t\tQueryParams: map[string]string{},\n\t}\n\toutput = &models.DescriptorView{}\n\treq := s.newRequest(op, nil, output)\n\n\tif req.Send() == nil {\n\t\treturn output, req.HTTPResponse, nil\n\t}\n\treturn nil, req.HTTPResponse, req.Error\n}", "func (m *Manager) Plugin(name string) (interface{}, error) {\n\tm.l.RLock()\n\tdefer m.l.RUnlock()\n\tplg, ok := m.plugins[name]\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"plugin not found\")\n\t}\n\tif plg.API == nil {\n\t\treturn nil, fmt.Errorf(\"plugin does not define an API\")\n\t}\n\n\treturn plg.API(), nil\n}", "func metadataCmd(pr *external.PluginRequest) external.PluginResponse {\n\tpluginResponse := external.PluginResponse{\n\t\tAPIVersion: \"v1alpha1\",\n\t\tCommand: \"flags\",\n\t\tUniverse: pr.Universe,\n\t}\n\n\t// Here is an example of parsing multiple flags from a Kubebuilder external plugin request\n\tflagsToParse := pflag.NewFlagSet(\"flagsFlags\", pflag.ContinueOnError)\n\tflagsToParse.Bool(\"init\", false, \"sets the init flag to true\")\n\tflagsToParse.Bool(\"api\", false, \"sets the api flag to true\")\n\tflagsToParse.Bool(\"webhook\", false, \"sets the webhook flag to true\")\n\n\tflagsToParse.Parse(pr.Args)\n\n\tinitFlag, _ := flagsToParse.GetBool(\"init\")\n\tapiFlag, _ := 
flagsToParse.GetBool(\"api\")\n\twebhookFlag, _ := flagsToParse.GetBool(\"webhook\")\n\n\t// The Phase 2 Plugins implementation will only ever pass a single boolean flag\n\t// argument in the JSON request `args` field. The flag will be `--init` if it is\n\t// attempting to get the flags for the `init` subcommand, `--api` for `create api`,\n\t// `--webhook` for `create webhook`, and `--edit` for `edit`\n\tif initFlag {\n\t\t// Populate the JSON response `metadata` field with a description\n\t\t// and examples for the `init` subcommand\n\t\tpluginResponse.Metadata = scaffolds.InitMeta\n\t} else if apiFlag {\n\t\tpluginResponse.Metadata = scaffolds.ApiMeta\n\t} else if webhookFlag {\n\t\tpluginResponse.Metadata = scaffolds.WebhookMeta\n\t} else {\n\t\tpluginResponse.Error = true\n\t\tpluginResponse.ErrorMsgs = []string{\n\t\t\t\"unrecognized flag\",\n\t\t}\n\t}\n\n\treturn pluginResponse\n}", "func (pm *pluginManager) getPlugin(name string) (NodePlugin, error) {\n\tif p, ok := pm.plugins[name]; ok {\n\t\treturn p, nil\n\t}\n\n\tpc, err := pm.pg.Get(name, DockerCSIPluginCap, plugingetter.Lookup)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpa, ok := pc.(plugingetter.PluginAddr)\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"plugin does not implement PluginAddr interface\")\n\t}\n\n\tp := pm.newNodePluginFunc(name, pc, pa, pm.secrets)\n\tpm.plugins[name] = p\n\treturn p, nil\n}", "func (*PluginListReply) Descriptor() ([]byte, []int) {\n\treturn file_plugin_proto_rawDescGZIP(), []int{14}\n}", "func (*ConnectPlugin) Name() string {\n\treturn \"connect-to-list-plugin\"\n}", "func (Plugin) Name() string { return pluginName }", "func Meta() *plugin.PluginMeta {\n\treturn plugin.NewPluginMeta(name, version, pluginType, []string{plugin.SnapGOBContentType}, []string{plugin.SnapGOBContentType})\n}", "func Meta() *plugin.PluginMeta {\n\treturn plugin.NewPluginMeta(name, version, pluginType, []string{plugin.SnapGOBContentType}, []string{plugin.SnapGOBContentType})\n}", "func 
(*Plugin) Name() string {\n\treturn \"ds-cosmos\"\n}", "func (*Plugin) Descriptor() ([]byte, []int) {\n\treturn file_plugin_proto_rawDescGZIP(), []int{0}\n}", "func NewPodmanDriver(logger hclog.Logger) drivers.DriverPlugin {\n\tctx, cancel := context.WithCancel(context.Background())\n\treturn &Driver{\n\t\teventer: eventer.NewEventer(ctx, logger),\n\t\tconfig: &PluginConfig{},\n\t\ttasks: newTaskStore(),\n\t\tctx: ctx,\n\t\tsignalShutdown: cancel,\n\t\tlogger: logger.Named(pluginName),\n\t}\n}", "func (a *Agent) GetPluginTypes(p interface{}) (map[string]interface{}, error) {\n\n\tdata := reflect.ValueOf(p).Elem() // extract Value of type interface{} from Value pointer to interface\n\tschema := getFieldType(data.Type())\n\n\ts, ok := schema.(map[string]interface{})\n\n\tif ok {\n\t\treturn s, nil\n\t}\n\n\treturn nil, fmt.Errorf(\"returned schema is not a map\")\n}", "func NewPluginMeta(name string, version int, pluginType PluginType, acceptContentTypes, returnContentTypes []string, opts ...metaOp) *PluginMeta {\n\t// An empty accepted content type default to \"snap.*\"\n\tif len(acceptContentTypes) == 0 {\n\t\tacceptContentTypes = append(acceptContentTypes, \"snap.*\")\n\t}\n\t// Validate content type formats\n\tfor _, s := range acceptContentTypes {\n\t\tb, e := regexp.MatchString(`^[a-z0-9*]+\\.[a-z0-9*]+$`, s)\n\t\tif e != nil {\n\t\t\tpanic(e)\n\t\t}\n\t\tif !b {\n\t\t\tpanic(fmt.Sprintf(\"Bad accept content type [%s] for [%d] [%s]\", name, version, s))\n\t\t}\n\t}\n\tfor _, s := range returnContentTypes {\n\t\tb, e := regexp.MatchString(`^[a-z0-9*]+\\.[a-z0-9*]+$`, s)\n\t\tif e != nil {\n\t\t\tpanic(e)\n\t\t}\n\t\tif !b {\n\t\t\tpanic(fmt.Sprintf(\"Bad return content type [%s] for [%d] [%s]\", name, version, s))\n\t\t}\n\t}\n\n\tp := &PluginMeta{\n\t\tName: name,\n\t\tVersion: version,\n\t\tType: pluginType,\n\t\tAcceptedContentTypes: acceptContentTypes,\n\t\tReturnedContentTypes: returnContentTypes,\n\n\t\t//set the default for concurrency count to 
1\n\t\tConcurrencyCount: 1,\n\t}\n\n\tfor _, opt := range opts {\n\t\topt(p)\n\t}\n\n\treturn p\n}", "func (o KafkaConnectorOutput) PluginType() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *KafkaConnector) pulumi.StringOutput { return v.PluginType }).(pulumi.StringOutput)\n}", "func (o ArgoCDSpecOutput) ConfigManagementPlugins() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ArgoCDSpec) *string { return v.ConfigManagementPlugins }).(pulumi.StringPtrOutput)\n}", "func (p *Plugin) NodeGetInfo(\n\tctx context.Context,\n\treq *csi.NodeGetInfoRequest) (\n\t*csi.NodeGetInfoResponse, error) {\n\n\tglog.Info(\"start to GetNodeInfo\")\n\tdefer glog.Info(\"end to GetNodeInfo\")\n\n\tif client == nil {\n\t\tmsg := \"client is nil\"\n\t\tglog.Error(msg)\n\t\treturn nil, status.Error(codes.InvalidArgument, msg)\n\t}\n\n\thostName, err := connector.GetHostName()\n\tif err != nil {\n\t\tmsg := fmt.Sprintf(\"failed to get node name %v\", err)\n\t\tglog.Error(msg)\n\t\treturn nil, status.Error(codes.FailedPrecondition, msg)\n\t}\n\n\tvar initiators []string\n\n\tvolDriverTypes := []string{connector.FcDriver, connector.IscsiDriver}\n\n\tfor _, volDriverType := range volDriverTypes {\n\t\tvolDriver := connector.NewConnector(volDriverType)\n\t\tif volDriver == nil {\n\t\t\tglog.Errorf(\"unsupport volDriver: %s\", volDriverType)\n\t\t\tcontinue\n\t\t}\n\n\t\tinitiator, err := volDriver.GetInitiatorInfo()\n\t\tif err != nil {\n\t\t\tglog.Errorf(\"cannot get initiator for driver volume type %s, err: %v\", volDriverType, err)\n\t\t\tcontinue\n\t\t}\n\n\t\tinitiators = append(initiators, initiator)\n\t}\n\n\tif len(initiators) == 0 {\n\t\tmsg := fmt.Sprintf(\"cannot get any initiator for host %s\", hostName)\n\t\tglog.Error(msg)\n\t\treturn nil, status.Error(codes.FailedPrecondition, msg)\n\t}\n\n\tnodeId := hostName + \",\" + strings.Join(initiators, \",\")\n\n\tglog.Infof(\"node info is %s\", nodeId)\n\n\treturn &csi.NodeGetInfoResponse{\n\t\tNodeId: nodeId,\n\t}, nil\n}", "func 
GetPluginSpec() model.PluginSpec {\n\treturn model.PluginSpec{\n\t\tSpec: spec,\n\t\tType: model.OUTPUT_PLUGIN,\n\t}\n}", "func (p *ComputationPlugin) GetInfo(definition string, response *dagger.ComputationPluginInfo) error {\n\tp.mx.RLock()\n\tdefer p.mx.RUnlock()\n\tinfo, err := p.impl.GetInfo(definition)\n\t*response = info\n\treturn err\n}", "func listPlugins(dir string) (result []Plugin, err error) {\n\titems, err := os.ReadDir(dir)\n\tif err != nil {\n\t\treturn\n\t}\n\tfor _, item := range items {\n\t\tif item.IsDir() {\n\t\t\tcontinue\n\t\t}\n\t\tname := item.Name()\n\t\tif !strings.HasPrefix(name, pluginPrefix) {\n\t\t\tcontinue\n\t\t}\n\t\tpath := filepath.Join(dir, name)\n\t\tvar exec bool\n\t\texec, err = isExecutable(path)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t\tif !exec {\n\t\t\tfmt.Printf(\"Warning: %s identified as an ocm plugin, but it is not executable.\\n\", path)\n\t\t}\n\t\tif runtime.GOOS == \"windows\" {\n\t\t\tname = strings.TrimSuffix(name, \".exe\")\n\t\t}\n\t\tplugin := Plugin{\n\t\t\tName: name,\n\t\t\tPath: dir,\n\t\t}\n\t\tresult = append(result, plugin)\n\t}\n\treturn\n}", "func (p *PluginAPI) String() string {\n\tvar lines []string\n\tlines = append(lines, \"Struct PluginAPI:\")\n\tlines = append(lines, \"\\t- Generated on: 2016-12-06 08:06:59.815134005 +0100 CET\")\n\tlines = append(lines, \"\\t- Command: go-bind-plugin -plugin-path plugin.so -plugin-package ./plugin -output-name PluginAPI -output-path plugin_api.go -output-package main -dereference-vars -rebuild\")\n\tlines = append(lines, \"\\nPlugin info:\")\n\tlines = append(lines, \"\\t- package: github.com/wendigo/go-bind-plugin-example/plugin\")\n\tlines = append(lines, \"\\t- sha256 sum: 303cd891bd37c209c0ad0798673bd871af8e6bae3673cc7c261abb54e76e6ae9\")\n\tlines = append(lines, \"\\t- size: 2578348 bytes\")\n\tlines = append(lines, \"\\nExported functions (2):\")\n\tlines = append(lines, \"\\t- CalculateSin func(float64) (float64)\")\n\tlines = append(lines, \"\\t- 
SayHello func(string)\")\n\n\tlines = append(lines, \"\\nExported variables (1):\")\n\tlines = append(lines, \"\\t- CurrentYear int\")\n\n\treturn strings.Join(lines, \"\\n\")\n}", "func (s *OidcService) GetOIDCProviderPluginDescriptorsCommand() (output *models.DescriptorsView, resp *http.Response, err error) {\n\tpath := \"/oidc/provider/descriptors\"\n\top := &request.Operation{\n\t\tName: \"GetOIDCProviderPluginDescriptorsCommand\",\n\t\tHTTPMethod: \"GET\",\n\t\tHTTPPath: path,\n\t}\n\toutput = &models.DescriptorsView{}\n\treq := s.newRequest(op, nil, output)\n\n\tif req.Send() == nil {\n\t\treturn output, req.HTTPResponse, nil\n\t}\n\treturn nil, req.HTTPResponse, req.Error\n}", "func (p *Pool) PluginStatisticPerHost() map[string][]PluginStatistic {\n\tp.mutex.Lock()\n\tdefer p.mutex.Unlock()\n\tresult := make(map[string][]PluginStatistic)\n\tfor host, pls := range p.hosts {\n\t\tif _, ok := result[host]; !ok {\n\t\t\tresult[host] = make([]PluginStatistic, 0)\n\t\t}\n\t\tfor _, pl := range pls.plugins {\n\t\t\tresult[host] = append(result[host], pl.getStatistics())\n\t\t}\n\t}\n\treturn result\n}", "func (Plugin) Version() plugin.Version { return pluginVersion }", "func Meta() *plugin.PluginMeta {\n\treturn plugin.NewPluginMeta(Name, Version, PluginType, []string{plugin.SnapGOBContentType}, []string{plugin.SnapGOBContentType})\n}", "func (r *Resolver) GetInfo() *model.NodeInfo {\n\tioregOutput := runCommandOrFail(\"ioreg\", \"-rd1\", \"-c\", \"IOPlatformExpertDevice\")\n\thostname, _ := os.Hostname()\n\n\treturn &model.NodeInfo{\n\t\tVersion: r.version,\n\t\tUptime: 0,\n\t\tLabels: r.labels,\n\t\tArch: runtime.GOARCH,\n\t\tOS: runtime.GOOS,\n\t\tHostname: hostname,\n\t\tAddresses: getAddresses(),\n\t\tGrpcPort: r.grpcPort,\n\n\t\tMachineID: parseFieldFromIoregOutput(ioregOutput, \"IOPlatformSerialNumber\"),\n\t\tSystemUUID: parseFieldFromIoregOutput(ioregOutput, \"IOPlatformUUID\"),\n\t\tBootID: runCommandOrFail(\"/usr/bin/uuidgen\"),\n\n\t\tFilesystems: 
resolveFilesystems(),\n\t}\n}", "func (*GeneratePluginsResponse) Descriptor() ([]byte, []int) {\n\treturn file_buf_alpha_registry_v1alpha1_generate_proto_rawDescGZIP(), []int{4}\n}", "func (o ArgoCDSpecPtrOutput) ConfigManagementPlugins() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ArgoCDSpec) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.ConfigManagementPlugins\n\t}).(pulumi.StringPtrOutput)\n}", "func PluginInitialize(name string, impl PluginInterface) unsafe.Pointer {\n\t// Initialize the connection map. Note that connection IDs\n\t// must start counting from 1 since we must never return what\n\t// looks like a NULL pointer to the C code.\n\tconnectionMap = make(map[uintptr]ConnectionInterface)\n\tnextConnectionId = 1\n\n\tpluginImpl = impl\n\n\tplugin := C.struct_nbdkit_plugin{}\n\n\t// Set up the hidden plugin fields as for C.\n\tstruct_size := C.ulong(unsafe.Sizeof(plugin))\n\tplugin._struct_size = struct_size\n\tplugin._api_version = C.NBDKIT_API_VERSION\n\tplugin._thread_model = C.NBDKIT_THREAD_MODEL_PARALLEL\n\n\t// Set up the other fields.\n\tplugin.name = C.CString(name)\n\tplugin.load = (*[0]byte)(C.wrapper_load)\n\tplugin.unload = (*[0]byte)(C.wrapper_unload)\n\tplugin.dump_plugin = (*[0]byte)(C.wrapper_dump_plugin)\n\tplugin.config = (*[0]byte)(C.wrapper_config)\n\tplugin.config_complete = (*[0]byte)(C.wrapper_config_complete)\n\tplugin.get_ready = (*[0]byte)(C.wrapper_get_ready)\n\tplugin.preconnect = (*[0]byte)(C.wrapper_preconnect)\n\tplugin.open = (*[0]byte)(C.wrapper_open)\n\tplugin.close = (*[0]byte)(C.wrapper_close)\n\tplugin.get_size = (*[0]byte)(C.wrapper_get_size)\n\tplugin.can_write = (*[0]byte)(C.wrapper_can_write)\n\tplugin.can_flush = (*[0]byte)(C.wrapper_can_flush)\n\tplugin.is_rotational = (*[0]byte)(C.wrapper_is_rotational)\n\tplugin.can_trim = (*[0]byte)(C.wrapper_can_trim)\n\tplugin.can_zero = (*[0]byte)(C.wrapper_can_zero)\n\tplugin.can_multi_conn = (*[0]byte)(C.wrapper_can_multi_conn)\n\tplugin.pread 
= (*[0]byte)(C.wrapper_pread)\n\tplugin.pwrite = (*[0]byte)(C.wrapper_pwrite)\n\tplugin.flush = (*[0]byte)(C.wrapper_flush)\n\tplugin.trim = (*[0]byte)(C.wrapper_trim)\n\tplugin.zero = (*[0]byte)(C.wrapper_zero)\n\n\t// Golang plugins don't preserve errno correctly.\n\tplugin.errno_is_preserved = 0\n\n\t// Return a newly malloced copy of the struct. This must be\n\t// globally available to the C code in the server, so it is\n\t// never freed.\n\tp := (*C.struct_nbdkit_plugin)(C.malloc(struct_size))\n\t*p = plugin\n\treturn unsafe.Pointer(p)\n}", "func (c *Controller) GetControllerInfo() controller.Info {\n\treturn controller.NewInfo(\n\t\tControllerID,\n\t\tVersion,\n\t\t\"hot plugin filesystem loader: \"+c.dir,\n\t)\n}", "func (o KafkaConnectorOutput) PluginTitle() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *KafkaConnector) pulumi.StringOutput { return v.PluginTitle }).(pulumi.StringOutput)\n}", "func info(command interface{}) (*CommandInfo, error) {\n\ttypeof := reflect.TypeOf(command)\n\n\t// Going up the pointer chain to find the underlying struct\n\tfor typeof.Kind() == reflect.Ptr {\n\t\ttypeof = typeof.Elem()\n\t}\n\n\tfield, ok := typeof.FieldByName(\"_\")\n\tif !ok {\n\t\treturn nil, fmt.Errorf(`missing meta (\"_\") field in %#v`, command)\n\t}\n\n\tname, nameOk := field.Tag.Lookup(\"name\")\n\tdescription, _ := field.Tag.Lookup(\"description\")\n\n\tif !nameOk {\n\t\treturn nil, fmt.Errorf(`missing \"name\" key in the tag string of %#v`, command)\n\t}\n\n\tinfo := &CommandInfo{\n\t\tName: name,\n\t\tDescription: description,\n\t}\n\n\treturn info, nil\n}", "func (daemon *Daemon) PluginManager() *plugin.Manager { // set up before daemon to avoid this method\n\treturn daemon.pluginManager\n}", "func (cli *Client) PluginList(ctx context.Context) (types.PluginsListResponse, error) {\n\tvar plugins types.PluginsListResponse\n\tresp, err := cli.get(ctx, \"/plugins\", nil, nil)\n\tif err != nil {\n\t\treturn plugins, err\n\t}\n\n\terr = 
json.NewDecoder(resp.body).Decode(&plugins)\n\tensureReaderClosed(resp)\n\treturn plugins, err\n}", "func getPluginStatus(ctx context.Context, plugin smodel.Plugin) bool {\n\tvar pluginStatus = common.PluginStatus{\n\t\tMethod: http.MethodGet,\n\t\tRequestBody: common.StatusRequest{\n\t\t\tComment: \"\",\n\t\t},\n\t\tPluginIP: plugin.IP,\n\t\tPluginPort: plugin.Port,\n\t\tResponseWaitTime: config.Data.PluginStatusPolling.ResponseTimeoutInSecs,\n\t\tCount: config.Data.PluginStatusPolling.MaxRetryAttempt,\n\t\tRetryInterval: config.Data.PluginStatusPolling.RetryIntervalInMins,\n\t\tCACertificate: &config.Data.KeyCertConf.RootCACertificate,\n\t}\n\tstatus, _, _, err := pluginStatus.CheckStatus()\n\tif err != nil && !status {\n\t\tl.LogWithFields(ctx).Warn(\"while getting the status for plugin \" + plugin.ID + err.Error())\n\t\treturn status\n\t}\n\tl.LogWithFields(ctx).Info(\"Status of plugin\" + plugin.ID + strconv.FormatBool(status))\n\treturn status\n}", "func (p *IntelPlugin) Name() string {\n\treturn p.PluginName\n}", "func (o KafkaConnectorOutput) PluginVersion() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *KafkaConnector) pulumi.StringOutput { return v.PluginVersion }).(pulumi.StringOutput)\n}", "func getPluginDataAndHTML(pluginManager plugin.PanelManager, page plugin.PageScope, ctx plugin.UIContext) pluginData {\n\tincludes, err := pluginManager.Includes(page)\n\tif err != nil {\n\t\tgrip.Errorf(\"error getting include html from plugin manager on %v page: %v\",\n\t\t\tpage, err)\n\t}\n\n\tpanels, err := pluginManager.Panels(page)\n\tif err != nil {\n\t\tgrip.Errorf(\"error getting panel html from plugin manager on %v page: %v\",\n\t\t\tpage, err)\n\t}\n\n\tdata, err := pluginManager.UIData(ctx, page)\n\tif err != nil {\n\t\tgrip.Errorf(\"error getting plugin data on %v page: %+v\", page, err)\n\t}\n\n\treturn pluginData{includes, panels, data}\n}", "func (c *CNIManager) Plugin() ocicni.CNIPlugin {\n\treturn c.plugin\n}", "func 
GetDevicePluginCount(pluginKind string) int {\n\treturn bKeeper.count(pluginKind)\n}", "func Install(providerName, versionConstraint, installDir string, cacheBinary bool) (discovery.PluginMeta, error) {\n\texpandedInstallDir, err := goHomeDir.Expand(installDir)\n\tif err != nil {\n\t\treturn discovery.PluginMeta{}, err\n\t}\n\n\tproviderInstaller := &discovery.ProviderInstaller{\n\t\tDir: filepath.FromSlash(expandedInstallDir),\n\t\tCache: func() discovery.PluginCache {\n\t\t\tif cacheBinary {\n\t\t\t\treturn discovery.NewLocalPluginCache(filepath.FromSlash(expandedInstallDir + \"/cache\"))\n\t\t\t}\n\t\t\treturn nil\n\t\t}(),\n\t\tPluginProtocolVersion: discovery.PluginInstallProtocolVersion,\n\t\tSkipVerify: false,\n\t\tUi: &cli.BasicUi{\n\t\t\tReader: os.Stdin,\n\t\t\tWriter: &bytes.Buffer{},\n\t\t\tErrorWriter: os.Stderr,\n\t\t},\n\t}\n\n\tproviderConstraint := discovery.AllVersions\n\n\tif versionConstraint != \"\" {\n\t\tconstraints, err := version.NewConstraint(versionConstraint)\n\t\tif err != nil {\n\t\t\treturn discovery.PluginMeta{}, fmt.Errorf(\"failed to parse provider version constraint: %s\", err)\n\t\t}\n\n\t\tproviderConstraint = discovery.NewConstraints(constraints)\n\t}\n\n\tpty := addrs.NewLegacyProvider(providerName)\n\n\tmeta, tfDiagnostics, err := providerInstaller.Get(pty, providerConstraint)\n\tif err != nil {\n\t\ttfDiagnostics = tfDiagnostics.Append(err)\n\t\treturn discovery.PluginMeta{}, tfDiagnostics.Err()\n\t}\n\n\treturn meta, nil\n}", "func (*ProviderInfo) Descriptor() ([]byte, []int) {\n\treturn file_api_hourglass_v1_provider_proto_rawDescGZIP(), []int{1}\n}" ]
[ "0.69334555", "0.6821398", "0.67342067", "0.6526161", "0.6484056", "0.63790214", "0.6310753", "0.6254248", "0.623014", "0.6096925", "0.6095875", "0.60840565", "0.60450107", "0.5966684", "0.58506525", "0.5838877", "0.5838877", "0.58286035", "0.5825507", "0.57368577", "0.57027465", "0.5616712", "0.5556204", "0.55530447", "0.5547227", "0.54580873", "0.54580873", "0.5396906", "0.5365164", "0.5341463", "0.5312307", "0.53005934", "0.5266719", "0.5259924", "0.52476317", "0.52391344", "0.52386457", "0.5214321", "0.51613176", "0.5139924", "0.5136796", "0.5130802", "0.5107548", "0.51009053", "0.5092041", "0.508747", "0.5071978", "0.5071037", "0.5058421", "0.50446135", "0.504406", "0.5032594", "0.50252724", "0.50249606", "0.5014956", "0.5005936", "0.50017387", "0.5001475", "0.4994422", "0.49747944", "0.4964909", "0.49621266", "0.49619335", "0.4960684", "0.49508923", "0.49457076", "0.49457076", "0.49441338", "0.49431512", "0.49420542", "0.49413216", "0.49295974", "0.49240786", "0.49236548", "0.4923528", "0.49208713", "0.490955", "0.49033776", "0.48970467", "0.48933414", "0.4891225", "0.48879042", "0.4883942", "0.48775473", "0.48746425", "0.48730004", "0.4867015", "0.48655784", "0.48637918", "0.48576015", "0.48573548", "0.4857339", "0.48568907", "0.48454273", "0.4845305", "0.48157415", "0.4815398", "0.4815051", "0.48144308", "0.48119184" ]
0.69817346
0
ConfigSchema function allows a plugin to tell Nomad the schema for its configuration. This configuration is given in a plugin block of the client configuration. The schema is defined with the hclspec package.
func (d *Driver) ConfigSchema() (*hclspec.Spec, error) { return configSpec, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func hookConfigurationSchema() *schema.Schema {\n\treturn &schema.Schema{\n\t\tType: schema.TypeList,\n\t\tOptional: true,\n\t\tMaxItems: 1,\n\t\tElem: &schema.Resource{\n\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\"invocation_condition\": func() *schema.Schema {\n\t\t\t\t\tschema := documentAttributeConditionSchema()\n\t\t\t\t\treturn schema\n\t\t\t\t}(),\n\t\t\t\t\"lambda_arn\": {\n\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\tRequired: true,\n\t\t\t\t\tValidateFunc: verify.ValidARN,\n\t\t\t\t},\n\t\t\t\t\"s3_bucket\": {\n\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\tRequired: true,\n\t\t\t\t\tValidateFunc: validation.All(\n\t\t\t\t\t\tvalidation.StringLenBetween(3, 63),\n\t\t\t\t\t\tvalidation.StringMatch(\n\t\t\t\t\t\t\tregexp.MustCompile(`[a-z0-9][\\.\\-a-z0-9]{1,61}[a-z0-9]`),\n\t\t\t\t\t\t\t\"Must be a valid bucket name\",\n\t\t\t\t\t\t),\n\t\t\t\t\t),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func tagMakeConfigSchema(tagAttrName schemaAttr) *schema.Schema {\n\treturn &schema.Schema{\n\t\tType: schema.TypeSet,\n\t\tOptional: true,\n\t\tElem: &schema.Schema{\n\t\t\tType: schema.TypeString,\n\t\t\tValidateFunc: validateTag,\n\t\t},\n\t}\n}", "func ImageConfigSchema() *gojsonschema.Schema {\n\treturn loadSchema(\"image-config.schema.json\")\n}", "func BackendSchema(factoryName string) (*config.ConfigurationSchema, error) {\n\tif backendRegistry[factoryName] == nil {\n\t\treturn nil, fmt.Errorf(\"The adapter %s is not registered Processor cannot be created\", factoryName)\n\t}\n\treturn backendRegistry[factoryName].configurationSchema, nil\n}", "func ClientConfig(m discovery.PluginMeta) *plugin.ClientConfig {\n\tlogger := hclog.New(&hclog.LoggerOptions{\n\t\tName: \"plugin\",\n\t\tLevel: hclog.Trace,\n\t\tOutput: os.Stderr,\n\t})\n\n\treturn &plugin.ClientConfig{\n\t\tCmd: exec.Command(m.Path),\n\t\tHandshakeConfig: Handshake,\n\t\tVersionedPlugins: VersionedPlugins,\n\t\tManaged: true,\n\t\tLogger: logger,\n\t\tAllowedProtocols: 
[]plugin.Protocol{plugin.ProtocolGRPC},\n\t\tAutoMTLS: true,\n\t}\n}", "func (o TableExternalDataConfigurationOutput) Schema() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v TableExternalDataConfiguration) *string { return v.Schema }).(pulumi.StringPtrOutput)\n}", "func (o ParserConfigOutput) Schema() SchemaPackagePtrOutput {\n\treturn o.ApplyT(func(v ParserConfig) *SchemaPackage { return v.Schema }).(SchemaPackagePtrOutput)\n}", "func clientConfig(m discovery.PluginMeta, loglevel hclog.Level) *goPlugin.ClientConfig {\n\tlogger := hclog.New(&hclog.LoggerOptions{\n\t\tName: \"plugin\",\n\t\tLevel: loglevel,\n\t\tOutput: os.Stderr,\n\t})\n\n\treturn &goPlugin.ClientConfig{\n\t\tCmd: exec.Command(m.Path), //nolint:gosec\n\t\tHandshakeConfig: plugin.Handshake,\n\t\tVersionedPlugins: plugin.VersionedPlugins,\n\t\tManaged: true,\n\t\tLogger: logger,\n\t\tAllowedProtocols: []goPlugin.Protocol{goPlugin.ProtocolGRPC},\n\t\tAutoMTLS: true,\n\t}\n}", "func GetClusterConfigSchema(extensionSchema string) string {\n\tvar clusterConfigSchema string\n\tif clusterConfigSchema == \"\" {\n\t\tclusterConfigSchema = fmt.Sprintf(ClusterConfigSpecSchemaTemplate, \"\")\n\t} else {\n\t\tclusterConfigSchema = fmt.Sprintf(ClusterConfigSpecSchemaTemplate, \",\"+extensionSchema)\n\t}\n\treturn fmt.Sprintf(V2SchemaTemplate, MetadataSchema, clusterConfigSchema, DefaultDefinitions)\n}", "func (o TableExternalDataConfigurationPtrOutput) Schema() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *TableExternalDataConfiguration) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Schema\n\t}).(pulumi.StringPtrOutput)\n}", "func (p *hostingdeProvider) Schema(_ context.Context, _ provider.SchemaRequest, resp *provider.SchemaResponse) {\n\tresp.Schema = schema.Schema{\n\t\tAttributes: map[string]schema.Attribute{\n\t\t\t\"account_id\": schema.StringAttribute{\n\t\t\t\tDescription: \"Account ID for hosting.de API. 
May also be provided via HOSTINGDE_ACCOUNT_ID environment variable.\",\n\t\t\t\tOptional: true,\n\t\t\t},\n\t\t\t\"auth_token\": schema.StringAttribute{\n\t\t\t\tDescription: \"Auth token for hosting.de API. May also be provided via HOSTINGDE_AUTH_TOKEN environment variable.\",\n\t\t\t\tOptional: true,\n\t\t\t\tSensitive: true,\n\t\t\t},\n\t\t},\n\t}\n}", "func (*S4ReportingPluginConfig) Descriptor() ([]byte, []int) {\n\treturn file_core_services_ocr2_plugins_functions_config_config_types_proto_rawDescGZIP(), []int{1}\n}", "func (*ReportingPluginConfig) Descriptor() ([]byte, []int) {\n\treturn file_core_services_ocr2_plugins_functions_config_config_types_proto_rawDescGZIP(), []int{2}\n}", "func (d *Driver) TaskConfigSchema() (*hclspec.Spec, error) {\n\treturn taskConfigSpec, nil\n}", "func autopilotConfigTableSchema() *memdb.TableSchema {\n\treturn &memdb.TableSchema{\n\t\tName: \"autopilot-config\",\n\t\tIndexes: map[string]*memdb.IndexSchema{\n\t\t\t\"id\": &memdb.IndexSchema{\n\t\t\t\tName: \"id\",\n\t\t\t\tAllowMissing: true,\n\t\t\t\tUnique: true,\n\t\t\t\tIndexer: &memdb.ConditionalIndex{\n\t\t\t\t\tConditional: func(obj interface{}) (bool, error) { return true, nil },\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func (o ParserConfigPtrOutput) Schema() SchemaPackagePtrOutput {\n\treturn o.ApplyT(func(v *ParserConfig) *SchemaPackage {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Schema\n\t}).(SchemaPackagePtrOutput)\n}", "func (o ParserConfigResponseOutput) Schema() SchemaPackageResponseOutput {\n\treturn o.ApplyT(func(v ParserConfigResponse) SchemaPackageResponse { return v.Schema }).(SchemaPackageResponseOutput)\n}", "func setupSchema(cli *cli.Context) error {\n\tparams, err := parseConnectParams(cli)\n\tif err != nil {\n\t\treturn handleErr(schema.NewConfigError(err.Error()))\n\t}\n\tconn, err := newConn(params)\n\tif err != nil {\n\t\treturn handleErr(err)\n\t}\n\tdefer conn.Close()\n\tif err := schema.Setup(cli, conn); err != nil {\n\t\treturn 
handleErr(err)\n\t}\n\treturn nil\n}", "func (p *PostProcessor) ConfigSpec() hcldec.ObjectSpec { return p.config.FlatMapstructure().HCL2Spec() }", "func GetUserConfigSchema(t string) map[string]interface{} {\n\tif _, ok := getUserConfigurationOptionsSchemaFilenames()[t]; !ok {\n\t\tlog.Panicf(\"user configuration options schema type `%s` is not available\", t)\n\t}\n\n\treturn userConfigSchemas[t]\n}", "func (o GoogleCloudHealthcareV1beta1FhirBigQueryDestinationOutput) SchemaConfig() SchemaConfigPtrOutput {\n\treturn o.ApplyT(func(v GoogleCloudHealthcareV1beta1FhirBigQueryDestination) *SchemaConfig { return v.SchemaConfig }).(SchemaConfigPtrOutput)\n}", "func (o ParserConfigResponsePtrOutput) Schema() SchemaPackageResponsePtrOutput {\n\treturn o.ApplyT(func(v *ParserConfigResponse) *SchemaPackageResponse {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn &v.Schema\n\t}).(SchemaPackageResponsePtrOutput)\n}", "func (*ThresholdReportingPluginConfig) Descriptor() ([]byte, []int) {\n\treturn file_core_services_ocr2_plugins_functions_config_config_types_proto_rawDescGZIP(), []int{0}\n}", "func GenSchemaOIDCConnectorV2() map[string]*schema.Schema {\n\treturn map[string]*schema.Schema{\n\t\t// Kind is a resource kind.\n\t\t\"kind\": {\n\t\t\tType: schema.TypeString,\n\t\t\tDescription: \"Kind is a resource kind.\",\n\t\t\tOptional: true,\n\t\t\tDefault: \"oidc\",\n\t\t},\n\t\t// SubKind is an optional resource sub kind, used in some resources.\n\t\t\"sub_kind\": {\n\t\t\tType: schema.TypeString,\n\t\t\tDescription: \"SubKind is an optional resource sub kind, used in some resources.\",\n\t\t\tOptional: true,\n\t\t\tDefault: \"\",\n\t\t},\n\t\t// Version is a resource version.\n\t\t\"version\": {\n\t\t\tType: schema.TypeString,\n\t\t\tDescription: \"Version is a resource version.\",\n\t\t\tOptional: true,\n\t\t\tDefault: \"v2\",\n\t\t},\n\t\t// Metadata holds resource metadata.\n\t\t\"metadata\": {\n\t\t\tType: schema.TypeList,\n\t\t\tMaxItems: 1,\n\t\t\tDescription: 
\"Metadata is resource metadata\",\n\n\t\t\tOptional: true,\n\t\t\tElem: &schema.Resource{\n\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t// Name is an object name\n\t\t\t\t\t\"name\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Name is an object name\",\n\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\tForceNew: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Namespace is object namespace. The field should be called \"namespace\"\n\t\t\t\t\t// when it returns in Teleport 2.4.\n\t\t\t\t\t\"namespace\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Namespace is object namespace. The field should be called \\\"namespace\\\" when it returns in Teleport 2.4.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tDefault: \"default\",\n\t\t\t\t\t},\n\t\t\t\t\t// Description is object description\n\t\t\t\t\t\"description\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Description is object description\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Labels is a set of labels\n\t\t\t\t\t\"labels\": {\n\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tType: schema.TypeMap,\n\t\t\t\t\t\tDescription: \"Labels is a set of labels\",\n\t\t\t\t\t\tElem: &schema.Schema{\n\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t// Expires is a global expiry time header can be set on any resource in the\n\t\t\t\t\t// system.\n\t\t\t\t\t\"expires\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Expires is a global expiry time header can be set on any resource in the system.\",\n\t\t\t\t\t\tValidateFunc: validation.IsRFC3339Time,\n\t\t\t\t\t\tStateFunc: TruncateMs,\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t// Spec is an OIDC connector specification.\n\t\t\"spec\": {\n\t\t\tType: schema.TypeList,\n\t\t\tMaxItems: 1,\n\t\t\tDescription: \"OIDCConnectorSpecV2 is an OIDC connector specification. 
It specifies configuration for Open ID Connect compatible external identity provider: https://openid.net/specs/openid-connect-core-1_0.html\",\n\n\t\t\tRequired: true,\n\t\t\tElem: &schema.Resource{\n\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t// IssuerURL is the endpoint of the provider, e.g. https://accounts.google.com.\n\t\t\t\t\t\"issuer_url\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"IssuerURL is the endpoint of the provider, e.g. https://accounts.google.com.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// ClientID is the id of the authentication client (Teleport Auth server).\n\t\t\t\t\t\"client_id\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"ClientID is the id of the authentication client (Teleport Auth server).\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// ClientSecret is used to authenticate the client.\n\t\t\t\t\t\"client_secret\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"ClientSecret is used to authenticate the client.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// RedirectURL is a URL that will redirect the client's browser\n\t\t\t\t\t// back to the identity provider after successful authentication.\n\t\t\t\t\t// This should match the URL on the Provider's side.\n\t\t\t\t\t\"redirect_url\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"RedirectURL is a URL that will redirect the client's browser back to the identity provider after successful authentication. This should match the URL on the Provider's side.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// ACR is an Authentication Context Class Reference value. The meaning of the ACR\n\t\t\t\t\t// value is context-specific and varies for identity providers.\n\t\t\t\t\t\"acr\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"ACR is an Authentication Context Class Reference value. 
The meaning of the ACR value is context-specific and varies for identity providers.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Provider is the external identity provider.\n\t\t\t\t\t\"provider\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Provider is the external identity provider.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Display is the friendly name for this provider.\n\t\t\t\t\t\"display\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Display is the friendly name for this provider.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Scope specifies additional scopes set by provider.\n\t\t\t\t\t\"scope\": {\n\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tType: schema.TypeList,\n\t\t\t\t\t\tDescription: \"Scope specifies additional scopes set by provider.\",\n\t\t\t\t\t\tElem: &schema.Schema{\n\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t// Prompt is an optional OIDC prompt. An empty string omits prompt.\n\t\t\t\t\t// If not specified, it defaults to select_account for backwards compatibility.\n\t\t\t\t\t\"prompt\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Prompt is an optional OIDC prompt. An empty string omits prompt. 
If not specified, it defaults to select_account for backwards compatibility.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// ClaimsToRoles specifies a dynamic mapping from claims to roles.\n\t\t\t\t\t\"claims_to_roles\": {\n\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tType: schema.TypeList,\n\t\t\t\t\t\tDescription: \"ClaimsToRoles specifies a dynamic mapping from claims to roles.\",\n\t\t\t\t\t\tElem: &schema.Resource{\n\t\t\t\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t\t\t\t// Claim is a claim name.\n\t\t\t\t\t\t\t\t\"claim\": {\n\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\tDescription: \"Claim is a claim name.\",\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t// Value is a claim value to match.\n\t\t\t\t\t\t\t\t\"value\": {\n\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\tDescription: \"Value is a claim value to match.\",\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t// Roles is a list of static teleport roles to match.\n\t\t\t\t\t\t\t\t\"roles\": {\n\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\tType: schema.TypeList,\n\t\t\t\t\t\t\t\t\tDescription: \"Roles is a list of static teleport roles to match.\",\n\t\t\t\t\t\t\t\t\tElem: &schema.Schema{\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t// GoogleServiceAccountURI is a path to a google service account uri.\n\t\t\t\t\t\"google_service_account_uri\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"GoogleServiceAccountURI is a path to a google service account uri.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// GoogleServiceAccount is a string containing google service account credentials.\n\t\t\t\t\t\"google_service_account\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"GoogleServiceAccount is a string containing google service account 
credentials.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// GoogleAdminEmail is the email of a google admin to impersonate.\n\t\t\t\t\t\"google_admin_email\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"GoogleAdminEmail is the email of a google admin to impersonate.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func (o GoogleCloudHealthcareV1beta1FhirBigQueryDestinationResponseOutput) SchemaConfig() SchemaConfigResponseOutput {\n\treturn o.ApplyT(func(v GoogleCloudHealthcareV1beta1FhirBigQueryDestinationResponse) SchemaConfigResponse {\n\t\treturn v.SchemaConfig\n\t}).(SchemaConfigResponseOutput)\n}", "func providerSchema() map[string]*schema.Schema {\n\tlog.Printf(\"Returning Solace Provider Schema\")\n\treturn map[string]*schema.Schema{\n\t\tHOST: {\n\t\t\tType: schema.TypeString,\n\t\t\tRequired: true,\n\t\t\tDescription: \"The address of the Solace msg broker\",\n\t\t},\n\t\tPORT: {\n\t\t\tType: schema.TypeInt,\n\t\t\tRequired: true,\n\t\t\tDescription: \"The port of the Solace msg broker\",\n\t\t},\n\t\tADMIN_USER: {\n\t\t\tType: schema.TypeString,\n\t\t\tRequired: true,\n\t\t\tDescription: \"Admin identity to login to the Solace VMR.\",\n\t\t},\n\t\tADMIN_PASSWD: {\n\t\t\tType: schema.TypeString,\n\t\t\tRequired: true,\n\t\t\tSensitive: true,\n\t\t\tDescription: \"Password of the admin identity used to login to the Solace VMR.\",\n\t\t},\n\t}\n}", "func GenSchemaSAMLConnectorV2() map[string]*schema.Schema {\n\treturn map[string]*schema.Schema{\n\t\t// Kind is a resource kind.\n\t\t\"kind\": {\n\t\t\tType: schema.TypeString,\n\t\t\tDescription: \"Kind is a resource kind.\",\n\t\t\tOptional: true,\n\t\t\tDefault: \"saml\",\n\t\t},\n\t\t// SubKind is an optional resource sub kind, used in some resources.\n\t\t\"sub_kind\": {\n\t\t\tType: schema.TypeString,\n\t\t\tDescription: \"SubKind is an optional resource sub kind, used in some resources.\",\n\t\t\tOptional: true,\n\t\t\tDefault: 
\"\",\n\t\t},\n\t\t// Version is a resource version.\n\t\t\"version\": {\n\t\t\tType: schema.TypeString,\n\t\t\tDescription: \"Version is a resource version.\",\n\t\t\tOptional: true,\n\t\t\tDefault: \"v2\",\n\t\t},\n\t\t// Metadata holds resource metadata.\n\t\t\"metadata\": {\n\t\t\tType: schema.TypeList,\n\t\t\tMaxItems: 1,\n\t\t\tDescription: \"Metadata is resource metadata\",\n\n\t\t\tOptional: true,\n\t\t\tElem: &schema.Resource{\n\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t// Name is an object name\n\t\t\t\t\t\"name\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Name is an object name\",\n\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\tForceNew: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Namespace is object namespace. The field should be called \"namespace\"\n\t\t\t\t\t// when it returns in Teleport 2.4.\n\t\t\t\t\t\"namespace\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Namespace is object namespace. The field should be called \\\"namespace\\\" when it returns in Teleport 2.4.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tDefault: \"default\",\n\t\t\t\t\t},\n\t\t\t\t\t// Description is object description\n\t\t\t\t\t\"description\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Description is object description\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Labels is a set of labels\n\t\t\t\t\t\"labels\": {\n\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tType: schema.TypeMap,\n\t\t\t\t\t\tDescription: \"Labels is a set of labels\",\n\t\t\t\t\t\tElem: &schema.Schema{\n\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t// Expires is a global expiry time header can be set on any resource in the\n\t\t\t\t\t// system.\n\t\t\t\t\t\"expires\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Expires is a global expiry time header can be set on any resource in the system.\",\n\t\t\t\t\t\tValidateFunc: 
validation.IsRFC3339Time,\n\t\t\t\t\t\tStateFunc: TruncateMs,\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t// Spec is an SAML connector specification.\n\t\t\"spec\": {\n\t\t\tType: schema.TypeList,\n\t\t\tMaxItems: 1,\n\t\t\tDescription: \"SAMLConnectorSpecV2 is a SAML connector specification.\",\n\n\t\t\tRequired: true,\n\t\t\tElem: &schema.Resource{\n\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t// Issuer is the identity provider issuer.\n\t\t\t\t\t\"issuer\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Issuer is the identity provider issuer.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// SSO is the URL of the identity provider's SSO service.\n\t\t\t\t\t\"sso\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"SSO is the URL of the identity provider's SSO service.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Cert is the identity provider certificate PEM.\n\t\t\t\t\t// IDP signs <Response> responses using this certificate.\n\t\t\t\t\t\"cert\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Cert is the identity provider certificate PEM. 
IDP signs <Response> responses using this certificate.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Display controls how this connector is displayed.\n\t\t\t\t\t\"display\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Display controls how this connector is displayed.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// AssertionConsumerService is a URL for assertion consumer service\n\t\t\t\t\t// on the service provider (Teleport's side).\n\t\t\t\t\t\"assertion_consumer_service\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"AssertionConsumerService is a URL for assertion consumer service on the service provider (Teleport's side).\",\n\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Audience uniquely identifies our service provider.\n\t\t\t\t\t\"audience\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Audience uniquely identifies our service provider.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// ServiceProviderIssuer is the issuer of the service provider (Teleport).\n\t\t\t\t\t\"service_provider_issuer\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"ServiceProviderIssuer is the issuer of the service provider (Teleport).\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// EntityDescriptor is XML with descriptor. It can be used to supply configuration\n\t\t\t\t\t// parameters in one XML file rather than supplying them in the individual elements.\n\t\t\t\t\t\"entity_descriptor\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"EntityDescriptor is XML with descriptor. 
It can be used to supply configuration parameters in one XML file rather than supplying them in the individual elements.\",\n\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t},\n\t\t\t\t\t// EntityDescriptorURL is a URL that supplies a configuration XML.\n\t\t\t\t\t\"entity_descriptor_url\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"EntityDescriptorURL is a URL that supplies a configuration XML.\",\n\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t},\n\t\t\t\t\t// AttributesToRoles is a list of mappings of attribute statements to roles.\n\t\t\t\t\t\"attributes_to_roles\": {\n\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tType: schema.TypeList,\n\t\t\t\t\t\tDescription: \"AttributesToRoles is a list of mappings of attribute statements to roles.\",\n\t\t\t\t\t\tElem: &schema.Resource{\n\t\t\t\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t\t\t\t// Name is an attribute statement name.\n\t\t\t\t\t\t\t\t\"name\": {\n\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\tDescription: \"Name is an attribute statement name.\",\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t// Value is an attribute statement value to match.\n\t\t\t\t\t\t\t\t\"value\": {\n\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\tDescription: \"Value is an attribute statement value to match.\",\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t// Roles is a list of static teleport roles to map to.\n\t\t\t\t\t\t\t\t\"roles\": {\n\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\tType: schema.TypeList,\n\t\t\t\t\t\t\t\t\tDescription: \"Roles is a list of static teleport roles to map to.\",\n\t\t\t\t\t\t\t\t\tElem: &schema.Schema{\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t// SigningKeyPair is an x509 key pair used to sign AuthnRequest.\n\t\t\t\t\t\"signing_key_pair\": {\n\t\t\t\t\t\tType: 
schema.TypeList,\n\t\t\t\t\t\tMaxItems: 1,\n\t\t\t\t\t\tDescription: \"AsymmetricKeyPair is a combination of a public certificate and private key that can be used for encryption and signing.\",\n\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tElem: &schema.Resource{\n\t\t\t\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t\t\t\t// PrivateKey is a PEM encoded x509 private key.\n\t\t\t\t\t\t\t\t\"private_key\": {\n\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\tDescription: \"PrivateKey is a PEM encoded x509 private key.\",\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t// Cert is a PEM-encoded x509 certificate.\n\t\t\t\t\t\t\t\t\"cert\": {\n\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\tDescription: \"Cert is a PEM-encoded x509 certificate.\",\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t// Provider is the external identity provider.\n\t\t\t\t\t\"provider\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Provider is the external identity provider.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// EncryptionKeyPair is a key pair used for decrypting SAML assertions.\n\t\t\t\t\t\"encryption_key_pair\": {\n\t\t\t\t\t\tType: schema.TypeList,\n\t\t\t\t\t\tMaxItems: 1,\n\t\t\t\t\t\tDescription: \"AsymmetricKeyPair is a combination of a public certificate and private key that can be used for encryption and signing.\",\n\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tElem: &schema.Resource{\n\t\t\t\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t\t\t\t// PrivateKey is a PEM encoded x509 private key.\n\t\t\t\t\t\t\t\t\"private_key\": {\n\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\tDescription: \"PrivateKey is a PEM encoded x509 private key.\",\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t// Cert is a PEM-encoded x509 certificate.\n\t\t\t\t\t\t\t\t\"cert\": {\n\t\t\t\t\t\t\t\t\tType: 
schema.TypeString,\n\t\t\t\t\t\t\t\t\tDescription: \"Cert is a PEM-encoded x509 certificate.\",\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func (*ClientConfig) Descriptor() ([]byte, []int) {\n\treturn file_service_app_config_agent_cmd_grpcserver_proto_api_app_config_proto_rawDescGZIP(), []int{2}\n}", "func (d *Describe) Schema() sql.Schema {\n\treturn sql.Schema{{\n\t\tName: \"name\",\n\t\tType: VarChar25000,\n\t}, {\n\t\tName: \"type\",\n\t\tType: VarChar25000,\n\t}}\n}", "func (c *Client) Schema() error {\n\t_, err := c.db.DB().Exec(Schema)\n\treturn err\n}", "func SchemaRegister(svc string, cluster string, sdb string, table string, inputType string, output string, version int, formatType string, dst string, createTopic bool) error {\n\tavroSchema, err := schema.ConvertToAvro(&db.Loc{Cluster: cluster, Service: svc, Name: sdb}, table, inputType, formatType)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\toutputSchemaName, err := encoder.GetOutputSchemaName(svc, sdb, table, inputType, output, version)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif dst == \"state\" || dst == \"all\" {\n\t\terr = state.InsertSchema(outputSchemaName, formatType, string(avroSchema))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif createTopic {\n\t\ttm := time.Now()\n\t\tc, err := config.Get().GetChangelogTopicName(svc, sdb, table, inputType, \"kafka\", version, tm)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\terr = createKafkaTopic(c, inputType, svc, sdb, table)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\to, err := config.Get().GetOutputTopicName(svc, sdb, table, inputType, \"kafka\", version, tm)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\terr = createKafkaTopic(o, inputType, svc, sdb, table)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tlog.Infof(\"AvroSchema registered for(%v,%v, %v,%v,%v,%v,%v) = %s\", svc, cluster, sdb, table, inputType, 
output, version, avroSchema)\n\treturn nil\n}", "func ShowSchema(c *mgin.Context) {\n\tindex := c.Param(\"index\")\n\tif schema, err := conf.LoadSchema(index); err != nil {\n\t\tc.Error(http.StatusInternalServerError, err.Error())\n\t} else {\n\t\tc.JSON(http.StatusOK, schema.SchemaConf)\n\t}\n}", "func (*ClientConfiguration) Descriptor() ([]byte, []int) {\n\treturn file_pkg_proto_configuration_grpc_grpc_proto_rawDescGZIP(), []int{0}\n}", "func (*CreateInstanceConfigMetadata) Descriptor() ([]byte, []int) {\n\treturn file_google_spanner_admin_instance_v1_spanner_instance_admin_proto_rawDescGZIP(), []int{19}\n}", "func Schema() *spec.Schema {\n\treturn internal.Schema\n}", "func GenerateSchema(_ context.Context, opts GenerateSchemaOptions) (*GenerateSchemaResult, error) {\n\tif opts.ProviderInfo.Name == \"\" {\n\t\treturn nil, fmt.Errorf(\"opts.ProviderInfo.Name cannot be empty\")\n\t}\n\tsink := opts.DiagnosticsSink\n\tif sink == nil {\n\t\tsink = diag.DefaultSink(os.Stdout, os.Stderr, diag.FormatOptions{\n\t\t\tColor: colors.Never,\n\t\t})\n\t}\n\n\tgenerated, err := realtfgen.GenerateSchemaWithOptions(realtfgen.GenerateSchemaOptions{\n\t\tProviderInfo: opts.ProviderInfo,\n\t\tDiagnosticsSink: sink,\n\t})\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tschema, err := json.Marshal(generated.PackageSpec)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &GenerateSchemaResult{\n\t\tProviderMetadata: tfbridge.ProviderMetadata{\n\t\t\tPackageSchema: schema,\n\t\t},\n\t}, nil\n}", "func GenSchemaTrustedClusterV2() map[string]*schema.Schema {\n\treturn map[string]*schema.Schema{\n\t\t// Kind is a resource kind.\n\t\t\"kind\": {\n\t\t\tType: schema.TypeString,\n\t\t\tDescription: \"Kind is a resource kind.\",\n\t\t\tOptional: true,\n\t\t\tDefault: \"trusted_cluster\",\n\t\t},\n\t\t// SubKind is an optional resource sub kind, used in some resources.\n\t\t\"sub_kind\": {\n\t\t\tType: schema.TypeString,\n\t\t\tDescription: \"SubKind is an optional resource sub 
kind, used in some resources.\",\n\t\t\tOptional: true,\n\t\t\tDefault: \"\",\n\t\t},\n\t\t// Version is a resource version.\n\t\t\"version\": {\n\t\t\tType: schema.TypeString,\n\t\t\tDescription: \"Version is a resource version.\",\n\t\t\tOptional: true,\n\t\t\tDefault: \"v2\",\n\t\t},\n\t\t// Metadata holds resource metadata.\n\t\t\"metadata\": {\n\t\t\tType: schema.TypeList,\n\t\t\tMaxItems: 1,\n\t\t\tDescription: \"Metadata is resource metadata\",\n\n\t\t\tOptional: true,\n\t\t\tElem: &schema.Resource{\n\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t// Name is an object name\n\t\t\t\t\t\"name\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Name is an object name\",\n\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\tForceNew: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Namespace is object namespace. The field should be called \"namespace\"\n\t\t\t\t\t// when it returns in Teleport 2.4.\n\t\t\t\t\t\"namespace\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Namespace is object namespace. 
The field should be called \\\"namespace\\\" when it returns in Teleport 2.4.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tDefault: \"default\",\n\t\t\t\t\t},\n\t\t\t\t\t// Description is object description\n\t\t\t\t\t\"description\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Description is object description\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Labels is a set of labels\n\t\t\t\t\t\"labels\": {\n\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tType: schema.TypeMap,\n\t\t\t\t\t\tDescription: \"Labels is a set of labels\",\n\t\t\t\t\t\tElem: &schema.Schema{\n\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t// Expires is a global expiry time header can be set on any resource in the\n\t\t\t\t\t// system.\n\t\t\t\t\t\"expires\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Expires is a global expiry time header can be set on any resource in the system.\",\n\t\t\t\t\t\tValidateFunc: validation.IsRFC3339Time,\n\t\t\t\t\t\tStateFunc: TruncateMs,\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t// Spec is a Trusted Cluster specification.\n\t\t\"spec\": {\n\t\t\tType: schema.TypeList,\n\t\t\tMaxItems: 1,\n\t\t\tDescription: \"TrustedClusterSpecV2 is a Trusted Cluster specification.\",\n\n\t\t\tOptional: true,\n\t\t\tElem: &schema.Resource{\n\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t// Enabled is a bool that indicates if the TrustedCluster is enabled or disabled.\n\t\t\t\t\t// Setting Enabled to false has a side effect of deleting the user and host certificate\n\t\t\t\t\t// authority (CA).\n\t\t\t\t\t\"enabled\": {\n\t\t\t\t\t\tType: schema.TypeBool,\n\t\t\t\t\t\tDescription: \"Enabled is a bool that indicates if the TrustedCluster is enabled or disabled. 
Setting Enabled to false has a side effect of deleting the user and host certificate authority (CA).\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Roles is a list of roles that users will be assuming when connecting to this cluster.\n\t\t\t\t\t\"roles\": {\n\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tType: schema.TypeList,\n\t\t\t\t\t\tDescription: \"Roles is a list of roles that users will be assuming when connecting to this cluster.\",\n\t\t\t\t\t\tElem: &schema.Schema{\n\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t// Token is the authorization token provided by another cluster needed by this cluster to join.\n\t\t\t\t\t\"token\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Token is the authorization token provided by another cluster needed by this cluster to join.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// ProxyAddress is the address of the web proxy server of the cluster to join. If not set,\n\t\t\t\t\t// it is derived from <metadata.name>:<default web proxy server port>.\n\t\t\t\t\t\"proxy_address\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"ProxyAddress is the address of the web proxy server of the cluster to join. If not set, it is derived from <metadata.name>:<default web proxy server port>.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// ReverseTunnelAddress is the address of the SSH proxy server of the cluster to join. If\n\t\t\t\t\t// not set, it is derived from <metadata.name>:<default reverse tunnel port>.\n\t\t\t\t\t\"reverse_tunnel_address\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"ReverseTunnelAddress is the address of the SSH proxy server of the cluster to join. 
If not set, it is derived from <metadata.name>:<default reverse tunnel port>.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// RoleMap specifies role mappings to remote roles.\n\t\t\t\t\t\"role_map\": {\n\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tType: schema.TypeList,\n\t\t\t\t\t\tDescription: \"RoleMap specifies role mappings to remote roles.\",\n\t\t\t\t\t\tElem: &schema.Resource{\n\t\t\t\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t\t\t\t// Remote specifies remote role name to map from\n\t\t\t\t\t\t\t\t\"remote\": {\n\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\tDescription: \"Remote specifies remote role name to map from\",\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t// Local specifies local roles to map to\n\t\t\t\t\t\t\t\t\"local\": {\n\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\tType: schema.TypeList,\n\t\t\t\t\t\t\t\t\tDescription: \"Local specifies local roles to map to\",\n\t\t\t\t\t\t\t\t\tElem: &schema.Schema{\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func (b *Builder) ConfigSpec() hcldec.ObjectSpec {\n\treturn b.config.FlatMapstructure().HCL2Spec()\n}", "func (*MetadataStoreClientConfig) Descriptor() ([]byte, []int) {\n\treturn file_ml_metadata_proto_metadata_store_proto_rawDescGZIP(), []int{29}\n}", "func (*OnPremisesConfiguration) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_sql_v1beta4_cloud_sql_resources_proto_rawDescGZIP(), []int{44}\n}", "func (_ NetConfig) OpenAPISchemaType() []string { return []string{\"object\"} }", "func contentDescriptorSchema() *gojsonschema.Schema {\n\treturn loadSchema(\"content-descriptor.schema.json\")\n}", "func BackendSchemas() []*config.ConfigurationSchema {\n\tconfigurationSchemas := []*config.ConfigurationSchema{}\n\tfor _, registry := range backendRegistry {\n\t\tconfigurationSchemas = 
append(configurationSchemas, registry.configurationSchema)\n\t}\n\treturn configurationSchemas\n}", "func (o GoogleCloudHealthcareV1beta1FhirBigQueryDestinationPtrOutput) SchemaConfig() SchemaConfigPtrOutput {\n\treturn o.ApplyT(func(v *GoogleCloudHealthcareV1beta1FhirBigQueryDestination) *SchemaConfig {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.SchemaConfig\n\t}).(SchemaConfigPtrOutput)\n}", "func (m modelPluginTestDevice) Schema() (map[string]*yang.Entry, error) {\n\treturn td1.UnzipSchema()\n}", "func (*ClientOAuthConfiguration) Descriptor() ([]byte, []int) {\n\treturn file_pkg_proto_configuration_grpc_grpc_proto_rawDescGZIP(), []int{2}\n}", "func (*FormatSchema) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_clickhouse_v1_format_schema_proto_rawDescGZIP(), []int{0}\n}", "func (*MySqlSyncConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_sql_v1beta4_cloud_sql_resources_proto_rawDescGZIP(), []int{29}\n}", "func simpleTestSchema() *configschema.Block {\n\treturn &configschema.Block{\n\t\tAttributes: map[string]*configschema.Attribute{\n\t\t\t\"test_string\": {\n\t\t\t\tType: cty.String,\n\t\t\t\tOptional: true,\n\t\t\t},\n\t\t\t\"test_number\": {\n\t\t\t\tType: cty.String,\n\t\t\t\tOptional: true,\n\t\t\t},\n\t\t\t\"test_bool\": {\n\t\t\t\tType: cty.String,\n\t\t\t\tOptional: true,\n\t\t\t},\n\t\t\t\"test_list\": {\n\t\t\t\tType: cty.String,\n\t\t\t\tOptional: true,\n\t\t\t},\n\t\t\t\"test_map\": {\n\t\t\t\tType: cty.String,\n\t\t\t\tOptional: true,\n\t\t\t},\n\t\t},\n\t}\n}", "func (*FakeDatabaseConfig) Descriptor() ([]byte, []int) {\n\treturn file_ml_metadata_proto_metadata_store_proto_rawDescGZIP(), []int{21}\n}", "func (m *Module) SetSchemaConfig(evSchemas config.EventingSchemas) error {\n\tm.lock.Lock()\n\tdefer m.lock.Unlock()\n\n\t// Reset the existing schema\n\tm.schemas = map[string]model.Fields{}\n\n\tfor _, evSchema := range evSchemas {\n\t\tresourceID := ksuid.New().String()\n\t\tdummyDBSchema := 
config.DatabaseSchemas{\n\t\t\tresourceID: {\n\t\t\t\tTable: evSchema.ID,\n\t\t\t\tDbAlias: \"dummyDBName\",\n\t\t\t\tSchema: evSchema.Schema,\n\t\t\t},\n\t\t}\n\t\tschemaType, err := schemaHelpers.Parser(dummyDBSchema)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(schemaType[\"dummyDBName\"][evSchema.ID]) != 0 {\n\t\t\tm.schemas[evSchema.ID] = schemaType[\"dummyDBName\"][evSchema.ID]\n\t\t}\n\t}\n\treturn nil\n}", "func CatalogSchema() *gojsonschema.Schema {\n\treturn loadSchema(\"catalog.schema.json\")\n}", "func (*Schema_SchemaField) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_dataplex_v1_metadata_proto_rawDescGZIP(), []int{13, 0}\n}", "func Marshal(c *configs.Config, schemas *terraform.Schemas) ([]byte, error) {\n\tvar output config\n\n\tpcs := make(map[string]providerConfig)\n\tmarshalProviderConfigs(c, schemas, pcs)\n\n\trootModule, err := marshalModule(c, schemas, \"\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\toutput.RootModule = rootModule\n\n\tnormalizeModuleProviderKeys(&rootModule, pcs)\n\n\tfor name, pc := range pcs {\n\t\tif pc.parentKey != \"\" {\n\t\t\tdelete(pcs, name)\n\t\t}\n\t}\n\toutput.ProviderConfigs = pcs\n\n\tret, err := json.Marshal(output)\n\treturn ret, err\n}", "func (*MySqlSyncConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_sql_v1_cloud_sql_resources_proto_rawDescGZIP(), []int{8}\n}", "func (*ConnectionConfig) Descriptor() ([]byte, []int) {\n\treturn file_ml_metadata_proto_metadata_store_proto_rawDescGZIP(), []int{27}\n}", "func (*MongoCfgConfig4_0) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_mongodb_v1_config_mongodb4_0_proto_rawDescGZIP(), []int{1}\n}", "func (*MetadataStoreClientConfig_SSLConfig) Descriptor() ([]byte, []int) {\n\treturn file_ml_metadata_proto_metadata_store_proto_rawDescGZIP(), []int{29, 0}\n}", "func (*AdminConfig) Descriptor() ([]byte, []int) {\n\treturn file_envoy_extensions_common_tap_v3_common_proto_rawDescGZIP(), []int{1}\n}", "func 
WithConfigType(group, version, kind string) RunnerOption {\n\treturn func(k *PluginRunner) {\n\t\t// Record the GVK information on the command\n\t\tk.cmd.Use = kind + \" FILE\"\n\t\tk.cmd.Short = fmt.Sprintf(\"Executable plugin for the %s kind\", kind)\n\t\tk.cmd.Hidden = true\n\t\tk.cmd.Annotations = map[string]string{\n\t\t\t\"group\": group,\n\t\t\t\"version\": version,\n\t\t\t\"kind\": kind,\n\t\t}\n\n\t\t// TODO We should take an example object and serialize it as the example text\n\n\t\t// Require an argument for the configuration filename\n\t\tk.cmd.Args = cobra.ExactArgs(1)\n\t\tk.config = func(_ *cobra.Command, args []string) ([]byte, error) {\n\t\t\treturn ioutil.ReadFile(args[0])\n\t\t}\n\n\t\t// This is kind of sneaky, but try to pickup the Kustomize root here\n\t\tk.root = os.Getenv(\"KUSTOMIZE_PLUGIN_CONFIG_ROOT\")\n\n\t\t// This is again sneaky and not safe because we need this to always run last\n\t\tk.transform = combineTransformFunc(k.transform, persistResourceOptions)\n\t}\n}", "func (*Host_HealthCheckConfig) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_gloo_projects_gloo_api_v1_options_static_static_proto_rawDescGZIP(), []int{1, 0}\n}", "func (*AgentConfigurationCF) Descriptor() ([]byte, []int) {\n\treturn file_pkg_kascfg_kascfg_proto_rawDescGZIP(), []int{11}\n}", "func (*AdminConfig) Descriptor() ([]byte, []int) {\n\treturn file_envoy_config_common_tap_v2alpha_common_proto_rawDescGZIP(), []int{1}\n}", "func (c *DQLConfig) AddSchema(schemaName, path string) error {\n\tif len(c.Schemas) == 0 {\n\t\tc.Schemas = map[string]*Schema{}\n\t}\n\n\tc.Schemas[schemaName] = &Schema{\n\t\tName: schemaName,\n\t\tPath: path,\n\t}\n\n\t// add schema to ServerelessConfig\n\ts, err := c.ReadServerlessConfig()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn s.AddSchema(schemaName, path).Write()\n}", "func (e ExternalService) validateConfig() error {\n\tsl := gojsonschema.NewSchemaLoader()\n\tsc, err := 
sl.Compile(gojsonschema.NewStringLoader(e.schema()))\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to compile schema for external service of kind %q\", e.Kind)\n\t}\n\n\tnormalized, err := jsonc.Parse(e.Config)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to normalize JSON\")\n\t}\n\n\tres, err := sc.Validate(gojsonschema.NewBytesLoader(normalized))\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to validate config against schema\")\n\t}\n\n\terrs := new(multierror.Error)\n\tfor _, err := range res.Errors() {\n\t\terrs = multierror.Append(errs, errors.New(err.String()))\n\t}\n\n\treturn errs.ErrorOrNil()\n}", "func (*MySQLDatabaseConfig) Descriptor() ([]byte, []int) {\n\treturn file_ml_metadata_proto_metadata_store_proto_rawDescGZIP(), []int{22}\n}", "func (*MetadataStoreServerConfig) Descriptor() ([]byte, []int) {\n\treturn file_ml_metadata_proto_metadata_store_proto_rawDescGZIP(), []int{30}\n}", "func MarshalPlugin(plugin types.Plugin, opts ...MarshalOption) ([]byte, error) {\n\tif err := plugin.CheckAndSetDefaults(); err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\tcfg, err := CollectOptions(opts)\n\tif err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\tswitch plugin := plugin.(type) {\n\tcase *types.PluginV1:\n\t\tif !cfg.PreserveResourceID {\n\t\t\tcopy := *plugin\n\t\t\tcopy.SetResourceID(0)\n\t\t\tplugin = &copy\n\t\t}\n\t\tvar buf bytes.Buffer\n\t\terr := (&jsonpb.Marshaler{}).Marshal(&buf, plugin)\n\t\tif err != nil {\n\t\t\treturn nil, trace.Wrap(err)\n\t\t}\n\t\treturn buf.Bytes(), nil\n\tdefault:\n\t\treturn nil, trace.BadParameter(\"unsupported plugin resource %T\", plugin)\n\t}\n}", "func (*ListMetadataSchemasRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_metadata_service_proto_rawDescGZIP(), []int{46}\n}", "func schemaAgent(mode SchemaMode) *schema.Resource {\n\tswitch mode {\n\tcase DataSourceReferenceOnly:\n\t\treturn &schema.Resource{\n\t\t\tSchema: 
map[string]*schema.Schema{\n\t\t\t\t\"uuid\": &schema.Schema{\n\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\tComputed: true,\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\tcase DataSourceFullEntity:\n\t\treturn &schema.Resource{\n\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\"hostname\": &schema.Schema{\n\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\tComputed: true,\n\t\t\t\t},\n\t\t\t\t\"id\": &schema.Schema{\n\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\tComputed: true,\n\t\t\t\t},\n\t\t\t\t\"ip_address\": &schema.Schema{\n\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\tComputed: true,\n\t\t\t\t},\n\t\t\t\t\"is_deprecated\": &schema.Schema{\n\t\t\t\t\tType: schema.TypeBool,\n\t\t\t\t\tComputed: true,\n\t\t\t\t},\n\t\t\t\t\"meta_data\": &schema.Schema{\n\t\t\t\t\tType: schema.TypeMap,\n\t\t\t\t\tComputed: true,\n\t\t\t\t\tElem: &schema.Schema{Type: schema.TypeString},\n\t\t\t\t},\n\t\t\t\t\"name\": &schema.Schema{\n\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\tComputed: true,\n\t\t\t\t},\n\t\t\t\t\"operating_system\": &schema.Schema{\n\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\tComputed: true,\n\t\t\t\t},\n\t\t\t\t\"public\": &schema.Schema{\n\t\t\t\t\tType: schema.TypeBool,\n\t\t\t\t\tComputed: true,\n\t\t\t\t},\n\t\t\t\t\"user_agent\": &schema.Schema{\n\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\tComputed: true,\n\t\t\t\t},\n\t\t\t\t\"uuid\": &schema.Schema{\n\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\tComputed: true,\n\t\t\t\t},\n\t\t\t\t\"version\": &schema.Schema{\n\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\tComputed: true,\n\t\t\t\t},\n\t\t\t\t\"version_has_known_issues\": &schema.Schema{\n\t\t\t\t\tType: schema.TypeBool,\n\t\t\t\t\tComputed: true,\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\tdefault:\n\t\treturn &schema.Resource{}\n\t}\n}", "func (*MongoCfgConfig4_4) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_mongodb_v1_config_mongodb4_4_proto_rawDescGZIP(), []int{1}\n}", "func DefaultSchemaConfig(kind string) config.SchemaConfig {\n\treturn 
SchemaConfig(kind, \"v9\", model.Now().Add(-time.Hour*2))\n}", "func (*MongoCfgConfigSet4_0) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_mongodb_v1_config_mongodb4_0_proto_rawDescGZIP(), []int{4}\n}", "func (o SchemaConfigOutput) SchemaType() SchemaConfigSchemaTypePtrOutput {\n\treturn o.ApplyT(func(v SchemaConfig) *SchemaConfigSchemaType { return v.SchemaType }).(SchemaConfigSchemaTypePtrOutput)\n}", "func (*DocumentOnTypeFormattingRegistrationOptions) Descriptor() ([]byte, []int) {\n\treturn file_protocol_rpc_rpc_proto_rawDescGZIP(), []int{189}\n}", "func (t *OpenconfigPlatform_Components_Component_Chassis_Config) Validate(opts ...ygot.ValidationOption) error {\n\tif err := ytypes.Validate(SchemaTree[\"OpenconfigPlatform_Components_Component_Chassis_Config\"], t, opts...); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (e ExternalService) Configuration() (cfg interface{}, _ error) {\n\tswitch strings.ToLower(e.Kind) {\n\tcase \"awscodecommit\":\n\t\tcfg = &schema.AWSCodeCommitConnection{}\n\tcase \"bitbucketserver\":\n\t\tcfg = &schema.BitbucketServerConnection{}\n\tcase \"github\":\n\t\tcfg = &schema.GitHubConnection{}\n\tcase \"gitlab\":\n\t\tcfg = &schema.GitLabConnection{}\n\tcase \"gitolite\":\n\t\tcfg = &schema.GitoliteConnection{}\n\tcase \"phabricator\":\n\t\tcfg = &schema.PhabricatorConnection{}\n\tcase \"other\":\n\t\tcfg = &schema.OtherExternalServiceConnection{}\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"unknown external service kind %q\", e.Kind)\n\t}\n\treturn cfg, jsonc.Unmarshal(e.Config, cfg)\n}", "func (*ServerConfiguration) Descriptor() ([]byte, []int) {\n\treturn file_pkg_proto_configuration_grpc_grpc_proto_rawDescGZIP(), []int{3}\n}", "func (*BootstrapConfigDump) Descriptor() ([]byte, []int) {\n\treturn file_envoy_admin_v3_config_dump_proto_rawDescGZIP(), []int{1}\n}", "func (*MongoCfgConfigSet4_4) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_mongodb_v1_config_mongodb4_4_proto_rawDescGZIP(), 
[]int{4}\n}", "func (*AdminConfig) Descriptor() ([]byte, []int) {\n\treturn file_envoy_extensions_common_tap_v4alpha_common_proto_rawDescGZIP(), []int{1}\n}", "func (*MetadataStoreServerConfig_SSLConfig) Descriptor() ([]byte, []int) {\n\treturn file_ml_metadata_proto_metadata_store_proto_rawDescGZIP(), []int{30, 0}\n}", "func (*OCR2Config_Plugins) Descriptor() ([]byte, []int) {\n\treturn file_pkg_noderpc_proto_feeds_manager_proto_rawDescGZIP(), []int{4, 2}\n}", "func (*Schema) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_dataplex_v1_metadata_proto_rawDescGZIP(), []int{13}\n}", "func (format *MetadataCopy) Configure(conf core.PluginConfigReader) {\n}", "func Schema() (schema *graphql.Schema, err error) {\n\tbytes, err := readFile(\"./schema.graphql\")\n\tif err != nil {\n\t\treturn schema, err\n\t}\n\tschemaDef := string(bytes)\n\tschema = graphql.MustParseSchema(schemaDef, &Resolver{}, graphql.UseStringDescriptions())\n\treturn schema, nil\n}", "func (*ClientMonitoringConfig) Descriptor() ([]byte, []int) {\n\treturn file_go_chromium_org_luci_cipd_api_config_v1_config_proto_rawDescGZIP(), []int{0}\n}", "func (*SqlActiveDirectoryConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_sql_v1_cloud_sql_resources_proto_rawDescGZIP(), []int{30}\n}", "func (*MongosConfig4_0) Descriptor() ([]byte, []int) {\n\treturn file_yandex_cloud_mdb_mongodb_v1_config_mongodb4_0_proto_rawDescGZIP(), []int{2}\n}", "func pluginConfigFromJSON(data json.RawMessage) (*pluginConfig, error) {\n\tcfgProto := &configpb.GoogleMeshCaConfig{}\n\tm := jsonpb.Unmarshaler{AllowUnknownFields: true}\n\tif err := m.Unmarshal(bytes.NewReader(data), cfgProto); err != nil {\n\t\treturn nil, fmt.Errorf(\"meshca: failed to unmarshal config: %v\", err)\n\t}\n\n\tif api := cfgProto.GetServer().GetApiType(); api != v3corepb.ApiConfigSource_GRPC {\n\t\treturn nil, fmt.Errorf(\"meshca: server has apiType %s, want %s\", api, v3corepb.ApiConfigSource_GRPC)\n\t}\n\n\tpc := 
&pluginConfig{}\n\tgs := cfgProto.GetServer().GetGrpcServices()\n\tif l := len(gs); l != 1 {\n\t\treturn nil, fmt.Errorf(\"meshca: number of gRPC services in config is %d, expected 1\", l)\n\t}\n\tgrpcService := gs[0]\n\tgoogGRPC := grpcService.GetGoogleGrpc()\n\tif googGRPC == nil {\n\t\treturn nil, errors.New(\"meshca: missing google gRPC service in config\")\n\t}\n\tpc.serverURI = googGRPC.GetTargetUri()\n\tif pc.serverURI == \"\" {\n\t\tpc.serverURI = defaultMeshCaEndpoint\n\t}\n\n\tcallCreds := googGRPC.GetCallCredentials()\n\tif len(callCreds) == 0 {\n\t\treturn nil, errors.New(\"meshca: missing call credentials in config\")\n\t}\n\tvar stsCallCreds *v3corepb.GrpcService_GoogleGrpc_CallCredentials_StsService\n\tfor _, cc := range callCreds {\n\t\tif stsCallCreds = cc.GetStsService(); stsCallCreds != nil {\n\t\t\tbreak\n\t\t}\n\t}\n\tif stsCallCreds == nil {\n\t\treturn nil, errors.New(\"meshca: missing STS call credentials in config\")\n\t}\n\tif stsCallCreds.GetSubjectTokenPath() == \"\" {\n\t\treturn nil, errors.New(\"meshca: missing subjectTokenPath in STS call credentials config\")\n\t}\n\tpc.stsOpts = makeStsOptsWithDefaults(stsCallCreds)\n\n\tvar err error\n\tif pc.callTimeout, err = ptypes.Duration(grpcService.GetTimeout()); err != nil {\n\t\tpc.callTimeout = defaultCallTimeout\n\t}\n\tif pc.certLifetime, err = ptypes.Duration(cfgProto.GetCertificateLifetime()); err != nil {\n\t\tpc.certLifetime = defaultCertLifetime\n\t}\n\tif pc.certGraceTime, err = ptypes.Duration(cfgProto.GetRenewalGracePeriod()); err != nil {\n\t\tpc.certGraceTime = defaultCertGraceTime\n\t}\n\tswitch cfgProto.GetKeyType() {\n\tcase configpb.GoogleMeshCaConfig_KEY_TYPE_UNKNOWN, configpb.GoogleMeshCaConfig_KEY_TYPE_RSA:\n\t\tpc.keyType = defaultKeyTypeRSA\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"meshca: unsupported key type: %s, only support RSA keys\", pc.keyType)\n\t}\n\tpc.keySize = int(cfgProto.GetKeySize())\n\tif pc.keySize == 0 {\n\t\tpc.keySize = 
defaultKeySize\n\t}\n\tpc.location = cfgProto.GetLocation()\n\tif pc.location == \"\" {\n\t\tpc.location = readZoneFunc(makeHTTPDoer())\n\t}\n\n\treturn pc, nil\n}", "func GenSchemaAuthPreferenceV2() map[string]*schema.Schema {\n\treturn map[string]*schema.Schema{\n\t\t// Kind is a resource kind\n\t\t\"kind\": {\n\t\t\tType: schema.TypeString,\n\t\t\tDescription: \"Kind is a resource kind\",\n\t\t\tOptional: true,\n\t\t\tDefault: \"cluster_auth_preference\",\n\t\t},\n\t\t// SubKind is an optional resource sub kind, used in some resources\n\t\t\"sub_kind\": {\n\t\t\tType: schema.TypeString,\n\t\t\tDescription: \"SubKind is an optional resource sub kind, used in some resources\",\n\t\t\tOptional: true,\n\t\t\tDefault: \"\",\n\t\t},\n\t\t// Version is a resource version\n\t\t\"version\": {\n\t\t\tType: schema.TypeString,\n\t\t\tDescription: \"Version is a resource version\",\n\t\t\tOptional: true,\n\t\t\tDefault: \"v2\",\n\t\t},\n\t\t// Metadata is resource metadata\n\t\t\"metadata\": {\n\t\t\tType: schema.TypeList,\n\t\t\tMaxItems: 1,\n\t\t\tDescription: \"Metadata is resource metadata\",\n\n\t\t\tOptional: true,\n\t\t\tElem: &schema.Resource{\n\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t// Namespace is object namespace. The field should be called \"namespace\"\n\t\t\t\t\t// when it returns in Teleport 2.4.\n\t\t\t\t\t\"namespace\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Namespace is object namespace. 
The field should be called \\\"namespace\\\" when it returns in Teleport 2.4.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tDefault: \"default\",\n\t\t\t\t\t},\n\t\t\t\t\t// Description is object description\n\t\t\t\t\t\"description\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Description is object description\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// Labels is a set of labels\n\t\t\t\t\t\"labels\": {\n\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tType: schema.TypeMap,\n\t\t\t\t\t\tDescription: \"Labels is a set of labels\",\n\t\t\t\t\t\tElem: &schema.Schema{\n\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t// Expires is a global expiry time header can be set on any resource in the\n\t\t\t\t\t// system.\n\t\t\t\t\t\"expires\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Expires is a global expiry time header can be set on any resource in the system.\",\n\t\t\t\t\t\tValidateFunc: validation.IsRFC3339Time,\n\t\t\t\t\t\tStateFunc: TruncateMs,\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t// Spec is an AuthPreference specification\n\t\t\"spec\": {\n\t\t\tType: schema.TypeList,\n\t\t\tMaxItems: 1,\n\t\t\tDescription: \"AuthPreferenceSpecV2 is the actual data we care about for AuthPreference.\",\n\n\t\t\tRequired: true,\n\t\t\tElem: &schema.Resource{\n\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t// Type is the type of authentication.\n\t\t\t\t\t\"type\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"Type is the type of authentication.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tDefault: \"local\",\n\t\t\t\t\t},\n\t\t\t\t\t// SecondFactor is the type of second factor.\n\t\t\t\t\t\"second_factor\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"SecondFactor is the type of second factor.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tDefault: \"otp\",\n\t\t\t\t\t},\n\t\t\t\t\t// ConnectorName is 
the name of the OIDC or SAML connector. If this value is\n\t\t\t\t\t// not set the first connector in the backend will be used.\n\t\t\t\t\t\"connector_name\": {\n\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\tDescription: \"ConnectorName is the name of the OIDC or SAML connector. If this value is not set the first connector in the backend will be used.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// U2F are the settings for the U2F device.\n\t\t\t\t\t\"u2f\": {\n\t\t\t\t\t\tType: schema.TypeList,\n\t\t\t\t\t\tMaxItems: 1,\n\t\t\t\t\t\tDescription: \"U2F defines settings for U2F device.\",\n\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tElem: &schema.Resource{\n\t\t\t\t\t\t\tSchema: map[string]*schema.Schema{\n\t\t\t\t\t\t\t\t// AppID returns the application ID for universal second factor.\n\t\t\t\t\t\t\t\t\"app_id\": {\n\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\tDescription: \"AppID returns the application ID for universal second factor.\",\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t// Facets returns the facets for universal second factor.\n\t\t\t\t\t\t\t\t\"facets\": {\n\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\tType: schema.TypeList,\n\t\t\t\t\t\t\t\t\tDescription: \"Facets returns the facets for universal second factor.\",\n\t\t\t\t\t\t\t\t\tElem: &schema.Schema{\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t// DeviceAttestationCAs contains the trusted attestation CAs for U2F\n\t\t\t\t\t\t\t\t// devices.\n\t\t\t\t\t\t\t\t\"device_attestation_c_as\": {\n\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\tType: schema.TypeList,\n\t\t\t\t\t\t\t\t\tDescription: \"DeviceAttestationCAs contains the trusted attestation CAs for U2F devices.\",\n\t\t\t\t\t\t\t\t\tElem: &schema.Schema{\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t// 
RequireSessionMFA causes all sessions in this cluster to require MFA\n\t\t\t\t\t// checks.\n\t\t\t\t\t\"require_session_mfa\": {\n\t\t\t\t\t\tType: schema.TypeBool,\n\t\t\t\t\t\tDescription: \"RequireSessionMFA causes all sessions in this cluster to require MFA checks.\",\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t// DisconnectExpiredCert provides disconnect expired certificate setting -\n\t\t\t\t\t// if true, connections with expired client certificates will get disconnected\n\t\t\t\t\t\"disconnect_expired_cert\": SchemaBoolOption(),\n\t\t\t\t\t// AllowLocalAuth is true if local authentication is enabled.\n\t\t\t\t\t\"allow_local_auth\": SchemaBoolOption(),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func (*FederatedAuthConfigSpec_Template) Descriptor() ([]byte, []int) {\n\treturn file_github_com_solo_io_solo_apis_api_gloo_fed_fed_enterprise_gloo_v1_auth_config_proto_rawDescGZIP(), []int{0, 0}\n}", "func (*UpdateInstanceConfigMetadata) Descriptor() ([]byte, []int) {\n\treturn file_google_spanner_admin_instance_v1_spanner_instance_admin_proto_rawDescGZIP(), []int{20}\n}", "func ValidatePlugin(c *Config) error {\n\t// Look for legacy plugin first\n\tplugin := c.Plugin\n\tif plugin == \"\" {\n\t\tplugin = c.General.Plugin\n\t} else {\n\t\tc.General.Plugin = plugin\n\t}\n\tif plugin == \"\" {\n\t\tglog.Infof(\"No plugin specified in config file - defaulting to %q plugin\", DefaultPlugin)\n\t\tc.General.Plugin = DefaultPlugin\n\t\treturn nil\n\t}\n\tswitch plugin {\n\tcase \"bridge\":\n\t\tc.General.CNIPlugin = BridgePlugin{c}\n\tcase \"ptp\":\n\t\tc.General.CNIPlugin = PointToPointPlugin{c}\n\tdefault:\n\t\treturn fmt.Errorf(\"plugin %q not supported\", plugin)\n\t}\n\treturn nil\n}", "func (*InstanceConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_spanner_admin_instance_v1_spanner_instance_admin_proto_rawDescGZIP(), []int{1}\n}", "func (t *TikvHandlerTool) Schema() (infoschema.InfoSchema, error) {\n\tdom, err := session.GetDomain(t.Store)\n\tif 
err != nil {\n\t\treturn nil, err\n\t}\n\treturn dom.InfoSchema(), nil\n}", "func (*Gateway_Conf) Descriptor() ([]byte, []int) {\n\treturn file_mesh_v1alpha1_gateway_proto_rawDescGZIP(), []int{0, 2}\n}", "func (*ConfigurationFile) Descriptor() ([]byte, []int) {\n\treturn file_pkg_kascfg_kascfg_proto_rawDescGZIP(), []int{25}\n}", "func ExampleConfiguration(pluginNames []string) (*ApiplexConfig, error) {\n\tc := ApiplexConfig{\n\t\tRedis: apiplexConfigRedis{\n\t\t\tHost: \"127.0.0.1\",\n\t\t\tPort: 6379,\n\t\t\tDB: 0,\n\t\t},\n\t\tQuotas: map[string]apiplexQuota{\n\t\t\t\"default\": apiplexQuota{\n\t\t\t\tMinutes: 5,\n\t\t\t\tMaxIP: 50,\n\t\t\t\tMaxKey: 5000,\n\t\t\t},\n\t\t\t\"keyless\": apiplexQuota{\n\t\t\t\tMinutes: 5,\n\t\t\t\tMaxIP: 20,\n\t\t\t},\n\t\t},\n\t\tServe: apiplexConfigServe{\n\t\t\tPort: 5000,\n\t\t\tAPI: \"/\",\n\t\t\tUpstreams: []string{\"http://your-actual-api:8000/\"},\n\t\t\tPortalAPI: \"/portal/api/\",\n\t\t\tPortal: \"/portal/\",\n\t\t\tSigningKey: uniuri.NewLen(64),\n\t\t},\n\t}\n\tplugins := apiplexConfigPlugins{}\n\tfor _, pname := range pluginNames {\n\t\tpInfo, ok := registeredPlugins[pname]\n\t\tif !ok {\n\t\t\treturn nil, fmt.Errorf(\"No plugin '%s' available.\", pname)\n\t\t}\n\n\t\tpluginPtr := reflect.New(pInfo.pluginType)\n\t\tdefConfig := pluginPtr.MethodByName(\"DefaultConfig\").Call([]reflect.Value{})[0].Interface().(map[string]interface{})\n\t\tpconfig := apiplexPluginConfig{Plugin: pname, Config: defConfig}\n\n\t\tswitch pluginPtr.Interface().(type) {\n\t\tcase AuthPlugin:\n\t\t\tplugins.Auth = append(plugins.Auth, pconfig)\n\t\tcase ManagementBackendPlugin:\n\t\t\tplugins.Backend = append(plugins.Backend, pconfig)\n\t\tcase BackendPlugin:\n\t\t\tplugins.Backend = append(plugins.Backend, pconfig)\n\t\tcase PreUpstreamPlugin:\n\t\t\tplugins.PreUpstream = append(plugins.PreUpstream, pconfig)\n\t\tcase PostUpstreamPlugin:\n\t\t\tplugins.PostUpstream = append(plugins.PostUpstream, pconfig)\n\t\tcase 
PostAuthPlugin:\n\t\t\tplugins.PostAuth = append(plugins.PostAuth, pconfig)\n\t\tcase LoggingPlugin:\n\t\t\tplugins.Logging = append(plugins.Logging, pconfig)\n\t\t}\n\t}\n\tc.Plugins = plugins\n\treturn &c, nil\n}", "func (r *Resource) WriteConfig(filename string) error {\n\tr.Lock()\n\tdefer r.Unlock()\n\n\tvar b bytes.Buffer\n\n\tb.WriteString(fmt.Sprintf(\"# meta-data-json:{\\\"updated\\\": \\\"%s\\\"}\\n\", time.Now().UTC()))\n\tb.WriteString(fmt.Sprintf(\"resource %s {\\n\", r.name))\n\n\tvar hosts []string\n\tfor _, h := range r.host {\n\t\thosts = append(hosts, h.Name)\n\n\t\tb.WriteString(indentf(1, \"on %s {\\n\", h.Name))\n\t\tb.WriteString(indentf(2, \"node-id %d;\\n\", h.ID))\n\t\tb.WriteString(indentf(2, \"address %s:%d;\\n\", h.IP, r.port))\n\t\tfor _, v := range h.volume {\n\t\t\tb.WriteString(indentf(2, \"volume %d {\\n\", v.id))\n\t\t\tb.WriteString(indentf(3, \"device minor %d;\\n\", v.minor))\n\t\t\tb.WriteString(indentf(3, \"disk %s;\\n\", v.backingDevice))\n\t\t\tb.WriteString(indentf(3, \"meta-disk internal;\\n\"))\n\t\t\tb.WriteString(indentf(2, \"}\\n\")) // end volume section\n\t\t}\n\t\tb.WriteString(indentf(1, \"}\\n\")) // end on section\n\t\tb.WriteString(\"\\n\")\n\t}\n\n\tb.WriteString(indentf(1, \"connection-mesh {\\n\"))\n\tb.WriteString(indentf(2, \"hosts %s;\\n\", strings.Join(hosts, \" \")))\n\tb.WriteString(indentf(1, \"}\\n\"))\n\n\tb.WriteString(\"}\") // end resource section\n\n\treturn ioutil.WriteFile(filename, b.Bytes(), 0644)\n}" ]
[ "0.6281331", "0.5918551", "0.5734096", "0.5686296", "0.5664485", "0.5613437", "0.5569505", "0.5495627", "0.5485626", "0.54754454", "0.54539895", "0.5391567", "0.5384628", "0.5379263", "0.53001755", "0.529274", "0.5225631", "0.5206114", "0.52046055", "0.5195457", "0.5191281", "0.5095159", "0.5090088", "0.5060136", "0.5039013", "0.50187767", "0.5003207", "0.49667984", "0.49562058", "0.49506292", "0.49381107", "0.49352422", "0.4929907", "0.49294278", "0.4929304", "0.49215752", "0.49092466", "0.49033594", "0.48666948", "0.4863618", "0.48453167", "0.4842632", "0.4842476", "0.48388237", "0.48244855", "0.48228672", "0.48210117", "0.48037267", "0.48024225", "0.47782674", "0.4774291", "0.47684157", "0.4755899", "0.47543225", "0.47511184", "0.47342914", "0.47266537", "0.47136274", "0.47130957", "0.47080556", "0.47040543", "0.46974456", "0.4695887", "0.46944904", "0.4691553", "0.46868482", "0.46735677", "0.4672732", "0.4662881", "0.46572277", "0.46564108", "0.46494964", "0.46487522", "0.46439853", "0.46398136", "0.4636972", "0.46293348", "0.46248096", "0.46247542", "0.4623401", "0.46224976", "0.461959", "0.4613841", "0.4607794", "0.46026853", "0.45978713", "0.45965564", "0.45956972", "0.45932347", "0.45910934", "0.4581596", "0.45775127", "0.45773968", "0.45745736", "0.45681584", "0.4565546", "0.45616704", "0.45562378", "0.4555248", "0.45525682" ]
0.6769226
0
SetConfig function is called when starting the plugin for the first time. The Config given has two different configuration fields. The first PluginConfig, is an encoded configuration from the plugin block of the client config. The second, AgentConfig, is the Nomad agent's configuration which is given to all plugins.
func (d *Driver) SetConfig(cfg *base.Config) error { var pluginConfig PluginConfig if len(cfg.PluginConfig) != 0 { if err := base.MsgPackDecode(cfg.PluginConfig, &pluginConfig); err != nil { return err } } d.config = &pluginConfig if cfg.AgentConfig != nil { d.nomadConfig = cfg.AgentConfig.Driver } clientConfig := api.DefaultClientConfig() if pluginConfig.SocketPath != "" { clientConfig.SocketPath = pluginConfig.SocketPath } d.podman = api.NewClient(d.logger, clientConfig) return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func SetConfig(c cfg.RPCConfig) {\n\tconfig = c\n}", "func SetConfig(c cfg.RPCConfig) {\n\tconfig = c\n}", "func (nc *NabtoClient) SetConfig() {\n\tviper.Set(\"nabto\", nc)\n\tviper.WriteConfig()\n\tnc.ApplyConfig()\n}", "func (c *Client) SetConfig(conf *ClientConfig) (err error) {\n\tif conf == nil {\n\t\tconf = _defaultCliConf\n\t}\n\tif conf.Dial <= 0 {\n\t\tconf.Dial = time.Second * 10\n\t}\n\tif conf.Timeout <= 0 {\n\t\tconf.Timeout = time.Millisecond * 300\n\t}\n\tif conf.KeepAliveInterval <= 0 {\n\t\tconf.KeepAliveInterval = time.Second * 60\n\t}\n\tif conf.KeepAliveTimeout <= 0 {\n\t\tconf.KeepAliveTimeout = time.Second * 20\n\t}\n\tc.mutex.Lock()\n\tc.conf = conf\n\tc.mutex.Unlock()\n\treturn nil\n}", "func (m *resmgr) SetConfig(conf *config.RawConfig) error {\n\tm.Info(\"applying new configuration from agent...\")\n\treturn m.setConfig(conf)\n}", "func (c *Client) SetConfig(conf *ClientConfig) (err error) {\n\tif conf == nil {\n\t\tconf = _defaultConf\n\t}\n\tif conf.DialTimeout <= 0 {\n\t\tconf.DialTimeout = _defaultConf.DialTimeout\n\t}\n\tif conf.Timeout <= 0 {\n\t\tconf.Timeout = _defaultConf.Timeout\n\t}\n\tif conf.PoolSize <= 0 {\n\t\tconf.PoolSize = _defaultConf.PoolSize\n\t}\n\n\tc.mutex.Lock()\n\tc.conf = conf\n\tif c.breaker == nil {\n\t\tc.breaker = breaker.NewGroup(conf.Breaker)\n\t} else {\n\t\tc.breaker.Reload(conf.Breaker)\n\t}\n\tc.mutex.Unlock()\n\treturn nil\n}", "func SetConfig(c *config.Config) {\n\tcfg = c\n}", "func SetConfig(c *Config) {\n\tconfig = c\n\treturn\n}", "func (conf *Config) SetConfig(newConfig *Config) {\n\tconf.UnicastConfig.SetConfig(newConfig.UnicastConfig)\n\tconf.ExtensionConfig.SetConfig(newConfig.ExtensionConfig)\n}", "func (i *MonitorInstance) InitConfig(\n\tctx context.Context,\n\te ctxt.Executor,\n\tclusterName,\n\tclusterVersion,\n\tdeployUser string,\n\tpaths meta.DirPaths,\n) error {\n\tgOpts := *i.topo.BaseTopo().GlobalOptions\n\tif err := i.BaseInstance.InitConfig(ctx, e, gOpts, deployUser, paths); err 
!= nil {\n\t\treturn err\n\t}\n\n\tenableTLS := gOpts.TLSEnabled\n\t// transfer run script\n\tspec := i.InstanceSpec.(*PrometheusSpec)\n\n\tcfg := &scripts.PrometheusScript{\n\t\tPort: spec.Port,\n\t\tWebExternalURL: fmt.Sprintf(\"http://%s\", utils.JoinHostPort(spec.Host, spec.Port)),\n\t\tRetention: getRetention(spec.Retention),\n\t\tEnableNG: spec.NgPort > 0,\n\n\t\tDeployDir: paths.Deploy,\n\t\tLogDir: paths.Log,\n\t\tDataDir: paths.Data[0],\n\n\t\tNumaNode: spec.NumaNode,\n\t}\n\n\tfp := filepath.Join(paths.Cache, fmt.Sprintf(\"run_prometheus_%s_%d.sh\", i.GetHost(), i.GetPort()))\n\tif err := cfg.ConfigToFile(fp); err != nil {\n\t\treturn err\n\t}\n\n\tdst := filepath.Join(paths.Deploy, \"scripts\", \"run_prometheus.sh\")\n\tif err := e.Transfer(ctx, fp, dst, false, 0, false); err != nil {\n\t\treturn err\n\t}\n\n\tif _, _, err := e.Execute(ctx, \"chmod +x \"+dst, false); err != nil {\n\t\treturn err\n\t}\n\n\ttopoHasField := func(field string) (reflect.Value, bool) {\n\t\treturn findSliceField(i.topo, field)\n\t}\n\tmonitoredOptions := i.topo.GetMonitoredOptions()\n\n\t// transfer config\n\tcfig := config.NewPrometheusConfig(clusterName, clusterVersion, enableTLS)\n\tif monitoredOptions != nil {\n\t\tcfig.AddBlackbox(i.GetHost(), uint64(monitoredOptions.BlackboxExporterPort))\n\t}\n\tcfig.ScrapeInterval = spec.ScrapeInterval\n\tcfig.ScrapeTimeout = spec.ScrapeTimeout\n\tuniqueHosts := set.NewStringSet()\n\n\tif servers, found := topoHasField(\"PDServers\"); found {\n\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\tpd := servers.Index(i).Interface().(*PDSpec)\n\t\t\tuniqueHosts.Insert(pd.Host)\n\t\t\tcfig.AddPD(pd.Host, uint64(pd.ClientPort))\n\t\t}\n\t}\n\tif servers, found := topoHasField(\"TiKVServers\"); found {\n\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\tkv := servers.Index(i).Interface().(*TiKVSpec)\n\t\t\tuniqueHosts.Insert(kv.Host)\n\t\t\tcfig.AddTiKV(kv.Host, uint64(kv.StatusPort))\n\t\t}\n\t}\n\tif servers, found := 
topoHasField(\"TiDBServers\"); found {\n\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\tdb := servers.Index(i).Interface().(*TiDBSpec)\n\t\t\tuniqueHosts.Insert(db.Host)\n\t\t\tcfig.AddTiDB(db.Host, uint64(db.StatusPort))\n\t\t}\n\t}\n\tif servers, found := topoHasField(\"TiFlashServers\"); found {\n\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\tflash := servers.Index(i).Interface().(*TiFlashSpec)\n\t\t\tuniqueHosts.Insert(flash.Host)\n\t\t\tcfig.AddTiFlashLearner(flash.Host, uint64(flash.FlashProxyStatusPort))\n\t\t\tcfig.AddTiFlash(flash.Host, uint64(flash.StatusPort))\n\t\t}\n\t}\n\tif servers, found := topoHasField(\"PumpServers\"); found {\n\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\tpump := servers.Index(i).Interface().(*PumpSpec)\n\t\t\tuniqueHosts.Insert(pump.Host)\n\t\t\tcfig.AddPump(pump.Host, uint64(pump.Port))\n\t\t}\n\t}\n\tif servers, found := topoHasField(\"Drainers\"); found {\n\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\tdrainer := servers.Index(i).Interface().(*DrainerSpec)\n\t\t\tuniqueHosts.Insert(drainer.Host)\n\t\t\tcfig.AddDrainer(drainer.Host, uint64(drainer.Port))\n\t\t}\n\t}\n\tif servers, found := topoHasField(\"CDCServers\"); found {\n\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\tcdc := servers.Index(i).Interface().(*CDCSpec)\n\t\t\tuniqueHosts.Insert(cdc.Host)\n\t\t\tcfig.AddCDC(cdc.Host, uint64(cdc.Port))\n\t\t}\n\t}\n\tif servers, found := topoHasField(\"TiKVCDCServers\"); found {\n\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\ttikvCdc := servers.Index(i).Interface().(*TiKVCDCSpec)\n\t\t\tuniqueHosts.Insert(tikvCdc.Host)\n\t\t\tcfig.AddTiKVCDC(tikvCdc.Host, uint64(tikvCdc.Port))\n\t\t}\n\t}\n\tif servers, found := topoHasField(\"Monitors\"); found {\n\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\tmonitoring := servers.Index(i).Interface().(*PrometheusSpec)\n\t\t\tuniqueHosts.Insert(monitoring.Host)\n\t\t}\n\t}\n\tif servers, found := topoHasField(\"Grafanas\"); found {\n\t\tfor i := 0; i < servers.Len(); i++ 
{\n\t\t\tgrafana := servers.Index(i).Interface().(*GrafanaSpec)\n\t\t\tuniqueHosts.Insert(grafana.Host)\n\t\t\tcfig.AddGrafana(grafana.Host, uint64(grafana.Port))\n\t\t}\n\t}\n\tif servers, found := topoHasField(\"Alertmanagers\"); found {\n\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\talertmanager := servers.Index(i).Interface().(*AlertmanagerSpec)\n\t\t\tuniqueHosts.Insert(alertmanager.Host)\n\t\t\tcfig.AddAlertmanager(alertmanager.Host, uint64(alertmanager.WebPort))\n\t\t}\n\t}\n\tif servers, found := topoHasField(\"Masters\"); found {\n\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\tmaster := reflect.Indirect(servers.Index(i))\n\t\t\thost, port := master.FieldByName(\"Host\").String(), master.FieldByName(\"Port\").Int()\n\t\t\tuniqueHosts.Insert(host)\n\t\t\tcfig.AddDMMaster(host, uint64(port))\n\t\t}\n\t}\n\n\tif servers, found := topoHasField(\"Workers\"); found {\n\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\tworker := reflect.Indirect(servers.Index(i))\n\t\t\thost, port := worker.FieldByName(\"Host\").String(), worker.FieldByName(\"Port\").Int()\n\t\t\tuniqueHosts.Insert(host)\n\t\t\tcfig.AddDMWorker(host, uint64(port))\n\t\t}\n\t}\n\n\tif monitoredOptions != nil {\n\t\tfor host := range uniqueHosts {\n\t\t\tcfig.AddNodeExpoertor(host, uint64(monitoredOptions.NodeExporterPort))\n\t\t\tcfig.AddBlackboxExporter(host, uint64(monitoredOptions.BlackboxExporterPort))\n\t\t\tcfig.AddMonitoredServer(host)\n\t\t}\n\t}\n\n\tremoteCfg, err := encodeRemoteCfg2Yaml(spec.RemoteConfig)\n\tif err != nil {\n\t\treturn err\n\t}\n\tcfig.SetRemoteConfig(string(remoteCfg))\n\n\t// doesn't work\n\tif _, err := i.setTLSConfig(ctx, false, nil, paths); err != nil {\n\t\treturn err\n\t}\n\n\tfor _, alertmanager := range spec.ExternalAlertmanagers {\n\t\tcfig.AddAlertmanager(alertmanager.Host, uint64(alertmanager.WebPort))\n\t}\n\tcfig.AddPushgateway(spec.PushgatewayAddrs)\n\n\tif spec.RuleDir != \"\" {\n\t\tfilter := func(name string) bool { return strings.HasSuffix(name, 
\".rules.yml\") }\n\t\terr := i.IteratorLocalConfigDir(ctx, spec.RuleDir, filter, func(name string) error {\n\t\t\tcfig.AddLocalRule(name)\n\t\t\treturn nil\n\t\t})\n\t\tif err != nil {\n\t\t\treturn errors.Annotate(err, \"add local rule\")\n\t\t}\n\t}\n\n\tif err := i.installRules(ctx, e, paths.Deploy, clusterName, clusterVersion); err != nil {\n\t\treturn errors.Annotate(err, \"install rules\")\n\t}\n\n\tif err := i.initRules(ctx, e, spec, paths, clusterName); err != nil {\n\t\treturn err\n\t}\n\n\tif spec.NgPort > 0 {\n\t\tpds := []string{}\n\t\tif servers, found := topoHasField(\"PDServers\"); found {\n\t\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\t\tpd := servers.Index(i).Interface().(*PDSpec)\n\t\t\t\tpds = append(pds, fmt.Sprintf(\"\\\"%s\\\"\", utils.JoinHostPort(pd.Host, pd.ClientPort)))\n\t\t\t}\n\t\t}\n\t\tngcfg := &config.NgMonitoringConfig{\n\t\t\tClusterName: clusterName,\n\t\t\tAddress: utils.JoinHostPort(i.GetListenHost(), spec.NgPort),\n\t\t\tAdvertiseAddress: utils.JoinHostPort(i.GetHost(), spec.NgPort),\n\t\t\tPDAddrs: strings.Join(pds, \",\"),\n\t\t\tTLSEnabled: enableTLS,\n\n\t\t\tDeployDir: paths.Deploy,\n\t\t\tDataDir: paths.Data[0],\n\t\t\tLogDir: paths.Log,\n\t\t}\n\n\t\tif servers, found := topoHasField(\"Monitors\"); found {\n\t\t\tfor i := 0; i < servers.Len(); i++ {\n\t\t\t\tmonitoring := servers.Index(i).Interface().(*PrometheusSpec)\n\t\t\t\tcfig.AddNGMonitoring(monitoring.Host, uint64(monitoring.NgPort))\n\t\t\t}\n\t\t}\n\t\tfp = filepath.Join(paths.Cache, fmt.Sprintf(\"ngmonitoring_%s_%d.toml\", i.GetHost(), i.GetPort()))\n\t\tif err := ngcfg.ConfigToFile(fp); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdst = filepath.Join(paths.Deploy, \"conf\", \"ngmonitoring.toml\")\n\t\tif err := e.Transfer(ctx, fp, dst, false, 0, false); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tfp = filepath.Join(paths.Cache, fmt.Sprintf(\"prometheus_%s_%d.yml\", i.GetHost(), i.GetPort()))\n\tif err := cfig.ConfigToFile(fp); err != nil {\n\t\treturn 
err\n\t}\n\tif spec.AdditionalScrapeConf != nil {\n\t\terr = mergeAdditionalScrapeConf(fp, spec.AdditionalScrapeConf)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tdst = filepath.Join(paths.Deploy, \"conf\", \"prometheus.yml\")\n\tif err := e.Transfer(ctx, fp, dst, false, 0, false); err != nil {\n\t\treturn err\n\t}\n\n\treturn checkConfig(ctx, e, i.ComponentName(), i.ComponentSource(), clusterVersion, i.OS(), i.Arch(), i.ComponentName()+\".yml\", paths, nil)\n}", "func NewConfig() *Config {\n\tconf := new(Config)\n\tconf.Agent = \"http://127.0.0.1:8080\"\n\n\treturn conf\n}", "func (d *Driver) SetConfig(cfg Config) {\n\td.p.StoreCONFIG(cfg)\n}", "func SetConfig(cfg c.Config) {\n\tconfig = cfg\n}", "func SetConfig(config *rest.Config) UpdateSettingsFunc {\n\treturn func(cache *clusterCache) {\n\t\tif !reflect.DeepEqual(cache.config, config) {\n\t\t\tlog.WithField(\"server\", cache.config.Host).Infof(\"Changing cluster config to: %v\", config)\n\t\t\tcache.config = config\n\t\t}\n\t}\n}", "func (ce *MqttConfigExecutor) StartConfig(config *gateways.ConfigContext) {\n\tce.GatewayConfig.Log.Info().Str(\"config-key\", config.Data.Src).Msg(\"operating on configuration...\")\n\tm, err := parseConfig(config.Data.Config)\n\tif err != nil {\n\t\tconfig.ErrChan <- err\n\t}\n\tce.GatewayConfig.Log.Info().Str(\"config-key\", config.Data.Src).Interface(\"config-value\", *m).Msg(\"mqtt configuration\")\n\n\tgo ce.listenEvents(m, config)\n\n\tfor {\n\t\tselect {\n\t\tcase <-config.StartChan:\n\t\t\tconfig.Active = true\n\t\t\tce.GatewayConfig.Log.Info().Str(\"config-key\", config.Data.Src).Msg(\"configuration is running\")\n\n\t\tcase data := <-config.DataChan:\n\t\t\tce.GatewayConfig.DispatchEvent(&gateways.GatewayEvent{\n\t\t\t\tSrc: config.Data.Src,\n\t\t\t\tPayload: data,\n\t\t\t})\n\n\t\tcase <-config.StopChan:\n\t\t\tce.GatewayConfig.Log.Info().Str(\"config-name\", config.Data.Src).Msg(\"stopping configuration\")\n\t\t\tconfig.DoneChan <- 
struct{}{}\n\t\t\tce.GatewayConfig.Log.Info().Str(\"config-name\", config.Data.Src).Msg(\"configuration stopped\")\n\t\t\treturn\n\t\t}\n\t}\n}", "func InitConfig() {\n\tif cfg == nil {\n\t\tcfg = &Settings{}\n\t}\n\n\tviper.SetConfigType(configType)\n\tviper.SetEnvPrefix(\"SNEAK\")\n\tviper.AutomaticEnv()\n\tviper.SetConfigName(configName)\n\n\thome, err := homedir.Dir()\n\tif err != nil {\n\t\tgui.ExitWithError(err)\n\t}\n\n\tcfgPath := fmt.Sprintf(\"%s/.sneak\", home)\n\n\t// check for whether the directory and config file already exist\n\terr = fs.CreateDir(cfgPath)\n\tif err != nil {\n\t\tgui.ExitWithError(err)\n\t}\n\n\terr = createBoxNotesSubdir(cfgPath)\n\tif err != nil {\n\t\tgui.ExitWithError(fmt.Sprintf(\"could not create dedicated notes directory: %s\", err))\n\t}\n\n\tviper.AddConfigPath(cfgPath)\n\tviper.Set(\"cfg_dir\", cfgPath)\n\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tgui.Warn(fmt.Sprintf(\"not seeing a config file where i'd expect it in %s - one sec...\", cfgPath), nil)\n\t}\n}", "func (sf *ClientOption) SetConfig(cfg Config) *ClientOption {\n\tif err := cfg.Valid(); err != nil {\n\t\tsf.config = DefaultConfig()\n\t} else {\n\t\tsf.config = cfg\n\t}\n\treturn sf\n}", "func InitConfig(botName string, token string, chatId int) BotConfig {\n\tvar config BotConfig\n\tconfig.Token = token\n\tconfig.BotName = botName\n\tconfig.ChatId = chatId\n\n\treturn config\n}", "func (p *Plugin) LoadConfig(location string) error {\n\tp.dir = location\n\tlog.Debug(\"Loading configuration from \" + location)\n\treturn nil\n}", "func (c *TunaSessionClient) SetConfig(conf *Config) error {\n\tc.Lock()\n\tdefer c.Unlock()\n\terr := mergo.Merge(c.config, conf, mergo.WithOverride)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif conf.TunaIPFilter != nil {\n\t\tc.config.TunaIPFilter = conf.TunaIPFilter\n\t}\n\tif conf.TunaNknFilter != nil {\n\t\tc.config.TunaNknFilter = conf.TunaNknFilter\n\t}\n\treturn nil\n}", "func (policy *PolicySvc) SetConfig(config 
common.ServiceConfig) error {\n\t// TODO this is a copy-paste of topology service, to refactor\n\tlog.Println(config)\n\tpolicy.config = config\n\t//\tstoreConfig := config.ServiceSpecific[\"store\"].(map[string]interface{})\n\tlog.Printf(\"Policy port: %d\", config.Common.Api.Port)\n\tpolicy.store = policyStore{}\n\tstoreConfig := config.ServiceSpecific[\"store\"].(map[string]interface{})\n\tpolicy.store.ServiceStore = &policy.store\n\treturn policy.store.SetConfig(storeConfig)\n}", "func (o *SyntheticsBrowserTest) SetConfig(v SyntheticsBrowserTestConfig) {\n\to.Config = v\n}", "func SetConfig(cfg *Config) {\n\tglobalConfig = cfg\n}", "func SetConfig(settings *config.Settings) {\n cfg = settings\n}", "func (adm Admin) SetConfig(cluster string, scope string, properties map[string]string) error {\n\tswitch strings.ToUpper(scope) {\n\tcase \"CLUSTER\":\n\t\tif allow, ok := properties[_allowParticipantAutoJoinKey]; ok {\n\t\t\tbuilder := KeyBuilder{cluster}\n\t\t\tpath := builder.clusterConfig()\n\n\t\t\tif strings.ToLower(allow) == \"true\" {\n\t\t\t\tadm.zkClient.UpdateSimpleField(path, _allowParticipantAutoJoinKey, \"true\")\n\t\t\t}\n\t\t}\n\tcase \"CONSTRAINT\":\n\tcase \"PARTICIPANT\":\n\tcase \"PARTITION\":\n\tcase \"RESOURCE\":\n\t}\n\n\treturn nil\n}", "func (b *Bridge) SetConfig(args *ConfigArg) error {\n\tif !b.isAvailable() {\n\t\treturn ErrBridgeNotAvailable\n\t} else if b.updateInProgress {\n\t\treturn ErrBridgeUpdating\n\t}\n\n\turl := b.baseURL.String() + \"api/\" + b.Username + \"/config\"\n\tbuf := new(bytes.Buffer)\n\n\terr := json.NewEncoder(buf).Encode(args.args)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq, err := http.NewRequest(http.MethodPut, url, buf)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\tclient := &http.Client{}\n\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar respEntries responseEntries\n\terr = 
json.NewDecoder(resp.Body).Decode(&respEntries)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfor _, respEntry := range respEntries {\n\t\tvar e responseEntry\n\t\tif err = json.Unmarshal(respEntry, &e); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif e.Error.Type > 0 {\n\t\t\tif args.errors == nil {\n\t\t\t\targs.errors = make(map[string]ResponseError)\n\t\t\t}\n\n\t\t\tkeys := strings.Split(e.Error.Address, \"/\")\n\t\t\tkey := keys[len(keys)-1]\n\n\t\t\targs.errors[key] = e.Error\n\t\t} else {\n\t\t\tfor path, jsonValue := range e.Success {\n\t\t\t\tkeys := strings.Split(path, \"/\")\n\n\t\t\t\tkey := keys[len(keys)-1]\n\n\t\t\t\tif key == \"name\" {\n\t\t\t\t\tvar v string\n\t\t\t\t\tif err = json.Unmarshal(*jsonValue, &v); err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\n\t\t\t\t\targs.args[key] = v\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (_options *UpdateDestinationOptions) SetConfig(config *DestinationConfig) *UpdateDestinationOptions {\n\t_options.Config = config\n\treturn _options\n}", "func (s *ClientState) SetConfig(config *Config) *Config {\n\ts.mutex.Lock()\n\tdefer s.mutex.Unlock()\n\toldconfig := s.opts.Config\n\ts.opts.Config = config\n\treturn oldconfig\n}", "func (_options *CreateDestinationOptions) SetConfig(config *DestinationConfig) *CreateDestinationOptions {\n\t_options.Config = config\n\treturn _options\n}", "func (c *Client) SetConfig(cfg config.Configuration) error {\n\tbody, err := json.Marshal(cfg)\n\tif err != nil {\n\t\treturn maskAny(err)\n\t}\n\tif _, err := c.httpPost(\"system/config\", string(body)); err != nil {\n\t\treturn maskAny(err)\n\t}\n\treturn nil\n}", "func (s *SyndClient) SetConfig(config []byte, restart bool) (err error) {\n\tctx, _ := context.WithTimeout(context.Background(), 10*time.Second)\n\tconfMsg := &pb.Conf{\n\t\tConf: config,\n\t\tRestartRequired: restart,\n\t}\n\tstatus, err := s.client.SetConf(ctx, confMsg)\n\tif err != nil {\n\t\treturn err\n\t}\n\treport := 
[][]string{\n\t\t[]string{\"Status:\", fmt.Sprintf(\"%v\", status.Status)},\n\t\t[]string{\"Version:\", fmt.Sprintf(\"%v\", status.Version)},\n\t}\n\tfmt.Print(brimtext.Align(report, nil))\n\treturn nil\n}", "func InitConfig() (err error) {\n\tConf = NewConfig()\n\tgconf = goconf.New()\n\tif err = gconf.Parse(confFile); err != nil {\n\t\treturn err\n\t}\n\tif err = gconf.Unmarshal(Conf); err != nil {\n\t\treturn err\n\t}\n\tvar serverIDi int64\n\tfor _, serverID := range gconf.Get(\"comet.addrs\").Keys() {\n\t\taddr, err := gconf.Get(\"comet.addrs\").String(serverID)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tserverIDi, err = strconv.ParseInt(serverID, 10, 32)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tConf.Comets[int32(serverIDi)] = addr\n\t}\n\treturn\n}", "func (c *Client) SetPluginConfig(pluginType, name, version string, key string, value ctypes.ConfigValue) *SetPluginConfigResult {\n\tr := &SetPluginConfigResult{}\n\tb, err := json.Marshal(map[string]ctypes.ConfigValue{key: value})\n\tif err != nil {\n\t\tr.Err = err\n\t\treturn r\n\t}\n\tresp, err := c.do(\"PUT\", fmt.Sprintf(\"/plugins/%s/%s/%s/config\", pluginType, url.QueryEscape(name), version), ContentTypeJSON, b)\n\tif err != nil {\n\t\tr.Err = err\n\t\treturn r\n\t}\n\n\tswitch resp.Meta.Type {\n\tcase rbody.SetPluginConfigItemType:\n\t\t// Success\n\t\tconfig := resp.Body.(*rbody.SetPluginConfigItem)\n\t\tr = &SetPluginConfigResult{config, nil}\n\tcase rbody.ErrorType:\n\t\tr.Err = resp.Body.(*rbody.Error)\n\tdefault:\n\t\tr.Err = ErrAPIResponseMetaType\n\t}\n\treturn r\n}", "func (e *EngineOperations) InitConfig(cfg *config.Common) {\n\te.CommonConfig = cfg\n}", "func (c *Command) SetConfig(config *commonEthereum.Config) {\n\tc.config = config\n}", "func InitConfig() (err error) {\n\tConf = NewConfig()\n\tgconf = goconf.New()\n\tif err = gconf.Parse(confFile); err != nil {\n\t\treturn err\n\t}\n\tif err := gconf.Unmarshal(Conf); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", 
"func SetConfig(cfg ServiceConfig) {\n\tserviceConfig = cfg\n}", "func ClientConfig(m discovery.PluginMeta) *plugin.ClientConfig {\n\tlogger := hclog.New(&hclog.LoggerOptions{\n\t\tName: \"plugin\",\n\t\tLevel: hclog.Trace,\n\t\tOutput: os.Stderr,\n\t})\n\n\treturn &plugin.ClientConfig{\n\t\tCmd: exec.Command(m.Path),\n\t\tHandshakeConfig: Handshake,\n\t\tVersionedPlugins: VersionedPlugins,\n\t\tManaged: true,\n\t\tLogger: logger,\n\t\tAllowedProtocols: []plugin.Protocol{plugin.ProtocolGRPC},\n\t\tAutoMTLS: true,\n\t}\n}", "func SetConfig(c *Config) {\n\tdefaultCfg = &Config{\n\t\tEncoding: c.Encoding,\n\t\tLevel: c.Level,\n\t\tDevelopment: c.Development,\n\t}\n}", "func (adm Admin) SetConfig(cluster string, scope string, properties map[string]string) error {\n\tconn := newConnection(adm.ZkSvr)\n\terr := conn.Connect()\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer conn.Disconnect()\n\n\tswitch strings.ToUpper(scope) {\n\tcase \"CLUSTER\":\n\t\tif allow, ok := properties[\"allowParticipantAutoJoin\"]; ok {\n\t\t\tkeys := KeyBuilder{cluster}\n\t\t\tpath := keys.clusterConfig()\n\n\t\t\tif strings.ToLower(allow) == \"true\" {\n\t\t\t\tconn.UpdateSimpleField(path, \"allowParticipantAutoJoin\", \"true\")\n\t\t\t}\n\t\t}\n\tcase \"CONSTRAINT\":\n\tcase \"PARTICIPANT\":\n\tcase \"PARTITION\":\n\tcase \"RESOURCE\":\n\t}\n\n\treturn nil\n}", "func (o *LoggingBindParams) SetConfig(config *models.LoggingBindConfig) {\n\to.Config = config\n}", "func Config(dsn string, watch bool) Option {\n\treturn func(s *Server) error {\n\t\tconfigStore, err := config.NewStore(dsn, watch)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to apply Config option\")\n\t\t}\n\n\t\ts.configStore = configStore\n\t\treturn nil\n\t}\n}", "func (p *Parser) SetConfig(opt *Config) { p.cfg = opt }", "func Config(ctx context.Context) *configurator.Config {\n\treturn ctx.Value(vppAgentConfigKey).(*configurator.Config)\n}", "func (s *Server) SetConfig(c ServerConfig) error {\n\ts.config = 
c\n\t// Static.\n\tif c.ServerRoot != \"\" {\n\t\ts.SetServerRoot(c.ServerRoot)\n\t}\n\tif len(c.SearchPaths) > 0 {\n\t\tpaths := c.SearchPaths\n\t\tc.SearchPaths = []string{}\n\t\tfor _, v := range paths {\n\t\t\ts.AddSearchPath(v)\n\t\t}\n\t}\n\t// HTTPS.\n\tif c.TLSConfig == nil && c.HTTPSCertPath != \"\" {\n\t\ts.EnableHTTPS(c.HTTPSCertPath, c.HTTPSKeyPath)\n\t}\n\tSetGraceful(c.Graceful)\n\n\tintlog.Printf(\"SetConfig: %+v\", s.config)\n\treturn nil\n}", "func SetConfig(cfg *PinoyConfig) {\n\tpcfg = cfg\n}", "func InitConfig(path string) *Config {\n\n\tviper.SetConfigName(\"config\")\n\tviper.AddConfigPath(path)\n\tviper.SetConfigType(\"yaml\")\n\tviper.AutomaticEnv()\n\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tcfg := GetConfig()\n\n\t// setup logging\n\tformatter := new(prefixed.TextFormatter)\n\tformatter.FullTimestamp = true\n\tlog.SetFormatter(formatter)\n\tlog.SetLevel(cfg.LogLevel)\n\treturn cfg\n}", "func (o *Options) Config() (*config.Config, error) {\n\tc := &config.Config{}\n\tif err := o.ApplyTo(c); err != nil {\n\t\treturn nil, err\n\t}\n\n\tclient, leaderElectionClient, eventClient, err := createClients(c.ComponentConfig.ClientConnection,\n\t\tc.ComponentConfig.LeaderElection.RenewDeadline.Duration)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tc.EventBroadcaster = events.NewEventBroadcasterAdapter(eventClient)\n\n\t// Set up leader election if enabled.\n\tvar leaderElectionConfig *leaderelection.LeaderElectionConfig\n\tif c.ComponentConfig.LeaderElection.LeaderElect {\n\t\t// Use the scheduler name in the first profile to record leader election.\n\t\tcoreRecorder := c.EventBroadcaster.DeprecatedNewLegacyRecorder(c.ComponentConfig.Profiles[0].SchedulerName)\n\t\tleaderElectionConfig, err = makeLeaderElectionConfig(c.ComponentConfig.LeaderElection, leaderElectionClient, coreRecorder)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tc.Client = client\n\tc.InformerFactory = 
informers.NewSharedInformerFactory(client, 0)\n\tc.PodInformer = scheduler.NewPodInformer(client, 0)\n\tc.LeaderElection = leaderElectionConfig\n\n\treturn c, nil\n}", "func InitConfig() (*Config, error) {\n\tv := viper.New()\n\n\terr := godotenv.Load()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tv.SetDefault(\"db_file\", \"db.sqlite3\")\n\tv.SetDefault(\"notification_timeout\", 10)\n\tv.SetDefault(\"monobank_integration\", false)\n\n\tv.SetConfigName(\"config\")\n\tv.AddConfigPath(\".\")\n\tv.AutomaticEnv()\n\n\terr = v.ReadInConfig()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tconfig := &Config{}\n\tconfig.NotificationTimeout = time.Duration(v.GetInt(\"notification_timeout\")) * time.Second\n\tconfig.DbFile = v.GetString(\"db_file\")\n\tconfig.UserName1 = v.GetString(\"USERNAME_1\")\n\tconfig.UserName2 = v.GetString(\"USERNAME_2\")\n\tconfig.SenderID1 = v.GetInt(\"SENDER_ID_1\")\n\tconfig.SenderID2 = v.GetInt(\"SENDER_ID_2\")\n\tconfig.TelegramToken = os.Getenv(\"TELEGRAM_TOKEN\")\n\tconfig.APIServer = v.GetInt(\"API_SERVER_PORT\")\n\n\tconfig.MonobankIntegrationEnabled = v.GetBool(\"monobank_integration\")\n\tif config.MonobankIntegrationEnabled {\n\t\tconfig.MonobankWebhookURL = os.Getenv(\"MONOBANK_WEBHOOK_URL\")\n\t\tconfig.MonobankPort = v.GetInt(\"MONOBANK_PORT\")\n\t\tconfig.MonobankToken1 = os.Getenv(\"MONOBANK_TOKEN_1\")\n\t\tconfig.MonobankToken2 = os.Getenv(\"MONOBANK_TOKEN_2\")\n\t\tconfig.MonobankAccount1 = v.GetString(\"MONOBANK_ACCOUNT_1\")\n\t\tconfig.MonobankAccount2 = v.GetString(\"MONOBANK_ACCOUNT_2\")\n\t}\n\n\treturn config, nil\n}", "func (pc *MockProviderContext) SetConfig(config config.Config) {\n\tpc.config = config\n}", "func (baseCfg *BaseConfig) InitConfig(configPath string) {\n\tconfig, err := LoadConfig(configPath)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tbaseCfg.PN_GLOBAL_ROUTER, err = jsonparser.GetString(config, \"PN_GLOBAL_ROUTER\")\n\tif err != nil {\n\t\tlog.Fatalf(\"PN_GLOBAL_ROUTER %v:\", 
err)\n\t}\n\n\tbaseCfg.PN_GLOBAL_PORTAL, err = jsonparser.GetString(config, \"PN_GLOBAL_PORTAL\")\n\tif err != nil {\n\t\tlog.Fatalf(\"PN_GLOBAL_PORTAL %v:\", err)\n\t}\n\n\tbaseCfg.PN_GLOBAL_JWT_PASSPHRASE, err = jsonparser.GetString(config, \"PN_GLOBAL_JWT_PASSPHRASE\")\n\tif err != nil {\n\t\tlog.Fatalf(\"PN_GLOBAL_JWT_PASSPHRASE %v:\", err)\n\t}\n\n\tbaseCfg.MY_POD_NAMESPACE = os.Getenv(\"MY_POD_NAMESPACE\")\n\n\tbaseCfg.DB_PATH = os.Getenv(\"DB_PATH\")\n\tif baseCfg.DB_PATH == \"\" {\n\t\tbaseCfg.DB_PATH = \"./marvin-connector.db\"\n\t}\n\n\tbaseCfg.CONNECTOR_ADDRESS = os.Getenv(\"CONNECTOR_ADDRESS\")\n\n\tbaseCfg.NEW_PUBLISHED_POLLING_INTERVAL = os.Getenv(\"NEW_PUBLISHED_POLLING_INTERVAL\")\n\tif baseCfg.NEW_PUBLISHED_POLLING_INTERVAL == \"\" {\n\t\tbaseCfg.NEW_PUBLISHED_POLLING_INTERVAL = \"2h\"\n\t}\n\n\tb, err := strconv.ParseBool(os.Getenv(\"CHECK_UPGRADE_STATUS_ENABLED\"))\n\tif err != nil {\n\t\tlog.Printf(\"[Warning] parse bool CHECK_UPGRADE_STATUS_ENABLED failed. Not a boolean\")\n\t\tbaseCfg.CHECK_UPGRADE_STATUS_ENABLED = false\n\t} else {\n\t\tbaseCfg.CHECK_UPGRADE_STATUS_ENABLED = b\n\t}\n\n\tbaseCfg.UPGRADE_STATUS_POLLING_INTERVAL = os.Getenv(\"UPGRADE_STATUS_POLLING_INTERVAL\")\n\tif baseCfg.UPGRADE_STATUS_POLLING_INTERVAL == \"\" {\n\t\tbaseCfg.UPGRADE_STATUS_POLLING_INTERVAL = \"1m\"\n\t}\n\n\tbaseCfg.JOB_TIMEOUT = os.Getenv(\"JOB_TIMEOUT\")\n\tif baseCfg.JOB_TIMEOUT == \"\" {\n\t\tbaseCfg.JOB_TIMEOUT = \"2m\"\n\t}\n\n\tlog.Printf(\"SECRET.PN_GLOBAL_ROUTER %v\", baseCfg.PN_GLOBAL_ROUTER)\n\tlog.Printf(\"SECRET.PN_GLOBAL_PORTAL %v\", baseCfg.PN_GLOBAL_PORTAL)\n\tlog.Printf(\"ENV.MY_POD_NAMESPACE %v\", baseCfg.MY_POD_NAMESPACE)\n\tlog.Printf(\"ENV.DB_PATH %v\", baseCfg.DB_PATH)\n\tlog.Printf(\"ENV.CONNECTOR_ADDRESS %v\", baseCfg.CONNECTOR_ADDRESS)\n\tlog.Printf(\"ENV.NEW_PUBLISHED_POLLING_INTERVAL %v\", baseCfg.NEW_PUBLISHED_POLLING_INTERVAL)\n\tlog.Printf(\"ENV.UPGRADE_STATUS_POLLING_INTERVAL %v\", 
baseCfg.UPGRADE_STATUS_POLLING_INTERVAL)\n\tlog.Printf(\"ENV.JOB_TIMEOUT %v\", baseCfg.JOB_TIMEOUT)\n\n}", "func InitConfig(cfgFile string) {\n\tif cfgFile != \"\" {\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tCheckErr(err.Error())\n\t\t}\n\t\tviper.SetConfigType(\"yaml\")\n\t\tviper.SetConfigFile(filepath.Join(home, \".gotils.yml\"))\n\t}\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tCheckErr(err.Error())\n\t}\n}", "func (c *Config) LoadConfig(configPath string) error {\n\tvar configFile map[string]toml.Primitive\n\tcontents, err := replaceEnvsFile(configPath)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Error loading config file: %v\", err)\n\t}\n\tif _, err = toml.Decode(contents, &configFile); err != nil {\n\t\treturn fmt.Errorf(\"Error parsing config file: %v\", err)\n\t}\n\n\t// Agent config\n\tagentConfig, ok := configFile[\"agent\"]\n\tif !ok {\n\t\treturn fmt.Errorf(\"%s: missing [agent] config\", configPath)\n\t}\n\n\tignoreFields := map[string]interface{}{}\n\terr = toml.PrimitiveDecodeStrict(agentConfig, c.Agent, ignoreFields)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Error parsing [agent] config: %v\", err)\n\t}\n\n\tdelete(configFile, \"agent\")\n\n\t// Plugin configs\n\tfor pluginName, pluginConf := range configFile {\n\t\tif err = c.addPlugin(pluginName, pluginConf); err != nil {\n\t\t\treturn fmt.Errorf(\"Error parsing [%s] config: %v\", pluginName, err)\n\t\t}\n\t}\n\n\tfor _, kinds := range plugins.PluginEventKinds {\n\t\tfor _, kind := range kinds {\n\t\t\tif _, ok := c.EventKinds[kind]; !ok {\n\t\t\t\tc.EventKinds[kind] = true\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func InitConfig(t *testing.T) (conf *config.Config, configPath string,\n\tkubeConfigPath string, cleanup func(*testing.T)) {\n\tt.Helper()\n\ttestDir, cleanup := TempDir(t, \"airship-test\")\n\n\tconfigData, err := ioutil.ReadFile(testAirshipConfig)\n\tif err != nil {\n\t\tt.Logf(\"Could not read file %q\", 
testAirshipConfig)\n\t}\n\tkubeConfigData, err := ioutil.ReadFile(testKubeConfig)\n\tif err != nil {\n\t\tt.Logf(\"Could not read file %q\", kubeConfigData)\n\t}\n\n\tconfigPath = filepath.Join(testDir, \"config\")\n\terr = ioutil.WriteFile(configPath, configData, 0600)\n\trequire.NoError(t, err)\n\n\tkubeConfigPath = filepath.Join(testDir, \"kubeconfig\")\n\terr = ioutil.WriteFile(kubeConfigPath, kubeConfigData, 0600)\n\trequire.NoError(t, err)\n\n\tconf = config.NewConfig()\n\terr = conf.LoadConfig()\n\trequire.NoError(t, err)\n\treturn conf, configPath, kubeConfigPath, cleanup\n}", "func (l *LinuxConfig) SetConfig(c []byte) {\n\tl.Config = c\n}", "func InitConfig(){\n\n\tviper.SetConfigName(\"config\")\n\tviper.AddConfigPath(\"others\")\n\n\n\tviper.SetConfigType(\"toml\")\n\terr := viper.ReadInConfig()\n\n\tif err != nil {\n\n\t\tfmt.Println(err.Error())\n\t\tfmt.Printf(\"Exiting application. got error %v\",err.Error())\n\t\tos.Exit(1)\n\t}\n\n\terr = viper.Unmarshal(&Conf)\n\tif err != nil{\n\t\tfmt.Println(err)\n\t}\n\tfmt.Printf(\"config %v\",Conf)\n\tConf.loaded = true\n\n\tlogFileMaxSize := 100\n\n\terr = logger.Init(Conf.LogConfig.LogDirectory,\n\t\t800,\n\t\t20,\n\t\tuint32(logFileMaxSize),\n\t\tConf.LogConfig.EnableLogTrace,\n\t\tConf.LogConfig.Log)\n\n\tif err != nil {\n\t\tfmt.Println(\"Error in intializing logger, is : \", err)\n\t}\n\n\terr = logger.SetFilenamePrefix(Conf.LogConfig.LogFilePrefix, Conf.LogConfig.LogFilePrefix)\n\tlogger.SetLogThrough(false)\n\n\tif err != nil {\n\t\tfmt.Println(\"Error is : \", err.Error())\n\t}\n\n}", "func InitConfig() {\n\tglobalConfig.BackendServerAddr = \"http://localhost:8080\"\n}", "func (s *cpuSource) SetConfig(conf source.Config) {\n\tswitch v := conf.(type) {\n\tcase *Config:\n\t\ts.config = v\n\t\ts.initCpuidFilter()\n\tdefault:\n\t\tpanic(fmt.Sprintf(\"invalid config type: %T\", conf))\n\t}\n}", "func (m *PooledWrapper) SetConfig(_ context.Context, _ ...wrapping.Option) (*wrapping.WrapperConfig, error) 
{\n\treturn nil, nil\n}", "func (c *ConfigComponent) SetConfig(updated *config.Config) error {\n\treturn c.setConfigUnsynced(updated)\n}", "func Config(nrWorkers int, waitForWorker, waitForCompletion time.Duration) {\n\tconfig = configuration{nrWorkers, waitForWorker, waitForCompletion}\n}", "func InitConfig(configName string) func() {\n\treturn func() {\n\t\tConfig.ConfigFile = viper.GetString(ConfigFile) // enable ability to specify config file via flag\n\t\tConfig.ConfigDir = viper.GetString(ConfigDir)\n\t\tviper.SetEnvPrefix(\"cilium\")\n\n\t\t// INFO: 启动时候用的 --config-dir=/tmp/cilium/config-map, 每一个文件名 filename 是 key,文件内容是 value\n\t\tif Config.ConfigDir != \"\" {\n\t\t\tif _, err := os.Stat(Config.ConfigDir); os.IsNotExist(err) {\n\t\t\t\tlog.Fatalf(\"Non-existent configuration directory %s\", Config.ConfigDir)\n\t\t\t}\n\n\t\t\tif m, err := ReadDirConfig(Config.ConfigDir); err != nil {\n\t\t\t\tlog.Fatalf(\"Unable to read configuration directory %s: %s\", Config.ConfigDir, err)\n\t\t\t} else {\n\t\t\t\terr := MergeConfig(m)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Fatalf(\"Unable to merge configuration: %s\", err)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif Config.ConfigFile != \"\" {\n\t\t\tviper.SetConfigFile(Config.ConfigFile)\n\t\t} else {\n\t\t\tviper.SetConfigName(configName) // name of config file (without extension)\n\t\t\tviper.AddConfigPath(\"$HOME\") // adding home directory as first search path\n\t\t}\n\n\t\t// If a config file is found, read it in.\n\t\tif err := viper.ReadInConfig(); err == nil {\n\t\t\tlog.WithField(logfields.Path, viper.ConfigFileUsed()).\n\t\t\t\tInfo(\"Using config from file\")\n\t\t} else if Config.ConfigFile != \"\" {\n\t\t\tlog.WithField(logfields.Path, Config.ConfigFile).\n\t\t\t\tFatal(\"Error reading config file\")\n\t\t} else {\n\t\t\tlog.WithField(logfields.Reason, err).Info(\"Skipped reading configuration file\")\n\t\t}\n\t}\n}", "func NewConfig(stdout, stderr io.Writer) (*Config, error) {\n\tcfg := Config{\n\t\tStdout: 
stdout,\n\t\tStderr: stderr,\n\t}\n\tif err := envconfig.Process(\"plugin\", &cfg); err != nil {\n\t\treturn nil, err\n\t}\n\n\tif err := envconfig.Process(\"\", &cfg); err != nil {\n\t\treturn nil, err\n\t}\n\n\tif justNumbers.MatchString(cfg.Timeout) {\n\t\tcfg.Timeout = fmt.Sprintf(\"%ss\", cfg.Timeout)\n\t}\n\n\tif cfg.Debug && cfg.Stderr != nil {\n\t\tcfg.logDebug()\n\t}\n\n\tcfg.deprecationWarn()\n\n\treturn &cfg, nil\n}", "func SetConfig(config *Config) {\n\t// Đọc file config.local.json\n\tconfigFile, err := os.Open(\"config.local.json\")\n\tdefer configFile.Close()\n\tif err != nil {\n\t\t// Nếu không có file config.local.json thì đọc file config.development.json\n\t\tconfigFile, err = os.Open(\"config.default.json\")\n\t\tdefer configFile.Close()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}\n\n\t// Parse dữ liệu JSON lưu vào struct blog\n\tjsonParser := json.NewDecoder(configFile)\n\terr = jsonParser.Decode(&config)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}", "func (s *UpdateResolverRuleInput) SetConfig(v *ResolverRuleConfig) *UpdateResolverRuleInput {\n\ts.Config = v\n\treturn s\n}", "func (s *UpdateResolverRuleInput) SetConfig(v *ResolverRuleConfig) *UpdateResolverRuleInput {\n\ts.Config = v\n\treturn s\n}", "func NewConfig() *Config {\n\tc := &Config{\n\t\tAPI: &APIConfig{\n\t\t\tHost: \"127.0.0.1\",\n\t\t\tPort: 3031,\n\t\t},\n\t\tBackendURLs: []string{},\n\t\tCacheDir: \"/var/cache/sensu\",\n\t\tEnvironment: \"default\",\n\t\tKeepaliveInterval: 20,\n\t\tKeepaliveTimeout: 120,\n\t\tOrganization: \"default\",\n\t\tPassword: \"P@ssw0rd!\",\n\t\tSocket: &SocketConfig{\n\t\t\tHost: \"127.0.0.1\",\n\t\t\tPort: 3030,\n\t\t},\n\t\tUser: \"agent\",\n\t}\n\n\thostname, err := os.Hostname()\n\tif err != nil {\n\t\tlogger.WithError(err).Error(\"error getting hostname\")\n\t\t// TODO(greg): wat do?\n\t\tc.AgentID = \"unidentified-sensu-agent\"\n\t}\n\tc.AgentID = hostname\n\n\treturn c\n}", "func SetConfig() gin.HandlerFunc {\n\treturn func(c *gin.Context) 
{\n\t\tc.Set(\"CorsOrigin\", \"*\")\n\t\tc.Set(\"Verbose\", true)\n\t\tc.Next()\n\t}\n}", "func InitConfig() {\n\terr := gonfig.GetConf(\"./config.json\", &Configuration)\n\tutils.Check(err)\n}", "func initConfig(centralConfig corecfg.CentralConfig) (interface{}, error) {\n\trootProps := RootCmd.GetProperties()\n\t// Parse the config from bound properties and setup gateway config\n\tgatewayConfig = &config.GatewayConfig{\n\t\tSpecPath: rootProps.StringPropertyValue(\"3scale-api-gateway.specPath\"),\n\t\tConfigKey1: rootProps.StringPropertyValue(\"3scale-api-gateway.config_key_1\"),\n\t\tConfigKey2: rootProps.StringPropertyValue(\"3scale-api-gateway.config_key_2\"),\n\t\tConfigKey3: rootProps.StringPropertyValue(\"3scale-api-gateway.config_key_3\"),\n\t}\n\n\tagentConfig := config.AgentConfig{\n\t\tCentralCfg: centralConfig,\n\t\tGatewayCfg: gatewayConfig,\n\t}\n\treturn agentConfig, nil\n}", "func (w *Wioctl) SetConfig(cfg *Config) {\n\tw.lock.Lock()\n\tw.cfg = cfg\n\tw.lock.Unlock()\n}", "func InitConfig() {\n\n\tviper.SetDefault(\"Threads\", \"4\")\n\tviper.SetDefault(\"RootDirectory\", \"/opt/lht\")\n\tviper.SetConfigName(\"lht\") // name of config file (without extension)\n\tviper.AddConfigPath(\"/etc\") // adding /etc directory as first search path\n\n\tif !project.CheckInstalled() {\n\t\tfmt.Fprintf(os.Stderr, \"lht is not configured.. 
running installation.\\n\")\n\t\tif err := project.Install(); err != nil {\n\t\t\tfmt.Fprintf(os.Stderr, \"could not install: %v\\n\", err)\n\t\t\tif err == project.ErrNotRoot {\n\t\t\t\tfmt.Fprintf(os.Stderr, \"please run `sudo lht` to install default files\")\n\t\t\t}\n\t\t\tos.Exit(1)\n\t\t}\n\t\tfmt.Fprintf(os.Stderr, \"lht has been installed.\\n\")\n\t\tos.Exit(0)\n\t}\n\n\t// config file should be found since we have checked the installation\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tfmt.Fprintf(os.Stderr, \"could not read configuration file: %v\\n\", err)\n\t\tos.Exit(1)\n\t}\n}", "func (d *RMQ) SetConfig(key string, value interface{}) common.Config {\n\treturn nil\n}", "func InitConfig() {\n\tv := viper.New()\n\tv.AddConfigPath(\".\")\n\tv.SetConfigFile(\"config.json\")\n\tv.SetConfigType(\"json\")\n\n\t// default values\n\tv.SetDefault(\"MYSQL_SCHEMA\", \"transcription-service\")\n\tv.SetDefault(\"MYSQL_PORT\", 3306)\n\n\terr := v.ReadInConfig()\n\n\t// local config file does not exist\n\tif err != nil {\n\t\tv.BindEnv(\"MYSQL_ROOT_PWD\")\n\t\tv.BindEnv(\"MYSQL_HOSTNAME\")\n\t\tv.BindEnv(\"MYSQL_USER\")\n\t\tv.BindEnv(\"AWS_BUCKET_NAME\")\n\t} else {\n\t\t// overwrite if env is present\n\t\tv.AutomaticEnv()\n\t}\n\n\tconfig = new(Configuration)\n\n\tbucketName := v.GetString(\"AWS_BUCKET_NAME\")\n\n\tconfig.AwsBucketName = bucketName\n\n\tvar dbConfig DbConfiguration\n\tdbConfig.port = v.GetInt(\"MYSQL_PORT\")\n\tdbConfig.hostname = v.GetString(\"MYSQL_HOSTNAME\")\n\tdbConfig.username = v.GetString(\"MYSQL_USER\")\n\tdbConfig.password = v.GetString(\"MYSQL_ROOT_PWD\")\n\tdbConfig.schemaName = v.GetString(\"MYSQL_SCHEMA\")\n\n\tconfig.DbConfig = &dbConfig\n}", "func InitConfig() {\n\tviper.SetConfigType(\"json\") // or viper.SetConfigType(\"JSON\")\n\tviper.SetConfigName(CONFFILENAME) // name of config file (without extension)\n\tviper.AddConfigPath(os.Getenv(PROXYCONFPATH)) // path to look for the config file in\n\tviper.AddConfigPath(\".\") // 
optionally look for config in the working directory\n\tviper.AddConfigPath(filepath.Join(os.Getenv(\"GOPATH\"), GITHUBCINFOATH)) // optionally look for config in the github directory\n\n\t// Find and read the config file, handle errors reading the config file\n\tif err := viper.ReadInConfig(); err != nil {\n\t\t// The version of Viper we use claims the config type isn't supported when in fact the file hasn't been found\n\t\t// Display a more helpful message to avoid confusing the user.\n\t\tif strings.Contains(fmt.Sprint(err), \"Unsupported Config Type\") {\n\t\t\tlog.Panicf(\"Could not find config file. \"+\n\t\t\t\t\"Please make sure that %s or current dir is set to a path which contains %s.yaml\", PROXYCONFPATH, CONFFILENAME)\n\t\t}\n\n\t\tlog.Panicln(errors.WithMessage(err, fmt.Sprintf(\"Error when reading %s.yaml config file\", CONFFILENAME)))\n\t}\n\n\tif err := viper.Unmarshal(&conf); err != nil {\n\t\tlog.Panicln(\"Error to unmarshal config, error:\", err)\n\t}\n}", "func New() *Config {\n\tc := &Config{\n\t\tAgent: &AgentConfig{\n\t\t\tEventReceiverCount: 5,\n\t\t\tEventQueueLimit: 50,\n\t\t\tHealthCheckPort: 10240,\n\t\t\tLogLevel: \"info\",\n\t\t},\n\t\tPlugins: make([]*pluginrunner.PluginRunner, 0),\n\t\tEventKinds: make(map[events.EventKind]bool),\n\t}\n\treturn c\n}", "func SetConfig(filepath string) *Config {\n\tc := new(Config)\n\tc.filepath = filepath\n\n\treturn c\n}", "func SetLocalConfig() error {\n\tclient := &http.Client{}\n\n\treq, err := http.NewRequest(\"GET\", CONFIGDOMAIN+GETCONFEP, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\treq.Header.Add(\"Accept\", \"application/json\")\n\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefer resp.Body.Close()\n\n\t// Deconde the JSON response into the local config object\n\tConfig.Lock()\n\terr = json.NewDecoder(resp.Body).Decode(&Config)\n\tConfig.Unlock()\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func configInit() {\n\tLoadConfig(configPath, 
\"config\")\n\tConfig().SetDefault(\"log-level\", \"debug\")\n\tConfig().SetDefault(\"addr\", \"localhost:8081\")\n}", "func InitConfig() {\n\tlog.Println(\"initconfig\")\n\n\tif FileName != \"\" {\n\t\tviper.SetConfigFile(FileName)\n\t} else {\n\t\tviper.SetConfigName(\"config\") // name of config file (without extension)\n\t\tviper.AddConfigPath(\"/etc/voucher/\") // path to look for the config file in\n\t\tviper.AddConfigPath(\"$HOME/.voucher\")\n\t\tviper.AddConfigPath(\"./config\")\n\t\tviper.AddConfigPath(\".\") // optionally look for config in the working directory\n\t}\n\terr := viper.ReadInConfig()\n\tif err != nil {\n\t\tlog.Fatalf(\"config file: %s \\n\", err)\n\t}\n\tviper.AutomaticEnv()\n}", "func InitConfig() (*Config, error) {\n\tv := viper.New()\n\tv.AutomaticEnv()\n\n\tc := Config{\n\t\tHTTPPort: v.GetString(httpPort),\n\t\tPostgreSQLConfig: PostgreSQLConfig{\n\t\t\tHost: v.GetString(postgreSQLHost),\n\t\t\tUser: v.GetString(postgreSQLUser),\n\t\t\tPassword: v.GetString(postgreSQLPassword),\n\t\t\tDB: v.GetString(postgreSQLDB),\n\t\t\tPort: v.GetInt64(postgreSQLPort),\n\t\t\tSubscriptionsTable: PostgreSQLTableConfig{\n\t\t\t\tSchema: v.GetString(postgreSQLSubscriptionsTableSchema),\n\t\t\t\tName: v.GetString(postgreSQLSubscriptionsTableName),\n\t\t\t},\n\t\t},\n\t}\n\n\tif err := validateConfig(v); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &c, nil\n}", "func SetConfig(config *Config) error {\n\tvar (\n\t\tnewDebugLogger logger.Logger\n\t\tnewLogger logger.Logger\n\t\terr error\n\t\tdebugLoggerConfig = logger.Config{Level: logger.DebugLevel}\n\t\tloggerConfig = logger.Config{Level: logger.InfoLevel}\n\t\tengine = Zerolog\n\t)\n\n\tif config != nil {\n\t\tengine = config.Engine\n\n\t\tloggerConfig = logger.Config{\n\t\t\tLevel: logger.StringToLevel(config.Level),\n\t\t\tLogFile: config.LogFile,\n\t\t\tTimeFormat: config.TimeFormat,\n\t\t\tCaller: config.Caller,\n\t\t}\n\n\t\t// copy\n\t\tdebugLoggerConfig = loggerConfig\n\n\t\t// custom 
output file\n\t\tdebugLoggerConfig.LogFile = config.DebugFile\n\t}\n\n\tloggerConfig.UseColor = isDev\n\tdebugLoggerConfig.UseColor = isDev\n\tloggerConfig.UseJSON = !isDev\n\tdebugLoggerConfig.UseJSON = !isDev\n\n\tnewLogger, err = NewLogger(engine, &loggerConfig)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// extra check because it is very difficult to debug if the log itself causes the panic\n\tif newLogger != nil {\n\t\tinfoLogger = newLogger\n\t\twarnLogger = newLogger\n\t\terrorLogger = newLogger\n\t\tfatalLogger = newLogger\n\t}\n\n\tnewDebugLogger, err = NewLogger(engine, &debugLoggerConfig)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif newDebugLogger != nil {\n\t\tdebugLogger = newDebugLogger\n\t}\n\n\treturn nil\n}", "func (client *HTTPClient) NewConfig(config *Config) {\n\tclient.sendRequest(\"POST\", config, nil, &HTTPClientMetrics{NewConfig: true})\n}", "func LoadConfig() (*Config, error) {\n\tconfig := Config{\n\t\tMetricBatchIntervalMilliseconds: 60000,\n\t\tMetricSourceID: \"metron\",\n\t\tIncomingUDPPort: 3457,\n\t\tDebugPort: 14824,\n\t\tGRPC: GRPC{\n\t\t\tPort: 3458,\n\t\t},\n\t}\n\terr := envstruct.Load(&config)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif config.RouterAddr == \"\" {\n\t\treturn nil, fmt.Errorf(\"RouterAddr is required\")\n\t}\n\n\tconfig.RouterAddrWithAZ, err = idna.ToASCII(config.RouterAddrWithAZ)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tconfig.RouterAddrWithAZ = strings.Replace(config.RouterAddrWithAZ, \"@\", \"-\", -1)\n\n\treturn &config, nil\n}", "func NewConfig(cs config.Service, gm *config.GameModel, configPath string, logger log.Logger) *ConfigBridge {\n\tb := NewConfigBridge(nil)\n\n\tb.configPath = configPath\n\n\t// Setup dependencies.\n\tb.config = cs\n\tb.logger = logger\n\n\t// Setup model.\n\tb.SetGames(gm)\n\n\t// Set initial state.\n\tb.SetPrerequisitesLoaded(false)\n\tb.SetPrerequisitesError(false)\n\n\treturn b\n}", "func (Config *MailConfig) SetConfig(s, mp, ms, mr, sbj string, p int, msg 
string) {\n\n\tConfig.Server = s // Set server\n\tConfig.MailPassword = mp // Set password\n\tConfig.MailSender = ms // Set who is sending\n\tConfig.MailReceiver = mr // Set who is receiving\n\tConfig.Port = p // Set the SMTP port\n\tConfig.Message = msg // Set the message to be send\n\tConfig.Subject = sbj // Set the message subject\n\n}", "func SetIceConfig(ctx *cli.Context, cfg *ice.Config) {\n\tif ctx.GlobalIsSet(trackedAppFlag.Name) {\n\t\tcfg.Monitor.TrackedApp = ctx.GlobalString(trackedAppFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(algorithmIDFlag.Name) {\n\t\tcfg.Monitor.AlgorithmID = ctx.GlobalString(algorithmIDFlag.Name)\n\t\tcfg.CIoTANet.AlgorithmID = ctx.GlobalString(algorithmIDFlag.Name)\n\t\tcfg.IceNet.AlgorithmID = ctx.GlobalString(algorithmIDFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(detectionAlgIDFlag.Name) {\n\t\tcfg.Monitor.DetectionAlgorithmID = ctx.GlobalString(detectionAlgIDFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(initialModelFlag.Name) {\n\t\tcfg.Monitor.InitialModelPath = ctx.GlobalString(initialModelFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(scoreCalcSkid.Name) {\n\t\tcfg.Monitor.ScoreCalcSkid = ctx.GlobalUint64(scoreCalcSkid.Name)\n\t}\n\tif ctx.GlobalIsSet(trainingTimeFlag.Name) {\n\t\tcfg.Monitor.TrainingTime = ctx.GlobalUint(trainingTimeFlag.Name)\n\t\tcfg.CIoTANet.TrainingTime = ctx.GlobalUint(trainingTimeFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(trainingScoreLevelFlag.Name) {\n\t\tcfg.Monitor.TrainingScoreLevel = ctx.GlobalFloat64(trainingScoreLevelFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(filterFromFlag.Name) {\n\t\tcfg.Monitor.FilterFrom = ctx.GlobalUint64(filterFromFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(filterToFlag.Name) {\n\t\tcfg.Monitor.FilterTo = ctx.GlobalUint64(filterToFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(regionSizeFlag.Name) {\n\t\tcfg.Monitor.RegionSize = ctx.GlobalUint(regionSizeFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(trajectoryLengthFlag.Name) {\n\t\tcfg.Monitor.TrajectoryLength = ctx.GlobalUint(trajectoryLengthFlag.Name)\n\t}\n\tif 
ctx.GlobalIsSet(anomalyThresholdFlag.Name) {\n\t\tcfg.Monitor.AnomalyThreshold = ctx.GlobalFloat64(anomalyThresholdFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(batchSizeFlag.Name) {\n\t\tcfg.Monitor.BatchSize = ctx.GlobalInt(batchSizeFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(testAppFilePathFlag.Name) {\n\t\tcfg.Monitor.TestAppFilePath = ctx.GlobalString(testAppFilePathFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(useProcMapFlag.Name) {\n\t\tcfg.Monitor.UseProcMap = ctx.GlobalBool(useProcMapFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(logJumpsFlag.Name) {\n\t\tcfg.Monitor.LogJumps = ctx.GlobalBool(logJumpsFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(networkingFlag.Name) {\n\t\tcfg.CIoTANet.Networking = ctx.GlobalString(networkingFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(senderCycleTimeFlag.Name) {\n\t\tcfg.CIoTANet.CycleTime = ctx.GlobalUint(senderCycleTimeFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(nccPortFlag.Name) {\n\t\tcfg.IceNet.NccPort = ctx.GlobalUint(nccPortFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(nccBucketSizeFlag.Name) {\n\t\tcfg.IceNet.NccBucketSize = ctx.GlobalUint(nccBucketSizeFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(nccKeyValidityFlag.Name) {\n\t\tcfg.IceNet.NccKeyExpirationPeriod = ctx.GlobalUint(nccKeyValidityFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(alphaFlag.Name) {\n\t\tcfg.Ciota.Alpha = ctx.GlobalFloat64(alphaFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(pcFlag.Name) {\n\t\tcfg.Ciota.Pc = ctx.GlobalFloat64(pcFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(maxDriftTimeFlag.Name) {\n\t\tcfg.Ciota.MaxDriftTime = ctx.GlobalUint(maxDriftTimeFlag.Name)\n\t}\n\tcfg.IceNet.CiotaParams = &cfg.Ciota\n\tif ctx.GlobalIsSet(blockSizeFlag.Name) {\n\t\tcfg.Blockchain.BlockSize = ctx.GlobalUint64(blockSizeFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(maxChainSizeFlag.Name) {\n\t\tcfg.Blockchain.MaxChainSize = ctx.GlobalUint(maxChainSizeFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(notifyMonitoringFlag.Name) {\n\t\tcfg.StatsClient.Enabled = true\n\n\t\tif ctx.GlobalIsSet(appIDFlag.Name) {\n\t\t\tappID := 
ctx.GlobalString(appIDFlag.Name)\n\t\t\tversion := params.VersionWithCommit(gitCommit)\n\t\t\tcfg.StatsClient.AppID = appID\n\t\t\tcfg.StatsClient.Version = version\n\t\t\tcfg.StatsClient.NodeName = common.MakeName(appID, version)\n\t\t}\n\n\t\tif ctx.GlobalIsSet(statsIntervalFlag.Name) {\n\t\t\tcfg.StatsService.StatsInterval = ctx.GlobalUint64(statsIntervalFlag.Name)\n\t\t}\n\t\tif ctx.GlobalIsSet(wsSecretFlag.Name) {\n\t\t\tcfg.StatsClient.WSSecret = ctx.GlobalString(wsSecretFlag.Name)\n\t\t}\n\t\tif ctx.GlobalIsSet(monitoringSystemAddressFlag.Name) {\n\t\t\tcfg.StatsClient.MonitoringAddress = ctx.GlobalString(monitoringSystemAddressFlag.Name)\n\t\t\tinitClientSecurityConfig(ctx, cfg)\n\t\t}\n\t}\n\tif ctx.GlobalIsSet(nodeLatitudeFlag.Name) && ctx.GlobalIsSet(nodeLongitudeFlag.Name) &&\n\t\tctx.GlobalIsSet(nodeLabelFlag.Name) {\n\t\tlatitudeValue := ctx.GlobalFloat64(nodeLatitudeFlag.Name)\n\t\tlongitudeValue := ctx.GlobalFloat64(nodeLongitudeFlag.Name)\n\t\tif latitudeValue > -90.0 && latitudeValue < 90.0 &&\n\t\t\tlongitudeValue > -180.0 && longitudeValue < 180.0 {\n\n\t\t\tlocation := &monitoring.Location{\n\t\t\t\tLatitude: latitudeValue,\n\n\t\t\t\tLongitude: longitudeValue,\n\t\t\t\tLabel: ctx.GlobalString(nodeLabelFlag.Name),\n\t\t\t}\n\t\t\tcfg.StatsClient.Location = location\n\t\t} else {\n\t\t\tlog.Warn(\"Location coordinates are not within correct range. Using default location.\")\n\t\t}\n\t} else {\n\t\tif ctx.GlobalIsSet(nodeLatitudeFlag.Name) || ctx.GlobalIsSet(nodeLongitudeFlag.Name) ||\n\t\t\tctx.GlobalIsSet(nodeLabelFlag.Name) {\n\t\t\tlog.Warn(\"Some of the location values are not provided. 
Using default location.\")\n\t\t}\n\t}\n\tif ctx.GlobalIsSet(simulationFlag.Name) {\n\t\tcfg.Monitor.Simulation = true\n\t}\n\tif ctx.GlobalIsSet(forceBidirectFlag.Name) {\n\t\tcfg.CIoTANet.ForceBidirect = true\n\t}\n\tif ctx.GlobalIsSet(exchangeFormatFlag.Name) {\n\t\tcfg.Blockchain.ExchangeFormat = ctx.GlobalString(exchangeFormatFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(profileFlag.Name) {\n\t\tcfg.Profile = ctx.GlobalBool(profileFlag.Name)\n\t}\n\tcfg.IceNet.BlockchainConfig = &cfg.Blockchain\n\n\tif ctx.GlobalIsSet(whiteListedIPFlag.Name) {\n\t\tcfg.StatsClient.WhiteListedIP = ctx.GlobalString(whiteListedIPFlag.Name)\n\t}\n\tif ctx.GlobalIsSet(systemStateCheckDelayFlag.Name) {\n\t\tcfg.SystemStateMonitor.StateCheckInterval = time.Duration(ctx.GlobalUint(systemStateCheckDelayFlag.Name)) * time.Second\n\t}\n\tif ctx.GlobalIsSet(systemStateTimeToLiveFlag.Name) {\n\t\tcfg.SystemStateMonitor.StateCheckInterval = time.Duration(ctx.GlobalUint(systemStateTimeToLiveFlag.Name)) * time.Minute\n\t}\n\tif ctx.GlobalIsSet(systemStateMonitorDisabledFlag.Name) {\n\t\tcfg.SystemStateMonitor.Enabled = false\n\t}\n\tif ctx.GlobalIsSet(autoHealingFlag.Name) {\n\t\tcfg.SystemStateMonitor.AutoHealing = true\n\t}\n\tif ctx.GlobalIsSet(remediationMappingLocationFlag.Name) {\n\t\tcfg.SystemStateMonitor.RemediationFileLocation = ctx.GlobalString(remediationMappingLocationFlag.Name)\n\t}\n\n\t//Set values for training time here so that we also override them if we load the config using config file and\n\t//not only when we use flags.\n\tif cfg.Monitor.InitialModelPath != \"\" {\n\t\tcfg.Monitor.TrainingTime = 0\n\t\tcfg.CIoTANet.TrainingTime = 0\n\t}\n}", "func initializeConfig() (*viper.Viper, error) {\n\tv = viper.New()\n\n\tv.SetEnvPrefix(\"DCC\")\n\tv.AutomaticEnv()\n\n\tif cfgFile != \"\" {\n\t\tv.SetConfigFile(cfgFile)\n\t} else {\n\t\tif cfgPath == \"\" {\n\t\t\tv.AddConfigPath(\".\")\n\t\t} else {\n\t\t\tv.AddConfigPath(cfgPath)\n\t\t}\n\t}\n\n\terr := v.ReadInConfig()\n\tif err != 
nil {\n\t\tif _, ok := err.(viper.ConfigParseError); !ok {\n\t\t\treturn v, fmt.Errorf(\"unable to parse Config file : %v\", err)\n\t\t}\n\t}\n\tm := v.GetStringMap(\"agent\")\n\n\tif level := os.Getenv(\"LOG_LEVEL\"); level != \"\" {\n\t\tm[\"loglevel\"] = level\n\t}\n\n\tif EnvUUID := os.Getenv(\"UUID\"); EnvUUID != \"\" {\n\t\tm[\"uuid\"] = EnvUUID\n\t}\n\tif env := os.Getenv(\"ENV\"); env != \"\" {\n\t\tm[\"env\"] = env\n\t}\n\n\tif pwd := os.Getenv(\"PASSWORD\"); pwd != \"\" {\n\t\tm[\"password\"] = pwd\n\t}\n\tif port := v.GetInt(\"agent.healthport\"); port != 0 {\n\t\tm[\"healthport\"] = port\n\t} else {\n\t\tm[\"healthport\"] = 8080\n\t}\n\n\thostname, err := os.Hostname()\n\tif err != nil {\n\t\treturn v, fmt.Errorf(\"unable to get hostname : %v\", err)\n\t}\n\tm[\"hostname\"] = hostname\n\tm[\"version\"] = version + \".\" + githash\n\tm[\"date\"] = date\n\n\tu1, ok := m[\"uuid\"]\n\tif ok {\n\t\tif _, err = uuid.Parse(u1.(string)); err != nil {\n\t\t\treturn v, fmt.Errorf(\"unable to Parse uuid : %v\", err)\n\t\t}\n\t} else {\n\t\tm[\"uuid\"] = uuid.New()\n\t}\n\n\tv.Set(\"agent\", m)\n\n\tc := v.GetStringMap(\"controller\")\n\tif ctrlWorker := os.Getenv(\"CTRL_WORKER\"); ctrlWorker != \"\" {\n\t\tc[\"worker\"], err = strconv.Atoi(ctrlWorker)\n\t\tif err != nil {\n\t\t\tc[\"worker\"] = 1\n\t\t}\n\t}\n\n\tv.Set(\"controller\", c)\n\n\treturn v, nil\n}", "func (b *Builder) InitConfig(ctx *interpolate.Context) (warnings []string, errors []error) {\n\tvar (\n\t\twarns []string\n\t\terrs []error\n\t)\n\n\twarns, errs = b.config.RemoteFileConfig.Prepare(ctx)\n\twarnings = append(warnings, warns...)\n\terrors = append(errors, errs...)\n\n\twarns, errs = b.config.ImageConfig.Prepare(ctx)\n\twarnings = append(warnings, warns...)\n\terrors = append(errors, errs...)\n\n\twarns, errs = b.config.QemuConfig.Prepare(ctx)\n\twarnings = append(warnings, warns...)\n\terrors = append(errors, errs...)\n\n\treturn warnings, errors\n}", "func Config(watchAPIConfig 
*config.Config) gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tc.Set(\"config\", *watchAPIConfig)\n\t\tc.Next()\n\t}\n}", "func UpdateConfig(authConfig model.AuthConfig) error {\n\tif authConfig.Provider == \"shibbolethconfig\" {\n\t\tauthConfig.ShibbolethConfig.IDPMetadataFilePath = IDPMetadataFile\n\t\tauthConfig.ShibbolethConfig.SPSelfSignedCertFilePath = selfSignedCertFile\n\t\tauthConfig.ShibbolethConfig.SPSelfSignedKeyFilePath = selfSignedKeyFile\n\t\tauthConfig.ShibbolethConfig.RancherAPIHost = GetRancherAPIHost()\n\t}\n\n\tnewProvider, err := initProviderWithConfig(&authConfig)\n\tif err != nil {\n\t\tlog.Errorf(\"UpdateConfig: Cannot update the config, error initializing the provider %v\", err)\n\t\treturn err\n\t}\n\t//store the config to db\n\tlog.Infof(\"newProvider %v\", newProvider.GetName())\n\n\tproviderSettings := newProvider.GetSettings()\n\n\tgenObjConfig := make(map[string]map[string]string)\n\tgenObjConfig[newProvider.GetName()] = providerSettings\n\terr = updateSettings(genObjConfig, newProvider.GetProviderSecretSettings(), newProvider.GetName(), authConfig.Enabled)\n\tif err != nil {\n\t\tlog.Errorf(\"UpdateConfig: Error Storing the provider settings %v\", err)\n\t\treturn err\n\t}\n\n\t//add the generic settings\n\tcommonSettings := make(map[string]string)\n\tcommonSettings[accessModeSetting] = authConfig.AccessMode\n\tcommonSettings[userTypeSetting] = newProvider.GetUserType()\n\tcommonSettings[identitySeparatorSetting] = newProvider.GetIdentitySeparator()\n\tcommonSettings[allowedIdentitiesSetting] = getAllowedIDString(authConfig.AllowedIdentities, newProvider.GetIdentitySeparator())\n\tcommonSettings[providerNameSetting] = authConfig.Provider\n\tcommonSettings[providerSetting] = authConfig.Provider\n\tcommonSettings[externalProviderSetting] = \"true\"\n\tcommonSettings[noIdentityLookupSupportedSetting] = strconv.FormatBool(!newProvider.IsIdentityLookupSupported())\n\terr = updateCommonSettings(commonSettings)\n\tif err != nil 
{\n\t\treturn errors.Wrap(err, \"UpdateConfig: Error Storing the common settings\")\n\t}\n\n\t//set the security setting last specifically\n\tcommonSettings = make(map[string]string)\n\tcommonSettings[securitySetting] = strconv.FormatBool(authConfig.Enabled)\n\tcommonSettings[authServiceConfigUpdateTimestamp] = time.Now().String()\n\terr = updateCommonSettings(commonSettings)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"UpdateConfig: Error Storing the provider securitySetting\")\n\t}\n\n\t//switch the in-memory provider\n\tif provider == nil {\n\t\tif authConfig.Provider == \"shibbolethconfig\" {\n\t\t\tSamlServiceProvider = authConfig.ShibbolethConfig.SamlServiceProvider\n\t\t}\n\t\tprovider = newProvider\n\t\tauthConfigInMemory = authConfig\n\t} else {\n\t\t//reload the in-memory provider\n\t\tlog.Infof(\"Calling reload\")\n\t\tskipped, err := Reload(true)\n\t\tfor skipped {\n\t\t\tif err != nil {\n\t\t\t\tlog.Errorf(\"Failed to reload the auth provider from db on updateConfig: %v\", err)\n\t\t\t\treturn err\n\t\t\t}\n\t\t\ttime.Sleep(30 * time.Millisecond)\n\t\t\tskipped, err = Reload(true)\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Failed to reload the auth provider from db on updateConfig: %v\", err)\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func InitConfig() (*Configuration, error) {\n\tConf = new(Configuration)\n\tviper.SetConfigName(\"config\")\n\tviper.SetConfigType(\"yaml\")\n\tviper.AddConfigPath(\".\")\n\terr := viper.ReadInConfig()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"fatal error config file: %w\", err))\n\t}\n\terr = viper.Unmarshal(Conf)\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"fatal error config unmarshal: %w\", err))\n\t}\n\treturn Conf, nil\n}", "func (s *HoursOfOperation) SetConfig(v []*HoursOfOperationConfig) *HoursOfOperation {\n\ts.Config = v\n\treturn s\n}", "func (c *Config) Init() {\n\tif c.DNSNames == nil {\n\t\tc.DNSNames = []string{\"localhost\", \"127.0.0.1\", \"::1\"}\n\t}\n\tif c.TLS == nil {\n\t\tc.TLS = 
&DefaultTLSOptions\n\t}\n\tif c.AuthorityConfig == nil {\n\t\tc.AuthorityConfig = &AuthConfig{}\n\t}\n\tif c.CommonName == \"\" {\n\t\tc.CommonName = \"Step Online CA\"\n\t}\n\tif c.CRL != nil && c.CRL.Enabled && c.CRL.CacheDuration == nil {\n\t\tc.CRL.CacheDuration = DefaultCRLCacheDuration\n\t}\n\tc.AuthorityConfig.init()\n}", "func (in *ActionVpsConfigCreateInput) SetConfig(value string) *ActionVpsConfigCreateInput {\n\tin.Config = value\n\n\tif in._selectedParameters == nil {\n\t\tin._selectedParameters = make(map[string]interface{})\n\t}\n\n\tin._selectedParameters[\"Config\"] = nil\n\treturn in\n}", "func (o *FormField) SetConfig(v FormConfig) {\n\to.Config = &v\n}", "func (i *TiFlashInstance) InitConfig(\n\tctx context.Context,\n\te ctxt.Executor,\n\tclusterName,\n\tclusterVersion,\n\tdeployUser string,\n\tpaths meta.DirPaths,\n) error {\n\ttopo := i.topo.(*Specification)\n\tif err := i.BaseInstance.InitConfig(ctx, e, topo.GlobalOptions, deployUser, paths); err != nil {\n\t\treturn err\n\t}\n\tspec := i.InstanceSpec.(*TiFlashSpec)\n\n\tcfg := &scripts.TiFlashScript{\n\t\tRequiredCPUFlags: getTiFlashRequiredCPUFlagsWithVersion(clusterVersion, spec.Arch),\n\n\t\tDeployDir: paths.Deploy,\n\t\tLogDir: paths.Log,\n\n\t\tNumaNode: spec.NumaCores,\n\t\tNumaCores: spec.NumaCores,\n\t}\n\n\tfp := filepath.Join(paths.Cache, fmt.Sprintf(\"run_tiflash_%s_%d.sh\", i.GetHost(), i.GetPort()))\n\tif err := cfg.ConfigToFile(fp); err != nil {\n\t\treturn err\n\t}\n\tdst := filepath.Join(paths.Deploy, \"scripts\", \"run_tiflash.sh\")\n\n\tif err := e.Transfer(ctx, fp, dst, false, 0, false); err != nil {\n\t\treturn err\n\t}\n\n\tif _, _, err := e.Execute(ctx, \"chmod +x \"+dst, false); err != nil {\n\t\treturn err\n\t}\n\n\tconf, err := i.InitTiFlashLearnerConfig(ctx, clusterVersion, topo.ServerConfigs.TiFlashLearner, paths)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// merge config files for imported instance\n\tif i.IsImported() {\n\t\tconfigPath := 
ClusterPath(\n\t\t\tclusterName,\n\t\t\tAnsibleImportedConfigPath,\n\t\t\tfmt.Sprintf(\n\t\t\t\t\"%s-learner-%s-%d.toml\",\n\t\t\t\ti.ComponentName(),\n\t\t\t\ti.GetHost(),\n\t\t\t\ti.GetPort(),\n\t\t\t),\n\t\t)\n\t\timportConfig, err := os.ReadFile(configPath)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tconf, err = mergeImported(importConfig, conf)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\terr = i.mergeTiFlashLearnerServerConfig(ctx, e, conf, spec.LearnerConfig, paths)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Init the configuration using cfg and server_configs\n\tif conf, err = i.initTiFlashConfig(ctx, clusterVersion, topo.ServerConfigs.TiFlash, paths); err != nil {\n\t\treturn err\n\t}\n\n\t// merge config files for imported instance\n\tif i.IsImported() {\n\t\tconfigPath := ClusterPath(\n\t\t\tclusterName,\n\t\t\tAnsibleImportedConfigPath,\n\t\t\tfmt.Sprintf(\n\t\t\t\t\"%s-%s-%d.toml\",\n\t\t\t\ti.ComponentName(),\n\t\t\t\ti.GetHost(),\n\t\t\t\ti.GetPort(),\n\t\t\t),\n\t\t)\n\t\timportConfig, err := os.ReadFile(configPath)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// TODO: maybe we also need to check the imported config?\n\t\t// if _, err = checkTiFlashStorageConfigWithVersion(clusterVersion, importConfig); err != nil {\n\t\t// \treturn err\n\t\t// }\n\t\tconf, err = mergeImported(importConfig, conf)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// Check the configuration of instance level\n\tif conf, err = i.mergeTiFlashInstanceConfig(clusterVersion, conf, spec.Config); err != nil {\n\t\treturn err\n\t}\n\n\treturn i.MergeServerConfig(ctx, e, conf, nil, paths)\n}", "func NewConfig() *Config {\n\tcfg := &Config{}\n\tcfg.FlagSet = flag.NewFlagSet(\"syncer\", flag.ContinueOnError)\n\tfs := cfg.FlagSet\n\n\tfs.BoolVar(&cfg.printVersion, \"V\", false, \"prints version and exit\")\n\tfs.StringVar(&cfg.configFile, \"config\", \"\", \"path to config file\")\n\tfs.IntVar(&cfg.ServerID, \"server-id\", 101, \"MySQL slave 
server ID\")\n\tfs.IntVar(&cfg.WorkerCount, \"c\", 16, \"parallel worker count\")\n\tfs.IntVar(&cfg.Batch, \"b\", 10, \"batch commit count\")\n\tfs.StringVar(&cfg.StatusAddr, \"status-addr\", \"\", \"status addr\")\n\tfs.StringVar(&cfg.Meta, \"meta\", \"syncer.meta\", \"syncer meta info\")\n\tfs.StringVar(&cfg.LogLevel, \"L\", \"info\", \"log level: debug, info, warn, error, fatal\")\n\tfs.StringVar(&cfg.LogFile, \"log-file\", \"\", \"log file path\")\n\tfs.StringVar(&cfg.LogRotate, \"log-rotate\", \"day\", \"log file rotate type, hour/day\")\n\tfs.BoolVar(&cfg.EnableGTID, \"enable-gtid\", false, \"enable gtid mode\")\n\n\treturn cfg\n}", "func NewConfig() *Config {\n\treturn &Config{\n\t\tEnable: true,\n\t\tAddress: \"0.0.0.0:30003\",\n\t}\n}", "func InitConfig(t string, klogmax int, ktaskmax int, ratelimit int) {\n\tif t != \"\" {\n\t\ttarget = t\n\t}\n\n\ttempRemote, err := url.Parse(target)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tremote = tempRemote\n\tmaxCharsKrakenLog = klogmax\n\tmaxCharsFailedTask = ktaskmax\n\trateLimit = ratelimit\n}" ]
[ "0.6838962", "0.6838962", "0.6446589", "0.6371859", "0.6350033", "0.6323569", "0.6291822", "0.6218653", "0.62132025", "0.6208128", "0.62070537", "0.6196594", "0.61428446", "0.61320055", "0.6111926", "0.6038208", "0.6037457", "0.60338557", "0.60234916", "0.60036546", "0.5998846", "0.5998775", "0.59695446", "0.5942464", "0.59391695", "0.59384537", "0.5922398", "0.59054226", "0.58956903", "0.58882326", "0.5886951", "0.58833414", "0.5867643", "0.585352", "0.5848057", "0.5843414", "0.5834003", "0.5805807", "0.5805168", "0.58000475", "0.5787482", "0.57749444", "0.5773259", "0.57646567", "0.5760771", "0.5754151", "0.57405275", "0.5734031", "0.5712156", "0.5697148", "0.5690633", "0.5686755", "0.56839186", "0.5681655", "0.56805", "0.5670422", "0.5663236", "0.5661461", "0.5645122", "0.5642701", "0.5641641", "0.5638775", "0.56368333", "0.5629165", "0.56289905", "0.56289905", "0.56197244", "0.56148714", "0.5609531", "0.56078833", "0.5607526", "0.56046796", "0.55968654", "0.556675", "0.55576426", "0.55494326", "0.55416757", "0.5532214", "0.5518914", "0.55181605", "0.55149686", "0.5510047", "0.5499069", "0.5495477", "0.5495371", "0.54866683", "0.54827195", "0.54824615", "0.54758847", "0.5473626", "0.5471187", "0.5463213", "0.5463122", "0.545442", "0.543081", "0.542712", "0.542231", "0.54212713", "0.54160815", "0.54038376" ]
0.77980506
0