use std::collections::HashMap;

pub fn can_reorder_doubled(arr: Vec<i32>) -> bool {
    let mut table = HashMap::new();
    arr.into_iter().for_each(|v| {
        let entry = table.entry(v).or_insert(0);
        *entry += 1
    });
    let mut keys = table.keys().cloned().collect::<Vec<_>>();
    // Visit negatives from closest-to-zero downwards, then positives ascending,
    // so every key is processed before its double.
    keys.sort_by(|a, b| match (*a < 0, *b < 0) {
        (true, true) => b.cmp(a),
        (true, false) => std::cmp::Ordering::Less,
        (false, true) => std::cmp::Ordering::Greater,
        _ => a.cmp(b),
    });
    for k in keys {
        if *table.get(&k).unwrap() == 0 {
            continue;
        }
        let temp_k = k * 2;
        if let Some(v) = table.get(&temp_k) {
            let new_v = v - *table.get(&k).unwrap();
            if new_v < 0 {
                return false;
            }
            table.insert(temp_k, new_v);
        } else {
            return false;
        }
    }
    true
}

fn main() {
    assert!(!can_reorder_doubled(vec![3, 1, 3, 6]));
    assert!(!can_reorder_doubled(vec![2, 1, 2, 6]));
    assert!(can_reorder_doubled(vec![4, -2, 2, -4]));
    assert!(!can_reorder_doubled(vec![1, 2, 4, 16, 8, 4]));
    assert!(!can_reorder_doubled(vec![-5, -2]));
}
class QueueModel {
  String title, url, album, artist, id, lyrics;

  QueueModel(
      {this.title, this.url, this.album, this.artist, this.id, this.lyrics});

  Map<String, dynamic> toMap() {
    return {
      'title': title,
      'url': url,
      'album': album,
      'artist': artist,
      'lyrics': lyrics,
      'id': id,
    };
  }
}
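A fromMap factory is the usual counterpart to toMap() when rehydrating the model from storage; it is not part of the original class, so this is only a sketch of what one might add inside QueueModel, assuming the same key names:

  // Hypothetical factory, to be placed inside QueueModel; assumes the map
  // uses the same keys that toMap() writes.
  factory QueueModel.fromMap(Map<String, dynamic> map) {
    return QueueModel(
      title: map['title'],
      url: map['url'],
      album: map['album'],
      artist: map['artist'],
      id: map['id'],
      lyrics: map['lyrics'],
    );
  }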
# frozen_string_literal: true

module PageObjects
  module Support
    class UserShow < PageObjects::Base
      set_url "/support/users/{id}"

      sections :provider_rows, PageObjects::Sections::Provider, ".qa-provider_row"
    end
  end
end
# pylint: disable=no-self-use
"""
Module that deals with all logic related to consent forms
"""
import os
import random
import traceback
import datetime

from flask import request
from flask import current_app
from flask_jwt_extended import jwt_required
from flask_restful import Resource

from api.endpoints.constants import ANSWERS, COLUMNS_RESULTS
from api.models import ParticipantAnswer, Question, ParticipantInformationType, QuestionType, \
    QuestionChoice, Participant, Version, VersionRandom
from api.models.helpers import commit_db_session
from api.endpoints.quiz_factory import QuizFactory
import api.endpoints.validation as valid


class QuizAnswers(Resource):
    """Resource that deals with saving answers into database"""

    @jwt_required
    def post(self):
        """
        On a post request on the /answers endpoint we add the quiz answers
        :return: If the request is valid, a 201 CREATED status code,
        otherwise a 400 code
        """
        validators = {
            "data": valid.validate_answers,
            "id": valid.validate_int,
            "version": valid.validate_string
        }
        data = valid.validate(valid.read_form_data(request), validators)
        if not data:
            return ANSWERS[400], 400

        participant = Participant.query.filter_by(id=data['id']).first()
        participant.quiz_version = Version[data["version"]]
        # Iterate through every answer and insert demographics ones
        # into Participant, and the rest into ParticipantAnswer
        for answer in data['data']:
            q_type = Question.query.filter_by(
                id=answer["question_id"]).first().q_type
            i_type = Question.query.filter_by(
                id=answer["question_id"]).first().information
            if q_type == QuestionType.mc_single_answer \
                    and i_type == ParticipantInformationType.age:
                age_string = QuestionChoice.query.filter_by(
                    choice_num=answer['answers'],
                    question_id=answer["question_id"]).first().text
                if age_string != "Anders":
                    participant.age = int(age_string)
            elif q_type == QuestionType.mc_single_answer \
                    and i_type == ParticipantInformationType.gender:
                gender = QuestionChoice.query.filter_by(
                    choice_num=answer['answers'],
                    question_id=answer["question_id"]).first().text
                participant.gender = gender
                # NOTE: multiple-choice data does not just end up somewhere by itself!
            elif q_type == QuestionType.mc_multiple_answer \
                    and i_type == ParticipantInformationType.ethnicity:
                ethnicities = []
                for choice_num in answer['answers']:
                    eth = QuestionChoice.query.filter_by(
                        choice_num=choice_num,
                        question_id=answer["question_id"]).first().text
                    ethnicities.append(eth)
                participant.ethnicity = ethnicities
            elif q_type == QuestionType.mc_multiple_answer \
                    and i_type == ParticipantInformationType.experience:
                experiences = []
                for choice_num in answer['answers']:
                    exp = QuestionChoice.query.filter_by(
                        choice_num=choice_num,
                        question_id=answer["question_id"]).first().text
                    experiences.append(exp)
                participant.experience = experiences
            elif q_type == QuestionType.mc_multiple_answer \
                    and i_type == ParticipantInformationType.familiar:
                familiars = []
                for choice_num in answer['answers']:
                    fam = QuestionChoice.query.filter_by(
                        choice_num=choice_num,
                        question_id=answer["question_id"]).first().text
                    familiars.append(fam)
                participant.familiar = familiars
            elif i_type == ParticipantInformationType.researcher_notes:
                participant.researcher_notes = answer['open_answer']
            else:
                ParticipantAnswer.create_participant_answer(
                    p_id=answer["participant_id"],
                    q_id=answer["question_id"],
                    img_link=answer["img_id"] if "img_id" in answer else None,
                    # for likert
                    answers=answer["answers"] if "answers" in answer else None,
                    open_answer=answer["open_answer"]
                    if "open_answer" in answer else None,
                    r_time=answer["response_time"]
                    if 'response_time' in answer else None,
                    before_video=answer["before_video"],
                    timestamp=answer["timestamp"]
                    if 'timestamp' in answer else None)
        commit_db_session()
        return ANSWERS[201], 201


class QuizQuestions(Resource):
    """Resource that deals with retrieving scenario from database"""

    @jwt_required
    def get(self):
        """
        On a get request on the /quiz endpoint we return a quiz with questions
        :return: quiz and status 200
        """
        version = request.args.get("version")
        try:
            filename = os.path.join(current_app.static_folder,
                                    "IATs/{}.json".format(Version[version].value))
            return QuizFactory(filename).create_collection_quiz(), 200
        except Exception:
            traceback.print_exc()
            return ANSWERS[404], 404


class DemoQuiz(Resource):
    """Resource that returns a quiz for the demo app"""

    @jwt_required
    def get(self):
        """
        On a get request on the /demo endpoint we return a demo quiz with questions
        :return: demo quiz and status 200
        """
        try:
            filename = os.path.join(current_app.static_folder,
                                    "IATs/{}.json".format("demo"))
            return QuizFactory(filename).create_demo_quiz(), 200
        except Exception:
            traceback.print_exc()
            return ANSWERS[404], 404


class RandomQuiz(Resource):
    """Resource that deals with retrieving random scenario from database"""

    @jwt_required
    def get(self):
        """
        On a get request on the /random-quiz endpoint we return
        a random quiz with questions
        :return: random quiz and status 200
        """
        scenario_list = random.choice(list(VersionRandom))
        scenario = random.choice(scenario_list)
        try:
            filename = os.path.join(current_app.static_folder,
                                    "IATs/{}.json".format(scenario.value))
            return QuizFactory(filename).create_collection_quiz(), 200
        except Exception:
            return ANSWERS[404], 404


class QuizVersions(Resource):
    """Resource that returns a mapping for the different scenarios"""

    def get(self):
        """
        On a get on the /quiz-versions endpoint we return a version mapping
        :return the version mapping
        """
        ret = dict()
        for enum in Version:
            ret[enum.name] = enum.value
        return ret


class QuizResults(Resource):
    """Resource that deals with retrieving answers from database"""

    @jwt_required
    def get(self):
        """
        On a get request on the /results endpoint we return all the answers stored
        :return: If the request is valid, a JSON object with the answers and code 200
        """
        columns = COLUMNS_RESULTS
        data = []
        for answer in ParticipantAnswer.query.all():
            array = []
            participant = Participant.query.filter_by(id=answer.participant_id).first()
            array.append(participant.first_name + " " + participant.last_name)
            array.append(str(answer.question_id))
            array.append(str(answer.question.q_type.name))
            array.append(str(answer.question.text))
            if answer.question.q_type == QuestionType.open_question:
                array.append(str(answer.open_question_answer))
            else:
                array.append(str(answer.answers))
            array.append(answer.img_link)
            array.append(answer.response_time)
            array.append(answer.before_video)
            array.append(answer.timestamp)
            data.append(array)
        return {
            "columns": columns,
            "data": data
        }, 200
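For reference, a client-side sketch of what a POST to /answers might look like, with field names taken from the handler above; the exact encoding depends on api.endpoints.validation.read_form_data, which is not shown here, so the JSON body, URL, and token are illustrative assumptions:

    import requests  # hypothetical client script, not part of the module above

    payload = {
        "id": 1,          # participant id
        "version": "A",  # must be a member name of the Version enum
        "data": [{
            "participant_id": 1,
            "question_id": 42,
            "answers": [2],
            "before_video": True,
            "response_time": 1350,
        }],
    }
    requests.post(
        "https://example.com/answers",
        json=payload,
        headers={"Authorization": "Bearer <jwt>"},
    )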
module Terrafile
  class Dependency
    def initialize(name:, source:, version:)
      @name = name
      @source = source
      @version = version
    end

    attr_reader :name, :source, :version

    def self.build_from_terrafile
      (YAML.safe_load(File.read(TERRAFILE_PATH)) || []).map do |module_name, details|
        new(
          name: module_name,
          version: details['version'],
          source: details['source']
        )
      end
    end

    def fetch
      return Helper.clone(source, name) unless Helper.dir_exists?(name)

      Dir.chdir(name) do
        Helper.pull_repo unless Helper.repo_up_to_date?(version)
      end
    end

    def checkout
      Dir.chdir(name) do
        Helper.run!("git checkout #{version} 1> /dev/null")
      end
    rescue Error => error
      raise unless error.message.match?(/reference is not a tree/)

      Kernel.puts "[*] WARN: #{error} ." \
                  "The 'version' should be the branch name or tag, rather than the SHA."
    end
  end
end
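build_from_terrafile iterates name/details pairs, so the Terrafile it reads is a YAML mapping along these lines (module name and source URL are illustrative, only the 'source' and 'version' keys are assumed by the code above):

  vpc:
    source: "git@github.com:example-org/terraform-vpc.git"
    version: "v1.2.0"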
pluginManagement {
    repositories {
        mavenLocal()
        mavenCentral()
        gradlePluginPortal()
    }
}

plugins {
    id("de.fayard.refreshVersions") version "0.10.1"
    id("com.gradle.enterprise") version "3.6.3"
}

rootProject.name = "sandbox"

includeBuild("../")
include(":node", ":browser", ":both", ":mpp")
require 'test_helper'

class Api::ProxyConfigsTest < ActionDispatch::IntegrationTest
  def setup
    @provider = FactoryBot.create(:provider_account)
    login_provider @provider
    host! @provider.admin_domain
  end

  def test_index
    service = FactoryBot.create(:simple_service, account: @provider)
    service.service_tokens.create!(value: 'token')
    p_config = FactoryBot.create(:proxy_config, proxy: service.proxy, environment: 'production')
    s_config = FactoryBot.create(:proxy_config, proxy: service.proxy, environment: 'sandbox')

    get admin_service_proxy_configs_path(service_id: service, environment: 'production')
    assert_equal [p_config.id], assigns['proxy_configs'].map(&:id)

    get admin_service_proxy_configs_path(service_id: service, environment: 'sandbox')
    assert_equal [s_config.id], assigns['proxy_configs'].map(&:id)
  end
end
#!/bin/bash
# Runs the Hubot butler bot, using 'symphony' adapter

BUILD_FOLDER=./butler-build
BOT_NAME=$1

cd "$BUILD_FOLDER" || exit 1
. ./env.sh
./bin/hubot -a symphony --name "$BOT_NAME"
#!/bin/sh
#reveal-md slide_reveal.md -w
reveal-md slide.md --static .
<?php
namespace MonkeyLearn;

use MonkeyLearn\Config;
use MonkeyLearn\MonkeyLearnException;

class HandleErrors
{
    static function check_batch_limits($data, $batch_size)
    {
        if ($batch_size > Config::MAX_BATCH_SIZE || $batch_size < Config::MIN_BATCH_SIZE) {
            // Class constants are not interpolated inside double-quoted strings,
            // so the message is built with concatenation.
            throw new MonkeyLearnException(
                "batch_size has to be between " . Config::MIN_BATCH_SIZE .
                " and " . Config::MAX_BATCH_SIZE
            );
        }
        if (!$data) {
            throw new MonkeyLearnException(
                "The data can't be empty."
            );
        }
    }
}
?>
import { Spin } from "antd";
import React, { useEffect } from "react";
import { useDispatch } from "react-redux";
import { useLocation, useParams } from "react-router";
import { FilterArea, ProductList } from "../../components";
import { MainLayout } from "../../layouts";
import { useSelector } from "../../redux/hooks";
import { getProductSearch } from "../../redux/productSearch/slice";
import styles from "./SearchPage.module.css";

interface SearchPageProps {}

export const SearchPage: React.FC<SearchPageProps> = () => {
  const { keywords } = useParams<"keywords">();
  console.log("keywords", keywords);

  const loading = useSelector((state) => state.productSearch.loading);
  const productList = useSelector((state) => state.productSearch.data);
  const pagination = useSelector((state) => state.productSearch.pagination);
  const error = useSelector((state) => state.productSearch.error);

  const location = useLocation();
  const dispatch = useDispatch();

  useEffect(() => {
    dispatch(getProductSearch({ keywords, nextPage: 1, pageSize: 5 }));
    // Watching location means we re-fetch whenever the page URL changes
    // eslint-disable-next-line
  }, [location]);

  const onPageChange = (nextPage: number, pageSize: number) => {
    dispatch(getProductSearch({ keywords, nextPage, pageSize }));
  };

  if (loading || !productList) {
    return (
      <Spin
        size={"large"}
        style={{
          marginTop: 200,
          marginBottom: 200,
          marginLeft: "auto",
          marginRight: "auto",
          width: "100%",
        }}
      />
    );
  }

  if (error) {
    return <div>网站出错了: {error}</div>;
  }

  return (
    <MainLayout>
      {/* category filter */}
      <div className={styles["product-list-container"]}>
        <FilterArea />
      </div>
      {/* product list */}
      <div className={styles["product-list-container"]}>
        <ProductList
          data={productList}
          paging={pagination}
          onPageChange={onPageChange}
        />
      </div>
    </MainLayout>
  );
};
package health

type HealthState string

// String representations of the canonical health states
var (
	Critical = HealthState("critical")
	Unknown  = HealthState("unknown")
	Warning  = HealthState("warning")
	Passing  = HealthState("passing")
)

// Integer enum representations of the canonical health states. These are not guaranteed
// to be consistent across versions. Only externalize the strings! The enum value is used
// to order HealthStates.
const (
	criticalInt = iota
	unknownInt
	warningInt
	passingInt
)

// ToHealthState converts a string to its corresponding HealthState value. Unrecognized
// values become Unknown.
func ToHealthState(str string) HealthState {
	switch s := HealthState(str); s {
	case Critical, Unknown, Warning, Passing:
		return s
	default:
		return Unknown
	}
}

// Int converts a HealthState to an enum representation suitable for comparisons.
func (s HealthState) Int() int {
	switch s {
	case Critical:
		return criticalInt
	case Unknown:
		return unknownInt
	case Warning:
		return warningInt
	case Passing:
		return passingInt
	default:
		return criticalInt
	}
}

// Compare two HealthStates. Return 0 if equal, a value less than 0 if a < b and a value
// greater than 0 if a > b. The ordering is Passing > Warning > Unknown > Critical.
func Compare(a, b HealthState) int {
	return a.Int() - b.Int()
}
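A minimal sketch of how the ordering behaves from a caller's point of view; the import path is a placeholder, everything else uses only the exported API above:

	package main

	import (
		"fmt"

		"example.com/health" // hypothetical import path for the package above
	)

	func main() {
		// Passing > Warning > Unknown > Critical, so this prints a positive number.
		fmt.Println(health.Compare(health.Passing, health.Critical))
		// Unrecognized strings degrade to Unknown.
		fmt.Println(health.ToHealthState("bogus") == health.Unknown) // true
	}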
package com.coursework.velotracker.ViewModels

import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.ViewModel
import com.coursework.velotracker.BL.Model.Training.ParcelableTraining

class SharedViewModel : ViewModel() {

    var parcelableTraining: MutableLiveData<ParcelableTraining> = MutableLiveData<ParcelableTraining>()

    fun sendMessage(parcelableTraining: ParcelableTraining) {
        this.parcelableTraining.value = parcelableTraining
    }
}
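The receiving side of this pattern typically observes the LiveData from a fragment that shares the activity's ViewModel scope. A sketch, assuming the androidx fragment-ktx 'activityViewModels' delegate; the fragment class itself is hypothetical:

    import android.os.Bundle
    import android.view.View
    import androidx.fragment.app.Fragment
    import androidx.fragment.app.activityViewModels

    class TrainingFragment : Fragment() {
        private val sharedViewModel: SharedViewModel by activityViewModels()

        override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
            super.onViewCreated(view, savedInstanceState)
            sharedViewModel.parcelableTraining.observe(viewLifecycleOwner) { training ->
                // react to the ParcelableTraining posted via sendMessage()
            }
        }
    }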
module tensor_module
    use, intrinsic :: iso_fortran_env, only : int64
    use :: data_storage_module, only : data_storage
    implicit none
    private
    public :: tensor

    type, abstract :: tensor
        class(data_storage), allocatable :: storage
        integer :: datatype, rank
        integer(int64) :: number_of_elements
        integer(int64), dimension(:), allocatable :: dims
    contains
        procedure :: setup => setup
        procedure :: get_datatype => get_datatype
        procedure :: get_number_of_elements => get_number_of_elements
        procedure :: get_dims => get_dims
        procedure :: get_rank => get_rank
        procedure :: set_dims_and_size => set_dims_and_size
        procedure :: is_allocated => is_allocated
        procedure :: release => release
        procedure :: cleanup => cleanup
        procedure :: clear => clear
    end type tensor

contains

    subroutine setup(this, storage, datatype, dims)
        class(tensor), intent(inout) :: this
        class(data_storage), intent(in), optional :: storage
        integer, intent(in), optional :: datatype
        integer(int64), dimension(:), intent(in), optional :: dims

        if ( present(storage) ) allocate(this%storage, source=storage)
        if ( present(datatype) ) this%datatype = datatype
        if ( present(dims) ) call this%set_dims_and_size(dims)
    end subroutine setup

    subroutine set_dims_and_size(this, dims)
        class(tensor), intent(inout) :: this
        integer(int64), dimension(:), intent(in) :: dims

        this%rank = size(dims)
        this%dims = dims
        this%number_of_elements = product(dims)
        if (this%rank == 0) this%number_of_elements = 1
    end subroutine set_dims_and_size

    pure logical function is_allocated(this)
        class(tensor), intent(in) :: this

        is_allocated = allocated(this%storage)
    end function is_allocated

    pure integer function get_datatype(this)
        class(tensor), intent(in) :: this

        get_datatype = this%datatype
    end function get_datatype

    function get_dims(this) result(dims)
        class(tensor), intent(in) :: this
        integer(int64), dimension(:), allocatable :: dims

        if ( allocated(this%dims) ) then
            dims = this%dims
        else
            error stop "tensor::get_dims:Not allocated."
        end if
    end function get_dims

    pure integer function get_rank(this)
        class(tensor), intent(in) :: this

        get_rank = this%rank
    end function get_rank

    integer(int64) function get_number_of_elements(this)
        class(tensor), intent(in) :: this

        get_number_of_elements = this%number_of_elements
    end function get_number_of_elements

    subroutine release(this)
        class(tensor), intent(inout) :: this

        if ( allocated(this%storage) ) deallocate(this%storage)
        if ( allocated(this%dims) ) deallocate(this%dims)
        call this%clear()
    end subroutine release

    subroutine cleanup(this)
        class(tensor), intent(inout) :: this

        if ( allocated(this%storage) ) call this%storage%deallocate_data()
        call this%release()
    end subroutine cleanup

    subroutine clear(this)
        class(tensor), intent(inout) :: this

        this%datatype = 0
        this%number_of_elements = 0
        this%rank = 0
    end subroutine clear

end module tensor_module
package de.twometer.neko.util

import org.lwjgl.glfw.GLFW.glfwGetTime

class Timer(tps: Int) {

    private val delay: Double = 1.0 / tps
    private var lastReset = 0.0
    private var lastFrame = 0.0

    val tickProgress: Double
        get() = 1.0 - ((lastReset + delay - glfwGetTime()) / delay)

    val elapsed: Boolean
        get() = glfwGetTime() - lastReset > delay

    var deltaTime = 0.0
        private set

    var fps = 0.0
        private set

    private var lastFpsReset = 0.0
    private var frameTimeAccum = 0.0
    private var frames = 0.0

    fun reset() {
        lastReset = glfwGetTime()
        val timeSinceFpsReset = glfwGetTime() - lastFpsReset
        if (timeSinceFpsReset > 1) {
            fps = timeSinceFpsReset / (frameTimeAccum / frames)
            frames = 0.0
            frameTimeAccum = 0.0
            lastFpsReset = glfwGetTime()
        }
    }

    fun onFrame() {
        val now = glfwGetTime()
        deltaTime = now - lastFrame
        lastFrame = now
        frameTimeAccum += deltaTime
        frames++
    }
}
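A sketch of the intended fixed-tick game-loop usage; the loop scaffolding ('running', 'updateGameLogic', 'render') is illustrative and not part of the class:

    val timer = Timer(tps = 20) // 20 logic ticks per second

    while (running) {              // 'running' comes from the host game loop
        timer.onFrame()            // track per-frame delta time and FPS
        if (timer.elapsed) {       // at least one tick interval has passed
            updateGameLogic()      // hypothetical fixed-rate update
            timer.reset()
        }
        render(timer.tickProgress) // interpolate between ticks when drawing
    }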
require 'rails_helper'

RSpec.describe PaymentsController, type: :controller do
  let(:c100_application) { instance_double(C100Application) }
  let(:payment_intent) { instance_double(PaymentIntent) }

  describe '#validate' do
    before do
      allow(controller).to receive(:current_c100_application).and_return(c100_application)
    end

    context 'when there is no application in the session' do
      let(:c100_application) { nil }

      it 'raises an exception' do
        expect {
          get :validate, params: { id: 'intent-uuid', nonce: '123456' }
        }.to raise_error(ActiveRecord::RecordNotFound)
      end
    end

    context 'when there is an application in the session but details do not match' do
      it 'raises an exception' do
        expect {
          get :validate, params: { id: 'intent-uuid', nonce: '123456' }
        }.to raise_error(ActiveRecord::RecordNotFound)
      end
    end

    context 'for a request with missing mandatory params' do
      it 'redirects to the invalid session error page' do
        expect {
          get :validate, params: { id: 'intent-uuid' } # nonce param is omitted
        }.to raise_error(ActionController::ParameterMissing)
      end
    end

    context 'for a valid request' do
      before do
        allow(
          PaymentIntent
        ).to receive(:find_by!).with(
          id: 'intent-uuid', nonce: '123456', c100_application: c100_application
        ).and_return(payment_intent)

        allow(
          C100App::PaymentsFlowControl
        ).to receive(:new).with(c100_application).and_return(double(next_url: 'https://payments.example.com'))
      end

      it 'invalidates the current URL to avoid reuse and redirects to the payments vendor' do
        expect(payment_intent).to receive(:revoke_nonce!)

        get :validate, params: { id: 'intent-uuid', nonce: '123456' }
        expect(response).to redirect_to('https://payments.example.com')
      end
    end
  end
end
use crate::boid::Boid;
use crate::math::Vector2D;
use crate::settings::Settings;
use gloo::timers::callback::Interval;
use yew::{html, Component, Context, Html, Properties};

pub const SIZE: Vector2D = Vector2D::new(1600.0, 1000.0);

#[derive(Debug)]
pub enum Msg {
    Tick,
}

#[derive(Clone, Debug, PartialEq, Properties)]
pub struct Props {
    pub settings: Settings,
    #[prop_or_default]
    pub generation: usize,
    #[prop_or_default]
    pub paused: bool,
}

#[derive(Debug)]
pub struct Simulation {
    boids: Vec<Boid>,
    interval: Interval,
}

impl Component for Simulation {
    type Message = Msg;
    type Properties = Props;

    fn create(ctx: &Context<Self>) -> Self {
        let settings = &ctx.props().settings;
        let boids = (0..settings.boids)
            .map(|_| Boid::new_random(settings))
            .collect();

        let interval = {
            let link = ctx.link().clone();
            Interval::new(settings.tick_interval_ms as u32, move || {
                link.send_message(Msg::Tick)
            })
        };

        Self { boids, interval }
    }

    fn update(&mut self, ctx: &Context<Self>, msg: Self::Message) -> bool {
        match msg {
            Msg::Tick => {
                let Props {
                    ref settings,
                    paused,
                    ..
                } = *ctx.props();

                if paused {
                    false
                } else {
                    Boid::update_all(settings, &mut self.boids);
                    true
                }
            }
        }
    }

    fn changed(&mut self, ctx: &Context<Self>) -> bool {
        self.boids.clear();

        let settings = &ctx.props().settings;
        self.boids
            .resize_with(settings.boids, || Boid::new_random(settings));

        // as soon as the previous task is dropped it is cancelled.
        // We don't need to worry about manually stopping it.
        self.interval = {
            let link = ctx.link().clone();
            Interval::new(settings.tick_interval_ms as u32, move || {
                link.send_message(Msg::Tick)
            })
        };

        true
    }

    fn view(&self, _ctx: &Context<Self>) -> Html {
        let view_box = format!("0 0 {} {}", SIZE.x, SIZE.y);

        html! {
            <svg class="simulation-window" viewBox={view_box}>
                { for self.boids.iter().map(Boid::render) }
            </svg>
        }
    }
}
#pragma once
/**

Utility header for header only cuda vector and cpu vector implementations

*/

#include <cstdio>
#include <cassert>

#include "TracerError.h"

#ifdef METU_CUDA
    #include <cuda.h>
    #include <cuda_runtime.h>

    inline static constexpr void GPUAssert(cudaError_t code, const char *file, int line)
    {
        if(code != cudaSuccess)
        {
            fprintf(stderr, "Cuda Failure: %s %s %d\n",
                    cudaGetErrorString(code), file, line);
            assert(false);
        }
    }

    inline static constexpr void GPUMemoryCheck(cudaError_t code)
    {
        //if(code == cudaErrorMemoryAllocation)
        if(code != cudaSuccess)
        {
            //fprintf(stderr, "Cuda Failure: %s %s %d\n", cudaGetErrorString(code), file, line);
            throw TracerException(TracerError::GPU_OUT_OF_MEMORY,
                                  cudaGetErrorString(code));
        }
    }

    #define CUDA_MEMORY_CHECK(func){GPUMemoryCheck((func));}
#else
    #define __device__
    #define __host__
    typedef int cudaError_t;

    inline static constexpr void GPUAssert(cudaError_t code, const char *file, int line) {}
#endif

#ifdef __CUDA_ARCH__
    // _Pragma requires a single string literal, so a stringify helper is used
    // to build "unroll N" from the count argument (helper added for correctness;
    // a bare "#pragma" cannot appear inside a #define).
    #define METU_PRAGMA(x) _Pragma(#x)
    #define UNROLL_LOOP METU_PRAGMA(unroll)
    #define UNROLL_LOOP_COUNT(count) METU_PRAGMA(unroll count)
#else
    #define UNROLL_LOOP
    #define UNROLL_LOOP_COUNT(count)
#endif

#ifdef METU_DEBUG
    constexpr bool METU_DEBUG_BOOL = true;
    #define CUDA_CHECK(func) GPUAssert((func), __FILE__, __LINE__)
    #define CUDA_KERNEL_CHECK() \
        CUDA_CHECK(cudaDeviceSynchronize()); \
        CUDA_CHECK(cudaGetLastError())
#else
    constexpr bool METU_DEBUG_BOOL = false;
    #define CUDA_CHECK(func) func
    #define CUDA_KERNEL_CHECK()

    //#define CUDA_KERNEL_CHECK() \
    //    CUDA_CHECK(cudaGetLastError())
    //#define CUDA_CHECK(func) GPUAssert((func), __FILE__, __LINE__)
    //#define CUDA_KERNEL_CHECK() \
    //    CUDA_CHECK(cudaDeviceSynchronize()); \
    //    CUDA_CHECK(cudaGetLastError())

    // TODO: Check this from time to time..
    // Ok after kernels i need to put get last error
    // in order to properly synchronize i did not understand this
    // hoping for a driver bug instead of some bug resides in the
    // deep dark parts of the code.
#endif
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author: [email protected]
"""
Enumerations
"""
from enum import Enum

__all__ = ['Action']


class Action(Enum):
    STORE = 'store'
    STORE_TRUE = 'store_true'
    APPEND = 'append'
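The member values mirror argparse action names, so a likely use is passing them as the `action=` argument; a sketch with the Action enum above in scope (the CLI flags are illustrative):

    import argparse

    parser = argparse.ArgumentParser()
    # Action members carry the exact strings argparse expects for `action=`.
    parser.add_argument('--verbose', action=Action.STORE_TRUE.value)
    parser.add_argument('--tag', action=Action.APPEND.value)
    args = parser.parse_args(['--verbose', '--tag', 'a', '--tag', 'b'])
    print(args.verbose, args.tag)  # True ['a', 'b']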
--CREATE TABLE Logs
--( LogId INT IDENTITY PRIMARY KEY,
--  AccountId INT NOT NULL REFERENCES Accounts(Id) ,
--  OldSum MONEY NOT NULL,
--  NewSum MONEY NOT NULL
--)

--CREATE TABLE LogsWithTime
--( LogId INT IDENTITY PRIMARY KEY,
--  AccountId INT NOT NULL REFERENCES Accounts(Id) ,
--  OldSum MONEY NOT NULL,
--  NewSum MONEY NOT NULL,
--  DateOfChange DATETIME
--)

--CREATE OR ALTER TRIGGER tr_OnAccountChangeOnBalance
--ON Accounts FOR UPDATE
--AS
--  INSERT Logs(AccountId, OldSum, NewSum)
--  SELECT i.Id, d.Balance, i.Balance
--    FROM deleted d
--    JOIN inserted i ON i.Id = d.Id
--   WHERE i.Balance != d.Balance
--GO

--UPDATE Accounts SET Balance = Balance + 2222 WHERE Id = 5

--SELECT *
--  FROM Logs

-- NB: the scalar subqueries below assume a single-row UPDATE;
-- the commented-out set-based version above also handles multi-row updates.
CREATE TRIGGER tr_OnAccountChangeOnBalance ON Accounts
FOR UPDATE
AS
    DECLARE @NewSum DECIMAL (36,2) = (SELECT Balance FROM inserted)
    DECLARE @OldSum DECIMAL (36,2) = (SELECT Balance FROM deleted)
    DECLARE @AccountId INT = (SELECT Id FROM inserted)

    INSERT INTO Logs(AccountId, NewSum, OldSum)
    VALUES (@AccountId, @NewSum, @OldSum)
GO

UPDATE Accounts SET Balance += 10 WHERE Id = 1

SELECT * FROM Logs
---
title: I don't see any agreements to manage when I sign into the admin portal
description: A super admin or admin signed into the admin portal but no agreements are shown
ms.topic: include
ms.assetid: e276637d-8a22-4bb2-a574-7ba9442b92f0
author: CaityBuschlen
ms.author: cabuschl
ms.date: 06/02/2021
user.type: admin
tags: agreement
subscription.type: vl, cloud, retail, partner
sap.id: 17a2bf94-0d03-2629-dfd8-e8935f9126ec
ms.openlocfilehash: 4fa8ece98373c4dfec5b52ca424738af3b664ec8
ms.sourcegitcommit: 364e106fcbf4fb6af534e81d8b700901f79f4ec8
ms.translationtype: HT
ms.contentlocale: ko-KR
ms.lasthandoff: 09/26/2021
ms.locfileid: "129318361"
---
## <a name="when-i-sign-into-the-admin-portal-i-dont-see-any-agreements-to-manage"></a>When I sign into the admin portal, I don't see any agreements to manage

Make sure you are signing into the admin portal with the email address you were assigned. If you don't know which email address to use, check your inbox for the super admin or admin welcome email. If you can't find the welcome email, contact your super admin to confirm your sign-in information.

If you have multiple tenants, check the tenant selector in the upper right to make sure you are in the correct tenant. If you have multiple tenants and not all of them have associated agreements, you may see a message that there are no agreements to manage while you are in one of those tenants.

## <a name="expired-agreement"></a>Expired agreement

You may also see this message after an agreement expires, since you can no longer access it. Other active agreements that you manage are not affected.
import tkinter as tk
import tkinter.font
import tkinter.scrolledtext as tkscrolledtext

from functools import partial
from uuid import uuid4


def create_frames(root):
    frames = {
        'config': tk.LabelFrame(root, name='config', text='Configuration'),
        'options': tk.LabelFrame(root, name='options', text='Options'),
        'entries': tk.LabelFrame(root, name='entries', text='Entries'),
        'console': tk.LabelFrame(root, name='console', text='Console'),
        'status': tk.LabelFrame(root, name='status', text='Status'),
    }
    frames['config'].pack(fill='both')
    frames['options'].pack(fill='both')
    frames['entries'].pack(fill='both')
    frames['console'].pack(fill='both', expand='yes')
    frames['status'].pack(fill='both')
    return frames


def create_config(frame, callbacks):
    variables = {}

    input_path_variable = tk.StringVar()
    input_label = tk.Label(frame, text='Input file')
    input_path = tk.Entry(frame, textvariable=input_path_variable)
    browse_button = tk.Button(frame, text='Browse', command=callbacks['on_browse'])
    start_button = tk.Button(frame, text='Start', command=callbacks['on_start'])

    tk.Grid.columnconfigure(frame, 1, weight=1)
    input_label.grid(row=0, column=0)
    input_path.grid(row=0, column=1, sticky='NSEW')
    browse_button.grid(row=0, column=2)
    start_button.grid(row=0, column=3)

    variables['input_path'] = input_path_variable
    variables['input_path_entry'] = input_path
    variables['browse_button'] = browse_button
    variables['start_button'] = start_button
    return variables


def create_entries(frame, options, entries_per_row=5):
    variables = {}
    for i, option in enumerate(options):
        internal_name, display_name = option[:2]
        row = (i // entries_per_row) * 2
        column = i % entries_per_row

        entry_variable = tk.StringVar()
        label = tk.Label(frame, text=display_name)
        entry = tk.Entry(frame, textvariable=entry_variable)
        label.grid(row=row, column=column)
        entry.grid(row=row + 1, column=column, sticky='NSEW')
        variables[f'entry_{internal_name}'] = entry_variable
    return variables


def create_checkboxes(frame, options, entries_per_row=5):
    variables = {}
    for i, option in enumerate(options):
        internal_name, display_name = option[:2]
        row = i // entries_per_row
        column = i % entries_per_row

        variable = tk.BooleanVar()
        button = tk.Checkbutton(frame, variable=variable, text=display_name)
        tk.Grid.columnconfigure(frame, column, weight=1)
        button.grid(row=row, column=column, sticky='NSW')
        variables[f'checkbox_{internal_name}'] = variable
    return variables


def create_console(frame):
    text = tkscrolledtext.ScrolledText(frame, name='text', height=0)
    text.pack(fill='both', expand='yes')
    text.bind('<Key>', lambda e: 'break')  # makes readonly
    return {'console': text}


def create_status(frame):
    variable = tk.StringVar()
    entry = tk.Entry(frame, textvariable=variable)
    entry.bind('<Key>', lambda e: 'break')  # makes readonly
    entry.pack(fill='both')
    return {'status': variable}


def _get_gui_root(configuration):
    font_size = configuration.get('font_size', 10)
    entries_per_row = configuration.get('entries_per_row', 5)
    title = configuration.get('title', str(uuid4()))
    geometry = configuration.get('geometry', '600x400+0+0')

    root = tk.Tk()

    default = tkinter.font.nametofont('TkFixedFont').cget('family')
    font_family = configuration.get('font_family', default)
    font = tkinter.font.Font(family=font_family, size=font_size)
    root.option_add('*Font', font)

    root.title(title)
    root.geometry(geometry)
    if configuration.get('always_on_top', False):
        root.wm_attributes('-topmost', 1)

    frames = create_frames(root)

    variables = {}
    partial_callbacks = {}
    for name, callback in configuration['callbacks'].items():
        partial_callbacks[name] = partial(callback, variables)

    checkboxes = configuration.get('checkboxes', [])
    entries = configuration.get('entries', [])

    variables.update(create_config(frames['config'], partial_callbacks))
    variables.update(create_checkboxes(frames['options'], checkboxes, entries_per_row))
    variables.update(create_entries(frames['entries'], entries, entries_per_row))
    variables.update(create_console(frames['console']))
    variables.update(create_status(frames['status']))

    return root, variables


def get_gui(configuration=None):
    if configuration is None:
        configuration = {}
    root, variables = _get_gui_root(configuration)
    return root, variables
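A minimal sketch of driving get_gui; it assumes only what the module above defines: the mandatory 'callbacks' key with the 'on_browse'/'on_start' names looked up by create_config, and (internal_name, display_name) tuples for entries and checkboxes. Each callback receives the shared variables dict via functools.partial:

    def on_browse(variables):
        variables['input_path'].set('/tmp/input.txt')

    def on_start(variables):
        variables['status'].set('started')

    config = {
        'title': 'Demo',
        'callbacks': {'on_browse': on_browse, 'on_start': on_start},
        'entries': [('count', 'Count')],          # (internal_name, display_name)
        'checkboxes': [('dry_run', 'Dry run')],
    }
    root, variables = get_gui(config)
    root.mainloop()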
let baseStr = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ+/';
let valCharMap = {}
let charValMap = {}
baseStr.split('').forEach((item, idx) => {
    valCharMap[idx] = item
    charValMap[item] = idx
})

function decimalToNScale(n) {
    if (n > 64) {
        throw new RangeError('bases above 64 are not supported');
    }
    return (num) => {
        if (typeof num !== 'number') {
            throw new TypeError('expected a value of type number');
        }
        // sign
        let isPositiveNum = num > 0 ? true : false;
        // integer part
        let leftNum = parseInt(Math.abs(num));
        // fractional part
        let rightNum = `${num}`.split('.')[1] || 0;
        let concatNum = [
            ...decimalToNScaleByInt(n, leftNum),
            ...decimalToNScaleByDouble(n, `0.${rightNum}` * 1)
        ]
        return isPositiveNum ? concatNum.join('') : ['-', ...concatNum].join('')
    }
}

function decimalToNScaleByInt(n, intNum) {
    if (n > intNum) return [`${valCharMap[intNum]}`];
    let stack = [];
    let flag;
    while (intNum / n !== 0) {
        flag = intNum % n
        stack.unshift(valCharMap[flag])
        intNum = (intNum - flag) / n
    }
    return stack
}

function decimalToNScaleByDouble(n, smallNum) {
    if (smallNum === 0) return [];
    let stack = ['.'];
    let flag;
    while (smallNum !== 0) {
        smallNum = n * smallNum;
        flag = parseInt(smallNum);
        smallNum = smallNum - flag;
        stack.push(valCharMap[flag])
    }
    flag = null;
    return stack
}

function nScaleToDecimalByInt(n, leftNum) {
    let arr = leftNum.reverse();
    let len = leftNum.length;
    let ret = 0;
    for (let i = 0; i < len; i++) {
        ret += charValMap[arr[i]] * (n ** i)
    }
    return ret
}

function nScaleToDecimalByDouble(n, rightNum) {
    let len = rightNum.length;
    let ret = 0;
    for (let i = 0; i < len; i++) {
        let char = rightNum[i];
        ret += charValMap[char] * (n ** (-i - 1))
    }
    return ret
}

function nScaleToDecimal(n) {
    if (n > 64) {
        throw new RangeError('bases above 64 are not supported');
    }
    return (str) => {
        let isPositiveNum = str.startsWith('-') ? -1 : 1;
        let nums = str.replace('-', '').split('.');
        let leftNum = nums[0].split('');
        let rightNum = nums[1] ? nums[1].split('') : 0;
        return isPositiveNum * (nScaleToDecimalByInt(n, leftNum) + nScaleToDecimalByDouble(n, rightNum))
    }
}

export {
    decimalToNScale,
    nScaleToDecimal
}
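For example, round-tripping through base 16 (run as an ES module; the file name in the import is a placeholder):

    import { decimalToNScale, nScaleToDecimal } from './scale.js' // hypothetical file name

    const toHex = decimalToNScale(16)
    const fromHex = nScaleToDecimal(16)

    console.log(toHex(255))    // 'ff'
    console.log(toHex(-3.5))   // '-3.8'  (0.5 = 8/16)
    console.log(fromHex('ff')) // 255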
# TODO-List-Day-66

This is a todo list made using only HTML, CSS, and JavaScript.
<?php

/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/

/*Route::get('/', function () {
    return view('shop.index');
});*/

Auth::routes();

// Home page
Route::get('/', 'MainController@index')->name('home');
Route::get('/news', 'MainController@getNews')->name('news');

Route::group(['prefix' => 'dashboard'], function () {
    Route::group(['middleware' => 'auth'], function () {
        Route::get('/', function () {
            return view('dashboard.main');
        })
            ->name('dashboard');
        Route::resource('/news', 'NewsController', array(
            /* 'only' => [ 'index', 'create', 'store' ],*/
            /* 'middleware' => [ 'store' => 'ResizeImage' ]*/
        ));
        Route::resource('/categories', 'CategoriesController');
        Route::resource('/products', 'ProductController');
        Route::get('logout', array(
            'as' => 'logout',
            function () {
                Auth::logout();
                Session::flush();
                return Redirect::route('home');
            }
        ));
    });
});

Route::group(['middleware' => 'auth'], function () {
    Route::get('/profile', 'UserController@getProfile')->name('user.profile');
});

/* Add a product to the cart, where product_id is the product's id in the DB */
Route::get('/add-to-cart/{product_id}', 'CartController@addToCart')->name('product.add');

Route::get('/shopping-cart/', [
    'uses' => 'CartController@getCart',
    'as' => 'product.shoppingCart'
]);

/* Checkout page */
Route::get('/checkout', 'CheckoutController@getCheckout')->name('get.checkout');
Route::post('/checkout', 'CheckoutController@postCheckout')->name('post.checkout');

Route::get('/clear_cart', 'CartController@clearCart')->name('clear.cart');

Route::get('/reduce/{id}', [
    'uses' => 'CartController@getReduceByOne',
    'as' => 'product.reduceByOne'
]);
Route::get('/increase/{id}', [
    'uses' => 'CartController@getIncreaseByOne',
    'as' => 'product.increaseByOne'
]);
Route::get('/remove/{id}', [
    'uses' => 'CartController@getRemoveItem',
    'as' => 'product.remove'
]);

// Categories
Route::get('/category/{alias}', 'CategoriesController@showCategoryByAlias')->name('client.category');
import os

from hashkernel.bakery import CakeRole
from hashstore.bakery.lite import dal
from hashstore.bakery.lite.node import (
    ServerConfigBase, GlueBase, CakeShardBase,
    User, UserType, UserState, Permission,
    Portal, ServerKey, PermissionType as PT)
from hashstore.bakery.lite.node.blobs import BlobStore
from hashstore.utils.db import Dbf
from hashkernel.hashing import shard_name_int, SaltedSha


class CakeStore:
    def __init__(self, store_dir):
        self.store_dir = store_dir
        self._blob_store = None
        self.srvcfg_db = Dbf(
            ServerConfigBase.metadata,
            os.path.join(self.store_dir, 'server.db')
        )
        self.glue_db = Dbf(
            GlueBase.metadata,
            os.path.join(self.store_dir, 'glue.db')
        )
        self.max_shards = None
        self.shards_db = None

    def cake_shard_db(self, cake):
        if self.max_shards is None:
            self.max_shards = self.server_config().num_cake_shards
            self.shards_db = [Dbf(
                CakeShardBase.metadata,
                os.path.join(self.store_dir,
                             'shard_' + shard_name_int(i) + '.db')
            ) for i in range(self.max_shards)]
        db = self.shards_db[cake.shard_num(self.max_shards)]
        if not(db.exists()):
            db.ensure_db()
        return db

    def blob_store(self):
        if self._blob_store is None:
            self._blob_store = BlobStore(
                os.path.join(self.store_dir, 'backend')
            )
        return self._blob_store

    def initdb(self, external_ip, port, num_cake_shards=10):
        if not os.path.exists(self.store_dir):
            os.makedirs(self.store_dir)
        self.srvcfg_db.ensure_db()
        os.chmod(self.srvcfg_db.path, 0o600)
        self.glue_db.ensure_db()
        self.blob_store()
        with self.srvcfg_db.session_scope() as srv_session:
            skey = srv_session.query(ServerKey).one_or_none()
            if skey is None:
                skey = ServerKey()
                skey.num_cake_shards = num_cake_shards
            elif skey.num_cake_shards != num_cake_shards:
                raise ValueError(
                    f'reshard required: '
                    f'{skey.num_cake_shards} != {num_cake_shards}')
            skey.port = port
            skey.external_ip = external_ip
            srv_session.merge(skey)
        with self.glue_db.session_scope() as glue_session:
            make_system_user = lambda n: User(
                email=f'{n}@',
                user_type=UserType[n],
                user_state=UserState.active,
                passwd=SaltedSha.from_secret('*'),
                full_name=f'{n} user'
            )
            # ensure guest
            guest = dal.query_users_by_type(
                glue_session, UserType.guest).one_or_none()
            if guest is None:
                guest = make_system_user('guest')
                glue_session.add(guest)
                glue_session.flush()
                index_portal = guest.id.transform_portal(
                    role=CakeRole.NEURON)
                with self.cake_shard_db(index_portal).session_scope() as \
                        shard_session:
                    shard_session.add(Portal(id=index_portal))
            # ensure system
            system = dal.query_users_by_type(
                glue_session, UserType.system).one_or_none()
            if system is None:
                system = make_system_user('system')
                glue_session.add(system)
                glue_session.add(
                    Permission(permission_type=PT.Admin, user=system))

    def server_config(self):
        with self.srvcfg_db.session_scope() as session:
            return session.query(ServerKey).one()
package at.hannesmoser.gleam.transforms.generator

import io.github.serpro69.kfaker.Faker
import org.apache.beam.sdk.schemas.Schema
import org.apache.beam.sdk.schemas.Schema.FieldType
import org.apache.beam.sdk.schemas.logicaltypes.EnumerationType
import org.apache.beam.sdk.values.Row
import org.joda.time.Instant

internal const val AGGREGATE_LEN = 10

/**
 * Generates a value for a given field type
 */
@Suppress("UNCHECKED_CAST", "ReturnCount")
internal fun <T> generateValue(
  faker: Faker,
  type: FieldType
): T {
  if (type.typeName.isPrimitiveType) {
    return generatePrimitiveValue(faker, type) as T
  }
  if (type.typeName.isCollectionType) {
    return generateCollectionValue(faker, type) as T
  }
  if (type.typeName.isCompositeType) {
    return generateCompositeValue(faker, type) as T
  }
  if (type.typeName.isLogicalType) {
    return generateLogicalType(faker, type) as T
  }
  throw IllegalArgumentException("${type.typeName} is not supported")
}

private fun generatePrimitiveValue(faker: Faker, type: Schema.FieldType) =
  when (type.typeName) {
    Schema.TypeName.BOOLEAN -> faker.random.nextBoolean()
    Schema.TypeName.BYTE -> faker.random.nextInt(
      Byte.MIN_VALUE.toInt(),
      Byte.MAX_VALUE.toInt()
    ).toByte()
    Schema.TypeName.BYTES -> faker.artist.names().toByteArray()
    Schema.TypeName.DATETIME -> Instant.ofEpochMilli(
      faker.random.nextInt(
        Instant.EPOCH.millis.toInt(),
        Instant.now().millis.toInt()
      ).toLong()
    ).toDateTime()
    Schema.TypeName.DECIMAL -> faker.random.nextDouble().toBigDecimal()
    Schema.TypeName.DOUBLE -> faker.random.nextDouble()
    Schema.TypeName.FLOAT -> faker.random.nextFloat()
    Schema.TypeName.INT16 -> faker.random.nextInt(
      Short.MIN_VALUE.toInt(),
      Short.MAX_VALUE.toInt()
    ).toShort()
    Schema.TypeName.INT32 -> faker.random.nextInt()
    Schema.TypeName.INT64 -> faker.random.nextLong()
    Schema.TypeName.STRING -> faker.commerce.productName()
    else -> throw IllegalArgumentException("${type.typeName} is not a primitive type")
  }

@Suppress("IMPLICIT_CAST_TO_ANY")
private fun generateCollectionValue(faker: Faker, type: Schema.FieldType) =
  when (type.typeName) {
    Schema.TypeName.ARRAY -> {
      val len = faker.random.nextInt(0, AGGREGATE_LEN - 1)
      (0..len).map { faker.movie.title() }
    }
    Schema.TypeName.ITERABLE -> {
      val len = faker.random.nextInt(0, AGGREGATE_LEN - 1)
      (0..len).map { faker.animal.name() }
    }
    Schema.TypeName.MAP -> {
      val len = faker.random.nextInt(0, AGGREGATE_LEN - 1)
      (0..len).associate {
        Pair(
          faker.programmingLanguage.name(),
          faker.programmingLanguage.creator()
        )
      }
    }
    else -> throw IllegalArgumentException("${type.typeName} is not a collection type")
  }

private fun generateCompositeValue(faker: Faker, type: Schema.FieldType) =
  when (type.typeName) {
    Schema.TypeName.ROW -> Row.withSchema(type.rowSchema)
      .withFieldValues(generateFieldValues(faker, type.rowSchema!!))
      .build()
    else -> throw IllegalArgumentException("${type.typeName} is not a composite type")
  }

@Suppress("UnusedPrivateMember")
private fun generateLogicalType(faker: Faker, type: FieldType) =
  when (type.typeName) {
    Schema.TypeName.LOGICAL_TYPE -> EnumerationType.create(
      "RED",
      "GREEN",
      "BLUE"
    )
    else -> throw IllegalArgumentException("${type.typeName} is not a logical type")
  }
{-# LANGUAGE OverloadedStrings #-}

module Raindrops (convert) where

import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import Data.Text (Text)

convert :: Int -> Text
convert n = fromMaybe (T.pack $ show n) $
    sound "Pling" 3 <> sound "Plang" 5 <> sound "Plong" 7
  where
    sound noise factor
      | n `rem` factor == 0 = Just noise
      | otherwise           = Nothing
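A sample GHCi session; the Semigroup instance on Maybe Text concatenates every matching sound, and fromMaybe falls back to the number itself when none match:

    -- ghci> convert 28
    -- "Plong"
    -- ghci> convert 30
    -- "PlingPlang"
    -- ghci> convert 34
    -- "34"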
import { MouseEvent, useEffect, useState } from 'react'
import HeroCard from './HeroCard'

const Hero = ({ episodes: initialEpisodes }: { episodes: TEpisode[] }) => {
  // Only render episodes with title and cover image
  const episodes = initialEpisodes.filter(
    (episode) =>
      episode.title &&
      (episode.urls?.image || episode.channel.urls.logo_image.original)
  )
  const [activeIndex, setActiveIndex] = useState(0)

  const prevIndex = (e: MouseEvent) => {
    e.preventDefault()
    if (activeIndex <= 0) return
    setActiveIndex((oldIndex) => oldIndex - 1)
  }

  const nextIndex = (e: MouseEvent) => {
    e.preventDefault()
    // Guard against stepping past the last slide
    if (activeIndex >= episodes.length - 1) return
    setActiveIndex((oldIndex) => oldIndex + 1)
  }

  useEffect(() => {
    const timer = setTimeout(() => {
      activeIndex === episodes.length - 1
        ? setActiveIndex(0)
        : setActiveIndex((oldIndex) => oldIndex + 1)
    }, 10000)
    return () => {
      clearTimeout(timer)
    }
  }, [activeIndex, episodes])

  if (!episodes || episodes.length === 0) return null

  return (
    <section className="hero-container">
      {episodes.map((episode, index) => (
        <HeroCard
          episode={episode}
          index={index}
          active={activeIndex}
          listLength={episodes.length}
          key={episode.id}
          onPrevClick={prevIndex}
          onNextClick={nextIndex}
        />
      ))}
      <style jsx>{`
        .hero-container {
          position: relative;
          min-height: min-content;
          margin: 2.5rem 0 2rem;
          height: 39rem;
          border-bottom: var(--default-border);
        }
        @media screen and (min-width: 1024px) {
          .hero-container {
            padding: 0;
            margin: 0 0 2rem;
            height: 28rem;
            display: flex;
            align-items: center;
          }
        }
        @media screen and (min-width: 1440px) {
          .hero-container {
            margin: 0 -4rem 2rem;
          }
        }
      `}</style>
    </section>
  )
}

export default Hero
<?php

namespace Application\Model\Admin\Question;

class Table extends \System\Libraries\Table
{
    public function __construct()
    {
        parent::__construct();
        $this->columns = array('<input type="checkbox" />', 'No.',
            'Nội dung câu hỏi', 'Loại câu hỏi', 'Điểm');
        //$this->
    }

    protected function Source()
    {
        $data = new DataTable();
        return $data->getQuery();
    }

    protected function row($data, $index)
    {
        if (!$data->score) $data->score = '<em> Unknown </em>';
        return "<tr>"
            . "<td> <input type=\"checkbox\" name=\"id[]\" value=\"$data->id\" /> </td>"
            . "<td> $index </td>"
            . "<td> $data->content </td>"
            . "<td> $data->typename </td>"
            . "<td> $data->score </td>"
            . "</tr>";
    }
}
import objectframework.models.ObjectContexts
import org.json4s.jackson.JsonMethods._
import phoenix.failures.AddressFailures.NoCountryFound
import phoenix.models.cord.lineitems._
import phoenix.models.location.Addresses
import phoenix.models.product.{Mvp, SimpleContext}
import phoenix.models.rules.QueryStatement
import phoenix.models.shipping
import phoenix.models.shipping.{ShippingMethod, ShippingMethods}
import phoenix.responses.ShippingMethodsResponse
import phoenix.services.carts.CartTotaler
import phoenix.utils.seeds.Factories
import testutils._
import testutils.apis.{PhoenixAdminApi, PhoenixStorefrontApi}
import testutils.fixtures.BakedFixtures
import core.db._
import phoenix.payloads.CartPayloads.CreateCart
import phoenix.responses.cord.CartResponse
import cats.implicits._
import faker.Lorem
import phoenix.payloads.LineItemPayloads.UpdateLineItemsPayload
import testutils.fixtures.api.ApiFixtures

class ShippingMethodsIntegrationTest
    extends IntegrationTestBase
    with PhoenixAdminApi
    with PhoenixStorefrontApi
    with DefaultJwtAdminAuth
    with BakedFixtures
    with ApiFixtures {

  "GET /v1/shipping-methods/:refNum" - {

    "Evaluates shipping rule: order total is greater than $25" - {

      "Shipping method is returned when actual order total is greater than $25" in new ShippingMethodsFixture {
        val conditions = parse("""
            | {
            |   "comparison": "and",
            |   "conditions": [{
            |     "rootObject": "Order", "field": "grandtotal", "operator": "greaterThan", "valInt": 25
            |   }]
            | }
          """.stripMargin).extract[QueryStatement]

        val shippingMethod = shipping.ShippingMethods
          .create(Factories.shippingMethods.head.copy(conditions = Some(conditions)))
          .gimme

        val methodResponse =
          shippingMethodsApi.forCart(cart.refNum).as[Seq[ShippingMethodsResponse]].headOption.value

        methodResponse.id must === (shippingMethod.id)
        methodResponse.name must === (shippingMethod.adminDisplayName)
        methodResponse.price must === (shippingMethod.price)
      }
    }

    "Evaluates shipping rule: order total is greater than $100" - {

      "No shipping rules found when order total is less than $100" in new ShippingMethodsFixture {
        val conditions = parse("""
            | {
            |   "comparison": "and",
            |   "conditions": [{
            |     "rootObject": "Order", "field": "grandtotal", "operator": "greaterThan", "valInt": 100
            |   }]
            | }
          """.stripMargin).extract[QueryStatement]

        val shippingMethod = shipping.ShippingMethods
          .create(Factories.shippingMethods.head.copy(conditions = Some(conditions)))
          .gimme

        shippingMethodsApi.forCart(cart.refNum).as[Seq[ShippingMethodsResponse]] mustBe 'empty
      }
    }

    "Evaluates shipping rule: shipping to CA, OR, or WA" - {

      "Shipping method is returned when the order is shipped to CA" in new WestCoastShippingMethodsFixture {
        val methodResponse =
          shippingMethodsApi.forCart(cart.refNum).as[Seq[ShippingMethodsResponse]].headOption.value

        methodResponse.id must === (shippingMethod.id)
        methodResponse.name must === (shippingMethod.adminDisplayName)
        methodResponse.price must === (shippingMethod.price)
      }
    }

    "Evaluates shipping rule: order total is between $10 and $100, and is shipped to CA, OR, or WA" - {

      "Is true when the order total is $27 and shipped to CA" in new ShippingMethodsStateAndPriceCondition {
        val methodResponse =
          shippingMethodsApi.forCart(cart.refNum).as[Seq[ShippingMethodsResponse]].headOption.value

        methodResponse.id must === (shippingMethod.id)
        methodResponse.name must === (shippingMethod.adminDisplayName)
        methodResponse.price must === (shippingMethod.price)
      }
    }

    "Evaluates shipping rule: ships to CA but has a restriction for hazardous items" - {

      "Shipping method is returned when the order has no hazardous SKUs" in new ShipToCaliforniaButNotHazardous {
        val methodResponse =
          shippingMethodsApi.forCart(cart.refNum).as[Seq[ShippingMethodsResponse]].headOption.value

        methodResponse.id must === (shippingMethod.id)
        methodResponse.name must === (shippingMethod.adminDisplayName)
        methodResponse.price must === (shippingMethod.price)
        methodResponse.isEnabled must === (true)
      }
    }
  }

  "Search /v1/my/cart/shipping-methods" - {

    "Has active methods" in new UsShipping {
      shippingMethodsApi.active().as[Seq[ShippingMethodsResponse]].size mustBe >(0)
    }

    "Get shipping method by country code" in new UsShipping {
      withNewCustomerAuth(TestLoginData.random) { implicit auth ⇒
        cartsApi.create(CreateCart(customerId = auth.customerId.some)).as[CartResponse]
        storefrontCartsApi.shippingMethods
          .searchByRegion("us")
          .as[Seq[ShippingMethodsResponse]]
          .size mustBe >(0)
      }
    }

    "Make sure that searchByRegion is aware of a cart content" in new UsShipping {
      withNewCustomerAuth(TestLoginData.random) { implicit auth ⇒
        val cart = cartsApi.create(CreateCart(customerId = auth.customerId.some)).as[CartResponse]

        storefrontCartsApi.shippingMethods
          .searchByRegion("us")
          .as[Seq[ShippingMethodsResponse]]
          .exists(_.price == 0) mustBe false // no free shipping

        cartsApi(cart.referenceNumber).lineItems
          .add(Seq(UpdateLineItemsPayload(skuCode, 50))) // over 50 bucks
          .mustBeOk()

        storefrontCartsApi.shippingMethods
          .searchByRegion("us")
          .as[Seq[ShippingMethodsResponse]]
          .exists(_.price == 0) mustBe true // YES! free shipping
      }
    }

    "No shipping to Russia ;(" in new UsShipping {
      withNewCustomerAuth(TestLoginData.random) { implicit auth ⇒
        cartsApi.create(CreateCart(customerId = auth.customerId.some)).as[CartResponse]
        storefrontCartsApi.shippingMethods
          .searchByRegion("rus")
          .as[Seq[ShippingMethodsResponse]]
          .size must === (0)
      }
    }

    "No shipping methods for non existent country" in {
      withNewCustomerAuth(TestLoginData.random) { implicit auth ⇒
        cartsApi.create(CreateCart(customerId = auth.customerId.some)).as[CartResponse]
        storefrontCartsApi.shippingMethods
          .searchByRegion("uss")
          .mustFailWith400(NoCountryFound("uss"))
      }
    }
  }

  trait Fixture extends EmptyCustomerCart_Baked with StoreAdmin_Seed

  trait ShippingMethodsFixture extends Fixture {
    val californiaId = 4129
    val michiganId   = 4148
    val oregonId     = 4164
    val washingtonId = 4177

    val address = (for {
      productContext ← * <~ ObjectContexts.mustFindById404(SimpleContext.id)
      address ← * <~ Addresses.create(
                 Factories.address.copy(accountId = customer.accountId, regionId = californiaId))
      _ ← * <~ address.bindToCart(cart.refNum)
      product ← * <~ Mvp.insertProduct(productContext.id,
                                       Factories.products.head.copy(title = "Donkey", price = 27))
      _ ← * <~ CartLineItems.create(CartLineItem(cordRef = cart.refNum, skuId = product.skuId))
      _ ← * <~ CartTotaler.saveTotals(cart)
    } yield address).gimme
  }

  trait WestCoastShippingMethodsFixture extends ShippingMethodsFixture {
    val conditions = parse(s"""
        |{
        |"comparison": "or",
        |"conditions": [
        |{
        |"rootObject": "ShippingAddress",
        |"field": "regionId",
        |"operator": "equals",
        |"valInt": $californiaId
        |}, {
        |"rootObject": "ShippingAddress",
        |"field": "regionId",
        |"operator": "equals",
        |"valInt": $oregonId
        |}, {
        |"rootObject": "ShippingAddress",
        |"field": "regionId",
        |"operator": "equals",
        |"valInt": $washingtonId
        |}
        |]
        |}
      """.stripMargin).extract[QueryStatement]

    val shippingMethod = ShippingMethods
      .create(Factories.shippingMethods.head.copy(conditions = Some(conditions)))
      .gimme
  }

  trait ShippingMethodsStateAndPriceCondition extends ShippingMethodsFixture {
    val conditions = parse(s"""
        |{
        |"comparison": "and",
        |"statements": [
        |{
        |"comparison": "or",
        |"conditions": [
        |{
        |"rootObject": "ShippingAddress",
        |"field": "regionId",
        |"operator": "equals",
        |"valInt": $californiaId
        |}, {
        |"rootObject": "ShippingAddress",
        |"field": "regionId",
        |"operator": "equals",
        |"valInt": $oregonId
        |}, {
        |"rootObject": "ShippingAddress",
        |"field": "regionId",
        |"operator": "equals",
        |"valInt": $washingtonId
        |}
        |]
        |}, {
        |"comparison": "and",
        |"conditions": [
        |{
        |"rootObject": "Order",
        |"field": "grandtotal",
        |"operator": "greaterThanOrEquals",
        |"valInt": 10
        |}, {
        |"rootObject": "Order",
        |"field": "grandtotal",
        |"operator": "lessThan",
        |"valInt": 100
        |}
        |]
        |}
        |]
        |}
      """.stripMargin).extract[QueryStatement]

    val shippingMethod = shipping.ShippingMethods
      .create(Factories.shippingMethods.head.copy(conditions = Some(conditions)))
      .gimme
  }

  trait ShipToCaliforniaButNotHazardous extends ShippingMethodsFixture {
    val conditions = parse(s"""
        |{
        |"comparison": "and",
        |"conditions": [
        |{
        |"rootObject": "ShippingAddress",
        |"field": "regionId",
        |"operator": "equals",
        |"valInt": $californiaId
        |}
        |]
        |}
      """.stripMargin).extract[QueryStatement]

    val restrictions = parse("""
        | {
        |   "comparison": "and",
        |   "conditions": [
        |     {
        |       "rootObject": "Order",
        |       "field": "skus.isHazardous",
        |       "operator": "equals",
        |       "valBoolean": true
        |     }
        |   ]
        | }
      """.stripMargin).extract[QueryStatement]

    val shippingMethod = (for {
      shippingMethod ← shipping.ShippingMethods.create(
                        Factories.shippingMethods.head.copy(conditions = Some(conditions),
                                                            restrictions = Some(restrictions)))
    } yield shippingMethod).gimme
  }

  trait UsShipping extends ProductSku_ApiFixture {
    val shippingMethods: Seq[ShippingMethod] =
      Factories.shippingMethods.map(sm ⇒ shipping.ShippingMethods.create(sm).gimme)
    require(shippingMethods.length > 0)
  }
}
#!/bin/sh
echo "Cleaning Up..."
aws sesv2 delete-contact --contact-list-name ExampleContactListName --email-address [email protected]
aws sesv2 delete-contact-list --contact-list-name ExampleContactListName
echo "Done!"
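For context, the resources this script tears down would have been created earlier with the matching sesv2 calls, roughly as follows (the list name mirrors the script above; the email-address placeholder is kept as-is):

    aws sesv2 create-contact-list --contact-list-name ExampleContactListName
    aws sesv2 create-contact --contact-list-name ExampleContactListName --email-address [email protected]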
#!/bin/bash
# author: yonglong.wyl
# date: 2021/06/09

:<<EOF
# Simple example
echo "input website:"
read website  # with no flags, read waits for user input indefinitely
echo "your website is: ${website}"
exit 0  # exit the current shell process; 0 means success, n (n>0) means failure
# exit makes the shell quit with the given status value
# exit can also be used in a script to leave the running script and return to the shell
EOF

:<<EOF
# Demonstrate the -p flag
read -p "Enter a website name: " website
echo "The website name you entered is ${website}"
exit 0
EOF

:<<EOF
# Demonstrate the -t (timeout) flag
if read -t 1 -p "Enter a website: " website
then
    echo "The website you entered: ${website}"
else
    echo -e "\nSorry, your input timed out"
fi
exit 0
EOF

:<<EOF
# -n limits the number of characters read
read -n1 -p "Do you want to continue [Y/N]?" answer
case $answer in
    Y|y) echo "fine, continue";;
    N|n) echo "ok, good bye";;
    *) echo "invalid choice";;
esac
exit 0;
EOF

:<<EOF
count=1
cat echo_file.txt | while read line
do
    echo "Line $count:$line"
    count=$[ $count +1 ]
done
echo "finish"
exit 0;
EOF

read -e -p "Enter a file name: " str
/// -*- tab-width: 4; Mode: C++; c-basic-offset: 4; indent-tabs-mode: nil -*- #include <AP_HAL/AP_HAL.h> #if CONFIG_HAL_BOARD == HAL_BOARD_PX4 #include "AnalogIn.h" #include <drivers/drv_adc.h> #include <stdio.h> #include <sys/types.h> #include <sys/stat.h> #include <fcntl.h> #include <unistd.h> #include <nuttx/analog/adc.h> #include <nuttx/config.h> #include <arch/board/board.h> #include <uORB/topics/battery_status.h> #include <uORB/topics/servorail_status.h> #include <uORB/topics/system_power.h> #include <GCS_MAVLink/GCS_MAVLink.h> #include <errno.h> #include "GPIO.h" #define ANLOGIN_DEBUGGING 0 // base voltage scaling for 12 bit 3.3V ADC #define PX4_VOLTAGE_SCALING (3.3f/4096.0f) #if ANLOGIN_DEBUGGING # define Debug(fmt, args ...) do {printf("%s:%d: " fmt "\n", __FUNCTION__, __LINE__, ## args); } while(0) #else # define Debug(fmt, args ...) #endif extern const AP_HAL::HAL& hal; /* scaling table between ADC count and actual input voltage, to account for voltage dividers on the board. */ static const struct { uint8_t pin; float scaling; } pin_scaling[] = { #ifdef CONFIG_ARCH_BOARD_PX4FMU_V1 // PX4 has 4 FMU analog input pins { 10, (5.7f*3.3f)/4096 }, // FMU battery on multi-connector pin 5, // 5.7:1 scaling { 11, 6.6f/4096 }, // analog airspeed input, 2:1 scaling { 12, 3.3f/4096 }, // analog2, on SPI port pin 3 { 13, 16.8f/4096 }, // analog3, on SPI port pin 4 #elif defined(CONFIG_ARCH_BOARD_PX4FMU_V2) { 2, 3.3f/4096 }, // 3DR Brick voltage, usually 10.1:1 // scaled from battery voltage { 3, 3.3f/4096 }, // 3DR Brick current, usually 17:1 scaled // for APM_PER_VOLT { 4, 6.6f/4096 }, // VCC 5V rail sense { 10, 3.3f/4096 }, // spare ADC { 11, 3.3f/4096 }, // spare ADC { 12, 3.3f/4096 }, // spare ADC { 13, 3.3f/4096 }, // AUX ADC pin 4 { 14, 3.3f/4096 }, // AUX ADC pin 3 { 15, 6.6f/4096 }, // analog airspeed sensor, 2:1 scaling #else #error "Unknown board type for AnalogIn scaling" #endif }; using namespace PX4; PX4AnalogSource::PX4AnalogSource(int16_t pin, float initial_value) : _pin(pin), _stop_pin(-1), _settle_time_ms(0), _value(initial_value), _value_ratiometric(initial_value), _latest_value(initial_value), _sum_count(0), _sum_value(0), _sum_ratiometric(0) { #ifdef PX4_ANALOG_VCC_5V_PIN if (_pin == ANALOG_INPUT_BOARD_VCC) { _pin = PX4_ANALOG_VCC_5V_PIN; } #endif } void PX4AnalogSource::set_stop_pin(uint8_t p) { _stop_pin = p; } float PX4AnalogSource::read_average() { if (_sum_count == 0) { return _value; } hal.scheduler->suspend_timer_procs(); _value = _sum_value / _sum_count; _value_ratiometric = _sum_ratiometric / _sum_count; _sum_value = 0; _sum_ratiometric = 0; _sum_count = 0; hal.scheduler->resume_timer_procs(); return _value; } float PX4AnalogSource::read_latest() { return _latest_value; } /* return scaling from ADC count to Volts */ float PX4AnalogSource::_pin_scaler(void) { float scaling = PX4_VOLTAGE_SCALING; uint8_t num_scalings = ARRAY_SIZE(pin_scaling); for (uint8_t i=0; i<num_scalings; i++) { if (pin_scaling[i].pin == _pin) { scaling = pin_scaling[i].scaling; break; } } return scaling; } /* return voltage in Volts */ float PX4AnalogSource::voltage_average() { return _pin_scaler() * read_average(); } /* return voltage in Volts, assuming a ratiometric sensor powered by the 5V rail */ float PX4AnalogSource::voltage_average_ratiometric() { voltage_average(); return _pin_scaler() * _value_ratiometric; } /* return voltage in Volts */ float PX4AnalogSource::voltage_latest() { return _pin_scaler() * read_latest(); } void PX4AnalogSource::set_pin(uint8_t pin) { if (_pin == pin) { 
return; } hal.scheduler->suspend_timer_procs(); _pin = pin; _sum_value = 0; _sum_ratiometric = 0; _sum_count = 0; _latest_value = 0; _value = 0; _value_ratiometric = 0; hal.scheduler->resume_timer_procs(); } /* apply a reading in ADC counts */ void PX4AnalogSource::_add_value(float v, float vcc5V) { _latest_value = v; _sum_value += v; if (vcc5V < 3.0f) { _sum_ratiometric += v; } else { // this compensates for changes in the 5V rail relative to the // 3.3V reference used by the ADC. _sum_ratiometric += v * 5.0f / vcc5V; } _sum_count++; if (_sum_count == 254) { _sum_value /= 2; _sum_ratiometric /= 2; _sum_count /= 2; } } PX4AnalogIn::PX4AnalogIn() : _current_stop_pin_i(0), _board_voltage(0), _servorail_voltage(0), _power_flags(0) {} void PX4AnalogIn::init(void* machtnichts) { _adc_fd = open(ADC0_DEVICE_PATH, O_RDONLY | O_NONBLOCK); if (_adc_fd == -1) { hal.scheduler->panic("Unable to open " ADC0_DEVICE_PATH); } _battery_handle = orb_subscribe(ORB_ID(battery_status)); _servorail_handle = orb_subscribe(ORB_ID(servorail_status)); _system_power_handle = orb_subscribe(ORB_ID(system_power)); } /* move to the next stop pin */ void PX4AnalogIn::next_stop_pin(void) { // find the next stop pin. We start one past the current stop pin // and wrap completely, so we do the right thing is there is only // one stop pin for (uint8_t i=1; i <= PX4_ANALOG_MAX_CHANNELS; i++) { uint8_t idx = (_current_stop_pin_i + i) % PX4_ANALOG_MAX_CHANNELS; PX4::PX4AnalogSource *c = _channels[idx]; if (c && c->_stop_pin != -1) { // found another stop pin _stop_pin_change_time = hal.scheduler->millis(); _current_stop_pin_i = idx; // set that pin high hal.gpio->pinMode(c->_stop_pin, 1); hal.gpio->write(c->_stop_pin, 1); // set all others low for (uint8_t j=0; j<PX4_ANALOG_MAX_CHANNELS; j++) { PX4::PX4AnalogSource *c2 = _channels[j]; if (c2 && c2->_stop_pin != -1 && j != idx) { hal.gpio->pinMode(c2->_stop_pin, 1); hal.gpio->write(c2->_stop_pin, 0); } } break; } } } /* called at 1kHz */ void PX4AnalogIn::_timer_tick(void) { // read adc at 100Hz uint32_t now = hal.scheduler->micros(); uint32_t delta_t = now - _last_run; if (delta_t < 10000) { return; } _last_run = now; struct adc_msg_s buf_adc[PX4_ANALOG_MAX_CHANNELS]; // cope with initial setup of stop pin if (_channels[_current_stop_pin_i] == NULL || _channels[_current_stop_pin_i]->_stop_pin == -1) { next_stop_pin(); } /* read all channels available */ int ret = read(_adc_fd, &buf_adc, sizeof(buf_adc)); if (ret > 0) { // match the incoming channels to the currently active pins for (uint8_t i=0; i<ret/sizeof(buf_adc[0]); i++) { #ifdef CONFIG_ARCH_BOARD_PX4FMU_V2 if (buf_adc[i].am_channel == 4) { // record the Vcc value for later use in // voltage_average_ratiometric() _board_voltage = buf_adc[i].am_data * 6.6f / 4096; } #endif } for (uint8_t i=0; i<ret/sizeof(buf_adc[0]); i++) { Debug("chan %u value=%u\n", (unsigned)buf_adc[i].am_channel, (unsigned)buf_adc[i].am_data); for (uint8_t j=0; j<PX4_ANALOG_MAX_CHANNELS; j++) { PX4::PX4AnalogSource *c = _channels[j]; if (c != NULL && buf_adc[i].am_channel == c->_pin) { // add a value if either there is no stop pin, or // the stop pin has been settling for enough time if (c->_stop_pin == -1 || (_current_stop_pin_i == j && hal.scheduler->millis() - _stop_pin_change_time > c->_settle_time_ms)) { c->_add_value(buf_adc[i].am_data, _board_voltage); if (c->_stop_pin != -1 && _current_stop_pin_i == j) { next_stop_pin(); } } } } } } #ifdef CONFIG_ARCH_BOARD_PX4FMU_V1 // check for new battery data on FMUv1 if (_battery_handle != -1) { struct 
battery_status_s battery; bool updated = false; if (orb_check(_battery_handle, &updated) == 0 && updated) { orb_copy(ORB_ID(battery_status), _battery_handle, &battery); if (battery.timestamp != _battery_timestamp) { _battery_timestamp = battery.timestamp; for (uint8_t j=0; j<PX4_ANALOG_MAX_CHANNELS; j++) { PX4::PX4AnalogSource *c = _channels[j]; if (c == NULL) continue; if (c->_pin == PX4_ANALOG_ORB_BATTERY_VOLTAGE_PIN) { c->_add_value(battery.voltage_v / PX4_VOLTAGE_SCALING, 0); } if (c->_pin == PX4_ANALOG_ORB_BATTERY_CURRENT_PIN) { // scale it back to voltage, knowing that the // px4io code scales by 90.0/5.0 c->_add_value(battery.current_a * (5.0f/90.0f) / PX4_VOLTAGE_SCALING, 0); } } } } } #endif #ifdef CONFIG_ARCH_BOARD_PX4FMU_V2 // check for new servorail data on FMUv2 if (_servorail_handle != -1) { struct servorail_status_s servorail; bool updated = false; if (orb_check(_servorail_handle, &updated) == 0 && updated) { orb_copy(ORB_ID(servorail_status), _servorail_handle, &servorail); if (servorail.timestamp != _servorail_timestamp) { _servorail_timestamp = servorail.timestamp; _servorail_voltage = servorail.voltage_v; for (uint8_t j=0; j<PX4_ANALOG_MAX_CHANNELS; j++) { PX4::PX4AnalogSource *c = _channels[j]; if (c == NULL) continue; if (c->_pin == PX4_ANALOG_ORB_SERVO_VOLTAGE_PIN) { c->_add_value(servorail.voltage_v / PX4_VOLTAGE_SCALING, 0); } if (c->_pin == PX4_ANALOG_ORB_SERVO_VRSSI_PIN) { c->_add_value(servorail.rssi_v / PX4_VOLTAGE_SCALING, 0); } } } } } if (_system_power_handle != -1) { struct system_power_s system_power; bool updated = false; if (orb_check(_system_power_handle, &updated) == 0 && updated) { orb_copy(ORB_ID(system_power), _system_power_handle, &system_power); uint16_t flags = 0; if (system_power.usb_connected) flags |= MAV_POWER_STATUS_USB_CONNECTED; if (system_power.brick_valid) flags |= MAV_POWER_STATUS_BRICK_VALID; if (system_power.servo_valid) flags |= MAV_POWER_STATUS_SERVO_VALID; if (system_power.periph_5V_OC) flags |= MAV_POWER_STATUS_PERIPH_OVERCURRENT; if (system_power.hipower_5V_OC) flags |= MAV_POWER_STATUS_PERIPH_HIPOWER_OVERCURRENT; if (_power_flags != 0 && _power_flags != flags && hal.util->get_soft_armed()) { // the power status has changed while armed flags |= MAV_POWER_STATUS_CHANGED; } _power_flags = flags; } } #endif } AP_HAL::AnalogSource* PX4AnalogIn::channel(int16_t pin) { for (uint8_t j=0; j<PX4_ANALOG_MAX_CHANNELS; j++) { if (_channels[j] == NULL) { _channels[j] = new PX4AnalogSource(pin, 0.0f); return _channels[j]; } } hal.console->println("Out of analog channels"); return NULL; } #endif // CONFIG_HAL_BOARD
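The subtle part of the driver above is the accumulation logic in `_add_value`: ratiometric readings are rescaled by the measured 5V rail, and the running sums are halved once 254 samples accumulate so `read_average()` stays bounded (with a slight bias toward recent samples). A minimal Python sketch of just that logic, with hypothetical state names, not ArduPilot code:

```python
# Sketch of PX4AnalogSource::_add_value's accumulation, for illustration only.

def add_value(state, v, vcc_5v):
    """Accumulate one raw ADC reading v (in counts) into the running sums."""
    state["latest"] = v
    state["sum_value"] += v
    if vcc_5v < 3.0:
        # 5V rail reading unavailable or implausible: no compensation.
        state["sum_ratiometric"] += v
    else:
        # Compensate ratiometric sensors (powered from the 5V rail) for
        # rail sag relative to the ADC's fixed 3.3V reference.
        state["sum_ratiometric"] += v * 5.0 / vcc_5v
    state["sum_count"] += 1
    if state["sum_count"] == 254:
        # Halve sums and count together: the average is preserved while
        # the accumulators never grow without bound.
        state["sum_value"] /= 2
        state["sum_ratiometric"] /= 2
        state["sum_count"] //= 2

state = {"latest": 0.0, "sum_value": 0.0, "sum_ratiometric": 0.0, "sum_count": 0}
add_value(state, 2048, 4.8)        # one reading while the rail sags to 4.8V
print(state["sum_ratiometric"])    # 2048 * 5.0 / 4.8 ≈ 2133.3
```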
#include <value.h> #include <vm.h> #include <gc.h> #include "lib.h" static VAL BinaryUtils; static VAL BinaryUtils_readU64(js_vm_t* vm, void* state, VAL this, uint32_t argc, VAL* argv) { VAL buff; uint32_t offset; js_scan_args(vm, argc, argv, "SI", &buff, &offset); js_string_t* str = &js_value_get_pointer(buff)->string; if(offset + 8 > str->length) { js_throw_error(vm->lib.RangeError, "tried to read past end of buffer of length %d (offset was %d)", str->length, offset); } uint64_t u32 = *(uint64_t*)&str->buff[offset]; return js_value_make_double(u32); } static VAL BinaryUtils_readS64(js_vm_t* vm, void* state, VAL this, uint32_t argc, VAL* argv) { VAL buff; uint32_t offset; js_scan_args(vm, argc, argv, "SI", &buff, &offset); js_string_t* str = &js_value_get_pointer(buff)->string; if(offset + 8 > str->length) { js_throw_error(vm->lib.RangeError, "tried to read past end of buffer of length %d (offset was %d)", str->length, offset); } int64_t s32 = *(int64_t*)&str->buff[offset]; return js_value_make_double(s32); } static VAL BinaryUtils_readU32(js_vm_t* vm, void* state, VAL this, uint32_t argc, VAL* argv) { VAL buff; uint32_t offset; js_scan_args(vm, argc, argv, "SI", &buff, &offset); js_string_t* str = &js_value_get_pointer(buff)->string; if(offset + 4 > str->length) { js_throw_error(vm->lib.RangeError, "tried to read past end of buffer of length %d (offset was %d)", str->length, offset); } uint32_t u32 = *(uint32_t*)&str->buff[offset]; return js_value_make_double(u32); } static VAL BinaryUtils_readS32(js_vm_t* vm, void* state, VAL this, uint32_t argc, VAL* argv) { VAL buff; uint32_t offset; js_scan_args(vm, argc, argv, "SI", &buff, &offset); js_string_t* str = &js_value_get_pointer(buff)->string; if(offset + 4 > str->length) { js_throw_error(vm->lib.RangeError, "tried to read past end of buffer of length %d (offset was %d)", str->length, offset); } int32_t s32 = *(int32_t*)&str->buff[offset]; return js_value_make_double(s32); } static VAL BinaryUtils_readU16(js_vm_t* vm, void* state, VAL this, uint32_t argc, VAL* argv) { VAL buff; uint32_t offset; js_scan_args(vm, argc, argv, "SI", &buff, &offset); js_string_t* str = &js_value_get_pointer(buff)->string; if(offset + 2 > str->length) { js_throw_error(vm->lib.RangeError, "tried to read past end of buffer of length %d (offset was %d)", str->length, offset); } uint16_t u16 = *(uint16_t*)&str->buff[offset]; return js_value_make_double(u16); } static VAL BinaryUtils_readS16(js_vm_t* vm, void* state, VAL this, uint32_t argc, VAL* argv) { VAL buff; uint32_t offset; js_scan_args(vm, argc, argv, "SI", &buff, &offset); js_string_t* str = &js_value_get_pointer(buff)->string; if(offset + 2 > str->length) { js_throw_error(vm->lib.RangeError, "tried to read past end of buffer of length %d (offset was %d)", str->length, offset); } int16_t s16 = *(int16_t*)&str->buff[offset]; return js_value_make_double(s16); } static VAL BinaryUtils_readU8(js_vm_t* vm, void* state, VAL this, uint32_t argc, VAL* argv) { VAL buff; uint32_t offset; js_scan_args(vm, argc, argv, "SI", &buff, &offset); js_string_t* str = &js_value_get_pointer(buff)->string; if(offset + 1 > str->length) { js_throw_error(vm->lib.RangeError, "tried to read past end of buffer of length %d (offset was %d)", str->length, offset); } uint8_t u8 = *(uint8_t*)&str->buff[offset]; return js_value_make_double(u8); } static VAL BinaryUtils_readS8(js_vm_t* vm, void* state, VAL this, uint32_t argc, VAL* argv) { VAL buff; uint32_t offset; js_scan_args(vm, argc, argv, "SI", &buff, &offset); js_string_t* str = 
&js_value_get_pointer(buff)->string; if(offset + 1 > str->length) { js_throw_error(vm->lib.RangeError, "tried to read past end of buffer of length %d (offset was %d)", str->length, offset); } int8_t s8 = *(int8_t*)&str->buff[offset]; return js_value_make_double(s8); } void lib_binary_utils_init(js_vm_t* vm) { BinaryUtils = js_make_object(vm); js_gc_register_global(&BinaryUtils, sizeof(BinaryUtils)); js_object_put(vm->global_scope->global_object, js_cstring("BinaryUtils"), BinaryUtils); js_object_put(BinaryUtils, js_cstring("readU64"), js_value_make_native_function(vm, NULL, js_cstring("readU64"), BinaryUtils_readU64, NULL)); js_object_put(BinaryUtils, js_cstring("readS64"), js_value_make_native_function(vm, NULL, js_cstring("readS64"), BinaryUtils_readS64, NULL)); js_object_put(BinaryUtils, js_cstring("readU32"), js_value_make_native_function(vm, NULL, js_cstring("readU32"), BinaryUtils_readU32, NULL)); js_object_put(BinaryUtils, js_cstring("readS32"), js_value_make_native_function(vm, NULL, js_cstring("readS32"), BinaryUtils_readS32, NULL)); js_object_put(BinaryUtils, js_cstring("readU16"), js_value_make_native_function(vm, NULL, js_cstring("readU16"), BinaryUtils_readU16, NULL)); js_object_put(BinaryUtils, js_cstring("readS16"), js_value_make_native_function(vm, NULL, js_cstring("readS16"), BinaryUtils_readS16, NULL)); js_object_put(BinaryUtils, js_cstring("readU8"), js_value_make_native_function(vm, NULL, js_cstring("readU8"), BinaryUtils_readU8, NULL)); js_object_put(BinaryUtils, js_cstring("readS8"), js_value_make_native_function(vm, NULL, js_cstring("readS8"), BinaryUtils_readS8, NULL)); }
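Each reader above bounds-checks `offset + width` against the buffer length, reinterprets the bytes at `offset` in the host's native byte order, and returns the value as a JS double (so `readU64`/`readS64` lose precision above 2^53). A rough Python equivalent using `struct`, assuming a little-endian host to match the C code's raw pointer cast:

```python
import struct

# Width/format pairs mirroring the BinaryUtils readers; '<' = little-endian.
_FORMATS = {"u64": "<Q", "s64": "<q", "u32": "<I", "s32": "<i",
            "u16": "<H", "s16": "<h", "u8": "<B", "s8": "<b"}

def read(kind, buff, offset):
    fmt = _FORMATS[kind]
    width = struct.calcsize(fmt)
    if offset + width > len(buff):
        raise IndexError("tried to read past end of buffer of length "
                         f"{len(buff)} (offset was {offset})")
    return struct.unpack_from(fmt, buff, offset)[0]

assert read("u16", b"\x01\x00", 0) == 1   # little-endian 1
assert read("s8", b"\xff", 0) == -1       # sign extension
```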
export default { statics: { background: require("./assets/elements.png"), logo: require("./assets/logo.png"), buttonPlay: require("./assets/play.png"), buttonHighscore: require("./assets/highscore.png"), buttonInstructions: require("./assets/intructions.png"), backgroundBlimp: require("./assets/bg_blimp.png"), logoGameMode: require("./assets/logo_3.png"), buttonBack: require('./assets/back.png') }, gamemodes: { easy: require('./assets/easy.png'), medium: require('./assets/medium.png'), hard: require('./assets/hard.png'), verbs: require('./assets/verbs.png'), animals: require('./assets/animals.png'), timed: require('./assets/times.png') }, game: { cloud: require('./assets/clouds.png'), topBar: require('./assets/bar_top.png'), blimp: require('./assets/blimp.png'), blimpButton: require('./assets/blimp_button.png'), forfeit: require('./assets/forfeit.png'), guessHere: require('./assets/guess_button.png'), blurredBackground: require('./assets/blured_bg.png') }, fonts: { main: require('./assets/fonts/Subscribe.ttf'), italic: require('./assets/fonts/Subscribe-Italic.ttf') }, instruction: { } }
package org.ml4j.gpt3.prompt.processors; import java.io.File; import java.io.IOException; import java.math.BigDecimal; import java.nio.file.Files; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import org.ml4j.gpt3.GPT3Request; public class DefaultPromptFileProcessor implements FileProcessor { private int maxTokens; private Integer topP; private Integer n; private Boolean stream; private String stop; public DefaultPromptFileProcessor(int maxTokens, Integer topP, Integer n, Boolean stream, String stop) { this.maxTokens = maxTokens; this.topP = topP; this.n = n; this.stream = stream; this.stop = stop; } @Override public boolean isSupported(File file) { return file.isFile(); } private BigDecimal getTemperature(File file) { throw new UnsupportedOperationException("Not yet implemented"); } private int getSplitIndex(File file) { throw new UnsupportedOperationException("Not yet implemented"); } @Override public Map<GPT3Request, List<String>> processExample(File example) throws IOException { Map<GPT3Request, List<String>> processed = new HashMap<>(); if (example.isFile()) { String text = new String(Files.readAllBytes(example.toPath())); int splitIndex = getSplitIndex(example); String prompt = text.substring(0, splitIndex); String output = text.substring(splitIndex); GPT3Request request = new GPT3Request(); request.setPrompt(prompt); request.setTemperature(getTemperature(example)); request.setMaxTokens(maxTokens); request.setN(n); request.setTopP(topP); request.setStop(stop); request.setStream(stream); processed.put(request, Arrays.asList(output)); } return processed; } }
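Because `getTemperature` and `getSplitIndex` are still unimplemented stubs, the intended flow of `processExample` is easiest to see in a sketch. Assuming each example file holds the prompt text immediately followed by the expected completion, split at some known index (both the index and the temperature are passed in here as placeholders):

```python
from pathlib import Path

def process_example(path, split_index, temperature, max_tokens=64, stop=None):
    """Split one example file into a GPT-3 request plus its expected output."""
    text = Path(path).read_text()
    prompt, expected = text[:split_index], text[split_index:]
    request = {
        "prompt": prompt,
        "temperature": temperature,
        "max_tokens": max_tokens,
        "stop": stop,
    }
    # Mirrors the Java Map<GPT3Request, List<String>> shape: one request
    # mapped to the list of acceptable completions.
    return request, [expected]
```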
package com.wsinz.network.items

import com.wsinz.network.base.BaseResponse
import com.wsinz.network.items.modelresponse.UserItemsResponse
import io.reactivex.Single

interface ItemsListFeedApi {

    fun getUserItems(authToken: String): Single<UserItemsResponse>

    fun deleteItem(authToken: String, itemToken: String): Single<BaseResponse>
}
## Operating system (OS)

Windows or Linux or Mac

## Application version

The current version number is shown in Settings. If no version number is shown, the version is <= 1.1.4; please download the latest version and try it before opening an issue, then fill in the version number.

## Problem description

Steps to reproduce the problem, screenshots of the operations, etc. We recommend attaching a screenshot of the Redis Info page shown right after opening the connection, so the overall Redis state is easy to review.
using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; using Shashlik.Kernel; using Shashlik.Kernel.Attributes; // ReSharper disable CheckNamespace namespace Sbt.Invoice.Service { [ConditionOnProperty(typeof(bool), "Jinkong.Timer.Enable", true, DefaultValue = true)] [Order(120)] public class TimerAutowire : IApplicationStartAutowire { public TimerAutowire(ScheduledService scheduledService, IEnumerable<ITimer> timers) { ScheduledService = scheduledService; Timers = timers; } private ScheduledService ScheduledService { get; } private IEnumerable<ITimer> Timers { get; } public async Task OnStart(CancellationToken cancellationToken) { foreach (var item in Timers) ScheduledService.AddTimer(item); ScheduledService.Start(); await Task.CompletedTask; } } }
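A rough Python analogue of what `OnStart` does, with a toy scheduler standing in for `ScheduledService` (the interval and callback below are hypothetical): every discovered timer is registered first, then the service is started once.

```python
import threading

class ScheduledService:
    """Toy stand-in: collects timers, then arms them all on start()."""
    def __init__(self):
        self._timers = []

    def add_timer(self, interval_s, callback):
        self._timers.append((interval_s, callback))

    def start(self):
        for interval_s, callback in self._timers:
            t = threading.Timer(interval_s, callback)  # fires once after interval_s
            t.daemon = True                            # don't block interpreter exit
            t.start()

svc = ScheduledService()
svc.add_timer(1.0, lambda: print("invoice timer tick"))  # hypothetical timer
svc.start()
```

A real scheduler would reschedule each callback after it fires; `threading.Timer` here only fires once, which is enough to show the register-then-start pattern.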
#!/bin/bash shopt -s expand_aliases export WEBDIR=/var/www/html export MYSQLDIR=/var/www/mysql alias msql="mysql -uroot -p\"$MYSQL_ROOT_PASSWORD\" -e" alias wpc="su www-data -s /bin/bash -c" SQLHEADER=$(cat <<EOF -- MySQL dump 10.13 Distrib 5.5.52, for debian-linux-gnu (i686) -- -- Host: localhost Database: xc218_db1 -- ------------------------------------------------------ -- Server version 5.5.52-0+deb7u1 /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES utf8 */; /*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; /*!40103 SET TIME_ZONE='+00:00' */; /*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; /*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; /*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; EOF ) function exit_clean { exit 1 } function start_mysql { if [ -z "$1" ]; then PASSWORD="$MYSQL_ROOT_PASSWORD" else PASSWORD="$1" fi /entrypoint.sh mysqld & until mysql -uroot -p"$PASSWORD" -e ";" do echo "Can't connect to mysql, retrying in 5 seconds." sleep 5 done } function stop_mysql { mysqladmin -uroot -p"$MYSQL_ROOT_PASSWORD" shutdown while ! [ -z "$(ps aux | grep mysqld | grep -v grep)" ] do sleep 1 done } function search-replace { if ! [ -z "$WEB_DOMAIN" ] && ! [ -z "$WEB_TEST_DOMAIN" ]; then echo "Search replacing: $WEB_DOMAIN with $WEB_TEST_DOMAIN" wpc "wp --path=$WEBDIR search-replace --all-tables $WEB_DOMAIN $WEB_TEST_DOMAIN" if ! [ "$?" -eq 0 ]; then return 1 fi echo "Search replacing: www.$WEB_TEST_DOMAIN $WEB_TEST_DOMAIN" wpc "wp --path=$WEBDIR search-replace --all-tables www.$WEB_TEST_DOMAIN $WEB_TEST_DOMAIN" if ! [ "$?" -eq 0 ]; then return 1 fi if [ "$SSL_ENABLED" == "true" ]; then echo "Search replacing: http://$WEB_TEST_DOMAIN with https://$WEB_TEST_DOMAIN" wpc "wp --path=$WEBDIR search-replace --all-tables http://$WEB_TEST_DOMAIN https://$WEB_TEST_DOMAIN" if ! [ "$?" -eq 0 ]; then return 1 fi else echo "Search replacing: https://$WEB_TEST_DOMAIN with http://$WEB_TEST_DOMAIN" wpc "wp --path=$WEBDIR search-replace --all-tables https://$WEB_TEST_DOMAIN http://$WEB_TEST_DOMAIN" if ! [ "$?" -eq 0 ]; then return 1 fi fi echo "Search replacing: @$WEB_TEST_DOMAIN with @$WEB_DOMAIN" wpc "wp --path=$WEBDIR search-replace --all-tables @$WEB_TEST_DOMAIN @$WEB_DOMAIN" if ! [ "$?" -eq 0 ]; then return 1 fi fi return 0 } function init_from_sql { name="$(grep DB_ $WEBDIR/wp-config.php)" re="define ?\( ?'DB_NAME' ?, ?'([^']+)' ?\);" if [[ $name =~ $re ]]; then export MYSQL_DATABASE=${BASH_REMATCH[1]}; else echo "Could not get DB Name from wp-config.php. Aborting." exit_clean fi re="define ?\( ?'DB_USER' ?, ?'([^']+)' ?\);" if [[ $name =~ $re ]]; then export MYSQL_USER=${BASH_REMATCH[1]}; else echo "Could not get DB User from wp-config.php. Aborting." exit_clean fi re="define ?\( ?'DB_PASSWORD' ?, ?'([^']+)' ?\);" if [[ $name =~ $re ]]; then export MYSQL_PASSWORD=${BASH_REMATCH[1]}; else echo "Could not get DB Password from wp-config.php. Aborting." exit_clean fi start_mysql LATEST_SQL="$(ls -t $MYSQLDIR/*.sql | head -n1)" grep "MySQL dump" "$LATEST_SQL" 1>/dev/null if [ "$?" -eq 1 ]; then echo "MySQL header missing, adding to sql file..." (echo "$SQLHEADER" && cat "$LATEST_SQL") > "$LATEST_SQL.tmp" mv "$LATEST_SQL.tmp" "$LATEST_SQL" fi echo "Importing SQL file..." 
mysql -u"$MYSQL_USER" -p"$MYSQL_PASSWORD" "$MYSQL_DATABASE" < "$LATEST_SQL" if ! [ "$?" -eq 0 ]; then echo "Could not import DB, exiting..." exit_clean fi if ! [ -z "$WEB_TEST_DOMAIN" ]; then until search-replace do echo "Search Replace failed, retrying in 30 seconds" sleep 30 done fi stop_mysql echo "Successfully imported and prepared backup sql image." } function init_from_backup { start_mysql OLD_DOMAIN=$(wpc "wp --path=$WEBDIR option get siteurl" | sed "s/.*\/\/\(.*\)/\1/g") echo "Search replacing: $OLD_DOMAIN with $WEB_TEST_DOMAIN" wpc "wp --path=$WEBDIR search-replace --all-tables $OLD_DOMAIN $WEB_TEST_DOMAIN" if ! [ "$?" -eq 0 ]; then return 1 fi stop_mysql echo "Successfully prepared backup files." } if [ -e "$WEBDIR/db_initialized" ]; then echo "DB allready initialized, continuing..." else if [ -z "$CLONE_INIT" ]; then init_from_sql elif ! [ -z "$WEB_TEST_DOMAIN" ]; then init_from_backup fi touch "$WEBDIR/db_initialized" fi
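The order of the `wp search-replace` calls in `search-replace` above matters: the site is first moved onto the test domain, the `www.` variant is collapsed, the URL scheme is forced to match `SSL_ENABLED`, and finally e-mail addresses that the first pass accidentally rewrote are restored to the production domain. A Python sketch of that plan (the domains below are made up):

```python
def replacement_plan(web_domain, test_domain, ssl_enabled):
    plan = [
        (web_domain, test_domain),            # move the site to the test domain
        (f"www.{test_domain}", test_domain),  # collapse the www. variant
    ]
    if ssl_enabled:
        plan.append((f"http://{test_domain}", f"https://{test_domain}"))
    else:
        plan.append((f"https://{test_domain}", f"http://{test_domain}"))
    # Restore e-mail addresses the first rewrite moved to the test domain.
    plan.append((f"@{test_domain}", f"@{web_domain}"))
    return plan

for old, new in replacement_plan("example.com", "test.example.org", True):
    print(f"wp search-replace --all-tables {old} {new}")
```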
<?php

namespace Stevenmaguire\Services\Trello\Exceptions;

use Exception as BaseException;

class Exception extends BaseException
{
    /**
     * Response body
     *
     * @var object
     */
    protected $responseBody;

    /**
     * Retrieves the response body property of exception.
     *
     * @return object
     */
    public function getResponseBody()
    {
        return $this->responseBody;
    }

    /**
     * Updates the response body property of exception.
     *
     * @param object $responseBody
     *
     * @return Exception
     */
    public function setResponseBody($responseBody)
    {
        $this->responseBody = $responseBody;

        return $this;
    }
}
#!/bin/sh
# Demonstrates exported vs. unexported shell variables.
export GVAR="Global Var"   # exported: inherited by child processes
LVAR="Local Var"           # not exported: visible only in this shell
echo $GVAR
echo $LVAR
echo $VAR1                 # intentionally never set: prints an empty line
echo $VAR2                 # intentionally never set: prints an empty line
echo "done."
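The same export-vs-local distinction, demonstrated from Python by spawning a child shell: only the variable placed in the process environment is visible to the child.

```python
import os
import subprocess

os.environ["GVAR"] = "Global Var"   # in the environment: children inherit it
LVAR = "Local Var"                  # plain Python variable: not in the environment

out = subprocess.run(["sh", "-c", 'echo "$GVAR"; echo "$LVAR"'],
                     capture_output=True, text=True)
print(out.stdout)  # first line prints "Global Var", second line is empty
```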
const assert = require('assert') const { Node, LinkedList} = require('../merge-linked/linked-list') const { removenth } = require('./remove') if (require.main === module) { let n1 = new Node(1) let n2 = new Node(2) let n3 = new Node(3) let n4 = new Node(4) let n5 = new Node(5) n1.next = n2 n2.next = n3 n3.next = n4 n4.next = n5 let newhead = removenth(n1, 2) let last = newhead let arr = [] while (last !== null) { arr.push(last.data) last = last.next } assert.deepStrictEqual(arr, [1, 2, 3, 5]) n1 = new Node(1) n2 = new Node(2) n1.next = n2 newhead = removenth(n1, 1) last = newhead arr = [] while (last !== null) { arr.push(last.data) last = last.next } assert.deepStrictEqual(arr, [1]) }
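The `removenth` implementation lives in `./remove` and is not shown, but the assertions pin down its contract: it removes the n-th node counted from the end of the list. One common single-pass way to satisfy that contract, sketched in Python with a two-pointer gap (this is an illustration, not the module's actual code):

```python
class Node:
    def __init__(self, data, next=None):
        self.data = data
        self.next = next

def removenth(head, n):
    dummy = Node(None, head)
    lead = follow = dummy
    for _ in range(n + 1):          # open a gap of n+1 nodes
        lead = lead.next
    while lead is not None:         # slide the gap to the end of the list
        lead = lead.next
        follow = follow.next
    follow.next = follow.next.next  # follow now precedes the n-th from the end
    return dummy.next

head = Node(1, Node(2, Node(3, Node(4, Node(5)))))
head = removenth(head, 2)
out = []
while head is not None:
    out.append(head.data)
    head = head.next
assert out == [1, 2, 3, 5]          # matches the JS test above
```

The dummy node keeps the "remove the head itself" case (the second JS test) from needing special handling.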
module GA ( initialPopulation, ga, nextGen ) where import GABase import Random import Selection import Cross import Mutation import Replace type NextGenerationFunctionGenerator = SelectionMethod -> Int -> CrossMethod -> Double -> MutateMethod -> Double -> ReplaceMethod -> SelectionMethod -> FitnessFunction -> NextGenerationFunction type NextGenerationFunction = SeededPopulation -> SeededPopulation ga :: Int -> EndCheckFunction -> NextGenerationFunction -> SeededPopulation -> Population ga iterations endCheck nextGenF (pop,seed) | iterations==0 = pop | endCheck pop = pop | otherwise = ga (iterations - 1) endCheck nextGenF (nextGenF (pop,seed)) initialPopulation :: ChromosomeGenerator -> Seed -> Int -> SeededPopulation initialPopulation chromosomeGenerator seed popSize = (map chromosomeGenerator (tail seeds), head seeds) where seeds = randSeeds seed (popSize + 1) nextGen :: NextGenerationFunctionGenerator nextGen selectMethod k crossMethod pCross mutateMethod pMutation replaceMethod selectMethod4replace fitness (population, seed) = (replaceMethod population mutated s4 selectMethod4replace fitness, sEnd) where mutated = mutate mutateMethod pMutation s3 crossed crossed = cross crossMethod s2 pCross selected selected = selectMethod population k fitness s1 (s1:s2:s3:s4:sEnd:_) = randSeeds seed 5
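The wiring of `nextGen` reads bottom-up in the Haskell above: selection feeds crossover, crossover feeds mutation, mutation feeds replacement, with fresh seeds threaded through each stage. The same generational loop written imperatively in Python, with a toy one-max problem standing in for real operators (all operators below are illustrative, not ports of the modules above):

```python
import random

def ga(pop, fitness, select, cross, mutate, replace, iterations, end_check):
    """Run up to `iterations` generations, stopping early if end_check fires."""
    for _ in range(iterations):
        if end_check(pop):
            break
        parents  = select(pop, fitness)
        children = [mutate(c) for c in cross(parents)]
        pop      = replace(pop, children, fitness)
    return pop

# Toy one-max problem: maximise the number of 1-bits in an 8-bit chromosome.
fitness = sum
select  = lambda pop, fit: sorted(pop, key=fit, reverse=True)[: len(pop) // 2]
cross   = lambda ps: [a[:4] + b[4:] for a, b in zip(ps, reversed(ps))]
mutate  = lambda c: [bit ^ (random.random() < 0.05) for bit in c]
replace = lambda pop, kids, fit: sorted(pop + kids, key=fit, reverse=True)[: len(pop)]

pop  = [[random.randint(0, 1) for _ in range(8)] for _ in range(20)]
done = lambda p: any(sum(c) == 8 for c in p)
best = max(ga(pop, fitness, select, cross, mutate, replace, 50, done), key=sum)
print(best, sum(best))
```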
package org.nms.anxova.string.process; import java.util.List; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.nms.anxova.process.beans.BaseElement; import org.nms.anxova.process.beans.IElement; import org.nms.anxova.string.process.impl.StringExtractorProcessor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import com.sun.istack.internal.NotNull; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(locations = "classpath:*testContext-stringExtractorProcessorComplexTest.xml") public class StringExtractorProcessComplexTest { /** * Logger. */ private final static Logger logger = LoggerFactory.getLogger(StringExtractorProcessComplexTest.class); @Autowired @NotNull private StringExtractorProcessor processor; @Autowired @NotNull private BaseElement<String> element; @Autowired @NotNull private String resultValue; @Before public void before(){ } @Test public void processTest(){ List<IElement> result = processor.process(element); Assert.assertNotNull("Null result",result); Assert.assertFalse("Empty result",result.isEmpty()); Assert.assertEquals(resultValue, result.get(0).getElement().toString()); // [DBG] for(Object r:result){ System.out.println("Obtained " + r.toString()); } //[ENDDBG] } }
---
layout: post
title: "AI Inside"
posturl: http://www.commitstrip.com/en/2017/06/07/ai-inside/
tags:
- Comics
- Fun
---

{% include post_info_header.md %}

"I knew it. It's just 'IFs'"

<!--more-->

{% include post_info_footer.md %}
 using System; using System.Runtime.InteropServices; namespace AtenSharp.Raw { // High-performance linear algebra operations. internal static class Lapack { // Solve AX=B // // Corresponds to the following TH definition: // // TH_API void THLapack_(gesv)( // int n, int nrhs, scalar_t *a, int lda, int *ipiv, // scalar_t *b, int ldb, int* info); [DllImport ("caffe2")] internal extern static void THByteLapack_gesv( int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Solve a triangular system of the form A * X = B or A^T * X = B // // Corresponds to the following TH definition: // // TH_API void THLapack_(trtrs)( // char uplo, char trans, char diag, int n, int nrhs, // scalar_t *a, int lda, scalar_t *b, int ldb, int* info); [DllImport ("caffe2")] internal extern static void THByteLapack_trtrs( byte uplo, byte trans, byte diag, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Evaluate ||AX-B|| // // Corresponds to the following TH definition: // // TH_API void THLapack_(gels)( // char trans, int m, int n, int nrhs, scalar_t *a, int lda, // scalar_t *b, int ldb, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THByteLapack_gels( byte trans, int m, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Eigenvals // // Corresponds to the following TH definition: // // TH_API void THLapack_(syev)( // char jobz, char uplo, int n, scalar_t *a, int lda, // scalar_t *w, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THByteLapack_syev( byte jobz, byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ w, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Non-sym eigenvals // // Corresponds to the following TH definition: // // TH_API void THLapack_(geev)( // char jobvl, char jobvr, int n, scalar_t *a, int lda, // scalar_t *wr, scalar_t *wi, scalar_t* vl, int ldvl, // scalar_t *vr, int ldvr, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THByteLapack_geev( byte jobvl, byte jobvr, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ wr, IntPtr /* scalar_t* */ wi, IntPtr /* scalar_t* */ vl, int ldvl, IntPtr /* scalar_t* */ vr, int ldvr, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // SVD // // Corresponds to the following TH definition: // // TH_API void THLapack_(gesdd)( // char jobz, int m, int n, scalar_t *a, int lda, scalar_t *s, // scalar_t *u, int ldu, scalar_t *vt, int ldvt, // scalar_t *work, int lwork, int *iwork, int *info); [DllImport ("caffe2")] internal extern static void THByteLapack_gesdd( byte jobz, int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ s, IntPtr /* scalar_t* */ u, int ldu, IntPtr /* scalar_t* */ vt, int ldvt, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ iwork, IntPtr /* int* */ info); // LU decomposition // // Corresponds to the following TH definition: // // TH_API void THLapack_(getrf)( // int m, int n, scalar_t *a, int lda, int *ipiv, int *info); [DllImport ("caffe2")] internal extern static void THByteLapack_getrf( int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* int* */ info); // LU decomposition // // Corresponds to the following TH definition: // // TH_API void 
THLapack_(getrs)( // char trans, int n, int nrhs, scalar_t *a, int lda, // int *ipiv, scalar_t *b, int ldb, int *info); [DllImport ("caffe2")] internal extern static void THByteLapack_getrs( byte trans, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Matrix Inverse // // Corresponds to the following TH definition: // // TH_API void THLapack_(getri)( // int n, scalar_t *a, int lda, int *ipiv, // scalar_t *work, int lwork, int* info); [DllImport ("caffe2")] internal extern static void THByteLapack_getri( int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // *** Positive Definite matrices // Cholesky factorization // // Corresponds to the following TH definition: // // TH_API void THLapack_(potrf)( // char uplo, int n, scalar_t *a, int lda, int *info); [DllImport ("caffe2")] internal extern static void THByteLapack_potrf( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ info); // Matrix inverse based on Cholesky factorization // // Corresponds to the following TH definition: // // TH_API void THLapack_(potri)( // char uplo, int n, scalar_t *a, int lda, int *info); [DllImport ("caffe2")] internal extern static void THByteLapack_potri( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ info); // Solve A*X = B with a symmetric positive definite matrix A // using the Cholesky factorization. // // Corresponds to the following TH definition: // // TH_API void THLapack_(potrs)( // char uplo, int n, int nrhs, scalar_t *a, int lda, // scalar_t *b, int ldb, int *info); [DllImport ("caffe2")] internal extern static void THByteLapack_potrs( char uplo, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Cholesky factorization with complete pivoting. // // Corresponds to the following TH definition: // // TH_API void THLapack_(pstrf)( // char uplo, int n, scalar_t *a, int lda, int *piv, // int *rank, scalar_t tol, scalar_t *work, int *info); [DllImport ("caffe2")] internal extern static void THByteLapack_pstrf( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ piv, IntPtr /* int* */ rank, byte tol, IntPtr /* scalar_t* */ work, IntPtr /* int* */ info); // QR decomposition. 
// // Corresponds to the following TH definition: // // TH_API void THLapack_(geqrf)( // int m, int n, scalar_t *a, int lda, // scalar_t *tau, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THByteLapack_geqrf( int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Build Q from output of geqrf // // Corresponds to the following TH definition: // // TH_API void THLapack_(orgqr)( // int m, int n, int k, scalar_t *a, int lda, // scalar_t *tau, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THByteLapack_orgqr( int m, int n, int k, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Multiply Q with a matrix from output of geqrf // // Corresponds to the following TH definition: // // TH_API void THLapack_(ormqr)( // char side, char trans, int m, int n, int k, // scalar_t *a, int lda, scalar_t *tau, scalar_t *c, int ldc, // scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THByteLapack_ormqr( byte side, byte trans, int m, int n, int k, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ c, int ldc, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Solve AX=B // // Corresponds to the following TH definition: // // TH_API void THLapack_(gesv)( // int n, int nrhs, scalar_t *a, int lda, int *ipiv, // scalar_t *b, int ldb, int* info); [DllImport ("caffe2")] internal extern static void THShortLapack_gesv( int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Solve a triangular system of the form A * X = B or A^T * X = B // // Corresponds to the following TH definition: // // TH_API void THLapack_(trtrs)( // char uplo, char trans, char diag, int n, int nrhs, // scalar_t *a, int lda, scalar_t *b, int ldb, int* info); [DllImport ("caffe2")] internal extern static void THShortLapack_trtrs( byte uplo, byte trans, byte diag, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Evaluate ||AX-B|| // // Corresponds to the following TH definition: // // TH_API void THLapack_(gels)( // char trans, int m, int n, int nrhs, scalar_t *a, int lda, // scalar_t *b, int ldb, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THShortLapack_gels( byte trans, int m, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Eigenvals // // Corresponds to the following TH definition: // // TH_API void THLapack_(syev)( // char jobz, char uplo, int n, scalar_t *a, int lda, // scalar_t *w, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THShortLapack_syev( byte jobz, byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ w, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Non-sym eigenvals // // Corresponds to the following TH definition: // // TH_API void THLapack_(geev)( // char jobvl, char jobvr, int n, scalar_t *a, int lda, // scalar_t *wr, scalar_t *wi, scalar_t* vl, int ldvl, // scalar_t *vr, int ldvr, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THShortLapack_geev( byte jobvl, 
byte jobvr, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ wr, IntPtr /* scalar_t* */ wi, IntPtr /* scalar_t* */ vl, int ldvl, IntPtr /* scalar_t* */ vr, int ldvr, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // SVD // // Corresponds to the following TH definition: // // TH_API void THLapack_(gesdd)( // char jobz, int m, int n, scalar_t *a, int lda, scalar_t *s, // scalar_t *u, int ldu, scalar_t *vt, int ldvt, // scalar_t *work, int lwork, int *iwork, int *info); [DllImport ("caffe2")] internal extern static void THShortLapack_gesdd( byte jobz, int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ s, IntPtr /* scalar_t* */ u, int ldu, IntPtr /* scalar_t* */ vt, int ldvt, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ iwork, IntPtr /* int* */ info); // LU decomposition // // Corresponds to the following TH definition: // // TH_API void THLapack_(getrf)( // int m, int n, scalar_t *a, int lda, int *ipiv, int *info); [DllImport ("caffe2")] internal extern static void THShortLapack_getrf( int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* int* */ info); // LU decomposition // // Corresponds to the following TH definition: // // TH_API void THLapack_(getrs)( // char trans, int n, int nrhs, scalar_t *a, int lda, // int *ipiv, scalar_t *b, int ldb, int *info); [DllImport ("caffe2")] internal extern static void THShortLapack_getrs( byte trans, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Matrix Inverse // // Corresponds to the following TH definition: // // TH_API void THLapack_(getri)( // int n, scalar_t *a, int lda, int *ipiv, // scalar_t *work, int lwork, int* info); [DllImport ("caffe2")] internal extern static void THShortLapack_getri( int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // *** Positive Definite matrices // Cholesky factorization // // Corresponds to the following TH definition: // // TH_API void THLapack_(potrf)( // char uplo, int n, scalar_t *a, int lda, int *info); [DllImport ("caffe2")] internal extern static void THShortLapack_potrf( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ info); // Matrix inverse based on Cholesky factorization // // Corresponds to the following TH definition: // // TH_API void THLapack_(potri)( // char uplo, int n, scalar_t *a, int lda, int *info); [DllImport ("caffe2")] internal extern static void THShortLapack_potri( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ info); // Solve A*X = B with a symmetric positive definite matrix A // using the Cholesky factorization. // // Corresponds to the following TH definition: // // TH_API void THLapack_(potrs)( // char uplo, int n, int nrhs, scalar_t *a, int lda, // scalar_t *b, int ldb, int *info); [DllImport ("caffe2")] internal extern static void THShortLapack_potrs( char uplo, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Cholesky factorization with complete pivoting. 
// // Corresponds to the following TH definition: // // TH_API void THLapack_(pstrf)( // char uplo, int n, scalar_t *a, int lda, int *piv, // int *rank, scalar_t tol, scalar_t *work, int *info); [DllImport ("caffe2")] internal extern static void THShortLapack_pstrf( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ piv, IntPtr /* int* */ rank, short tol, IntPtr /* scalar_t* */ work, IntPtr /* int* */ info); // QR decomposition. // // Corresponds to the following TH definition: // // TH_API void THLapack_(geqrf)( // int m, int n, scalar_t *a, int lda, // scalar_t *tau, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THShortLapack_geqrf( int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Build Q from output of geqrf // // Corresponds to the following TH definition: // // TH_API void THLapack_(orgqr)( // int m, int n, int k, scalar_t *a, int lda, // scalar_t *tau, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THShortLapack_orgqr( int m, int n, int k, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Multiply Q with a matrix from output of geqrf // // Corresponds to the following TH definition: // // TH_API void THLapack_(ormqr)( // char side, char trans, int m, int n, int k, // scalar_t *a, int lda, scalar_t *tau, scalar_t *c, int ldc, // scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THShortLapack_ormqr( byte side, byte trans, int m, int n, int k, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ c, int ldc, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Solve AX=B // // Corresponds to the following TH definition: // // TH_API void THLapack_(gesv)( // int n, int nrhs, scalar_t *a, int lda, int *ipiv, // scalar_t *b, int ldb, int* info); [DllImport ("caffe2")] internal extern static void THIntLapack_gesv( int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Solve a triangular system of the form A * X = B or A^T * X = B // // Corresponds to the following TH definition: // // TH_API void THLapack_(trtrs)( // char uplo, char trans, char diag, int n, int nrhs, // scalar_t *a, int lda, scalar_t *b, int ldb, int* info); [DllImport ("caffe2")] internal extern static void THIntLapack_trtrs( byte uplo, byte trans, byte diag, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Evaluate ||AX-B|| // // Corresponds to the following TH definition: // // TH_API void THLapack_(gels)( // char trans, int m, int n, int nrhs, scalar_t *a, int lda, // scalar_t *b, int ldb, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THIntLapack_gels( byte trans, int m, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Eigenvals // // Corresponds to the following TH definition: // // TH_API void THLapack_(syev)( // char jobz, char uplo, int n, scalar_t *a, int lda, // scalar_t *w, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THIntLapack_syev( byte jobz, byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* 
scalar_t* */ w, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Non-sym eigenvals // // Corresponds to the following TH definition: // // TH_API void THLapack_(geev)( // char jobvl, char jobvr, int n, scalar_t *a, int lda, // scalar_t *wr, scalar_t *wi, scalar_t* vl, int ldvl, // scalar_t *vr, int ldvr, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THIntLapack_geev( byte jobvl, byte jobvr, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ wr, IntPtr /* scalar_t* */ wi, IntPtr /* scalar_t* */ vl, int ldvl, IntPtr /* scalar_t* */ vr, int ldvr, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // SVD // // Corresponds to the following TH definition: // // TH_API void THLapack_(gesdd)( // char jobz, int m, int n, scalar_t *a, int lda, scalar_t *s, // scalar_t *u, int ldu, scalar_t *vt, int ldvt, // scalar_t *work, int lwork, int *iwork, int *info); [DllImport ("caffe2")] internal extern static void THIntLapack_gesdd( byte jobz, int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ s, IntPtr /* scalar_t* */ u, int ldu, IntPtr /* scalar_t* */ vt, int ldvt, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ iwork, IntPtr /* int* */ info); // LU decomposition // // Corresponds to the following TH definition: // // TH_API void THLapack_(getrf)( // int m, int n, scalar_t *a, int lda, int *ipiv, int *info); [DllImport ("caffe2")] internal extern static void THIntLapack_getrf( int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* int* */ info); // LU decomposition // // Corresponds to the following TH definition: // // TH_API void THLapack_(getrs)( // char trans, int n, int nrhs, scalar_t *a, int lda, // int *ipiv, scalar_t *b, int ldb, int *info); [DllImport ("caffe2")] internal extern static void THIntLapack_getrs( byte trans, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Matrix Inverse // // Corresponds to the following TH definition: // // TH_API void THLapack_(getri)( // int n, scalar_t *a, int lda, int *ipiv, // scalar_t *work, int lwork, int* info); [DllImport ("caffe2")] internal extern static void THIntLapack_getri( int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // *** Positive Definite matrices // Cholesky factorization // // Corresponds to the following TH definition: // // TH_API void THLapack_(potrf)( // char uplo, int n, scalar_t *a, int lda, int *info); [DllImport ("caffe2")] internal extern static void THIntLapack_potrf( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ info); // Matrix inverse based on Cholesky factorization // // Corresponds to the following TH definition: // // TH_API void THLapack_(potri)( // char uplo, int n, scalar_t *a, int lda, int *info); [DllImport ("caffe2")] internal extern static void THIntLapack_potri( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ info); // Solve A*X = B with a symmetric positive definite matrix A // using the Cholesky factorization. 
// // Corresponds to the following TH definition: // // TH_API void THLapack_(potrs)( // char uplo, int n, int nrhs, scalar_t *a, int lda, // scalar_t *b, int ldb, int *info); [DllImport ("caffe2")] internal extern static void THIntLapack_potrs( char uplo, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Cholesky factorization with complete pivoting. // // Corresponds to the following TH definition: // // TH_API void THLapack_(pstrf)( // char uplo, int n, scalar_t *a, int lda, int *piv, // int *rank, scalar_t tol, scalar_t *work, int *info); [DllImport ("caffe2")] internal extern static void THIntLapack_pstrf( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ piv, IntPtr /* int* */ rank, int tol, IntPtr /* scalar_t* */ work, IntPtr /* int* */ info); // QR decomposition. // // Corresponds to the following TH definition: // // TH_API void THLapack_(geqrf)( // int m, int n, scalar_t *a, int lda, // scalar_t *tau, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THIntLapack_geqrf( int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Build Q from output of geqrf // // Corresponds to the following TH definition: // // TH_API void THLapack_(orgqr)( // int m, int n, int k, scalar_t *a, int lda, // scalar_t *tau, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THIntLapack_orgqr( int m, int n, int k, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Multiply Q with a matrix from output of geqrf // // Corresponds to the following TH definition: // // TH_API void THLapack_(ormqr)( // char side, char trans, int m, int n, int k, // scalar_t *a, int lda, scalar_t *tau, scalar_t *c, int ldc, // scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THIntLapack_ormqr( byte side, byte trans, int m, int n, int k, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ c, int ldc, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Solve AX=B // // Corresponds to the following TH definition: // // TH_API void THLapack_(gesv)( // int n, int nrhs, scalar_t *a, int lda, int *ipiv, // scalar_t *b, int ldb, int* info); [DllImport ("caffe2")] internal extern static void THLongLapack_gesv( int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Solve a triangular system of the form A * X = B or A^T * X = B // // Corresponds to the following TH definition: // // TH_API void THLapack_(trtrs)( // char uplo, char trans, char diag, int n, int nrhs, // scalar_t *a, int lda, scalar_t *b, int ldb, int* info); [DllImport ("caffe2")] internal extern static void THLongLapack_trtrs( byte uplo, byte trans, byte diag, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Evaluate ||AX-B|| // // Corresponds to the following TH definition: // // TH_API void THLapack_(gels)( // char trans, int m, int n, int nrhs, scalar_t *a, int lda, // scalar_t *b, int ldb, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THLongLapack_gels( byte trans, int m, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, 
IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Eigenvals // // Corresponds to the following TH definition: // // TH_API void THLapack_(syev)( // char jobz, char uplo, int n, scalar_t *a, int lda, // scalar_t *w, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THLongLapack_syev( byte jobz, byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ w, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Non-sym eigenvals // // Corresponds to the following TH definition: // // TH_API void THLapack_(geev)( // char jobvl, char jobvr, int n, scalar_t *a, int lda, // scalar_t *wr, scalar_t *wi, scalar_t* vl, int ldvl, // scalar_t *vr, int ldvr, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THLongLapack_geev( byte jobvl, byte jobvr, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ wr, IntPtr /* scalar_t* */ wi, IntPtr /* scalar_t* */ vl, int ldvl, IntPtr /* scalar_t* */ vr, int ldvr, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // SVD // // Corresponds to the following TH definition: // // TH_API void THLapack_(gesdd)( // char jobz, int m, int n, scalar_t *a, int lda, scalar_t *s, // scalar_t *u, int ldu, scalar_t *vt, int ldvt, // scalar_t *work, int lwork, int *iwork, int *info); [DllImport ("caffe2")] internal extern static void THLongLapack_gesdd( byte jobz, int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ s, IntPtr /* scalar_t* */ u, int ldu, IntPtr /* scalar_t* */ vt, int ldvt, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ iwork, IntPtr /* int* */ info); // LU decomposition // // Corresponds to the following TH definition: // // TH_API void THLapack_(getrf)( // int m, int n, scalar_t *a, int lda, int *ipiv, int *info); [DllImport ("caffe2")] internal extern static void THLongLapack_getrf( int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* int* */ info); // LU decomposition // // Corresponds to the following TH definition: // // TH_API void THLapack_(getrs)( // char trans, int n, int nrhs, scalar_t *a, int lda, // int *ipiv, scalar_t *b, int ldb, int *info); [DllImport ("caffe2")] internal extern static void THLongLapack_getrs( byte trans, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Matrix Inverse // // Corresponds to the following TH definition: // // TH_API void THLapack_(getri)( // int n, scalar_t *a, int lda, int *ipiv, // scalar_t *work, int lwork, int* info); [DllImport ("caffe2")] internal extern static void THLongLapack_getri( int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // *** Positive Definite matrices // Cholesky factorization // // Corresponds to the following TH definition: // // TH_API void THLapack_(potrf)( // char uplo, int n, scalar_t *a, int lda, int *info); [DllImport ("caffe2")] internal extern static void THLongLapack_potrf( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ info); // Matrix inverse based on Cholesky factorization // // Corresponds to the following TH definition: // // TH_API void THLapack_(potri)( // char uplo, int n, scalar_t *a, int lda, int *info); [DllImport ("caffe2")] internal extern static void THLongLapack_potri( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ info); // Solve A*X = B with 
a symmetric positive definite matrix A // using the Cholesky factorization. // // Corresponds to the following TH definition: // // TH_API void THLapack_(potrs)( // char uplo, int n, int nrhs, scalar_t *a, int lda, // scalar_t *b, int ldb, int *info); [DllImport ("caffe2")] internal extern static void THLongLapack_potrs( char uplo, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Cholesky factorization with complete pivoting. // // Corresponds to the following TH definition: // // TH_API void THLapack_(pstrf)( // char uplo, int n, scalar_t *a, int lda, int *piv, // int *rank, scalar_t tol, scalar_t *work, int *info); [DllImport ("caffe2")] internal extern static void THLongLapack_pstrf( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ piv, IntPtr /* int* */ rank, long tol, IntPtr /* scalar_t* */ work, IntPtr /* int* */ info); // QR decomposition. // // Corresponds to the following TH definition: // // TH_API void THLapack_(geqrf)( // int m, int n, scalar_t *a, int lda, // scalar_t *tau, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THLongLapack_geqrf( int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Build Q from output of geqrf // // Corresponds to the following TH definition: // // TH_API void THLapack_(orgqr)( // int m, int n, int k, scalar_t *a, int lda, // scalar_t *tau, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THLongLapack_orgqr( int m, int n, int k, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Multiply Q with a matrix from output of geqrf // // Corresponds to the following TH definition: // // TH_API void THLapack_(ormqr)( // char side, char trans, int m, int n, int k, // scalar_t *a, int lda, scalar_t *tau, scalar_t *c, int ldc, // scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THLongLapack_ormqr( byte side, byte trans, int m, int n, int k, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ c, int ldc, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Solve AX=B // // Corresponds to the following TH definition: // // TH_API void THLapack_(gesv)( // int n, int nrhs, scalar_t *a, int lda, int *ipiv, // scalar_t *b, int ldb, int* info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_gesv( int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Solve a triangular system of the form A * X = B or A^T * X = B // // Corresponds to the following TH definition: // // TH_API void THLapack_(trtrs)( // char uplo, char trans, char diag, int n, int nrhs, // scalar_t *a, int lda, scalar_t *b, int ldb, int* info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_trtrs( byte uplo, byte trans, byte diag, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Evaluate ||AX-B|| // // Corresponds to the following TH definition: // // TH_API void THLapack_(gels)( // char trans, int m, int n, int nrhs, scalar_t *a, int lda, // scalar_t *b, int ldb, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_gels( byte trans, int m, int 
n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Eigenvals // // Corresponds to the following TH definition: // // TH_API void THLapack_(syev)( // char jobz, char uplo, int n, scalar_t *a, int lda, // scalar_t *w, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_syev( byte jobz, byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ w, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Non-sym eigenvals // // Corresponds to the following TH definition: // // TH_API void THLapack_(geev)( // char jobvl, char jobvr, int n, scalar_t *a, int lda, // scalar_t *wr, scalar_t *wi, scalar_t* vl, int ldvl, // scalar_t *vr, int ldvr, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_geev( byte jobvl, byte jobvr, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ wr, IntPtr /* scalar_t* */ wi, IntPtr /* scalar_t* */ vl, int ldvl, IntPtr /* scalar_t* */ vr, int ldvr, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // SVD // // Corresponds to the following TH definition: // // TH_API void THLapack_(gesdd)( // char jobz, int m, int n, scalar_t *a, int lda, scalar_t *s, // scalar_t *u, int ldu, scalar_t *vt, int ldvt, // scalar_t *work, int lwork, int *iwork, int *info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_gesdd( byte jobz, int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ s, IntPtr /* scalar_t* */ u, int ldu, IntPtr /* scalar_t* */ vt, int ldvt, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ iwork, IntPtr /* int* */ info); // LU decomposition // // Corresponds to the following TH definition: // // TH_API void THLapack_(getrf)( // int m, int n, scalar_t *a, int lda, int *ipiv, int *info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_getrf( int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* int* */ info); // LU decomposition // // Corresponds to the following TH definition: // // TH_API void THLapack_(getrs)( // char trans, int n, int nrhs, scalar_t *a, int lda, // int *ipiv, scalar_t *b, int ldb, int *info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_getrs( byte trans, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Matrix Inverse // // Corresponds to the following TH definition: // // TH_API void THLapack_(getri)( // int n, scalar_t *a, int lda, int *ipiv, // scalar_t *work, int lwork, int* info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_getri( int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // *** Positive Definite matrices // Cholesky factorization // // Corresponds to the following TH definition: // // TH_API void THLapack_(potrf)( // char uplo, int n, scalar_t *a, int lda, int *info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_potrf( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ info); // Matrix inverse based on Cholesky factorization // // Corresponds to the following TH definition: // // TH_API void THLapack_(potri)( // char uplo, int n, scalar_t *a, int lda, int *info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_potri( 
byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ info); // Solve A*X = B with a symmetric positive definite matrix A // using the Cholesky factorization. // // Corresponds to the following TH definition: // // TH_API void THLapack_(potrs)( // char uplo, int n, int nrhs, scalar_t *a, int lda, // scalar_t *b, int ldb, int *info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_potrs( char uplo, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Cholesky factorization with complete pivoting. // // Corresponds to the following TH definition: // // TH_API void THLapack_(pstrf)( // char uplo, int n, scalar_t *a, int lda, int *piv, // int *rank, scalar_t tol, scalar_t *work, int *info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_pstrf( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ piv, IntPtr /* int* */ rank, double tol, IntPtr /* scalar_t* */ work, IntPtr /* int* */ info); // QR decomposition. // // Corresponds to the following TH definition: // // TH_API void THLapack_(geqrf)( // int m, int n, scalar_t *a, int lda, // scalar_t *tau, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_geqrf( int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Build Q from output of geqrf // // Corresponds to the following TH definition: // // TH_API void THLapack_(orgqr)( // int m, int n, int k, scalar_t *a, int lda, // scalar_t *tau, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_orgqr( int m, int n, int k, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Multiply Q with a matrix from output of geqrf // // Corresponds to the following TH definition: // // TH_API void THLapack_(ormqr)( // char side, char trans, int m, int n, int k, // scalar_t *a, int lda, scalar_t *tau, scalar_t *c, int ldc, // scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THDoubleLapack_ormqr( byte side, byte trans, int m, int n, int k, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ c, int ldc, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Solve AX=B // // Corresponds to the following TH definition: // // TH_API void THLapack_(gesv)( // int n, int nrhs, scalar_t *a, int lda, int *ipiv, // scalar_t *b, int ldb, int* info); [DllImport ("caffe2")] internal extern static void THFloatLapack_gesv( int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Solve a triangular system of the form A * X = B or A^T * X = B // // Corresponds to the following TH definition: // // TH_API void THLapack_(trtrs)( // char uplo, char trans, char diag, int n, int nrhs, // scalar_t *a, int lda, scalar_t *b, int ldb, int* info); [DllImport ("caffe2")] internal extern static void THFloatLapack_trtrs( byte uplo, byte trans, byte diag, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Evaluate ||AX-B|| // // Corresponds to the following TH definition: // // TH_API void THLapack_(gels)( // char trans, int m, int n, int nrhs, scalar_t *a, int lda, // scalar_t *b, int ldb, scalar_t *work, int 
lwork, int *info); [DllImport ("caffe2")] internal extern static void THFloatLapack_gels( byte trans, int m, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Eigenvals // // Corresponds to the following TH definition: // // TH_API void THLapack_(syev)( // char jobz, char uplo, int n, scalar_t *a, int lda, // scalar_t *w, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THFloatLapack_syev( byte jobz, byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ w, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Non-sym eigenvals // // Corresponds to the following TH definition: // // TH_API void THLapack_(geev)( // char jobvl, char jobvr, int n, scalar_t *a, int lda, // scalar_t *wr, scalar_t *wi, scalar_t* vl, int ldvl, // scalar_t *vr, int ldvr, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THFloatLapack_geev( byte jobvl, byte jobvr, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ wr, IntPtr /* scalar_t* */ wi, IntPtr /* scalar_t* */ vl, int ldvl, IntPtr /* scalar_t* */ vr, int ldvr, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // SVD // // Corresponds to the following TH definition: // // TH_API void THLapack_(gesdd)( // char jobz, int m, int n, scalar_t *a, int lda, scalar_t *s, // scalar_t *u, int ldu, scalar_t *vt, int ldvt, // scalar_t *work, int lwork, int *iwork, int *info); [DllImport ("caffe2")] internal extern static void THFloatLapack_gesdd( byte jobz, int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ s, IntPtr /* scalar_t* */ u, int ldu, IntPtr /* scalar_t* */ vt, int ldvt, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ iwork, IntPtr /* int* */ info); // LU decomposition // // Corresponds to the following TH definition: // // TH_API void THLapack_(getrf)( // int m, int n, scalar_t *a, int lda, int *ipiv, int *info); [DllImport ("caffe2")] internal extern static void THFloatLapack_getrf( int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* int* */ info); // LU decomposition // // Corresponds to the following TH definition: // // TH_API void THLapack_(getrs)( // char trans, int n, int nrhs, scalar_t *a, int lda, // int *ipiv, scalar_t *b, int ldb, int *info); [DllImport ("caffe2")] internal extern static void THFloatLapack_getrs( byte trans, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Matrix Inverse // // Corresponds to the following TH definition: // // TH_API void THLapack_(getri)( // int n, scalar_t *a, int lda, int *ipiv, // scalar_t *work, int lwork, int* info); [DllImport ("caffe2")] internal extern static void THFloatLapack_getri( int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ ipiv, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // *** Positive Definite matrices // Cholesky factorization // // Corresponds to the following TH definition: // // TH_API void THLapack_(potrf)( // char uplo, int n, scalar_t *a, int lda, int *info); [DllImport ("caffe2")] internal extern static void THFloatLapack_potrf( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ info); // Matrix inverse based on Cholesky factorization // // Corresponds to the following TH definition: // // TH_API void THLapack_(potri)( // char uplo, int n, 
scalar_t *a, int lda, int *info); [DllImport ("caffe2")] internal extern static void THFloatLapack_potri( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ info); // Solve A*X = B with a symmetric positive definite matrix A // using the Cholesky factorization. // // Corresponds to the following TH definition: // // TH_API void THLapack_(potrs)( // char uplo, int n, int nrhs, scalar_t *a, int lda, // scalar_t *b, int ldb, int *info); [DllImport ("caffe2")] internal extern static void THFloatLapack_potrs( char uplo, int n, int nrhs, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ b, int ldb, IntPtr /* int* */ info); // Cholesky factorization with complete pivoting. // // Corresponds to the following TH definition: // // TH_API void THLapack_(pstrf)( // char uplo, int n, scalar_t *a, int lda, int *piv, // int *rank, scalar_t tol, scalar_t *work, int *info); [DllImport ("caffe2")] internal extern static void THFloatLapack_pstrf( byte uplo, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* int* */ piv, IntPtr /* int* */ rank, float tol, IntPtr /* scalar_t* */ work, IntPtr /* int* */ info); // QR decomposition. // // Corresponds to the following TH definition: // // TH_API void THLapack_(geqrf)( // int m, int n, scalar_t *a, int lda, // scalar_t *tau, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THFloatLapack_geqrf( int m, int n, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Build Q from output of geqrf // // Corresponds to the following TH definition: // // TH_API void THLapack_(orgqr)( // int m, int n, int k, scalar_t *a, int lda, // scalar_t *tau, scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THFloatLapack_orgqr( int m, int n, int k, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); // Multiply Q with a matrix from output of geqrf // // Corresponds to the following TH definition: // // TH_API void THLapack_(ormqr)( // char side, char trans, int m, int n, int k, // scalar_t *a, int lda, scalar_t *tau, scalar_t *c, int ldc, // scalar_t *work, int lwork, int *info); [DllImport ("caffe2")] internal extern static void THFloatLapack_ormqr( byte side, byte trans, int m, int n, int k, IntPtr /* scalar_t* */ a, int lda, IntPtr /* scalar_t* */ tau, IntPtr /* scalar_t* */ c, int ldc, IntPtr /* scalar_t* */ work, int lwork, IntPtr /* int* */ info); } }
(in-package :bknr-user)

(define-persistent-class question ()
  ((name :read
         :index-type string-unique-index
         :index-reader question-with-name
         :index-values all-questions)
   (quizz :read :initform nil
          :index-type hash-index
          :index-reader quizz-questions)
   (question :update)
   (answers :update :initform nil)))

(defun answer-to-keyword (answer)
  (setf answer (regex-replace "^(\\s+)" answer ""))
  (setf answer (regex-replace "(\\s+)$" answer ""))
  (setf answer (regex-replace-all "([^a-zA-Z0-9\\s-]+)" answer ""))
  (make-keyword-from-string (regex-replace-all "(\\s+)" answer "-")))

(defmethod answer-correct-p ((question question) answer)
  (let ((keyword (answer-to-keyword answer)))
    (member keyword (question-answers question))))

(define-persistent-class multiple-choice-question (question)
  ((possible-answers :update :initform nil)))

(define-persistent-class quizz ()
  ((name :update
         :index-type string-unique-index
         :index-reader quizz-with-name
         :index-values all-quizz)
   (description :update :initform "")
   (keywords :update :transient t
             :index-type hash-list-index
             :index-reader get-keyword-quizz
             :index-keys all-quizz-keywords)
   (best-scores :update :initform nil)))
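;; Usage sketch (not part of the original source): ANSWER-TO-KEYWORD trims
;; surrounding whitespace, strips punctuation, and collapses inner whitespace
;; to dashes before interning, so free-form user input can be compared
;; against the stored answer keywords. Assuming MAKE-KEYWORD-FROM-STRING
;; (defined elsewhere in this codebase) upcases and interns into the keyword
;; package, one would expect:
;;
;;   (answer-to-keyword "  New York! ")   ;=> :NEW-YORK
;;   (answer-correct-p question "new york")
;;   ;; true iff :NEW-YORK is among (question-answers question)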
#!/bin/bash

rm -rf consul
rm -f nohup.out
mkdir consul

print_help() {
    cat <<EOF
Usage: cmd bind_ip
example: ./consul_agent.sh 192.168.1.123
EOF
}

main() {
    if [ $# -lt 1 ]; then
        print_help
        return
    fi
    bind_ip=$1
    nohup consul agent -server -ui -bootstrap-expect=1 -node=s1 -bind=${bind_ip} -client=0.0.0.0 -data-dir=/var/lib/consul -pid-file=consul/consul.pid -log-file=consul/consul.log &
}

main "$@"
import { createComponent } from '../../utils'

const labelClassModifiers = {
  small: 'label-sm',
  large: 'label-lg',
  inline: 'form-inline',
}

const Label = createComponent('label', 'form-label', labelClassModifiers)

export default Label
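// Usage sketch (not part of the original source): how a component built by
// this createComponent helper is typically consumed. Exactly how the
// modifier keys (small, large, inline) are activated depends on the helper's
// implementation (defined elsewhere in this repo), so the boolean-prop
// convention below is an assumption.
//
//   import Label from './Label'
//
//   const Field = () => (
//     <Label small htmlFor="email">E-mail</Label>
//   )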
#ifdef CH_LANG_CC /* * _______ __ * / ___/ / ___ __ _ / / ___ * / /__/ _ \/ _ \/ V \/ _ \/ _ \ * \___/_//_/\___/_/_/_/_.__/\___/ * Please refer to Copyright.txt, in Chombo's root directory. */ #endif #include <cmath> #include <cstdio> #include <iostream> #include "ParmParse.H" #include "LoadBalance.H" #include "BRMeshRefine.H" #include "EBIndexSpace.H" #include "EBISLayout.H" #include "EBCellFactory.H" #include "CONSTANTS.H" #include "PoissonUtilities.H" #include "EBAMRPoissonOp.H" #include "EBFABView.H" #include "EBDebugDump.H" #include "EBGeomDebugDump.H" #include "DebugDump.H" #include "EBAMRDataOps.H" #include "MonomialIF.H" #include "WrappedGShop.H" #include "MomentIterator.H" #include "Factorial.H" #include "CH_Attach.H" /******/ #include "CutCellMoments.H" /******/ enum TestType { VOL_MOM = 0, EB_MOM, EB_NORMAL_MOM, NUM_TESTS }; /************/ void putIDMIntoFAB(EBCellFAB & a_datum, IndMomSpaceDim & a_testOrder, const VolIndex & a_vof) { MomentIterator<SpaceDim, CH_EBIS_ORDER> volmomit; for(volmomit.reset(); volmomit.ok(); ++volmomit) { int ivar = a_testOrder.indexOf(volmomit()); a_datum(a_vof, ivar) = a_testOrder[volmomit()]; } } /************/ void generateData(LevelData<EBCellFAB> & a_datum, const DisjointBoxLayout & a_grids, const EBISLayout & a_ebisl, const GridParameters & a_params, const TestType & a_type, const int & a_idir, const bool & a_shiftToCoar) { for(DataIterator dit = a_grids.dataIterator(); dit.ok(); ++dit) { IntVectSet ivs; if(a_type == VOL_MOM) { ivs = IntVectSet(a_grids[dit()]); } else { ivs = a_ebisl[dit()].getIrregIVS(a_grids[dit()]); } for(VoFIterator vofit(ivs, a_ebisl[dit()].getEBGraph()); vofit.ok(); ++vofit) { IndMomSpaceDim ebisOrder; if(a_type == VOL_MOM) { ebisOrder = a_ebisl[dit()].getEBData().getVolumeMoments(vofit()); } else if(a_type == EB_MOM) { ebisOrder = a_ebisl[dit()].getEBData().getEBMoments(vofit()); } else if(a_type == EB_NORMAL_MOM) { ebisOrder = a_ebisl[dit()].getEBData().getEBNormalMoments(vofit(), a_idir); } if(a_shiftToCoar) { Real fineDx = a_params.coarsestDx; Real coarDx = 2.*fineDx; VolIndex fineVoF= vofit(); IntVect coarIV = coarsen(fineVoF.gridIndex(), 2); VolIndex coarVoF(coarIV, 0); RealVect fineLoc = EBArith::getVoFLocation(fineVoF, fineDx*RealVect::Unit, RealVect::Zero); RealVect coarLoc = EBArith::getVoFLocation(coarVoF, coarDx*RealVect::Unit, RealVect::Zero); RealVect shiftRV = fineLoc - coarLoc; RvSpaceDim shiftitm; EBArith::convertToITM(shiftitm, shiftRV); ebisOrder.shift(shiftitm); } putIDMIntoFAB(a_datum[dit()], ebisOrder, vofit()); } } } /*******/ void sumFineValues(Vector<Real> & a_fineSum, const EBCellFAB & a_solutFine, const Vector<VolIndex> & a_fineVoFs) { CH_assert(a_fineSum.size() == a_solutFine.nComp()); for(int ivar = 0; ivar < a_solutFine.nComp(); ivar++) { Real value = 0; for(int ivof = 0; ivof < a_fineVoFs.size(); ivof++) { value += a_solutFine(a_fineVoFs[ivof], ivar); } a_fineSum[ivar] = value; } } /*******/ void sumFineMinusCoarse(LevelData<EBCellFAB> & a_errorMedi, const LevelData<EBCellFAB> & a_solutMedi, const DisjointBoxLayout & a_gridsMedi, const EBISLayout & a_ebislMedi, const GridParameters & a_paramMedi, const LevelData<EBCellFAB> & a_solutFine, const DisjointBoxLayout & a_gridsFine, const EBISLayout & a_ebislFine, const GridParameters & a_paramFine, const TestType& a_type) { //fine has to be shifted to coarse location int ncomp = a_solutMedi.nComp(); for(DataIterator dit = a_gridsMedi.dataIterator(); dit.ok(); ++dit) { a_errorMedi[dit()].setVal(0.); IntVectSet ivs; if(a_type == VOL_MOM) { ivs = 
IntVectSet(a_gridsMedi[dit()]); } else { ivs = a_ebislMedi[dit()].getIrregIVS(a_gridsMedi[dit()]); } for(VoFIterator vofit(ivs, a_ebislMedi[dit()].getEBGraph()); vofit.ok(); ++vofit) { const VolIndex& vofMedi = vofit(); //because this is a screwy geometry test, the graph coarsening can be weird here // small cells on the coarse level can be covered where they were not on the finer level EBISBox ebisBoxMedi = a_errorMedi[dit()].getEBISBox(); if(!ebisBoxMedi.isCovered(vofMedi.gridIndex())) { Vector<VolIndex> fineVoFs = a_ebislMedi[dit()].refine(vofMedi); Vector<Real> fineSum(ncomp, 0.); //these are moments so we just sum the values (sum of integrals = integral of sum) sumFineValues(fineSum, a_solutFine[dit()], fineVoFs); for(int ivar = 0; ivar < ncomp; ivar++) { a_errorMedi[dit()](vofMedi, ivar) = a_solutMedi[dit()](vofMedi, ivar) - fineSum[ivar]; } } } } } /*****/ void solutionErrorTest(const GridParameters & a_paramFine, const GridParameters & a_paramMedi, const GridParameters & a_paramCoar, const DisjointBoxLayout & a_gridsFine, const DisjointBoxLayout & a_gridsMedi, const DisjointBoxLayout & a_gridsCoar, const EBISLayout & a_ebislFine, const EBISLayout & a_ebislMedi, const EBISLayout & a_ebislCoar, const EBISLayout & a_ebislCoFi, const EBISLayout & a_ebislCoMe, const TestType & a_type, const int & a_idir) { IndMomSpaceDim idmproxy; int nvar = idmproxy.size(); string prefix; if(a_type == VOL_MOM) { prefix = string("volume_moment"); } else if(a_type == EB_MOM) { prefix = string("eb_moment"); } else if(a_type == EB_NORMAL_MOM) { prefix = string("ebNormalMoment_") + convertInt(a_idir); } else { MayDay::Error("bogus type"); } EBCellFactory factFine(a_ebislFine); EBCellFactory factMedi(a_ebislMedi); EBCellFactory factCoar(a_ebislCoar); LevelData<EBCellFAB> solutFine(a_gridsFine, nvar, IntVect::Zero, factFine); LevelData<EBCellFAB> solutMedi(a_gridsMedi, nvar, IntVect::Zero, factMedi); LevelData<EBCellFAB> solutCoar(a_gridsCoar, nvar, IntVect::Zero, factCoar); LevelData<EBCellFAB> errorMedi(a_gridsMedi, nvar, IntVect::Zero, factMedi); LevelData<EBCellFAB> errorCoar(a_gridsCoar, nvar, IntVect::Zero, factCoar); EBLevelDataOps::setToZero(solutFine); EBLevelDataOps::setToZero(solutMedi); EBLevelDataOps::setToZero(solutCoar); //fine has to be shifted to coarse location bool shiftToCoar; //need to shift to coarse locations so this will be at the same location as the coarse shiftToCoar = true; pout() << "generating fine solution" << endl; generateData(solutFine, a_gridsFine, a_ebislFine, a_paramFine, a_type, a_idir, shiftToCoar); //for this bit, medi is the coarse solution so no shifting shiftToCoar = false; pout() << "generating medi solution" << endl; generateData(solutMedi, a_gridsMedi, a_ebislMedi, a_paramMedi, a_type, a_idir, shiftToCoar); pout() << "generating medi error from medi and fine solutions" << endl; sumFineMinusCoarse(errorMedi, solutMedi, a_gridsMedi, a_ebislCoFi, a_paramMedi, solutFine, a_gridsFine, a_ebislFine, a_paramFine, a_type); //for this bit, medi is the finer solution so it has to get shifted shiftToCoar = true; pout() << "generating medi solution" << endl; generateData(solutMedi, a_gridsMedi, a_ebislMedi, a_paramMedi, a_type, a_idir, shiftToCoar); //this *is* the coarse soltuion so no shift shiftToCoar = false; pout() << "generating coar solution" << endl; generateData(solutCoar, a_gridsCoar, a_ebislCoar, a_paramCoar, a_type, a_idir, shiftToCoar); pout() << "generating coar error from medi and coar solutions" << endl; sumFineMinusCoarse(errorCoar, solutCoar, a_gridsCoar, 
a_ebislCoMe, a_paramCoar, solutMedi, a_gridsMedi, a_ebislMedi, a_paramMedi, a_type); Vector<Real> orders; Box domCoar = a_paramCoar.coarsestDomain.domainBox(); if(a_type == VOL_MOM) { Vector<string> names(nvar); getMomentNames<CH_EBIS_ORDER>(names, string("m")); //the 1 is a verbosity flag. leave it at one. trust me. EBArith::compareError(orders, errorMedi, errorCoar, a_gridsMedi, a_gridsCoar, a_ebislMedi, a_ebislCoar, domCoar, 1, NULL, names, prefix); } else if((a_type == EB_MOM) || (a_type == EB_NORMAL_MOM)) { Vector<string> names(nvar); getMomentNames<CH_EBIS_ORDER>(names, string("b")); BaseIVFactory<Real> bivrFactMedi(a_ebislMedi); BaseIVFactory<Real> bivrFactCoar(a_ebislCoar); LevelData<BaseIVFAB<Real> > sparseErrorMedi(a_gridsMedi, nvar, IntVect::Zero, bivrFactMedi); LevelData<BaseIVFAB<Real> > sparseErrorCoar(a_gridsCoar, nvar, IntVect::Zero, bivrFactCoar); copyDenseToSparse(sparseErrorMedi, errorMedi); copyDenseToSparse(sparseErrorCoar, errorCoar); EBArith::compareIrregError(orders, sparseErrorMedi, sparseErrorCoar, a_gridsMedi, a_gridsCoar, a_ebislMedi, a_ebislCoar, domCoar, prefix, names); } else { MayDay::Error("bogus type"); } /**/ pout() << "Outputting moments to file" << endl; string solutFileFine = prefix + string("_Fine.hdf5"); string solutFileMedi = prefix + string("_Medi.hdf5"); string solutFileCoar = prefix + string("_Coar.hdf5"); writeEBLevelName(solutFine, solutFileFine); writeEBLevelName(solutMedi, solutFileMedi); writeEBLevelName(solutCoar, solutFileCoar); pout() << "Outputting error to file" << endl; string errorFileMedi = prefix + string("_Error_Medi.hdf5"); string errorFileCoar = prefix + string("_Error_Coar.hdf5"); writeEBLevelName(errorCoar, errorFileCoar); writeEBLevelName(errorMedi, errorFileMedi); /**/ } /***************/ int main(int a_argc, char* a_argv[]) { #ifdef CH_MPI MPI_Init(&a_argc,&a_argv); #endif { // Check for an input file char* inFile = NULL; if (a_argc > 1) { inFile = a_argv[1]; } else { pout() << "Usage: <executable name> <inputfile>" << endl; pout() << "No input file specified" << endl; return -1; } ParmParse pp(a_argc-2,a_argv+2,NULL,inFile); GridParameters paramFine, paramMedi, paramCoar; DisjointBoxLayout gridsFine, gridsMedi, gridsCoar; //read params from file getGridParameters(paramFine, true); paramMedi = paramFine; paramMedi.coarsen(2); paramCoar = paramMedi; paramCoar.coarsen(2); Vector<int> procs; Vector<Box> boxes; domainSplit(paramFine.coarsestDomain, boxes, paramFine.maxGridSize , paramFine.blockFactor); LoadBalance(procs, boxes); gridsFine = DisjointBoxLayout(boxes, procs, paramFine.coarsestDomain); coarsen(gridsMedi, gridsFine, 2); coarsen(gridsCoar, gridsMedi, 2); pout() << "rct: defining FINE geometry" << endl; definePoissonGeometry(paramFine); pout() << "saving fine geometry into eblg" << endl; EBLevelGrid eblgFine(gridsFine, paramFine.coarsestDomain, 2, Chombo_EBIS::instance()); pout() << "making CoFi info into eblg" << endl; EBLevelGrid eblgCoFi(gridsMedi, paramMedi.coarsestDomain, 2, Chombo_EBIS::instance()); barrier(); pout() << "clearing singleton" << endl; Chombo_EBIS::instance()->clear(); pout() << "rct: defining MEDI geometry" << endl; definePoissonGeometry(paramMedi); pout() << "saving medi geometry into eblg" << endl; EBLevelGrid eblgMedi(gridsMedi, paramMedi.coarsestDomain, 2, Chombo_EBIS::instance()); pout() << "making CoMe info into eblg" << endl; EBLevelGrid eblgCoMe(gridsCoar, paramCoar.coarsestDomain, 2, Chombo_EBIS::instance()); barrier(); pout() << "clearing singleton" << endl; 
Chombo_EBIS::instance()->clear(); pout() << "rct: defining Coar geometry" << endl; definePoissonGeometry(paramCoar); pout() << "saving medi geometry into eblg" << endl; EBLevelGrid eblgCoar(gridsCoar, paramCoar.coarsestDomain, 2, Chombo_EBIS::instance()); EBISLayout ebislFine = eblgFine.getEBISL(); EBISLayout ebislMedi = eblgMedi.getEBISL(); EBISLayout ebislCoar = eblgCoar.getEBISL(); EBISLayout ebislCoFi = eblgCoFi.getEBISL(); EBISLayout ebislCoMe = eblgCoMe.getEBISL(); //all thse calls to geometry because we are testing the //accuracy of geometry generation //the CoFi and CoMe stuff is because you cannot call refine on //coar and medi stuff because as far as it is concerned, it is the //finest level. They also might have slightly different graphs so this finesses that //problem as well barrier(); pout() << "test of volume moments " << endl; solutionErrorTest(paramFine,paramMedi,paramCoar, gridsFine,gridsMedi,gridsCoar, ebislFine,ebislMedi,ebislCoar, ebislCoFi, ebislCoMe, VOL_MOM, 0); pout() << "test eb area moments" << endl; solutionErrorTest(paramFine,paramMedi,paramCoar, gridsFine,gridsMedi,gridsCoar, ebislFine,ebislMedi,ebislCoar, ebislCoFi, ebislCoMe, EB_MOM , 0); pout() << "test eb normal moments" << endl; for(int idir = 0; idir < SpaceDim; idir++) { solutionErrorTest(paramFine,paramMedi,paramCoar, gridsFine,gridsMedi,gridsCoar, ebislFine,ebislMedi,ebislCoar, ebislCoFi, ebislCoMe, EB_NORMAL_MOM, idir); } pout() << "clearing singleton " << endl; Chombo_EBIS::instance()->clear(); } #ifdef CH_MPI CH_TIMER_REPORT(); MPI_Finalize(); #endif }
<?php

namespace App\Http\Controllers;

use Illuminate\Http\Request;
use App\Models\Examen;
use Carbon\Carbon;

class ExamenController extends Controller
{
    public function index()
    {
        $datosExamen = Examen::all();
        return response()->json($datosExamen);
    }

    public function guardar(Request $request)
    {
        $datosExamen = new Examen;
        if ($request->hasFile('imagen')) {
            $nombreArchivoOriginal = $request->file('imagen')->getClientOriginalName();
            $nuevoNombre = Carbon::now()->timestamp."_".$nombreArchivoOriginal;
            $carpetaDestino = './upload/';
            $request->file('imagen')->move($carpetaDestino, $nuevoNombre);

            $datosExamen->titulo = $request->titulo;
            $datosExamen->imagen = ltrim($carpetaDestino, '.').$nuevoNombre;
            $datosExamen->customer_id = $request->customer_id;
            $datosExamen->fecha_examen = $request->fecha_examen;
            $datosExamen->save();
        }
        return response()->json("REGISTRO GUARDADO");
    }

    public function ver($id)
    {
        $datosExamen = Examen::find($id);
        return response()->json($datosExamen);
    }

    public function eliminar($id)
    {
        $datosExamen = Examen::find($id);
        if ($datosExamen) {
            $rutaArchivo = base_path('public').$datosExamen->imagen;
            if (file_exists($rutaArchivo)) {
                unlink($rutaArchivo);
            }
            $datosExamen->delete();
        }
        return response()->json("Registro borrado");
    }

    public function actualizar(Request $request, $id)
    {
        $datosExamen = Examen::find($id);
        if (!$datosExamen) {
            return response()->json("Registro no encontrado", 404);
        }

        if ($request->hasFile('imagen')) {
            // Remove only the previous image file; the record itself is
            // kept and updated below.
            $rutaArchivo = base_path('public').$datosExamen->imagen;
            if (file_exists($rutaArchivo)) {
                unlink($rutaArchivo);
            }

            $nombreArchivoOriginal = $request->file('imagen')->getClientOriginalName();
            $nuevoNombre = Carbon::now()->timestamp."_".$nombreArchivoOriginal;
            $carpetaDestino = './upload/';
            $request->file('imagen')->move($carpetaDestino, $nuevoNombre);
            $datosExamen->imagen = ltrim($carpetaDestino, '.').$nuevoNombre;
        }

        if ($request->input('titulo')) {
            $datosExamen->titulo = $request->input('titulo');
        }
        if ($request->input('customer_id')) {
            $datosExamen->customer_id = $request->input('customer_id');
        }
        if ($request->input('fecha_examen')) {
            $datosExamen->fecha_examen = $request->input('fecha_examen');
        }
        $datosExamen->save();

        return response()->json("Datos actualizados");
    }
}
$(document).ready(function() {
    $('.delete_form').on('beforeSubmit', function() {
        return confirm('Подтвердите удаление'); // "Confirm deletion"
    });

    $('.uploadFileForm').on('beforeSubmit', function() {
        $(this).find('.submitButton').replaceWith('<span>Идет загрузка...</span>'); // "Uploading..."
    });

    $('#multipleDeleteForm').hide();
    $('#multipleDeleteForm').on('beforeSubmit', function() {
        return confirm('Подтвердите удаление'); // "Confirm deletion"
    });

    $('.multipleDeleteInput').change(function() {
        // show the bulk-delete form only while at least one item is checked
        var length = $('.multipleDeleteInput:checked').length;
        if (length > 0) {
            $('#multipleDeleteForm').show();
        } else {
            $('#multipleDeleteForm').hide();
        }

        // highlight the tile of the selected image
        if ($(this).prop('checked')) {
            $(this).closest('.indexImageDiv').addClass('selected');
        } else {
            $(this).closest('.indexImageDiv').removeClass('selected');
        }
    });
});
package hartman.websub.publisher.atom

import org.springframework.data.annotation.Id
import java.time.ZonedDateTime

data class AtomLink(val rel: String, val href: String)

data class AtomEntry(
    val id: String,
    val title: String,
    val updated: ZonedDateTime,
    val author: String?,
    val summary: String?,
    val content: String?
)

data class AtomFeed(
    @Id val id: String,
    val title: String,
    val updated: ZonedDateTime,
    val description: String?,
    val archive: Boolean = false,
    val links: List<AtomLink> = emptyList(),
    val entries: List<AtomEntry> = emptyList()
)
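// Usage sketch (not part of the original source): these are plain data
// classes, so assembling a feed is just nested construction. All values
// below are made up for illustration.
fun sampleFeed(): AtomFeed = AtomFeed(
    id = "urn:feed:example",
    title = "Example Feed",
    updated = java.time.ZonedDateTime.now(),
    description = "A feed assembled for illustration",
    links = listOf(AtomLink(rel = "self", href = "https://example.com/feed")),
    entries = listOf(
        AtomEntry(
            id = "urn:entry:1",
            title = "First entry",
            updated = java.time.ZonedDateTime.now(),
            author = "hartman",
            summary = null,
            content = "Hello, WebSub"
        )
    )
)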
using Newtonsoft.Json;

namespace commercetools.Zones
{
    /// <summary>
    /// A geographical location representing a country with an optional state.
    /// </summary>
    /// <see href="http://dev.commercetools.com/http-api-projects-zones.html#location"/>
    public class Location
    {
        #region Properties

        [JsonProperty(PropertyName = "country")]
        public string Country { get; set; }

        [JsonProperty(PropertyName = "state")]
        public string State { get; set; }

        #endregion

        #region Constructors

        /// <summary>
        /// Constructor.
        /// </summary>
        public Location()
        {
        }

        /// <summary>
        /// Initializes this instance with JSON data from an API response.
        /// </summary>
        /// <param name="data">JSON object</param>
        public Location(dynamic data)
        {
            if (data == null)
            {
                return;
            }

            this.Country = data.country;
            this.State = data.state;
        }

        #endregion
    }
}
{-# LANGUAGE ExistentialQuantification, TypeInType #-} module BadTelescope4 where import Data.Proxy import Data.Kind data SameKind :: k -> k -> Type data Bad a (c :: Proxy b) (d :: Proxy a) (x :: SameKind b d) data Borked a (b :: k) = forall (c :: k). B (Proxy c) -- this last one is OK. But there was a bug involving renaming -- that failed here, so the test case remains.
/* Copyright 2019-2020 vChain, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package gw import ( "context" "encoding/base64" "errors" "fmt" "github.com/codenotary/immudb/pkg/api/schema" "net/http" "testing" "github.com/codenotary/immudb/pkg/client" immuclient "github.com/codenotary/immudb/pkg/client" "github.com/codenotary/immudb/pkg/client/clienttest" "github.com/codenotary/immugw/pkg/json" "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/stretchr/testify/require" ) func testVerifiedZaddHandler(t *testing.T, mux *runtime.ServeMux, ic immuclient.ImmuClient) { prefixPattern := "VerifiedZaddHandler - Test case: %s" method := "POST" path := "/db/verified/zadd" for _, tc := range verifiedZaddHandlerTestCases(mux, ic) { handlerFunc := func(res http.ResponseWriter, req *http.Request) { tc.verifiedZaddHandler.VerifiedZadd(res, req, nil) } err := testHandler( t, fmt.Sprintf(prefixPattern, tc.name), method, path, tc.payload, handlerFunc, tc.testFunc, ) require.NoError(t, err) } } type verifiedZaddHandlerTestCase struct { name string verifiedZaddHandler VerifiedZaddHandler payload string testFunc func(*testing.T, string, int, map[string]interface{}) } func verifiedZaddHandlerTestCases(mux *runtime.ServeMux, ic immuclient.ImmuClient) []verifiedZaddHandlerTestCase { rt := newDefaultRuntime() json := json.DefaultJSON() szh := NewVerifiedZaddHandler(mux, ic, rt, json) icd := client.DefaultClient() verifiedZaddWErr := func(context.Context, []byte, float64, []byte, uint64) (*schema.TxMetadata, error) { return nil, errors.New("verifiedZadd error") } validSet := base64.StdEncoding.EncodeToString([]byte("verifiedZaddSet1")) validKey := base64.StdEncoding.EncodeToString([]byte("setKey1")) validPayload := fmt.Sprintf( `{ "zAddRequest": { "set": "%s", "score": %f, "key": "%s" } }`, validSet, 1.0, validKey, ) return []verifiedZaddHandlerTestCase{ { "Sending correct request", szh, validPayload, func(t *testing.T, testCase string, status int, body map[string]interface{}) { requireResponseStatus(t, testCase, http.StatusOK, status) }, }, { "Sending request with non-existent key", szh, fmt.Sprintf( `{ "zAddRequest": { "set": "%s", "score": %f, "key": "%s" } }`, validSet, 1.0, base64.StdEncoding.EncodeToString([]byte("verifiedZaddUnknownKey")), ), func(t *testing.T, testCase string, status int, body map[string]interface{}) { requireResponseStatus(t, testCase, http.StatusNotFound, status) }, }, { "Sending request with incorrect JSON field", szh, fmt.Sprintf( `{ "zAddRequestsss": { "set": "%s", "score": %f, "key": "%s" } }`, validSet, 1.0, validKey, ), func(t *testing.T, testCase string, status int, body map[string]interface{}) { requireResponseStatus(t, testCase, http.StatusBadRequest, status) requireResponseFieldsEqual( t, testCase, map[string]interface{}{"error": "incorrect JSON payload"}, body) }, }, { "Missing key field", szh, fmt.Sprintf( `{ "zAddRequest": { "set": "%s", "score": %f } }`, validSet, 1.0, ), func(t *testing.T, testCase string, status int, body map[string]interface{}) { requireResponseStatus(t, testCase, 
http.StatusBadRequest, status) requireResponseFieldsEqual( t, testCase, map[string]interface{}{"error": "illegal arguments"}, body) }, }, { "Send plain text instead of base64 encoded", szh, fmt.Sprintf( `{ "zAddRequest": { "set": "%s", "score": %f, "key": "myFirstKey" } }`, validSet, 1.0, ), func(t *testing.T, testCase string, status int, body map[string]interface{}) { requireResponseStatus(t, testCase, http.StatusBadRequest, status) requireResponseFieldsEqual( t, testCase, map[string]interface{}{"error": "illegal base64 data at input byte 8"}, body) }, }, { "AnnotateContext error", NewVerifiedZaddHandler(mux, ic, newTestRuntimeWithAnnotateContextErr(), json), validPayload, func(t *testing.T, testCase string, status int, body map[string]interface{}) { requireResponseStatus(t, testCase, http.StatusInternalServerError, status) requireResponseFieldsEqual( t, testCase, map[string]interface{}{"error": "annotate context error"}, body) }, }, { "VerifiedZadd error", NewVerifiedZaddHandler(mux, &clienttest.ImmuClientMock{ImmuClient: icd, VerifiedZAddF: verifiedZaddWErr}, rt, json), validPayload, func(t *testing.T, testCase string, status int, body map[string]interface{}) { requireResponseStatus(t, testCase, http.StatusInternalServerError, status) requireResponseFieldsEqual( t, testCase, map[string]interface{}{"error": "verifiedZadd error"}, body) }, }, { "JSON marshal error", NewVerifiedZaddHandler(mux, ic, rt, newTestJSONWithMarshalErr()), validPayload, func(t *testing.T, testCase string, status int, body map[string]interface{}) { requireResponseStatus(t, testCase, http.StatusInternalServerError, status) requireResponseFieldsEqual( t, testCase, map[string]interface{}{"error": "JSON marshal error"}, body) }, }, } }
# React Forms

The React Forms library provides a set of tools for [React][] to handle form rendering and validation.

It provides a **schema language** to define form structure and validation and a set of **form components** to render schemas into UI.

Data flow between React Forms components provides strong **immutability** guarantees. Form state is accessible from a single location and can be snapshotted with almost no overhead.

Almost every aspect of React Forms is designed to be **extendable**. It is easy to customize the behaviour of the existing components or create completely new ones.

See [documentation][docs] for more information on how to use React Forms.

## Credits

React Forms is free software created by [Prometheus Research, LLC][] and is released under the MIT license.

[React]: http://facebook.github.io/react/
[Prometheus Research, LLC]: http://prometheusresearch.com
[docs]: http://prometheusresearch.github.io/react-forms/
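## Example

A minimal, illustrative sketch of the schema-driven flow described above. The names `Schema`, `Property`, and `Form` are hypothetical stand-ins, not the library's confirmed API; consult the [documentation][docs] for the real exports.

```javascript
// Illustrative only: Schema, Property and Form are hypothetical stand-ins
// for the schema language and form components described above.
var ContactSchema = (
  <Schema>
    <Property name="name" label="Name" required />
    <Property name="email" label="E-mail" />
  </Schema>
);

React.render(<Form schema={ContactSchema} />, document.getElementById('app'));
```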
library bitcoin.scripts.output.pay_to_pubkey; import "dart:typed_data"; import "package:bitcoin/core.dart"; import "package:bitcoin/script.dart"; class PayToPubKeyOutputScript extends Script { /** * Create a new output for a given public key. * * The public key can be either of type Uint8List or KeyPair. * * If [encoded] is set to false, the script will be built using chunks. This improves * performance when the script is intended for execution. */ factory PayToPubKeyOutputScript(dynamic pubKey) { if (pubKey is KeyPair) pubKey = pubKey.publicKey; if (!(pubKey is Uint8List)) throw new ArgumentError("The public key can be either of type Uint8List or KeyPair."); return new PayToPubKeyOutputScript.convert( new ScriptBuilder().data(pubKey).op(ScriptOpCodes.OP_CHECKSIG).build(), true); } PayToPubKeyOutputScript.convert(Script script, [bool skipCheck = false]) : super(script.program) { if (!skipCheck && !matchesType(script)) throw new ScriptException("Given script is not an instance of this script type."); } KeyPair get pubKey => new KeyPair.public(chunks[0].data); Address getAddress([NetworkParameters params]) => pubKey.getAddress(params); static bool matchesType(Script script) { return script.chunks.length == 2 && script.chunks[0].data.length > 1 && script.chunks[1].opCode == ScriptOpCodes.OP_CHECKSIG; } }
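// Usage sketch (not part of the original library): build the classic
// "<pubKey> OP_CHECKSIG" locking script from a key pair and verify that it
// matches this script type. A concrete KeyPair is assumed to be created
// elsewhere (e.g. loaded from a wallet).
PayToPubKeyOutputScript lockToKey(KeyPair key) {
  final script = new PayToPubKeyOutputScript(key);
  assert(PayToPubKeyOutputScript.matchesType(script));
  return script;
}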
/*
 * Copyright The OpenTelemetry Authors
 * SPDX-License-Identifier: Apache-2.0
 */

package io.opentelemetry.javaagent.instrumentation.awslambda.v1_0;

import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.instrumentation.awslambda.v1_0.AwsLambdaMessageTracer;
import io.opentelemetry.instrumentation.awslambda.v1_0.AwsLambdaTracer;

public final class AwsLambdaInstrumentationHelper {

  private static final AwsLambdaTracer FUNCTION_TRACER =
      new AwsLambdaTracer(GlobalOpenTelemetry.get());

  public static AwsLambdaTracer functionTracer() {
    return FUNCTION_TRACER;
  }

  private static final AwsLambdaMessageTracer MESSAGE_TRACER =
      new AwsLambdaMessageTracer(GlobalOpenTelemetry.get());

  public static AwsLambdaMessageTracer messageTracer() {
    return MESSAGE_TRACER;
  }

  private AwsLambdaInstrumentationHelper() {}
}
package de.visualdigits.bannermatic import java.io.{File, FileOutputStream} import de.visualdigits.bannermatic.model.pixelmatrix.Color import de.visualdigits.bannermatic.model.pixelmatrix.`type`.{Align, Placement, VAlign} import org.apache.commons.io.IOUtils import org.junit.Assert import org.junit.runner.RunWith import org.scalatest.FunSuite import org.scalatestplus.junit.JUnitRunner @RunWith(classOf[JUnitRunner]) class RenderBannerTest extends FunSuite { test("Render a banner") { val image: File = new File(ClassLoader.getSystemClassLoader.getResource("images/rose-red.png").toURI) val bg = BannerMatic.renderImage(image, 60, pixelRatio = 3.0 / 7.0) val fg = BannerMatic.renderText("TEXTBOX", 100, fgColor = Color.RED) val banner = bg.overlay(Align.center, VAlign.bottom, fg, Placement.outside) // IOUtils.write(banner.toString, new FileOutputStream(new File("/Users/knull/Pictures/banner.txt"))) } }
#!/bin/bash

LOCAL_ADDRESS=$(ip route get 8.8.8.8 | awk '{print $NF; exit}')
LOCAL_DNS=$(dig +short -x $LOCAL_ADDRESS)
PUBLIC_ADDRESS=$(dig +short myip.opendns.com @resolver1.opendns.com)
PUBLIC_DNS=$(dig +short -x $PUBLIC_ADDRESS)

# Put the DBMS install commands here
#ulimit -n 65536

YUGABYTE_VERSION=1.0.3.0

sudo rm -f /etc/hosts
echo 127.0.0.1 `hostname` $LOCAL_DNS $PUBLIC_DNS | sudo tee /etc/hosts

# install java
sudo apt-get update -y
sudo apt-get install openjdk-8-jre-headless -y

# install python-minimal for the cql cli
sudo apt-get install python-minimal -y

# download the yugabyte binary
wget https://downloads.yugabyte.com/yugabyte-ce-$YUGABYTE_VERSION-linux.tar.gz
mkdir -p yugabyte
mkdir -p yb-disk
tar -pxvzf yugabyte-ce-$YUGABYTE_VERSION-linux.tar.gz -C yugabyte --strip 1
./yugabyte/bin/post_install.sh
require 'neography' require 'benchmark' # If you want to see more, uncomment the next few lines # require 'net-http-spy' # Net::HTTP.http_logger_options = {:body => true} # just the body # Net::HTTP.http_logger_options = {:verbose => true} # see everything def generate_text(length=8) chars = 'abcdefghjkmnpqrstuvwxyz' key = '' length.times { |i| key << chars[rand(chars.length)] } key end RSpec.configure do |c| c.filter_run_excluding :slow => true, :break_gremlin => true end
package handlerlist

import (
	"fmt"
	"github.com/kgysu/oc-apm/client/util"
	"github.com/kgysu/oc-apm/web/html/pages/list"
	"github.com/kgysu/oc-apm/web/server/serverutil"
	"net/http"
)

func HandleListPage(rw http.ResponseWriter, req *http.Request) {
	serverutil.SetHeaders(rw, req, "text/html")
	labelSelector, kinds := parseReq(req)

	appFromNamespace, err := util.GetAppFromNamespace("list", labelSelector)
	if err != nil {
		fmt.Fprint(rw, err)
		return
	}

	content := htmllist.CreateItemsListHtml(appFromNamespace.GetItemsByKinds(kinds), labelSelector, kinds)
	_, err = fmt.Fprint(rw, content)
	if err != nil {
		fmt.Print(err)
		fmt.Fprint(rw, err)
		return
	}
}

func parseReq(req *http.Request) (string, string) {
	label := req.FormValue("label")
	kinds := req.FormValue("kinds")
	return label, kinds
}
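// Usage sketch (not part of the original source): wiring the handler into a
// plain net/http server. The route path is an assumption. The handler reads
// its filters from the "label" and "kinds" form values, so a request like
//
//   GET /list?label=app%3Dmyapp&kinds=DeploymentConfig
//
// renders only the matching items.
//
//   http.HandleFunc("/list", handlerlist.HandleListPage)
//   log.Fatal(http.ListenAndServe(":8080", nil))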
package git

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func findGitFile(fileName string) (string, string, error) {
	dir, err := os.Getwd()
	if err != nil {
		return "", "", err
	}
	for {
		gitDir := filepath.Join(dir, ".git/"+fileName)
		exists, err := fileExists(gitDir)
		if err != nil {
			return "", "", err
		}
		if exists {
			return dir, gitDir, nil
		}
		dirPath := strings.TrimSuffix(dir, "/")
		if dirPath == "" {
			return "", "", nil
		}
		p, _ := filepath.Split(dirPath)
		if dir == "/" || p == dir {
			return "", "", nil
		}
		dir = p
	}
}

// fileExists checks if path exists and is a file
func fileExists(path string) (bool, error) {
	fileInfo, err := os.Stat(path)
	if err == nil {
		return !fileInfo.IsDir(), nil
	}
	if os.IsNotExist(err) {
		return false, nil
	}
	return false, fmt.Errorf("failed to check if file exists %s %w", path, err)
}
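// Usage sketch (not part of the original source): findGitFile is unexported,
// so this hypothetical helper has to live in the same package. It walks up
// from the working directory until a ".git/HEAD" is found, which also yields
// the repository root.
func repoRoot() (string, error) {
	root, head, err := findGitFile("HEAD")
	if err != nil {
		return "", err
	}
	if head == "" {
		return "", fmt.Errorf("not inside a git repository")
	}
	return root, nil
}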
# global_miles_airline_api # # This file was automatically generated by APIMATIC v2.0 # ( https://apimatic.io ). require 'date' require 'json' require 'faraday' require 'certifi' require 'logging' require_relative 'global_miles_airline_api/api_helper.rb' require_relative 'global_miles_airline_api/global_miles_airline_api_client.rb' # Http require_relative 'global_miles_airline_api/http/http_call_back.rb' require_relative 'global_miles_airline_api/http/http_client.rb' require_relative 'global_miles_airline_api/http/http_method_enum.rb' require_relative 'global_miles_airline_api/http/http_request.rb' require_relative 'global_miles_airline_api/http/http_response.rb' require_relative 'global_miles_airline_api/http/http_context.rb' require_relative 'global_miles_airline_api/http/faraday_client.rb' # Models require_relative 'global_miles_airline_api/models/base_model.rb' require_relative 'global_miles_airline_api/models/flight_create_request.rb' require_relative 'global_miles_airline_api/models/response.rb' require_relative 'global_miles_airline_api/models/amount.rb' require_relative 'global_miles_airline_api/models/flight_max.rb' require_relative 'global_miles_airline_api/models/passenger_max.rb' require_relative 'global_miles_airline_api/models/award_miles_rules.rb' require_relative 'global_miles_airline_api/models/member_search_request.rb' require_relative 'global_miles_airline_api/models/passenger_min.rb' require_relative 'global_miles_airline_api/models/new_member_request.rb' require_relative 'global_miles_airline_api/models/flight_min.rb' require_relative 'global_miles_airline_api/models/member_search_response.rb' require_relative 'global_miles_airline_api/models/flight_simple.rb' require_relative 'global_miles_airline_api/models/user.rb' require_relative 'global_miles_airline_api/models/flight_status_response.rb' require_relative 'global_miles_airline_api/models/flight_status_request.rb' require_relative 'global_miles_airline_api/models/new_member_response.rb' require_relative 'global_miles_airline_api/models/gender_type_enum.rb' require_relative 'global_miles_airline_api/models/language_type_enum.rb' require_relative 'global_miles_airline_api/models/award_type_enum.rb' require_relative 'global_miles_airline_api/models/flight_status_type_enum.rb' require_relative 'global_miles_airline_api/models/extra_info_type_enum.rb' require_relative 'global_miles_airline_api/models/routing_type_enum.rb' require_relative 'global_miles_airline_api/models/tier_type_enum.rb' require_relative 'global_miles_airline_api/models/cabin_type_enum.rb' # Exceptions require_relative 'global_miles_airline_api/exceptions/api_exception.rb' require_relative 'global_miles_airline_api/configuration.rb' # Controllers require_relative 'global_miles_airline_api/controllers/base_controller.rb' require_relative 'global_miles_airline_api/controllers/members_controller.rb' require_relative 'global_miles_airline_api/controllers/flights_controller.rb'
package gtt43a import ( "encoding/binary" "fmt" "unicode/utf16" ) /**/ type GTT25PropertyType []byte var GaugeValue GTT25PropertyType = []byte{0x03, 0x02} var LabelText GTT25PropertyType = []byte{0x09, 0x06} var LabelFontSize GTT25PropertyType = []byte{0x09, 0x0A} var SliderValue GTT25PropertyType = []byte{0x0A, 0x08} var ButtonState GTT25PropertyType = []byte{0x15, 0x0C} var ButtonText GTT25PropertyType = []byte{0x15, 0x03} var SliderLabelText GTT25PropertyType = []byte{0x0A, 0x09} func (typeP GTT25PropertyType) Value() []byte { return []byte(typeP) } func ApduSetPropertyValueU16(id int, prpType GTT25PropertyType, value int) []byte { data := []byte{0xFE, 0xFA, 0x01, 0x06} idb := make([]byte, 2) binary.BigEndian.PutUint16(idb, uint16(id)) valueb := make([]byte, 2) binary.BigEndian.PutUint16(valueb, uint16(value)) data = append(data, idb...) data = append(data, prpType.Value()...) data = append(data, valueb...) return data } //Set Property ValueU16 GTT25Object func (m *display) SetPropertyValueU16(id int, prpType GTT25PropertyType) func(value int) error { return func(value int) error { data := ApduSetPropertyValueU16(id, prpType, value) /**/ res, err := m.SendRecv(data) if err != nil { return err } if len(res) < 3 { return fmt.Errorf("error in response: [% X]", res) } if res[len(res)-1] != byte(0xFE) { return fmt.Errorf("error in request U16, status code: [%X]", res[2]) } /**/ return nil } } func ApduSetPropertyValueS16(id int, prpType GTT25PropertyType, value int) []byte { data := []byte{0xFE, 0xFA, 0x01, 0x08} idb := make([]byte, 2) binary.BigEndian.PutUint16(idb, uint16(id)) valueb := make([]byte, 2) binary.BigEndian.PutUint16(valueb, uint16(value)) data = append(data, idb...) data = append(data, prpType.Value()...) data = append(data, valueb...) return data } //Set Property ValueS16 GTT25Object func (m *display) SetPropertyValueS16(id int, prpType GTT25PropertyType) func(value int) error { return func(value int) error { data := ApduSetPropertyValueS16(id, prpType, value) /**/ res, err := m.SendRecv(data) if err != nil { return err } if len(res) < 3 { return fmt.Errorf("error in response: [% X]", res) } if res[len(res)-1] != byte(0xFE) { return fmt.Errorf("error in request S16, status code: [%X]", res[2]) } /**/ return nil } } func ApduSetPropertyValueU8(id int, prpType GTT25PropertyType, value int) []byte { data := []byte{0xFE, 0xFA, 0x01, 0x04} idb := make([]byte, 2) binary.BigEndian.PutUint16(idb, uint16(id)) data = append(data, idb...) data = append(data, prpType.Value()...) data = append(data, byte(value)) return data } //Set Property ValueU8 GTT25Object func (m *display) SetPropertyValueU8(id int, prpType GTT25PropertyType) func(value int) error { return func(value int) error { data := ApduSetPropertyValueU8(id, prpType, value) /**/ res, err := m.SendRecv(data) if err != nil { return err } if len(res) < 3 { return fmt.Errorf("error in response: [% X]", res) } if res[len(res)-1] != byte(0xFE) { return fmt.Errorf("error in request U8, status code: [%X]", res[2]) } /**/ return nil } } func ApduSetPropertyText(id int, prpType GTT25PropertyType, text string) []byte { data := []byte{0xFE, 0xFA, 0x01, 0x0A} idb := make([]byte, 2) binary.BigEndian.PutUint16(idb, uint16(id)) data = append(data, idb...) data = append(data, prpType.Value()...) data = append(data, 0x00) value16 := utf16.Encode([]rune(text)) value := make([]byte, 0) for _, v := range value16 { tempB := make([]byte, 2) binary.LittleEndian.PutUint16(tempB, uint16(v)) value = append(value, tempB...) 
} lenb := make([]byte, 2) binary.BigEndian.PutUint16(lenb, uint16(len(value))) data = append(data, lenb...) data = append(data, value...) return data } //Set Property Text GTT25Object func (m *display) SetPropertyText(id int, prpType GTT25PropertyType) func(text string) error { return func(text string) error { data := ApduSetPropertyText(id, prpType, text) /**/ res, err := m.SendRecv(data) if err != nil { return err } if len(res) < 3 { return fmt.Errorf("error in response: [% X]", res) } if res[len(res)-1] != byte(0xFE) { return fmt.Errorf("error in request, status code: [%X]", res[2]) } /**/ return nil } } func ApduGetPropertyValueU16(id int, prpType GTT25PropertyType) []byte { data := []byte{0xFE, 0xFA, 0x01, 0x07} idb := make([]byte, 2) binary.BigEndian.PutUint16(idb, uint16(id)) data = append(data, idb...) data = append(data, prpType.Value()...) return data } //Get Property ValueU16 GTT25Object func (m *display) GetPropertyValueU16(id int, prpType GTT25PropertyType) func() ([]byte, error) { return func() ([]byte, error) { data := ApduGetPropertyValueU16(id, prpType) res, err := m.SendRecv(data) if err != nil { return nil, err } if len(res) < 3 { return nil, fmt.Errorf("error in response: [% X]", res) } if res[len(res)-3] != byte(0xFE) { return nil, fmt.Errorf("error in request U16, status code: [%X]", res[2]) } return res[len(res)-2:], nil } } func ApduGetPropertyValueS16(id int, prpType GTT25PropertyType) []byte { data := []byte{0xFE, 0xFA, 0x01, 0x09} idb := make([]byte, 2) binary.BigEndian.PutUint16(idb, uint16(id)) data = append(data, idb...) data = append(data, prpType.Value()...) return data } //Get Property ValueS16 GTT25Object func (m *display) GetPropertyValueS16(id int, prpType GTT25PropertyType) func() ([]byte, error) { return func() ([]byte, error) { data := ApduGetPropertyValueS16(id, prpType) res, err := m.SendRecv(data) if err != nil { return nil, err } if len(res) < 3 { return nil, fmt.Errorf("error in response: [% X]", res) } if res[len(res)-3] != byte(0xFE) { return nil, fmt.Errorf("error in request S16, status code: [%X]", res[2]) } return res[len(res)-2:], nil } } func ApduGetPropertyValueU8(id int, prpType GTT25PropertyType) []byte { data := []byte{0xFE, 0xFA, 0x01, 0x05} idb := make([]byte, 2) binary.BigEndian.PutUint16(idb, uint16(id)) data = append(data, idb...) data = append(data, prpType.Value()...) return data } //Get Property ValueU8 GTT25Object func (m *display) GetPropertyValueU8(id int, prpType GTT25PropertyType) func() (byte, error) { return func() (byte, error) { data := ApduGetPropertyValueU8(id, prpType) res, err := m.SendRecv(data) if err != nil { return 0, err } if len(res) < 3 { return 0x00, fmt.Errorf("error in response: [% X]", res) } if res[len(res)-2] != byte(0xFE) { return byte(0x00), fmt.Errorf("error in request U8, status code: [%X]", res[2]) } return res[len(res)-1], nil } }
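// Usage sketch (not part of the original source): every setter here is
// curried. Calling SetPropertyText once binds an object id and property
// type, and the returned closure can then be reused for repeated updates,
// which keeps call sites terse when the same widget is updated often. How
// a *display value is obtained (its constructor lives elsewhere in this
// package) is assumed.
//
//   setLabel := d.SetPropertyText(1, LabelText) // label object id 1
//   if err := setLabel("Hello"); err != nil { /* handle */ }
//   if err := setLabel("World"); err != nil { /* handle */ }
//
//   setSlider := d.SetPropertyValueU16(2, SliderValue)
//   _ = setSlider(75)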
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class GazeMarker : MonoBehaviour
{
    public static List<Vector3> gazePath = new List<Vector3>();
    public static List<List<Vector3>> savedGazePath = new List<List<Vector3>>();

    private RayCaster rCaster;

    void Start()
    {
        // rCaster = GameObject.FindGameObjectWithTag("EditorOnly").GetComponent<RayCastF>();
        rCaster = Camera.main.GetComponent<RayCaster>();
    }

    // Update is called once per frame
    void Update()
    {
        // everything that the raycast hit
        RaycastHit[] hits = Physics.RaycastAll(rCaster.ray);

        // save the gaze marker position once per frame when the gaze ray
        // hits anything; RaycastAll can return several colliders for the
        // same gaze direction, which would otherwise record duplicates
        if (hits.Length > 0)
        {
            gazePath.Add(gameObject.transform.localPosition);
        }
    }
}
package com.groupnine.oss.seller.service;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Random;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.Part;

import org.apache.commons.io.FileUtils;

import com.groupnine.oss.pub.entity.Order;
import com.groupnine.oss.seller.dao.SellerDao;
import com.groupnine.oss.seller.dao.SellerDaoImpl;
import com.groupnine.oss.seller.entity.Goods;
import com.groupnine.oss.seller.entity.GoodsAttr;
import com.groupnine.oss.seller.entity.GoodsImage;
import com.groupnine.oss.seller.entity.Shop;
import com.groupnine.oss.seller.entity.StatisticsData;
import com.groupnine.oss.user.entity.Receiver;

public class SellerServiceImpl implements SellerService {

	SellerDao dao = new SellerDaoImpl();

	public Shop getShopById(int userId) { return dao.getShopById(userId); }

	public boolean updateShopInfo(Shop shop) { return dao.updateShopInfo(shop); }

	public boolean updateGoodsInfo(Goods goods) { return dao.updateGoodsInfo(goods); }

	public boolean sendGoods(Order order) { return dao.sendGoods(order); }

	public int addNewGoods(Goods goods) { return dao.addNewGoods(goods); }

	public boolean addGoodsImage(HttpServletRequest request, int goodsId) {
		boolean flag = false;
		int count = -1;
		ArrayList<Part> images;
		try {
			images = (ArrayList<Part>) request.getParts();
			count = images.size();
			for (Part image : images) {
				if (image.getContentType() == null) {
					continue;
				}
				InputStream imageInputStream = null;
				if (image.getSize() != 0) {
					try {
						imageInputStream = image.getInputStream();
						if (imageInputStream != null) {
							String imagedir = request.getServletContext()
									.getInitParameter("imagedir") + File.separator;
							// Image name format: 20161123204206613375.jpg,
							// i.e. 2016-11-23 20:42:06.613 plus three random digits (0-9)
							SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmssSSS");
							StringBuilder imageName = new StringBuilder(dateFormat.format(new Date()));
							Random random = new Random();
							for (int i = 0; i < 3; ++i) {
								imageName.append(random.nextInt(10));
							}
							imageName.append(".jpg");
							String targetFile = imagedir + imageName;
							try {
								FileUtils.copyInputStreamToFile(imageInputStream, new File(targetFile));
								count--;
								// update the database
								dao.addGoodsImage("/images/goods/" + imageName, goodsId);
							} catch (IOException e) {
								e.printStackTrace();
							}
						}
					} catch (IOException e1) {
						e1.printStackTrace();
					} finally {
						// guard against a null stream when getInputStream() failed
						if (imageInputStream != null) {
							try {
								imageInputStream.close();
							} catch (IOException e2) {
								e2.printStackTrace();
							}
						}
					}
				}
			}
		} catch (IOException | ServletException e3) {
			e3.printStackTrace();
		}
		// success only when every uploaded image part was stored
		flag = (count == 0);
		return flag;
	}

	public boolean deleteGoods(int goodsId) { return dao.deleteGoods(goodsId); }

	public boolean addGoodsAttr(GoodsAttr attr) { return dao.addGoodsAttr(attr); }

	public boolean updateGoodsAttr(GoodsAttr attr) { return dao.updateGoodsAttr(attr); }

	public boolean deleteGoodsAttr(int attributeId) { return dao.deleteGoodsAttr(attributeId); }

	public boolean deleteGoodsImage(int imageId) { return dao.deleteGoodsImage(imageId); }

	public Goods getGoodsById(int goodsId) { return dao.getGoodsById(goodsId); }

	public ArrayList<GoodsImage> getImages(int goodsId) { return dao.getImages(goodsId); }

	public ArrayList<String> getImagesUrl(int goodsId) { return dao.getImagesUrl(goodsId); }

	public ArrayList<GoodsAttr> getAttrs(int goodsId) { return dao.getAttrs(goodsId); }

	public ArrayList<StatisticsData> getSingleSales(int days, int goodsId) { return dao.getSingleSales(days, goodsId); }

	public boolean registerShop(Shop shop) { return dao.registerShop(shop); }

	public ArrayList<Order> getHistoryOrder(int shopId, int page) { return dao.getHistoryOrder(shopId, page); }

	public ArrayList<Receiver> getReceiver(int userId) { return dao.getReceiver(userId); }

	public ArrayList<Order> getUnfinishedOrder(int shopId, int page) { return dao.getUnfinishedOrder(shopId, page); }

	public Order getOrderDetail(int orderId) { return dao.getOrderDetail(orderId); }

	public ArrayList<StatisticsData> getTotalSales(int days, int shopId) { return dao.getTotalSales(days, shopId); }

	public ArrayList<Goods> getGoodsBriefs(int shopId, int page) { return dao.getGoodsBriefs(shopId, page); }

	public boolean deleteGoodsAttrs(int goodsId) { return dao.deleteGoodsAttrs(goodsId); }
}
#define MAX_UTHREADS 64
#define SIGALRM 14
#define UTHREAD_QUANTA 5
#define STACKSZ 4096

typedef void (*start_func)(void*);

enum thread_state {RUNNING, READY, SLEEPING, BLOCKED, TERMINATED};

struct threadtrapframe {
  uint edi;
  uint esi;
  uint ebp;
  uint oesp;
  uint ebx;
  uint edx;
  uint ecx;
  uint eax;
  ushort gs;
  ushort padding1;
  ushort fs;
  ushort padding2;
  ushort es;
  ushort padding3;
  ushort ds;
  ushort padding4;
  uint trapno;
  uint err;
  uint eip;
  ushort cs;
  ushort padding5;
  uint eflags;
  uint esp;
  ushort ss;
  ushort padding6;
};

struct thread {
  int tid;
  unsigned char* stack;
  enum thread_state state;
  struct threadtrapframe tf;
  int indexInThreadtable;
  int wakeUpTime;
  int blockedOnSemaphore;
  uint joinOnTid;
};

int uthread_sleep(int);
void uthread_join(int);
int uthread_self();
int uthread_create(start_func, void*);
int uthread_init(void);
void uthread_exit();

//SEMAPHORES:
int bsem_alloc();
void bsem_free(int);
void bsem_down(int);
void bsem_up(int);
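/*
 * Usage sketch (not part of the original header): a minimal program built on
 * this API. The header file name ("uthread.h") and the xv6-style runtime
 * that backs these declarations are assumptions; only the signatures above
 * are taken from the source.
 */
#include "uthread.h"

static int sem;

static void worker(void *arg) {
    (void)arg;
    bsem_down(sem);      /* block until the main thread signals */
    uthread_sleep(2);    /* then sleep a couple of quanta */
    uthread_exit();
}

int main(void) {
    uthread_init();                          /* take over scheduling via SIGALRM */
    sem = bsem_alloc();
    int tid = uthread_create(worker, 0);
    bsem_up(sem);                            /* let the worker run */
    uthread_join(tid);                       /* wait until it terminates */
    bsem_free(sem);
    uthread_exit();
}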
-- MySQL dump 10.15 Distrib 10.0.34-MariaDB, for debian-linux-gnu (x86_64) -- -- Host: localhost Database: scanner -- ------------------------------------------------------ -- Server version 10.0.34-MariaDB-0ubuntu0.16.04.1 /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES utf8mb4 */; /*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; /*!40103 SET TIME_ZONE='+00:00' */; /*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; /*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; /*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; -- -- Table structure for table `account_tbl` -- DROP TABLE IF EXISTS `account_tbl`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `account_tbl` ( `idx` bigint(20) NOT NULL AUTO_INCREMENT, `account_name` varchar(40) DEFAULT NULL, `created` varchar(40) DEFAULT NULL, `block_num` bigint(20) DEFAULT NULL, PRIMARY KEY (`idx`) ) ENGINE=InnoDB AUTO_INCREMENT=23 DEFAULT CHARSET=utf8mb4; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `action_tbl` -- DROP TABLE IF EXISTS `action_tbl`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `action_tbl` ( `idx` bigint(20) NOT NULL AUTO_INCREMENT, `action_name` varchar(40) DEFAULT NULL, `txn_id` varchar(64) DEFAULT NULL, `authorization` varchar(80) DEFAULT NULL, `contract` varchar(12) DEFAULT NULL, PRIMARY KEY (`idx`) ) ENGINE=InnoDB AUTO_INCREMENT=89 DEFAULT CHARSET=utf8mb4; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `block_tbl` -- DROP TABLE IF EXISTS `block_tbl`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `block_tbl` ( `idx` bigint(20) NOT NULL AUTO_INCREMENT, `block_num` bigint(20) unsigned DEFAULT NULL, `block_id` varchar(64) DEFAULT NULL, `timestamp` varchar(30) DEFAULT NULL, `transactions` int(11) DEFAULT NULL, `producer` varchar(30) DEFAULT NULL, PRIMARY KEY (`idx`), UNIQUE KEY `block_num` (`block_num`), UNIQUE KEY `block_id` (`block_id`) ) ENGINE=InnoDB AUTO_INCREMENT=61 DEFAULT CHARSET=utf8mb4; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `producer_tbl` -- DROP TABLE IF EXISTS `producer_tbl`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `producer_tbl` ( `idx` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(30) NOT NULL, `email` varchar(120) NOT NULL, `article` text NOT NULL, `accnt_name` varchar(13) NOT NULL, `slogan` varchar(60) DEFAULT NULL, `location` varchar(60) DEFAULT NULL, `homepage` varchar(60) DEFAULT NULL, `maps_lat` varchar(20) DEFAULT NULL, `maps_lng` varchar(20) DEFAULT NULL, PRIMARY KEY (`idx`), UNIQUE KEY `name` (`name`) ) ENGINE=InnoDB AUTO_INCREMENT=10 DEFAULT CHARSET=utf8mb4; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `transaction_tbl` -- DROP TABLE IF EXISTS `transaction_tbl`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `transaction_tbl` ( `idx` bigint(20) NOT NULL AUTO_INCREMENT, `txn_id` 
varchar(64) DEFAULT NULL, `expiration` varchar(40) DEFAULT NULL, `actions` int(11) DEFAULT NULL, `block_id` varchar(64) DEFAULT NULL, PRIMARY KEY (`idx`), UNIQUE KEY `txn_id` (`txn_id`) ) ENGINE=InnoDB AUTO_INCREMENT=59 DEFAULT CHARSET=utf8mb4; /*!40101 SET character_set_client = @saved_cs_client */; /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; /*!40101 SET SQL_MODE=@OLD_SQL_MODE */; /*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; /*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; -- Dump completed on 2018-09-01 4:04:17
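-- Usage sketch (not part of the original dump): a query one might run against
-- this schema, joining blocks to their transactions via block_id. Column
-- names are taken from the table definitions above; the data itself is
-- whatever the scanner loaded.
--
--   SELECT b.block_num, b.producer, t.txn_id, t.actions
--   FROM block_tbl b
--   JOIN transaction_tbl t ON t.block_id = b.block_id
--   ORDER BY b.block_num DESC
--   LIMIT 10;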
@testset "distributions" begin Random.seed!(1234) # Create random vectors and matrices dim = 3 a = rand(dim) b = rand(dim) c = rand(dim) A = rand(dim, dim) B = rand(dim, dim) C = rand(dim, dim) # Create random numbers alpha = rand() beta = rand() gamma = rand() # Create matrix `X` such that `X` and `I - X` are positive definite if `A ≠ 0`. function to_beta_mat(A) S = A * A' + I invL = inv(cholesky(S).L) return invL * invL' end # Create positive values. to_positive(x) = exp.(x) to_positive(x::AbstractArray{<:AbstractArray}) = to_positive.(x) # The following definition should not be needed # It seems there is a bug in the default `rand_tangent` that causes a # StackOverflowError though function ChainRulesTestUtils.rand_tangent(::Random.AbstractRNG, ::typeof(to_positive)) return NoTangent() end # Tests that have a `broken` field can be executed but, according to FiniteDifferences, # fail to produce the correct result. These tests can be checked with `@test_broken`. univariate_distributions = DistSpec[ ## Univariate discrete distributions DistSpec(Bernoulli, (0.45,), 1), DistSpec(Bernoulli, (0.45,), [1, 1]), DistSpec(Bernoulli, (0.45,), 0), DistSpec(Bernoulli, (0.45,), [0, 0]), DistSpec((a, b) -> BetaBinomial(10, a, b), (2.0, 1.0), 5), DistSpec((a, b) -> BetaBinomial(10, a, b), (2.0, 1.0), [5, 5]), DistSpec(p -> Binomial(10, p), (0.5,), 5), DistSpec(p -> Binomial(10, p), (0.5,), [5, 5]), DistSpec(p -> Categorical(p / sum(p)), ([0.45, 0.55],), 1), DistSpec(p -> Categorical(p / sum(p)), ([0.45, 0.55],), [1, 1]), DistSpec(Geometric, (0.45,), 3), DistSpec(Geometric, (0.45,), [3, 3]), DistSpec(NegativeBinomial, (3.5, 0.5), 1), DistSpec(NegativeBinomial, (3.5, 0.5), [1, 1]), DistSpec(Poisson, (0.5,), 1), DistSpec(Poisson, (0.5,), [1, 1]), DistSpec(Skellam, (1.0, 2.0), -2), DistSpec(Skellam, (1.0, 2.0), [-2, -2]), DistSpec(PoissonBinomial, ([0.5, 0.5],), 0), DistSpec(TuringPoissonBinomial, ([0.5, 0.5],), 0), DistSpec(TuringPoissonBinomial, ([0.5, 0.5],), [0, 0]), ## Univariate continuous distributions DistSpec(Arcsine, (), 0.5), DistSpec(Arcsine, (1.0,), 0.5), DistSpec(Arcsine, (0.0, 2.0), 0.5), DistSpec(Beta, (), 0.4), DistSpec(Beta, (1.5,), 0.4), DistSpec(Beta, (1.5, 2.0), 0.4), DistSpec(BetaPrime, (), 0.4), DistSpec(BetaPrime, (1.5,), 0.4), DistSpec(BetaPrime, (1.5, 2.0), 0.4), DistSpec(Biweight, (), 0.5), DistSpec(Biweight, (1.0,), 0.5), DistSpec(Biweight, (1.0, 2.0), 0.5), DistSpec(Cauchy, (), 0.5), DistSpec(Cauchy, (1.0,), 0.5), DistSpec(Cauchy, (1.0, 2.0), 0.5), DistSpec(Chernoff, (), 0.5, broken=(:Zygote,)), DistSpec(Chi, (1.0,), 0.5), DistSpec(Chisq, (1.0,), 0.5), DistSpec(Cosine, (1.0, 1.0), 0.5), DistSpec(Epanechnikov, (1.0, 1.0), 0.5), DistSpec(s -> Erlang(1, s), (1.0,), 0.5), # First arg is integer DistSpec(Exponential, (1.0,), 0.5), DistSpec(FDist, (1.0, 1.0), 0.5), DistSpec(Frechet, (), 0.5), DistSpec(Frechet, (1.0,), 0.5), DistSpec(Frechet, (1.0, 2.0), 0.5), DistSpec(Gamma, (), 0.4), DistSpec(Gamma, (1.5,), 0.4), DistSpec(Gamma, (1.5, 2.0), 0.4), DistSpec(GeneralizedExtremeValue, (1.0, 1.0, 1.0), 0.5), DistSpec(GeneralizedPareto, (), 0.5), DistSpec(GeneralizedPareto, (1.0, 2.0), 0.5), DistSpec(GeneralizedPareto, (0.0, 2.0, 3.0), 0.5), DistSpec(Gumbel, (), 0.5), DistSpec(Gumbel, (1.0,), 0.5), DistSpec(Gumbel, (1.0, 2.0), 0.5), DistSpec(InverseGamma, (), 0.5), DistSpec(InverseGamma, (1.0,), 0.5), DistSpec(InverseGamma, (1.0, 2.0), 0.5), DistSpec(InverseGaussian, (), 0.5), DistSpec(InverseGaussian, (1.0,), 0.5), DistSpec(InverseGaussian, (1.0, 2.0), 0.5), DistSpec(Kolmogorov, (), 0.5), 
DistSpec(Laplace, (), 0.5), DistSpec(Laplace, (1.0,), 0.5), DistSpec(Laplace, (1.0, 2.0), 0.5), DistSpec(Levy, (), 0.5), DistSpec(Levy, (0.0,), 0.5), DistSpec(Levy, (0.0, 2.0), 0.5), DistSpec((a, b) -> LocationScale(a, b, Normal()), (1.0, 2.0), 0.5), DistSpec(Logistic, (), 0.5), DistSpec(Logistic, (1.0,), 0.5), DistSpec(Logistic, (1.0, 2.0), 0.5), DistSpec(LogitNormal, (), 0.5), DistSpec(LogitNormal, (1.0,), 0.5), DistSpec(LogitNormal, (1.0, 2.0), 0.5), DistSpec(LogNormal, (), 0.5), DistSpec(LogNormal, (1.0,), 0.5), DistSpec(LogNormal, (1.0, 2.0), 0.5), # Dispatch error caused by ccall DistSpec(NoncentralBeta, (1.0, 2.0, 1.0), 0.5, broken=(:Tracker, :ForwardDiff, :Zygote, :ReverseDiff)), DistSpec(NoncentralChisq, (1.0, 2.0), 0.5, broken=(:Tracker, :ForwardDiff, :Zygote, :ReverseDiff)), DistSpec(NoncentralF, (1.0, 2.0, 1.0), 0.5, broken=(:Tracker, :ForwardDiff, :Zygote, :ReverseDiff)), DistSpec(NoncentralT, (1.0, 2.0), 0.5, broken=(:Tracker, :ForwardDiff, :Zygote, :ReverseDiff)), DistSpec(Normal, (), 0.5), DistSpec(Normal, (1.0,), 0.5), DistSpec(Normal, (1.0, 2.0), 0.5), DistSpec(NormalCanon, (1.0, 2.0), 0.5), DistSpec(NormalInverseGaussian, (1.0, 2.0, 1.0, 1.0), 0.5), DistSpec(Pareto, (), 1.5), DistSpec(Pareto, (1.0,), 1.5), DistSpec(Pareto, (1.0, 1.0), 1.5), DistSpec(PGeneralizedGaussian, (), 0.5), DistSpec(PGeneralizedGaussian, (1.0, 1.0, 1.0), 0.5), DistSpec(Rayleigh, (), 0.5), DistSpec(Rayleigh, (1.0,), 0.5), DistSpec(Semicircle, (1.0,), 0.5), DistSpec(SymTriangularDist, (), 0.5), DistSpec(SymTriangularDist, (1.0,), 0.5), DistSpec(SymTriangularDist, (1.0, 2.0), 0.5), DistSpec(TDist, (1.0,), 0.5), DistSpec(TriangularDist, (1.0, 3.0), 1.5), DistSpec(TriangularDist, (1.0, 3.0, 2.0), 1.5), DistSpec(Triweight, (1.0, 1.0), 1.0), DistSpec( (mu, sigma, l, u) -> truncated(Normal(mu, sigma), l, u), (0.0, 1.0, 1.0, 2.0), 1.5 ), DistSpec(Uniform, (), 0.5), DistSpec(Uniform, (alpha, alpha + beta), alpha + beta * gamma), DistSpec(TuringUniform, (), 0.5), DistSpec(TuringUniform, (alpha, alpha + beta), alpha + beta * gamma), DistSpec(VonMises, (), 1.0), DistSpec(Weibull, (), 1.0), DistSpec(Weibull, (1.0,), 1.0), DistSpec(Weibull, (1.0, 1.0), 1.0), ] # Tests cannot be executed, so cannot be checked with `@test_broken`. broken_univariate_distributions = DistSpec[ # Broken in Distributions even without autodiff DistSpec(() -> KSDist(1), (), 0.5), # `pdf` method not defined DistSpec(() -> KSOneSided(1), (), 0.5), # `pdf` method not defined DistSpec(StudentizedRange, (1.0, 2.0), 0.5), # `srdistlogpdf` method not defined # Stackoverflow caused by SpecialFunctions.besselix DistSpec(VonMises, (1.0,), 1.0), DistSpec(VonMises, (1, 1), 1), # Some tests are broken on some Julia versions, therefore it can't be checked reliably DistSpec(PoissonBinomial, ([0.5, 0.5],), [0, 0]; broken=(:Zygote,)), ] # Tests that have a `broken` field can be executed but, according to FiniteDifferences, # fail to produce the correct result. These tests can be checked with `@test_broken`. 
multivariate_distributions = DistSpec[ ## Multivariate discrete distributions # Vector x DistSpec(p -> Multinomial(2, p ./ sum(p)), (fill(0.5, 2),), [2, 0]), DistSpec(p -> Multinomial(2, p ./ sum(p)), (fill(0.5, 2),), [2 1; 0 1]), # Vector x DistSpec((m, A) -> MvNormal(m, to_posdef(A)), (a, A), b), DistSpec((m, s) -> MvNormal(m, to_posdef_diagonal(s)), (a, b), c), DistSpec((m, s) -> MvNormal(m, s^2 * I), (a, alpha), b), DistSpec(A -> MvNormal(to_posdef(A)), (A,), a), DistSpec(s -> MvNormal(to_posdef_diagonal(s)), (a,), b), DistSpec(s -> MvNormal(zeros(dim), s^2 * I), (alpha,), a), DistSpec((m, A) -> TuringMvNormal(m, to_posdef(A)), (a, A), b), DistSpec((m, s) -> TuringMvNormal(m, to_posdef_diagonal(s)), (a, b), c), DistSpec((m, s) -> TuringMvNormal(m, s^2 * I), (a, alpha), b), DistSpec(A -> TuringMvNormal(to_posdef(A)), (A,), a), DistSpec(s -> TuringMvNormal(to_posdef_diagonal(s)), (a,), b), DistSpec(s -> TuringMvNormal(zeros(dim), s^2 * I), (alpha,), a), DistSpec((m, A) -> MvLogNormal(m, to_posdef(A)), (a, A), b, to_positive), DistSpec((m, s) -> MvLogNormal(m, to_posdef_diagonal(s)), (a, b), c, to_positive), DistSpec((m, s) -> MvLogNormal(m, s^2 * I), (a, alpha), b, to_positive), DistSpec(A -> MvLogNormal(to_posdef(A)), (A,), a, to_positive), DistSpec(s -> MvLogNormal(to_posdef_diagonal(s)), (a,), b, to_positive), DistSpec(s -> MvLogNormal(zeros(dim), s^2 * I), (alpha,), a, to_positive), DistSpec(alpha -> Dirichlet(to_positive(alpha)), (a,), b, to_simplex), # Matrix case DistSpec((m, A) -> MvNormal(m, to_posdef(A)), (a, A), B), DistSpec((m, s) -> MvNormal(m, to_posdef_diagonal(s)), (a, b), A), DistSpec((m, s) -> MvNormal(m, s^2 * I), (a, alpha), A), DistSpec(A -> MvNormal(to_posdef(A)), (A,), B), DistSpec(s -> MvNormal(to_posdef_diagonal(s)), (a,), A), DistSpec(s -> MvNormal(zeros(dim), s^2 * I), (alpha,), A), DistSpec((m, A) -> TuringMvNormal(m, to_posdef(A)), (a, A), B), DistSpec((m, s) -> TuringMvNormal(m, to_posdef_diagonal(s)), (a, b), A), DistSpec((m, s) -> TuringMvNormal(m, s^2 * I), (a, alpha), A), DistSpec(A -> TuringMvNormal(to_posdef(A)), (A,), B), DistSpec(s -> TuringMvNormal(to_posdef_diagonal(s)), (a,), A), DistSpec(s -> TuringMvNormal(zeros(dim), s^2 * I), (alpha,), A), DistSpec((m, A) -> MvLogNormal(m, to_posdef(A)), (a, A), B, to_positive), DistSpec((m, s) -> MvLogNormal(m, to_posdef_diagonal(s)), (a, b), A, to_positive), DistSpec((m, s) -> MvLogNormal(m, s^2 * I), (a, alpha), A, to_positive), DistSpec(A -> MvLogNormal(to_posdef(A)), (A,), B, to_positive), DistSpec(s -> MvLogNormal(to_posdef_diagonal(s)), (a,), A, to_positive), DistSpec(s -> MvLogNormal(zeros(dim), s^2 * I), (alpha,), A, to_positive), DistSpec(alpha -> Dirichlet(to_positive(alpha)), (a,), A, to_simplex), ] # Tests cannot be executed, so cannot be checked with `@test_broken`. 
broken_multivariate_distributions = DistSpec[ # Dispatch error DistSpec((m, A) -> MvNormalCanon(m, to_posdef(A)), (a, A), b), DistSpec((m, p) -> MvNormalCanon(m, to_posdef_diagonal(p)), (a, b), c), DistSpec((m, p) -> MvNormalCanon(m, p^2 * I), (a, alpha), b), DistSpec(A -> MvNormalCanon(to_posdef(A)), (A,), a), DistSpec(p -> MvNormalCanon(to_posdef_diagonal(p)), (a,), b), DistSpec(p -> MvNormalCanon(zeros(dim), p^2 * I), (alpha,), a), DistSpec((m, A) -> MvNormalCanon(m, to_posdef(A)), (a, A), B), DistSpec((m, p) -> MvNormalCanon(m, to_posdef_diagonal(p)), (a, b), A), DistSpec((m, p) -> MvNormalCanon(m, p^2 * I), (a, alpha), A), DistSpec(A -> MvNormalCanon(to_posdef(A)), (A,), B), DistSpec(p -> MvNormalCanon(to_posdef_diagonal(p)), (a,), A), DistSpec(p -> MvNormalCanon(zeros(dim), p^2 * I), (alpha,), A), ] # Tests that have a `broken` field can be executed but, according to FiniteDifferences, # fail to produce the correct result. These tests can be checked with `@test_broken`. matrixvariate_distributions = DistSpec[ # Matrix x # We should use # DistSpec((n1, n2) -> MatrixBeta(dim, n1, n2), (3.0, 3.0), A, to_beta_mat), # but the default implementation of `rand_tangent` causes a StackOverflowError # Thus we use the following workaround DistSpec((n1, n2) -> MatrixBeta(3, n1, n2), (3.0, 3.0), A, to_beta_mat), DistSpec(() -> MatrixNormal(dim, dim), (), A, to_posdef, broken=(:Zygote,)), DistSpec((df, A) -> Wishart(df, to_posdef(A)), (3.0, A), B, to_posdef), DistSpec((df, A) -> InverseWishart(df, to_posdef(A)), (3.0, A), B, to_posdef), DistSpec((df, A) -> TuringWishart(df, to_posdef(A)), (3.0, A), B, to_posdef), DistSpec((df, A) -> TuringInverseWishart(df, to_posdef(A)), (3.0, A), B, to_posdef), # Vector of matrices x # Also here we should use # DistSpec( # (n1, n2) -> MatrixBeta(dim, n1, n2), # (3.0, 3.0), # [A, B], # x -> map(to_beta_mat, x), #), # but the default implementation of `rand_tangent` causes a StackOverflowError # Thus we use the following workaround DistSpec( (n1, n2) -> MatrixBeta(3, n1, n2), (3.0, 3.0), [A, B], x -> map(to_beta_mat, x), ), DistSpec( (df, A) -> Wishart(df, to_posdef(A)), (3.0, A), [B, C], x -> map(to_posdef, x), ), DistSpec( (df, A) -> InverseWishart(df, to_posdef(A)), (3.0, A), [B, C], x -> map(to_posdef, x), ), DistSpec( (df, A) -> TuringWishart(df, to_posdef(A)), (3.0, A), [B, C], x -> map(to_posdef, x), ), DistSpec( (df, A) -> TuringInverseWishart(df, to_posdef(A)), (3.0, A), [B, C], x -> map(to_posdef, x), ), ] # Tests cannot be executed, so cannot be checked with `@test_broken`. 
broken_matrixvariate_distributions = DistSpec[ # Other # TODO different tests are broken on different combinations of backends DistSpec( (A, B, C) -> MatrixNormal(A, to_posdef(B), to_posdef(C)), (A, B, B), C, to_posdef, ), # TODO different tests are broken on different combinations of backends DistSpec( (df, A, B, C) -> MatrixTDist(df, A, to_posdef(B), to_posdef(C)), (1.0, A, B, B), C, to_posdef, ), # TODO different tests are broken on different combinations of backends DistSpec( (n1, n2, A) -> MatrixFDist(n1, n2, to_posdef(A)), (3.0, 3.0, A), B, to_posdef, ), ] @testset "Univariate distributions" begin println("\nTesting: Univariate distributions\n") for d in univariate_distributions @info "Testing: $(nameof(dist_type(d)))" test_ad(d) end end @testset "Multivariate distributions" begin println("\nTesting: Multivariate distributions\n") for d in multivariate_distributions @info "Testing: $(nameof(dist_type(d)))" test_ad(d) end # Test `filldist` and `arraydist` distributions of univariate distributions n = 2 # always use two distributions for d in univariate_distributions d.x isa Number || continue # Broken distributions D = dist_type(d) D <: Union{VonMises,TriangularDist} && continue # Skellam only fails in these tests with ReverseDiff # Ref: https://github.com/TuringLang/DistributionsAD.jl/issues/126 # PoissonBinomial fails with Zygote # Matrix case does not work with Skellam: # https://github.com/TuringLang/DistributionsAD.jl/pull/172#issuecomment-853721493 filldist_broken = if D <: Skellam ((d.broken..., :Zygote, :ReverseDiff), (d.broken..., :Zygote, :ReverseDiff)) elseif D <: PoissonBinomial ((d.broken..., :Zygote), (d.broken..., :Zygote)) elseif D <: Chernoff # Zygote is not broken with `filldist` ((), ()) else (d.broken, d.broken) end arraydist_broken = if D <: PoissonBinomial ((d.broken..., :Zygote), (d.broken..., :Zygote)) else (d.broken, d.broken) end # Create `filldist` distribution f = d.f f_filldist = (θ...,) -> filldist(f(θ...), n) d_filldist = f_filldist(d.θ...) # Create `arraydist` distribution f_arraydist = (θ...,) -> arraydist([f(θ...) for _ in 1:n]) d_arraydist = f_arraydist(d.θ...) for (i, sz) in enumerate(((n,), (n, 2))) # Matrix case doesn't work for continuous distributions for some reason # now but not too important (?!) if length(sz) == 2 && D <: ContinuousDistribution continue end # Compute compatible sample x = fill(d.x, sz) # Test AD @info "Testing: filldist($(nameof(D)), $sz)" test_ad( DistSpec( f_filldist, d.θ, x, d.xtrans; broken=filldist_broken[i], ) ) @info "Testing: arraydist($(nameof(D)), $sz)" test_ad( DistSpec( f_arraydist, d.θ, x, d.xtrans; broken=arraydist_broken[i], ) ) end end end @testset "Matrixvariate distributions" begin println("\nTesting: Matrixvariate distributions\n") for d in matrixvariate_distributions @info "Testing: $(nameof(dist_type(d)))" test_ad(d) end # Test `filldist` and `arraydist` distributions of univariate distributions n = (2, 2) # always use 2 x 2 distributions for d in univariate_distributions d.x isa Number || continue D = dist_type(d) D <: DiscreteDistribution && continue # Broken distributions D <: Union{VonMises,TriangularDist} && continue # Create `filldist` distribution f = d.f f_filldist = (θ...,) -> filldist(f(θ...), n...) # Create `arraydist` distribution # Zygote's fill definition does not like non-numbers, so we use a workaround f_arraydist = (θ...,) -> arraydist(reshape([f(θ...) 
for _ in 1:prod(n)], n)) # Matrix `x` x_mat = fill(d.x, n) # Zygote is not broken with `filldist` + Chernoff filldist_broken = D <: Chernoff ? () : d.broken # Test AD @info "Testing: filldist($(nameof(D)), $n)" test_ad( DistSpec( f_filldist, d.θ, x_mat, d.xtrans; broken=filldist_broken, ) ) @info "Testing: arraydist($(nameof(D)), $n)" test_ad( DistSpec( f_arraydist, d.θ, x_mat, d.xtrans; broken=d.broken, ) ) # Vector of matrices `x` x_vec_of_mat = [fill(d.x, n) for _ in 1:2] # Test AD @info "Testing: filldist($(nameof(D)), $n, 2)" test_ad( DistSpec( f_filldist, d.θ, x_vec_of_mat, d.xtrans; broken=filldist_broken, ) ) @info "Testing: arraydist($(nameof(D)), $n, 2)" test_ad( DistSpec( f_arraydist, d.θ, x_vec_of_mat, d.xtrans; broken=d.broken, ) ) end # test `filldist` and `arraydist` distributions of multivariate distributions n = 2 # always use two distributions for d in multivariate_distributions d.x isa AbstractVector || continue D = dist_type(d) D <: DiscreteDistribution && continue # Tests are failing for matrix covariance vectorized MvNormal if D <: Union{ MvNormal,MvLogNormal, DistributionsAD.TuringDenseMvNormal, DistributionsAD.TuringDiagMvNormal, DistributionsAD.TuringScalMvNormal, TuringMvLogNormal } any(x isa Matrix for x in d.θ) && continue end # Create `filldist` distribution f = d.f f_filldist = (θ...,) -> filldist(f(θ...), n) # Create `arraydist` distribution f_arraydist = (θ...,) -> arraydist([f(θ...) for _ in 1:n]) # Matrix `x` x_mat = repeat(d.x, 1, n) # Test AD @info "Testing: filldist($(nameof(D)), $n)" test_ad( DistSpec( f_filldist, d.θ, x_mat, d.xtrans; broken=d.broken, ) ) @info "Testing: arraydist($(nameof(D)), $n)" test_ad( DistSpec( f_arraydist, d.θ, x_mat, d.xtrans; broken=d.broken, ) ) # Vector of matrices `x` x_vec_of_mat = [repeat(d.x, 1, n) for _ in 1:2] # Test AD @info "Testing: filldist($(nameof(D)), $n, 2)" test_ad( DistSpec( f_filldist, d.θ, x_vec_of_mat, d.xtrans; broken=d.broken, ) ) @info "Testing: arraydist($(nameof(D)), $n, 2)" test_ad( DistSpec( f_arraydist, d.θ, x_vec_of_mat, d.xtrans; broken=d.broken, ) ) end end end
class WelcomeController < ApplicationController theme 'triangle' layout 'landing' def index end end
<?php
namespace Admin\Controller;
use Think\Controller;
use Think\Upload;
class HmessageController extends Controller {
    public function index(){
        // Paginated listing of the host records
        $host = M('host'); // Instantiate the host model
        $count = $host->count(); // Total number of matching records
        $Page = new \Think\Page($count,4); // Instantiate the pager with the total count and the page size
        $show = $Page->show(); // Render the pagination markup
        // Run the paginated query; the limit parameters come from the Page instance
        $list = $host->limit($Page->firstRow.','.$Page->listRows)->select();
        $this->assign('hostselect',$list); // Assign the result set
        $this->assign('page',$show); // Assign the pagination output
        $this->display(); // Render the template
    }

    public function hostchange() {
        // Look up a single host by the id passed in the query string
        $id['hostid'] = $_GET['id'];
        $db = M('host');
        $select = $db->where($id)->select();
        $this->assign('hostselect', $select);
        $this->show();
    }

    public function upload() { // Update a host record, including its uploaded image
        $upload = new Upload(); // Instantiate the upload class
        $upload->maxSize = 10485760; // Maximum upload size
        $upload->exts = array('jpg', 'gif', 'png', 'jpeg','mp3','mp4','wav'); // Allowed file extensions
        $upload->savePath = ''; // Upload (sub)directory
        $info = $upload->upload(); // Upload the file
        if (!$info) { // On failure, report the upload error (error() exits)
            $this->error($upload->getError());
        }
        // On success, record the saved file path
        $_POST['hostbig'] = '/Uploads/'.$info[0]['savepath'].$info[0]['savename'];

        $host = D("host");
        $id['hostid'] = $_POST['hostid'];
        $data_arr['hostname'] = $_POST['hostname'];
        $data_arr['hostheight'] = $_POST['hostheight'];
        $data_arr['hostmarjor'] = $_POST['hostmarjor'];
        $data_arr['hostposition'] = $_POST['hostposition'];
        $data_arr['hostbig'] = $_POST['hostbig'];
        $host->create();
        $host->where($id)->save($data_arr); // Save the form data, including the upload path
        $this->success('Saved successfully!'); // success() renders the jump page and exits, so it must come after the save
    }
}
---
title: Requesting Parameter Values
author: Natalia Kazakova
legacyId: 117554
---
# Requesting Parameter Values
The Web Dashboard provides a built-in **Dashboard Parameters** dialog that allows end users to change dashboard parameter values. This dialog is created automatically, depending on the parameter type and visibility settings.

![Parameters_DashboardParametersDialog_Web](../../../../images/img21818.png)

To invoke the Dashboard Parameters dialog in the Web Dashboard, click the **Parameters** button (the ![Parameters_ParametersButtonWin_Title](../../../../images/img21814.png) icon) in the [dashboard title](../../dashboard-layout/dashboard-title.md).

Select the required parameter values in the Dashboard Parameters dialog and click the **Submit** button to apply the changes. To restore the default values, click the **Reset** button.
<?php namespace NGS\Symfony\Form\Type; use NGS\Symfony\Form\DataTransformer\IdentifiableToUriTransformer; use Symfony\Component\Form\Extension\Core\Type\TextType; use Symfony\Component\Form\FormBuilderInterface; use Symfony\Component\OptionsResolver\OptionsResolverInterface; /** * Extended reference field with custom Transformer for handling Identifiable type */ class ReferenceType extends TextType { public function setDefaultOptions(OptionsResolverInterface $resolver) { $resolver->setRequired(array('class')); $resolver->setOptional(array('transformer', 'dsl_client')); $resolver->setDefaults(array( 'compound' => false, 'dsl_client' => null, )); } public function buildForm(FormBuilderInterface $builder, array $options) { if(isset($options['transformer'])) $builder->addViewTransformer(new $options['transformer']); else $builder->addViewTransformer(new IdentifiableToUriTransformer($options['class'], $options['dsl_client'])); } public function getName() { return 'ngs_reference'; } public function getParent() { return 'text'; } }
// Copyright (c) Microsoft Corporation. // Licensed under the MIT License. using System; using System.Collections.Generic; using System.Threading.Tasks; using BuildXL.Cache.ContentStore.Hashing; using BuildXL.Cache.ContentStore.Interfaces.Results; using BuildXL.Cache.ContentStore.Interfaces.Sessions; using BuildXL.Cache.ContentStore.Tracing.Internal; namespace BuildXL.Cache.ContentStore.Vsts { /// <nodoc /> public interface IReadOnlyBackingContentSession : IReadOnlyContentSession { /// <summary> /// Bulk operations for pins with a specific TTL /// </summary> Task<IEnumerable<Task<PinResult>>> PinAsync(OperationContext context, IReadOnlyList<ContentHash> contentHashes, DateTime keepUntil); /// <summary> /// Expiry Cache /// </summary> BackingContentStoreExpiryCache ExpiryCache { get; } /// <summary> /// Uri Cache /// </summary> DownloadUriCache UriCache { get; } } /// <nodoc /> public interface IBackingContentSession : IReadOnlyBackingContentSession, IContentSession { } }
package com.awscherb.cardkeeper.ui.create import android.content.Context import android.view.LayoutInflater import android.view.View import android.view.ViewGroup import android.widget.TextView import androidx.recyclerview.widget.RecyclerView import com.awscherb.cardkeeper.R class CodeTypesAdapter( private val context: Context, private val onClick: (CreateType) -> Unit ) : RecyclerView.Adapter<CodeTypesViewHolder>() { override fun getItemCount() = TYPES.size override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): CodeTypesViewHolder = CodeTypesViewHolder( LayoutInflater.from(context).inflate(R.layout.adapter_code_type, parent, false) ) override fun onBindViewHolder(holder: CodeTypesViewHolder, position: Int) { holder.apply { codeName.text = TYPES[position].title itemView.setOnClickListener { onClick(TYPES[position]) } } } companion object { private val TYPES = arrayOf<CreateType>( CreateType.Aztec, CreateType.Code128, CreateType.DataMatrix, CreateType.PDF417, CreateType.QRCode ) } } class CodeTypesViewHolder(itemView: View): RecyclerView.ViewHolder(itemView) { val codeName: TextView = itemView.findViewById(R.id.adapter_code_type_name) }
// Type definitions for chai-withintoleranceof // Project: https://github.com/RmiTtro/chai-withintoleranceof // Definitions by: Matthew McEachen <https://github.com/mceachen> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped /// <reference types="chai" /> interface WithinTolerance { (expected: number, tol: number | number[], message?: string): Chai.Assertion } declare namespace Chai { interface Assertion extends LanguageChains, NumericComparison, TypeComparison { withinToleranceOf: WithinTolerance; withinTolOf: WithinTolerance; } }
require 'hydroponic_bean/protocol'

module HydroponicBean
  class Connection
    include HydroponicBean::Protocol

    attr_accessor :waiting
    alias_method :waiting?, :waiting

    def initialize
      @_read, @_write = IO.pipe
      # A connection starts as neither worker nor producer; the role is
      # flipped by the corresponding bang methods below.
      @worker = @producer = false
      @waiting = false
      HydroponicBean.add_connection(self)
    end

    def worker?; @worker; end
    def worker!; @worker = true; end
    def producer?; @producer; end
    def producer!; @producer = true; end

    def closed?
      @_write.closed?
    end

    # Necessary interface used by beaneater
    def write(command)
      parse(StringIO.new(command))
    end

    def read(bytes)
      @_read.read(bytes)
    end

    def readline
      @_read.readline
    end

    def close
      @_read.close
      @_write.close
      HydroponicBean.remove_connection(self)
    end

    protected

    def output(data)
      @_write.write(data)
    end
  end
end
package g0201_0300.s0297_serialize_and_deserialize_binary_tree;

import com_github_leetcode.TreeNode;

/*
 * Definition for a binary tree node.
 * public class TreeNode {
 *     int val;
 *     TreeNode left;
 *     TreeNode right;
 *     TreeNode(int x) { val = x; }
 * }
 */
public class Codec {
    private static final int BASE_OFFSET = 1000;
    private static final String DELIM = "*";
    private int offset;

    // Encodes a tree to a single string.
    public String serialize(TreeNode root) {
        StringBuilder sb = new StringBuilder();
        offset = 0;
        serialize(root, sb);
        return sb.toString();
    }

    public void serialize(TreeNode root, StringBuilder sb) {
        // Node values are assumed to lie in [-1000, 1000]; adding BASE_OFFSET
        // maps them to [0, 2000], so encode(val) = val + BASE_OFFSET always
        // fits into a fixed width of three hex digits.
        if (root == null) {
            sb.append(DELIM);
            return;
        }
        String s = Integer.toHexString(root.val + BASE_OFFSET);
        StringBuilder sb2 = new StringBuilder();
        // Left-pad with zeros up to the fixed width of three hex digits.
        for (int i = 0; i < 3 - s.length(); i++) {
            sb2.append('0');
        }
        sb2.append(s);
        sb.append(sb2.toString());
        serialize(root.left, sb);
        serialize(root.right, sb);
    }

    // Decodes your encoded data to tree.
    public TreeNode deserialize(String data) {
        if (data.charAt(offset) == '*') {
            offset++;
            return null;
        }
        TreeNode root =
                new TreeNode(
                        Integer.parseInt(data.substring(offset, offset + 3), 16) - BASE_OFFSET);
        offset += 3;
        root.left = deserialize(data);
        root.right = deserialize(data);
        return root;
    }
}

// Your Codec object will be instantiated and called as such:
// Codec ser = new Codec();
// Codec deser = new Codec();
// TreeNode ans = deser.deserialize(ser.serialize(root));
-- SPDX-License-Identifier: Apache-2.0 -- Licensed to the Ed-Fi Alliance under one or more agreements. -- The Ed-Fi Alliance licenses this file to you under the Apache License, Version 2.0. -- See the LICENSE and NOTICES files in the project root for more information. PRINT N'Updating [edfi].[StudentAcademicRecord]' GO ALTER TABLE [edfi].[StudentAcademicRecord] ALTER COLUMN [CumulativeEarnedCredits] [decimal] (9, 3) NULL GO ALTER TABLE [edfi].[StudentAcademicRecord] ALTER COLUMN [CumulativeAttemptedCredits] [decimal] (9, 3) NULL GO ALTER TABLE [edfi].[StudentAcademicRecord] ALTER COLUMN [SessionEarnedCredits] [decimal] (9, 3) NULL GO ALTER TABLE [edfi].[StudentAcademicRecord] ALTER COLUMN [SessionAttemptedCredits] [decimal] (9, 3) NULL GO PRINT N'Converting type to descriptor on [edfi].[StudentAcademicRecord]' GO EXEC [migration_tempdata].[sp_ConvertTypeToDescriptor] 'edfi', 'StudentAcademicRecord', 'edfi', 'CreditTypeDescriptor', 'edfi', 'CreditType', 'CumulativeAttemptedCreditTypeId', 'CumulativeAttemptedCreditTypeDescriptorId' GO EXEC [migration_tempdata].[sp_ConvertTypeToDescriptor] 'edfi', 'StudentAcademicRecord', 'edfi', 'CreditTypeDescriptor', 'edfi', 'CreditType', 'CumulativeEarnedCreditTypeId', 'CumulativeEarnedCreditTypeDescriptorId' GO EXEC [migration_tempdata].[sp_ConvertTypeToDescriptor] 'edfi', 'StudentAcademicRecord', 'edfi', 'CreditTypeDescriptor', 'edfi', 'CreditType', 'SessionAttemptedCreditTypeId', 'SessionAttemptedCreditTypeDescriptorId' GO EXEC [migration_tempdata].[sp_ConvertTypeToDescriptor] 'edfi', 'StudentAcademicRecord', 'edfi', 'CreditTypeDescriptor', 'edfi', 'CreditType', 'SessionEarnedCreditTypeId', 'SessionEarnedCreditTypeDescriptorId' GO ALTER TABLE [edfi].[StudentAcademicRecord] DROP COLUMN [CumulativeAttemptedCreditTypeId], [CumulativeEarnedCreditTypeId], [SessionAttemptedCreditTypeId], [SessionEarnedCreditTypeId] GO
import * as fonts from "../fonts"; import * as colors from "../colors"; import { color } from "csx"; import { types } from "typestyle"; import { deepMergeStyles, multi } from "../helpers"; export const button = ( params: { width?: number | string; } = {} ): types.NestedCSSProperties => ({ fontSize: "18px", fontFamily: fonts.CONDENSED, textTransform: "uppercase", fontWeight: 400, borderRadius: "100px", border: "none", display: "inline-flex", alignItems: "center", justifyContent: "center", width: params.width, padding: 10, cursor: "pointer", textAlign: "center", $nest: { ...multi(["&[disabled]", "&.disabled"], { cursor: "not-allowed", opacity: 0.7, }), "svg.small-loader": { $nest: { ...multi(["&", "rect", "path"], { fill: "rgba(255,255,255,0.9)", }), }, }, }, }); const primaryButtonActiveBg = color(colors.ORANGE).darken(0.05).toHexString(); export const primaryButton = (params: { width?: number | string; bgMode: "light" | "dark"; }): types.NestedCSSProperties => deepMergeStyles(button(params), { color: "#fff", background: colors.ORANGE, border: "1px solid rgba(0,0,0,0.0)", $nest: { "&:not([disabled]):not(.disabled):hover": { background: primaryButtonActiveBg, }, "&:not([disabled]):not(.disabled):focus": { background: primaryButtonActiveBg, border: `1px solid ${colors.LIGHT_ORANGE}`, boxShadow: `0 0 1px 1px ${colors.LIGHT_ORANGE}`, }, }, }); export const secondaryButton = (params: { width?: number | string; bgMode: "light" | "dark"; }): types.NestedCSSProperties => deepMergeStyles(button(params), { background: params.bgMode == "dark" ? "rgba(255,255,255,0.2)" : "rgba(0,0,0,0.1)", color: "rgba(0,0,0,0.6)", $nest: { "&:not([disabled]):not(.disabled):hover": { background: params.bgMode == "dark" ? "rgba(255,255,255,0.4)" : "rgba(0,0,0,0.15)", }, }, }); export const tertiaryButton = (params: { width?: number | string; bgMode: "light" | "dark"; }): types.NestedCSSProperties => deepMergeStyles(button(params), { background: params.bgMode == "light" ? colors.DARKER_BLUE : colors.LIGHT_BLUE, color: "#fff", $nest: { "&:not([disabled]):not(.disabled):hover": { background: params.bgMode == "light" ? color(colors.DARKER_BLUE).darken(0.05).toHexString() : color(colors.LIGHT_BLUE).darken(0.05).toHexString(), }, }, }); export const backLink = (params: { bgMode: "light" | "dark"; fontSize?: string; }): types.NestedCSSProperties => ({ color: params.bgMode == "dark" ? "rgba(255,255,255,0.6)" : "rgba(0,0,0,0.5)", fontFamily: fonts.CONDENSED, fontSize: params.fontSize ?? "15px", textTransform: "uppercase", display: "inline-block", padding: "0 20px 20px 20px", cursor: "pointer", borderBottom: "1px solid transparent", $nest: { "&:hover": { color: params.bgMode == "dark" ? "#fff" : "rgba(0,0,0,0.6)", borderBottom: `1px solid ${ params.bgMode == "dark" ? colors.LIGHTEST_BLUE : "rgba(0,0,0,0.6)" }`, }, }, }); export const imgLink = (params: { bgMode: "light" | "dark"; fontSize?: string; }): types.NestedCSSProperties => ({ display: "flex", alignItems: "center", color: params.bgMode == "dark" ? "rgba(255,255,255,0.6)" : "rgba(0,0,0,0.4)", padding: "0 15px 27.5px", fontSize: params.fontSize ?? 
"13.5px", borderBottom: `1px solid transparent`, $nest: { "> svg": { width: 15, height: 15, fill: "rgba(255,255,255,0.4)", marginRight: 8, }, "&:hover": { color: "rgba(255,255,255,0.9)", borderBottom: `1px solid ${colors.LIGHTEST_BLUE}`, $nest: { "> svg": { fill: "rgba(255,255,255,0.7)", }, }, }, }, }); export const modalLink = (params: { bgMode: "light" | "dark"; fontSize?: string; }): types.NestedCSSProperties => ({ position: "absolute", right: 0, top: "50%", transform: "translateY(-50%)", display: "flex", alignItems: "center", cursor: "pointer", $nest: { span: { fontSize: "12.5px", fontWeight: 500, fontFamily: fonts.MAIN, textTransform: "initial", color: params.bgMode == "dark" ? "rgba(255,255,255,0.4)" : "rgba(0,0,0,0.3)", }, svg: { width: 17, height: 17, fill: params.bgMode == "dark" ? "rgba(255,255,255,0.4)" : "rgba(0,0,0,0.3)", marginRight: 6, }, "&:hover": { color: params.bgMode == "dark" ? "rgba(255,255,255,0.7)" : "rgba(0,0,0,0.5)", $nest: { svg: { fill: params.bgMode == "dark" ? "rgba(255,255,255,0.7)" : "rgba(0,0,0,0.5)", }, }, }, }, });
package main import ( "context" "net/http" "time" // _ "net/http/pprof" "github.com/gsmcwhirter/go-util/v8/deferutil" "github.com/gsmcwhirter/go-util/v8/logging/level" "github.com/gsmcwhirter/go-util/v8/pprofsidecar" "golang.org/x/sync/errgroup" "github.com/gsmcwhirter/discord-bot-lib/v23/bot" ) func start(c config) error { // See https://discordapp.com/developers/docs/topics/permissions#permissions-bitwise-permission-flags botPermissions := 0x00000040 // add reactions botPermissions |= 0x00000400 // view channel (including read messages) botPermissions |= 0x00000800 // send messages botPermissions |= 0x00002000 // manage messages botPermissions |= 0x00004000 // embed links botPermissions |= 0x00008000 // attach files botPermissions |= 0x00010000 // read message history botPermissions |= 0x00020000 // mention everyone botPermissions |= 0x04000000 // change own nickname botIntents := 1 << 0 // guilds botIntents |= 1 << 1 // guild members botIntents |= 1 << 9 // guild messages botIntents |= 1 << 10 // guild message reactions botIntents |= 1 << 12 // direct messages botIntents |= 1 << 13 // direct message reactions deps, err := createDependencies(c, botPermissions, botIntents) if err != nil { return err } defer deps.Close() err = deps.Bot().AuthenticateAndConnect() if err != nil { return err } defer deferutil.CheckDefer(deps.Bot().Disconnect) deps.MessageHandler().ConnectToBot(deps.Bot()) ctx, cancel := context.WithCancel(context.Background()) defer cancel() mux := http.NewServeMux() if deps.promHandler != nil { mux.Handle("/metrics", deps.promHandler) } prom := &http.Server{ Addr: c.PrometheusHostPort, ReadTimeout: 5 * time.Second, WriteTimeout: 5 * time.Second, Handler: mux, } level.Info(deps.Logger()).Message("pprof hostport", "val", c.PProfHostPort) err = pprofsidecar.Run(ctx, c.PProfHostPort, nil, runAll(deps, deps.Bot(), prom)) level.Error(deps.Logger()).Err("error in start; quitting", err) return err } func runAll(deps *dependencies, b *bot.DiscordBot, srv *http.Server) func(context.Context) error { return func(ctx context.Context) error { g, ctx := errgroup.WithContext(ctx) g.Go(func() error { return b.Run(ctx) }) g.Go(serverStartFunc(deps, srv)) g.Go(serverShutdownFunc(ctx, deps, srv)) g.Go(func() error { return deps.statsHub.Start(ctx) }) return g.Wait() } } func serverStartFunc(deps *dependencies, s *http.Server) func() error { return func() error { level.Info(deps.logger).Message("starting server", "listen", s.Addr) return s.ListenAndServe() } } func serverShutdownFunc(ctx context.Context, deps *dependencies, s *http.Server) func() error { return func() error { <-ctx.Done() // something said we are done level.Info(deps.Logger()).Message("stopping server", "listen", s.Addr) shutdownCtx, cncl := context.WithTimeout(context.Background(), 2*time.Second) defer cncl() return s.Shutdown(shutdownCtx) } }
/*
 Navicat Premium Data Transfer

 Source Server         : localpg
 Source Server Type    : PostgreSQL
 Source Server Version : 140002
 Source Host           : localhost:5432
 Source Catalog        : my_blog_db
 Source Schema         : public

 Target Server Type    : PostgreSQL
 Target Server Version : 140002
 File Encoding         : 65001

 Date: 23/05/2022 01:59:07
*/

-- ----------------------------
-- Table structure for tb_admin_user
-- ----------------------------
DROP TABLE IF EXISTS "public"."tb_admin_user";
CREATE TABLE "public"."tb_admin_user" (
  "id" varchar(32) COLLATE "pg_catalog"."default" NOT NULL,
  "login_user_name" varchar(50) COLLATE "pg_catalog"."default" NOT NULL,
  "login_password" varchar(50) COLLATE "pg_catalog"."default" NOT NULL,
  "nick_name" varchar(50) COLLATE "pg_catalog"."default",
  "locked" int2,
  "role" int2,
  "avatar" varchar(255) COLLATE "pg_catalog"."default"
)
;
COMMENT ON COLUMN "public"."tb_admin_user"."id" IS 'Administrator id';
COMMENT ON COLUMN "public"."tb_admin_user"."login_user_name" IS 'Administrator login name';
COMMENT ON COLUMN "public"."tb_admin_user"."login_password" IS 'Administrator login password';
COMMENT ON COLUMN "public"."tb_admin_user"."nick_name" IS 'Administrator display nickname';
COMMENT ON COLUMN "public"."tb_admin_user"."locked" IS 'Locked flag: 0 = unlocked, 1 = locked and unable to log in';
COMMENT ON COLUMN "public"."tb_admin_user"."role" IS '0 = regular user, 1 = administrator';
COMMENT ON COLUMN "public"."tb_admin_user"."avatar" IS 'User avatar';
COMMENT ON TABLE "public"."tb_admin_user" IS 'Back-office administrator table';

-- ----------------------------
-- Table structure for tb_blog_category
-- ----------------------------
DROP TABLE IF EXISTS "public"."tb_blog_category";
CREATE TABLE "public"."tb_blog_category" (
  "relation_id" varchar(64) COLLATE "pg_catalog"."default" NOT NULL,
  "blog_id" varchar(64) COLLATE "pg_catalog"."default",
  "category_id" varchar(64) COLLATE "pg_catalog"."default",
  "create_time" timestamp(6)
)
;

-- ----------------------------
-- Table structure for tb_blog_config
-- ----------------------------
DROP TABLE IF EXISTS "public"."tb_blog_config";
CREATE TABLE "public"."tb_blog_config" (
  "config_field" varchar(20) COLLATE "pg_catalog"."default" NOT NULL,
  "config_name" varchar(50) COLLATE "pg_catalog"."default" NOT NULL,
  "config_value" varchar(200) COLLATE "pg_catalog"."default" NOT NULL,
  "create_time" timestamp(6) NOT NULL,
  "update_time" timestamp(6) NOT NULL,
  "id" varchar(32) COLLATE "pg_catalog"."default" NOT NULL
)
;
COMMENT ON COLUMN "public"."tb_blog_config"."config_field" IS 'Field name';
COMMENT ON COLUMN "public"."tb_blog_config"."config_name" IS 'Config name';
COMMENT ON COLUMN "public"."tb_blog_config"."config_value" IS 'Config value';
COMMENT ON COLUMN "public"."tb_blog_config"."create_time" IS 'Creation time';
COMMENT ON COLUMN "public"."tb_blog_config"."update_time" IS 'Update time';
COMMENT ON COLUMN "public"."tb_blog_config"."id" IS 'id';

-- ----------------------------
-- Table structure for tb_blog_info
-- ----------------------------
DROP TABLE IF EXISTS "public"."tb_blog_info";
CREATE TABLE "public"."tb_blog_info" (
  "blog_id" varchar(64) COLLATE "pg_catalog"."default" NOT NULL,
  "blog_title" varchar(200) COLLATE "pg_catalog"."default" NOT NULL,
  "blog_sub_url" varchar(200) COLLATE "pg_catalog"."default",
  "blog_preface" varchar(255) COLLATE "pg_catalog"."default",
  "blog_content" text COLLATE "pg_catalog"."default" NOT NULL,
  "blog_status" int2 NOT NULL,
  "blog_views" int8,
  "enable_comment" int2 NOT NULL,
  "is_deleted" int2 NOT NULL,
  "create_time" timestamptz(6) NOT NULL,
  "update_time" timestamptz(6)
)
;
COMMENT ON COLUMN "public"."tb_blog_info"."blog_id" IS 'Blog table primary key';
COMMENT ON COLUMN "public"."tb_blog_info"."blog_title" IS 'Blog title';
COMMENT ON COLUMN "public"."tb_blog_info"."blog_sub_url" IS 'Custom blog URL path';
COMMENT ON COLUMN "public"."tb_blog_info"."blog_preface" IS 'Blog preface';
COMMENT ON COLUMN "public"."tb_blog_info"."blog_content" IS 'Blog content';
COMMENT ON COLUMN "public"."tb_blog_info"."blog_status" IS '0 = draft, 1 = published';
COMMENT ON COLUMN "public"."tb_blog_info"."blog_views" IS 'View count';
COMMENT ON COLUMN "public"."tb_blog_info"."enable_comment" IS '0 = comments allowed, 1 = comments disabled';
COMMENT ON COLUMN "public"."tb_blog_info"."is_deleted" IS 'Deleted flag: 0 = no, 1 = yes';
COMMENT ON COLUMN "public"."tb_blog_info"."create_time" IS 'Creation time';
COMMENT ON COLUMN "public"."tb_blog_info"."update_time" IS 'Update time';
COMMENT ON TABLE "public"."tb_blog_info" IS 'Blog info table';

-- ----------------------------
-- Table structure for tb_blog_tag
-- ----------------------------
DROP TABLE IF EXISTS "public"."tb_blog_tag";
CREATE TABLE "public"."tb_blog_tag" (
  "relation_id" varchar(64) COLLATE "pg_catalog"."default" NOT NULL,
  "blog_id" varchar(64) COLLATE "pg_catalog"."default" NOT NULL,
  "tag_id" varchar(32) COLLATE "pg_catalog"."default" NOT NULL,
  "create_time" timestamp(6)
)
;
COMMENT ON COLUMN "public"."tb_blog_tag"."relation_id" IS 'Relation table id';
COMMENT ON COLUMN "public"."tb_blog_tag"."blog_id" IS 'Blog id';
COMMENT ON COLUMN "public"."tb_blog_tag"."tag_id" IS 'Tag id';
COMMENT ON COLUMN "public"."tb_blog_tag"."create_time" IS 'Creation time';
COMMENT ON TABLE "public"."tb_blog_tag" IS 'Blog-to-tag relation table';

-- ----------------------------
-- Table structure for tb_category
-- ----------------------------
DROP TABLE IF EXISTS "public"."tb_category";
CREATE TABLE "public"."tb_category" (
  "category_id" varchar(64) COLLATE "pg_catalog"."default" NOT NULL,
  "category_name" varchar(50) COLLATE "pg_catalog"."default" NOT NULL,
  "category_icon" varchar(50) COLLATE "pg_catalog"."default",
  "category_rank" int4 NOT NULL,
  "is_deleted" int2 NOT NULL,
  "create_time" timestamp(6) NOT NULL
)
;
COMMENT ON COLUMN "public"."tb_category"."category_id" IS 'Category table primary key';
COMMENT ON COLUMN "public"."tb_category"."category_name" IS 'Category name';
COMMENT ON COLUMN "public"."tb_category"."category_icon" IS 'Category icon';
COMMENT ON COLUMN "public"."tb_category"."category_rank" IS 'Category sort value; the more it is used, the larger the value';
COMMENT ON COLUMN "public"."tb_category"."is_deleted" IS 'Deleted flag: 0 = no, 1 = yes';
COMMENT ON COLUMN "public"."tb_category"."create_time" IS 'Creation time';
COMMENT ON TABLE "public"."tb_category" IS 'Blog categories';

-- ----------------------------
-- Table structure for tb_comment
-- ----------------------------
DROP TABLE IF EXISTS "public"."tb_comment";
CREATE TABLE "public"."tb_comment" (
  "comment_id" varchar(64) COLLATE "pg_catalog"."default" NOT NULL,
  "blog_id" varchar(64) COLLATE "pg_catalog"."default" NOT NULL,
  "commentator" varchar(50) COLLATE "pg_catalog"."default" NOT NULL,
  "email" varchar(100) COLLATE "pg_catalog"."default" NOT NULL,
  "website_url" varchar(50) COLLATE "pg_catalog"."default",
  "comment_body" text COLLATE "pg_catalog"."default" NOT NULL,
  "comment_create_time" timestamp(6) NOT NULL,
  "commentator_ip" varchar(20) COLLATE "pg_catalog"."default",
  "reply_body" varchar(200) COLLATE "pg_catalog"."default",
  "reply_create_time" timestamp(6),
  "comment_status" int2 NOT NULL,
  "is_deleted" int2 NOT NULL,
  "user_agent" varchar(255) COLLATE "pg_catalog"."default",
  "os" varchar(255) COLLATE "pg_catalog"."default"
)
;
COMMENT ON COLUMN "public"."tb_comment"."comment_id" IS 'Primary key';
COMMENT ON COLUMN "public"."tb_comment"."blog_id" IS 'Associated blog primary key';
COMMENT ON COLUMN "public"."tb_comment"."commentator" IS 'Commenter name';
COMMENT ON COLUMN "public"."tb_comment"."email" IS 'Commenter email';
COMMENT ON COLUMN "public"."tb_comment"."website_url" IS 'Website URL';
COMMENT ON COLUMN "public"."tb_comment"."comment_body" IS 'Comment body';
COMMENT ON COLUMN "public"."tb_comment"."comment_create_time" IS 'Comment submission time';
COMMENT ON COLUMN "public"."tb_comment"."commentator_ip" IS 'IP address at comment time';
COMMENT ON COLUMN "public"."tb_comment"."reply_body" IS 'Reply body';
COMMENT ON COLUMN "public"."tb_comment"."reply_create_time" IS 'Reply time';
COMMENT ON COLUMN "public"."tb_comment"."comment_status" IS 'Review status: 0 = pending review, 1 = approved';
COMMENT ON COLUMN "public"."tb_comment"."is_deleted" IS 'Deleted flag: 0 = not deleted, 1 = deleted';
COMMENT ON COLUMN "public"."tb_comment"."user_agent" IS 'Browser used by the commenter';
COMMENT ON COLUMN "public"."tb_comment"."os" IS 'Operating system used by the commenter';
COMMENT ON TABLE "public"."tb_comment" IS 'Comment table';

-- ----------------------------
-- Table structure for tb_img
-- ----------------------------
DROP TABLE IF EXISTS "public"."tb_img";
CREATE TABLE "public"."tb_img" (
  "id" varchar(32) COLLATE "pg_catalog"."default" NOT NULL,
  "img_name" varchar(255) COLLATE "pg_catalog"."default",
  "img_size" int4,
  "img_path" varchar(255) COLLATE "pg_catalog"."default",
  "img_url" varchar(255) COLLATE "pg_catalog"."default",
  "img_type" varchar(255) COLLATE "pg_catalog"."default",
  "media_type" varchar(255) COLLATE "pg_catalog"."default",
  "upload_time" timestamp(6),
  "thumbnail_path" varchar(255) COLLATE "pg_catalog"."default",
  "md5" varchar(255) COLLATE "pg_catalog"."default"
)
;

-- ----------------------------
-- Table structure for tb_link
-- ----------------------------
DROP TABLE IF EXISTS "public"."tb_link";
CREATE TABLE "public"."tb_link" (
  "link_id" varchar(64) COLLATE "pg_catalog"."default" NOT NULL,
  "link_type" int2 NOT NULL,
  "link_name" varchar(50) COLLATE "pg_catalog"."default" NOT NULL,
  "link_url" varchar(100) COLLATE "pg_catalog"."default" NOT NULL,
  "link_description" varchar(100) COLLATE "pg_catalog"."default" NOT NULL,
  "link_rank" int4 NOT NULL,
  "is_deleted" int2 NOT NULL,
  "create_time" timestamp(6) NOT NULL,
  "update_time" timestamp(6)
)
;
COMMENT ON COLUMN "public"."tb_link"."link_id" IS 'Link table primary key';
COMMENT ON COLUMN "public"."tb_link"."link_type" IS 'Link type: 0 = friend link, 1 = recommended, 2 = personal site';
COMMENT ON COLUMN "public"."tb_link"."link_name" IS 'Site name';
COMMENT ON COLUMN "public"."tb_link"."link_url" IS 'Site URL';
COMMENT ON COLUMN "public"."tb_link"."link_description" IS 'Site description';
COMMENT ON COLUMN "public"."tb_link"."link_rank" IS 'Used for list ordering';
COMMENT ON COLUMN "public"."tb_link"."is_deleted" IS 'Deleted flag: 0 = not deleted, 1 = deleted';
COMMENT ON COLUMN "public"."tb_link"."create_time" IS 'Creation time';
COMMENT ON TABLE "public"."tb_link" IS 'Friend links table';

-- ----------------------------
-- Table structure for tb_tag
-- ----------------------------
DROP TABLE IF EXISTS "public"."tb_tag";
CREATE TABLE "public"."tb_tag" (
  "tag_id" varchar(64) COLLATE "pg_catalog"."default" NOT NULL,
  "tag_name" varchar(100) COLLATE "pg_catalog"."default" NOT NULL,
  "is_deleted" int2 NOT NULL,
  "create_time" timestamp(6) NOT NULL
)
;
COMMENT ON COLUMN "public"."tb_tag"."tag_id" IS 'Tag table primary key';
COMMENT ON COLUMN "public"."tb_tag"."tag_name" IS 'Tag name';
COMMENT ON COLUMN "public"."tb_tag"."is_deleted" IS 'Deleted flag: 0 = no, 1 = yes';
COMMENT ON COLUMN "public"."tb_tag"."create_time" IS 'Creation time';
COMMENT ON TABLE "public"."tb_tag" IS 'Tags table';

-- ----------------------------
-- Primary Key structure for table tb_admin_user
-- ----------------------------
ALTER TABLE "public"."tb_admin_user" ADD CONSTRAINT "tb_admin_user_pkey" PRIMARY KEY ("id", "login_user_name");

-- ----------------------------
-- Primary Key structure for table tb_blog_category
-- ----------------------------
ALTER TABLE "public"."tb_blog_category" ADD CONSTRAINT "tb_blog_category_pkey" PRIMARY KEY ("relation_id");

-- ----------------------------
-- Primary Key structure for table tb_blog_config
-- ----------------------------
ALTER TABLE "public"."tb_blog_config" ADD CONSTRAINT "tb_blog_config_pkey" PRIMARY KEY ("id");

-- ----------------------------
-- Primary Key structure for table tb_blog_info
-- ----------------------------
ALTER TABLE "public"."tb_blog_info" ADD CONSTRAINT "tb_blog_info_pkey" PRIMARY KEY ("blog_id");

-- ----------------------------
-- Primary Key structure for table tb_blog_tag
-- ----------------------------
ALTER TABLE "public"."tb_blog_tag" ADD CONSTRAINT "tb_blog_tag_pkey" PRIMARY KEY ("relation_id");

-- ----------------------------
-- Primary Key structure for table tb_category
-- ----------------------------
ALTER TABLE "public"."tb_category" ADD CONSTRAINT "tb_category_pkey" PRIMARY KEY ("category_id");

-- ----------------------------
-- Primary Key structure for table tb_comment
-- ----------------------------
ALTER TABLE "public"."tb_comment" ADD CONSTRAINT "tb_comment_pkey" PRIMARY KEY ("comment_id");

-- ----------------------------
-- Primary Key structure for table tb_img
-- ----------------------------
ALTER TABLE "public"."tb_img" ADD CONSTRAINT "tb_img_pkey" PRIMARY KEY ("id");

-- ----------------------------
-- Primary Key structure for table tb_link
-- ----------------------------
ALTER TABLE "public"."tb_link" ADD CONSTRAINT "tb_link_pkey" PRIMARY KEY ("link_id");

-- ----------------------------
-- Primary Key structure for table tb_tag
-- ----------------------------
ALTER TABLE "public"."tb_tag" ADD CONSTRAINT "tb_tag_pkey" PRIMARY KEY ("tag_id");
/* * @author Philip Stutz * @author Mihaela Verman * * Copyright 2013 University of Zurich * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.signalcollect.triplerush import org.scalatest.FlatSpec import org.scalatest.ShouldMatchers import org.scalatest.prop.Checkers import com.signalcollect.triplerush.QueryParticle._ import org.scalacheck.Arbitrary._ import org.scalacheck.Arbitrary import org.scalacheck.Gen import org.scalacheck.Gen._ class QueryParticleSpec extends FlatSpec with ShouldMatchers with Checkers { val maxId = 4 lazy val genTriple: Gen[TriplePattern] = for { s <- Gen.choose(1, maxId) p <- Gen.choose(1, maxId) o <- Gen.choose(1, maxId) } yield TriplePattern(s, p, o) lazy val genSubjectPattern: Gen[TriplePattern] = for { p <- Gen.choose(1, maxId) o <- Gen.choose(1, maxId) } yield TriplePattern(-1, p, o) implicit lazy val arbTriple: Arbitrary[TriplePattern] = Arbitrary(genTriple) "QueryParticle" should "correctly encode ids" in { check( (id: Int) => { val qp = QueryParticle( patterns = Seq( TriplePattern(-1, 1, 2), TriplePattern(-1, 3, -2)), queryId = 1, numberOfSelectVariables = 2) qp.writeQueryId(id) qp.queryId == id }, minSuccessful(10)) } it should "correctly encode tickets" in { check( (tickets: Long) => { val qp = QueryParticle( patterns = Seq( TriplePattern(-1, 1, 2), TriplePattern(-1, 3, -2)), queryId = 1, numberOfSelectVariables = 2) qp.writeTickets(tickets) qp.tickets == tickets }, minSuccessful(10)) } it should "correctly encode triple patterns" in { check( (a: TriplePattern, b: TriplePattern, c: TriplePattern) => { val patterns = Array(a, b, c) val qp = QueryParticle( patterns = patterns, queryId = 1, numberOfSelectVariables = 3) qp.patterns.toList == patterns.toList }, minSuccessful(10)) } }
using System; using System.Windows.Threading; namespace Tools.Extension { static public class DispatcherExtension { static public void InvokeAction(this Dispatcher dispatcher, Action action) { dispatcher?.Invoke(new Action(() => { action?.Invoke(); })); } static public void InvokeAction<T>(this Dispatcher dispatcher, Action<T> action, T param) { dispatcher?.Invoke(new Action(() => { action?.Invoke(param); })); } } }
<?php /** * Copyright 2015, Eduardo Trujillo <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. * * This file is part of the Illuminated package */ namespace Chromabits\Illuminated\Database\Migrations; use Chromabits\Nucleus\Foundation\BaseObject; use InvalidArgumentException; /** * Class Batch. * * @author Eduardo Trujillo <[email protected]> * @package Chromabits\Illuminated\Database\Migrations */ abstract class Batch extends BaseObject { /** * Return a string array of the class names of migrations to run. * * - Order matters: Output migrations in the order they should happen. * - Values can be either the name of a migration class or an alias. * See getAliases(). * * @return string[] */ abstract public function getMigrations(); /** * Return a string to string map that matches the name of a migration with * the name of a concrete class that implements it. * * This is useful for maintaining compatibility with preexisting Laravel * migrations. Laravel migrations usually have names in the following * format: * * 2015_05_30_000000_create_jobs_table * * An example entry in this map should look like: * * '2015_05_30_000000_create_jobs_table' => 'CreateJobsTable', * * @return array */ public function getAliases() { return []; } /** * Check that the migration definition is valid. */ final public function validate() { $migrations = $this->getMigrations(); foreach ($migrations as $key => $value) { if (!is_int($key)) { throw new InvalidArgumentException( 'Migration: ' . (string) $value . ' has an invalid key' . ' format. Only use integer keys.' ); } if (!is_string($value) && !($value instanceof Batch)) { throw new InvalidArgumentException( 'Migration: ' . (string) $value . ' has an invalid value' . ' format. Allowed values: string or an instance of' . ' another Batch class.' ); } } } /** * Recursively explores the migrations tree and returns a flattened version. * * @return array */ final public function getExpanded() { $result = []; foreach ($this->getMigrations() as $migration) { if ($migration instanceof Batch) { $result = array_merge($result, $migration->getExpanded()); continue; } $result[] = $migration; } return $result; } /** * Recursively explores the migrations tree looking for aliases and returns * a flattened version. * * Collisions will be overridden by the newest definition. * * @return array */ final public function getExpandedAliases() { $aliases = $this->getAliases(); foreach ($this->getMigrations() as $migration) { if ($migration instanceof Batch) { $aliases = array_merge( $aliases, $migration->getExpandedAliases() ); } } return $aliases; } /** * Resolve any aliases for a migration. * * @param string $name * * @return mixed */ public function resolve($name) { $aliases = $this->getExpandedAliases(); if (array_key_exists($name, $aliases)) { return $aliases[$name]; } return $name; } }
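The docblocks above fully specify the `Batch` contract, but the class ships without a concrete subclass. Below is a minimal sketch of one; the names `AppBatch` and `CreateUsersTable` are hypothetical and only the `CreateJobsTable` alias example is taken from the docblock itself:

```php
<?php

use Chromabits\Illuminated\Database\Migrations\Batch;

// A hypothetical batch: ordered migrations, plus one Laravel-style alias.
class AppBatch extends Batch
{
    public function getMigrations()
    {
        return [
            'CreateUsersTable',                    // plain class name (illustrative)
            '2015_05_30_000000_create_jobs_table', // alias, resolved below
        ];
    }

    public function getAliases()
    {
        return [
            '2015_05_30_000000_create_jobs_table' => 'CreateJobsTable',
        ];
    }
}

// $batch = new AppBatch();
// $batch->validate();  // passes: integer keys, string values
// $batch->resolve('2015_05_30_000000_create_jobs_table'); // => 'CreateJobsTable'
```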
import { EMPTY_ADDRESS, ROOT_NODE } from './utils' import { Domain } from '../generated/schema' export function createDomain(id: string): Domain { let domain = new Domain(id) if(id == ROOT_NODE) { domain.owner = EMPTY_ADDRESS domain.label = '' domain.fqn = '' domain.save() } return domain } export function getDomain(id: string): Domain|null { let domain = Domain.load(id) if(domain == null) { return createDomain(id) } return domain }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;

namespace Third
{
    // Note: the attack/destroy marker (A/D) is matched case-insensitively.
    class Third
    {
        static List<Planet> listPlanets = new List<Planet>();

        static void Main()
        {
            int messagesCount = int.Parse(Console.ReadLine());
            for (int i = 0; i < messagesCount; i++)
            {
                string encryptedPlanet = Console.ReadLine();
                // The decryption key is the number of occurrences of the
                // letters of "star" (case-insensitive) in the message.
                int decryptCounter = 0;
                foreach (var s in encryptedPlanet)
                {
                    if (s == 's' || s == 'S' || s == 't' || s == 'T' || s == 'a' || s == 'A' || s == 'r' || s == 'R')
                    {
                        decryptCounter++;
                    }
                }
                // Shift every character back by the key to decrypt.
                var msgToDecrypt = new StringBuilder();
                foreach (var s in encryptedPlanet)
                {
                    msgToDecrypt.Append((char)(s - decryptCounter));
                }
                var dataForPlanet = msgToDecrypt.ToString();
                DecodePlanetInfo(dataForPlanet);
            }

            var numOfAttacked = listPlanets
                .Where(x => x.AttackOrDestroy == true)
                .Count();
            var numDestroyedPlanets = listPlanets.Count - numOfAttacked;
            var sortedAttacked = listPlanets
                .Where(x => x.AttackOrDestroy == true)
                .OrderBy(x => x.Name)
                .ToList();
            var sortedDestroyed = listPlanets
                .Where(x => x.AttackOrDestroy == false)
                .OrderBy(x => x.Name)
                .ToList();

            Console.WriteLine($"Attacked planets: {numOfAttacked}");
            if (sortedAttacked.Count > 0)
            {
                foreach (var planet in sortedAttacked)
                {
                    Console.WriteLine($"-> {planet.Name}");
                }
            }
            Console.WriteLine($"Destroyed planets: {numDestroyedPlanets}");
            if (sortedDestroyed.Count > 0)
            {
                foreach (var planet in sortedDestroyed)
                {
                    Console.WriteLine($"-> {planet.Name}");
                }
            }
        }

        private static void DecodePlanetInfo(string dataForPlanet)
        {
            // Expected shape: @Name ... :Population ... !A|D! ... ->SoldierCount
            string pattern = @"[^@\-!:>]*@([a-zA-Z]+)[^@\-!:>]*:([0-9]+)[^@\-!:>]*!([adAD])![^@\-!:>]*->([0-9]+).*";
            var rgx = new Regex(pattern);
            var match = rgx.Match(dataForPlanet);
            if (match.Groups[1].Success && match.Groups[2].Success && match.Groups[3].Success && match.Groups[4].Success)
            {
                string planetName = match.Groups[1].Value;
                long population = long.Parse(match.Groups[2].Value);
                bool attackType = match.Groups[3].Value.ToUpper() == "A"; // A = attacked, D = destroyed
                long soldierCount = long.Parse(match.Groups[4].Value);
                listPlanets.Add(new Planet(planetName, population, attackType, soldierCount));
            }
        }
    }

    public class Planet
    {
        public string Name { get; set; }
        public long Population { get; set; }
        public bool AttackOrDestroy { get; set; }
        public long SoldierCount { get; set; }

        public Planet(string name, long population, bool ifAttackTrue, long soldCount)
        {
            this.Name = name;
            this.Population = population;
            this.AttackOrDestroy = ifAttackTrue;
            this.SoldierCount = soldCount;
        }
    }
}
require "list_node" def reverse_list(head) prev_node = nil curr_node = head while curr_node != nil next_temp = curr_node.next curr_node.next = prev_node prev_node = curr_node curr_node = next_temp end prev_node end def reverse_list_1(head) prev_node = nil curr_node = head next_node = curr_node.next if curr_node while next_node != nil next_next_node = next_node.next curr_node.next = prev_node next_node.next = curr_node prev_node = curr_node curr_node = next_node next_node = next_next_node end curr_node end
helpers do
  # Methods defined here can be called from any route or view.
  def current_user
    if session[:user_id]
      @current_user ||= User.find_by_id(session[:user_id])
    end
  end

  def logged_in?
    !current_user.nil?
  end

  # Renders or redirects depending on login state: a String argument is
  # treated as a redirect path, a Symbol as a template to render.
  def logged_in_redirect(login_route, redirect_url)
    target = logged_in? ? login_route : redirect_url
    case target
    when String
      redirect target
    when Symbol
      erb target
    else
      puts "unrecognized input link"
    end
  end
end
---
author: leon
comments: true
date: 2022-01-01 10:10+00:00
layout: post
title: '[Algorithms] The classic question about vector push_back growth'
categories:
- Algorithms
tags:
- Algorithms
---

When interviewing C++ developers I often ask about the STL vector. There is a lot to dig into here, including but not limited to:

- the vector memory allocator (a classic memory-pool design)
- the vector growth strategy
- the difference between at() and operator[]
- iterator invalidation
- hand-coding a vector from scratch

## The memory allocator

The STL standard specifies the allocator interface:

```c++
// allocate raw memory
pointer allocator::allocate(size_type n, const void* = 0)
// release raw memory
void allocator::deallocate(pointer p, size_type n)
// call the object's constructor, equivalent to new((void*)p) T(x);
// new((void*)p) T(x) is "placement new", i.e. construction in pre-allocated memory
void allocator::construct(pointer p, const T& x)
// call the object's destructor, equivalent to p->~T()
void allocator::destroy(pointer p)
```

The SGI allocator implementation has at least two levels:

- The second-level allocator implements a memory pool with free lists, which effectively reduces the cost of frequent allocation/deallocation and memory fragmentation.
- The first-level allocator is a thin wrapper around malloc and free.

![](/images/cpp-allocator-design.jpg)

## Growth strategy and amortized analysis

Next, consider the common insertion scenario (the second question above): when a vector grows, why does the allocation strategy usually expand capacity by a factor of 1.5 or 2?

```c++
vector<int> v;   // start with an empty vector
v.push_back(1);  // v = [1] and capacity = 1
v.push_back(2);  // v = [1,2] and capacity = 2
v.push_back(3);  // v = [1,2,3] and capacity = 4
v.push_back(4);  // v = [1,2,3,4] and capacity = 4
v.push_back(5);  // v = [1,2,3,4,5] and capacity = 8
v.push_back(6);  // v = [1,2,3,4,5,6] and capacity = 8
v.push_back(7);  // v = [1,2,3,4,5,6,7] and capacity = 8
v.push_back(8);  // v = [1,2,3,4,5,6,7,8] and capacity = 8
v.push_back(9);  // v = [1,2,3,4,5,6,7,8,9] and capacity = 16
```

```c++
template<class T>
void vector<T>::push_back(const T& val) {
    if (capac == 0)
        reserve(1);
    else if (sz == capac)
        reserve(2 * capac); // grow by 2x
    alloc.construct(&elem[sz], val);
    ++sz;
}

template<class T>
void vector<T>::reserve(int newalloc) {
    if (newalloc <= capac) return;
    T* p = alloc.allocate(newalloc);
    for (int i = 0; i < sz; ++i)
        alloc.construct(&p[i], elem[i]); // copy
    // destruction/deallocation of the old buffer omitted ...
    elem = p;
    capac = newalloc;
}
```

We can count the element insertions and copies to work out the cost of each push_back:

|operation|capacity|cost (copy + insert)|
|-|-|-|
|push_back(1)|1|1|
|push_back(2)|2|1 + 1|
|push_back(3)|4|2 + 1|
|push_back(4)|4|1|
|push_back(5)|8|4 + 1|
|push_back(6)|8|1|
|push_back(7)|8|1|
|push_back(8)|8|1|
|push_back(9)|16|8 + 1|

Generalizing, the per-operation cost and the total cost are:

$$
C_i =
\begin{cases}
1 + 2^k & \text{if } i - 1 = 2^k \\
1 & \text{otherwise}
\end{cases}
$$

$$
T(n) = \sum_{i=1}^{n} C_i \le n + \sum_{i=0}^{\lfloor \lg n \rfloor} 2^i \le n + 2n - 1 = 3n - 1
$$

Amortized over all operations, the average cost is

$$
c = \frac{T(n)}{n} = \frac{3n - 1}{n} < 3.
$$

So the amortized cost of each operation is at most 3; in other words, each push_back costs $O(1)$ on average.

Several common techniques for amortized analysis:

- Aggregate analysis: determine an upper bound $T(n)$ on the total cost of a sequence of $n$ operations, then take the average cost as $T(n)/n$.
- The accounting method: assign each operation a charge covering both its actual execution time and its effect on future operations; typically many cheap operations are charged a little extra, accumulating credit that pays for the occasional expensive operation.
- The potential method: similar to the accounting method, but stores prepaid "potential" in the data structure and releases it when needed.

## References

- https://zh.wikipedia.org/wiki/%E5%B9%B3%E6%91%8A%E5%88%86%E6%9E%90
- https://johnysswlab.com/the-price-of-dynamic-memory-allocation/
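## Addendum: observing the growth factor

The exact growth factor is implementation-defined: libstdc++ typically doubles, while MSVC's STL grows by roughly 1.5x. The snippet below is a minimal sketch for observing your own standard library's behavior; the printed sizes and factors vary by implementation.

```c++
#include <iostream>
#include <vector>

int main() {
    std::vector<int> v;
    std::size_t last = v.capacity();
    for (int i = 0; i < 1000; ++i) {
        v.push_back(i);
        if (v.capacity() != last) {
            // A reallocation happened: print the new capacity and the
            // effective growth factor relative to the previous capacity.
            std::cout << "size=" << v.size()
                      << " capacity=" << v.capacity();
            if (last != 0)
                std::cout << " factor=" << double(v.capacity()) / last;
            std::cout << '\n';
            last = v.capacity();
        }
    }
}
```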
# Tumblr download script

It will help to download all kinds of images from Tumblr blogs.

### First install the script requirements
```sh
$ pip install -r requirements.txt
```

### Run it like this for http://quotes.tumblr.com
```sh
$ python td.py quotes
```
OR
```sh
$ ./td.py quotes
```
It will download all the pics into the current folder.

### Specify how many pages you want to download
Download images from 10 pages:
```sh
$ ./td.py quotes -n 10
```
#include <cgreen/cgreen.h> #include <cgreen/constraint_syntax_helpers.h> Ensure(failing_test_is_listed_by_xml_reporter) { assert_that(false); } Ensure(passing_test_is_listed_by_xml_reporter) { assert_that(true); } Ensure(error_message_gets_escaped_by_xml_reporter) { char *test_string = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\" ?>\n" "<example name=\"arbitrary xml content\">\n" " <content>I'm the content & have chars which have to be escaped, " "if put in outer XML.</content>\n" "</example>"; char *expected_string = "I'm not to be found!"; assert_that(test_string, contains_string(expected_string)); }
package org.apache.helix;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.util.ArrayList;
import java.util.Iterator;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import org.I0Itec.zkclient.exception.ZkNoNodeException;
import org.apache.log4j.Logger;

// TODO: move to manager.zk
/**
 * Support committing updates to data such that they are ordered for each key
 */
public class GroupCommit {
  private static final Logger LOG = Logger.getLogger(GroupCommit.class);

  private static class Queue {
    final AtomicReference<Thread> _running = new AtomicReference<Thread>();
    final ConcurrentLinkedQueue<Entry> _pending = new ConcurrentLinkedQueue<Entry>();
  }

  private static class Entry {
    final String _key;
    final ZNRecord _record;
    AtomicBoolean _sent = new AtomicBoolean(false);

    Entry(String key, ZNRecord record) {
      _key = key;
      _record = record;
    }
  }

  private final Queue[] _queues = new Queue[100];

  /**
   * Set up a group committer and its associated queues
   */
  public GroupCommit() {
    // Don't use Arrays.fill(): it would put the same Queue instance into
    // every slot, but each slot needs its own independent queue.
    for (int i = 0; i < _queues.length; ++i) {
      _queues[i] = new Queue();
    }
  }

  private Queue getQueue(String key) {
    // Mask off the sign bit so the index is always non-negative.
    return _queues[(key.hashCode() & Integer.MAX_VALUE) % _queues.length];
  }

  /**
   * Do a group update for data associated with a given key
   * @param accessor accessor with the ability to pull from the current data
   * @param options see {@link AccessOption}
   * @param key the data identifier
   * @param record the data to be merged in
   * @return true if successful, false otherwise
   */
  public boolean commit(BaseDataAccessor<ZNRecord> accessor, int options, String key,
      ZNRecord record) {
    return commit(accessor, options, key, record, false);
  }

  public boolean commit(BaseDataAccessor<ZNRecord> accessor, int options, String key,
      ZNRecord record, boolean removeIfEmpty) {
    Queue queue = getQueue(key);
    Entry entry = new Entry(key, record);

    queue._pending.add(entry);

    while (!entry._sent.get()) {
      if (queue._running.compareAndSet(null, Thread.currentThread())) {
        ArrayList<Entry> processed = new ArrayList<Entry>();
        try {
          if (queue._pending.peek() == null)
            return true;

          // remove from queue
          Entry first = queue._pending.poll();
          processed.add(first);

          String mergedKey = first._key;
          ZNRecord merged = null;

          try {
            // accessor will fall back to zk if not found in cache
            merged = accessor.get(mergedKey, null, options);
          } catch (ZkNoNodeException e) {
            // OK.
          }

          // If neither the local cache nor ZK had a value, start from the
          // record being committed; otherwise merge it into the existing value.
          if (merged == null) {
            merged = new ZNRecord(first._record);
          } else {
            merged.merge(first._record);
          }

          Iterator<Entry> it = queue._pending.iterator();
          while (it.hasNext()) {
            Entry ent = it.next();
            if (!ent._key.equals(mergedKey))
              continue;
            processed.add(ent);
            merged.merge(ent._record);
            it.remove();
          }
          if (removeIfEmpty && merged.getMapFields().isEmpty()) {
            accessor.remove(mergedKey, options);
          } else {
            accessor.set(mergedKey, merged, options);
          }
        } finally {
          queue._running.set(null);
          for (Entry e : processed) {
            synchronized (e) {
              e._sent.set(true);
              e.notify();
            }
          }
        }
      } else {
        synchronized (entry) {
          try {
            entry.wait(10);
          } catch (InterruptedException e) {
            LOG.error("Interrupted while committing change, key: " + key + ", record: " + record,
                e);
            // Restore interrupt status
            Thread.currentThread().interrupt();
            return false;
          }
        }
      }
    }
    return true;
  }
}
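A minimal usage sketch (not from the Helix sources): it assumes a connected BaseDataAccessor<ZNRecord> such as a ZkBaseDataAccessor, and the key and field names are hypothetical. Concurrent callers targeting the same key are batched into a single read-merge-write against ZooKeeper.

```java
import org.apache.helix.AccessOption;
import org.apache.helix.BaseDataAccessor;
import org.apache.helix.GroupCommit;
import org.apache.helix.ZNRecord;

public class GroupCommitExample {
  // Each caller contributes a small delta; GroupCommit merges deltas that
  // target the same key and writes them back in one set().
  static boolean reportOnline(BaseDataAccessor<ZNRecord> accessor,
      GroupCommit committer, String key, String instance) {
    ZNRecord delta = new ZNRecord(instance);
    delta.setSimpleField("status", "ONLINE");
    return committer.commit(accessor, AccessOption.PERSISTENT, key, delta);
  }
}
```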
package client

import (
	"context"
	"fmt"
	"net"
	"strings"

	"github.com/lomoval/otus-golang-project-sysmon/api"
	"google.golang.org/grpc"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/credentials/insecure"
	"google.golang.org/grpc/status"
)

// Client streams metric groups from a sysmon gRPC server and renders the
// requested group as a continuously redrawn table.
type Client struct {
	host  string
	port  string
	table *table
}

func New(host string, port string) *Client {
	return &Client{host: host, port: port}
}

// Start connects to the server, subscribes with the given averaging and
// notification intervals, and keeps printing the metric group named
// groupName until the stream ends or ctx is canceled.
func (c *Client) Start(ctx context.Context, groupName string, avgInterval int, notifyInterval int) error {
	conn, err := grpc.Dial(
		net.JoinHostPort(c.host, c.port),
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		return err
	}

	client := api.NewMetricsClient(conn)
	m, err := client.GetMetrics(ctx, &api.GetMetricsRequest{
		NotifyInterval:      int32(notifyInterval),
		AverageCalcInterval: int32(avgInterval),
	})
	if err != nil {
		s := status.Convert(err)
		if s.Code() == codes.Canceled {
			return nil
		}
		return err
	}

	fmt.Println("waiting first notification from server...")
	for {
		response, err := m.Recv()
		if err != nil {
			s := status.Convert(err)
			if s.Code() == codes.Canceled {
				return nil
			}
			return err
		}

		found := false
		for _, group := range response.GetGroups() {
			// EqualFold already compares case-insensitively.
			if strings.EqualFold(group.GetName(), groupName) {
				if err := c.printTable(group); err != nil {
					return err
				}
				found = true
				break
			}
		}
		if !found {
			fmt.Printf("no metrics group with name '%s'\n", groupName)
		}
	}
}

func (c *Client) printTable(group *api.MetricGroup) error {
	if c.table == nil {
		var err error
		c.table, err = newTable(group)
		if err != nil {
			return err
		}
	} else {
		// Move the cursor back to the start of the table so it is redrawn in place.
		for i := 0; i < c.table.height(); i++ {
			fmt.Print("\033[F")
		}
	}
	c.table.addLine(group)
	c.table.print()
	return nil
}
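For orientation, here is a minimal caller sketch (not part of the original package): the import path and the "cpu" group name are hypothetical, and the interval arguments are passed through to the server as-is.

```go
package main

import (
	"context"
	"log"
	"os/signal"
	"syscall"

	// Hypothetical import path; point this at wherever the client package lives.
	"github.com/lomoval/otus-golang-project-sysmon/internal/client"
)

func main() {
	// Cancel the stream cleanly on Ctrl+C / SIGTERM.
	ctx, cancel := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)
	defer cancel()

	c := client.New("localhost", "50051")
	// Watch the (hypothetical) "cpu" metric group with an averaging
	// interval of 5 and a notification interval of 1.
	if err := c.Start(ctx, "cpu", 5, 1); err != nil {
		log.Fatal(err)
	}
}
```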
# @transmute/did-key-test-vectors This module aggregates all test vectors associated with `did:key` packages.
require 'spec_helper' module StellarLookout RSpec.describe Operation, type: %i[model] do describe "associations" do it { is_expected.to belong_to(:ward) } it do is_expected.to belong_to(:txn). with_primary_key(:external_id). with_foreign_key(:txn_external_id) end end describe "attributes" do it { is_expected.to have_attribute(:paging_token, String) } it { is_expected.to have_attribute(:source_account, String) } it { is_expected.to have_attribute(:type, String) } it { is_expected.to have_attribute(:type_i, Integer) } it { is_expected.to have_attribute(:asset_type, String) } it { is_expected.to have_attribute(:from, String) } it { is_expected.to have_attribute(:to, String) } it { is_expected.to have_attribute(:amount, BigDecimal) } end end end